| code | apis | extract_api |
|---|---|---|
import cv2
import glob
import numpy as np
from keras.models import Sequential
from keras.layers import Conv2D, Flatten, Dense, MaxPooling2D, Dropout
from keras.utils.np_utils import to_categorical
from keras import losses, optimizers, regularizers

# Load the training images, one directory per class, normalized to [0, 1]
X_train = []
x_label = []
for img_class, directory in enumerate(['Red', 'Yellow', 'Green', 'NoTrafficLight']):
    for i, file_name in enumerate(glob.glob("simulator_lights/{}/*.png".format(directory))):
        # for i, file_name in enumerate(glob.glob("/home/andcircle/FunDriving/Term3/Final_Proj/tl_classifier_exceptsmall/real/{}/*.png".format(directory))):
        img = cv2.imread(file_name)
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        resized = cv2.resize(img, (32, 64))
        X_train.append(resized / 255.)
        x_label.append(img_class)
X_train = np.array(X_train)
x_label = np.array(x_label)
categorical_labels = to_categorical(x_label)

# Small CNN: two conv/pool/dropout stages followed by two dense layers
num_classes = 4
model = Sequential()
model.add(Conv2D(32, (3, 3), input_shape=(64, 32, 3), padding='same', activation='relu', kernel_initializer='random_uniform', kernel_regularizer=regularizers.l2(0.01)))
model.add(MaxPooling2D(2, 2))
model.add(Dropout(0.5))
model.add(Conv2D(32, (3, 3), padding='same', activation='relu', kernel_initializer='random_uniform', kernel_regularizer=regularizers.l2(0.01)))
model.add(MaxPooling2D(2, 2))
model.add(Dropout(0.5))
model.add(Flatten())
model.add(Dense(8, activation='relu', kernel_initializer='random_uniform', kernel_regularizer=regularizers.l2(0.01)))
model.add(Dense(num_classes, activation='softmax'))

loss = losses.categorical_crossentropy
optimizer = optimizers.Adam()
model.compile(loss=loss, optimizer=optimizer, metrics=['accuracy'])
model.fit(X_train, categorical_labels, batch_size=32, epochs=10, verbose=True, validation_split=0.1, shuffle=True)
score = model.evaluate(X_train, categorical_labels, verbose=0)
print(score)
model.save('tl_classifier_simulator.h5')
# model.save('tl_classifier_real.h5')
#--------------------------------------------------------------- model.summary()
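
# A minimal inference sketch (assumptions: a BGR frame from cv2 as input and
# the model loaded back from the file saved above; the class order matches
# the training directories):
# model = keras.models.load_model('tl_classifier_simulator.h5')
def classify_frame(frame, model):
    rgb = cv2.cvtColor(frame, cv2.COLOR_BGR2RGB)
    resized = cv2.resize(rgb, (32, 64)) / 255.
    probs = model.predict(np.array([resized]))[0]
    return ['Red', 'Yellow', 'Green', 'NoTrafficLight'][int(np.argmax(probs))]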
|
[
"keras.regularizers.l2",
"cv2.cvtColor",
"keras.layers.Dropout",
"keras.optimizers.Adam",
"keras.layers.Flatten",
"cv2.imread",
"keras.utils.np_utils.to_categorical",
"keras.layers.Dense",
"numpy.array",
"keras.models.Sequential",
"keras.layers.MaxPooling2D",
"cv2.resize"
] |
[((832, 849), 'numpy.array', 'np.array', (['X_train'], {}), '(X_train)\n', (840, 849), True, 'import numpy as np\n'), ((860, 877), 'numpy.array', 'np.array', (['x_label'], {}), '(x_label)\n', (868, 877), True, 'import numpy as np\n'), ((902, 925), 'keras.utils.np_utils.to_categorical', 'to_categorical', (['x_label'], {}), '(x_label)\n', (916, 925), False, 'from keras.utils.np_utils import to_categorical\n'), ((956, 968), 'keras.models.Sequential', 'Sequential', ([], {}), '()\n', (966, 968), False, 'from keras.models import Sequential\n'), ((1167, 1179), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (1174, 1179), False, 'from keras.layers import Conv2D, Flatten, Dense, MaxPooling2D, Dropout\n'), ((1353, 1365), 'keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (1360, 1365), False, 'from keras.layers import Conv2D, Flatten, Dense, MaxPooling2D, Dropout\n'), ((1614, 1631), 'keras.optimizers.Adam', 'optimizers.Adam', ([], {}), '()\n', (1629, 1631), False, 'from keras import losses, optimizers, regularizers\n'), ((1148, 1166), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (1160, 1166), False, 'from keras.layers import Conv2D, Flatten, Dense, MaxPooling2D, Dropout\n'), ((1334, 1352), 'keras.layers.MaxPooling2D', 'MaxPooling2D', (['(2)', '(2)'], {}), '(2, 2)\n', (1346, 1352), False, 'from keras.layers import Conv2D, Flatten, Dense, MaxPooling2D, Dropout\n'), ((1376, 1385), 'keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (1383, 1385), False, 'from keras.layers import Conv2D, Flatten, Dense, MaxPooling2D, Dropout\n'), ((1518, 1558), 'keras.layers.Dense', 'Dense', (['num_classes'], {'activation': '"""softmax"""'}), "(num_classes, activation='softmax')\n", (1523, 1558), False, 'from keras.layers import Conv2D, Flatten, Dense, MaxPooling2D, Dropout\n'), ((622, 643), 'cv2.imread', 'cv2.imread', (['file_name'], {}), '(file_name)\n', (632, 643), False, 'import cv2\n'), ((661, 697), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2RGB'], {}), '(img, cv2.COLOR_BGR2RGB)\n', (673, 697), False, 'import cv2\n'), ((717, 742), 'cv2.resize', 'cv2.resize', (['img', '(32, 64)'], {}), '(img, (32, 64))\n', (727, 742), False, 'import cv2\n'), ((1114, 1135), 'keras.regularizers.l2', 'regularizers.l2', (['(0.01)'], {}), '(0.01)\n', (1129, 1135), False, 'from keras import losses, optimizers, regularizers\n'), ((1300, 1321), 'keras.regularizers.l2', 'regularizers.l2', (['(0.01)'], {}), '(0.01)\n', (1315, 1321), False, 'from keras import losses, optimizers, regularizers\n'), ((1484, 1505), 'keras.regularizers.l2', 'regularizers.l2', (['(0.01)'], {}), '(0.01)\n', (1499, 1505), False, 'from keras import losses, optimizers, regularizers\n')]
|
import pytest
from openeo.capabilities import ComparableVersion
@pytest.mark.parametrize(["a", "b", "c"], [
    (ComparableVersion("1.2.3"), ComparableVersion("1.2.3"), ComparableVersion("2.3.4")),
    (ComparableVersion("1.2.3"), "1.2.3", "2.3.4"),
    ("1.2.3", ComparableVersion("1.2.3"), ComparableVersion("2.3.4")),
])
def test_comparable_version_equals(a, b, c):
    assert (a == b) is True
    assert (a == c) is False
    assert (a != b) is False
    assert (a != c) is True
    if isinstance(a, ComparableVersion):
        assert a.equals(b) is True
        assert a.equals(c) is False


@pytest.mark.parametrize("b", [
    "0.9", "1", "1.2.2",
    ComparableVersion("0.9"), ComparableVersion("1.1"),
])
def test_comparable_version_operators(b):
    a = ComparableVersion("1.2.3")
    assert (a == a) is True
    assert (a != a) is False
    assert (a > b) is True
    assert (a >= b) is True
    assert (a < b) is False
    assert (a <= b) is False
    assert (b < a) is True
    assert (b <= a) is True
    assert (b > a) is False
    assert (b >= a) is False


def test_comparable_version_right_referencing():
    v = ComparableVersion('1.2.3')
    assert v.equals('1.2.3')
    assert v.above('0')
    assert v.above('0.1')
    assert v.above('0.1.2')
    assert v.above('1.2')
    assert v.above('1.2.2')
    assert v.above('1.2.2b')
    assert v.above('1.2.3') is False
    assert v.above('1.2.20') is False
    assert v.above('1.2.4') is False
    assert v.above('1.10.4') is False
    assert v.at_least('0')
    assert v.at_least('1')
    assert v.at_least('1.1')
    assert v.at_least('1.10') is False
    assert v.at_least('1.2')
    assert v.at_least('1.02')
    assert v.at_least('1.2.2')
    assert v.at_least('1.2.3')
    assert v.at_least('1.2.3a') is False
    assert v.at_least('1.2.4') is False
    assert v.at_least('1.3') is False
    assert v.at_least('2') is False
    assert v.below('2')
    assert v.below('1.3')
    assert v.below('1.2.4')
    assert v.below('1.2.3b')
    assert v.below('1.2.3') is False
    assert v.below('1.2') is False
    assert v.at_most('2')
    assert v.at_most('1.3')
    assert v.at_most('1.2.3c')
    assert v.at_most('1.2.3')
    assert v.at_most('1.02.03')
    assert v.at_most('1.2.2b') is False
    assert v.at_most('1.2') is False
    assert v.at_most('1.10')
    assert v.above(ComparableVersion('1.2'))
    assert v.at_least(ComparableVersion('1.2.3a')) is False
    assert v.at_most(ComparableVersion('1.02.03'))


def test_comparable_version_left_referencing():
    v = ComparableVersion("1.2.3")
    assert v.or_higher("1.2.2") is False
    assert v.or_higher("1.2.3") is True
    assert v.or_higher("1.2.4") is True
    assert v.or_lower("1.2.2") is True
    assert v.or_lower("1.2.3") is True
    assert v.or_lower("1.2.4") is False
    assert v.accept_higher("1.2.2") is False
    assert v.accept_higher("1.2.3") is False
    assert v.accept_higher("1.2.4") is True
    assert v.accept_lower("1.2.2") is True
    assert v.accept_lower("1.2.3") is False
    assert v.accept_lower("1.2.4") is False
|
[
"openeo.capabilities.ComparableVersion"
] |
[((765, 791), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.2.3"""'], {}), "('1.2.3')\n", (782, 791), False, 'from openeo.capabilities import ComparableVersion\n'), ((1132, 1158), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.2.3"""'], {}), "('1.2.3')\n", (1149, 1158), False, 'from openeo.capabilities import ComparableVersion\n'), ((2544, 2570), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.2.3"""'], {}), "('1.2.3')\n", (2561, 2570), False, 'from openeo.capabilities import ComparableVersion\n'), ((660, 684), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""0.9"""'], {}), "('0.9')\n", (677, 684), False, 'from openeo.capabilities import ComparableVersion\n'), ((686, 710), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.1"""'], {}), "('1.1')\n", (703, 710), False, 'from openeo.capabilities import ComparableVersion\n'), ((2349, 2373), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.2"""'], {}), "('1.2')\n", (2366, 2373), False, 'from openeo.capabilities import ComparableVersion\n'), ((2456, 2484), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.02.03"""'], {}), "('1.02.03')\n", (2473, 2484), False, 'from openeo.capabilities import ComparableVersion\n'), ((116, 142), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.2.3"""'], {}), "('1.2.3')\n", (133, 142), False, 'from openeo.capabilities import ComparableVersion\n'), ((144, 170), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.2.3"""'], {}), "('1.2.3')\n", (161, 170), False, 'from openeo.capabilities import ComparableVersion\n'), ((172, 198), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""2.3.4"""'], {}), "('2.3.4')\n", (189, 198), False, 'from openeo.capabilities import ComparableVersion\n'), ((206, 232), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.2.3"""'], {}), "('1.2.3')\n", (223, 232), False, 'from openeo.capabilities import ComparableVersion\n'), ((267, 293), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.2.3"""'], {}), "('1.2.3')\n", (284, 293), False, 'from openeo.capabilities import ComparableVersion\n'), ((295, 321), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""2.3.4"""'], {}), "('2.3.4')\n", (312, 321), False, 'from openeo.capabilities import ComparableVersion\n'), ((2397, 2424), 'openeo.capabilities.ComparableVersion', 'ComparableVersion', (['"""1.2.3a"""'], {}), "('1.2.3a')\n", (2414, 2424), False, 'from openeo.capabilities import ComparableVersion\n')]
|
import logging
import discord
import pytz
from discord.ext import commands, tasks
from datetime import datetime
from helpers.api_key import discord_key, owner_id, error_channel_id
from helpers.descriptions import bot_description, wrong_message
from helpers.ge import MissingQuery, NoResults
from helpers.hiscore import UserNotFound, MissingUsername, HiscoreUnavailable
from helpers.tracker import NoDataPoints, NoUsername
from helpers.version import get_version
logging.basicConfig(filename='bot.log',
                    format='%(asctime)s:%(levelname)s:%(message)s',
                    level=logging.INFO)


def get_prefix(client, message):
    prefixes = ['!blue ', '!b ']
    if message.content.startswith("!b ") or message.content.startswith("!blue "):
        logging.info(f'[{message.guild}/{message.channel}] {message.author}: {message.content}')
    return commands.when_mentioned_or(*prefixes)(client, message)


bot = commands.Bot(command_prefix=get_prefix,
                   description=bot_description,
                   owner_id=owner_id,
                   case_insensitive=True)
bot.remove_command('help')
cogs = ['cogs.links', 'cogs.levels', 'cogs.calculators', 'cogs.scores', 'cogs.embed_help.help',
        'cogs.refresh']


@bot.event
async def on_ready():
    logging.info(f'Logged on as {bot.user.name}!')
    for cog in cogs:
        logging.info(f'Loading {cog}')
        bot.load_extension(cog)
    logging.info("Cogs loaded")
    print(f'Up and running as {bot.user.name}')
    return
@bot.event
async def on_message(message):
    # Handles when the user doesn't type a command
    if message.content == '!b' or message.content == '!blue':
        channel = message.channel
        embed = discord.Embed(title='!blue', description='Type `!b help` for a list of commands.')
        await channel.send(embed=embed)
        return
    # Pass message onto the rest of the commands
    await bot.process_commands(message)
@bot.event
async def on_command_error(ctx, error):
""" Simply replies with error message, shows error message if I make an error """
logging.error(f'{type(error).__name__}: {error}')
error = getattr(error, 'original', error)
msg = ''
# Exceptions
if isinstance(error, discord.ext.commands.errors.CommandNotFound):
pass
elif isinstance(error, UserNotFound) or isinstance(error, NoDataPoints) or isinstance(error, NoResults)\
or isinstance(error, HiscoreUnavailable) or isinstance(error, NoUsername):
msg += f'{error}\n'
elif isinstance(error, MissingUsername) or isinstance(error, MissingQuery):
msg += f'{error}\n' \
f'Type `!b help {ctx.command}` to see the usage for the command.\n'
elif isinstance(error, commands.MissingRequiredArgument):
msg += f'Not all of the command\'s arguments were met.\n' \
f'You are missing the `{error.param}` argument.\n' \
f'Type `!b help {ctx.command}` to see the usage for the command.\n'
# All other errors
else:
msg += f'To see all commands type `!b help`\n' \
f'Owner has been notified of error. '
# Reply with error message
if msg != '':
embed = discord.Embed(title=f'{bot.user.name}')
embed.add_field(name="Something went wrong", value=msg)
embed.add_field(name="If you continue to have problems",
value=f'Type `!b bug` to be linked to the issues page',
inline=False)
await ctx.send(embed=embed)
# Log the error in the errors channel
error_channel = bot.get_channel(error_channel_id)
time = datetime.now()
timezone = pytz.timezone("America/Los_Angeles")
pst_time = timezone.localize(time)
embed = discord.Embed(title=f'{bot.user.name}', timestamp=pst_time)
embed.add_field(name="Location", value=f'{ctx.guild}/{ctx.channel.mention} - {ctx.author}')
embed.add_field(name="User input", value=f'`{ctx.message.content}`', inline=False)
embed.add_field(name="Error message", value=f'```{type(error).__name__}: {error}```', inline=False)
embed.add_field(name="Cause (if any)", value=f'{error.__cause__}', inline=False)
await error_channel.send(embed=embed)
return
@bot.command(name='reload',
             description='Reloads bot',
             aliases=['-r'],
             hidden=True,
             case_insensitive=True)
async def reload(ctx):
    """ Reloads cogs while bot is still online """
    if ctx.author.id != owner_id:
        return
    for cog in cogs:
        bot.unload_extension(cog)
        logging.info(f'Reloading {cog}')
        bot.load_extension(cog)
    await ctx.send("Cogs reloaded")
@bot.command(name='version',
             description='Bot version',
             aliases=['--version', '-v'],
             hidden=True,
             case_insensitive=True)
async def version_command(ctx):
    """ Shows bot version number """
    version = get_version()
    embed = discord.Embed(title="!blue", description="Old School Runescape bot written in Python")
    embed.add_field(name="Version Number", value=version)
    embed.add_field(name="Recent changes", value=f'https://github.com/zedchance/blues_bot.py/commits/master')
    await ctx.send(f'{ctx.message.author.mention}', embed=embed)
    return
@bot.command(name='bug',
             description='Links to the issue page for the bot',
             aliases=['issue'],
             hidden=True,
             case_insensitive=True)
async def bug_command(ctx):
    """ Use to submit bugs/issues """
    embed = discord.Embed(title="Bugs/issues", description="Use the following link to submit issues with the bot")
    embed.add_field(name="Link", value=f'https://github.com/zedchance/blues_bot.py/issues')
    await ctx.send(f'{ctx.message.author.mention}', embed=embed)
    return
@bot.command(name='vote',
             description='Upvote/invite the bot on top.gg',
             aliases=['invite'],
             hidden=True,
             case_insensitive=True)
async def vote_command(ctx):
    """ Links to the bot's top.gg page """
    embed = discord.Embed(title="**!blue**", description="Vote for the bot or invite to your own channel!")
    embed.add_field(name="Link", value=f'https://top.gg/bot/532782540897910784')
    await ctx.send(f'{ctx.message.author.mention}', embed=embed)
    return
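
# A minimal cog sketch (illustrative assumption -- the real cogs live in the
# cogs/ package as separate extensions; this inline version just shows the
# discord.py 1.x shape that bot.load_extension() expects, i.e. a Cog class
# plus a module-level setup() that calls bot.add_cog()):
class Repo(commands.Cog):
    def __init__(self, bot):
        self.bot = bot

    @commands.command(name='repo', hidden=True)
    async def repo(self, ctx):
        """ Links to the bot's source repository """
        await ctx.send('https://github.com/zedchance/blues_bot.py')

bot.add_cog(Repo(bot))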
bot.run(discord_key, bot=True, reconnect=True)
|
[
"discord.ext.commands.when_mentioned_or",
"logging.basicConfig",
"discord.Embed",
"helpers.version.get_version",
"logging.info",
"pytz.timezone",
"discord.ext.commands.Bot",
"datetime.datetime.now"
] |
[((464, 576), 'logging.basicConfig', 'logging.basicConfig', ([], {'filename': '"""bot.log"""', 'format': '"""%(asctime)s:%(levelname)s:%(message)s"""', 'level': 'logging.INFO'}), "(filename='bot.log', format=\n '%(asctime)s:%(levelname)s:%(message)s', level=logging.INFO)\n", (483, 576), False, 'import logging\n'), ((933, 1047), 'discord.ext.commands.Bot', 'commands.Bot', ([], {'command_prefix': 'get_prefix', 'description': 'bot_description', 'owner_id': 'owner_id', 'case_insensitive': '(True)'}), '(command_prefix=get_prefix, description=bot_description,\n owner_id=owner_id, case_insensitive=True)\n', (945, 1047), False, 'from discord.ext import commands, tasks\n'), ((1288, 1334), 'logging.info', 'logging.info', (['f"""Logged on as {bot.user.name}!"""'], {}), "(f'Logged on as {bot.user.name}!')\n", (1300, 1334), False, 'import logging\n'), ((1431, 1458), 'logging.info', 'logging.info', (['"""Cogs loaded"""'], {}), "('Cogs loaded')\n", (1443, 1458), False, 'import logging\n'), ((3641, 3655), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3653, 3655), False, 'from datetime import datetime\n'), ((3671, 3707), 'pytz.timezone', 'pytz.timezone', (['"""America/Los_Angeles"""'], {}), "('America/Los_Angeles')\n", (3684, 3707), False, 'import pytz\n'), ((3759, 3818), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""{bot.user.name}"""', 'timestamp': 'pst_time'}), "(title=f'{bot.user.name}', timestamp=pst_time)\n", (3772, 3818), False, 'import discord\n'), ((4950, 4963), 'helpers.version.get_version', 'get_version', ([], {}), '()\n', (4961, 4963), False, 'from helpers.version import get_version\n'), ((4976, 5067), 'discord.Embed', 'discord.Embed', ([], {'title': '"""!blue"""', 'description': '"""Old School Runescape bot written in Python"""'}), "(title='!blue', description=\n 'Old School Runescape bot written in Python')\n", (4989, 5067), False, 'import discord\n'), ((5570, 5677), 'discord.Embed', 'discord.Embed', ([], {'title': '"""Bugs/issues"""', 'description': '"""Use the following link to submit issues with the bot"""'}), "(title='Bugs/issues', description=\n 'Use the following link to submit issues with the bot')\n", (5583, 5677), False, 'import discord\n'), ((6108, 6208), 'discord.Embed', 'discord.Embed', ([], {'title': '"""**!blue**"""', 'description': '"""Vote for the bot or invite to your own channel!"""'}), "(title='**!blue**', description=\n 'Vote for the bot or invite to your own channel!')\n", (6121, 6208), False, 'import discord\n'), ((770, 863), 'logging.info', 'logging.info', (['f"""[{message.guild}/{message.channel}] {message.author}: {message.content}"""'], {}), "(\n f'[{message.guild}/{message.channel}] {message.author}: {message.content}')\n", (782, 863), False, 'import logging\n'), ((870, 907), 'discord.ext.commands.when_mentioned_or', 'commands.when_mentioned_or', (['*prefixes'], {}), '(*prefixes)\n', (896, 907), False, 'from discord.ext import commands, tasks\n'), ((1364, 1394), 'logging.info', 'logging.info', (['f"""Loading {cog}"""'], {}), "(f'Loading {cog}')\n", (1376, 1394), False, 'import logging\n'), ((1725, 1812), 'discord.Embed', 'discord.Embed', ([], {'title': '"""!blue"""', 'description': '"""Type `!b help` for a list of commands."""'}), "(title='!blue', description=\n 'Type `!b help` for a list of commands.')\n", (1738, 1812), False, 'import discord\n'), ((3211, 3250), 'discord.Embed', 'discord.Embed', ([], {'title': 'f"""{bot.user.name}"""'}), "(title=f'{bot.user.name}')\n", (3224, 3250), False, 'import discord\n'), ((4591, 4623), 'logging.info', 
'logging.info', (['f"""Reloading {cog}"""'], {}), "(f'Reloading {cog}')\n", (4603, 4623), False, 'import logging\n')]
|
import struct
import os
import logging
from yad2.utils import Sprite
from yad2.encoders import Format40, Format80
class Wsa:
    def __init__(self, filename):
        self.logger = logging.getLogger('root')
        self.filename = filename
        self.filesize = os.path.getsize(self.filename)

    def extract(self):
        f = open(self.filename, "rb")
        # Header: five consecutive uint16 fields
        numframes = struct.unpack('H', f.read(2))[0]
        width = struct.unpack('H', f.read(2))[0]
        height = struct.unpack('H', f.read(2))[0]
        delta = struct.unpack('H', f.read(2))[0]
        flags = struct.unpack('H', f.read(2))[0]
        self.logger.debug("numframes " + str(numframes) + ", width " + str(width) + ", height " + str(height) + ", delta " + str(delta) + ", flags " + str(flags) + ", fs " + str(self.filesize))
        # Frame offset table: numframes + 2 uint32 entries
        offsets = []
        for i in range(0, numframes + 2):
            offsets.append(struct.unpack('I', f.read(4))[0])
        # Frames are delta-encoded against the previous frame, starting
        # from an all-zero buffer
        base = "".join(chr(x) for x in [0] * (width * height))
        images = []
        for i in range(0, len(offsets) - 2):
            length = int(offsets[i + 1] - offsets[i])
            f.seek(offsets[i])
            data = f.read(length)
            stage1 = Format80(data).decode()
            points = Format40(base, stage1).decode()
            image = Sprite(width, height)
            for index, pixel in enumerate(points):
                if index > width * height - 1:
                    break
                color = struct.unpack('B', pixel)[0]
                image.putpixel(index % width, int(index / width), color)
            images.append((str(i), image))
            base = points
        self.logger.debug("offsets " + str(offsets))
        f.close()
        return images
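
# A compact header-reading sketch (an alternative to the field-by-field reads
# above; assumes the same five consecutive little-endian uint16 fields):
def read_wsa_header(f):
    numframes, width, height, delta, flags = struct.unpack('<5H', f.read(10))
    return numframes, width, height, delta, flags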
|
[
"yad2.utils.Sprite",
"os.path.getsize",
"yad2.encoders.Format80",
"struct.unpack",
"yad2.encoders.Format40",
"logging.getLogger"
] |
[((222, 247), 'logging.getLogger', 'logging.getLogger', (['"""root"""'], {}), "('root')\n", (239, 247), False, 'import logging\n'), ((305, 335), 'os.path.getsize', 'os.path.getsize', (['self.filename'], {}), '(self.filename)\n', (320, 335), False, 'import os\n'), ((1358, 1379), 'yad2.utils.Sprite', 'Sprite', (['width', 'height'], {}), '(width, height)\n', (1364, 1379), False, 'from yad2.utils import Sprite\n'), ((1260, 1274), 'yad2.encoders.Format80', 'Format80', (['data'], {}), '(data)\n', (1268, 1274), False, 'from yad2.encoders import Format40, Format80\n'), ((1305, 1327), 'yad2.encoders.Format40', 'Format40', (['base', 'stage1'], {}), '(base, stage1)\n', (1313, 1327), False, 'from yad2.encoders import Format40, Format80\n'), ((1528, 1553), 'struct.unpack', 'struct.unpack', (['"""B"""', 'pixel'], {}), "('B', pixel)\n", (1541, 1553), False, 'import struct\n')]
|
import allure
import pytest
from pages.loganalysiscenter_page.db2_collection import LogHome
from common.po_base import El
from common.po_base import Page
from pages.IndexPage import IndexPage
from common.page_manage import pm
from pages.LoginPage import LoginPage
from time import sleep
from pages.sensor_page.CreateEsPage import CreateEsPage
@pytest.fixture(scope="module")
def sign_in(login_as):
page=login_as("yuchengcheng5","yu123456")
page.click_platform("日志精析中心")
yield page
@allure.feature("db2数据库采集")
class TestLogHome(object):
@allure.title("创建db2采集任务")
def test_home(self,sign_in):
page = LogHome(sign_in)
page.click_sidebar_element('全局')
page.click_sidebar_element("数据接入")
page.switch_to_frame()
page.click_element("//a[text()='数据接入']")
page.click_element("//button[text()=' 创建']")
page.click_element("//li[text()='关系型数据库']")
page.click_element("//span[text()='IBM DB2']")
page.key1.send_keys("192.168.31.53")
page.key2.send_keys("50000")
page.key3.send_keys("sample")
page.key4.send_keys("db2inst1")
page.key5.send_keys("Db2@123")
name=page.random_str()
page.key6.send_keys(name)
page.click_css_element(".select .el-input__inner") # 点击预估日流量
page.click_element("//li/span[text()='200']") # 点击200G
page.click_css_element(".select .el-input__inner")
page.click_element("//li/span[text()='1']") # 点击1G
sleep(1)
page.click_element("//button[text()='连接测试']")
sleep(1)
page.key7.send_keys('select * from 基本信息')
page.click_element("//button[text()='数据预览']")
sleep(2)
page.click_element("//span[text()='全量同步']")
page.click_element("//input[@placeholder='选择关键字']")
page.click_element("//ul[@id='el-autocomplete-9']/li[1]")
page.key8.send_keys('*/1 * * * *')
page.click_element("//button[text()='保存']")
sleep(3)
assert page.return_name(name) !='',"任务不存在"
sleep(3)
# if __name__ =="__main__":
# pytest.main("testcase/test_loganalysiscenter/test_mysql_collection.py","-s")
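
# A sketch for replacing the fixed sleep() calls with explicit waits
# (assumption: the page object exposes the underlying Selenium driver as
# `page.driver`):
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC

def wait_clickable(page, xpath, timeout=10):
    return WebDriverWait(page.driver, timeout).until(
        EC.element_to_be_clickable((By.XPATH, xpath)))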
|
[
"pages.loganalysiscenter_page.db2_collection.LogHome",
"pytest.fixture",
"time.sleep",
"allure.title",
"allure.feature"
] |
[((344, 374), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""module"""'}), "(scope='module')\n", (358, 374), False, 'import pytest\n'), ((494, 520), 'allure.feature', 'allure.feature', (['"""db2数据库采集"""'], {}), "('db2数据库采集')\n", (508, 520), False, 'import allure\n'), ((553, 578), 'allure.title', 'allure.title', (['"""创建db2采集任务"""'], {}), "('创建db2采集任务')\n", (565, 578), False, 'import allure\n'), ((627, 643), 'pages.loganalysiscenter_page.db2_collection.LogHome', 'LogHome', (['sign_in'], {}), '(sign_in)\n', (634, 643), False, 'from pages.loganalysiscenter_page.db2_collection import LogHome\n'), ((1493, 1501), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (1498, 1501), False, 'from time import sleep\n'), ((1564, 1572), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (1569, 1572), False, 'from time import sleep\n'), ((1685, 1693), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (1690, 1693), False, 'from time import sleep\n'), ((1975, 1983), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (1980, 1983), False, 'from time import sleep\n'), ((2043, 2051), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (2048, 2051), False, 'from time import sleep\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['EnvironmentV3Args', 'EnvironmentV3']
@pulumi.input_type
class EnvironmentV3Args:
def __init__(__self__, *,
resource_group_name: pulumi.Input[str],
subnet_id: pulumi.Input[str],
allow_new_private_endpoint_connections: Optional[pulumi.Input[bool]] = None,
cluster_settings: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]] = None,
dedicated_host_count: Optional[pulumi.Input[int]] = None,
internal_load_balancing_mode: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
zone_redundant: Optional[pulumi.Input[bool]] = None):
"""
The set of arguments for constructing a EnvironmentV3 resource.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
:param pulumi.Input[str] subnet_id: The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
:param pulumi.Input[bool] allow_new_private_endpoint_connections: Should new Private Endpoint Connections be allowed. Defaults to `true`.
:param pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]] cluster_settings: Zero or more `cluster_setting` blocks as defined below.
:param pulumi.Input[int] dedicated_host_count: This ASEv3 should use dedicated Hosts. Possible values are `2`. Changing this forces a new resource to be created.
:param pulumi.Input[str] internal_load_balancing_mode: Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
:param pulumi.Input[str] name: The name of the App Service Environment. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
"""
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "subnet_id", subnet_id)
if allow_new_private_endpoint_connections is not None:
pulumi.set(__self__, "allow_new_private_endpoint_connections", allow_new_private_endpoint_connections)
if cluster_settings is not None:
pulumi.set(__self__, "cluster_settings", cluster_settings)
if dedicated_host_count is not None:
pulumi.set(__self__, "dedicated_host_count", dedicated_host_count)
if internal_load_balancing_mode is not None:
pulumi.set(__self__, "internal_load_balancing_mode", internal_load_balancing_mode)
if name is not None:
pulumi.set(__self__, "name", name)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if zone_redundant is not None:
pulumi.set(__self__, "zone_redundant", zone_redundant)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> pulumi.Input[str]:
"""
The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "subnet_id")
@subnet_id.setter
def subnet_id(self, value: pulumi.Input[str]):
pulumi.set(self, "subnet_id", value)
@property
@pulumi.getter(name="allowNewPrivateEndpointConnections")
def allow_new_private_endpoint_connections(self) -> Optional[pulumi.Input[bool]]:
"""
Should new Private Endpoint Connections be allowed. Defaults to `true`.
"""
return pulumi.get(self, "allow_new_private_endpoint_connections")
@allow_new_private_endpoint_connections.setter
def allow_new_private_endpoint_connections(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "allow_new_private_endpoint_connections", value)
@property
@pulumi.getter(name="clusterSettings")
def cluster_settings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]]:
"""
Zero or more `cluster_setting` blocks as defined below.
"""
return pulumi.get(self, "cluster_settings")
@cluster_settings.setter
def cluster_settings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]]):
pulumi.set(self, "cluster_settings", value)
@property
@pulumi.getter(name="dedicatedHostCount")
def dedicated_host_count(self) -> Optional[pulumi.Input[int]]:
"""
This ASEv3 should use dedicated Hosts. Possible values are `2`. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "dedicated_host_count")
@dedicated_host_count.setter
def dedicated_host_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "dedicated_host_count", value)
@property
@pulumi.getter(name="internalLoadBalancingMode")
def internal_load_balancing_mode(self) -> Optional[pulumi.Input[str]]:
"""
Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
"""
return pulumi.get(self, "internal_load_balancing_mode")
@internal_load_balancing_mode.setter
def internal_load_balancing_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "internal_load_balancing_mode", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the App Service Environment. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="zoneRedundant")
def zone_redundant(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "zone_redundant")
@zone_redundant.setter
def zone_redundant(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "zone_redundant", value)
@pulumi.input_type
class _EnvironmentV3State:
def __init__(__self__, *,
allow_new_private_endpoint_connections: Optional[pulumi.Input[bool]] = None,
cluster_settings: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]] = None,
dedicated_host_count: Optional[pulumi.Input[int]] = None,
dns_suffix: Optional[pulumi.Input[str]] = None,
external_inbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
inbound_network_dependencies: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3InboundNetworkDependencyArgs']]]] = None,
internal_inbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
internal_load_balancing_mode: Optional[pulumi.Input[str]] = None,
ip_ssl_address_count: Optional[pulumi.Input[int]] = None,
linux_outbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
pricing_tier: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
windows_outbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
zone_redundant: Optional[pulumi.Input[bool]] = None):
"""
Input properties used for looking up and filtering EnvironmentV3 resources.
:param pulumi.Input[bool] allow_new_private_endpoint_connections: Should new Private Endpoint Connections be allowed. Defaults to `true`.
:param pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]] cluster_settings: Zero or more `cluster_setting` blocks as defined below.
:param pulumi.Input[int] dedicated_host_count: This ASEv3 should use dedicated Hosts. Possible values are `2`. Changing this forces a new resource to be created.
:param pulumi.Input[str] dns_suffix: The DNS suffix for this App Service Environment V3.
:param pulumi.Input[Sequence[pulumi.Input[str]]] external_inbound_ip_addresses: The external outbound IP addresses of the App Service Environment V3.
:param pulumi.Input[Sequence[pulumi.Input['EnvironmentV3InboundNetworkDependencyArgs']]] inbound_network_dependencies: An Inbound Network Dependencies block as defined below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] internal_inbound_ip_addresses: The internal outbound IP addresses of the App Service Environment V3.
:param pulumi.Input[str] internal_load_balancing_mode: Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
:param pulumi.Input[int] ip_ssl_address_count: The number of IP SSL addresses reserved for the App Service Environment V3.
:param pulumi.Input[Sequence[pulumi.Input[str]]] linux_outbound_ip_addresses: Outbound addresses of Linux based Apps in this App Service Environment V3.
:param pulumi.Input[str] location: The location where the App Service Environment exists.
:param pulumi.Input[str] name: The name of the App Service Environment. Changing this forces a new resource to be created.
:param pulumi.Input[str] pricing_tier: Pricing tier for the front end instances.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
:param pulumi.Input[str] subnet_id: The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] windows_outbound_ip_addresses: Outbound addresses of Windows based Apps in this App Service Environment V3.
"""
if allow_new_private_endpoint_connections is not None:
pulumi.set(__self__, "allow_new_private_endpoint_connections", allow_new_private_endpoint_connections)
if cluster_settings is not None:
pulumi.set(__self__, "cluster_settings", cluster_settings)
if dedicated_host_count is not None:
pulumi.set(__self__, "dedicated_host_count", dedicated_host_count)
if dns_suffix is not None:
pulumi.set(__self__, "dns_suffix", dns_suffix)
if external_inbound_ip_addresses is not None:
pulumi.set(__self__, "external_inbound_ip_addresses", external_inbound_ip_addresses)
if inbound_network_dependencies is not None:
pulumi.set(__self__, "inbound_network_dependencies", inbound_network_dependencies)
if internal_inbound_ip_addresses is not None:
pulumi.set(__self__, "internal_inbound_ip_addresses", internal_inbound_ip_addresses)
if internal_load_balancing_mode is not None:
pulumi.set(__self__, "internal_load_balancing_mode", internal_load_balancing_mode)
if ip_ssl_address_count is not None:
pulumi.set(__self__, "ip_ssl_address_count", ip_ssl_address_count)
if linux_outbound_ip_addresses is not None:
pulumi.set(__self__, "linux_outbound_ip_addresses", linux_outbound_ip_addresses)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if pricing_tier is not None:
pulumi.set(__self__, "pricing_tier", pricing_tier)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if subnet_id is not None:
pulumi.set(__self__, "subnet_id", subnet_id)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if windows_outbound_ip_addresses is not None:
pulumi.set(__self__, "windows_outbound_ip_addresses", windows_outbound_ip_addresses)
if zone_redundant is not None:
pulumi.set(__self__, "zone_redundant", zone_redundant)
@property
@pulumi.getter(name="allowNewPrivateEndpointConnections")
def allow_new_private_endpoint_connections(self) -> Optional[pulumi.Input[bool]]:
"""
Should new Private Endpoint Connections be allowed. Defaults to `true`.
"""
return pulumi.get(self, "allow_new_private_endpoint_connections")
@allow_new_private_endpoint_connections.setter
def allow_new_private_endpoint_connections(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "allow_new_private_endpoint_connections", value)
@property
@pulumi.getter(name="clusterSettings")
def cluster_settings(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]]:
"""
Zero or more `cluster_setting` blocks as defined below.
"""
return pulumi.get(self, "cluster_settings")
@cluster_settings.setter
def cluster_settings(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3ClusterSettingArgs']]]]):
pulumi.set(self, "cluster_settings", value)
@property
@pulumi.getter(name="dedicatedHostCount")
def dedicated_host_count(self) -> Optional[pulumi.Input[int]]:
"""
This ASEv3 should use dedicated Hosts. Possible values are `2`. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "dedicated_host_count")
@dedicated_host_count.setter
def dedicated_host_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "dedicated_host_count", value)
@property
@pulumi.getter(name="dnsSuffix")
def dns_suffix(self) -> Optional[pulumi.Input[str]]:
"""
The DNS suffix for this App Service Environment V3.
"""
return pulumi.get(self, "dns_suffix")
@dns_suffix.setter
def dns_suffix(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "dns_suffix", value)
@property
@pulumi.getter(name="externalInboundIpAddresses")
def external_inbound_ip_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The external outbound IP addresses of the App Service Environment V3.
"""
return pulumi.get(self, "external_inbound_ip_addresses")
@external_inbound_ip_addresses.setter
def external_inbound_ip_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "external_inbound_ip_addresses", value)
@property
@pulumi.getter(name="inboundNetworkDependencies")
def inbound_network_dependencies(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3InboundNetworkDependencyArgs']]]]:
"""
An Inbound Network Dependencies block as defined below.
"""
return pulumi.get(self, "inbound_network_dependencies")
@inbound_network_dependencies.setter
def inbound_network_dependencies(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['EnvironmentV3InboundNetworkDependencyArgs']]]]):
pulumi.set(self, "inbound_network_dependencies", value)
@property
@pulumi.getter(name="internalInboundIpAddresses")
def internal_inbound_ip_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
The internal outbound IP addresses of the App Service Environment V3.
"""
return pulumi.get(self, "internal_inbound_ip_addresses")
@internal_inbound_ip_addresses.setter
def internal_inbound_ip_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "internal_inbound_ip_addresses", value)
@property
@pulumi.getter(name="internalLoadBalancingMode")
def internal_load_balancing_mode(self) -> Optional[pulumi.Input[str]]:
"""
Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
"""
return pulumi.get(self, "internal_load_balancing_mode")
@internal_load_balancing_mode.setter
def internal_load_balancing_mode(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "internal_load_balancing_mode", value)
@property
@pulumi.getter(name="ipSslAddressCount")
def ip_ssl_address_count(self) -> Optional[pulumi.Input[int]]:
"""
The number of IP SSL addresses reserved for the App Service Environment V3.
"""
return pulumi.get(self, "ip_ssl_address_count")
@ip_ssl_address_count.setter
def ip_ssl_address_count(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "ip_ssl_address_count", value)
@property
@pulumi.getter(name="linuxOutboundIpAddresses")
def linux_outbound_ip_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Outbound addresses of Linux based Apps in this App Service Environment V3.
"""
return pulumi.get(self, "linux_outbound_ip_addresses")
@linux_outbound_ip_addresses.setter
def linux_outbound_ip_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "linux_outbound_ip_addresses", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
"""
The location where the App Service Environment exists.
"""
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the App Service Environment. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="pricingTier")
def pricing_tier(self) -> Optional[pulumi.Input[str]]:
"""
Pricing tier for the front end instances.
"""
return pulumi.get(self, "pricing_tier")
@pricing_tier.setter
def pricing_tier(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "pricing_tier", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "subnet_id")
@subnet_id.setter
def subnet_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subnet_id", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="windowsOutboundIpAddresses")
def windows_outbound_ip_addresses(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Outbound addresses of Windows based Apps in this App Service Environment V3.
"""
return pulumi.get(self, "windows_outbound_ip_addresses")
@windows_outbound_ip_addresses.setter
def windows_outbound_ip_addresses(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "windows_outbound_ip_addresses", value)
@property
@pulumi.getter(name="zoneRedundant")
def zone_redundant(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "zone_redundant")
@zone_redundant.setter
def zone_redundant(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "zone_redundant", value)
class EnvironmentV3(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
allow_new_private_endpoint_connections: Optional[pulumi.Input[bool]] = None,
cluster_settings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3ClusterSettingArgs']]]]] = None,
dedicated_host_count: Optional[pulumi.Input[int]] = None,
internal_load_balancing_mode: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
zone_redundant: Optional[pulumi.Input[bool]] = None,
__props__=None):
"""
Manages a 3rd Generation (v3) App Service Environment.
> **NOTE:** App Service Environment V3 is currently in Preview.
## Import
A 3rd Generation (v3) App Service Environment can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:appservice/environmentV3:EnvironmentV3 myAppServiceEnv /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Web/hostingEnvironments/myAppServiceEnv
```
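## Example Usage
A minimal sketch (assuming `example_rg` and `example_subnet` refer to an existing Resource Group and a delegated Subnet):
```python
import pulumi_azure as azure

example = azure.appservice.EnvironmentV3("example",
    resource_group_name=example_rg.name,
    subnet_id=example_subnet.id)
```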
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] allow_new_private_endpoint_connections: Should new Private Endpoint Connections be allowed. Defaults to `true`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3ClusterSettingArgs']]]] cluster_settings: Zero or more `cluster_setting` blocks as defined below.
:param pulumi.Input[int] dedicated_host_count: This ASEv3 should use dedicated Hosts. Possible values are `2`. Changing this forces a new resource to be created.
:param pulumi.Input[str] internal_load_balancing_mode: Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
:param pulumi.Input[str] name: The name of the App Service Environment. Changing this forces a new resource to be created.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
:param pulumi.Input[str] subnet_id: The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: EnvironmentV3Args,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a 3rd Generation (v3) App Service Environment.
> **NOTE:** App Service Environment V3 is currently in Preview.
## Import
A 3rd Generation (v3) App Service Environment can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:appservice/environmentV3:EnvironmentV3 myAppServiceEnv /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/myResourceGroup/providers/Microsoft.Web/hostingEnvironments/myAppServiceEnv
```
:param str resource_name: The name of the resource.
:param EnvironmentV3Args args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(EnvironmentV3Args, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
allow_new_private_endpoint_connections: Optional[pulumi.Input[bool]] = None,
cluster_settings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3ClusterSettingArgs']]]]] = None,
dedicated_host_count: Optional[pulumi.Input[int]] = None,
internal_load_balancing_mode: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
zone_redundant: Optional[pulumi.Input[bool]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = EnvironmentV3Args.__new__(EnvironmentV3Args)
__props__.__dict__["allow_new_private_endpoint_connections"] = allow_new_private_endpoint_connections
__props__.__dict__["cluster_settings"] = cluster_settings
__props__.__dict__["dedicated_host_count"] = dedicated_host_count
__props__.__dict__["internal_load_balancing_mode"] = internal_load_balancing_mode
__props__.__dict__["name"] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if subnet_id is None and not opts.urn:
raise TypeError("Missing required property 'subnet_id'")
__props__.__dict__["subnet_id"] = subnet_id
__props__.__dict__["tags"] = tags
__props__.__dict__["zone_redundant"] = zone_redundant
__props__.__dict__["dns_suffix"] = None
__props__.__dict__["external_inbound_ip_addresses"] = None
__props__.__dict__["inbound_network_dependencies"] = None
__props__.__dict__["internal_inbound_ip_addresses"] = None
__props__.__dict__["ip_ssl_address_count"] = None
__props__.__dict__["linux_outbound_ip_addresses"] = None
__props__.__dict__["location"] = None
__props__.__dict__["pricing_tier"] = None
__props__.__dict__["windows_outbound_ip_addresses"] = None
super(EnvironmentV3, __self__).__init__(
'azure:appservice/environmentV3:EnvironmentV3',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
allow_new_private_endpoint_connections: Optional[pulumi.Input[bool]] = None,
cluster_settings: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3ClusterSettingArgs']]]]] = None,
dedicated_host_count: Optional[pulumi.Input[int]] = None,
dns_suffix: Optional[pulumi.Input[str]] = None,
external_inbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
inbound_network_dependencies: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3InboundNetworkDependencyArgs']]]]] = None,
internal_inbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
internal_load_balancing_mode: Optional[pulumi.Input[str]] = None,
ip_ssl_address_count: Optional[pulumi.Input[int]] = None,
linux_outbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
pricing_tier: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
windows_outbound_ip_addresses: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
zone_redundant: Optional[pulumi.Input[bool]] = None) -> 'EnvironmentV3':
"""
Get an existing EnvironmentV3 resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] allow_new_private_endpoint_connections: Should new Private Endpoint Connections be allowed. Defaults to `true`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3ClusterSettingArgs']]]] cluster_settings: Zero or more `cluster_setting` blocks as defined below.
:param pulumi.Input[int] dedicated_host_count: This ASEv3 should use dedicated Hosts. Possible values are `2`. Changing this forces a new resource to be created.
:param pulumi.Input[str] dns_suffix: The DNS suffix for this App Service Environment V3.
:param pulumi.Input[Sequence[pulumi.Input[str]]] external_inbound_ip_addresses: The external outbound IP addresses of the App Service Environment V3.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['EnvironmentV3InboundNetworkDependencyArgs']]]] inbound_network_dependencies: An Inbound Network Dependencies block as defined below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] internal_inbound_ip_addresses: The internal outbound IP addresses of the App Service Environment V3.
:param pulumi.Input[str] internal_load_balancing_mode: Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
:param pulumi.Input[int] ip_ssl_address_count: The number of IP SSL addresses reserved for the App Service Environment V3.
:param pulumi.Input[Sequence[pulumi.Input[str]]] linux_outbound_ip_addresses: Outbound addresses of Linux based Apps in this App Service Environment V3.
:param pulumi.Input[str] location: The location where the App Service Environment exists.
:param pulumi.Input[str] name: The name of the App Service Environment. Changing this forces a new resource to be created.
:param pulumi.Input[str] pricing_tier: Pricing tier for the front end instances.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
:param pulumi.Input[str] subnet_id: The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
:param pulumi.Input[Sequence[pulumi.Input[str]]] windows_outbound_ip_addresses: Outbound addresses of Windows based Apps in this App Service Environment V3.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _EnvironmentV3State.__new__(_EnvironmentV3State)
__props__.__dict__["allow_new_private_endpoint_connections"] = allow_new_private_endpoint_connections
__props__.__dict__["cluster_settings"] = cluster_settings
__props__.__dict__["dedicated_host_count"] = dedicated_host_count
__props__.__dict__["dns_suffix"] = dns_suffix
__props__.__dict__["external_inbound_ip_addresses"] = external_inbound_ip_addresses
__props__.__dict__["inbound_network_dependencies"] = inbound_network_dependencies
__props__.__dict__["internal_inbound_ip_addresses"] = internal_inbound_ip_addresses
__props__.__dict__["internal_load_balancing_mode"] = internal_load_balancing_mode
__props__.__dict__["ip_ssl_address_count"] = ip_ssl_address_count
__props__.__dict__["linux_outbound_ip_addresses"] = linux_outbound_ip_addresses
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["pricing_tier"] = pricing_tier
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["subnet_id"] = subnet_id
__props__.__dict__["tags"] = tags
__props__.__dict__["windows_outbound_ip_addresses"] = windows_outbound_ip_addresses
__props__.__dict__["zone_redundant"] = zone_redundant
return EnvironmentV3(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="allowNewPrivateEndpointConnections")
def allow_new_private_endpoint_connections(self) -> pulumi.Output[Optional[bool]]:
"""
Should new Private Endpoint Connections be allowed. Defaults to `true`.
"""
return pulumi.get(self, "allow_new_private_endpoint_connections")
@property
@pulumi.getter(name="clusterSettings")
def cluster_settings(self) -> pulumi.Output[Sequence['outputs.EnvironmentV3ClusterSetting']]:
"""
Zero or more `cluster_setting` blocks as defined below.
"""
return pulumi.get(self, "cluster_settings")
@property
@pulumi.getter(name="dedicatedHostCount")
def dedicated_host_count(self) -> pulumi.Output[Optional[int]]:
"""
        This ASEv3 should use dedicated Hosts. Possible values are `2`. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "dedicated_host_count")
@property
@pulumi.getter(name="dnsSuffix")
def dns_suffix(self) -> pulumi.Output[str]:
"""
        The DNS suffix for this App Service Environment V3.
"""
return pulumi.get(self, "dns_suffix")
@property
@pulumi.getter(name="externalInboundIpAddresses")
def external_inbound_ip_addresses(self) -> pulumi.Output[Sequence[str]]:
"""
        The external inbound IP addresses of the App Service Environment V3.
"""
return pulumi.get(self, "external_inbound_ip_addresses")
@property
@pulumi.getter(name="inboundNetworkDependencies")
def inbound_network_dependencies(self) -> pulumi.Output[Sequence['outputs.EnvironmentV3InboundNetworkDependency']]:
"""
An Inbound Network Dependencies block as defined below.
"""
return pulumi.get(self, "inbound_network_dependencies")
@property
@pulumi.getter(name="internalInboundIpAddresses")
def internal_inbound_ip_addresses(self) -> pulumi.Output[Sequence[str]]:
"""
        The internal inbound IP addresses of the App Service Environment V3.
"""
return pulumi.get(self, "internal_inbound_ip_addresses")
@property
@pulumi.getter(name="internalLoadBalancingMode")
def internal_load_balancing_mode(self) -> pulumi.Output[Optional[str]]:
"""
Specifies which endpoints to serve internally in the Virtual Network for the App Service Environment. Possible values are `None` (for an External VIP Type), and `"Web, Publishing"` (for an Internal VIP Type). Defaults to `None`.
"""
return pulumi.get(self, "internal_load_balancing_mode")
@property
@pulumi.getter(name="ipSslAddressCount")
def ip_ssl_address_count(self) -> pulumi.Output[int]:
"""
The number of IP SSL addresses reserved for the App Service Environment V3.
"""
return pulumi.get(self, "ip_ssl_address_count")
@property
@pulumi.getter(name="linuxOutboundIpAddresses")
def linux_outbound_ip_addresses(self) -> pulumi.Output[Sequence[str]]:
"""
        Outbound addresses of Linux based Apps in this App Service Environment V3.
"""
return pulumi.get(self, "linux_outbound_ip_addresses")
@property
@pulumi.getter
def location(self) -> pulumi.Output[str]:
"""
The location where the App Service Environment exists.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the App Service Environment. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="pricingTier")
def pricing_tier(self) -> pulumi.Output[str]:
"""
Pricing tier for the front end instances.
"""
return pulumi.get(self, "pricing_tier")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the Resource Group where the App Service Environment exists. Defaults to the Resource Group of the Subnet (specified by `subnet_id`).
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> pulumi.Output[str]:
"""
The ID of the Subnet which the App Service Environment should be connected to. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "subnet_id")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
A mapping of tags to assign to the resource. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="windowsOutboundIpAddresses")
def windows_outbound_ip_addresses(self) -> pulumi.Output[Sequence[str]]:
"""
Outbound addresses of Windows based Apps in this App Service Environment V3.
"""
return pulumi.get(self, "windows_outbound_ip_addresses")
@property
@pulumi.getter(name="zoneRedundant")
def zone_redundant(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "zone_redundant")
|
[
"pulumi.get",
"pulumi.getter",
"pulumi.ResourceOptions",
"pulumi.set"
] |
[((3698, 3737), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resourceGroupName"""'}), "(name='resourceGroupName')\n", (3711, 3737), False, 'import pulumi\n'), ((4196, 4226), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""subnetId"""'}), "(name='subnetId')\n", (4209, 4226), False, 'import pulumi\n'), ((4619, 4675), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""allowNewPrivateEndpointConnections"""'}), "(name='allowNewPrivateEndpointConnections')\n", (4632, 4675), False, 'import pulumi\n'), ((5177, 5214), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clusterSettings"""'}), "(name='clusterSettings')\n", (5190, 5214), False, 'import pulumi\n'), ((5696, 5736), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dedicatedHostCount"""'}), "(name='dedicatedHostCount')\n", (5709, 5736), False, 'import pulumi\n'), ((6188, 6235), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internalLoadBalancingMode"""'}), "(name='internalLoadBalancingMode')\n", (6201, 6235), False, 'import pulumi\n'), ((7628, 7663), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""zoneRedundant"""'}), "(name='zoneRedundant')\n", (7641, 7663), False, 'import pulumi\n'), ((14565, 14621), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""allowNewPrivateEndpointConnections"""'}), "(name='allowNewPrivateEndpointConnections')\n", (14578, 14621), False, 'import pulumi\n'), ((15123, 15160), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clusterSettings"""'}), "(name='clusterSettings')\n", (15136, 15160), False, 'import pulumi\n'), ((15642, 15682), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dedicatedHostCount"""'}), "(name='dedicatedHostCount')\n", (15655, 15682), False, 'import pulumi\n'), ((16134, 16165), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dnsSuffix"""'}), "(name='dnsSuffix')\n", (16147, 16165), False, 'import pulumi\n'), ((16505, 16553), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""externalInboundIpAddresses"""'}), "(name='externalInboundIpAddresses')\n", (16518, 16553), False, 'import pulumi\n'), ((17054, 17102), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""inboundNetworkDependencies"""'}), "(name='inboundNetworkDependencies')\n", (17067, 17102), False, 'import pulumi\n'), ((17664, 17712), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internalInboundIpAddresses"""'}), "(name='internalInboundIpAddresses')\n", (17677, 17712), False, 'import pulumi\n'), ((18213, 18260), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internalLoadBalancingMode"""'}), "(name='internalLoadBalancingMode')\n", (18226, 18260), False, 'import pulumi\n'), ((18867, 18906), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ipSslAddressCount"""'}), "(name='ipSslAddressCount')\n", (18880, 18906), False, 'import pulumi\n'), ((19320, 19366), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""linuxOutboundIpAddresses"""'}), "(name='linuxOutboundIpAddresses')\n", (19333, 19366), False, 'import pulumi\n'), ((20570, 20603), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""pricingTier"""'}), "(name='pricingTier')\n", (20583, 20603), False, 'import pulumi\n'), ((20943, 20982), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resourceGroupName"""'}), "(name='resourceGroupName')\n", (20956, 20982), False, 'import pulumi\n'), ((21461, 21491), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""subnetId"""'}), "(name='subnetId')\n", (21474, 21491), False, 'import pulumi\n'), ((22327, 22375), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""windowsOutboundIpAddresses"""'}), 
"(name='windowsOutboundIpAddresses')\n", (22340, 22375), False, 'import pulumi\n'), ((22883, 22918), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""zoneRedundant"""'}), "(name='zoneRedundant')\n", (22896, 22918), False, 'import pulumi\n'), ((37140, 37196), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""allowNewPrivateEndpointConnections"""'}), "(name='allowNewPrivateEndpointConnections')\n", (37153, 37196), False, 'import pulumi\n'), ((37482, 37519), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clusterSettings"""'}), "(name='clusterSettings')\n", (37495, 37519), False, 'import pulumi\n'), ((37778, 37818), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dedicatedHostCount"""'}), "(name='dedicatedHostCount')\n", (37791, 37818), False, 'import pulumi\n'), ((38109, 38140), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""dnsSuffix"""'}), "(name='dnsSuffix')\n", (38122, 38140), False, 'import pulumi\n'), ((38339, 38387), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""externalInboundIpAddresses"""'}), "(name='externalInboundIpAddresses')\n", (38352, 38387), False, 'import pulumi\n'), ((38652, 38700), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""inboundNetworkDependencies"""'}), "(name='inboundNetworkDependencies')\n", (38665, 38700), False, 'import pulumi\n'), ((38993, 39041), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internalInboundIpAddresses"""'}), "(name='internalInboundIpAddresses')\n", (39006, 39041), False, 'import pulumi\n'), ((39306, 39353), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""internalLoadBalancingMode"""'}), "(name='internalLoadBalancingMode')\n", (39319, 39353), False, 'import pulumi\n'), ((39775, 39814), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""ipSslAddressCount"""'}), "(name='ipSslAddressCount')\n", (39788, 39814), False, 'import pulumi\n'), ((40057, 40103), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""linuxOutboundIpAddresses"""'}), "(name='linuxOutboundIpAddresses')\n", (40070, 40103), False, 'import pulumi\n'), ((40819, 40852), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""pricingTier"""'}), "(name='pricingTier')\n", (40832, 40852), False, 'import pulumi\n'), ((41045, 41084), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""resourceGroupName"""'}), "(name='resourceGroupName')\n", (41058, 41084), False, 'import pulumi\n'), ((41395, 41425), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""subnetId"""'}), "(name='subnetId')\n", (41408, 41425), False, 'import pulumi\n'), ((41968, 42016), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""windowsOutboundIpAddresses"""'}), "(name='windowsOutboundIpAddresses')\n", (41981, 42016), False, 'import pulumi\n'), ((42288, 42323), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""zoneRedundant"""'}), "(name='zoneRedundant')\n", (42301, 42323), False, 'import pulumi\n'), ((2740, 2804), 'pulumi.set', 'pulumi.set', (['__self__', '"""resource_group_name"""', 'resource_group_name'], {}), "(__self__, 'resource_group_name', resource_group_name)\n", (2750, 2804), False, 'import pulumi\n'), ((2813, 2857), 'pulumi.set', 'pulumi.set', (['__self__', '"""subnet_id"""', 'subnet_id'], {}), "(__self__, 'subnet_id', subnet_id)\n", (2823, 2857), False, 'import pulumi\n'), ((3987, 4026), 'pulumi.get', 'pulumi.get', (['self', '"""resource_group_name"""'], {}), "(self, 'resource_group_name')\n", (3997, 4026), False, 'import pulumi\n'), ((4129, 4175), 'pulumi.set', 'pulumi.set', (['self', '"""resource_group_name"""', 'value'], {}), "(self, 'resource_group_name', value)\n", 
(4139, 4175), False, 'import pulumi\n'), ((4450, 4479), 'pulumi.get', 'pulumi.get', (['self', '"""subnet_id"""'], {}), "(self, 'subnet_id')\n", (4460, 4479), False, 'import pulumi\n'), ((4562, 4598), 'pulumi.set', 'pulumi.set', (['self', '"""subnet_id"""', 'value'], {}), "(self, 'subnet_id', value)\n", (4572, 4598), False, 'import pulumi\n'), ((4881, 4939), 'pulumi.get', 'pulumi.get', (['self', '"""allow_new_private_endpoint_connections"""'], {}), "(self, 'allow_new_private_endpoint_connections')\n", (4891, 4939), False, 'import pulumi\n'), ((5091, 5156), 'pulumi.set', 'pulumi.set', (['self', '"""allow_new_private_endpoint_connections"""', 'value'], {}), "(self, 'allow_new_private_endpoint_connections', value)\n", (5101, 5156), False, 'import pulumi\n'), ((5435, 5471), 'pulumi.get', 'pulumi.get', (['self', '"""cluster_settings"""'], {}), "(self, 'cluster_settings')\n", (5445, 5471), False, 'import pulumi\n'), ((5632, 5675), 'pulumi.set', 'pulumi.set', (['self', '"""cluster_settings"""', 'value'], {}), "(self, 'cluster_settings', value)\n", (5642, 5675), False, 'import pulumi\n'), ((5965, 6005), 'pulumi.get', 'pulumi.get', (['self', '"""dedicated_host_count"""'], {}), "(self, 'dedicated_host_count')\n", (5975, 6005), False, 'import pulumi\n'), ((6120, 6167), 'pulumi.set', 'pulumi.set', (['self', '"""dedicated_host_count"""', 'value'], {}), "(self, 'dedicated_host_count', value)\n", (6130, 6167), False, 'import pulumi\n'), ((6587, 6635), 'pulumi.get', 'pulumi.get', (['self', '"""internal_load_balancing_mode"""'], {}), "(self, 'internal_load_balancing_mode')\n", (6597, 6635), False, 'import pulumi\n'), ((6766, 6821), 'pulumi.set', 'pulumi.set', (['self', '"""internal_load_balancing_mode"""', 'value'], {}), "(self, 'internal_load_balancing_mode', value)\n", (6776, 6821), False, 'import pulumi\n'), ((7046, 7070), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (7056, 7070), False, 'import pulumi\n'), ((7153, 7184), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (7163, 7184), False, 'import pulumi\n'), ((7441, 7465), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (7451, 7465), False, 'import pulumi\n'), ((7576, 7607), 'pulumi.set', 'pulumi.set', (['self', '"""tags"""', 'value'], {}), "(self, 'tags', value)\n", (7586, 7607), False, 'import pulumi\n'), ((7741, 7775), 'pulumi.get', 'pulumi.get', (['self', '"""zone_redundant"""'], {}), "(self, 'zone_redundant')\n", (7751, 7775), False, 'import pulumi\n'), ((7879, 7920), 'pulumi.set', 'pulumi.set', (['self', '"""zone_redundant"""', 'value'], {}), "(self, 'zone_redundant', value)\n", (7889, 7920), False, 'import pulumi\n'), ((14827, 14885), 'pulumi.get', 'pulumi.get', (['self', '"""allow_new_private_endpoint_connections"""'], {}), "(self, 'allow_new_private_endpoint_connections')\n", (14837, 14885), False, 'import pulumi\n'), ((15037, 15102), 'pulumi.set', 'pulumi.set', (['self', '"""allow_new_private_endpoint_connections"""', 'value'], {}), "(self, 'allow_new_private_endpoint_connections', value)\n", (15047, 15102), False, 'import pulumi\n'), ((15381, 15417), 'pulumi.get', 'pulumi.get', (['self', '"""cluster_settings"""'], {}), "(self, 'cluster_settings')\n", (15391, 15417), False, 'import pulumi\n'), ((15578, 15621), 'pulumi.set', 'pulumi.set', (['self', '"""cluster_settings"""', 'value'], {}), "(self, 'cluster_settings', value)\n", (15588, 15621), False, 'import pulumi\n'), ((15911, 15951), 'pulumi.get', 'pulumi.get', (['self', 
'"""dedicated_host_count"""'], {}), "(self, 'dedicated_host_count')\n", (15921, 15951), False, 'import pulumi\n'), ((16066, 16113), 'pulumi.set', 'pulumi.set', (['self', '"""dedicated_host_count"""', 'value'], {}), "(self, 'dedicated_host_count', value)\n", (16076, 16113), False, 'import pulumi\n'), ((16322, 16352), 'pulumi.get', 'pulumi.get', (['self', '"""dns_suffix"""'], {}), "(self, 'dns_suffix')\n", (16332, 16352), False, 'import pulumi\n'), ((16447, 16484), 'pulumi.set', 'pulumi.set', (['self', '"""dns_suffix"""', 'value'], {}), "(self, 'dns_suffix', value)\n", (16457, 16484), False, 'import pulumi\n'), ((16771, 16820), 'pulumi.get', 'pulumi.get', (['self', '"""external_inbound_ip_addresses"""'], {}), "(self, 'external_inbound_ip_addresses')\n", (16781, 16820), False, 'import pulumi\n'), ((16977, 17033), 'pulumi.set', 'pulumi.set', (['self', '"""external_inbound_ip_addresses"""', 'value'], {}), "(self, 'external_inbound_ip_addresses', value)\n", (16987, 17033), False, 'import pulumi\n'), ((17345, 17393), 'pulumi.get', 'pulumi.get', (['self', '"""inbound_network_dependencies"""'], {}), "(self, 'inbound_network_dependencies')\n", (17355, 17393), False, 'import pulumi\n'), ((17588, 17643), 'pulumi.set', 'pulumi.set', (['self', '"""inbound_network_dependencies"""', 'value'], {}), "(self, 'inbound_network_dependencies', value)\n", (17598, 17643), False, 'import pulumi\n'), ((17930, 17979), 'pulumi.get', 'pulumi.get', (['self', '"""internal_inbound_ip_addresses"""'], {}), "(self, 'internal_inbound_ip_addresses')\n", (17940, 17979), False, 'import pulumi\n'), ((18136, 18192), 'pulumi.set', 'pulumi.set', (['self', '"""internal_inbound_ip_addresses"""', 'value'], {}), "(self, 'internal_inbound_ip_addresses', value)\n", (18146, 18192), False, 'import pulumi\n'), ((18612, 18660), 'pulumi.get', 'pulumi.get', (['self', '"""internal_load_balancing_mode"""'], {}), "(self, 'internal_load_balancing_mode')\n", (18622, 18660), False, 'import pulumi\n'), ((18791, 18846), 'pulumi.set', 'pulumi.set', (['self', '"""internal_load_balancing_mode"""', 'value'], {}), "(self, 'internal_load_balancing_mode', value)\n", (18801, 18846), False, 'import pulumi\n'), ((19097, 19137), 'pulumi.get', 'pulumi.get', (['self', '"""ip_ssl_address_count"""'], {}), "(self, 'ip_ssl_address_count')\n", (19107, 19137), False, 'import pulumi\n'), ((19252, 19299), 'pulumi.set', 'pulumi.set', (['self', '"""ip_ssl_address_count"""', 'value'], {}), "(self, 'ip_ssl_address_count', value)\n", (19262, 19299), False, 'import pulumi\n'), ((19586, 19633), 'pulumi.get', 'pulumi.get', (['self', '"""linux_outbound_ip_addresses"""'], {}), "(self, 'linux_outbound_ip_addresses')\n", (19596, 19633), False, 'import pulumi\n'), ((19786, 19840), 'pulumi.set', 'pulumi.set', (['self', '"""linux_outbound_ip_addresses"""', 'value'], {}), "(self, 'linux_outbound_ip_addresses', value)\n", (19796, 19840), False, 'import pulumi\n'), ((20032, 20060), 'pulumi.get', 'pulumi.get', (['self', '"""location"""'], {}), "(self, 'location')\n", (20042, 20060), False, 'import pulumi\n'), ((20151, 20186), 'pulumi.set', 'pulumi.set', (['self', '"""location"""', 'value'], {}), "(self, 'location', value)\n", (20161, 20186), False, 'import pulumi\n'), ((20411, 20435), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (20421, 20435), False, 'import pulumi\n'), ((20518, 20549), 'pulumi.set', 'pulumi.set', (['self', '"""name"""', 'value'], {}), "(self, 'name', value)\n", (20528, 20549), False, 'import pulumi\n'), ((20752, 20784), 'pulumi.get', 
'pulumi.get', (['self', '"""pricing_tier"""'], {}), "(self, 'pricing_tier')\n", (20762, 20784), False, 'import pulumi\n'), ((20883, 20922), 'pulumi.set', 'pulumi.set', (['self', '"""pricing_tier"""', 'value'], {}), "(self, 'pricing_tier', value)\n", (20893, 20922), False, 'import pulumi\n'), ((21242, 21281), 'pulumi.get', 'pulumi.get', (['self', '"""resource_group_name"""'], {}), "(self, 'resource_group_name')\n", (21252, 21281), False, 'import pulumi\n'), ((21394, 21440), 'pulumi.set', 'pulumi.set', (['self', '"""resource_group_name"""', 'value'], {}), "(self, 'resource_group_name', value)\n", (21404, 21440), False, 'import pulumi\n'), ((21725, 21754), 'pulumi.get', 'pulumi.get', (['self', '"""subnet_id"""'], {}), "(self, 'subnet_id')\n", (21735, 21754), False, 'import pulumi\n'), ((21847, 21883), 'pulumi.set', 'pulumi.set', (['self', '"""subnet_id"""', 'value'], {}), "(self, 'subnet_id', value)\n", (21857, 21883), False, 'import pulumi\n'), ((22140, 22164), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (22150, 22164), False, 'import pulumi\n'), ((22275, 22306), 'pulumi.set', 'pulumi.set', (['self', '"""tags"""', 'value'], {}), "(self, 'tags', value)\n", (22285, 22306), False, 'import pulumi\n'), ((22600, 22649), 'pulumi.get', 'pulumi.get', (['self', '"""windows_outbound_ip_addresses"""'], {}), "(self, 'windows_outbound_ip_addresses')\n", (22610, 22649), False, 'import pulumi\n'), ((22806, 22862), 'pulumi.set', 'pulumi.set', (['self', '"""windows_outbound_ip_addresses"""', 'value'], {}), "(self, 'windows_outbound_ip_addresses', value)\n", (22816, 22862), False, 'import pulumi\n'), ((22996, 23030), 'pulumi.get', 'pulumi.get', (['self', '"""zone_redundant"""'], {}), "(self, 'zone_redundant')\n", (23006, 23030), False, 'import pulumi\n'), ((23134, 23175), 'pulumi.set', 'pulumi.set', (['self', '"""zone_redundant"""', 'value'], {}), "(self, 'zone_redundant', value)\n", (23144, 23175), False, 'import pulumi\n'), ((37403, 37461), 'pulumi.get', 'pulumi.get', (['self', '"""allow_new_private_endpoint_connections"""'], {}), "(self, 'allow_new_private_endpoint_connections')\n", (37413, 37461), False, 'import pulumi\n'), ((37721, 37757), 'pulumi.get', 'pulumi.get', (['self', '"""cluster_settings"""'], {}), "(self, 'cluster_settings')\n", (37731, 37757), False, 'import pulumi\n'), ((38048, 38088), 'pulumi.get', 'pulumi.get', (['self', '"""dedicated_host_count"""'], {}), "(self, 'dedicated_host_count')\n", (38058, 38088), False, 'import pulumi\n'), ((38288, 38318), 'pulumi.get', 'pulumi.get', (['self', '"""dns_suffix"""'], {}), "(self, 'dns_suffix')\n", (38298, 38318), False, 'import pulumi\n'), ((38582, 38631), 'pulumi.get', 'pulumi.get', (['self', '"""external_inbound_ip_addresses"""'], {}), "(self, 'external_inbound_ip_addresses')\n", (38592, 38631), False, 'import pulumi\n'), ((38924, 38972), 'pulumi.get', 'pulumi.get', (['self', '"""inbound_network_dependencies"""'], {}), "(self, 'inbound_network_dependencies')\n", (38934, 38972), False, 'import pulumi\n'), ((39236, 39285), 'pulumi.get', 'pulumi.get', (['self', '"""internal_inbound_ip_addresses"""'], {}), "(self, 'internal_inbound_ip_addresses')\n", (39246, 39285), False, 'import pulumi\n'), ((39706, 39754), 'pulumi.get', 'pulumi.get', (['self', '"""internal_load_balancing_mode"""'], {}), "(self, 'internal_load_balancing_mode')\n", (39716, 39754), False, 'import pulumi\n'), ((39996, 40036), 'pulumi.get', 'pulumi.get', (['self', '"""ip_ssl_address_count"""'], {}), "(self, 'ip_ssl_address_count')\n", (40006, 40036), 
False, 'import pulumi\n'), ((40300, 40347), 'pulumi.get', 'pulumi.get', (['self', '"""linux_outbound_ip_addresses"""'], {}), "(self, 'linux_outbound_ip_addresses')\n", (40310, 40347), False, 'import pulumi\n'), ((40530, 40558), 'pulumi.get', 'pulumi.get', (['self', '"""location"""'], {}), "(self, 'location')\n", (40540, 40558), False, 'import pulumi\n'), ((40774, 40798), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (40784, 40798), False, 'import pulumi\n'), ((40992, 41024), 'pulumi.get', 'pulumi.get', (['self', '"""pricing_tier"""'], {}), "(self, 'pricing_tier')\n", (41002, 41024), False, 'import pulumi\n'), ((41335, 41374), 'pulumi.get', 'pulumi.get', (['self', '"""resource_group_name"""'], {}), "(self, 'resource_group_name')\n", (41345, 41374), False, 'import pulumi\n'), ((41650, 41679), 'pulumi.get', 'pulumi.get', (['self', '"""subnet_id"""'], {}), "(self, 'subnet_id')\n", (41660, 41679), False, 'import pulumi\n'), ((41923, 41947), 'pulumi.get', 'pulumi.get', (['self', '"""tags"""'], {}), "(self, 'tags')\n", (41933, 41947), False, 'import pulumi\n'), ((42218, 42267), 'pulumi.get', 'pulumi.get', (['self', '"""windows_outbound_ip_addresses"""'], {}), "(self, 'windows_outbound_ip_addresses')\n", (42228, 42267), False, 'import pulumi\n'), ((42402, 42436), 'pulumi.get', 'pulumi.get', (['self', '"""zone_redundant"""'], {}), "(self, 'zone_redundant')\n", (42412, 42436), False, 'import pulumi\n'), ((2933, 3039), 'pulumi.set', 'pulumi.set', (['__self__', '"""allow_new_private_endpoint_connections"""', 'allow_new_private_endpoint_connections'], {}), "(__self__, 'allow_new_private_endpoint_connections',\n allow_new_private_endpoint_connections)\n", (2943, 3039), False, 'import pulumi\n'), ((3089, 3147), 'pulumi.set', 'pulumi.set', (['__self__', '"""cluster_settings"""', 'cluster_settings'], {}), "(__self__, 'cluster_settings', cluster_settings)\n", (3099, 3147), False, 'import pulumi\n'), ((3205, 3271), 'pulumi.set', 'pulumi.set', (['__self__', '"""dedicated_host_count"""', 'dedicated_host_count'], {}), "(__self__, 'dedicated_host_count', dedicated_host_count)\n", (3215, 3271), False, 'import pulumi\n'), ((3337, 3423), 'pulumi.set', 'pulumi.set', (['__self__', '"""internal_load_balancing_mode"""', 'internal_load_balancing_mode'], {}), "(__self__, 'internal_load_balancing_mode',\n internal_load_balancing_mode)\n", (3347, 3423), False, 'import pulumi\n'), ((3461, 3495), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (3471, 3495), False, 'import pulumi\n'), ((3537, 3571), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (3547, 3571), False, 'import pulumi\n'), ((3623, 3677), 'pulumi.set', 'pulumi.set', (['__self__', '"""zone_redundant"""', 'zone_redundant'], {}), "(__self__, 'zone_redundant', zone_redundant)\n", (3633, 3677), False, 'import pulumi\n'), ((12436, 12542), 'pulumi.set', 'pulumi.set', (['__self__', '"""allow_new_private_endpoint_connections"""', 'allow_new_private_endpoint_connections'], {}), "(__self__, 'allow_new_private_endpoint_connections',\n allow_new_private_endpoint_connections)\n", (12446, 12542), False, 'import pulumi\n'), ((12592, 12650), 'pulumi.set', 'pulumi.set', (['__self__', '"""cluster_settings"""', 'cluster_settings'], {}), "(__self__, 'cluster_settings', cluster_settings)\n", (12602, 12650), False, 'import pulumi\n'), ((12708, 12774), 'pulumi.set', 'pulumi.set', (['__self__', '"""dedicated_host_count"""', 'dedicated_host_count'], {}), 
"(__self__, 'dedicated_host_count', dedicated_host_count)\n", (12718, 12774), False, 'import pulumi\n'), ((12822, 12868), 'pulumi.set', 'pulumi.set', (['__self__', '"""dns_suffix"""', 'dns_suffix'], {}), "(__self__, 'dns_suffix', dns_suffix)\n", (12832, 12868), False, 'import pulumi\n'), ((12935, 13023), 'pulumi.set', 'pulumi.set', (['__self__', '"""external_inbound_ip_addresses"""', 'external_inbound_ip_addresses'], {}), "(__self__, 'external_inbound_ip_addresses',\n external_inbound_ip_addresses)\n", (12945, 13023), False, 'import pulumi\n'), ((13085, 13171), 'pulumi.set', 'pulumi.set', (['__self__', '"""inbound_network_dependencies"""', 'inbound_network_dependencies'], {}), "(__self__, 'inbound_network_dependencies',\n inbound_network_dependencies)\n", (13095, 13171), False, 'import pulumi\n'), ((13234, 13322), 'pulumi.set', 'pulumi.set', (['__self__', '"""internal_inbound_ip_addresses"""', 'internal_inbound_ip_addresses'], {}), "(__self__, 'internal_inbound_ip_addresses',\n internal_inbound_ip_addresses)\n", (13244, 13322), False, 'import pulumi\n'), ((13384, 13470), 'pulumi.set', 'pulumi.set', (['__self__', '"""internal_load_balancing_mode"""', 'internal_load_balancing_mode'], {}), "(__self__, 'internal_load_balancing_mode',\n internal_load_balancing_mode)\n", (13394, 13470), False, 'import pulumi\n'), ((13524, 13590), 'pulumi.set', 'pulumi.set', (['__self__', '"""ip_ssl_address_count"""', 'ip_ssl_address_count'], {}), "(__self__, 'ip_ssl_address_count', ip_ssl_address_count)\n", (13534, 13590), False, 'import pulumi\n'), ((13655, 13740), 'pulumi.set', 'pulumi.set', (['__self__', '"""linux_outbound_ip_addresses"""', 'linux_outbound_ip_addresses'], {}), "(__self__, 'linux_outbound_ip_addresses', linux_outbound_ip_addresses\n )\n", (13665, 13740), False, 'import pulumi\n'), ((13781, 13823), 'pulumi.set', 'pulumi.set', (['__self__', '"""location"""', 'location'], {}), "(__self__, 'location', location)\n", (13791, 13823), False, 'import pulumi\n'), ((13865, 13899), 'pulumi.set', 'pulumi.set', (['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (13875, 13899), False, 'import pulumi\n'), ((13949, 13999), 'pulumi.set', 'pulumi.set', (['__self__', '"""pricing_tier"""', 'pricing_tier'], {}), "(__self__, 'pricing_tier', pricing_tier)\n", (13959, 13999), False, 'import pulumi\n'), ((14056, 14120), 'pulumi.set', 'pulumi.set', (['__self__', '"""resource_group_name"""', 'resource_group_name'], {}), "(__self__, 'resource_group_name', resource_group_name)\n", (14066, 14120), False, 'import pulumi\n'), ((14167, 14211), 'pulumi.set', 'pulumi.set', (['__self__', '"""subnet_id"""', 'subnet_id'], {}), "(__self__, 'subnet_id', subnet_id)\n", (14177, 14211), False, 'import pulumi\n'), ((14253, 14287), 'pulumi.set', 'pulumi.set', (['__self__', '"""tags"""', 'tags'], {}), "(__self__, 'tags', tags)\n", (14263, 14287), False, 'import pulumi\n'), ((14354, 14442), 'pulumi.set', 'pulumi.set', (['__self__', '"""windows_outbound_ip_addresses"""', 'windows_outbound_ip_addresses'], {}), "(__self__, 'windows_outbound_ip_addresses',\n windows_outbound_ip_addresses)\n", (14364, 14442), False, 'import pulumi\n'), ((14490, 14544), 'pulumi.set', 'pulumi.set', (['__self__', '"""zone_redundant"""', 'zone_redundant'], {}), "(__self__, 'zone_redundant', zone_redundant)\n", (14500, 14544), False, 'import pulumi\n'), ((28598, 28622), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {}), '()\n', (28620, 28622), False, 'import pulumi\n'), ((35642, 35671), 'pulumi.ResourceOptions', 
'pulumi.ResourceOptions', ([], {'id': 'id'}), '(id=id)\n', (35664, 35671), False, 'import pulumi\n')]
|
##############################################################################
#
# Copyright (c) 2007 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import os
from setuptools import find_packages
from setuptools import setup
HERE = os.path.abspath(os.path.dirname(__file__))
def _read_file(filename):
with open(os.path.join(HERE, filename)) as f:
return f.read()
README = _read_file('README.rst')
CHANGES = _read_file('CHANGES.rst')
version = '5.0a3.dev0'
setup(
name='Zope',
version=version,
url='https://zope.readthedocs.io/en/latest/',
project_urls={
'Documentation': 'https://zope.readthedocs.io',
'Issue Tracker': 'https://github.com/zopefoundation/Zope/issues',
'Sources': 'https://github.com/zopefoundation/Zope',
},
license='ZPL 2.1',
description='Zope application server / web framework',
    author='Zope Foundation and Contributors',
author_email='<EMAIL>',
long_description="\n\n".join([README, CHANGES]),
classifiers=[
'Development Status :: 6 - Mature',
"Environment :: Web Environment",
"Framework :: Zope :: 5",
"Intended Audience :: Developers",
"License :: OSI Approved :: Zope Public License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Internet :: WWW/HTTP :: Dynamic Content",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application",
"Topic :: Software Development :: Libraries :: Application Frameworks",
],
packages=find_packages('src'),
namespace_packages=['Products', 'Shared', 'Shared.DC', 'zmi'],
package_dir={'': 'src'},
python_requires='>= 3.5',
install_requires=[
'AccessControl >= 4.2',
'Acquisition',
'BTrees',
'Chameleon >= 3.7.0',
'DateTime',
'DocumentTemplate >= 3.0b9',
'ExtensionClass',
'MultiMapping',
'PasteDeploy',
'Persistence',
'RestrictedPython',
'ZConfig >= 2.9.2',
'ZODB',
'setuptools >= 36.2',
'transaction >= 2.4',
'waitress',
'zExceptions >= 3.4',
'z3c.pt',
'zope.browser',
'zope.browsermenu',
'zope.browserpage >= 4.4.0.dev0',
'zope.browserresource >= 3.11',
'zope.component',
'zope.configuration',
'zope.container',
'zope.contentprovider',
'zope.contenttype',
'zope.deferredimport',
'zope.event',
'zope.exceptions',
'zope.globalrequest',
'zope.i18n [zcml]',
'zope.i18nmessageid',
'zope.interface >= 3.8',
'zope.lifecycleevent',
'zope.location',
'zope.pagetemplate >= 4.0.2',
'zope.processlifetime',
'zope.proxy',
'zope.ptresource',
'zope.publisher',
'zope.schema',
'zope.security',
'zope.sequencesort',
'zope.site',
'zope.size',
'zope.tal',
'zope.tales >= 5.0.2',
'zope.testbrowser',
'zope.testing',
'zope.traversing',
'zope.viewlet',
],
include_package_data=True,
zip_safe=False,
extras_require={
'docs': [
'Sphinx',
'sphinx_rtd_theme',
'repoze.sphinx.autointerface',
],
'wsgi': [
'Paste',
],
},
entry_points={
'paste.app_factory': [
'main=Zope2.Startup.run:make_wsgi_app',
],
'paste.filter_app_factory': [
'httpexceptions=ZPublisher.httpexceptions:main',
],
'console_scripts': [
'addzopeuser=Zope2.utilities.adduser:main',
'runwsgi=Zope2.Startup.serve:main',
'mkwsgiinstance=Zope2.utilities.mkwsgiinstance:main',
'zconsole=Zope2.utilities.zconsole:main',
],
'zodbupdate.decode': [
'decodes = OFS:zodbupdate_decode_dict',
],
'zodbupdate': [
'renames = OFS:zodbupdate_rename_dict',
],
},
)
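# A rough usage sketch (editor's note, not part of the original file): once
# this package is installed, the console_scripts entry points above become
# commands on PATH, e.g.:
#
#   pip install "Zope[wsgi]"         # the extras_require above pulls in Paste
#   mkwsgiinstance                   # create a WSGI instance (interactive)
#   runwsgi <instance>/etc/zope.ini  # <instance> is a placeholder path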
|
[
"os.path.dirname",
"os.path.join",
"setuptools.find_packages"
] |
[((739, 764), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (754, 764), False, 'import os\n'), ((2359, 2379), 'setuptools.find_packages', 'find_packages', (['"""src"""'], {}), "('src')\n", (2372, 2379), False, 'from setuptools import find_packages\n'), ((808, 836), 'os.path.join', 'os.path.join', (['HERE', 'filename'], {}), '(HERE, filename)\n', (820, 836), False, 'import os\n')]
|
from oscpy.server import OSCThreadServer
from time import sleep
print("Starting")
def callback(*values):
print("got values: {}".format(values))
osc = OSCThreadServer()
sock = osc.listen(address='127.0.0.1', port=8000, default=True)
osc.bind(b'/hello/world', callback, sock)
sleep(5000)  # block the main thread (in seconds) so the server thread keeps running
osc.stop()
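# Companion client sketch (editor's addition; uses oscpy's OSCClient API).
# Run it from a second process while the server above is still listening:
#
# from oscpy.client import OSCClient
# client = OSCClient('127.0.0.1', 8000)
# client.send_message(b'/hello/world', [b'hi', 42])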
|
[
"oscpy.server.OSCThreadServer",
"time.sleep"
] |
[((157, 174), 'oscpy.server.OSCThreadServer', 'OSCThreadServer', ([], {}), '()\n', (172, 174), False, 'from oscpy.server import OSCThreadServer\n'), ((281, 292), 'time.sleep', 'sleep', (['(5000)'], {}), '(5000)\n', (286, 292), False, 'from time import sleep\n')]
|
"""Utils for date range generation."""
from datetime import datetime
from typing import Optional, Union
from pyspark.sql import DataFrame, functions
from butterfree.clients import SparkClient
from butterfree.constants import DataType
from butterfree.constants.columns import TIMESTAMP_COLUMN
def get_date_range(
client: SparkClient,
start_date: Union[str, datetime],
end_date: Union[str, datetime],
    step: Optional[int] = None,
) -> DataFrame:
"""Create a date range dataframe.
    The dataframe returned by this method will contain a single column,
    TIMESTAMP_COLUMN, of timestamp type, with dates between start and end.
Args:
client: a spark client.
start_date: range beginning value (inclusive).
        end_date: range last value (inclusive).
step: optional step, in seconds.
Returns:
A single column date range spark dataframe.
"""
day_in_seconds = 60 * 60 * 24
step = step or day_in_seconds
start_date = (
start_date if isinstance(start_date, str) else start_date.strftime("%Y-%m-%d")
)
end_date = end_date if isinstance(end_date, str) else end_date.strftime("%Y-%m-%d")
date_df = client.conn.createDataFrame(
[(start_date, end_date)], ("start_date", "end_date")
).select(
[
functions.col(c).cast(DataType.TIMESTAMP.spark).cast(DataType.BIGINT.spark)
for c in ("start_date", "end_date")
]
)
start_date, end_date = date_df.first()
return client.conn.range(
start_date, end_date + day_in_seconds, step # type: ignore
).select(functions.col("id").cast(DataType.TIMESTAMP.spark).alias(TIMESTAMP_COLUMN))
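# A minimal usage sketch (editor's addition; assumes a SparkClient with an
# active session):
#
# from butterfree.clients import SparkClient
# client = SparkClient()
# df = get_date_range(client, "2021-01-01", "2021-01-07")
# df.show()  # one TIMESTAMP_COLUMN row per day in the range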
|
[
"pyspark.sql.functions.col"
] |
[((1597, 1616), 'pyspark.sql.functions.col', 'functions.col', (['"""id"""'], {}), "('id')\n", (1610, 1616), False, 'from pyspark.sql import DataFrame, functions\n'), ((1303, 1319), 'pyspark.sql.functions.col', 'functions.col', (['c'], {}), '(c)\n', (1316, 1319), False, 'from pyspark.sql import DataFrame, functions\n')]
|
# -*- coding: utf-8 -*-
"""Aplicando estilo via propriedade `name` (Gnome Builder).
Propriedade `name` é adicionada via arquivo de interface
e arquivo css é carregado via linguagem de programação.
"""
import gi
gi.require_version(namespace='Gtk', version='3.0')
from gi.repository import Gtk, Gdk
@Gtk.Template(filename='./mainwindow.glade')
class MainWindow(Gtk.ApplicationWindow):
__gtype_name__ = 'MainWindow'
def __init__(self):
super().__init__()
self._set_custom_css(file='../css/custom.css')
@staticmethod
def _set_custom_css(file):
css_provider = Gtk.CssProvider.new()
css_provider.load_from_path(path=file)
screen = Gdk.Screen()
style_context = Gtk.StyleContext.new()
style_context.add_provider_for_screen(
screen=screen.get_default(),
provider=css_provider,
priority=Gtk.STYLE_PROVIDER_PRIORITY_APPLICATION,
)
if __name__ == '__main__':
win = MainWindow()
win.connect('destroy', Gtk.main_quit)
win.show_all()
Gtk.main()
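# A sketch (editor's note, hypothetical values) of what ../css/custom.css
# might contain; widgets are matched by the `name` set in the interface
# file, using GTK's `#name` CSS selector:
#
# #MainWindow { background-color: #2d2d2d; }
# #button_ok  { color: #ffffff; background-color: #27ae60; }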
|
[
"gi.repository.Gtk.Template",
"gi.require_version",
"gi.repository.Gtk.main",
"gi.repository.Gtk.StyleContext.new",
"gi.repository.Gdk.Screen",
"gi.repository.Gtk.CssProvider.new"
] |
[((214, 264), 'gi.require_version', 'gi.require_version', ([], {'namespace': '"""Gtk"""', 'version': '"""3.0"""'}), "(namespace='Gtk', version='3.0')\n", (232, 264), False, 'import gi\n'), ((304, 347), 'gi.repository.Gtk.Template', 'Gtk.Template', ([], {'filename': '"""./mainwindow.glade"""'}), "(filename='./mainwindow.glade')\n", (316, 347), False, 'from gi.repository import Gtk, Gdk\n'), ((1063, 1073), 'gi.repository.Gtk.main', 'Gtk.main', ([], {}), '()\n', (1071, 1073), False, 'from gi.repository import Gtk, Gdk\n'), ((603, 624), 'gi.repository.Gtk.CssProvider.new', 'Gtk.CssProvider.new', ([], {}), '()\n', (622, 624), False, 'from gi.repository import Gtk, Gdk\n'), ((690, 702), 'gi.repository.Gdk.Screen', 'Gdk.Screen', ([], {}), '()\n', (700, 702), False, 'from gi.repository import Gtk, Gdk\n'), ((728, 750), 'gi.repository.Gtk.StyleContext.new', 'Gtk.StyleContext.new', ([], {}), '()\n', (748, 750), False, 'from gi.repository import Gtk, Gdk\n')]
|
import torch
from sklearn.model_selection import train_test_split
import torch.nn.functional as F
import torch.nn as nn
import torch.optim as optim
import random
import numpy as np
import Read_Data
import Confusion_Matrix
def getdata():
features, target = Read_Data.read_data3('mushrooms_data.csv')
X_train, X_test, y_train, y_test = train_test_split(features, target, test_size=0.20)
return X_train.values, X_test.values, y_train.values, y_test.values
class NN(nn.Module):
def __init__(self,input_size):
super().__init__()
self.fc1 = nn.Linear(input_size, 50)
self.fc2 = nn.Linear(50, 40)
self.fc3 = nn.Linear(40, 20)
self.fc4 = nn.Linear(20,9)
        self.dropout = nn.Dropout(0.15)  # note: defined but never applied in forward()
def forward(self,x):
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = F.relu(self.fc3(x))
x = self.fc4(x)
return x
def train(epoch, net, train_x, train_y, optimizer):
for e in range(epoch):
optimizer.zero_grad()
output = net(train_x)
loss = F.cross_entropy(output, train_y)
loss.backward()
optimizer.step()
return net
def predict(model, test_x):
pred = []
with torch.no_grad():
for data in test_x:
output = model(data)
predict = np.argmax(output)
pred.append(int(predict))
return pred
def random_state(seed_val):
np.random.seed(seed_val)
random.seed(seed_val)
torch.manual_seed(seed_val)
# if you are using GPU
torch.cuda.manual_seed(seed_val)
torch.cuda.manual_seed_all(seed_val)
def main(train_x,test_x,train_y,test_y,input_size):
random_state(100)
train_x_tensor = torch.from_numpy(train_x).float()
train_y_tensor = torch.from_numpy(train_y).long()
test_x_tensor = torch.from_numpy(test_x).float()
net = NN(input_size=input_size)
lr = 0.1
m = 0.9
optimizer = optim.SGD(net.parameters(), lr=lr, momentum=m)
my_net = train(100,net,train_x_tensor,train_y_tensor,optimizer)
predicted = predict(my_net, test_x_tensor)
actual = test_y
f1, recall, accuracy = Confusion_Matrix.main(actual, predicted,"Confusion Matrix: Neural Network")
return f1, recall, accuracy
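# Editor's usage sketch: wire the helpers above together. The input size is
# taken from the feature matrix so it matches the encoded feature columns.
if __name__ == '__main__':
    X_train, X_test, y_train, y_test = getdata()
    f1, recall, accuracy = main(X_train, X_test, y_train, y_test,
                                input_size=X_train.shape[1])
    print('f1: {}, recall: {}, accuracy: {}'.format(f1, recall, accuracy))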
|
[
"torch.nn.Dropout",
"numpy.random.seed",
"Read_Data.read_data3",
"numpy.argmax",
"torch.manual_seed",
"sklearn.model_selection.train_test_split",
"torch.cuda.manual_seed",
"torch.nn.functional.cross_entropy",
"torch.cuda.manual_seed_all",
"Confusion_Matrix.main",
"random.seed",
"torch.nn.Linear",
"torch.no_grad",
"torch.from_numpy"
] |
[((274, 316), 'Read_Data.read_data3', 'Read_Data.read_data3', (['"""mushrooms_data.csv"""'], {}), "('mushrooms_data.csv')\n", (294, 316), False, 'import Read_Data\n'), ((357, 406), 'sklearn.model_selection.train_test_split', 'train_test_split', (['features', 'target'], {'test_size': '(0.2)'}), '(features, target, test_size=0.2)\n', (373, 406), False, 'from sklearn.model_selection import train_test_split\n'), ((1483, 1507), 'numpy.random.seed', 'np.random.seed', (['seed_val'], {}), '(seed_val)\n', (1497, 1507), True, 'import numpy as np\n'), ((1513, 1534), 'random.seed', 'random.seed', (['seed_val'], {}), '(seed_val)\n', (1524, 1534), False, 'import random\n'), ((1540, 1567), 'torch.manual_seed', 'torch.manual_seed', (['seed_val'], {}), '(seed_val)\n', (1557, 1567), False, 'import torch\n'), ((1601, 1633), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['seed_val'], {}), '(seed_val)\n', (1623, 1633), False, 'import torch\n'), ((1639, 1675), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['seed_val'], {}), '(seed_val)\n', (1665, 1675), False, 'import torch\n'), ((2307, 2383), 'Confusion_Matrix.main', 'Confusion_Matrix.main', (['actual', 'predicted', '"""Confusion Matrix: Neural Network"""'], {}), "(actual, predicted, 'Confusion Matrix: Neural Network')\n", (2328, 2383), False, 'import Confusion_Matrix\n'), ((592, 617), 'torch.nn.Linear', 'nn.Linear', (['input_size', '(50)'], {}), '(input_size, 50)\n', (601, 617), True, 'import torch.nn as nn\n'), ((638, 655), 'torch.nn.Linear', 'nn.Linear', (['(50)', '(40)'], {}), '(50, 40)\n', (647, 655), True, 'import torch.nn as nn\n'), ((676, 693), 'torch.nn.Linear', 'nn.Linear', (['(40)', '(20)'], {}), '(40, 20)\n', (685, 693), True, 'import torch.nn as nn\n'), ((714, 730), 'torch.nn.Linear', 'nn.Linear', (['(20)', '(9)'], {}), '(20, 9)\n', (723, 730), True, 'import torch.nn as nn\n'), ((754, 770), 'torch.nn.Dropout', 'nn.Dropout', (['(0.15)'], {}), '(0.15)\n', (764, 770), True, 'import torch.nn as nn\n'), ((1106, 1138), 'torch.nn.functional.cross_entropy', 'F.cross_entropy', (['output', 'train_y'], {}), '(output, train_y)\n', (1121, 1138), True, 'import torch.nn.functional as F\n'), ((1266, 1281), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1279, 1281), False, 'import torch\n'), ((1369, 1386), 'numpy.argmax', 'np.argmax', (['output'], {}), '(output)\n', (1378, 1386), True, 'import numpy as np\n'), ((1866, 1891), 'torch.from_numpy', 'torch.from_numpy', (['train_x'], {}), '(train_x)\n', (1882, 1891), False, 'import torch\n'), ((1922, 1947), 'torch.from_numpy', 'torch.from_numpy', (['train_y'], {}), '(train_y)\n', (1938, 1947), False, 'import torch\n'), ((1976, 2000), 'torch.from_numpy', 'torch.from_numpy', (['test_x'], {}), '(test_x)\n', (1992, 2000), False, 'import torch\n')]
|
#-*- coding:utf-8 -*-
#scrappingV3.py
from bs4 import BeautifulSoup
import requests
import inspect
import os
import re
import string
import pathlib
import random
import json
import logging
import datetime
current_folder = inspect.getfile(inspect.currentframe())
current_path = os.path.dirname(os.path.abspath(current_folder))
#--Debug--#
# Remember to move all of the debug code
# into another file, preferably main.py,
# and call the download functions from main.py.
# Organize all of the folder-creation logic
# in main.py.
now = datetime.datetime.now()
format_log_file = now.strftime("%d-%m-%y-%H-%M")
log_folder = "log2"
pathlib.Path(log_folder).mkdir(parents=True, exist_ok=True)
new_log_folder = os.path.join(os.path.sep, current_path, log_folder)
log_path = os.path.join(os.path.sep, new_log_folder, "download{}.log"
.format(format_log_file))
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s(%(levelname)s) %(message)s")
log_file = logging.FileHandler(log_path, mode="w")
log_file.setLevel(logging.DEBUG)
log_file.setFormatter(formatter)
stream_handler = logging.StreamHandler()
stream_handler.setFormatter(formatter)
logger.addHandler(log_file)
logger.addHandler(stream_handler)
def download_file(url):
"""
Requer uma url de uma imagem.
Checa se a url é valida ou não.
Caso sim, a imagem será processada de
maneira "correta" e será baixada.
Caso não, a imagem será ignorada.
"""
#--Fazer com que a função também aceite lista de url"s
logger.info("--Começo Novo request--")
new_folder = "test6"
#Para o nome do arquivo
file_name = url.split("/")[-1]
try:
r = requests.get(url, stream=True)
if r.status_code == requests.codes.ok:
logger.info("Tentando Baixar -> {}".format(url))
# Check se o nome esta em branco
if not file_name:
file_name = "".join(random.sample(string.ascii_letters,
10)) + ".jpg"
#Loggar o nome do arquivo escolhido
logger.error("O arquivo {} esta sem nome".format(file_name))
logger.info("Novo nome escolhido {}".format(file_name))
#Check se algum character é invalido para o windows
file_name = "".join(re.split(r'[<>:"/\|?*]', file_name))
extension = (".jpg", ".jpeg", ".png")
            if not file_name.endswith(extension):
                logger.warning("The file {} is missing an extension".format(file_name))
                file_name = file_name + ".jpg"
            # Create the download folder (later, maybe make this an argument?)
            new_folder = os.path.join(os.path.sep, current_path, new_folder)
            #--pathlib is being called every single time
            #--parse_Image is called.
            # That is not right.
            # Move the folder creation out of the loop, into main.py.
            pathlib.Path(new_folder).mkdir(parents=True, exist_ok=True)
files_on_folder = os.listdir(new_folder)
            if file_name in files_on_folder:
                logger.warning("File {} already exists.".format(file_name))
                file_name = "".join(random.sample(string.ascii_letters,
                                10)) + ".jpg"
            # Log the final file name
            logger.info("Final file name: {}".format(file_name))
            print("-> Trying to download {}!".format(url))
#file_name = current_path + new_folder + file_name
file_name = os.path.join(os.path.sep, new_folder, file_name)
print(file_name)
try:
with open(file_name, "wb") as f:
                    # chunk_size is how much data is processed at a time
                    # and held in memory.
                    # It is not the size of the file being downloaded.
                    # Very large numbers can cause memory problems,
                    # so be careful.
for chunk in r.iter_content(chunk_size=4096):
if chunk:
f.write(chunk)
                            # force the buffered data out of memory to disk
f.flush()
os.fsync(f.fileno())
if file_name:
print("Download {} -> {}".format(url, file_name))
                    # Log the successful download plus the file name
                    logger.info("The url {} was downloaded successfully as: {}".format(url, file_name))
            # Catching some strange behavior here.
            # I still do not know the cause.
            # I believe it is related to the
            # file name being too long.
            # Either way, if the error happens a new name
            # is generated for the download.
            # "Problem solved"
except FileNotFoundError as err:
file_name = "".join(random.sample(string.ascii_letters,
10)) + ".jpg"
file_name = os.path.join(os.path.sep, new_folder, file_name)
                # Log what happened plus the chosen file name
                logger.error("The url {} could not be downloaded. Error: {}. A new name was generated: {}".format(url, err, file_name))
else:
print("A ulr não pode ser carregada. Erro {}.".format(r.status_code))
# Loggar a url + o erro
logger.error("A url não pode ser carregada. Erro: {}."
.format(r.status_code))
except requests.exceptions.SSLError as err:
print("Imagem {} não pode ser baixada. Erro {}.".format(file_name, err))
logger.erro("A url {} não pode ser baixada. Erro: {}.".format(url, err))
# Loggar a url + o erro
pass
def parse_Image(url, mode="NORMAL", home=None):
"""
Requer uma url.
De preferencia a url deve começar com "http://www.google.com/search?"
Processa o google imagens.
mode = NORMAL ou GOOGLE
NORMAL significa que o site desejado não é o google, qualquer outro site.
(PS: As imagens podem ser procurada por simples "img" / "src")
Google significa que o site desajado é o google imagens.
(PS: As imagens são procurada por "div" "class: rg_meta notranslate",
essa classe contem todas as informações do site da imagem, inclusive a url,
a classe é na verdade um json, dentro do json gerado a url da imagem esta
em ["ou"].
"""
#Não esquecer de olhar multithreads
#Fazer com que o download seja mais rapido.
#append os links encontrados em uma lista, dividir a lista
#na quantidade de threads desejados, e passar a cada lista
#para seu respectivo download_file(url)
au = ["Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:63.0) Gecko/20100101 Firefox/63.0"]
headers= {"User-Agent": random.choice(au)}
source_code = requests.get(url, headers=headers)
html = source_code.content
soup = BeautifulSoup(html, "lxml")
if mode == "NORMAL":
for link in soup.find_all("div", {"class": "rg_meta notranslate"}):
img = json.loads(link.text)
download_file(img["ou"])
elif mode == "GOOGLE":
for link in soup.findl_all("img"):
img = link.get("src")
if home == None:
if not image_links.startswith("http"):
img = url + "/" + img
if not home == None:
if not image_links.startwith("http"):
img = home + "/" + img
download_file(img)
url = "https://www.google.com/search?hl=en&site=imghp&tbm=isch&tbs\=isz:l&q=stardew valley"
parse_Image(url)
# new_folder = "\\test4"
# files_on_folder = os.listdir(current_path + new_folder)
# print(files_on_folder)
# with open("files_on_folder.json", "w") as outfile:
# json.dump(files_on_folder, outfile, indent=4)
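# Usage sketch for the generic branch (editor's note; the URLs below are
# hypothetical placeholders): scrape plain <img>/src tags from an arbitrary
# page, resolving relative paths against `home`.
#
# parse_Image("https://example.com/gallery", mode="NORMAL",
#             home="https://example.com")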
|
[
"os.path.abspath",
"re.split",
"os.path.join",
"logging.FileHandler",
"json.loads",
"random.sample",
"logging.StreamHandler",
"random.choice",
"logging.Formatter",
"pathlib.Path",
"requests.get",
"inspect.currentframe",
"bs4.BeautifulSoup",
"datetime.datetime.now",
"os.listdir",
"logging.getLogger"
] |
[((544, 567), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (565, 567), False, 'import datetime\n'), ((716, 767), 'os.path.join', 'os.path.join', (['os.path.sep', 'current_path', 'log_folder'], {}), '(os.path.sep, current_path, log_folder)\n', (728, 767), False, 'import os\n'), ((898, 925), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (915, 925), False, 'import logging\n'), ((970, 1029), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s(%(levelname)s) %(message)s"""'], {}), "('%(asctime)s(%(levelname)s) %(message)s')\n", (987, 1029), False, 'import logging\n'), ((1042, 1081), 'logging.FileHandler', 'logging.FileHandler', (['log_path'], {'mode': '"""w"""'}), "(log_path, mode='w')\n", (1061, 1081), False, 'import logging\n'), ((1166, 1189), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (1187, 1189), False, 'import logging\n'), ((240, 262), 'inspect.currentframe', 'inspect.currentframe', ([], {}), '()\n', (260, 262), False, 'import inspect\n'), ((295, 326), 'os.path.abspath', 'os.path.abspath', (['current_folder'], {}), '(current_folder)\n', (310, 326), False, 'import os\n'), ((7126, 7160), 'requests.get', 'requests.get', (['url'], {'headers': 'headers'}), '(url, headers=headers)\n', (7138, 7160), False, 'import requests\n'), ((7203, 7230), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html', '"""lxml"""'], {}), "(html, 'lxml')\n", (7216, 7230), False, 'from bs4 import BeautifulSoup\n'), ((638, 662), 'pathlib.Path', 'pathlib.Path', (['log_folder'], {}), '(log_folder)\n', (650, 662), False, 'import pathlib\n'), ((1732, 1762), 'requests.get', 'requests.get', (['url'], {'stream': '(True)'}), '(url, stream=True)\n', (1744, 1762), False, 'import requests\n'), ((7089, 7106), 'random.choice', 'random.choice', (['au'], {}), '(au)\n', (7102, 7106), False, 'import random\n'), ((2777, 2828), 'os.path.join', 'os.path.join', (['os.path.sep', 'current_path', 'new_folder'], {}), '(os.path.sep, current_path, new_folder)\n', (2789, 2828), False, 'import os\n'), ((3134, 3156), 'os.listdir', 'os.listdir', (['new_folder'], {}), '(new_folder)\n', (3144, 3156), False, 'import os\n'), ((3680, 3728), 'os.path.join', 'os.path.join', (['os.path.sep', 'new_folder', 'file_name'], {}), '(os.path.sep, new_folder, file_name)\n', (3692, 3728), False, 'import os\n'), ((7350, 7371), 'json.loads', 'json.loads', (['link.text'], {}), '(link.text)\n', (7360, 7371), False, 'import json\n'), ((2380, 2415), 're.split', 're.split', (['"""[<>:"/\\\\|?*]"""', 'file_name'], {}), '(\'[<>:"/\\\\|?*]\', file_name)\n', (2388, 2415), False, 'import re\n'), ((3044, 3068), 'pathlib.Path', 'pathlib.Path', (['new_folder'], {}), '(new_folder)\n', (3056, 3068), False, 'import pathlib\n'), ((5275, 5323), 'os.path.join', 'os.path.join', (['os.path.sep', 'new_folder', 'file_name'], {}), '(os.path.sep, new_folder, file_name)\n', (5287, 5323), False, 'import os\n'), ((1982, 2021), 'random.sample', 'random.sample', (['string.ascii_letters', '(10)'], {}), '(string.ascii_letters, 10)\n', (1995, 2021), False, 'import random\n'), ((3322, 3361), 'random.sample', 'random.sample', (['string.ascii_letters', '(10)'], {}), '(string.ascii_letters, 10)\n', (3335, 3361), False, 'import random\n'), ((5147, 5186), 'random.sample', 'random.sample', (['string.ascii_letters', '(10)'], {}), '(string.ascii_letters, 10)\n', (5160, 5186), False, 'import random\n')]
|
"""
Python rpc agent
Use for test rpc server
"""
import socket
from threading import Thread
import logging
import json
logger = logging.getLogger('Tester')
class Agent:
def __init__(self, device_id):
self.device_id = device_id
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.real_func = {}
def start(self, host, port):
self.sock.connect((host, port))
logger.debug('Connect to {}:{}'.format(host, port))
t = Thread(target=self.receive_msg)
        t.daemon = True
t.start()
def stop(self):
unregister_msg = {
'msg_type': 1,
'msg_id': 1,
'version': 1,
'name': 'unregister',
'args': []
}
self.sock.sendall((json.dumps(unregister_msg) + '\n').encode())
self.sock.close()
def receive_msg(self):
register_msg = {
'msg_type': 1,
'msg_id': 1,
'version': 1,
'name': 'register',
'args': [self.device_id]
}
response_msg = {
'msg_type': 2,
'msg_id': 1,
'version': 1,
'name': 'response',
'args': []
}
self.sock.sendall((json.dumps(register_msg) + '\n').encode())
r_buffer = self.sock.makefile(mode='r')
register_result = r_buffer.readline()
#register success
print(register_result)
while True:
line = r_buffer.readline()
logger.debug('receive msg : {}'.format(line))
call_obj = json.loads(line)
method_name = call_obj['name']
func = self.real_func.get(method_name)
if func:
                res = func(*call_obj['args'])
response_msg['args'] = [res]
response_msg['msg_id'] = call_obj['msg_id']
else:
response_msg['name'] = 'error'
response_msg['args'] = ['{}: method not found'.format(method_name)]
self.sock.sendall((json.dumps(response_msg) + '\n').encode())
def add_func(self, func_name, func):
self.real_func[func_name] = func
def hello():
logger.info('Agent: Hello')
return True
def get_view(view_id):
logger.info('Agent: GetView by id [{}]'.format(view_id))
return {'id': view_id, 'class': 'TextView', 'text': None, 'hash': 123456}
def start_app(package_name):
logger.info('Agent: StartApp bt package name [{}]'.format(package_name))
return True
def finish_app():
logger.info('Agent: finish app')
return True
def click_on_text(text):
logger.info('Agent: click on text [{}]'.format(text))
return True
def enter_text(view_hash, text):
logger.info('Agent: enter text [{}] to view [{}]'.format(text, view_hash))
return True
def wait_for_text(text):
logger.info('Agent: wait for text [{}]'.format(text))
return True
def click_on_view(view_hash):
logger.info('Agent: click on view [{}]'.format(view_hash))
return True
def get_test_agent(device_id):
_agent = Agent(device_id)
_agent.add_func('hello', hello)
_agent.add_func('GetView', get_view)
_agent.add_func('LaunchApp', start_app)
_agent.add_func('FinishApp', finish_app)
_agent.add_func('ClickOnText', click_on_text)
_agent.add_func('EnterText', enter_text)
_agent.add_func('WaitForText', wait_for_text)
_agent.add_func('ClickOnView', click_on_view)
return _agent
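# Example usage (a minimal sketch; the host, port and device id are illustrative assumptions):
#   agent = get_test_agent('device-001')
#   agent.start('127.0.0.1', 9000)
#   ...  # the server now drives the agent through the registered functions
#   agent.stop()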
|
[
"threading.Thread",
"json.loads",
"socket.socket",
"json.dumps",
"logging.getLogger"
] |
[((130, 157), 'logging.getLogger', 'logging.getLogger', (['"""Tester"""'], {}), "('Tester')\n", (147, 157), False, 'import logging\n'), ((263, 312), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (276, 312), False, 'import socket\n'), ((487, 518), 'threading.Thread', 'Thread', ([], {'target': 'self.receive_msg'}), '(target=self.receive_msg)\n', (493, 518), False, 'from threading import Thread\n'), ((1593, 1609), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1603, 1609), False, 'import json\n'), ((783, 809), 'json.dumps', 'json.dumps', (['unregister_msg'], {}), '(unregister_msg)\n', (793, 809), False, 'import json\n'), ((1259, 1283), 'json.dumps', 'json.dumps', (['register_msg'], {}), '(register_msg)\n', (1269, 1283), False, 'import json\n'), ((2055, 2079), 'json.dumps', 'json.dumps', (['response_msg'], {}), '(response_msg)\n', (2065, 2079), False, 'import json\n')]
|
'''
Created on Dec 4, 2012
Module to handle dynamic decoration of LCIO classes
@author: <a href="mailto:<EMAIL>"><NAME></a>
'''
from ROOT import TVector3, TLorentzVector
from pyLCIO import EVENT, IMPL, IO, UTIL
from array import array
def addMethod( self, method, name=None ):
''' Convenience method to add a method to a class '''
if not name:
name = method.__name__
setattr( self, name, method )
def getEndpointVec( self ):
''' Wrapper for the getEndpoint method to return a TVector3 '''
v = self.getEndpoint()
return TVector3( v[0], v[1], v[2] )
def getLorentzVec( self ):
''' Convenience method for classes that have momentum and energy '''
energy = self.getEnergy()
v = self.getMomentum()
return TLorentzVector( v[0], v[1], v[2], energy )
def getMomentumVec( self ):
''' Wrapper for the getMomentum method to return a TVector3 '''
v = self.getMomentum()
return TVector3( v[0], v[1], v[2] )
def setMomentumVec( self, v ):
''' Wrapper for the setMomentum method passing a vector '''
self.setMomentum( array( 'f', [v.x(), v.y(), v.z()] ) )
def getPositionVec( self ):
''' Wrapper for the getPosition method to return a TVector3 '''
v = self.getPosition()
return TVector3( v[0], v[1], v[2] )
def setPositionVec( self, v ):
''' Wrapper for the setPosition method passing a vector '''
# tracker hits take pointer to doubles, calorimeter hits pointer to floats
try:
self.setPosition( array( 'd', [v.x(), v.y(), v.z()] ) )
except TypeError:
self.setPosition( array( 'f', [v.x(), v.y(), v.z()] ) )
def setRawDataVec( self, v ):
''' Wrapper for the setRawData method passing a vector '''
self.setRawData( array( 'i', v ), len(v) )
def getReferencePointVec( self ):
''' Wrapper for the getReferencePoint method to return a TVector3 '''
v = self.getReferencePoint()
return TVector3( v[0], v[1], v[2] )
def setReferencePointVec( self, v ):
''' Wrapper for the setReferencePoint method passing a vector '''
self.setReferencePoint( array( 'f', [v.x(), v.y(), v.z()] ) )
def getVertexVec( self ):
''' Wrapper for the getVertex method to return a TVector3 '''
v = self.getVertex()
return TVector3( v[0], v[1], v[2] )
def setVertexVec( self, v ):
''' Wrapper for the setVertex method passing a vector '''
self.setVertex( array( 'd', [v.x(), v.y(), v.z()] ) )
def LCCollectionIterator( self ):
''' Helper method to make LCCollection iterable '''
index = 0
nElements = self.getNumberOfElements()
while index < nElements:
yield self.getElementAt( index )
index += 1
def LCEventIterator( self ):
''' Helper method to make LCEvent iterable '''
for collectionName in self.getCollectionNames():
yield collectionName, self.getCollection( collectionName )
def LCReaderIterator( self ):
''' Helper method to make LCReader iterable '''
event = self.readNextEvent()
while event:
yield event
event = self.readNextEvent()
def LCStdHepRdrIterator( self ):
''' Helper method to make LCStdHepRdr iterable '''
processedEvents = 0
while True:
event = IMPL.LCEventImpl()
try:
self.updateNextEvent( event )
event.setEventNumber( processedEvents )
except Exception:
break
yield event
processedEvents += 1
def getMcParticles( self ):
''' Convenience method to get the default mc particle collection '''
return self.getCollection( EVENT.LCIO.MCPARTICLE )
def getTracks( self ):
''' Convenience method to get the default track collection '''
return self.getCollection( EVENT.LCIO.TRACK )
# List of LCIO classes to decorate
lcioClasses = [ EVENT.LCEvent,
EVENT.CalorimeterHit,
EVENT.Cluster,
EVENT.RawCalorimeterHit,
EVENT.ReconstructedParticle,
EVENT.SimCalorimeterHit,
EVENT.SimTrackerHit,
EVENT.TPCHit,
EVENT.Track,
EVENT.TrackerData,
EVENT.TrackerHit,
EVENT.TrackState,
EVENT.Vertex,
EVENT.MCParticle,
EVENT.LCCollection,
IMPL.MCParticleImpl,
IMPL.CalorimeterHitImpl,
IMPL.ClusterImpl,
IMPL.RawCalorimeterHitImpl,
IMPL.ReconstructedParticleImpl,
IMPL.SimCalorimeterHitImpl,
IMPL.SimTrackerHitImpl,
IMPL.TPCHitImpl,
IMPL.TrackerDataImpl,
IMPL.TrackerHitImpl,
IMPL.TrackerHitPlaneImpl,
IMPL.TrackerHitZCylinderImpl,
IMPL.TrackerPulseImpl,
IMPL.TrackerRawDataImpl,
IMPL.TrackImpl,
IMPL.TrackStateImpl,
IMPL.VertexImpl,
IO.LCReader,
UTIL.LCStdHepRdr,
]
# Cache decorated classes
decoratedClasses = []
def decorateClass( o ):
''' Method to wrap the decoration logic '''
if o not in decoratedClasses:
d = dir( o )
if 'getEndpoint' in d:
addMethod( o, getEndpointVec )
if 'getMomentum' in d:
addMethod( o, getMomentumVec )
if 'setMomentum' in d:
addMethod( o, setMomentumVec )
        if 'getMomentum' in d and 'getEnergy' in d:
addMethod( o, getLorentzVec )
if 'getPosition' in d:
addMethod( o, getPositionVec )
if 'setPosition' in d:
addMethod( o, setPositionVec )
if 'getReferencePoint' in d:
addMethod( o, getReferencePointVec )
if 'setRawData' in d:
addMethod( o, setRawDataVec )
if 'setReferencePoint' in d:
addMethod( o, setReferencePointVec )
if 'getVertex' in d:
addMethod( o, getVertexVec )
if 'setVertex' in d:
addMethod( o, setVertexVec )
if 'getCollection' in d:
addMethod( o, getMcParticles )
addMethod( o, getTracks )
        if 'getNumberOfElements' in d and 'getElementAt' in d:
addMethod( o, LCCollectionIterator, '__iter__' )
        if 'getCollectionNames' in d and 'getCollection' in d:
addMethod( o, LCEventIterator, '__iter__' )
if 'readNextEvent' in d:
addMethod( o, LCReaderIterator, '__iter__' )
if 'updateNextEvent' in d:
addMethod( o, LCStdHepRdrIterator, '__iter__' )
decoratedClasses.append( o )
def decorateLcioClasses():
''' Standard method to decorate all LCIO classes '''
for lcioClass in lcioClasses:
decorateClass( lcioClass )
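# Example (a sketch): after decorateLcioClasses(), the wrapped classes gain vector
# accessors and iteration support, e.g.
#   decorateLcioClasses()
#   # for event in reader: ...           (LCReader becomes iterable)
#   # position = hit.getPositionVec()    (returns a TVector3)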
|
[
"ROOT.TLorentzVector",
"pyLCIO.IMPL.LCEventImpl",
"ROOT.TVector3",
"array.array"
] |
[((556, 582), 'ROOT.TVector3', 'TVector3', (['v[0]', 'v[1]', 'v[2]'], {}), '(v[0], v[1], v[2])\n', (564, 582), False, 'from ROOT import TVector3, TLorentzVector\n'), ((758, 798), 'ROOT.TLorentzVector', 'TLorentzVector', (['v[0]', 'v[1]', 'v[2]', 'energy'], {}), '(v[0], v[1], v[2], energy)\n', (772, 798), False, 'from ROOT import TVector3, TLorentzVector\n'), ((936, 962), 'ROOT.TVector3', 'TVector3', (['v[0]', 'v[1]', 'v[2]'], {}), '(v[0], v[1], v[2])\n', (944, 962), False, 'from ROOT import TVector3, TLorentzVector\n'), ((1256, 1282), 'ROOT.TVector3', 'TVector3', (['v[0]', 'v[1]', 'v[2]'], {}), '(v[0], v[1], v[2])\n', (1264, 1282), False, 'from ROOT import TVector3, TLorentzVector\n'), ((1914, 1940), 'ROOT.TVector3', 'TVector3', (['v[0]', 'v[1]', 'v[2]'], {}), '(v[0], v[1], v[2])\n', (1922, 1940), False, 'from ROOT import TVector3, TLorentzVector\n'), ((2247, 2273), 'ROOT.TVector3', 'TVector3', (['v[0]', 'v[1]', 'v[2]'], {}), '(v[0], v[1], v[2])\n', (2255, 2273), False, 'from ROOT import TVector3, TLorentzVector\n'), ((1735, 1748), 'array.array', 'array', (['"""i"""', 'v'], {}), "('i', v)\n", (1740, 1748), False, 'from array import array\n'), ((3223, 3241), 'pyLCIO.IMPL.LCEventImpl', 'IMPL.LCEventImpl', ([], {}), '()\n', (3239, 3241), False, 'from pyLCIO import EVENT, IMPL, IO, UTIL\n')]
|
from core.terraform.resources.aws import iam
from resources.iam.ecs_role import ECSRole
class LambdaPolicyDocument(iam.IAMPolicyDocumentData):
statement = [
{
'actions': ["sts:AssumeRole"],
'principals': {
'type': "AWS",
'identifiers': [ECSRole.get_output_attr('arn')]
}
}
]
class AllReadRole(iam.IAMRoleResource):
name = "" # Empty string will take prefix as the name
assume_role_policy = LambdaPolicyDocument.get_output_attr('json')
force_detach_policies = True
class AllReadOnlyAccessPolicyAttach(iam.IAMRolePolicyAttachmentResource):
role = AllReadRole.get_output_attr('name')
policy_arn = "arn:aws:iam::aws:policy/ReadOnlyAccess"
class AllReadLambdaFullAccessPolicyAttach(iam.IAMRolePolicyAttachmentResource):
role = AllReadRole.get_output_attr('name')
policy_arn = "arn:aws:iam::aws:policy/AWSLambda_FullAccess"
class AllReadIAMFullAccessPolicyAttach(iam.IAMRolePolicyAttachmentResource):
role = AllReadRole.get_output_attr('name')
policy_arn = "arn:aws:iam::aws:policy/IAMFullAccess"
class AllReadConfigRolePolicyAttach(iam.IAMRolePolicyAttachmentResource):
role = AllReadRole.get_output_attr('name')
policy_arn = "arn:aws:iam::aws:policy/service-role/AWSConfigRole"
class AllReadSupportAccessPolicyAttach(iam.IAMRolePolicyAttachmentResource):
role = AllReadRole.get_output_attr('name')
policy_arn = "arn:aws:iam::aws:policy/AWSSupportAccess"
class AllReadRoleAutoFixPolicyDocument(iam.IAMPolicyDocumentData):
statement = [
{
'actions': [
"ec2:AuthorizeSecurityGroupEgress",
"ec2:AuthorizeSecurityGroupIngress",
"ec2:CreateSecurityGroup",
"ec2:CreateTags",
"ec2:DescribeTags",
"ec2:ModifyInstanceAttribute",
"ec2:UpdateSecurityGroupRuleDescriptionsEgress",
"ec2:UpdateSecurityGroupRuleDescriptionsIngress",
"s3:DeleteBucketPolicy",
"s3:GetBucketAcl",
"s3:GetBucketPolicy",
"s3:GetBucketTagging",
"s3:GetObjectAcl",
"s3:ListBucket",
"s3:ListBucketByTags",
"s3:PutBucketAcl",
"s3:PutBucketPolicy",
"s3:PutBucketTagging",
"redshift:AuthorizeClusterSecurityGroupIngress",
"redshift:CreateClusterSecurityGroup",
"redshift:CreateTags",
"redshift:ModifyCluster",
],
'resources': ["*"],
'effect': "Allow"
},
{
'actions': [
"logs:CreateLogGroup",
"logs:CreateLogStream",
"logs:PutLogEvents",
"logs:DescribeLogGroups",
"logs:DescribeLogStreams"
],
'resources': ["*"],
'effect': "Allow"
},
{
'actions': [
"ec2:DeleteSecurityGroup",
],
'resources': ["*"],
'effect': "Allow",
'condition': [
{
'test': "StringEquals",
'variable': "ec2:ResourceTag/pacbot-delete-sg",
'values': ["true"]
}
]
},
{
'actions': [
"ec2:ReleaseAddress",
],
'resources': ["*"],
'effect': "Allow"
},
{
'actions': [
"rds:modifyDBInstance",
"rds:describeDBInstances",
"rds:AddTagsToResource",
"rds:CreateDBSecurityGroup",
"es:describeElasticsearchDomain",
"es:updateElasticsearchDomainConfig",
"es:addTags"
],
'resources': ["*"],
'effect': "Allow"
},
{
'actions': [
"s3:setPublicAccessBlock",
],
'resources': ["*"],
'effect': "Allow"
},
]
class AllReadRoleAutoFixPolicy(iam.IAMRolePolicyResource):
name = "pacbot-autofix"
path = '/'
policy = AllReadRoleAutoFixPolicyDocument.get_output_attr('json')
class AllReadRoleAutoFixPolicyAttach(iam.IAMRolePolicyAttachmentResource):
role = AllReadRole.get_output_attr('name')
policy_arn = AllReadRoleAutoFixPolicy.get_output_attr('arn')
class AllReadRolePolicyDocument(iam.IAMPolicyDocumentData):
statement = [
{
'actions': ["sts:AssumeRole"],
'resources': [AllReadRole.get_output_attr('arn')]
}
]
class AllReadRolePolicy(iam.IAMRolePolicyResource):
name = ""
path = '/'
policy = AllReadRolePolicyDocument.get_output_attr('json')
class AllReadRoleLambdaPolicyAttach(iam.IAMRolePolicyAttachmentResource):
role = ECSRole.get_output_attr('name')
policy_arn = AllReadRolePolicy.get_output_attr('arn')
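# Note: the attachments above grant AllReadRole its managed and auto-fix permissions,
# while AllReadRolePolicy (attached to the ECS role here) permits sts:AssumeRole on
# AllReadRole, matching the trust relationship declared in LambdaPolicyDocument.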
|
[
"resources.iam.ecs_role.ECSRole.get_output_attr"
] |
[((5157, 5188), 'resources.iam.ecs_role.ECSRole.get_output_attr', 'ECSRole.get_output_attr', (['"""name"""'], {}), "('name')\n", (5180, 5188), False, 'from resources.iam.ecs_role import ECSRole\n'), ((307, 337), 'resources.iam.ecs_role.ECSRole.get_output_attr', 'ECSRole.get_output_attr', (['"""arn"""'], {}), "('arn')\n", (330, 337), False, 'from resources.iam.ecs_role import ECSRole\n')]
|
import streamlit as st
import plotly.express as px
import components.base as gSlider
from utils.helpers import load_data
def admissions_plot():
df_opnames_age = load_data('COVID-19_ziekenhuis_ic_opnames_per_leeftijdsgroep.csv', dates="Date_of_statistics_week_start", index="Date_of_statistics_week_start")
df_opnames_dropped = df_opnames_age.drop(columns=['Version', 'Date_of_report'])
# Slice dataframe with global date slider
df_opnames_dropped = df_opnames_dropped[gSlider.start_h: gSlider.end_h]
    d_min, d_max = df_opnames_dropped.index.min().date(), df_opnames_dropped.index.max().date()
    periode = "<br>over periode: " + str(d_min) + " tot " + str(d_max)
# Reset to make sure nothing else is changed
df_opnames_dropped = df_opnames_dropped.reset_index()
df_grouped_by_age = df_opnames_dropped.groupby('Age_group').sum()
    age_groups = st.multiselect('Selecteer een leeftijdsgroep om de data te bekijken.',
                                sorted(df_grouped_by_age.index),
                                default=sorted(df_grouped_by_age.index[0:2]))
    fig2 = px.line(df_opnames_dropped[df_opnames_dropped['Age_group'].isin(age_groups)], x='Date_of_statistics_week_start', y="Hospital_admission", color='Age_group', labels={
"Hospital_admission": "Ziekenhuis opnames",
"Date_of_statistics_week_start": "Datum van statistiek opname",
"Age_group": "Leeftijdsgroep"
}, width=500, height=400, title="Ziekenhuis opnames over verloop van tijd<br>per leeftijdsgroep" + periode)
st.plotly_chart(fig2, use_container_width=True)
def age_groups_plot():
df_opnames_age = load_data('COVID-19_ziekenhuis_ic_opnames_per_leeftijdsgroep.csv', dates="Date_of_statistics_week_start", index="Date_of_statistics_week_start")
df_opnames_dropped = df_opnames_age.drop(columns=['Version', 'Date_of_report'])
# Slice dataframe with global date slider
df_opnames_dropped = df_opnames_dropped[gSlider.start_h: gSlider.end_h]
    d_min, d_max = df_opnames_dropped.index.min().date(), df_opnames_dropped.index.max().date()
    periode = "<br>over periode: " + str(d_min) + " tot " + str(d_max)
# reset to make sure nothing else is changed
df_opnames_dropped = df_opnames_dropped.reset_index()
df_grouped_by_age = df_opnames_dropped.groupby('Age_group').sum()
fig = px.bar(df_grouped_by_age, y=["Hospital_admission", "IC_admission"], barmode="group", labels={
"value": "Aantal mensen",
"Age_group": "Leeftijdsgroepen",
"variable": "Legenda",
}, width=500, height=400, title="Landelijke opnames per leeftijdsgroep" + periode)
fig.data[0].name = "Ziekenhuisopnames"
fig.data[1].name = "IC Opnames"
st.plotly_chart(fig, use_container_width=True)
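# Both plots slice their data with the same global date slider bounds
# (gSlider.start_h / gSlider.end_h), so rendering them on one page keeps the
# reported periods consistent.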
|
[
"plotly.express.bar",
"utils.helpers.load_data",
"streamlit.plotly_chart"
] |
[((167, 316), 'utils.helpers.load_data', 'load_data', (['"""COVID-19_ziekenhuis_ic_opnames_per_leeftijdsgroep.csv"""'], {'dates': '"""Date_of_statistics_week_start"""', 'index': '"""Date_of_statistics_week_start"""'}), "('COVID-19_ziekenhuis_ic_opnames_per_leeftijdsgroep.csv', dates=\n 'Date_of_statistics_week_start', index='Date_of_statistics_week_start')\n", (176, 316), False, 'from utils.helpers import load_data\n'), ((1532, 1579), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig2'], {'use_container_width': '(True)'}), '(fig2, use_container_width=True)\n', (1547, 1579), True, 'import streamlit as st\n'), ((1625, 1774), 'utils.helpers.load_data', 'load_data', (['"""COVID-19_ziekenhuis_ic_opnames_per_leeftijdsgroep.csv"""'], {'dates': '"""Date_of_statistics_week_start"""', 'index': '"""Date_of_statistics_week_start"""'}), "('COVID-19_ziekenhuis_ic_opnames_per_leeftijdsgroep.csv', dates=\n 'Date_of_statistics_week_start', index='Date_of_statistics_week_start')\n", (1634, 1774), False, 'from utils.helpers import load_data\n'), ((2330, 2598), 'plotly.express.bar', 'px.bar', (['df_grouped_by_age'], {'y': "['Hospital_admission', 'IC_admission']", 'barmode': '"""group"""', 'labels': "{'value': 'Aantal mensen', 'Age_group': 'Leeftijdsgroepen', 'variable':\n 'Legenda'}", 'width': '(500)', 'height': '(400)', 'title': "('Landelijke opnames per leeftijdsgroep' + periode)"}), "(df_grouped_by_age, y=['Hospital_admission', 'IC_admission'], barmode\n ='group', labels={'value': 'Aantal mensen', 'Age_group':\n 'Leeftijdsgroepen', 'variable': 'Legenda'}, width=500, height=400,\n title='Landelijke opnames per leeftijdsgroep' + periode)\n", (2336, 2598), True, 'import plotly.express as px\n'), ((2700, 2746), 'streamlit.plotly_chart', 'st.plotly_chart', (['fig'], {'use_container_width': '(True)'}), '(fig, use_container_width=True)\n', (2715, 2746), True, 'import streamlit as st\n')]
|
import torch
class ConvBlock2d(torch.nn.Module):
def __init__(
self,
input_feature_size,
output_feature_size,
internal_feature_size=None,
num_layers=4,
batch_norm=True,
dropout=None,
bias=False,
bias_initializer=None,
weight_initializer=None
):
""" A trivial implementation of 3x3 conv layers stacked on top of each other
Implemented only to make code more streamline and easier to read
Args
input_feature_size : Input feature size
output_feature_size : Output feature size
internal_feature_size : Feature size inside the block (if None, uses input_feature_size)
num_layers : The number of conv layers in this block (excluding final conv layer)
batch_norm : Flag to raise if batch normalization should be applied at start of conv block
dropout : Percent dropout, 0 if None
bias : Flag to raise if conv layers should have bias
"""
super(ConvBlock2d, self).__init__()
self.num_layers = num_layers
self.batch_norm = batch_norm
self.dropout = dropout
if not internal_feature_size:
internal_feature_size = input_feature_size
self.relu = torch.nn.ReLU(inplace=False)
if self.batch_norm:
self.batch_norm = torch.nn.BatchNorm2d(input_feature_size)
for i in range(num_layers):
current_input_size = input_feature_size if i == 0 else internal_feature_size
current_output_size = output_feature_size if i == num_layers - 1 else internal_feature_size
# Create new layer
conv_layer = torch.nn.Conv2d(
current_input_size,
current_output_size,
kernel_size=3,
stride=1,
padding=1,
bias=bias
)
# init bias and weight
if bias and bias_initializer:
bias_initializer(conv_layer.bias)
if weight_initializer:
weight_initializer(conv_layer.weight)
# Save layer
setattr(self, 'conv{}'.format(i), conv_layer)
if self.dropout:
self.dropout = torch.nn.Dropout2d(p=self.dropout, inplace=False)
def forward(self, x):
if self.batch_norm:
x = self.batch_norm(x)
x = self.relu(x)
for i in range(self.num_layers):
x = getattr(self, 'conv{}'.format(i))(x)
x = self.relu(x)
if self.dropout:
x = self.dropout(x)
return x
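# Example usage (a minimal sketch; the shapes below are illustrative assumptions):
#   block = ConvBlock2d(input_feature_size=16, output_feature_size=32, dropout=0.1)
#   y = block(torch.randn(8, 16, 64, 64))  # -> (8, 32, 64, 64); 3x3/stride-1/padding-1 convs preserve H and W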
|
[
"torch.nn.BatchNorm2d",
"torch.nn.Conv2d",
"torch.nn.ReLU",
"torch.nn.Dropout2d"
] |
[((1315, 1343), 'torch.nn.ReLU', 'torch.nn.ReLU', ([], {'inplace': '(False)'}), '(inplace=False)\n', (1328, 1343), False, 'import torch\n'), ((1403, 1443), 'torch.nn.BatchNorm2d', 'torch.nn.BatchNorm2d', (['input_feature_size'], {}), '(input_feature_size)\n', (1423, 1443), False, 'import torch\n'), ((1731, 1838), 'torch.nn.Conv2d', 'torch.nn.Conv2d', (['current_input_size', 'current_output_size'], {'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': 'bias'}), '(current_input_size, current_output_size, kernel_size=3,\n stride=1, padding=1, bias=bias)\n', (1746, 1838), False, 'import torch\n'), ((2299, 2348), 'torch.nn.Dropout2d', 'torch.nn.Dropout2d', ([], {'p': 'self.dropout', 'inplace': '(False)'}), '(p=self.dropout, inplace=False)\n', (2317, 2348), False, 'import torch\n')]
|
# Author : <NAME>
# Date : July 19th, 2007
# last update: $Date: 2010/03/17 18:17:34 $ by $Author: mussgill $
import FWCore.ParameterSet.Config as cms
# DCS partitions
# "EBp","EBm","EEp","EEm","HBHEa","HBHEb","HBHEc","HF","HO","RPC"
# "DT0","DTp","DTm","CSCp","CSCm","CASTOR","TIBTID","TOB","TECp","TECm"
# "BPIX","FPIX","ESp","ESm"
import DPGAnalysis.Skims.skim_detstatus_cfi
ALCARECOTkAlCosmicsDCSFilter = DPGAnalysis.Skims.skim_detstatus_cfi.dcsstatus.clone(
DetectorType = cms.vstring('TIBTID','TOB','TECp','TECm','BPIX','FPIX'),
ApplyFilter = cms.bool(True),
AndOr = cms.bool(True),
DebugOn = cms.untracked.bool(False)
)
#________________________________Track selection____________________________________
# AlCaReco for track based alignment using Cosmic muons reconstructed by Combinatorial Track Finder
import Alignment.CommonAlignmentProducer.AlignmentTrackSelector_cfi
ALCARECOTkAlCosmicsCTF = Alignment.CommonAlignmentProducer.AlignmentTrackSelector_cfi.AlignmentTrackSelector.clone(
src = 'ctfWithMaterialTracksP5',
filter = True,
applyBasicCuts = True,
ptMin = 0., ##10
ptMax = 99999.,
pMin = 4., ##10
pMax = 99999.,
etaMin = -99., ##-2.4 keep also what is going through...
etaMax = 99., ## 2.4 ...both TEC with flat slope
nHitMin = 7,
nHitMin2D = 2,
chi2nMax = 999999.,
applyMultiplicityFilter = False,
applyNHighestPt = True, ## select only highest pT track
nHighestPt = 1
)
# AlCaReco for track based alignment using Cosmic muons reconstructed by Cosmic Track Finder
# (same cuts)
ALCARECOTkAlCosmicsCosmicTF = ALCARECOTkAlCosmicsCTF.clone(
src = 'cosmictrackfinderP5' ## different for CTF
)
# AlCaReco for track based alignment using Cosmic muons reconstructed by Regional Cosmic Tracking
# (same cuts)
ALCARECOTkAlCosmicsRegional = ALCARECOTkAlCosmicsCTF.clone(
src = 'regionalCosmicTracks'
)
#________________________________Sequences____________________________________
seqALCARECOTkAlCosmicsCTF = cms.Sequence(ALCARECOTkAlCosmicsCTF)
seqALCARECOTkAlCosmicsCosmicTF = cms.Sequence(ALCARECOTkAlCosmicsCosmicTF)
seqALCARECOTkAlCosmicsRegional = cms.Sequence(ALCARECOTkAlCosmicsRegional)
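# Note: the three sequences above apply identical kinematic cuts; they differ only in the
# input track collection (combinatorial, cosmic, or regional cosmic track finder).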
|
[
"FWCore.ParameterSet.Config.Sequence",
"FWCore.ParameterSet.Config.vstring",
"FWCore.ParameterSet.Config.untracked.bool",
"FWCore.ParameterSet.Config.bool"
] |
[((2053, 2089), 'FWCore.ParameterSet.Config.Sequence', 'cms.Sequence', (['ALCARECOTkAlCosmicsCTF'], {}), '(ALCARECOTkAlCosmicsCTF)\n', (2065, 2089), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2123, 2164), 'FWCore.ParameterSet.Config.Sequence', 'cms.Sequence', (['ALCARECOTkAlCosmicsCosmicTF'], {}), '(ALCARECOTkAlCosmicsCosmicTF)\n', (2135, 2164), True, 'import FWCore.ParameterSet.Config as cms\n'), ((2198, 2239), 'FWCore.ParameterSet.Config.Sequence', 'cms.Sequence', (['ALCARECOTkAlCosmicsRegional'], {}), '(ALCARECOTkAlCosmicsRegional)\n', (2210, 2239), True, 'import FWCore.ParameterSet.Config as cms\n'), ((496, 556), 'FWCore.ParameterSet.Config.vstring', 'cms.vstring', (['"""TIBTID"""', '"""TOB"""', '"""TECp"""', '"""TECm"""', '"""BPIX"""', '"""FPIX"""'], {}), "('TIBTID', 'TOB', 'TECp', 'TECm', 'BPIX', 'FPIX')\n", (507, 556), True, 'import FWCore.ParameterSet.Config as cms\n'), ((572, 586), 'FWCore.ParameterSet.Config.bool', 'cms.bool', (['(True)'], {}), '(True)\n', (580, 586), True, 'import FWCore.ParameterSet.Config as cms\n'), ((607, 621), 'FWCore.ParameterSet.Config.bool', 'cms.bool', (['(True)'], {}), '(True)\n', (615, 621), True, 'import FWCore.ParameterSet.Config as cms\n'), ((642, 667), 'FWCore.ParameterSet.Config.untracked.bool', 'cms.untracked.bool', (['(False)'], {}), '(False)\n', (660, 667), True, 'import FWCore.ParameterSet.Config as cms\n')]
|
from __future__ import with_statement
import datetime
import sys
import os
import requests
from akismet import Akismet, SpamStatus
from akismet.exceptions import AkismetServerError, MissingParameterError
if sys.version_info < (2, 7):
import unittest2 as unittest
else:
import unittest
USER_AGENT = 'Mozilla/5.0 (X11; Linux x86_64; rv:40.0) Gecko/20100101 Firefox/40.0'
EVIL_USER_AGENT = 'Bot Evil/0.1'
class TestAkismet(unittest.TestCase):
akismet = None
def setUp(self):
try:
akismet_api_key = os.environ['AKISMET_API_KEY']
except KeyError:
raise EnvironmentError('Provide AKISMET_API_KEY environment setting.')
self.akismet = Akismet(akismet_api_key, is_test=True)
def test_check(self):
self.assertEqual(self.akismet.check('127.0.0.1', USER_AGENT, blog='http://127.0.0.1'), SpamStatus.Ham)
def test_check_spam(self):
self.assertEqual(self.akismet.check('127.0.0.1', EVIL_USER_AGENT, comment_author='viagra-test-123',
blog='http://127.0.0.1'), SpamStatus.ProbableSpam)
def test_invalid_api_key(self):
akismet = Akismet('invalid_api_key', is_test=True)
with self.assertRaises(AkismetServerError):
akismet.check('127.0.0.1', EVIL_USER_AGENT, blog='http://127.0.0.1')
def test_submit_spam(self):
self.akismet.submit_spam('127.0.0.1', EVIL_USER_AGENT, blog='http://127.0.0.1')
def test_submit_ham(self):
self.akismet.submit_ham('127.0.0.1', USER_AGENT, blog='http://127.0.0.1')
def test_datetime(self):
blog_url = 'http://127.0.0.1'
comment_date = datetime.datetime(2016, 4, 16, 15, 12, 5)
comment_post_modified = datetime.datetime(2016, 4, 16, 16, 27, 31)
data = self.akismet._get_parameters({'blog': blog_url, 'comment_post_modified': comment_post_modified,
'comment_date': comment_date})
for dtkey in ['comment_date', 'comment_post_modified']:
self.assertIn('{0}_gmt'.format(dtkey), data)
self.assertNotIn(dtkey, data)
self.assertEqual(data['{0}_gmt'.format(dtkey)], locals()[dtkey].isoformat())
def test_timeout(self):
self.akismet = Akismet(os.environ['AKISMET_API_KEY'], timeout=0.000001, is_test=True)
with self.assertRaises(requests.ConnectionError):
self.akismet.submit_ham('127.0.0.1', USER_AGENT, blog='http://127.0.0.1')
def test_require_blog_param(self):
with self.assertRaises(MissingParameterError):
self.akismet._get_parameters({})
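# Note: these tests exercise the live Akismet API, so the AKISMET_API_KEY environment
# variable must be set (see setUp); is_test=True marks the requests as test traffic.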
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"datetime.datetime",
"akismet.Akismet"
] |
[((2662, 2677), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2675, 2677), False, 'import unittest\n'), ((698, 736), 'akismet.Akismet', 'Akismet', (['akismet_api_key'], {'is_test': '(True)'}), '(akismet_api_key, is_test=True)\n', (705, 736), False, 'from akismet import Akismet, SpamStatus\n'), ((1165, 1205), 'akismet.Akismet', 'Akismet', (['"""invalid_api_key"""'], {'is_test': '(True)'}), "('invalid_api_key', is_test=True)\n", (1172, 1205), False, 'from akismet import Akismet, SpamStatus\n'), ((1665, 1706), 'datetime.datetime', 'datetime.datetime', (['(2016)', '(4)', '(16)', '(15)', '(12)', '(5)'], {}), '(2016, 4, 16, 15, 12, 5)\n', (1682, 1706), False, 'import datetime\n'), ((1739, 1781), 'datetime.datetime', 'datetime.datetime', (['(2016)', '(4)', '(16)', '(16)', '(27)', '(31)'], {}), '(2016, 4, 16, 16, 27, 31)\n', (1756, 1781), False, 'import datetime\n'), ((2273, 2340), 'akismet.Akismet', 'Akismet', (["os.environ['AKISMET_API_KEY']"], {'timeout': '(1e-06)', 'is_test': '(True)'}), "(os.environ['AKISMET_API_KEY'], timeout=1e-06, is_test=True)\n", (2280, 2340), False, 'from akismet import Akismet, SpamStatus\n')]
|
"""Test cases for day 22."""
from pathlib import Path
import pytest
from aoc import log
from aoc import ROOT_PATH
from day22 import compute_reactor_state
from day22 import load_dataset
@pytest.mark.parametrize(
"dataset_path,initialisation_limit,reboot_limit",
[
[Path("day22/data/data-small.txt"), 474140, 2758514936282235],
[Path("day22/data/data-large.txt"), 620241, 1284561759639324],
],
)
def test_day(dataset_path: Path, initialisation_limit, reboot_limit):
"""Test case for verifying the results of day 22."""
instructions = load_dataset(ROOT_PATH.joinpath(dataset_path))
log.info(
"Loaded day 22 dataset %s with %d reactor core instructions.",
dataset_path.name,
len(instructions),
)
init_sequence = compute_reactor_state(instructions, True)
total_active = compute_reactor_state(instructions, False)
log.info(
"After the reactor initialisation sequence %d cubes are active, "
"after the full reboot %d are active.",
init_sequence,
total_active,
)
assert init_sequence == initialisation_limit
assert total_active == reboot_limit
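# Each parametrized case checks both phases: compute_reactor_state(instructions, True) for
# the initialisation sequence and compute_reactor_state(instructions, False) for the full reboot count.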
|
[
"aoc.ROOT_PATH.joinpath",
"pathlib.Path",
"aoc.log.info",
"day22.compute_reactor_state"
] |
[((784, 825), 'day22.compute_reactor_state', 'compute_reactor_state', (['instructions', '(True)'], {}), '(instructions, True)\n', (805, 825), False, 'from day22 import compute_reactor_state\n'), ((845, 887), 'day22.compute_reactor_state', 'compute_reactor_state', (['instructions', '(False)'], {}), '(instructions, False)\n', (866, 887), False, 'from day22 import compute_reactor_state\n'), ((892, 1042), 'aoc.log.info', 'log.info', (['"""After the reactor initialisation sequence %d cubes are active, after the full reboot %d are active."""', 'init_sequence', 'total_active'], {}), "(\n 'After the reactor initialisation sequence %d cubes are active, after the full reboot %d are active.'\n , init_sequence, total_active)\n", (900, 1042), False, 'from aoc import log\n'), ((585, 617), 'aoc.ROOT_PATH.joinpath', 'ROOT_PATH.joinpath', (['dataset_path'], {}), '(dataset_path)\n', (603, 617), False, 'from aoc import ROOT_PATH\n'), ((284, 317), 'pathlib.Path', 'Path', (['"""day22/data/data-small.txt"""'], {}), "('day22/data/data-small.txt')\n", (288, 317), False, 'from pathlib import Path\n'), ((355, 388), 'pathlib.Path', 'Path', (['"""day22/data/data-large.txt"""'], {}), "('day22/data/data-large.txt')\n", (359, 388), False, 'from pathlib import Path\n')]
|
#!/usr/bin/python
import random
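# Writes a random 128-bit integer (as decimal text) to /picpic; requires write access to the filesystem root.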
file("/picpic", "w").write(str(random.getrandbits(128)))
|
[
"random.getrandbits"
] |
[((65, 88), 'random.getrandbits', 'random.getrandbits', (['(128)'], {}), '(128)\n', (83, 88), False, 'import random\n')]
|
# -*- coding: utf-8 -*-
from functools import partial
from openprocurement.api.models import get_now
from openprocurement.api.utils import (
context_unpack,
decrypt,
encrypt,
json_view,
APIResource,
)
from openprocurement.edge.utils import eaopresource
try:
import openprocurement.auctions.core as auctions_core
except ImportError:
auctions_core = None
if auctions_core:
from openprocurement.auctions.core.design import (
FIELDS,
auctions_by_dateModified_view,
auctions_real_by_dateModified_view,
auctions_test_by_dateModified_view,
auctions_by_local_seq_view,
auctions_real_by_local_seq_view,
auctions_test_by_local_seq_view,
)
VIEW_MAP = {
u'': auctions_real_by_dateModified_view,
u'test': auctions_test_by_dateModified_view,
u'_all_': auctions_by_dateModified_view,
}
CHANGES_VIEW_MAP = {
u'': auctions_real_by_local_seq_view,
u'test': auctions_test_by_local_seq_view,
u'_all_': auctions_by_local_seq_view,
}
FEED = {
u'dateModified': VIEW_MAP,
u'changes': CHANGES_VIEW_MAP,
}
@eaopresource(name='Auctions',
path='/auctions',
description="Open Contracting compatible data exchange format. See http://ocds.open-contracting.org/standard/r/master/#auction for more info")
class AuctionsResource(APIResource):
def __init__(self, request, context):
super(AuctionsResource, self).__init__(request, context)
self.server = request.registry.couchdb_server
self.update_after = request.registry.update_after
@json_view(permission='view_auction')
def get(self):
"""Auctions List
Get Auctions List
----------------
Example request to get auctions list:
.. sourcecode:: http
GET /auctions HTTP/1.1
Host: example.com
Accept: application/json
This is what one should expect in response:
.. sourcecode:: http
HTTP/1.1 200 OK
Content-Type: application/json
{
"data": [
{
"id": "64e93250be76435397e8c992ed4214d1",
"dateModified": "2014-10-27T08:06:58.158Z"
}
]
}
"""
# http://wiki.apache.org/couchdb/HTTP_view_API#Querying_Options
params = {}
pparams = {}
fields = self.request.params.get('opt_fields', '')
if fields:
params['opt_fields'] = fields
pparams['opt_fields'] = fields
fields = fields.split(',')
view_fields = fields + ['dateModified', 'id']
limit = self.request.params.get('limit', '')
if limit:
params['limit'] = limit
pparams['limit'] = limit
limit = int(limit) if limit.isdigit() and (100 if fields else 1000) >= int(limit) > 0 else 100
descending = bool(self.request.params.get('descending'))
offset = self.request.params.get('offset', '')
if descending:
params['descending'] = 1
else:
pparams['descending'] = 1
feed = self.request.params.get('feed', '')
view_map = FEED.get(feed, VIEW_MAP)
changes = view_map is CHANGES_VIEW_MAP
if feed and feed in FEED:
params['feed'] = feed
pparams['feed'] = feed
mode = self.request.params.get('mode', '')
if mode and mode in view_map:
params['mode'] = mode
pparams['mode'] = mode
view_limit = limit + 1 if offset else limit
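        # for the 'changes' feed the offset is a local sequence number; it is handed to
        # clients encrypted with the server uuid and db name (see encrypt/decrypt below),
        # so it stays opaque and bound to this server/database instance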
if changes:
if offset:
view_offset = decrypt(self.server.uuid, self.db.name, offset)
if view_offset and view_offset.isdigit():
view_offset = int(view_offset)
else:
self.request.errors.add('params', 'offset', 'Offset expired/invalid')
self.request.errors.status = 404
return
if not offset:
view_offset = 'now' if descending else 0
else:
if offset:
view_offset = offset
else:
view_offset = '9' if descending else ''
list_view = view_map.get(mode, view_map[u''])
if self.update_after:
view = partial(list_view, self.db, limit=view_limit, startkey=view_offset, descending=descending, stale='update_after')
else:
view = partial(list_view, self.db, limit=view_limit, startkey=view_offset, descending=descending)
if fields:
if not changes and set(fields).issubset(set(FIELDS)):
results = [
(dict([(i, j) for i, j in x.value.items() + [('id', x.id), ('dateModified', x.key)] if i in view_fields]), x.key)
for x in view()
]
elif changes and set(fields).issubset(set(FIELDS)):
results = [
(dict([(i, j) for i, j in x.value.items() + [('id', x.id)] if i in view_fields]), x.key)
for x in view()
]
elif fields:
self.LOGGER.info('Used custom fields for auctions list: {}'.format(','.join(sorted(fields))),
extra=context_unpack(self.request, {'MESSAGE_ID': 'auction_list_custom'}))
results = [
(dict([(k, j) for k, j in i[u'doc'].items() if k in view_fields]), i.key)
for i in view(include_docs=True)
]
else:
results = [
({'id': i.id, 'dateModified': i.value['dateModified']} if changes else {'id': i.id, 'dateModified': i.key}, i.key)
for i in view()
]
if results:
params['offset'], pparams['offset'] = results[-1][1], results[0][1]
if offset and view_offset == results[0][1]:
results = results[1:]
elif offset and view_offset != results[0][1]:
results = results[:limit]
params['offset'], pparams['offset'] = results[-1][1], view_offset
results = [i[0] for i in results]
if changes:
params['offset'] = encrypt(self.server.uuid, self.db.name, params['offset'])
pparams['offset'] = encrypt(self.server.uuid, self.db.name, pparams['offset'])
else:
params['offset'] = offset
pparams['offset'] = offset
data = {
'data': results,
'next_page': {
"offset": params['offset'],
"path": self.request.route_path('Auctions', _query=params),
"uri": self.request.route_url('Auctions', _query=params)
}
}
if descending or offset:
data['prev_page'] = {
"offset": pparams['offset'],
"path": self.request.route_path('Auctions', _query=pparams),
"uri": self.request.route_url('Auctions', _query=pparams)
}
return data
@eaopresource(name='Auction',
path='/auctions/{auction_id}',
description="Open Contracting compatible data exchange format. See http://ocds.open-contracting.org/standard/r/master/#auction for more info")
class AuctionResource(APIResource):
@json_view(permission='view_auction')
def get(self):
del self.request.validated['auction'].__parent__
del self.request.validated['auction'].rev
return {'data': self.request.validated['auction']}
@eaopresource(name='Auction Items',
path='/auctions/{auction_id}/*items',
description="Open Contracting compatible data exchange format. See http://ocds.open-contracting.org/standard/r/master/#auction for more info")
class AuctionItemsResource(APIResource):
@json_view(permission='view_auction')
def get(self):
return {'data': self.request.validated['item']}
|
[
"functools.partial",
"openprocurement.api.utils.json_view",
"openprocurement.api.utils.encrypt",
"openprocurement.api.utils.decrypt",
"openprocurement.api.utils.context_unpack",
"openprocurement.edge.utils.eaopresource"
] |
[((1154, 1354), 'openprocurement.edge.utils.eaopresource', 'eaopresource', ([], {'name': '"""Auctions"""', 'path': '"""/auctions"""', 'description': '"""Open Contracting compatible data exchange format. See http://ocds.open-contracting.org/standard/r/master/#auction for more info"""'}), "(name='Auctions', path='/auctions', description=\n    'Open Contracting compatible data exchange format. See http://ocds.open-contracting.org/standard/r/master/#auction for more info'\n    )\n", (1166, 1354), False, 'from openprocurement.edge.utils import eaopresource\n'), ((7182, 7394), 'openprocurement.edge.utils.eaopresource', 'eaopresource', ([], {'name': '"""Auction"""', 'path': '"""/auctions/{auction_id}"""', 'description': '"""Open Contracting compatible data exchange format. See http://ocds.open-contracting.org/standard/r/master/#auction for more info"""'}), "(name='Auction', path='/auctions/{auction_id}', description=\n    'Open Contracting compatible data exchange format. See http://ocds.open-contracting.org/standard/r/master/#auction for more info'\n    )\n", (7194, 7394), False, 'from openprocurement.edge.utils import eaopresource\n'), ((7696, 7925), 'openprocurement.edge.utils.eaopresource', 'eaopresource', ([], {'name': '"""Auction Items"""', 'path': '"""/auctions/{auction_id}/*items"""', 'description': '"""Open Contracting compatible data exchange format. See http://ocds.open-contracting.org/standard/r/master/#auction for more info"""'}), "(name='Auction Items', path='/auctions/{auction_id}/*items',\n    description=\n    'Open Contracting compatible data exchange format. See http://ocds.open-contracting.org/standard/r/master/#auction for more info'\n    )\n", (7708, 7925), False, 'from openprocurement.edge.utils import eaopresource\n'), ((1632, 1668), 'openprocurement.api.utils.json_view', 'json_view', ([], {'permission': '"""view_auction"""'}), "(permission='view_auction')\n", (1641, 1668), False, 'from openprocurement.api.utils import context_unpack, decrypt, encrypt, json_view, APIResource\n'), ((7455, 7491), 'openprocurement.api.utils.json_view', 'json_view', ([], {'permission': '"""view_auction"""'}), "(permission='view_auction')\n", (7464, 7491), False, 'from openprocurement.api.utils import context_unpack, decrypt, encrypt, json_view, APIResource\n'), ((7983, 8019), 'openprocurement.api.utils.json_view', 'json_view', ([], {'permission': '"""view_auction"""'}), "(permission='view_auction')\n", (7992, 8019), False, 'from openprocurement.api.utils import context_unpack, decrypt, encrypt, json_view, APIResource\n'), ((4427, 4543), 'functools.partial', 'partial', (['list_view', 'self.db'], {'limit': 'view_limit', 'startkey': 'view_offset', 'descending': 'descending', 'stale': '"""update_after"""'}), "(list_view, self.db, limit=view_limit, startkey=view_offset,\n    descending=descending, stale='update_after')\n", (4434, 4543), False, 'from functools import partial\n'), ((4573, 4667), 'functools.partial', 'partial', (['list_view', 'self.db'], {'limit': 'view_limit', 'startkey': 'view_offset', 'descending': 'descending'}), "(list_view, self.db, limit=view_limit, startkey=view_offset,\n    descending=descending)\n", (4580, 4667), False, 'from functools import partial\n'), ((3743, 3790), 'openprocurement.api.utils.decrypt', 'decrypt', (['self.server.uuid', 'self.db.name', 'offset'], {}), '(self.server.uuid, self.db.name, offset)\n', (3750, 3790), False, 'from openprocurement.api.utils import context_unpack, decrypt, encrypt, json_view, APIResource\n'), ((6348, 6405), 'openprocurement.api.utils.encrypt', 'encrypt', (['self.server.uuid', 'self.db.name', "params['offset']"], {}), "(self.server.uuid, self.db.name, params['offset'])\n", (6355, 6405), False, 'from openprocurement.api.utils import context_unpack, decrypt, encrypt, json_view, APIResource\n'), ((6442, 6500), 'openprocurement.api.utils.encrypt', 'encrypt', (['self.server.uuid', 'self.db.name', "pparams['offset']"], {}), "(self.server.uuid, self.db.name, pparams['offset'])\n", (6449, 6500), False, 'from openprocurement.api.utils import context_unpack, decrypt, encrypt, json_view, APIResource\n'), ((5389, 5456), 'openprocurement.api.utils.context_unpack', 'context_unpack', (['self.request', "{'MESSAGE_ID': 'auction_list_custom'}"], {}), "(self.request, {'MESSAGE_ID': 'auction_list_custom'})\n", (5403, 5456), False, 'from openprocurement.api.utils import context_unpack, decrypt, encrypt, json_view, APIResource\n')]
|
# script adapted from https://github.com/dezeraecox-manuscripts/COX_Proteome-stability
import os
import re
import zipfile
import gzip
import shutil
from shutil import copyfile
from loguru import logger
from contextlib import closing
import urllib.request as request
import tarfile
logger.info('Import OK')
def download_resources(filename, url, resource_folder):
"""
Worker function to download and save file from URL.
inputs
======
filename: (str) name of output file (including extension)
url: (str) complete location of file to be downloaded
    resource_folder: (str) relative or complete path to the directory where the file will be saved.
returns:
======
None
"""
if not os.path.exists(resource_folder):
os.makedirs(resource_folder)
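    # NOTE: resource_folder is concatenated directly with filename below,
    # so it should end with a path separator (e.g. 'results/')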
try:
with closing(request.urlopen(url)) as r:
with open(f'{resource_folder}{filename}', 'wb') as f:
shutil.copyfileobj(r, f)
logger.info(f'Downloaded {filename}')
    except Exception:
        logger.info(f'Download failed for {filename}.')
if __name__ == "__main__":
url = 'https://zenodo.org/record/5146871/files/SEM-external-cell-texture_Analyzed.zip?download=1'
    folder_name = 'SEM-external-cell-texture'
    output_folder = 'SEM-external-cell-texture/python_results/'
if not os.path.exists(output_folder):
os.makedirs(output_folder)
# Download file from repository
download_resources(
filename=f'{folder_name}.zip', url=url, resource_folder=output_folder)
with zipfile.ZipFile(f'{output_folder}{folder_name}.zip', 'r') as zip_ref:
zip_ref.extractall(f'{output_folder}')
|
[
"zipfile.ZipFile",
"os.makedirs",
"os.path.exists",
"urllib.request.urlopen",
"loguru.logger.info",
"shutil.copyfileobj"
] |
[((282, 306), 'loguru.logger.info', 'logger.info', (['"""Import OK"""'], {}), "('Import OK')\n", (293, 306), False, 'from loguru import logger\n'), ((718, 749), 'os.path.exists', 'os.path.exists', (['resource_folder'], {}), '(resource_folder)\n', (732, 749), False, 'import os\n'), ((759, 787), 'os.makedirs', 'os.makedirs', (['resource_folder'], {}), '(resource_folder)\n', (770, 787), False, 'import os\n'), ((962, 999), 'loguru.logger.info', 'logger.info', (['f"""Downloaded {filename}"""'], {}), "(f'Downloaded {filename}')\n", (973, 999), False, 'from loguru import logger\n'), ((1321, 1350), 'os.path.exists', 'os.path.exists', (['output_folder'], {}), '(output_folder)\n', (1335, 1350), False, 'import os\n'), ((1360, 1386), 'os.makedirs', 'os.makedirs', (['output_folder'], {}), '(output_folder)\n', (1371, 1386), False, 'import os\n'), ((1536, 1593), 'zipfile.ZipFile', 'zipfile.ZipFile', (['f"""{output_folder}{folder_name}.zip"""', '"""r"""'], {}), "(f'{output_folder}{folder_name}.zip', 'r')\n", (1551, 1593), False, 'import zipfile\n'), ((1020, 1069), 'loguru.logger.info', 'logger.info', (['f"""Downloaded failed for {filename}."""'], {}), "(f'Downloaded failed for {filename}.')\n", (1031, 1069), False, 'from loguru import logger\n'), ((819, 839), 'urllib.request.urlopen', 'request.urlopen', (['url'], {}), '(url)\n', (834, 839), True, 'import urllib.request as request\n'), ((929, 953), 'shutil.copyfileobj', 'shutil.copyfileobj', (['r', 'f'], {}), '(r, f)\n', (947, 953), False, 'import shutil\n')]
|
import photomosaic as pm
from skimage.io import imread, imsave
def gen_pic():
target = '28387.jpg'
image = imread(target)
dims_list = [(150, 150,), ]
# Analyze the collection (the "pool") of images.
# pool = pm.make_pool('guinnesscaps/*.jpg')
# Generate a collection of solid-color square images.
pm.rainbow_of_squares('pool/', range_params=(0, 256, 128))
# Analyze the collection (the "pool") of images.
pool = pm.make_pool('pool/*.png')
for dims in dims_list:
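        # depth controls adaptive tile subdivision (here depth=1, i.e. tiles may be split once)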
mos = pm.basic_mosaic(image, pool, dims, depth=1)
imsave('mosaic_{}.png'.format(target), mos)
if __name__ == '__main__':
gen_pic()
|
[
"photomosaic.basic_mosaic",
"photomosaic.make_pool",
"skimage.io.imread",
"photomosaic.rainbow_of_squares"
] |
[((117, 131), 'skimage.io.imread', 'imread', (['target'], {}), '(target)\n', (123, 131), False, 'from skimage.io import imread, imsave\n'), ((328, 386), 'photomosaic.rainbow_of_squares', 'pm.rainbow_of_squares', (['"""pool/"""'], {'range_params': '(0, 256, 128)'}), "('pool/', range_params=(0, 256, 128))\n", (349, 386), True, 'import photomosaic as pm\n'), ((452, 478), 'photomosaic.make_pool', 'pm.make_pool', (['"""pool/*.png"""'], {}), "('pool/*.png')\n", (464, 478), True, 'import photomosaic as pm\n'), ((520, 563), 'photomosaic.basic_mosaic', 'pm.basic_mosaic', (['image', 'pool', 'dims'], {'depth': '(1)'}), '(image, pool, dims, depth=1)\n', (535, 563), True, 'import photomosaic as pm\n')]
|
#!/usr/bin/env python
'''
This example shows the internals of certain Python modules when they are being
imported.
'''
from pycallgraph import PyCallGraph
from pycallgraph import Config
from pycallgraph.output import GraphvizOutput
def main():
import_list = (
'pickle',
'htmllib',
'urllib2',
)
graphviz = GraphvizOutput()
config = Config(include_stdlib=True)
for module in import_list:
graphviz.output_file = 'import-{}.png'.format(module)
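        # trace every call made while the module is imported; one PNG is written per module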
with PyCallGraph(output=graphviz, config=config):
__import__(module)
if __name__ == '__main__':
main()
|
[
"pycallgraph.output.GraphvizOutput",
"pycallgraph.PyCallGraph",
"pycallgraph.Config"
] |
[((342, 358), 'pycallgraph.output.GraphvizOutput', 'GraphvizOutput', ([], {}), '()\n', (356, 358), False, 'from pycallgraph.output import GraphvizOutput\n'), ((372, 399), 'pycallgraph.Config', 'Config', ([], {'include_stdlib': '(True)'}), '(include_stdlib=True)\n', (378, 399), False, 'from pycallgraph import Config\n'), ((507, 550), 'pycallgraph.PyCallGraph', 'PyCallGraph', ([], {'output': 'graphviz', 'config': 'config'}), '(output=graphviz, config=config)\n', (518, 550), False, 'from pycallgraph import PyCallGraph\n')]
|
# -*- coding: utf-8 -*-
import os
'''
@brief find all files with a given extension in the immediate subdirectories of path
'''
def findfiles(path, ext):
all_files = []
subdirs = [path+"/"+name for name in os.listdir(path)]
for subdir in subdirs:
try:
files = [subdir+"/"+f for f in os.listdir(subdir) if os.path.isfile(os.path.join(subdir, f)) and f.endswith(ext)]
all_files += files
except NotADirectoryError:
pass
return all_files
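# Example: findfiles('/tmp/data', '.txt') lists *.txt files one level below /tmp/data
# (only immediate subdirectories are scanned; files directly inside path are skipped)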
|
[
"os.path.join",
"os.listdir"
] |
[((154, 170), 'os.listdir', 'os.listdir', (['path'], {}), '(path)\n', (164, 170), False, 'import os\n'), ((255, 273), 'os.listdir', 'os.listdir', (['subdir'], {}), '(subdir)\n', (265, 273), False, 'import os\n'), ((292, 315), 'os.path.join', 'os.path.join', (['subdir', 'f'], {}), '(subdir, f)\n', (304, 315), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
import numpy as np
from scipy import stats
from scipy import interpolate as spin
import pandas as pd
import os
import warnings
from datetime import datetime, timedelta
import calendar
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.ticker as pltick
import matplotlib.dates as mdates
import matplotlib.cbook as mcbook
import matplotlib.colors as mcolors
import matplotlib.legend as mlegend
from matplotlib import gridspec
from matplotlib.patches import Polygon
if os.path.isdir('/Applications/anaconda/share/proj'): # needed for Basemap import on my machine, but probably not yours
os.environ['PROJ_LIB'] = '/Applications/anaconda/share/proj'
from mpl_toolkits.basemap import Basemap
from collections import OrderedDict
warnings.filterwarnings('ignore','.*is_string_like function.*') # MatplotlibDeprecationWarning upon cmocean import
import cmocean
import gsw
from Circles.circles import circle # from https://github.com/urschrei/Circles
import time_tools as tt
import geo_tools as gt
import load_product as ldp
def sea_ice_argo_spatial(data_dir,date,sic_grid,sic,float_data,plot_argo_locs_not_trajs,
title,save_as,results_dir,width,height,lat_center,lon_center,
open_sic=0,max_sic=100,which_ice_cmap=4,extend_cmap='neither',rasterized=True,
plot_floats=True,polynya_grid=None,label_traj_dates=True,
as_subplot=False,create_subplot=True,subplot_fig_size=(11,6),
first_subplot=False,last_subplot=False,which_subplot=[1,1,1],
subplot_add_colorbar=False,bathy_contours=np.arange(-3500,-100,500),
grid_lats=np.arange(-80,60,5),grid_lons=np.arange(-80,50,10),
subplot_lon_labels=[0,0,0,0],subplot_lat_labels=[0,0,0,0],subplot_labelsize=None,
cmap_bad_color='w',cmap_ocean_color='#5bcfff',grid_color='.2',continent_color='0.7',
boundary_width=2,coastline_width=1,pad=0.25,spacing=0.2,cbar_bottom=0.125,
return_basemap=False,return_pcolor=False,include_year_in_date=False,save_png=False):
""" Plots Argo profile locations on regional map with background of sea ice concentration and bathymetry.
"""
warnings.filterwarnings('ignore', category=mcbook.mplDeprecation)
if plot_argo_locs_not_trajs is True:
floats_linger = 14 # let floats linger on the map for N days after most recent profile (if no new profile)
if as_subplot: cross_lonx = 100000/2; cross_laty = 100000/2; cross_width = 1.25; text_offset = 0.03
else: cross_lonx = 100000/4; cross_laty = 100000/4; cross_width = 2.5; text_offset = 0.015
if which_ice_cmap == 1: ice_cmap = plt.cm.CMRmap
elif which_ice_cmap == 2: ice_cmap = plt.cm.inferno
elif which_ice_cmap == 3: ice_cmap = plt.cm.gist_ncar
elif which_ice_cmap == 4: # custom colormap similar to cm.CMRmap:
cmap_colors = ['#79CDFA','#79CDFA','#87C3EC','#628eac','#1E1952',
'#4630B8','#E85A33','#E1C047','#F2F1C4','#FBFBEE','#FFFFFF']
ice_cmap = mcolors.LinearSegmentedColormap.from_list(name=None,colors=cmap_colors,N=250,gamma=1.3)
elif which_ice_cmap == 5: # alternate version of cmap 4 above, with less vibrant ocean blue
cmap_colors = ['#bce6fc','#bce6fc','#87C3EC','#628eac','#1E1952',
'#4630B8','#E85A33','#E1C047','#F2F1C4','#FBFBEE','#FFFFFF']
ice_cmap = mcolors.LinearSegmentedColormap.from_list(name=None,colors=cmap_colors,N=250,gamma=1.3)
if not as_subplot:
fig, m = blank_inset_basemap(width,height,lat_center,lon_center,lon_labels=[0,0,0,1],lat_labels=[1,0,0,0],
grid_lats=grid_lats,grid_lons=grid_lons,labelsize=subplot_labelsize)
if as_subplot:
if create_subplot:
if first_subplot: master_fig = plt.figure(figsize=subplot_fig_size)
plt.gcf().add_subplot(*which_subplot)
master_fig, m = blank_inset_basemap(width,height,lat_center,lon_center,create_new_fig=False,
lon_labels=subplot_lon_labels,lat_labels=subplot_lat_labels,
grid_lats=grid_lats,grid_lons=grid_lons,
labelsize=subplot_labelsize,grid_color=grid_color,
fill_continent_color=continent_color,
boundary_width=boundary_width,coastline_width=coastline_width)
xlims = plt.gca().get_xlim()
ylims = plt.gca().get_ylim()
lonx, laty = m(sic_grid['lons'], sic_grid['lats'])
sic_nan_masked = np.ma.masked_where(np.isnan(sic), sic)
sic_lon_edge_to_center = 0.5*np.mean([np.mean(np.diff(lonx[0,:])), np.mean(np.diff(lonx[-1,:]))])
sic_lat_edge_to_center = 0.5*np.mean([np.mean(np.diff(laty[0,:])),np.mean(np.diff(laty[-1,:]))])
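    # shift by half a grid cell so each pcolormesh cell is centered on its SIC grid point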
pcm = plt.pcolormesh(lonx-sic_lon_edge_to_center, laty-sic_lat_edge_to_center, sic_nan_masked,
cmap=ice_cmap, edgecolors='None', rasterized=rasterized, zorder=1, alpha=1.0,
vmin=open_sic, vmax=max_sic) # norm=mcolors.PowerNorm(gamma=0.4,vmin=open_sic,vmax=100,clip=False)
pcm.cmap.set_over('w')
pcm.cmap.set_bad(cmap_bad_color)
pcm.cmap.set_under(cmap_ocean_color) # '#5bcfff' is sea blue (previously #f0ffff, very light blue)
if not as_subplot:
cbar = plt.colorbar(pad=0.05, shrink=0.65, format='%.0f%%', extend=extend_cmap)
cbar.ax.tick_params(labelsize=12)
cbar.set_label('Sea ice concentration',size=12)
if polynya_grid is not None:
plt.contour(lonx,laty,polynya_grid,levels=[0.999],colors='#00FF00',linewidths=0.7,alpha=0.8, zorder=2)
if len(bathy_contours) > 0:
etopo_lons, etopo_lats, etopo = ldp.load_bathy(data_dir)
retopolons, retopolats = m(*np.meshgrid(etopo_lons, etopo_lats))
olevels = bathy_contours # check etopo.ravel().min()
m.contour(retopolons, retopolats, etopo, olevels, linewidths=0.5, linestyles='solid', colors='#808080',
alpha=0.5, zorder=3)
if plot_floats:
for f in range(len(float_data)):
wmoid = float_data[f][0]
float_lons = float_data[f][1]
float_lats = float_data[f][2]
position_flags = float_data[f][3]
float_datetimes = float_data[f][4]
float_dates = (float_data[f][4]/1000000).astype(int)
prof_nums = float_data[f][5]
date_int = tt.convert_tuple_to_8_int(date)
if plot_argo_locs_not_trajs:
if sum((float_dates - date_int) == 0) >= 1:
this_day_index = np.where((float_dates - date_int) == 0)[0][0]
lonx, laty = m(float_lons[this_day_index], float_lats[this_day_index])
if not (xlims[0] <= lonx <= xlims[1]) or not (ylims[0] <= laty <= ylims[1]): continue
if position_flags[this_day_index] == 1:
c='m'
edgecolor='k'
else:
c='#15178F'
edgecolor='k'
plt.plot([lonx-cross_lonx,lonx+cross_lonx],[laty,laty],color=c,linestyle='solid',linewidth=cross_width,zorder=4)
plt.plot([lonx,lonx],[laty-cross_laty,laty+cross_laty],color=c,linestyle='solid',linewidth=cross_width,zorder=4)
plt.scatter(lonx,laty,s=14,c=c,edgecolors=edgecolor,alpha=0.9,zorder=5)
if subplot_labelsize is None: float_fontsize = 8
else: float_fontsize = subplot_labelsize
plt.text(lonx + text_offset*width,laty - 3*text_offset*height,str(wmoid) + ' ('
+ str(prof_nums[this_day_index]) + ')',fontsize=float_fontsize,color=c,clip_on=False,zorder=6)
elif date_int > float_dates[0]:
recent_day_index = np.where((float_dates - date_int) < 0)[0][-1]
days_since_last_profile = tt.days_between(tt.convert_8_int_to_tuple(float_dates[recent_day_index]),date)
if days_since_last_profile <= floats_linger:
lonx, laty = m(float_lons[recent_day_index], float_lats[recent_day_index])
if not (xlims[0] <= lonx <= xlims[1]) or not (ylims[0] <= laty <= ylims[1]): continue
# alpha = (0.75-0.25) + 0.25*(1 - days_since_last_profile/floats_linger)
if position_flags[recent_day_index] == 1: c = 'm'
else: c = '#15178F'
plt.plot([lonx-cross_lonx,lonx+cross_lonx],[laty,laty],color=c,linestyle='solid',linewidth=cross_width,zorder=4)
plt.plot([lonx,lonx],[laty-cross_laty,laty+cross_laty],color=c,linestyle='solid',linewidth=cross_width,zorder=4)
# plt.scatter(lonx,laty,s=18,c=c,edgecolors='none',alpha=0.7,zorder=5)
elif not plot_argo_locs_not_trajs:
flonx,flaty = m(float_lons,float_lats)
plt.plot(flonx[position_flags != 9],flaty[position_flags != 9],color='#15178F',linewidth=1.25,zorder=4)
plt.scatter(flonx[position_flags == 2],flaty[position_flags == 2],s=10,c='m',edgecolors='none',zorder=5)
plt.scatter(flonx[position_flags == 1],flaty[position_flags == 1],s=10,c='#15178F',edgecolors='none',
zorder=6)
if len(float_data) == 1 or label_traj_dates == True:
datetime_tuples = [tt.convert_14_to_tuple(float_datetimes[n]) for n in range(len(float_datetimes))]
mo_yr_strings = [str(datetime_tuples[n][1]) + '/' + '{0:02d}'.format(datetime_tuples[n][0] - 2000)
for n in range(len(datetime_tuples))]
unique_mo_yr_strings,unique_indices = np.unique(mo_yr_strings,return_index=True)
unique_indices = np.sort(unique_indices) # to undo undesired sort by 'unique'
mo_yr_strings_to_label = [mo_yr_strings[n] for n in unique_indices]
lonx_to_label = [flonx[n] for n in unique_indices]
laty_to_label = [flaty[n] for n in unique_indices]
for pt in np.arange(0,len(mo_yr_strings_to_label),6):
plt.text(lonx_to_label[pt] + 0.000625 * width,laty_to_label[pt] - 0.026 * height,
mo_yr_strings_to_label[pt],fontsize=7,color='#15178F')
for pt in np.arange(3,len(mo_yr_strings_to_label),6):
plt.text(lonx_to_label[pt] + 0.000625 * width,laty_to_label[pt] + 0.017 * height,
mo_yr_strings_to_label[pt],fontsize=7,color='#15178F')
if len(float_data) != 1:
plt.annotate(str(wmoid),fontsize=10,color='#15178F',xy=(flonx[len(flonx) - 1], flaty[len(flaty) - 1]),
xytext=(flonx[len(flonx) - 1] - 0.25 * width,flaty[len(flaty) - 1] + 0.2 * height),
arrowprops=dict(arrowstyle='->',color='#15178F',alpha=0.5))
if not as_subplot:
if title is not None: plt.title(title,fontsize=16)
plt.tight_layout()
if save_png: plt.savefig(results_dir + save_as + '.png',dpi=150)
else: plt.savefig(results_dir + save_as + '.pdf')
plt.close()
elif as_subplot:
if create_subplot:
if subplot_labelsize is None: baseline_date_fontsize = 7
else: baseline_date_fontsize = subplot_labelsize
if include_year_in_date:
day_string = '{0}-{1:02d}-{2:02d}'.format(*date)
date_fontsize = baseline_date_fontsize + 1
else:
day_string = '{1}-{2:02}'.format(*date)
date_fontsize = baseline_date_fontsize + 3
plt.text(0.05,0.95,day_string,fontsize=date_fontsize,fontweight='bold',
horizontalalignment='left',verticalalignment='top',transform=plt.gca().transAxes)
if not create_subplot and subplot_add_colorbar: # deprecated?
cbar = plt.gcf().colorbar(pcm,ticks=np.arange(open_sic,101,10),format='%.0f%%',extend=extend_cmap,
orientation='vertical',fraction=0.03,aspect=30)
cbar.ax.tick_params(labelsize=subplot_labelsize,left=True,right=False,labelleft=True,labelright=False)
cbar.outline.set_linewidth(boundary_width)
if last_subplot:
plt.tight_layout(h_pad=pad,w_pad=pad,rect=(0.02,0.02,0.98,0.98))
if subplot_add_colorbar:
if spacing is not None: hspace = spacing*width/height
else: hspace = None
plt.gcf().subplots_adjust(bottom=0.05,wspace=spacing,hspace=hspace)
cbar_ax = plt.gcf().add_axes([0.2,cbar_bottom,0.6,0.015])
cbar = plt.gcf().colorbar(pcm,format='%.0f%%',extend=extend_cmap,orientation='horizontal',cax=cbar_ax)
cbar.ax.tick_params(labelsize=subplot_labelsize+2)
cbar.outline.set_linewidth(boundary_width)
plt.savefig(results_dir + save_as + '.pdf')
plt.close()
if return_basemap and not return_pcolor: return m
elif return_basemap and return_pcolor: return m, pcm
def section(wmoid,results_dir,save_as,float_data,params='all',depth_lim=(0,1700),fixed_ylim=True,vert_res=10,toi=None,
mld=True,mld_ref_depth=10,mld_sigma_theta_crit=0.03,show_ice_bars=True,sea_ice_grids=None,
sea_ice_data_avail=None,show_prof_bars=False,show_prof_ticks=True,add_date_bars=None,cmap_level_mods=None,
cmap_color_mods=None,cmap_gamma_mods=None,cmap_freq_mods=None,trim_xlim=False,
create_new_figs=True,new_figsize=(8,6),add_title=True,facecolor='k',grid=True,
years_only=False,plot_ylabel=True,plot_xticklabels=True,plot_cbar=True,condensed_cbar_label=None,
smaller_text=False,force_label_size=None,density_coor=False,density_lim=(27.75,27.85),density_power_scale=15,
density_depth_contours=None,density_depth_labels=True,explicit_yticks=None,
drift_temps=None,drift_temp_baseline=None,drift_temp_depth=None):
""" Hydrographic depth section plots.
Args:
wmoid: int
params: 'all' to plot standard set of parameters listed below, or list of specific param_abbrev (if available)
        depth_lim: tuple/list of bounding depths (if fixed_ylim is False, ylim[1] defaults to the deepest
                   observation, which is at most depth_lim[1])
fixed_ylim: True or False (force depth range to <<depth_lim>> [True], or set to deepest observation [False])
vert_res: vertical resolution of section (in meters); note that plot size scales inversely with this
toi: None or tuple/list of bounding times of interest (in 14-digit integer format)
mld: plot mixed-layer depth
mld_ref_depth: see gt.mld() (applies only if 'mld' is True)
mld_sigma_theta_crit: see gt.mld() (applies only if 'mld' is True)
show_ice_bars: plot bars estimating when float was under sea ice
note: calculates average SIC within a box 2° longitude x 1° latitude around the given or
interpolated float location (uses AMSR if available, then GSFC)
sea_ice_grids: created by ldp.sea_ice_data_prep(), only needed if show_ice_bars is True
sea_ice_data_avail: created by ldp.sea_ice_data_prep(), only needed if show_ice_bars is True
show_prof_bars: plot each profile as thin gray line on section
show_prof_ticks: plot each profile as small tick on top x-axis
add_date_bars: None or list of Datetimes to add vertical black bars, e.g. denoting start and end of some event
cmap_level_mods: None or dict with param_abbrevs as keys to lists of colormap levels to replace defaults
cmap_color_mods: None or dict with param_abbrevs as keys to lists of color sequences to replace defaults
cmap_gamma_mods: None or dict with param_abbrevs as keys to colormap shift parameter to replace defaults
note: gamma=1.0 is even spacing; gamma>1.0 stretches colors upwards; gamma<1.0 downwards
cmap_freq_mods: None or dict with param_abbrevs as keys to multiplier for adding additional color levels
between those specified in 'cmap_levels' (e.g. 3 for 3 levels between)
trim_xlim: trim xlim (time axis) to match range of data for each parameter (otherwise trim to time range of
GDAC temperature data)
create_new_figs: True to save each sections as an individual new figure (with dimensions of new_figsize)
False to plot each section to the currently active plot axes
(for this, pass a single param_abbrev)
new_figsize: figure dimensions in inches: (width,height), e.g. (8,6)
note: only used if create_new_figs is True
facecolor: 'k' or other color for plot background (i.e. where data is missing or invalid)
grid: True or False to add faint x- and y-grid at locations of major time and depth/density ticks
years_only: True or False (only label years on x-axis, instead of automatic month labeling)
plot_ylabel: True or False
plot_xticklabels: True or False
plot_cbar: True or False
condensed_cbar_label: None or string to replace default colorbar parameter label
smaller_text: True or False (use smaller font sizes, e.g. for subplot)
force_label_size: None or fontsize for labels (smaller_text should be True)
density_coor: True or False (if True, use sigma_theta as y-coordinate; if False, use depth as y-coordinate)
density_lim: tuple/list of bounding sigma_theta values (only used if density_coor is True)
density_power_scale: power exponent to stretch deeper density levels / condense near-surface levels
density_depth_contours: None or list of depths to contour and label when plotting with density y-coordinate
density_depth_labels: False or True (label the depth contours described above)
NOTE: this requires manual input (click and Return) to position contour labels
explicit_yticks: None or list/array of ytick locations (depths or sigma_theta values)
drift_temps: None or dict containing float drift-depth temperature time series with keys 'datetime' and 'temp'
drift_temp_baseline: None or potential temperature value to use as baseline from which to plot drift_temps
drift_temp_depth: None or depth to use as baseline from which to plot drift_temps
"""
if params == 'all':
param_abbrevs = np.array(['ptmp','psal','Nsquared','PV','destab','Oxygen','OxygenSat','pHinsitu','Nitrate',
'Chl_a'])
# full list of parameters below; implement custom colormaps as needed:
# param_abbrevs = np.array(['ptmp', 'psal', 'Nsquared', 'PV', 'destab', 'Oxygen', 'OxygenSat', 'Nitrate',
# 'Chl_a', 'pHinsitu', 'pH25C', 'TALK_LIAR', 'DIC_LIAR', 'pCO2_LIAR'])
else:
param_abbrevs = params
cmap_levels = {}
cmap_levels['ptmp'] = [0.0,0.2,0.4,0.6,0.8,1.0,1.2]
cmap_levels['psal'] = [34.66,34.67,34.68,34.69,34.70]
cmap_levels['Oxygen'] = [190,195,200,205,210,215,220]
cmap_levels['OxygenSat'] = [54,55,56,57,58,59,60,61,62,63,64,65,70,80,90,100,110]
cmap_levels['Nsquared'] = [0,5,10,15,20,25,50,100,500,1000]
cmap_levels['PV'] = [0,5,10,25,50,100,250,500,1000,5000]
cmap_levels['sigma_theta'] = [27.0,27.78,27.79,27.80,27.81,27.82,27.83,27.84,27.85]
cmap_levels['destab'] = [0,0.1,0.2,0.3,0.4,0.5,0.6,0.7] # same range but even spacing
cmap_levels['pHinsitu'] = np.arange(7.84,8.16,0.04)
cmap_levels['Nitrate'] = np.arange(20.0,34.01,1.0)
cmap_levels['Chl_a_corr'] = np.arange(0.0,2.0,0.25)
cmap_extend = {}
cmap_extend['ptmp'] = 'both'
cmap_extend['psal'] = 'both'
cmap_extend['Oxygen'] = 'both'
cmap_extend['OxygenSat'] = 'both'
cmap_extend['Nsquared'] = 'both'
cmap_extend['PV'] = 'both'
cmap_extend['sigma_theta'] = 'both'
cmap_extend['destab'] = 'max'
cmap_extend['pHinsitu'] = 'both'
cmap_extend['Nitrate'] = 'both'
cmap_extend['Chl_a_corr'] = 'both'
cmap_under_over = {}
cmap_under_over['ptmp'] = ['#252766','#62001d'] # darker purple/blue, darker red
cmap_under_over['psal'] = ['#271E6A','#fcf6d1'] # darker purple/blue, lighter cream
cmap_under_over['Oxygen'] = ['0.9','#660000'] # light grey-white, darker version of 'maroon'
cmap_under_over['Nsquared'] = ['#000099','0.3'] # darker version of 'blue', dark grey
cmap_under_over['PV'] = ['#000099','0.3'] # same as above
cmap_under_over['destab'] = [None,'#2d004e'] # darker version of 'indigo'
cmap_colors = {}
# useful resources for color picking: https://matplotlib.org/examples/color/named_colors.html
# http://www.color-hex.com/
cmap_colors['ptmp'] = ['#353992','#7294C2','#A5C4DD','#F9FCCF','#F2CF85','#CB533B','#8D002A']
cmap_colors['psal'] = ['#252A83','#22369C','#215091','#306489','#3F7687','#569487',
'#6EB380','#87C574','#B4D56D','#DCE184','#FAEDA3']
cmap_colors['Oxygen'] = ['white','maroon'] # previously ended with 'teal', started with '0.7'
cmap_colors['OxygenSat'] = ['0.7','white','maroon','teal']
cmap_colors['Nsquared'] = ['blue','#ffe34c','firebrick'] # ffe34c is lighter version of 'gold'
cmap_colors['PV'] = ['blue','gold','firebrick']
cmap_colors['sigma_theta'] = ['seagreen','white','coral','0.2']
cmap_colors['destab'] = ['yellow','#9366b4','indigo'] #9366b4 is lighter indigo
cmap_colors['pHinsitu'] = ['green','white','red','blue','orange']
cmap_colors['Nitrate'] = ['orange','blue','red','white','green']
cmap_colors['Chl_a_corr'] = ['#11114e','white','palegoldenrod','#005000']
cmap_gamma = {}
cmap_gamma['ptmp'] = 0.9
cmap_gamma['psal'] = 0.7
cmap_gamma['Oxygen'] = 1.75
cmap_gamma['OxygenSat'] = 0.5
cmap_gamma['Nsquared'] = 1.1
cmap_gamma['PV'] = 0.7
cmap_gamma['sigma_theta'] = 0.6 # colorbar is reversed below
cmap_gamma['destab'] = 0.7 # colorbar is reversed below
cmap_gamma['pHinsitu'] = 1.0
cmap_gamma['Nitrate'] = 1.0
cmap_gamma['Chl_a_corr'] = 1.0
cmap_freq = {}
cmap_freq['ptmp'] = 2
cmap_freq['psal'] = 3
cmap_freq['Oxygen'] = 2
cmap_freq['OxygenSat'] = 2
cmap_freq['Nsquared'] = 1
cmap_freq['PV'] = 1
cmap_freq['sigma_theta'] = 4
cmap_freq['destab'] = 2
cmap_freq['pHinsitu'] = 8
cmap_freq['Nitrate'] = 8
cmap_freq['Chl_a_corr'] = 5
if cmap_level_mods is not None:
for param in cmap_level_mods.keys():
cmap_levels[param] = cmap_level_mods[param]
if cmap_color_mods is not None:
for param in cmap_color_mods.keys():
cmap_colors[param] = cmap_color_mods[param]
if cmap_gamma_mods is not None:
for param in cmap_gamma_mods.keys():
cmap_gamma[param] = cmap_gamma_mods[param]
if cmap_freq_mods is not None:
for param in cmap_freq_mods.keys():
cmap_freq[param] = cmap_freq_mods[param]
prof_match = np.zeros(len(float_data['profiles'])).astype(bool)
for p in np.arange(len(prof_match)):
if toi is not None:
if toi[0] <= float_data['profiles'][p]['datetime'] <= toi[1]:
prof_match[p] = True
else:
prof_match[p] = True
prof_indices_to_plot = np.where(prof_match)[0]
if mld or show_ice_bars:
datetime_coord_profs = []
datetime_coord_as_tuples = []
mld_data = []
prof_lats = []
prof_lons = []
for pi in prof_indices_to_plot:
datetime_tuple_format = tt.convert_14_to_tuple(float_data['profiles'][pi]['datetime'])
datetime_coord_as_tuples.append(datetime_tuple_format)
datetime_coord_profs.append(tt.convert_tuple_to_datetime(datetime_tuple_format))
this_mld = gt.mld(float_data['profiles'][pi],ref_depth=mld_ref_depth,
sigma_theta_crit=mld_sigma_theta_crit,verbose_warn=False)
if density_coor:
# actually a density value:
this_mld = gt.vert_prof_eval(float_data['profiles'][pi],'sigma_theta',this_mld,extrap='nearest')
# convert to power-scaled density; ignore MLDs outside plotting range
if this_mld < density_lim[0]: this_mld = np.NaN
this_mld = (this_mld - density_lim[0])**density_power_scale
mld_data.append(this_mld)
prof_lats.append(float_data['profiles'][pi]['lat'])
prof_lons.append(float_data['profiles'][pi]['lon'])
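        # helper: convert a datetime to UTC epoch seconds, giving np.interp a numeric
        # time axis to work with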
def DatetimeToTimestampForInterp(dt):
return calendar.timegm(dt.timetuple())
if show_ice_bars:
date_coord_daily = tt.dates_in_range(datetime_coord_as_tuples[0][0:3],datetime_coord_as_tuples[-1][0:3])
datetime_coord_daily = [tt.convert_tuple_to_datetime(date_tuple) for date_tuple in date_coord_daily]
timestamp_coord_daily = [DatetimeToTimestampForInterp(dt) for dt in datetime_coord_daily]
timestamp_coord_profs = [DatetimeToTimestampForInterp(dt) for dt in datetime_coord_profs]
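            # linearly interpolate the per-profile float positions onto a daily grid,
            # so sea-ice concentration can be sampled along the full track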
specific_lat_coord_for_ice = np.interp(timestamp_coord_daily,timestamp_coord_profs,prof_lats)
specific_lon_coord_for_ice = np.interp(timestamp_coord_daily,timestamp_coord_profs,prof_lons)
lat_coord_for_ice = []
lon_coord_for_ice = []
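            # build a 1° latitude x 2° longitude averaging box around each daily
            # position (matches the docstring note under show_ice_bars)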
for pos_idx in range(len(specific_lat_coord_for_ice)):
lat_coord_for_ice.append([specific_lat_coord_for_ice[pos_idx] - 0.5,
specific_lat_coord_for_ice[pos_idx] + 0.5])
lon_coord_for_ice.append([specific_lon_coord_for_ice[pos_idx] - 1.0,
specific_lon_coord_for_ice[pos_idx] + 1.0])
sic_coord = ldp.sea_ice_concentration_along_track(date_coord_daily,lat_coord_for_ice,lon_coord_for_ice,
sea_ice_grids,sea_ice_data_avail)
for param_index, param_abbrev in enumerate(param_abbrevs):
param_skip = True
for pi in prof_indices_to_plot:
if param_abbrev in float_data['profiles'][pi].keys(): param_skip = False
if param_skip: continue
datetime_coord = []
section_data = []
if density_coor: depth_data = []
obs_range = []
for pi in prof_indices_to_plot:
if param_abbrev in float_data['profiles'][pi].keys():
if float_data['profiles'][pi][param_abbrev]['data'].size == 0: continue
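                # regrid this profile onto an even depth grid (vert_res spacing) so all
                # profiles share one vertical coordinate for contouring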
z_vec, data_vec = gt.vert_prof_even_spacing(float_data['profiles'][pi],param_abbrev,z_coor='depth',
spacing=vert_res,interp_method='linear',extrap='NaN',
top=depth_lim[0],bottom=depth_lim[1],verbose_error=True)
if density_coor:
obs_param = data_vec
obs_depth, obs_sigma_theta \
= gt.vert_prof_even_spacing(float_data['profiles'][pi],'sigma_theta',z_coor='depth',
spacing=vert_res,interp_method='linear',extrap='NaN',
top=depth_lim[0],bottom=depth_lim[1],verbose_error=True)
obs_good_mask = ~np.logical_or(np.isnan(obs_param),np.isnan(obs_sigma_theta))
obs_sort_order = obs_sigma_theta[obs_good_mask].argsort()
sorted_sigma_theta = obs_sigma_theta[obs_good_mask][obs_sort_order]
sorted_param = obs_param[obs_good_mask][obs_sort_order]
sorted_depth = obs_depth[obs_good_mask][obs_sort_order]
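                # evenly spaced grid in power-transformed density space:
                # y = (sigma_theta - density_lim[0])**density_power_scale stretches the
                # narrow deep density range and condenses near-surface values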
z_vec = density_lim[0] \
+ (np.arange(0, (density_lim[1]-density_lim[0])**density_power_scale,
((density_lim[1]-density_lim[0])**density_power_scale)/200)) \
** (1.0/density_power_scale)
data_vec = gt.profile_interp(sorted_param,sorted_sigma_theta,z_vec,
method='linear',out_of_bounds='NaN')
depth_vec = gt.profile_interp(sorted_depth,sorted_sigma_theta,z_vec,
method='linear',out_of_bounds='NaN')
depth_data.append(depth_vec)
z_vec[z_vec < density_lim[0]] = np.NaN
z_vec = (z_vec - density_lim[0])**density_power_scale
section_data.append(data_vec)
datetime_coord.append(tt.convert_tuple_to_datetime(tt.convert_14_to_tuple(float_data['profiles']
[pi]['datetime'])))
obs_range.append([np.min(z_vec[np.isfinite(data_vec)]),
np.max(z_vec[np.isfinite(data_vec)])])
param_name_for_cbar = float_data['profiles'][pi][param_abbrev]['name']
param_units_for_cbar = float_data['profiles'][pi][param_abbrev]['units']
section_data = np.ma.masked_invalid(np.array(section_data).T)
if density_coor: depth_data = np.ma.masked_invalid(np.array(depth_data).T)
if create_new_figs: plt.figure(figsize=new_figsize)
specified_levels = np.array(cmap_levels[param_abbrev])
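        # interpolate (cmap_freq - 1) extra sub-levels between each pair of specified
        # levels, so colors vary smoothly while colorbar ticks stay at the coarse levels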
more_levels = np.interp(np.arange(len(specified_levels),step=1.0/cmap_freq[param_abbrev]),
np.arange(len(specified_levels)),specified_levels,right=np.NaN)
more_levels = more_levels[~np.isnan(more_levels)]
N_colors = len(more_levels) - 1
contourf_cmap = mcolors.LinearSegmentedColormap.from_list(name=None,colors=cmap_colors[param_abbrev],
N=N_colors,gamma=cmap_gamma[param_abbrev])
if param_abbrev in cmap_under_over:
if cmap_under_over[param_abbrev][0] is not None: contourf_cmap.set_under(cmap_under_over[param_abbrev][0])
if cmap_under_over[param_abbrev][1] is not None: contourf_cmap.set_over(cmap_under_over[param_abbrev][1])
normalization = mcolors.BoundaryNorm(more_levels,ncolors=N_colors,clip=False)
# set facecolor as black (or other given color)
if density_coor: plt.gca().axhspan((density_lim[1]-density_lim[0])**density_power_scale,0,
facecolor=facecolor,zorder=1)
else: plt.gca().axhspan(depth_lim[0],depth_lim[1],facecolor=facecolor,zorder=1)
contour_handle = plt.contourf(datetime_coord,z_vec,section_data,
vmin=np.min(more_levels),vmax=np.max(more_levels),
levels=more_levels,norm=normalization,cmap=contourf_cmap,
extend=cmap_extend[param_abbrev],zorder=2)
if plot_cbar:
if show_ice_bars: shrink_cbar = 1650/(1650+175)
else: shrink_cbar = 1.0
if np.max(np.abs(specified_levels)) >= 1000: formatter = pltick.FuncFormatter(lambda x, p: format(x, ','))
else: formatter = None
cbar = plt.colorbar(ticks=specified_levels,spacing='uniform',shrink=shrink_cbar,format=formatter)
if condensed_cbar_label is not None: cbar_label = condensed_cbar_label
else: cbar_label = '{0}\n({1})'.format(param_name_for_cbar,
param_units_for_cbar)
if smaller_text:
if force_label_size is not None:
cbar_labelsize = force_label_size - 1
cbar_titlesize = force_label_size
else:
cbar_labelsize = 6
cbar_titlesize = 8
cbar.ax.tick_params(labelsize=cbar_labelsize)
cbar.set_label(label=cbar_label,rotation=90,labelpad=9,size=cbar_titlesize)
else:
cbar.set_label(label=cbar_label,rotation=90,labelpad=11) # subtracted 9
cbar.ax.set_title(param_units_for_cbar,fontsize=8)
if param_abbrev == 'destab' or param_abbrev == 'sigma_theta': cbar.ax.invert_yaxis()
if show_prof_bars:
for obs_idx, obs_dt in enumerate(datetime_coord):
plt.plot([obs_dt,obs_dt],obs_range[obs_idx],color='0.5',linewidth=0.5,zorder=3)
if add_date_bars is not None:
for dt in add_date_bars:
if not density_coor: plt.plot([dt,dt],[*depth_lim],color='0.2',linewidth=0.8,zorder=3)
else: plt.plot([dt,dt],[*(np.array(density_lim)-density_lim[0])**density_power_scale],
color='0.2',linewidth=0.8,zorder=3)
if (drift_temps is not None) and (not density_coor): # sorry, can't plot drift temps in density space
plt.plot(drift_temps['datetime'],
drift_temp_depth + (depth_lim[1]-depth_lim[0])*(drift_temp_baseline-drift_temps['temp']),
'k-',linewidth=0.01,alpha=0.5,zorder=4)
if trim_xlim:
plt.xlim([datetime_coord[0],datetime_coord[-1]])
else:
start_date = tt.convert_tuple_to_datetime(tt.convert_14_to_tuple(float_data['profiles'][0]['datetime']))
end_date = tt.convert_tuple_to_datetime(tt.convert_14_to_tuple(float_data['profiles'][-1]['datetime']))
plt.xlim([start_date,end_date])
if mld or show_ice_bars:
mld_xlim_mask = np.logical_and(np.array(datetime_coord_profs) >= datetime_coord[0],
np.array(datetime_coord_profs) <= datetime_coord[-1])
if mld:
plt.plot(np.array(datetime_coord_profs)[mld_xlim_mask],np.array(mld_data)[mld_xlim_mask],
'w-',linewidth=1.0,zorder=4)
if density_coor and density_depth_contours is not None:
depth_contours = plt.contour(datetime_coord,z_vec,depth_data,levels=density_depth_contours,
linewidths=0.5,alpha=0.75,colors='k',zorder=5)
if force_label_size: depth_contour_fontsize = force_label_size-1
else: depth_contour_fontsize = 8
if density_depth_labels:
print('>>> Waiting for manual input.\n'
'>>> Click to position contours, hit Return when done.\n'
'>>> Note: do not change figure size.')
clabels = plt.clabel(depth_contours,fmt='%d m',fontsize=depth_contour_fontsize,manual=True,
inline=True,inline_spacing=25) # removed zorder=5
for label in clabels: label.set_rotation(0)
if fixed_ylim and not density_coor: max_ylim = depth_lim[1]
else: max_ylim = np.max(np.array(obs_range))
if show_ice_bars:
sic_xlim_mask = np.logical_and(np.array(datetime_coord_daily) >= datetime_coord[0],
np.array(datetime_coord_daily) <= datetime_coord[-1])
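            # SIC is drawn as a filled curve in a band reserved above the section's top
            # edge; sic_norm is the (negative, depth-equivalent) height of a 100% SIC bar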
if not smaller_text: sic_norm = -35 # meters equivalent
elif depth_lim[1] <= 300: sic_norm = -35 # hackish temp solution for upper-ocean-only sections
else: sic_norm = -175
if not density_coor:
sic_baseline = depth_lim[0] # top of section (probably 0 m, but not necessarily)
else:
sic_norm = (sic_norm/1700)*((density_lim[1]-density_lim[0])**density_power_scale)
sic_baseline = 0
plt.gca().fill_between(np.array(datetime_coord_daily)[sic_xlim_mask],
sic_baseline + (sic_norm * np.array(sic_coord)[sic_xlim_mask]),sic_baseline,
color='k',linewidth=0,zorder=5) # or #8aacb8 for blue
plt.plot([datetime_coord_daily[0],datetime_coord_daily[-1]],[sic_baseline,sic_baseline],'k-',linewidth=0.5)
if not density_coor: plt.ylim([sic_baseline + 1.2*sic_norm,max_ylim])
else: plt.ylim(sic_baseline + 1.2*sic_norm,
(density_lim[1]-density_lim[0])**density_power_scale)
else:
if not density_coor: plt.ylim([depth_lim[0],max_ylim])
else: plt.ylim(*(np.array(density_lim)-density_lim[0])**density_power_scale)
plt.gca().invert_yaxis()
if not density_coor:
if explicit_yticks is not None: plt.yticks(explicit_yticks)
plt.gca().get_yaxis().set_major_formatter(pltick.FuncFormatter(lambda x, loc: "{:,}".format(x)))
else:
plt.yticks((np.array(explicit_yticks)-density_lim[0])**density_power_scale)
plt.gca().set_yticklabels(explicit_yticks)
if show_ice_bars and not density_coor: # NOTE: weird numbers display when using this in density coordinates
current_yticks = plt.yticks()[0]
plt.yticks([sic_baseline+sic_norm,sic_baseline,*current_yticks[1:]],
['100%','0%',*["{:,}".format(yt) for yt in current_yticks[1:]]])
if smaller_text:
if force_label_size is not None: ysize = force_label_size
else: ysize = 8
else: ysize = None
if not plot_ylabel: plt.gca().yaxis.set_ticklabels([])
else: plt.gca().tick_params(axis='y',which='major',labelsize=ysize)
if plot_ylabel:
if not density_coor: plt.ylabel('Depth (m)',size=ysize)
else: plt.ylabel(r'$\sigma_\theta$ (kg/m$^3$)',size=ysize)
if show_ice_bars:
plt.ylabel('Depth (m) ',size=ysize)
plt.text(-0.14,0.93,'SIC',fontsize=ysize,rotation=0,transform=plt.gca().transAxes,
horizontalalignment='right',verticalalignment='center')
years = mdates.YearLocator()
months = mdates.MonthLocator()
if not years_only:
xaxis_formatter = mdates.DateFormatter("%b")
plt.gca().xaxis.set_major_locator(months)
plt.gca().xaxis.set_major_formatter(xaxis_formatter)
else:
xaxis_formatter = mdates.DateFormatter("%Y")
plt.gca().xaxis.set_major_locator(years)
plt.gca().xaxis.set_major_formatter(xaxis_formatter)
plt.gca().xaxis.set_minor_locator(months)
plt.xticks(rotation=45)
if not plot_xticklabels:
plt.gca().xaxis.set_ticklabels([])
elif force_label_size is not None:
plt.gca().tick_params(axis='x',which='major',labelsize=force_label_size)
if grid:
plt.grid(which='major',axis='both',color='0.6',linewidth=0.25,alpha=0.6)
plt.gca().set_axisbelow(False) # True (grid below all), 'line' (below lines), False (grid above all)
if show_prof_ticks:
top_xaxis = plt.gca().twiny()
top_xaxis.set_xlim([datetime_coord[0],datetime_coord[-1]])
top_xaxis.xaxis.set_ticks_position('top')
top_xaxis.xaxis.set_tick_params(width=0.5)
top_xaxis.set_xticks(datetime_coord)
top_xaxis.xaxis.set_ticklabels([])
if create_new_figs:
if add_title: plt.title('Float {0}'.format(wmoid))
plt.tight_layout()
plt.savefig(results_dir + save_as + param_abbrev + '.pdf')
plt.close()
def section_compiler(wmoids,data_dir,results_dir,save_as,float_data,params,figsize=(8.5,11),depth_lim=(0,1000),
fixed_ylim=True,mld=True,sea_ice_grids=None,sea_ice_data_avail=None,add_date_bars=None,
condensed_cbar_labels=None,width_ratios=None,height_ratios=None,all_trajs=None,
traj_plot_params=None,show_ice_bars=True,density_coor=False,density_lim=None,
density_power_scale=None,density_depth_contours=None,plot_title=True,force_label_size=None,
explicit_yticks=None,w_pad=0.0,drift_temps=None,drift_temp_baseline=None,drift_temp_depth=None,
years_only=None):
""" Arrange multiple hydrographic sections and float trajectories on a single plot. Wrapper method for pt.section().
"""
plt.figure(figsize=figsize)
if all_trajs is not None:
params = ['__trajectories__',*params]
if condensed_cbar_labels is not None: condensed_cbar_labels = ['__trajectories__',*condensed_cbar_labels]
first_param_idx = 1
else:
first_param_idx = 0
subplot_grid = gridspec.GridSpec(len(params),len(wmoids),width_ratios=width_ratios,height_ratios=height_ratios)
for float_idx, wmoid in enumerate(wmoids):
for param_idx, param in enumerate(params):
plt.subplot(subplot_grid[len(wmoids)*param_idx + float_idx])
if param_idx == 0 and plot_title: plt.title('Float {0}'.format(wmoid),size=8,fontweight='bold')
if param == '__trajectories__':
argo_traj(data_dir,None,all_trajs[float_idx],*traj_plot_params[float_idx],label_dates=True,
save_as=None,label_placement=(0.04,-0.25),boundary_width=1,labelsize=4,
label_dates_12mo_only=True)
if float_idx+1 == len(wmoids):
plt.gca().set_anchor('W')
continue
if param_idx == first_param_idx: ice_bars_yes_no = show_ice_bars; show_prof_ticks = True
else: ice_bars_yes_no = False; show_prof_ticks = False
if float_idx == 0: plot_ylabel = True; density_depth_labels = True
else: plot_ylabel = False; density_depth_labels = False
if float_idx == len(wmoids)-1: plot_cbar = True
else: plot_cbar = False
if param_idx == len(params)-1: plot_xticklabels = True; dd_contours = density_depth_contours
else: plot_xticklabels = False; dd_contours = None
if param == 'ptmp' and drift_temps is not None:
dt = drift_temps[wmoid]; dtb = drift_temp_baseline; dtd = drift_temp_depth
else:
dt = None; dtb = None; dtd = None
section(wmoid,None,None,float_data[float_idx],params=[param],depth_lim=depth_lim,fixed_ylim=fixed_ylim,
mld=mld,show_ice_bars=ice_bars_yes_no,sea_ice_grids=sea_ice_grids,sea_ice_data_avail=sea_ice_data_avail,
show_prof_ticks=show_prof_ticks,add_date_bars=add_date_bars,trim_xlim=False,create_new_figs=False,
                plot_ylabel=plot_ylabel,plot_xticklabels=plot_xticklabels,
                years_only=False if years_only is None else years_only[float_idx],plot_cbar=plot_cbar,
condensed_cbar_label=condensed_cbar_labels[param_idx],smaller_text=True,density_coor=density_coor,
density_lim=density_lim,density_power_scale=density_power_scale,force_label_size=force_label_size,
density_depth_contours=dd_contours,density_depth_labels=density_depth_labels,
explicit_yticks=explicit_yticks,drift_temps=dt,drift_temp_baseline=dtb,drift_temp_depth=dtd)
plt.tight_layout(h_pad=0.2,w_pad=w_pad) # can go negative if necessary
plt.savefig(results_dir + save_as + '.pdf')
plt.close()
def prof_locations_map(results_dir,data_dir,compiled_obs,map_dimensions,
                       toi_range=None,bathy_cmap='Greys_r',
seasons=[[1,3],[4,6],[7,9],[10,12]],season_colors=['orange','cyan','orchid','lime'],
season_labels=['Jan-Mar','Apr-Jun','Jul-Sep','Oct-Dec'],
manual_list_of_types=None,manual_labels_for_types=None,
manual_markers_for_types=None,manual_marker_open_for_types=None,
grid_lats=np.arange(-80,60,5),grid_lons=np.arange(-80,50,10),
lon_labels=[0,0,1,0],lat_labels=[1,0,0,0],label_contours=False,
add_epoch_title=None,fontsize=5,fontsize_extra_for_epoch=2,
add_rect_patch=None,add_circ_patch=None,add_legend=False,legend_pos='outside_bottom',
create_new_fig=False,use_existing_basemap=None,return_basemap=False,save_as=None):
""" Plot locations of hydrographic observations, as compiled by ldp.compile_hydrographic_obs(),
by season and type (source) for a given epoch.
Args:
        toi_range: None (all dates through today) or [start,end] datetimes bounding the observations to plot
        add_rect_patch: None or [lon_W,lon_E,lat_S,lat_N]
add_circ_patch: None or [[lon_cent,lat_cent,radius_in_km], etc.], i.e. a list of params for multiple circles
legend_pos: 'outside_bottom' or 'outside_right'
"""
    if toi_range is None:  # default assigned here to avoid a mutable argument evaluated once at import
        toi_range = [datetime(1900,1,1),datetime.today()]
    if use_existing_basemap is None:
fig,m = bathy_basemap(data_dir,*map_dimensions,create_new_fig=create_new_fig,figsize=(9,9),
boundary_width=1,labelsize=fontsize,grid_color='.2',
grid_lats=grid_lats,grid_lons=grid_lons,force_lon_labels=lon_labels,force_lat_labels=lat_labels,
label_contours=label_contours,cmap=bathy_cmap)
else:
m = use_existing_basemap
if add_rect_patch is not None:
ap = add_rect_patch
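        # densify each rectangle edge with 100 points so the box follows the
        # projection's curvature once mapped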
patch_lons = np.concatenate((np.linspace(ap[0],ap[1],100),np.linspace(ap[1],ap[1],100),
np.linspace(ap[1],ap[0],100),np.linspace(ap[0],ap[0],100)))
patch_lats = np.concatenate((np.linspace(ap[3],ap[3],100),np.linspace(ap[3],ap[2],100),
np.linspace(ap[2],ap[2],100),np.linspace(ap[2],ap[3],100)))
plonx,platy = m(patch_lons,patch_lats)
patchxy = list(zip(plonx,platy))
poly = Polygon(patchxy,facecolor='white',alpha=0.1)
plt.gca().add_patch(poly)
if add_circ_patch is not None:
for circ in add_circ_patch:
circle_tuples = circle(m,*circ)
poly = Polygon(list(circle_tuples),facecolor='white',alpha=0.1)
plt.gca().add_patch(poly)
toi_mask_base = np.logical_and(np.array(compiled_obs['datetimes']) >= toi_range[0],
np.array(compiled_obs['datetimes']) <= toi_range[1])
dt_months = np.array([dt.month for dt in compiled_obs['datetimes']])
if manual_list_of_types is None: obs_types = np.unique(compiled_obs['types'][toi_mask_base])
else: obs_types = manual_list_of_types
if manual_labels_for_types is None: obs_type_labels = obs_types
else: obs_type_labels = manual_labels_for_types
if manual_markers_for_types is None: obs_type_markers = ['o','s','^','v','<','>','p','*','+','d'] # etc.
else: obs_type_markers = manual_markers_for_types
if manual_marker_open_for_types is None: obs_type_markers_open = np.tile(False,len(obs_type_markers))
else: obs_type_markers_open = manual_marker_open_for_types
for s_idx, season_months in enumerate(seasons):
toi_mask = np.logical_and(toi_mask_base,np.logical_and(dt_months >= season_months[0],
dt_months <= season_months[1]))
for t_idx, obs_type in enumerate(obs_types):
final_mask = np.logical_and(toi_mask,np.array(compiled_obs['types']) == obs_type)
if sum(final_mask) > 0:
lonx,laty = m(np.array(compiled_obs['lons'])[final_mask],np.array(compiled_obs['lats'])[final_mask])
if obs_type_markers_open[t_idx]: plt.scatter(lonx,laty,s=4.0,marker=obs_type_markers[t_idx],
facecolor='none',edgecolors=season_colors[s_idx],
linewidths=0.5)
else: plt.scatter(lonx,laty,s=4.0,marker=obs_type_markers[t_idx],
facecolor=season_colors[s_idx],edgecolors='none')
if add_epoch_title is not None:
plt.text(0.05,0.95,add_epoch_title,color='w',fontsize=fontsize+fontsize_extra_for_epoch,fontweight='bold',
horizontalalignment='left',verticalalignment='top',transform=plt.gca().transAxes)
if add_legend:
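        # invisible proxy artists: NaN-valued points contribute only their marker and
        # color to the legend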
for s_idx, season_months in enumerate(seasons):
plt.plot([0,0],[np.nan,np.nan],lw=0,c=season_colors[s_idx],marker='o',ms=4,label=season_labels[s_idx])
for t_idx, obs_type in enumerate(obs_types):
if obs_type_markers_open[t_idx]:
plt.plot([0,0],[np.nan,np.nan],lw=0,marker=obs_type_markers[t_idx],ms=4,
markerfacecolor='none',markeredgecolor='k',markeredgewidth=0.5,label=obs_type_labels[t_idx])
else:
plt.plot([0,0],[np.nan,np.nan],lw=0,marker=obs_type_markers[t_idx],ms=4,
markerfacecolor='k',markeredgecolor='none',label=obs_type_labels[t_idx])
if legend_pos == 'outside_bottom':
ncol = len(seasons)+len(obs_types)
loc = 'upper right'
bbox_to_anchor = [0.5,-0.05]
handletextpad = 0.05
columnspacing = 1.5
labelspacing = None
elif legend_pos == 'outside_right':
ncol = 2
loc = 'center left'
bbox_to_anchor = [1.15,0.5]
handletextpad = 0.25
columnspacing = 1.5
labelspacing = 1.5
plt.legend(ncol=ncol,fontsize=fontsize,loc=loc,bbox_to_anchor=bbox_to_anchor,frameon=False,
handletextpad=handletextpad,columnspacing=columnspacing,labelspacing=labelspacing)
if save_as is not None:
plt.tight_layout()
plt.savefig(results_dir + save_as + '.pdf')
plt.close()
elif return_basemap:
return m
def era_field(data_dir,results_dir,save_as,data,datetime_range,width,height,lat_center,lon_center,bathy_contours=[],
contour=True,contour_lims=None,n_contours=21,use_cmap=None,add_cbar=True,
existing_canvas=None,return_pcm=False,
add_wind_vectors=None,wind_vector_downsample=[5,2],wind_vector_scale=50,
add_wind_vector_key=True,wind_vector_key=20,wind_vector_key_loc=[0.8,-0.25],wind_vector_key_fontsize=8,
add_sic_contours=None,sic_contours=[50],
add_date=None,date_string_loc=[0.05,0.95],date_string_size=8,
date_string_valign='top',date_string_halign='left',average_daily=True,add_patch_lons_lats=None):
""" Plotting routine for daily ECMWF fields.
Args:
data_dir: data directory (for bathymetry files)
results_dir: None to plot on existing canvas, or directory to save plot
save_as: None (to not save figure) or filename, without extension
data: xarray DataArray containing reanalysis parameter, e.g. erai_daily['u10']
datetime_range: single datetime to plot, or range of datetimes ([start,end]) to average for plot
note: if <<average_daily>> is True, averages over all hours during a given day, from hour 0 to 23
width: Basemap plot width
height: Basemap plot height
lat_center: Basemap plot latitude center location
lon_center: Basemap plot longitude center location
bathy_contours: list of depths to add bathymetric contours
contour: True or False to draw filled contour plot of <<data>>
contour_lims: None or [min,max] to specify contour color limits
n_contours: number of contour levels to plot, if contour_lims is specified (default = 21)
add_cbar: plot colorbar? True or False
existing_canvas: None or handle of Basemap instance (m) to plot onto
return_pcm: return handle ('pcm') to pcolormesh of field
add_wind_vectors: None or xarray DataArrays for [u,v] to plot wind vectors from fields
note: assumes lats and lons are same as for main 'data' DataArray above
wind_vector_downsample: [i,j] to plot every ith u-wind and jth v-wind vector
wind_vector_scale: length of wind vectors (larger numbers are smaller vectors)
add_wind_vector_key: add quiver key next to plot, representing size of [N] m/s wind vector
wind_vector_key: depict a [N] m/s vector key
wind_vector_key_loc: location of wind vector in axes coordinates from bottom left (x, y)
wind_vector_key_fontsize: fontsize of 'N m/s' key text
add_sic_contours: None or list of [sic_grid['lons'],sic_grid['lats'],sic_field]
sic_contours: [50] or list of other SIC % levels to contour
add_date: None or formatted date string to add as text
date_string_loc: location in axes coordinates (x,y) from bottom left for date string
date_string_size: fontsize for date string
date_string_valign: vertical alignment of date string location ('top' or 'bottom')
date_string_halign: horizontal alignment of date string location ('left' or 'right')
average_daily: if True, ignores hour value(s) of <<datetime_range>> and averages over day
if False, keeps hour value(s) of <<datetime_range>>
add_patch_lons_lats: None or box coordinates to plot as shaded patch: [lon_W,lon_E,lat_S,lat_N]
"""
if contour_lims is not None: contour_levs = np.linspace(contour_lims[0],contour_lims[1],n_contours)
else: contour_levs = None
if not isinstance(datetime_range,list) and not isinstance(datetime_range,tuple):
dtr = [datetime_range,datetime_range] # i.e. if only single datetime specified
    else:
        dtr = list(datetime_range)  # copy so tuples work and the caller's list is not mutated below
if average_daily: # if not average_daily, interpret datetime_range exactly and slice accordingly
dtr[0] = datetime(dtr[0].year,dtr[0].month,dtr[0].day,0)
dtr[1] = datetime(dtr[1].year,dtr[1].month,dtr[1].day,23,59,59)
data = data.sel(time=slice(*dtr)).mean(dim='time',keep_attrs=True)
if add_wind_vectors is not None:
u_data = add_wind_vectors[0].sel(time=slice(*dtr)).mean(dim='time',keep_attrs=True)
v_data = add_wind_vectors[1].sel(time=slice(*dtr)).mean(dim='time',keep_attrs=True)
if existing_canvas is None:
fig, m = lambert_basemap(width,height,lat_center,lon_center,
boundary_width=1,lon_labels_on_top=True,resolution='i')
else:
fig = plt.gcf()
m = existing_canvas
lon_grid, lat_grid = np.meshgrid(data['lons'],data['lats'])
rlons, rlats = m(lon_grid, lat_grid)
if contour:
if use_cmap is None:
if contour_lims is not None:
if contour_lims[1] == abs(contour_lims[0]): cmap = 'PRGn'
else: cmap = 'viridis'
else:
cmap = 'viridis'
else:
cmap = use_cmap
with warnings.catch_warnings(): # ignore Dask true_divide warning upon evaluating data
warnings.simplefilter('ignore')
pcm = m.contourf(rlons,rlats,data,levels=contour_levs,cmap=cmap,extend='both')
if add_cbar:
cbar = plt.colorbar()
cbar.set_label('{0} ({1})'.format(data.attrs['long_name'],data.attrs['units']),rotation=-90,labelpad=15)
    if add_wind_vectors is not None:
[i,j] = wind_vector_downsample
Q = plt.quiver(rlons[::j,::i],rlats[::j,::i],u_data[::j,::i],v_data[::j,::i],
units='width',scale=wind_vector_scale,width=0.01,zorder=10)
if add_wind_vector_key: plt.quiverkey(Q,*wind_vector_key_loc,wind_vector_key,
r'{0} '.format(wind_vector_key) + r'm s$^{-1}$',
fontproperties={'size':wind_vector_key_fontsize})
if add_sic_contours is not None:
sic_lonx,sic_laty = m(add_sic_contours[0],add_sic_contours[1])
plt.contour(sic_lonx,sic_laty,add_sic_contours[2],levels=sic_contours,
colors='k',linewidths=0.5,alpha=0.8,zorder=5)
if len(bathy_contours) > 0:
etopo_lons,etopo_lats,etopo = ldp.load_bathy(data_dir)
retopolons,retopolats = m(*np.meshgrid(etopo_lons,etopo_lats))
olevels = bathy_contours # check etopo.ravel().min()
m.contour(retopolons,retopolats,etopo,olevels,linewidths=0.5,linestyles='solid',colors='#808080',
alpha=0.5,zorder=4)
if add_patch_lons_lats is not None:
pll = add_patch_lons_lats
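        # densify each patch edge with 100 points so the box follows the projection's
        # curvature once mapped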
patch_lons = np.concatenate((np.linspace(pll[0],pll[1],100),np.linspace(pll[1],pll[1],100),
np.linspace(pll[1],pll[0],100),np.linspace(pll[0],pll[0],100)))
patch_lats = np.concatenate((np.linspace(pll[3],pll[3],100),np.linspace(pll[3],pll[2],100),
np.linspace(pll[2],pll[2],100),np.linspace(pll[2],pll[3],100)))
plonx,platy = m(patch_lons,patch_lats)
patchxy = list(zip(plonx,platy))
poly = Polygon(patchxy,facecolor='white',alpha=0.25,zorder=3)
plt.gca().add_patch(poly)
if add_date is not None:
plt.text(*date_string_loc,add_date,fontsize=date_string_size,fontweight='bold',
horizontalalignment=date_string_halign,verticalalignment=date_string_valign,
transform=plt.gca().transAxes)
if save_as is not None:
plt.savefig(results_dir + save_as + '.pdf')
plt.close()
if return_pcm and contour:
return pcm
############# AUXILIARY (INTERNAL) FUNCTIONS ################
def lambert_basemap(width,height,lat_center,lon_center,boundary_width=2,create_new_fig=True,figsize=None,resolution='i',
draw_grid=True,lon_labels_on_top=False,grid_color='0.2',meridians=np.arange(-80,50,20)):
""" Creates basic figure on a Lambert azimuthal equal-area projection.
"""
warnings.filterwarnings('ignore', category=mcbook.mplDeprecation)
if create_new_fig: fig = plt.figure(figsize=figsize)
else: fig = plt.gcf()
m = Basemap(width=width, height=height, resolution=resolution, projection='laea', lat_ts=lat_center,
lat_0=lat_center, lon_0=lon_center)
m.drawcoastlines(color='k')
m.drawmapboundary(linewidth=boundary_width)
m.fillcontinents()
if draw_grid:
if lon_labels_on_top: lon_labels = [0,0,1,0]
else: lon_labels = [0,0,0,1]
m.drawmeridians(meridians, linewidth=0.5, color=grid_color, labels=lon_labels)
m.drawmeridians(np.arange(-80, 50, 10), linewidth=0.5, color=grid_color)
m.drawparallels(np.arange(-80, 60, 5), linewidth=0.5, color=grid_color, labels=[1, 0, 0, 0])
return fig, m
def bathy_basemap(data_dir,width,height,lat_center,lon_center,create_new_fig=True,figsize=None,
labelsize=None,lon_labels_on_top=False,force_lon_labels=None,force_lat_labels=[1,0,0,0],
boundary_width=2,grid_color='0.2',grid_lats=np.arange(-80,60,5),grid_lons=np.arange(-80,50,10),
label_contours=False,cmap=cmocean.cm.deep_r,bathy_alpha=1.0):
""" Draws bathymetry on LAEA (Lambert) basemap.
Currently the figure parameters are not entirely defined through arguments above. This could be remedied.
"""
fig, m = lambert_basemap(width,height,lat_center,lon_center,create_new_fig=create_new_fig,figsize=figsize,
draw_grid=False,boundary_width=boundary_width)
lons, lats, etopo = ldp.load_bathy(data_dir)
rlons, rlats = m(*np.meshgrid(lons, lats))
olevels = np.arange(-7000, 760, 750) # check etopo.ravel().min()
cf = m.contourf(rlons, rlats, etopo, olevels, cmap=cmap, alpha=bathy_alpha, zorder=1)
if label_contours:
if cmap == 'Greys_r': contour_line_cmap = 'Greys_r'
else: contour_line_cmap = None
co = m.contour(rlons,rlats,-1*etopo,[2500,3250,4000,4750],linewidths=0.0,alpha=0.5,cmap=contour_line_cmap,zorder=1)
print('>>> Waiting for manual input.\n'
'>>> Click to position contours, hit Return when done.\n'
'>>> Note: do not change figure size.')
if cmap == 'Greys_r': clabel_single_color = 'k' # or change to None to use reversed grayscale cmap
else: clabel_single_color = 'w'
        if labelsize is not None: clabel_fontsize = labelsize - 1
        else: clabel_fontsize = None  # guard: labelsize defaults to None
        plt.clabel(co,colors=clabel_single_color,fmt='%d',fontsize=clabel_fontsize,manual=True,inline=True)
if lon_labels_on_top: lon_labels = [0, 0, 1, 0]
else: lon_labels = [0, 0, 0, 1]
lat_labels = [1,0,0,0]
if force_lon_labels is not None: lon_labels = force_lon_labels
if force_lat_labels is not None: lat_labels = force_lat_labels
m.drawmeridians(grid_lons,color=grid_color,linewidth=0.5,labels=lon_labels,fontsize=labelsize,zorder=2)
m.drawparallels(grid_lats,color=grid_color,linewidth=0.5,labels=lat_labels,fontsize=labelsize,zorder=2)
return fig, m
def blank_inset_basemap(width,height,lat_center,lon_center,create_new_fig=True,
boundary_width=2,coastline_width=1,lon_labels=[0,0,0,0],lat_labels=[0,0,0,0],
grid_lats=np.arange(-80,60,5),grid_lons=np.arange(-80,50,10),labelsize=None,grid_color='0.2',
fill_continent_zorder=None,lat_lon_line_zorder=None,fill_continent_color='0.8',
resolution='i'):
""" Creates figure with regional Lambert basemap, to be filled elsewhere with a plot.
"""
if create_new_fig: fig = plt.figure(figsize=(9, 7))
else: fig = plt.gcf()
m = Basemap(width=width, height=height, resolution=resolution, projection='laea', lat_ts=lat_center,
lat_0=lat_center, lon_0=lon_center)
m.drawcoastlines(linewidth=coastline_width,color='k',zorder=fill_continent_zorder)
m.drawmapboundary(linewidth=boundary_width, fill_color='#f0ffff')
m.fillcontinents(color=fill_continent_color,zorder=fill_continent_zorder)
if create_new_fig:
print('Temporary warning from pt.blank_inset_basemap(): '
'explicit setting of lat/lon labels is turned off; make sure this is okay')
# lon_labels = [0, 0, 0, 1]
# lat_labels = [1, 0, 0, 0]
m.drawmeridians(grid_lons, color=grid_color, linewidth=0.5, labels=lon_labels, fontsize=labelsize,
zorder=lat_lon_line_zorder)
m.drawparallels(grid_lats, color=grid_color, linewidth=0.5, labels=lat_labels, fontsize=labelsize,
zorder=lat_lon_line_zorder)
return fig, m
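if __name__ == '__main__':
    # minimal smoke test (hypothetical map dimensions): draw an empty Lambert map of
    # the Weddell Sea region and display it interactively
    fig, m = lambert_basemap(4000000, 3000000, -65, 0)
    plt.show()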
|
[
"matplotlib.pyplot.title",
"matplotlib.dates.MonthLocator",
"numpy.abs",
"time_tools.convert_tuple_to_8_int",
"matplotlib.pyplot.quiver",
"load_product.sea_ice_concentration_along_track",
"numpy.isnan",
"matplotlib.patches.Polygon",
"matplotlib.pyplot.figure",
"numpy.arange",
"matplotlib.pyplot.contour",
"geo_tools.mld",
"matplotlib.pyplot.gca",
"numpy.interp",
"matplotlib.pyplot.tight_layout",
"numpy.unique",
"time_tools.convert_14_to_tuple",
"load_product.load_bathy",
"matplotlib.colors.LinearSegmentedColormap.from_list",
"numpy.meshgrid",
"time_tools.convert_8_int_to_tuple",
"warnings.simplefilter",
"time_tools.dates_in_range",
"matplotlib.pyplot.close",
"matplotlib.pyplot.yticks",
"numpy.isfinite",
"matplotlib.pyplot.colorbar",
"geo_tools.profile_interp",
"matplotlib.dates.DateFormatter",
"numpy.max",
"warnings.catch_warnings",
"numpy.linspace",
"matplotlib.pyplot.xticks",
"time_tools.convert_tuple_to_datetime",
"Circles.circles.circle",
"datetime.datetime.today",
"matplotlib.pyplot.ylim",
"matplotlib.pyplot.legend",
"datetime.datetime",
"matplotlib.pyplot.text",
"numpy.sort",
"numpy.min",
"matplotlib.pyplot.pcolormesh",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.grid",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.clabel",
"geo_tools.vert_prof_eval",
"matplotlib.pyplot.plot",
"warnings.filterwarnings",
"os.path.isdir",
"numpy.logical_and",
"matplotlib.colors.BoundaryNorm",
"matplotlib.pyplot.scatter",
"geo_tools.vert_prof_even_spacing",
"numpy.where",
"numpy.array",
"numpy.diff",
"matplotlib.dates.YearLocator",
"mpl_toolkits.basemap.Basemap",
"matplotlib.pyplot.savefig"
] |
[((515, 565), 'os.path.isdir', 'os.path.isdir', (['"""/Applications/anaconda/share/proj"""'], {}), "('/Applications/anaconda/share/proj')\n", (528, 565), False, 'import os\n'), ((775, 839), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""', '""".*is_string_like function.*"""'], {}), "('ignore', '.*is_string_like function.*')\n", (798, 839), False, 'import warnings\n'), ((1679, 1706), 'numpy.arange', 'np.arange', (['(-3500)', '(-100)', '(500)'], {}), '(-3500, -100, 500)\n', (1688, 1706), True, 'import numpy as np\n'), ((1741, 1762), 'numpy.arange', 'np.arange', (['(-80)', '(60)', '(5)'], {}), '(-80, 60, 5)\n', (1750, 1762), True, 'import numpy as np\n'), ((1771, 1793), 'numpy.arange', 'np.arange', (['(-80)', '(50)', '(10)'], {}), '(-80, 50, 10)\n', (1780, 1793), True, 'import numpy as np\n'), ((2343, 2408), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'mcbook.mplDeprecation'}), "('ignore', category=mcbook.mplDeprecation)\n", (2366, 2408), False, 'import warnings\n'), ((5041, 5248), 'matplotlib.pyplot.pcolormesh', 'plt.pcolormesh', (['(lonx - sic_lon_edge_to_center)', '(laty - sic_lat_edge_to_center)', 'sic_nan_masked'], {'cmap': 'ice_cmap', 'edgecolors': '"""None"""', 'rasterized': 'rasterized', 'zorder': '(1)', 'alpha': '(1.0)', 'vmin': 'open_sic', 'vmax': 'max_sic'}), "(lonx - sic_lon_edge_to_center, laty - sic_lat_edge_to_center,\n sic_nan_masked, cmap=ice_cmap, edgecolors='None', rasterized=rasterized,\n zorder=1, alpha=1.0, vmin=open_sic, vmax=max_sic)\n", (5055, 5248), True, 'import matplotlib.pyplot as plt\n'), ((20185, 20212), 'numpy.arange', 'np.arange', (['(7.84)', '(8.16)', '(0.04)'], {}), '(7.84, 8.16, 0.04)\n', (20194, 20212), True, 'import numpy as np\n'), ((20240, 20267), 'numpy.arange', 'np.arange', (['(20.0)', '(34.01)', '(1.0)'], {}), '(20.0, 34.01, 1.0)\n', (20249, 20267), True, 'import numpy as np\n'), ((20298, 20323), 'numpy.arange', 'np.arange', (['(0.0)', '(2.0)', '(0.25)'], {}), '(0.0, 2.0, 0.25)\n', (20307, 20323), True, 'import numpy as np\n'), ((41350, 41377), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (41360, 41377), True, 'import matplotlib.pyplot as plt\n'), ((44324, 44364), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {'h_pad': '(0.2)', 'w_pad': 'w_pad'}), '(h_pad=0.2, w_pad=w_pad)\n', (44340, 44364), True, 'import matplotlib.pyplot as plt\n'), ((44399, 44442), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(results_dir + save_as + '.pdf')"], {}), "(results_dir + save_as + '.pdf')\n", (44410, 44442), True, 'import matplotlib.pyplot as plt\n'), ((44447, 44458), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (44456, 44458), True, 'import matplotlib.pyplot as plt\n'), ((45015, 45036), 'numpy.arange', 'np.arange', (['(-80)', '(60)', '(5)'], {}), '(-80, 60, 5)\n', (45024, 45036), True, 'import numpy as np\n'), ((45045, 45067), 'numpy.arange', 'np.arange', (['(-80)', '(50)', '(10)'], {}), '(-80, 50, 10)\n', (45054, 45067), True, 'import numpy as np\n'), ((47380, 47436), 'numpy.array', 'np.array', (["[dt.month for dt in compiled_obs['datetimes']]"], {}), "([dt.month for dt in compiled_obs['datetimes']])\n", (47388, 47436), True, 'import numpy as np\n'), ((55688, 55727), 'numpy.meshgrid', 'np.meshgrid', (["data['lons']", "data['lats']"], {}), "(data['lons'], data['lats'])\n", (55699, 55727), True, 'import numpy as np\n'), ((58995, 59017), 'numpy.arange', 'np.arange', (['(-80)', '(50)', '(20)'], {}), '(-80, 50, 20)\n', (59004, 
59017), True, 'import numpy as np\n'), ((59106, 59171), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {'category': 'mcbook.mplDeprecation'}), "('ignore', category=mcbook.mplDeprecation)\n", (59129, 59171), False, 'import warnings\n'), ((59278, 59415), 'mpl_toolkits.basemap.Basemap', 'Basemap', ([], {'width': 'width', 'height': 'height', 'resolution': 'resolution', 'projection': '"""laea"""', 'lat_ts': 'lat_center', 'lat_0': 'lat_center', 'lon_0': 'lon_center'}), "(width=width, height=height, resolution=resolution, projection=\n 'laea', lat_ts=lat_center, lat_0=lat_center, lon_0=lon_center)\n", (59285, 59415), False, 'from mpl_toolkits.basemap import Basemap\n'), ((60210, 60231), 'numpy.arange', 'np.arange', (['(-80)', '(60)', '(5)'], {}), '(-80, 60, 5)\n', (60219, 60231), True, 'import numpy as np\n'), ((60240, 60262), 'numpy.arange', 'np.arange', (['(-80)', '(50)', '(10)'], {}), '(-80, 50, 10)\n', (60249, 60262), True, 'import numpy as np\n'), ((60731, 60755), 'load_product.load_bathy', 'ldp.load_bathy', (['data_dir'], {}), '(data_dir)\n', (60745, 60755), True, 'import load_product as ldp\n'), ((60817, 60843), 'numpy.arange', 'np.arange', (['(-7000)', '(760)', '(750)'], {}), '(-7000, 760, 750)\n', (60826, 60843), True, 'import numpy as np\n'), ((62385, 62406), 'numpy.arange', 'np.arange', (['(-80)', '(60)', '(5)'], {}), '(-80, 60, 5)\n', (62394, 62406), True, 'import numpy as np\n'), ((62415, 62437), 'numpy.arange', 'np.arange', (['(-80)', '(50)', '(10)'], {}), '(-80, 50, 10)\n', (62424, 62437), True, 'import numpy as np\n'), ((62821, 62958), 'mpl_toolkits.basemap.Basemap', 'Basemap', ([], {'width': 'width', 'height': 'height', 'resolution': 'resolution', 'projection': '"""laea"""', 'lat_ts': 'lat_center', 'lat_0': 'lat_center', 'lon_0': 'lon_center'}), "(width=width, height=height, resolution=resolution, projection=\n 'laea', lat_ts=lat_center, lat_0=lat_center, lon_0=lon_center)\n", (62828, 62958), False, 'from mpl_toolkits.basemap import Basemap\n'), ((4808, 4821), 'numpy.isnan', 'np.isnan', (['sic'], {}), '(sic)\n', (4816, 4821), True, 'import numpy as np\n'), ((5562, 5634), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'pad': '(0.05)', 'shrink': '(0.65)', 'format': '"""%.0f%%"""', 'extend': 'extend_cmap'}), "(pad=0.05, shrink=0.65, format='%.0f%%', extend=extend_cmap)\n", (5574, 5634), True, 'import matplotlib.pyplot as plt\n'), ((5774, 5886), 'matplotlib.pyplot.contour', 'plt.contour', (['lonx', 'laty', 'polynya_grid'], {'levels': '[0.999]', 'colors': '"""#00FF00"""', 'linewidths': '(0.7)', 'alpha': '(0.8)', 'zorder': '(2)'}), "(lonx, laty, polynya_grid, levels=[0.999], colors='#00FF00',\n linewidths=0.7, alpha=0.8, zorder=2)\n", (5785, 5886), True, 'import matplotlib.pyplot as plt\n'), ((5949, 5973), 'load_product.load_bathy', 'ldp.load_bathy', (['data_dir'], {}), '(data_dir)\n', (5963, 5973), True, 'import load_product as ldp\n'), ((11471, 11489), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (11487, 11489), True, 'import matplotlib.pyplot as plt\n'), ((11636, 11647), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (11645, 11647), True, 'import matplotlib.pyplot as plt\n'), ((19095, 19208), 'numpy.array', 'np.array', (["['ptmp', 'psal', 'Nsquared', 'PV', 'destab', 'Oxygen', 'OxygenSat',\n 'pHinsitu', 'Nitrate', 'Chl_a']"], {}), "(['ptmp', 'psal', 'Nsquared', 'PV', 'destab', 'Oxygen', 'OxygenSat',\n 'pHinsitu', 'Nitrate', 'Chl_a'])\n", (19103, 19208), True, 'import numpy as np\n'), ((24064, 24084), 'numpy.where', 
'np.where', (['prof_match'], {}), '(prof_match)\n', (24072, 24084), True, 'import numpy as np\n'), ((25445, 25535), 'time_tools.dates_in_range', 'tt.dates_in_range', (['datetime_coord_as_tuples[0][0:3]', 'datetime_coord_as_tuples[-1][0:3]'], {}), '(datetime_coord_as_tuples[0][0:3],\n datetime_coord_as_tuples[-1][0:3])\n', (25462, 25535), True, 'import time_tools as tt\n'), ((25873, 25939), 'numpy.interp', 'np.interp', (['timestamp_coord_daily', 'timestamp_coord_profs', 'prof_lats'], {}), '(timestamp_coord_daily, timestamp_coord_profs, prof_lats)\n', (25882, 25939), True, 'import numpy as np\n'), ((25975, 26041), 'numpy.interp', 'np.interp', (['timestamp_coord_daily', 'timestamp_coord_profs', 'prof_lons'], {}), '(timestamp_coord_daily, timestamp_coord_profs, prof_lons)\n', (25984, 26041), True, 'import numpy as np\n'), ((26511, 26643), 'load_product.sea_ice_concentration_along_track', 'ldp.sea_ice_concentration_along_track', (['date_coord_daily', 'lat_coord_for_ice', 'lon_coord_for_ice', 'sea_ice_grids', 'sea_ice_data_avail'], {}), '(date_coord_daily, lat_coord_for_ice,\n lon_coord_for_ice, sea_ice_grids, sea_ice_data_avail)\n', (26548, 26643), True, 'import load_product as ldp\n'), ((30138, 30173), 'numpy.array', 'np.array', (['cmap_levels[param_abbrev]'], {}), '(cmap_levels[param_abbrev])\n', (30146, 30173), True, 'import numpy as np\n'), ((30492, 30627), 'matplotlib.colors.LinearSegmentedColormap.from_list', 'mcolors.LinearSegmentedColormap.from_list', ([], {'name': 'None', 'colors': 'cmap_colors[param_abbrev]', 'N': 'N_colors', 'gamma': 'cmap_gamma[param_abbrev]'}), '(name=None, colors=cmap_colors[\n param_abbrev], N=N_colors, gamma=cmap_gamma[param_abbrev])\n', (30533, 30627), True, 'import matplotlib.colors as mcolors\n'), ((30993, 31056), 'matplotlib.colors.BoundaryNorm', 'mcolors.BoundaryNorm', (['more_levels'], {'ncolors': 'N_colors', 'clip': '(False)'}), '(more_levels, ncolors=N_colors, clip=False)\n', (31013, 31056), True, 'import matplotlib.colors as mcolors\n'), ((38993, 39013), 'matplotlib.dates.YearLocator', 'mdates.YearLocator', ([], {}), '()\n', (39011, 39013), True, 'import matplotlib.dates as mdates\n'), ((39031, 39052), 'matplotlib.dates.MonthLocator', 'mdates.MonthLocator', ([], {}), '()\n', (39050, 39052), True, 'import matplotlib.dates as mdates\n'), ((39507, 39530), 'matplotlib.pyplot.xticks', 'plt.xticks', ([], {'rotation': '(45)'}), '(rotation=45)\n', (39517, 39530), True, 'import matplotlib.pyplot as plt\n'), ((44568, 44588), 'datetime.datetime', 'datetime', (['(1900)', '(1)', '(1)'], {}), '(1900, 1, 1)\n', (44576, 44588), False, 'from datetime import datetime, timedelta\n'), ((44587, 44603), 'datetime.datetime.today', 'datetime.today', ([], {}), '()\n', (44601, 44603), False, 'from datetime import datetime, timedelta\n'), ((46878, 46924), 'matplotlib.patches.Polygon', 'Polygon', (['patchxy'], {'facecolor': '"""white"""', 'alpha': '(0.1)'}), "(patchxy, facecolor='white', alpha=0.1)\n", (46885, 46924), False, 'from matplotlib.patches import Polygon\n'), ((47487, 47534), 'numpy.unique', 'np.unique', (["compiled_obs['types'][toi_mask_base]"], {}), "(compiled_obs['types'][toi_mask_base])\n", (47496, 47534), True, 'import numpy as np\n'), ((50667, 50856), 'matplotlib.pyplot.legend', 'plt.legend', ([], {'ncol': 'ncol', 'fontsize': 'fontsize', 'loc': 'loc', 'bbox_to_anchor': 'bbox_to_anchor', 'frameon': '(False)', 'handletextpad': 'handletextpad', 'columnspacing': 'columnspacing', 'labelspacing': 'labelspacing'}), '(ncol=ncol, fontsize=fontsize, loc=loc, 
bbox_to_anchor=\n bbox_to_anchor, frameon=False, handletextpad=handletextpad,\n columnspacing=columnspacing, labelspacing=labelspacing)\n', (50677, 50856), True, 'import matplotlib.pyplot as plt\n'), ((50898, 50916), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (50914, 50916), True, 'import matplotlib.pyplot as plt\n'), ((50925, 50968), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(results_dir + save_as + '.pdf')"], {}), "(results_dir + save_as + '.pdf')\n", (50936, 50968), True, 'import matplotlib.pyplot as plt\n'), ((50977, 50988), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (50986, 50988), True, 'import matplotlib.pyplot as plt\n'), ((54553, 54610), 'numpy.linspace', 'np.linspace', (['contour_lims[0]', 'contour_lims[1]', 'n_contours'], {}), '(contour_lims[0], contour_lims[1], n_contours)\n', (54564, 54610), True, 'import numpy as np\n'), ((54996, 55046), 'datetime.datetime', 'datetime', (['dtr[0].year', 'dtr[0].month', 'dtr[0].day', '(0)'], {}), '(dtr[0].year, dtr[0].month, dtr[0].day, 0)\n', (55004, 55046), False, 'from datetime import datetime, timedelta\n'), ((55061, 55120), 'datetime.datetime', 'datetime', (['dtr[1].year', 'dtr[1].month', 'dtr[1].day', '(23)', '(59)', '(59)'], {}), '(dtr[1].year, dtr[1].month, dtr[1].day, 23, 59, 59)\n', (55069, 55120), False, 'from datetime import datetime, timedelta\n'), ((55624, 55633), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (55631, 55633), True, 'import matplotlib.pyplot as plt\n'), ((56580, 56727), 'matplotlib.pyplot.quiver', 'plt.quiver', (['rlons[::j, ::i]', 'rlats[::j, ::i]', 'u_data[::j, ::i]', 'v_data[::j, ::i]'], {'units': '"""width"""', 'scale': 'wind_vector_scale', 'width': '(0.01)', 'zorder': '(10)'}), "(rlons[::j, ::i], rlats[::j, ::i], u_data[::j, ::i], v_data[::j,\n ::i], units='width', scale=wind_vector_scale, width=0.01, zorder=10)\n", (56590, 56727), True, 'import matplotlib.pyplot as plt\n'), ((57131, 57257), 'matplotlib.pyplot.contour', 'plt.contour', (['sic_lonx', 'sic_laty', 'add_sic_contours[2]'], {'levels': 'sic_contours', 'colors': '"""k"""', 'linewidths': '(0.5)', 'alpha': '(0.8)', 'zorder': '(5)'}), "(sic_lonx, sic_laty, add_sic_contours[2], levels=sic_contours,\n colors='k', linewidths=0.5, alpha=0.8, zorder=5)\n", (57142, 57257), True, 'import matplotlib.pyplot as plt\n'), ((57339, 57363), 'load_product.load_bathy', 'ldp.load_bathy', (['data_dir'], {}), '(data_dir)\n', (57353, 57363), True, 'import load_product as ldp\n'), ((58221, 58278), 'matplotlib.patches.Polygon', 'Polygon', (['patchxy'], {'facecolor': '"""white"""', 'alpha': '(0.25)', 'zorder': '(3)'}), "(patchxy, facecolor='white', alpha=0.25, zorder=3)\n", (58228, 58278), False, 'from matplotlib.patches import Polygon\n'), ((58607, 58650), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(results_dir + save_as + '.pdf')"], {}), "(results_dir + save_as + '.pdf')\n", (58618, 58650), True, 'import matplotlib.pyplot as plt\n'), ((58659, 58670), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (58668, 58670), True, 'import matplotlib.pyplot as plt\n'), ((59202, 59229), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'figsize'}), '(figsize=figsize)\n', (59212, 59229), True, 'import matplotlib.pyplot as plt\n'), ((59259, 59268), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (59266, 59268), True, 'import matplotlib.pyplot as plt\n'), ((61571, 61677), 'matplotlib.pyplot.clabel', 'plt.clabel', (['co'], {'colors': 'clabel_single_color', 'fmt': '"""%d"""', 'fontsize': '(labelsize - 1)', 
'manual': '(True)', 'inline': '(True)'}), "(co, colors=clabel_single_color, fmt='%d', fontsize=labelsize - 1,\n manual=True, inline=True)\n", (61581, 61677), True, 'import matplotlib.pyplot as plt\n'), ((62746, 62772), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(9, 7)'}), '(figsize=(9, 7))\n', (62756, 62772), True, 'import matplotlib.pyplot as plt\n'), ((62802, 62811), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (62809, 62811), True, 'import matplotlib.pyplot as plt\n'), ((4659, 4668), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4666, 4668), True, 'import matplotlib.pyplot as plt\n'), ((4692, 4701), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4699, 4701), True, 'import matplotlib.pyplot as plt\n'), ((6665, 6696), 'time_tools.convert_tuple_to_8_int', 'tt.convert_tuple_to_8_int', (['date'], {}), '(date)\n', (6690, 6696), True, 'import time_tools as tt\n'), ((11434, 11463), 'matplotlib.pyplot.title', 'plt.title', (['title'], {'fontsize': '(16)'}), '(title, fontsize=16)\n', (11443, 11463), True, 'import matplotlib.pyplot as plt\n'), ((11511, 11563), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(results_dir + save_as + '.png')"], {'dpi': '(150)'}), "(results_dir + save_as + '.png', dpi=150)\n", (11522, 11563), True, 'import matplotlib.pyplot as plt\n'), ((11584, 11627), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(results_dir + save_as + '.pdf')"], {}), "(results_dir + save_as + '.pdf')\n", (11595, 11627), True, 'import matplotlib.pyplot as plt\n'), ((24334, 24396), 'time_tools.convert_14_to_tuple', 'tt.convert_14_to_tuple', (["float_data['profiles'][pi]['datetime']"], {}), "(float_data['profiles'][pi]['datetime'])\n", (24356, 24396), True, 'import time_tools as tt\n'), ((24580, 24702), 'geo_tools.mld', 'gt.mld', (["float_data['profiles'][pi]"], {'ref_depth': 'mld_ref_depth', 'sigma_theta_crit': 'mld_sigma_theta_crit', 'verbose_warn': '(False)'}), "(float_data['profiles'][pi], ref_depth=mld_ref_depth,\n sigma_theta_crit=mld_sigma_theta_crit, verbose_warn=False)\n", (24586, 24702), True, 'import geo_tools as gt\n'), ((25563, 25603), 'time_tools.convert_tuple_to_datetime', 'tt.convert_tuple_to_datetime', (['date_tuple'], {}), '(date_tuple)\n', (25591, 25603), True, 'import time_tools as tt\n'), ((30078, 30109), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'new_figsize'}), '(figsize=new_figsize)\n', (30088, 30109), True, 'import matplotlib.pyplot as plt\n'), ((32065, 32162), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'ticks': 'specified_levels', 'spacing': '"""uniform"""', 'shrink': 'shrink_cbar', 'format': 'formatter'}), "(ticks=specified_levels, spacing='uniform', shrink=shrink_cbar,\n format=formatter)\n", (32077, 32162), True, 'import matplotlib.pyplot as plt\n'), ((33849, 34029), 'matplotlib.pyplot.plot', 'plt.plot', (["drift_temps['datetime']", "(drift_temp_depth + (depth_lim[1] - depth_lim[0]) * (drift_temp_baseline -\n drift_temps['temp']))", '"""k-"""'], {'linewidth': '(0.01)', 'alpha': '(0.5)', 'zorder': '(4)'}), "(drift_temps['datetime'], drift_temp_depth + (depth_lim[1] -\n depth_lim[0]) * (drift_temp_baseline - drift_temps['temp']), 'k-',\n linewidth=0.01, alpha=0.5, zorder=4)\n", (33857, 34029), True, 'import matplotlib.pyplot as plt\n'), ((34090, 34139), 'matplotlib.pyplot.xlim', 'plt.xlim', (['[datetime_coord[0], datetime_coord[-1]]'], {}), '([datetime_coord[0], datetime_coord[-1]])\n', (34098, 34139), True, 'import matplotlib.pyplot as plt\n'), ((34398, 34430), 'matplotlib.pyplot.xlim', 'plt.xlim', 
(['[start_date, end_date]'], {}), '([start_date, end_date])\n', (34406, 34430), True, 'import matplotlib.pyplot as plt\n'), ((34919, 35051), 'matplotlib.pyplot.contour', 'plt.contour', (['datetime_coord', 'z_vec', 'depth_data'], {'levels': 'density_depth_contours', 'linewidths': '(0.5)', 'alpha': '(0.75)', 'colors': '"""k"""', 'zorder': '(5)'}), "(datetime_coord, z_vec, depth_data, levels=\n density_depth_contours, linewidths=0.5, alpha=0.75, colors='k', zorder=5)\n", (34930, 35051), True, 'import matplotlib.pyplot as plt\n'), ((36897, 37013), 'matplotlib.pyplot.plot', 'plt.plot', (['[datetime_coord_daily[0], datetime_coord_daily[-1]]', '[sic_baseline, sic_baseline]', '"""k-"""'], {'linewidth': '(0.5)'}), "([datetime_coord_daily[0], datetime_coord_daily[-1]], [sic_baseline,\n sic_baseline], 'k-', linewidth=0.5)\n", (36905, 37013), True, 'import matplotlib.pyplot as plt\n'), ((39110, 39136), 'matplotlib.dates.DateFormatter', 'mdates.DateFormatter', (['"""%b"""'], {}), "('%b')\n", (39130, 39136), True, 'import matplotlib.dates as mdates\n'), ((39300, 39326), 'matplotlib.dates.DateFormatter', 'mdates.DateFormatter', (['"""%Y"""'], {}), "('%Y')\n", (39320, 39326), True, 'import matplotlib.dates as mdates\n'), ((39769, 39845), 'matplotlib.pyplot.grid', 'plt.grid', ([], {'which': '"""major"""', 'axis': '"""both"""', 'color': '"""0.6"""', 'linewidth': '(0.25)', 'alpha': '(0.6)'}), "(which='major', axis='both', color='0.6', linewidth=0.25, alpha=0.6)\n", (39777, 39845), True, 'import matplotlib.pyplot as plt\n'), ((40406, 40424), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (40422, 40424), True, 'import matplotlib.pyplot as plt\n'), ((40437, 40495), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(results_dir + save_as + param_abbrev + '.pdf')"], {}), "(results_dir + save_as + param_abbrev + '.pdf')\n", (40448, 40495), True, 'import matplotlib.pyplot as plt\n'), ((40508, 40519), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (40517, 40519), True, 'import matplotlib.pyplot as plt\n'), ((47057, 47073), 'Circles.circles.circle', 'circle', (['m', '*circ'], {}), '(m, *circ)\n', (47063, 47073), False, 'from Circles.circles import circle\n'), ((47223, 47258), 'numpy.array', 'np.array', (["compiled_obs['datetimes']"], {}), "(compiled_obs['datetimes'])\n", (47231, 47258), True, 'import numpy as np\n'), ((47311, 47346), 'numpy.array', 'np.array', (["compiled_obs['datetimes']"], {}), "(compiled_obs['datetimes'])\n", (47319, 47346), True, 'import numpy as np\n'), ((48254, 48330), 'numpy.logical_and', 'np.logical_and', (['(dt_months >= season_months[0])', '(dt_months <= season_months[1])'], {}), '(dt_months >= season_months[0], dt_months <= season_months[1])\n', (48268, 48330), True, 'import numpy as np\n'), ((49551, 49665), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, 0]', '[np.nan, np.nan]'], {'lw': '(0)', 'c': 'season_colors[s_idx]', 'marker': '"""o"""', 'ms': '(4)', 'label': 'season_labels[s_idx]'}), "([0, 0], [np.nan, np.nan], lw=0, c=season_colors[s_idx], marker='o',\n ms=4, label=season_labels[s_idx])\n", (49559, 49665), True, 'import matplotlib.pyplot as plt\n'), ((56112, 56137), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (56135, 56137), False, 'import warnings\n'), ((56207, 56238), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (56228, 56238), False, 'import warnings\n'), ((56371, 56385), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (56383, 56385), True, 'import 
matplotlib.pyplot as plt\n'), ((59765, 59787), 'numpy.arange', 'np.arange', (['(-80)', '(50)', '(10)'], {}), '(-80, 50, 10)\n', (59774, 59787), True, 'import numpy as np\n'), ((59846, 59867), 'numpy.arange', 'np.arange', (['(-80)', '(60)', '(5)'], {}), '(-80, 60, 5)\n', (59855, 59867), True, 'import numpy as np\n'), ((60778, 60801), 'numpy.meshgrid', 'np.meshgrid', (['lons', 'lats'], {}), '(lons, lats)\n', (60789, 60801), True, 'import numpy as np\n'), ((3985, 4021), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': 'subplot_fig_size'}), '(figsize=subplot_fig_size)\n', (3995, 4021), True, 'import matplotlib.pyplot as plt\n'), ((6010, 6045), 'numpy.meshgrid', 'np.meshgrid', (['etopo_lons', 'etopo_lats'], {}), '(etopo_lons, etopo_lats)\n', (6021, 6045), True, 'import numpy as np\n'), ((12806, 12875), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {'h_pad': 'pad', 'w_pad': 'pad', 'rect': '(0.02, 0.02, 0.98, 0.98)'}), '(h_pad=pad, w_pad=pad, rect=(0.02, 0.02, 0.98, 0.98))\n', (12822, 12875), True, 'import matplotlib.pyplot as plt\n'), ((13447, 13490), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(results_dir + save_as + '.pdf')"], {}), "(results_dir + save_as + '.pdf')\n", (13458, 13490), True, 'import matplotlib.pyplot as plt\n'), ((13503, 13514), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (13512, 13514), True, 'import matplotlib.pyplot as plt\n'), ((24504, 24555), 'time_tools.convert_tuple_to_datetime', 'tt.convert_tuple_to_datetime', (['datetime_tuple_format'], {}), '(datetime_tuple_format)\n', (24532, 24555), True, 'import time_tools as tt\n'), ((24827, 24919), 'geo_tools.vert_prof_eval', 'gt.vert_prof_eval', (["float_data['profiles'][pi]", '"""sigma_theta"""', 'this_mld'], {'extrap': '"""nearest"""'}), "(float_data['profiles'][pi], 'sigma_theta', this_mld,\n extrap='nearest')\n", (24844, 24919), True, 'import geo_tools as gt\n'), ((27289, 27497), 'geo_tools.vert_prof_even_spacing', 'gt.vert_prof_even_spacing', (["float_data['profiles'][pi]", 'param_abbrev'], {'z_coor': '"""depth"""', 'spacing': 'vert_res', 'interp_method': '"""linear"""', 'extrap': '"""NaN"""', 'top': 'depth_lim[0]', 'bottom': 'depth_lim[1]', 'verbose_error': '(True)'}), "(float_data['profiles'][pi], param_abbrev, z_coor=\n 'depth', spacing=vert_res, interp_method='linear', extrap='NaN', top=\n depth_lim[0], bottom=depth_lim[1], verbose_error=True)\n", (27314, 27497), True, 'import geo_tools as gt\n'), ((29940, 29962), 'numpy.array', 'np.array', (['section_data'], {}), '(section_data)\n', (29948, 29962), True, 'import numpy as np\n'), ((30404, 30425), 'numpy.isnan', 'np.isnan', (['more_levels'], {}), '(more_levels)\n', (30412, 30425), True, 'import numpy as np\n'), ((31499, 31518), 'numpy.min', 'np.min', (['more_levels'], {}), '(more_levels)\n', (31505, 31518), True, 'import numpy as np\n'), ((31524, 31543), 'numpy.max', 'np.max', (['more_levels'], {}), '(more_levels)\n', (31530, 31543), True, 'import numpy as np\n'), ((33266, 33354), 'matplotlib.pyplot.plot', 'plt.plot', (['[obs_dt, obs_dt]', 'obs_range[obs_idx]'], {'color': '"""0.5"""', 'linewidth': '(0.5)', 'zorder': '(3)'}), "([obs_dt, obs_dt], obs_range[obs_idx], color='0.5', linewidth=0.5,\n zorder=3)\n", (33274, 33354), True, 'import matplotlib.pyplot as plt\n'), ((34207, 34268), 'time_tools.convert_14_to_tuple', 'tt.convert_14_to_tuple', (["float_data['profiles'][0]['datetime']"], {}), "(float_data['profiles'][0]['datetime'])\n", (34229, 34268), True, 'import time_tools as tt\n'), ((34322, 34384), 
'time_tools.convert_14_to_tuple', 'tt.convert_14_to_tuple', (["float_data['profiles'][-1]['datetime']"], {}), "(float_data['profiles'][-1]['datetime'])\n", (34344, 34384), True, 'import time_tools as tt\n'), ((35480, 35600), 'matplotlib.pyplot.clabel', 'plt.clabel', (['depth_contours'], {'fmt': '"""%d m"""', 'fontsize': 'depth_contour_fontsize', 'manual': '(True)', 'inline': '(True)', 'inline_spacing': '(25)'}), "(depth_contours, fmt='%d m', fontsize=depth_contour_fontsize,\n manual=True, inline=True, inline_spacing=25)\n", (35490, 35600), True, 'import matplotlib.pyplot as plt\n'), ((35842, 35861), 'numpy.array', 'np.array', (['obs_range'], {}), '(obs_range)\n', (35850, 35861), True, 'import numpy as np\n'), ((37038, 37089), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[sic_baseline + 1.2 * sic_norm, max_ylim]'], {}), '([sic_baseline + 1.2 * sic_norm, max_ylim])\n', (37046, 37089), True, 'import matplotlib.pyplot as plt\n'), ((37120, 37221), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(sic_baseline + 1.2 * sic_norm)', '((density_lim[1] - density_lim[0]) ** density_power_scale)'], {}), '(sic_baseline + 1.2 * sic_norm, (density_lim[1] - density_lim[0]) **\n density_power_scale)\n', (37128, 37221), True, 'import matplotlib.pyplot as plt\n'), ((37301, 37335), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[depth_lim[0], max_ylim]'], {}), '([depth_lim[0], max_ylim])\n', (37309, 37335), True, 'import matplotlib.pyplot as plt\n'), ((37447, 37456), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (37454, 37456), True, 'import matplotlib.pyplot as plt\n'), ((37545, 37572), 'matplotlib.pyplot.yticks', 'plt.yticks', (['explicit_yticks'], {}), '(explicit_yticks)\n', (37555, 37572), True, 'import matplotlib.pyplot as plt\n'), ((37985, 37997), 'matplotlib.pyplot.yticks', 'plt.yticks', ([], {}), '()\n', (37995, 37997), True, 'import matplotlib.pyplot as plt\n'), ((38588, 38623), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Depth (m)"""'], {'size': 'ysize'}), "('Depth (m)', size=ysize)\n", (38598, 38623), True, 'import matplotlib.pyplot as plt\n'), ((38656, 38710), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$\\\\sigma_\\\\theta$ (kg/m$^3$)"""'], {'size': 'ysize'}), "('$\\\\sigma_\\\\theta$ (kg/m$^3$)', size=ysize)\n", (38666, 38710), True, 'import matplotlib.pyplot as plt\n'), ((38755, 38796), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Depth (m) """'], {'size': 'ysize'}), "('Depth (m) ', size=ysize)\n", (38765, 38796), True, 'import matplotlib.pyplot as plt\n'), ((46426, 46456), 'numpy.linspace', 'np.linspace', (['ap[0]', 'ap[1]', '(100)'], {}), '(ap[0], ap[1], 100)\n', (46437, 46456), True, 'import numpy as np\n'), ((46455, 46485), 'numpy.linspace', 'np.linspace', (['ap[1]', 'ap[1]', '(100)'], {}), '(ap[1], ap[1], 100)\n', (46466, 46485), True, 'import numpy as np\n'), ((46522, 46552), 'numpy.linspace', 'np.linspace', (['ap[1]', 'ap[0]', '(100)'], {}), '(ap[1], ap[0], 100)\n', (46533, 46552), True, 'import numpy as np\n'), ((46551, 46581), 'numpy.linspace', 'np.linspace', (['ap[0]', 'ap[0]', '(100)'], {}), '(ap[0], ap[0], 100)\n', (46562, 46581), True, 'import numpy as np\n'), ((46619, 46649), 'numpy.linspace', 'np.linspace', (['ap[3]', 'ap[3]', '(100)'], {}), '(ap[3], ap[3], 100)\n', (46630, 46649), True, 'import numpy as np\n'), ((46648, 46678), 'numpy.linspace', 'np.linspace', (['ap[3]', 'ap[2]', '(100)'], {}), '(ap[3], ap[2], 100)\n', (46659, 46678), True, 'import numpy as np\n'), ((46715, 46745), 'numpy.linspace', 'np.linspace', (['ap[2]', 'ap[2]', '(100)'], {}), '(ap[2], ap[2], 100)\n', 
(46726, 46745), True, 'import numpy as np\n'), ((46744, 46774), 'numpy.linspace', 'np.linspace', (['ap[2]', 'ap[3]', '(100)'], {}), '(ap[2], ap[3], 100)\n', (46755, 46774), True, 'import numpy as np\n'), ((46931, 46940), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (46938, 46940), True, 'import matplotlib.pyplot as plt\n'), ((49768, 49951), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, 0]', '[np.nan, np.nan]'], {'lw': '(0)', 'marker': 'obs_type_markers[t_idx]', 'ms': '(4)', 'markerfacecolor': '"""none"""', 'markeredgecolor': '"""k"""', 'markeredgewidth': '(0.5)', 'label': 'obs_type_labels[t_idx]'}), "([0, 0], [np.nan, np.nan], lw=0, marker=obs_type_markers[t_idx], ms\n =4, markerfacecolor='none', markeredgecolor='k', markeredgewidth=0.5,\n label=obs_type_labels[t_idx])\n", (49776, 49951), True, 'import matplotlib.pyplot as plt\n'), ((49993, 50156), 'matplotlib.pyplot.plot', 'plt.plot', (['[0, 0]', '[np.nan, np.nan]'], {'lw': '(0)', 'marker': 'obs_type_markers[t_idx]', 'ms': '(4)', 'markerfacecolor': '"""k"""', 'markeredgecolor': '"""none"""', 'label': 'obs_type_labels[t_idx]'}), "([0, 0], [np.nan, np.nan], lw=0, marker=obs_type_markers[t_idx], ms\n =4, markerfacecolor='k', markeredgecolor='none', label=obs_type_labels[\n t_idx])\n", (50001, 50156), True, 'import matplotlib.pyplot as plt\n'), ((57399, 57434), 'numpy.meshgrid', 'np.meshgrid', (['etopo_lons', 'etopo_lats'], {}), '(etopo_lons, etopo_lats)\n', (57410, 57434), True, 'import numpy as np\n'), ((57753, 57785), 'numpy.linspace', 'np.linspace', (['pll[0]', 'pll[1]', '(100)'], {}), '(pll[0], pll[1], 100)\n', (57764, 57785), True, 'import numpy as np\n'), ((57784, 57816), 'numpy.linspace', 'np.linspace', (['pll[1]', 'pll[1]', '(100)'], {}), '(pll[1], pll[1], 100)\n', (57795, 57816), True, 'import numpy as np\n'), ((57853, 57885), 'numpy.linspace', 'np.linspace', (['pll[1]', 'pll[0]', '(100)'], {}), '(pll[1], pll[0], 100)\n', (57864, 57885), True, 'import numpy as np\n'), ((57884, 57916), 'numpy.linspace', 'np.linspace', (['pll[0]', 'pll[0]', '(100)'], {}), '(pll[0], pll[0], 100)\n', (57895, 57916), True, 'import numpy as np\n'), ((57954, 57986), 'numpy.linspace', 'np.linspace', (['pll[3]', 'pll[3]', '(100)'], {}), '(pll[3], pll[3], 100)\n', (57965, 57986), True, 'import numpy as np\n'), ((57985, 58017), 'numpy.linspace', 'np.linspace', (['pll[3]', 'pll[2]', '(100)'], {}), '(pll[3], pll[2], 100)\n', (57996, 58017), True, 'import numpy as np\n'), ((58054, 58086), 'numpy.linspace', 'np.linspace', (['pll[2]', 'pll[2]', '(100)'], {}), '(pll[2], pll[2], 100)\n', (58065, 58086), True, 'import numpy as np\n'), ((58085, 58117), 'numpy.linspace', 'np.linspace', (['pll[2]', 'pll[3]', '(100)'], {}), '(pll[2], pll[3], 100)\n', (58096, 58117), True, 'import numpy as np\n'), ((58284, 58293), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (58291, 58293), True, 'import matplotlib.pyplot as plt\n'), ((3202, 3297), 'matplotlib.colors.LinearSegmentedColormap.from_list', 'mcolors.LinearSegmentedColormap.from_list', ([], {'name': 'None', 'colors': 'cmap_colors', 'N': '(250)', 'gamma': '(1.3)'}), '(name=None, colors=cmap_colors, N=\n 250, gamma=1.3)\n', (3243, 3297), True, 'import matplotlib.colors as mcolors\n'), ((4034, 4043), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (4041, 4043), True, 'import matplotlib.pyplot as plt\n'), ((4878, 4897), 'numpy.diff', 'np.diff', (['lonx[0, :]'], {}), '(lonx[0, :])\n', (4885, 4897), True, 'import numpy as np\n'), ((4907, 4927), 'numpy.diff', 'np.diff', (['lonx[-1, :]'], {}), '(lonx[-1, :])\n', 
(4914, 4927), True, 'import numpy as np\n'), ((4980, 4999), 'numpy.diff', 'np.diff', (['laty[0, :]'], {}), '(laty[0, :])\n', (4987, 4999), True, 'import numpy as np\n'), ((5008, 5028), 'numpy.diff', 'np.diff', (['laty[-1, :]'], {}), '(laty[-1, :])\n', (5015, 5028), True, 'import numpy as np\n'), ((7327, 7454), 'matplotlib.pyplot.plot', 'plt.plot', (['[lonx - cross_lonx, lonx + cross_lonx]', '[laty, laty]'], {'color': 'c', 'linestyle': '"""solid"""', 'linewidth': 'cross_width', 'zorder': '(4)'}), "([lonx - cross_lonx, lonx + cross_lonx], [laty, laty], color=c,\n linestyle='solid', linewidth=cross_width, zorder=4)\n", (7335, 7454), True, 'import matplotlib.pyplot as plt\n'), ((7460, 7587), 'matplotlib.pyplot.plot', 'plt.plot', (['[lonx, lonx]', '[laty - cross_laty, laty + cross_laty]'], {'color': 'c', 'linestyle': '"""solid"""', 'linewidth': 'cross_width', 'zorder': '(4)'}), "([lonx, lonx], [laty - cross_laty, laty + cross_laty], color=c,\n linestyle='solid', linewidth=cross_width, zorder=4)\n", (7468, 7587), True, 'import matplotlib.pyplot as plt\n'), ((7593, 7670), 'matplotlib.pyplot.scatter', 'plt.scatter', (['lonx', 'laty'], {'s': '(14)', 'c': 'c', 'edgecolors': 'edgecolor', 'alpha': '(0.9)', 'zorder': '(5)'}), '(lonx, laty, s=14, c=c, edgecolors=edgecolor, alpha=0.9, zorder=5)\n', (7604, 7670), True, 'import matplotlib.pyplot as plt\n'), ((9277, 9389), 'matplotlib.pyplot.plot', 'plt.plot', (['flonx[position_flags != 9]', 'flaty[position_flags != 9]'], {'color': '"""#15178F"""', 'linewidth': '(1.25)', 'zorder': '(4)'}), "(flonx[position_flags != 9], flaty[position_flags != 9], color=\n '#15178F', linewidth=1.25, zorder=4)\n", (9285, 9389), True, 'import matplotlib.pyplot as plt\n'), ((9397, 9511), 'matplotlib.pyplot.scatter', 'plt.scatter', (['flonx[position_flags == 2]', 'flaty[position_flags == 2]'], {'s': '(10)', 'c': '"""m"""', 'edgecolors': '"""none"""', 'zorder': '(5)'}), "(flonx[position_flags == 2], flaty[position_flags == 2], s=10, c\n ='m', edgecolors='none', zorder=5)\n", (9408, 9511), True, 'import matplotlib.pyplot as plt\n'), ((9518, 9638), 'matplotlib.pyplot.scatter', 'plt.scatter', (['flonx[position_flags == 1]', 'flaty[position_flags == 1]'], {'s': '(10)', 'c': '"""#15178F"""', 'edgecolors': '"""none"""', 'zorder': '(6)'}), "(flonx[position_flags == 1], flaty[position_flags == 1], s=10, c\n ='#15178F', edgecolors='none', zorder=6)\n", (9529, 9638), True, 'import matplotlib.pyplot as plt\n'), ((27751, 27960), 'geo_tools.vert_prof_even_spacing', 'gt.vert_prof_even_spacing', (["float_data['profiles'][pi]", '"""sigma_theta"""'], {'z_coor': '"""depth"""', 'spacing': 'vert_res', 'interp_method': '"""linear"""', 'extrap': '"""NaN"""', 'top': 'depth_lim[0]', 'bottom': 'depth_lim[1]', 'verbose_error': '(True)'}), "(float_data['profiles'][pi], 'sigma_theta', z_coor\n ='depth', spacing=vert_res, interp_method='linear', extrap='NaN', top=\n depth_lim[0], bottom=depth_lim[1], verbose_error=True)\n", (27776, 27960), True, 'import geo_tools as gt\n'), ((28802, 28902), 'geo_tools.profile_interp', 'gt.profile_interp', (['sorted_param', 'sorted_sigma_theta', 'z_vec'], {'method': '"""linear"""', 'out_of_bounds': '"""NaN"""'}), "(sorted_param, sorted_sigma_theta, z_vec, method='linear',\n out_of_bounds='NaN')\n", (28819, 28902), True, 'import geo_tools as gt\n'), ((28977, 29077), 'geo_tools.profile_interp', 'gt.profile_interp', (['sorted_depth', 'sorted_sigma_theta', 'z_vec'], {'method': '"""linear"""', 'out_of_bounds': '"""NaN"""'}), "(sorted_depth, sorted_sigma_theta, z_vec, 
method='linear',\n out_of_bounds='NaN')\n", (28994, 29077), True, 'import geo_tools as gt\n'), ((30025, 30045), 'numpy.array', 'np.array', (['depth_data'], {}), '(depth_data)\n', (30033, 30045), True, 'import numpy as np\n'), ((31137, 31146), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (31144, 31146), True, 'import matplotlib.pyplot as plt\n'), ((31309, 31318), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (31316, 31318), True, 'import matplotlib.pyplot as plt\n'), ((31875, 31899), 'numpy.abs', 'np.abs', (['specified_levels'], {}), '(specified_levels)\n', (31881, 31899), True, 'import numpy as np\n'), ((33459, 33529), 'matplotlib.pyplot.plot', 'plt.plot', (['[dt, dt]', '[*depth_lim]'], {'color': '"""0.2"""', 'linewidth': '(0.8)', 'zorder': '(3)'}), "([dt, dt], [*depth_lim], color='0.2', linewidth=0.8, zorder=3)\n", (33467, 33529), True, 'import matplotlib.pyplot as plt\n'), ((34507, 34537), 'numpy.array', 'np.array', (['datetime_coord_profs'], {}), '(datetime_coord_profs)\n', (34515, 34537), True, 'import numpy as np\n'), ((34603, 34633), 'numpy.array', 'np.array', (['datetime_coord_profs'], {}), '(datetime_coord_profs)\n', (34611, 34633), True, 'import numpy as np\n'), ((34694, 34724), 'numpy.array', 'np.array', (['datetime_coord_profs'], {}), '(datetime_coord_profs)\n', (34702, 34724), True, 'import numpy as np\n'), ((34740, 34758), 'numpy.array', 'np.array', (['mld_data'], {}), '(mld_data)\n', (34748, 34758), True, 'import numpy as np\n'), ((35932, 35962), 'numpy.array', 'np.array', (['datetime_coord_daily'], {}), '(datetime_coord_daily)\n', (35940, 35962), True, 'import numpy as np\n'), ((36028, 36058), 'numpy.array', 'np.array', (['datetime_coord_daily'], {}), '(datetime_coord_daily)\n', (36036, 36058), True, 'import numpy as np\n'), ((36614, 36623), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (36621, 36623), True, 'import matplotlib.pyplot as plt\n'), ((36637, 36667), 'numpy.array', 'np.array', (['datetime_coord_daily'], {}), '(datetime_coord_daily)\n', (36645, 36667), True, 'import numpy as np\n'), ((37796, 37805), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (37803, 37805), True, 'import matplotlib.pyplot as plt\n'), ((38469, 38478), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (38476, 38478), True, 'import matplotlib.pyplot as plt\n'), ((39854, 39863), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (39861, 39863), True, 'import matplotlib.pyplot as plt\n'), ((40008, 40017), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (40015, 40017), True, 'import matplotlib.pyplot as plt\n'), ((47161, 47170), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (47168, 47170), True, 'import matplotlib.pyplot as plt\n'), ((48497, 48528), 'numpy.array', 'np.array', (["compiled_obs['types']"], {}), "(compiled_obs['types'])\n", (48505, 48528), True, 'import numpy as np\n'), ((48744, 48878), 'matplotlib.pyplot.scatter', 'plt.scatter', (['lonx', 'laty'], {'s': '(4.0)', 'marker': 'obs_type_markers[t_idx]', 'facecolor': '"""none"""', 'edgecolors': 'season_colors[s_idx]', 'linewidths': '(0.5)'}), "(lonx, laty, s=4.0, marker=obs_type_markers[t_idx], facecolor=\n 'none', edgecolors=season_colors[s_idx], linewidths=0.5)\n", (48755, 48878), True, 'import matplotlib.pyplot as plt\n'), ((49041, 49159), 'matplotlib.pyplot.scatter', 'plt.scatter', (['lonx', 'laty'], {'s': '(4.0)', 'marker': 'obs_type_markers[t_idx]', 'facecolor': 'season_colors[s_idx]', 'edgecolors': '"""none"""'}), "(lonx, laty, s=4.0, marker=obs_type_markers[t_idx], facecolor=\n 
season_colors[s_idx], edgecolors='none')\n", (49052, 49159), True, 'import matplotlib.pyplot as plt\n'), ((49442, 49451), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (49449, 49451), True, 'import matplotlib.pyplot as plt\n'), ((58549, 58558), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (58556, 58558), True, 'import matplotlib.pyplot as plt\n'), ((3563, 3658), 'matplotlib.colors.LinearSegmentedColormap.from_list', 'mcolors.LinearSegmentedColormap.from_list', ([], {'name': 'None', 'colors': 'cmap_colors', 'N': '(250)', 'gamma': '(1.3)'}), '(name=None, colors=cmap_colors, N=\n 250, gamma=1.3)\n', (3604, 3658), True, 'import matplotlib.colors as mcolors\n'), ((10100, 10143), 'numpy.unique', 'np.unique', (['mo_yr_strings'], {'return_index': '(True)'}), '(mo_yr_strings, return_index=True)\n', (10109, 10143), True, 'import numpy as np\n'), ((10180, 10203), 'numpy.sort', 'np.sort', (['unique_indices'], {}), '(unique_indices)\n', (10187, 10203), True, 'import numpy as np\n'), ((12421, 12430), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (12428, 12430), True, 'import matplotlib.pyplot as plt\n'), ((12450, 12478), 'numpy.arange', 'np.arange', (['open_sic', '(101)', '(10)'], {}), '(open_sic, 101, 10)\n', (12459, 12478), True, 'import numpy as np\n'), ((29418, 29480), 'time_tools.convert_14_to_tuple', 'tt.convert_14_to_tuple', (["float_data['profiles'][pi]['datetime']"], {}), "(float_data['profiles'][pi]['datetime'])\n", (29440, 29480), True, 'import time_tools as tt\n'), ((37720, 37745), 'numpy.array', 'np.array', (['explicit_yticks'], {}), '(explicit_yticks)\n', (37728, 37745), True, 'import numpy as np\n'), ((38406, 38415), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (38413, 38415), True, 'import matplotlib.pyplot as plt\n'), ((39149, 39158), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (39156, 39158), True, 'import matplotlib.pyplot as plt\n'), ((39203, 39212), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (39210, 39212), True, 'import matplotlib.pyplot as plt\n'), ((39339, 39348), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (39346, 39348), True, 'import matplotlib.pyplot as plt\n'), ((39392, 39401), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (39399, 39401), True, 'import matplotlib.pyplot as plt\n'), ((39457, 39466), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (39464, 39466), True, 'import matplotlib.pyplot as plt\n'), ((39576, 39585), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (39583, 39585), True, 'import matplotlib.pyplot as plt\n'), ((39666, 39675), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (39673, 39675), True, 'import matplotlib.pyplot as plt\n'), ((48608, 48638), 'numpy.array', 'np.array', (["compiled_obs['lons']"], {}), "(compiled_obs['lons'])\n", (48616, 48638), True, 'import numpy as np\n'), ((48651, 48681), 'numpy.array', 'np.array', (["compiled_obs['lats']"], {}), "(compiled_obs['lats'])\n", (48659, 48681), True, 'import numpy as np\n'), ((6836, 6873), 'numpy.where', 'np.where', (['(float_dates - date_int == 0)'], {}), '(float_dates - date_int == 0)\n', (6844, 6873), True, 'import numpy as np\n'), ((8238, 8294), 'time_tools.convert_8_int_to_tuple', 'tt.convert_8_int_to_tuple', (['float_dates[recent_day_index]'], {}), '(float_dates[recent_day_index])\n', (8263, 8294), True, 'import time_tools as tt\n'), ((8814, 8941), 'matplotlib.pyplot.plot', 'plt.plot', (['[lonx - cross_lonx, lonx + cross_lonx]', '[laty, laty]'], {'color': 'c', 'linestyle': '"""solid"""', 'linewidth': 
'cross_width', 'zorder': '(4)'}), "([lonx - cross_lonx, lonx + cross_lonx], [laty, laty], color=c,\n linestyle='solid', linewidth=cross_width, zorder=4)\n", (8822, 8941), True, 'import matplotlib.pyplot as plt\n'), ((8951, 9078), 'matplotlib.pyplot.plot', 'plt.plot', (['[lonx, lonx]', '[laty - cross_laty, laty + cross_laty]'], {'color': 'c', 'linestyle': '"""solid"""', 'linewidth': 'cross_width', 'zorder': '(4)'}), "([lonx, lonx], [laty - cross_laty, laty + cross_laty], color=c,\n linestyle='solid', linewidth=cross_width, zorder=4)\n", (8959, 9078), True, 'import matplotlib.pyplot as plt\n'), ((9767, 9809), 'time_tools.convert_14_to_tuple', 'tt.convert_14_to_tuple', (['float_datetimes[n]'], {}), '(float_datetimes[n])\n', (9789, 9809), True, 'import time_tools as tt\n'), ((10569, 10712), 'matplotlib.pyplot.text', 'plt.text', (['(lonx_to_label[pt] + 0.000625 * width)', '(laty_to_label[pt] - 0.026 * height)', 'mo_yr_strings_to_label[pt]'], {'fontsize': '(7)', 'color': '"""#15178F"""'}), "(lonx_to_label[pt] + 0.000625 * width, laty_to_label[pt] - 0.026 *\n height, mo_yr_strings_to_label[pt], fontsize=7, color='#15178F')\n", (10577, 10712), True, 'import matplotlib.pyplot as plt\n'), ((10837, 10980), 'matplotlib.pyplot.text', 'plt.text', (['(lonx_to_label[pt] + 0.000625 * width)', '(laty_to_label[pt] + 0.017 * height)', 'mo_yr_strings_to_label[pt]'], {'fontsize': '(7)', 'color': '"""#15178F"""'}), "(lonx_to_label[pt] + 0.000625 * width, laty_to_label[pt] + 0.017 *\n height, mo_yr_strings_to_label[pt], fontsize=7, color='#15178F')\n", (10845, 10980), True, 'import matplotlib.pyplot as plt\n'), ((12310, 12319), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (12317, 12319), True, 'import matplotlib.pyplot as plt\n'), ((13048, 13057), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (13055, 13057), True, 'import matplotlib.pyplot as plt\n'), ((13142, 13151), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (13149, 13151), True, 'import matplotlib.pyplot as plt\n'), ((13213, 13222), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (13220, 13222), True, 'import matplotlib.pyplot as plt\n'), ((28100, 28119), 'numpy.isnan', 'np.isnan', (['obs_param'], {}), '(obs_param)\n', (28108, 28119), True, 'import numpy as np\n'), ((28120, 28145), 'numpy.isnan', 'np.isnan', (['obs_sigma_theta'], {}), '(obs_sigma_theta)\n', (28128, 28145), True, 'import numpy as np\n'), ((28541, 28680), 'numpy.arange', 'np.arange', (['(0)', '((density_lim[1] - density_lim[0]) ** density_power_scale)', '((density_lim[1] - density_lim[0]) ** density_power_scale / 200)'], {}), '(0, (density_lim[1] - density_lim[0]) ** density_power_scale, (\n density_lim[1] - density_lim[0]) ** density_power_scale / 200)\n', (28550, 28680), True, 'import numpy as np\n'), ((36746, 36765), 'numpy.array', 'np.array', (['sic_coord'], {}), '(sic_coord)\n', (36754, 36765), True, 'import numpy as np\n'), ((37585, 37594), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (37592, 37594), True, 'import matplotlib.pyplot as plt\n'), ((38874, 38883), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (38881, 38883), True, 'import matplotlib.pyplot as plt\n'), ((42400, 42409), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (42407, 42409), True, 'import matplotlib.pyplot as plt\n'), ((8130, 8166), 'numpy.where', 'np.where', (['(float_dates - date_int < 0)'], {}), '(float_dates - date_int < 0)\n', (8138, 8166), True, 'import numpy as np\n'), ((29621, 29642), 'numpy.isfinite', 'np.isfinite', (['data_vec'], {}), '(data_vec)\n', 
(29632, 29642), True, 'import numpy as np\n'), ((29693, 29714), 'numpy.isfinite', 'np.isfinite', (['data_vec'], {}), '(data_vec)\n', (29704, 29714), True, 'import numpy as np\n'), ((37379, 37400), 'numpy.array', 'np.array', (['density_lim'], {}), '(density_lim)\n', (37387, 37400), True, 'import numpy as np\n'), ((33582, 33603), 'numpy.array', 'np.array', (['density_lim'], {}), '(density_lim)\n', (33590, 33603), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 27 14:24:54 2017
@author: IACJ
"""
import os
from os import path
from os.path import expanduser
from numpy import genfromtxt
import numpy as np
from numpy import array, zeros, argmin, inf
from numpy import *
from US_DTW import US_DTW
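# US_DTW is a project-local module (uniform-scaling DTW); the classifier
# further below only relies on each US_DTW instance exposing a
# `resultDistance` value and a `paths` list after construction.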
# classify using kNN
def kNNClassify_ED(newInput, dataSet, labels, k):
    numSamples = dataSet.shape[0] # shape[0] is the number of rows
    ## step 1: calculate the Euclidean distance to every training sample
diff = tile(newInput, (numSamples, 1)) - dataSet # Subtract element-wise
squaredDiff = diff ** 2 # squared for the subtract
squaredDist = sum(squaredDiff, axis = 1) # sum is performed by row
distance = squaredDist ** 0.5
## step 2: sort the distance
    # argsort() returns the indices that would sort an array in ascending order
    sortedDistIndices = argsort(distance)
    classCount = {} # dictionary used to count the votes for each label
for i in range(k):
## step 3: choose the min k distance
voteLabel = labels[sortedDistIndices[i]]
## step 4: count the times labels occur
classCount[voteLabel] = classCount.get(voteLabel, 0) + 1
## step 5: the max voted class will return
maxCount = 0
for key, value in classCount.items():
if value > maxCount:
maxCount = value
maxIndex = key
return maxIndex
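def _example_knn_ed():
    # Minimal sanity check for kNNClassify_ED (illustrative only; the data
    # below is made up). The query is nearest to the first training series,
    # so 1-NN with Euclidean distance should vote for label 0.
    trainX = array([[1.0, 2.0, 3.0],
                    [1.1, 2.1, 3.1],
                    [9.0, 9.0, 9.0]])
    trainY = array([0, 0, 1])
    assert kNNClassify_ED(array([1.0, 2.0, 3.0]), trainX, trainY, 1) == 0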
def kNNClassify_DTW(newInput, dataSet, labels, k):
    numSamples = dataSet.shape[0] # shape[0] is the number of rows
    ## step 1: calculate the DTW distance to every training sample
distance = np.zeros(numSamples)
for i in range(numSamples):
distance[i] = dtw(newInput,dataSet[i])
## step 2: sort the distance
    # argsort() returns the indices that would sort an array in ascending order
    sortedDistIndices = argsort(distance)
    classCount = {} # dictionary used to count the votes for each label
for i in range(k):
## step 3: choose the min k distance
voteLabel = labels[sortedDistIndices[i]]
## step 4: count the times labels occur
classCount[voteLabel] = classCount.get(voteLabel, 0) + 1
## step 5: the max voted class will return
maxCount = 0
for key, value in classCount.items():
if value > maxCount:
maxCount = value
maxIndex = key
return maxIndex
def kNNClassify_US_DTW(newInput, dataSet, labels, k):
    numSamples = dataSet.shape[0] # shape[0] is the number of rows
    ## step 1: calculate the US-DTW distance to every training sample
distance = np.zeros(numSamples)
for i in range(numSamples):
us_dtw = US_DTW(newInput,dataSet[i])
print(i,len(us_dtw.paths))
distance[i] = us_dtw.resultDistance
## step 2: sort the distance
    # argsort() returns the indices that would sort an array in ascending order
    sortedDistIndices = argsort(distance)
    classCount = {} # dictionary used to count the votes for each label
for i in range(k):
## step 3: choose the min k distance
voteLabel = labels[sortedDistIndices[i]]
## step 4: count the times labels occur
classCount[voteLabel] = classCount.get(voteLabel, 0) + 1
## step 5: the max voted class will return
maxCount = 0
for key, value in classCount.items():
if value > maxCount:
maxCount = value
maxIndex = key
return maxIndex
def dtw(x, y):
    """
    Computes the Dynamic Time Warping (DTW) distance between two 1-D sequences.
    :param array x: sequence of length N1
    :param array y: sequence of length N2
    The local cost measure is the absolute difference |x[i] - y[j]|.
    Returns the minimum cumulative warping distance between x and y.
    """
assert len(x)
assert len(y)
r, c = len(x), len(y)
D0 = zeros((r + 1, c + 1))
D0[0, 1:] = inf
D0[1:, 0] = inf
D1 = D0[1:, 1:] # view
for i in range(r):
for j in range(c):
D1[i, j] = abs(x[i]-y[j])
C = D1.copy()
for i in range(r):
for j in range(c):
D1[i, j] += min(D0[i, j], D0[i, j+1], D0[i+1, j])
return D1[-1, -1]
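def _example_dtw():
    # Worked example for dtw() (illustrative only). Warping lets the single
    # 2 in x align with both 2s in y, so the distance is 0 even though the
    # sequences have different lengths; plain Euclidean distance could not
    # compare them at all.
    assert dtw([1, 2, 3], [1, 2, 2, 3]) == 0
    # Changing one aligned value by 1 adds exactly that cost to the best path.
    assert dtw([1, 2, 3], [1, 2, 2, 4]) == 1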
# Function that loads one UCR dataset (train and test splits)
def load_dataset(dataset_name, dataset_folder):
dataset_path = path.join(dataset_folder, dataset_name)
train_file_path = path.join(dataset_path, '{}_TRAIN'.format(dataset_name))
test_file_path = path.join(dataset_path, '{}_TEST'.format(dataset_name))
# training data
train_raw_arr = genfromtxt(train_file_path, delimiter=',')
train_data = train_raw_arr[:, 1:]
train_labels = train_raw_arr[:, 0] - 1
    # subtract one so the class labels start at 0 instead of 1
    # test data
test_raw_arr = genfromtxt(test_file_path, delimiter=',')
test_data = test_raw_arr[:, 1:]
test_labels = test_raw_arr[:, 0] - 1
return train_data, train_labels, test_data, test_labels
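# Expected UCR file layout (inferred from the parsing above; values here are
# invented): plain CSV with one series per row, the integer class label in
# column 0 and the time-series values in the remaining columns, e.g.
#   2,0.4713,0.4625,0.4380,...
#   1,0.5021,0.5107,0.4983,...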
if __name__ == '__main__':
print("Program Begin")
    ########## Use the UCR datasets ###############
ucr_dataset_base_folder = expanduser('~/UCR_TS_Archive_2015')
dirs = os.listdir(ucr_dataset_base_folder)
    for dataset_dir in dirs:   # avoid shadowing the built-in dir()
        print (dataset_dir,end=" : \t")
        ucr_dataset_name = dataset_dir
train_data, train_labels, test_data, test_labels = load_dataset(ucr_dataset_name,ucr_dataset_base_folder)
print(train_data.shape,train_labels.shape,test_data.shape,test_labels.shape)
        ########## Run 1NN_ED ###################
Trues = 0
Falses = 0
for i in range (test_data.shape[0]):
x = test_data[i]
y = test_labels[i]
outputLabel = kNNClassify_ED(x, train_data, train_labels, 1)
# print (i,":\tpredict : ", outputLabel,"\tGroundTruth : ",y,"\t",outputLabel==y)
if (outputLabel==y):
Trues += 1
else :
Falses += 1
print ("1NN_ED :",Trues/(Trues+Falses))
#########################################
        train_data = np.tile(train_data, 2)  # repeat each training series end-to-end, doubling its length
        ########## Run 1NN_US-DTW ###################
Trues = 0
Falses = 0
for i in range (test_data.shape[0]):
x = test_data[i]
y = test_labels[i]
outputLabel = kNNClassify_US_DTW(x, train_data, train_labels, 1)
print (i,":\tpredict : ", outputLabel,"\tGroundTruth : ",y,"\t",outputLabel==y)
if (outputLabel==y):
Trues += 1
else :
Falses += 1
print ("1NN_DTW :",Trues/(Trues+Falses))
################
########## 使用 1NN_DTW ###################
Trues = 0
Falses = 0
for i in range (test_data.shape[0]):
x = test_data[i]
y = test_labels[i]
outputLabel = kNNClassify_DTW(x, train_data, train_labels, 1)
# print (i,":\tpredict : ", outputLabel,"\tGroundTruth : ",y,"\t",outputLabel==y)
if (outputLabel==y):
Trues += 1
else :
Falses += 1
print ("1NN_DTW :",Trues/(Trues+Falses))
#########################################
print()
|
[
"os.path.join",
"numpy.zeros",
"numpy.genfromtxt",
"numpy.tile",
"US_DTW.US_DTW",
"os.path.expanduser",
"os.listdir"
] |
[((1638, 1658), 'numpy.zeros', 'np.zeros', (['numSamples'], {}), '(numSamples)\n', (1646, 1658), True, 'import numpy as np\n'), ((2650, 2670), 'numpy.zeros', 'np.zeros', (['numSamples'], {}), '(numSamples)\n', (2658, 2670), True, 'import numpy as np\n'), ((3916, 3937), 'numpy.zeros', 'zeros', (['(r + 1, c + 1)'], {}), '((r + 1, c + 1))\n', (3921, 3937), False, 'from numpy import array, zeros, argmin, inf\n'), ((4330, 4369), 'os.path.join', 'path.join', (['dataset_folder', 'dataset_name'], {}), '(dataset_folder, dataset_name)\n', (4339, 4369), False, 'from os import path\n'), ((4567, 4609), 'numpy.genfromtxt', 'genfromtxt', (['train_file_path'], {'delimiter': '""","""'}), "(train_file_path, delimiter=',')\n", (4577, 4609), False, 'from numpy import genfromtxt\n'), ((4803, 4844), 'numpy.genfromtxt', 'genfromtxt', (['test_file_path'], {'delimiter': '""","""'}), "(test_file_path, delimiter=',')\n", (4813, 4844), False, 'from numpy import genfromtxt\n'), ((5126, 5161), 'os.path.expanduser', 'expanduser', (['"""~/UCR_TS_Archive_2015"""'], {}), "('~/UCR_TS_Archive_2015')\n", (5136, 5161), False, 'from os.path import expanduser\n'), ((5179, 5214), 'os.listdir', 'os.listdir', (['ucr_dataset_base_folder'], {}), '(ucr_dataset_base_folder)\n', (5189, 5214), False, 'import os\n'), ((2720, 2748), 'US_DTW.US_DTW', 'US_DTW', (['newInput', 'dataSet[i]'], {}), '(newInput, dataSet[i])\n', (2726, 2748), False, 'from US_DTW import US_DTW\n'), ((6111, 6133), 'numpy.tile', 'np.tile', (['train_data', '(2)'], {}), '(train_data, 2)\n', (6118, 6133), True, 'import numpy as np\n')]
|
# Generated by Django 2.0.7 on 2018-09-20 10:17
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('hivs_administrative', '0007_remove_areatype'),
]
operations = [
migrations.CreateModel(
name='AreaType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True, verbose_name='name')),
('timestamp', models.DateTimeField(auto_now_add=True, verbose_name='created')),
('last_modified', models.DateTimeField(auto_now=True, null=True, verbose_name='last modified')),
],
options={
'verbose_name': 'Area type',
'verbose_name_plural': 'Area types',
'abstract': False,
},
),
migrations.AddField(
model_name='area',
name='area_type',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='areas', to='hivs_administrative.AreaType', verbose_name='area type'),
),
]
|
[
"django.db.models.ForeignKey",
"django.db.models.DateTimeField",
"django.db.models.CharField",
"django.db.models.AutoField"
] |
[((1073, 1239), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'related_name': '"""areas"""', 'to': '"""hivs_administrative.AreaType"""', 'verbose_name': '"""area type"""'}), "(null=True, on_delete=django.db.models.deletion.SET_NULL,\n related_name='areas', to='hivs_administrative.AreaType', verbose_name=\n 'area type')\n", (1090, 1239), False, 'from django.db import migrations, models\n'), ((374, 467), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (390, 467), False, 'from django.db import migrations, models\n'), ((491, 557), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'unique': '(True)', 'verbose_name': '"""name"""'}), "(max_length=255, unique=True, verbose_name='name')\n", (507, 557), False, 'from django.db import migrations, models\n'), ((590, 653), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""created"""'}), "(auto_now_add=True, verbose_name='created')\n", (610, 653), False, 'from django.db import migrations, models\n'), ((690, 766), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'null': '(True)', 'verbose_name': '"""last modified"""'}), "(auto_now=True, null=True, verbose_name='last modified')\n", (710, 766), False, 'from django.db import migrations, models\n')]
|
from django.db import models
from .slugify import slugify  # custom slugify that supports Persian slugs
from django.urls import reverse
class Catalog(models.Model):
    name = models.CharField(max_length=255, unique=True)
    slug = models.SlugField(max_length=255, unique=True, blank=True)
publisher = models.CharField(max_length=255)
description = models.TextField(blank=True)
pub_date = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.name
    def save(self, *args, **kwargs):
        self.slug = slugify(self.name)
        return super(Catalog, self).save(*args, **kwargs)
def get_absolute_url(self):
        return reverse('shop:product_list_by_catalog', args=[self.slug, self.id])
class Category(models.Model):
    catalog = models.ForeignKey(Catalog, related_name='catalog_categories', on_delete=models.CASCADE)
    parent = models.ForeignKey('self', blank=True, null=True, related_name='children',
                               on_delete=models.DO_NOTHING)
    name = models.CharField(max_length=150, unique=True)
    slug = models.SlugField(max_length=150, blank=True, unique=True)
description = models.TextField(blank=True)
    def save(self, *args, **kwargs):
        self.slug = slugify(self.name)
        return super(Category, self).save(*args, **kwargs)
def __str__(self):
if self.parent:
return '{}:{}-{}'.format(self.catalog.name,
self.parent.name,
self.name)
        return '{}:{}'.format(self.catalog, self.name)
def get_absolute_url(self):
        return reverse('shop:product_list_by_category', args=[self.slug, self.id])
class Meta:
verbose_name_plural = 'categories'
class Product(models.Model):
    category = models.ForeignKey(Category, related_name='category_products', on_delete=models.CASCADE)
    name = models.CharField(max_length=40, unique=True)
    slug = models.SlugField(max_length=300, unique=True, blank=True)
    description = models.TextField()
    image = models.ImageField(upload_to='products', blank=True)
    manufacturer = models.CharField(max_length=200, blank=True)
    price = models.DecimalField(max_digits=6, decimal_places=2)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
available = models.BooleanField(default=True)
class Meta:
ordering = ['-created']
def __str__(self):
return self.name
    def save(self, *args, **kwargs):
        self.slug = slugify(self.name)
        return super(Product, self).save(*args, **kwargs)
def get_absolute_url(self):
        return reverse('shop:product_detail', args=[self.slug, self.id])
class ProductAttribute(models.Model):
name = models.CharField(max_length=300)
description = models.TextField(blank=True)
def __str__(self):
return self.name
class ProductDetail(models.Model):
    product = models.ForeignKey(Product, related_name='details', on_delete=models.CASCADE)
    attribute = models.ForeignKey(ProductAttribute, on_delete=models.CASCADE)
value = models.CharField(max_length=500)
description = models.TextField(blank=True)
def __str__(self):
return '{}:{}-{}'.format(self.product,
self.attribute,
self.value)
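# For reference only (assumed, not part of this file): the reverse() calls in
# the models above imply 'shop' URL patterns along these lines:
#
#   app_name = 'shop'
#   urlpatterns = [
#       path('catalog/<slug:slug>/<int:id>/', views.product_list_by_catalog,
#            name='product_list_by_catalog'),
#       path('category/<slug:slug>/<int:id>/', views.product_list_by_category,
#            name='product_list_by_category'),
#       path('product/<slug:slug>/<int:id>/', views.product_detail,
#            name='product_detail'),
#   ]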
|
[
"django.db.models.TextField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.SlugField",
"django.db.models.BooleanField",
"django.db.models.ImageField",
"django.db.models.DecimalField",
"django.urls.reverse",
"django.db.models.DateTimeField"
] |
[((285, 330), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)', 'unique': '(True)'}), '(max_length=255, unique=True)\n', (301, 330), False, 'from django.db import models\n'), ((342, 399), 'django.db.models.SlugField', 'models.SlugField', ([], {'max_length': '(255)', 'unique': '(True)', 'blank': '(True)'}), '(max_length=255, unique=True, blank=True)\n', (358, 399), False, 'from django.db import models\n'), ((416, 448), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (432, 448), False, 'from django.db import models\n'), ((464, 492), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (480, 492), False, 'from django.db import models\n'), ((508, 547), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (528, 547), False, 'from django.db import models\n'), ((851, 943), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Catalog'], {'related_name': '"""catalog_categories"""', 'on_delete': 'models.CASCADE'}), "(Catalog, related_name='catalog_categories', on_delete=\n models.CASCADE)\n", (868, 943), False, 'from django.db import models\n'), ((953, 1059), 'django.db.models.ForeignKey', 'models.ForeignKey', (['"""self"""'], {'blank': '(True)', 'null': '(True)', 'related_name': '"""children"""', 'on_delete': 'models.DO_NOTHING'}), "('self', blank=True, null=True, related_name='children',\n on_delete=models.DO_NOTHING)\n", (970, 1059), False, 'from django.db import models\n'), ((1090, 1135), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(150)', 'unique': '(True)'}), '(max_length=150, unique=True)\n', (1106, 1135), False, 'from django.db import models\n'), ((1149, 1206), 'django.db.models.SlugField', 'models.SlugField', ([], {'max_length': '(150)', 'blank': '(True)', 'unique': '(True)'}), '(max_length=150, blank=True, unique=True)\n', (1165, 1206), False, 'from django.db import models\n'), ((1223, 1251), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (1239, 1251), False, 'from django.db import models\n'), ((1768, 1860), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {'related_name': '"""category_products"""', 'on_delete': 'models.CASCADE'}), "(Category, related_name='category_products', on_delete=\n models.CASCADE)\n", (1785, 1860), False, 'from django.db import models\n'), ((1869, 1913), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(40)', 'unique': '(True)'}), '(max_length=40, unique=True)\n', (1885, 1913), False, 'from django.db import models\n'), ((1926, 1983), 'django.db.models.SlugField', 'models.SlugField', ([], {'max_length': '(300)', 'unique': '(True)', 'blank': '(True)'}), '(max_length=300, unique=True, blank=True)\n', (1942, 1983), False, 'from django.db import models\n'), ((2002, 2020), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2018, 2020), False, 'from django.db import models\n'), ((2037, 2088), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""products"""', 'blank': '(True)'}), "(upload_to='products', blank=True)\n", (2054, 2088), False, 'from django.db import models\n'), ((2106, 2150), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(200)', 'blank': '(True)'}), '(max_length=200, blank=True)\n', (2122, 2150), False, 'from django.db import models\n'), ((2168, 2219), 'django.db.models.DecimalField', 
'models.DecimalField', ([], {'max_digits': '(6)', 'decimal_places': '(2)'}), '(max_digits=6, decimal_places=2)\n', (2187, 2219), False, 'from django.db import models\n'), ((2237, 2276), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (2257, 2276), False, 'from django.db import models\n'), ((2293, 2328), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (2313, 2328), False, 'from django.db import models\n'), ((2345, 2378), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (2364, 2378), False, 'from django.db import models\n'), ((2724, 2756), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(300)'}), '(max_length=300)\n', (2740, 2756), False, 'from django.db import models\n'), ((2772, 2800), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (2788, 2800), False, 'from django.db import models\n'), ((2894, 2970), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Product'], {'related_name': '"""details"""', 'on_delete': 'models.CASCADE'}), "(Product, related_name='details', on_delete=models.CASCADE)\n", (2911, 2970), False, 'from django.db import models\n'), ((2987, 3048), 'django.db.models.ForeignKey', 'models.ForeignKey', (['ProductAttribute'], {'on_delete': 'models.CASCADE'}), '(ProductAttribute, on_delete=models.CASCADE)\n', (3004, 3048), False, 'from django.db import models\n'), ((3064, 3096), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(500)'}), '(max_length=500)\n', (3080, 3096), False, 'from django.db import models\n'), ((3112, 3140), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (3128, 3140), False, 'from django.db import models\n'), ((736, 802), 'django.urls.reverse', 'reverse', (['"""shop:product_list_by_catalog"""'], {'args': '[self.slug, self.id]'}), "('shop:product_list_by_catalog', args=[self.slug, self.id])\n", (743, 802), False, 'from django.urls import reverse\n'), ((1595, 1662), 'django.urls.reverse', 'reverse', (['"""shop:product_list_by_category"""'], {'args': '[self.slug, self.id]'}), "('shop:product_list_by_category', args=[self.slug, self.id])\n", (1602, 1662), False, 'from django.urls import reverse\n'), ((2610, 2667), 'django.urls.reverse', 'reverse', (['"""shop:product_detail"""'], {'args': '[self.slug, self.id]'}), "('shop:product_detail', args=[self.slug, self.id])\n", (2617, 2667), False, 'from django.urls import reverse\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, <NAME> <<EMAIL>>
# (c) 2017, <NAME> <<EMAIL>>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
module: manageiq_policies
short_description: Management of resource policy_profiles in ManageIQ.
extends_documentation_fragment:
- community.general.manageiq
author: <NAME> (@dkorn)
description:
- The manageiq_policies module supports adding and deleting policy_profiles in ManageIQ.
options:
state:
type: str
description:
- absent - policy_profiles should not exist,
- present - policy_profiles should exist,
- list - list current policy_profiles and policies.
choices: ['absent', 'present', 'list']
default: 'present'
policy_profiles:
type: list
description:
- list of dictionaries, each includes the policy_profile 'name' key.
- required if state is present or absent.
resource_type:
type: str
description:
- the type of the resource to which the profile should be [un]assigned
required: true
choices: ['provider', 'host', 'vm', 'blueprint', 'category', 'cluster',
'data store', 'group', 'resource pool', 'service', 'service template',
'template', 'tenant', 'user']
resource_name:
type: str
description:
- the name of the resource to which the profile should be [un]assigned
required: true
'''
EXAMPLES = '''
- name: Assign new policy_profile for a provider in ManageIQ
community.general.manageiq_policies:
resource_name: 'EngLab'
resource_type: 'provider'
policy_profiles:
- name: openscap profile
manageiq_connection:
url: 'http://127.0.0.1:3000'
username: 'admin'
password: '<PASSWORD>'
validate_certs: False
- name: Unassign a policy_profile for a provider in ManageIQ
community.general.manageiq_policies:
state: absent
resource_name: 'EngLab'
resource_type: 'provider'
policy_profiles:
- name: openscap profile
manageiq_connection:
url: 'http://127.0.0.1:3000'
username: 'admin'
password: '<PASSWORD>'
validate_certs: False
- name: List current policy_profile and policies for a provider in ManageIQ
community.general.manageiq_policies:
state: list
resource_name: 'EngLab'
resource_type: 'provider'
manageiq_connection:
url: 'http://127.0.0.1:3000'
username: 'admin'
password: '<PASSWORD>'
validate_certs: False
'''
RETURN = '''
manageiq_policies:
description:
- List current policy_profile and policies for a provider in ManageIQ
returned: always
type: dict
sample: '{
"changed": false,
"profiles": [
{
"policies": [
{
"active": true,
"description": "OpenSCAP",
"name": "openscap policy"
},
{
"active": true,
"description": "Analyse incoming container images",
"name": "analyse incoming container images"
},
{
"active": true,
"description": "Schedule compliance after smart state analysis",
"name": "schedule compliance after smart state analysis"
}
],
"profile_description": "OpenSCAP profile",
"profile_name": "openscap profile"
}
]
}'
'''
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.community.general.plugins.module_utils.manageiq import ManageIQ, manageiq_argument_spec, manageiq_entities
class ManageIQPolicies(object):
"""
Object to execute policies management operations of manageiq resources.
"""
def __init__(self, manageiq, resource_type, resource_id):
self.manageiq = manageiq
self.module = self.manageiq.module
self.api_url = self.manageiq.api_url
self.client = self.manageiq.client
self.resource_type = resource_type
self.resource_id = resource_id
self.resource_url = '{api_url}/{resource_type}/{resource_id}'.format(
api_url=self.api_url,
resource_type=resource_type,
resource_id=resource_id)
def query_profile_href(self, profile):
""" Add or Update the policy_profile href field
Example:
{name: STR, ...} => {name: STR, href: STR}
"""
resource = self.manageiq.find_collection_resource_or_fail(
"policy_profiles", **profile)
return dict(name=profile['name'], href=resource['href'])
def query_resource_profiles(self):
""" Returns a set of the profile objects objects assigned to the resource
"""
url = '{resource_url}/policy_profiles?expand=resources'
try:
response = self.client.get(url.format(resource_url=self.resource_url))
except Exception as e:
msg = "Failed to query {resource_type} policies: {error}".format(
resource_type=self.resource_type,
error=e)
self.module.fail_json(msg=msg)
resources = response.get('resources', [])
# clean the returned rest api profile object to look like:
# {profile_name: STR, profile_description: STR, policies: ARR<POLICIES>}
profiles = [self.clean_profile_object(profile) for profile in resources]
return profiles
def query_profile_policies(self, profile_id):
""" Returns a set of the policy objects assigned to the resource
"""
url = '{api_url}/policy_profiles/{profile_id}?expand=policies'
try:
response = self.client.get(url.format(api_url=self.api_url, profile_id=profile_id))
except Exception as e:
msg = "Failed to query {resource_type} policies: {error}".format(
resource_type=self.resource_type,
error=e)
self.module.fail_json(msg=msg)
resources = response.get('policies', [])
# clean the returned rest api policy object to look like:
# {name: STR, description: STR, active: BOOL}
policies = [self.clean_policy_object(policy) for policy in resources]
return policies
def clean_policy_object(self, policy):
""" Clean a policy object to have human readable form of:
{
name: STR,
description: STR,
active: BOOL
}
"""
name = policy.get('name')
description = policy.get('description')
active = policy.get('active')
return dict(
name=name,
description=description,
active=active)
def clean_profile_object(self, profile):
""" Clean a profile object to have human readable form of:
{
profile_name: STR,
profile_description: STR,
policies: ARR<POLICIES>
}
"""
profile_id = profile['id']
name = profile.get('name')
description = profile.get('description')
policies = self.query_profile_policies(profile_id)
return dict(
profile_name=name,
profile_description=description,
policies=policies)
def profiles_to_update(self, profiles, action):
""" Create a list of policies we need to update in ManageIQ.
Returns:
Whether or not a change took place and a message describing the
operation executed.
"""
profiles_to_post = []
assigned_profiles = self.query_resource_profiles()
# make a list of assigned full profile names strings
# e.g. ['openscap profile', ...]
assigned_profiles_set = set([profile['profile_name'] for profile in assigned_profiles])
for profile in profiles:
assigned = profile.get('name') in assigned_profiles_set
if (action == 'unassign' and assigned) or (action == 'assign' and not assigned):
# add/update the policy profile href field
# {name: STR, ...} => {name: STR, href: STR}
profile = self.query_profile_href(profile)
profiles_to_post.append(profile)
return profiles_to_post
def assign_or_unassign_profiles(self, profiles, action):
""" Perform assign/unassign action
"""
# get a list of profiles needed to be changed
profiles_to_post = self.profiles_to_update(profiles, action)
if not profiles_to_post:
return dict(
changed=False,
msg="Profiles {profiles} already {action}ed, nothing to do".format(
action=action,
profiles=profiles))
# try to assign or unassign profiles to resource
url = '{resource_url}/policy_profiles'.format(resource_url=self.resource_url)
try:
response = self.client.post(url, action=action, resources=profiles_to_post)
except Exception as e:
msg = "Failed to {action} profile: {error}".format(
action=action,
error=e)
self.module.fail_json(msg=msg)
# check all entities in result to be successful
for result in response['results']:
if not result['success']:
msg = "Failed to {action}: {message}".format(
action=action,
message=result['message'])
self.module.fail_json(msg=msg)
# successfully changed all needed profiles
return dict(
changed=True,
msg="Successfully {action}ed profiles: {profiles}".format(
action=action,
profiles=profiles))
def main():
actions = {'present': 'assign', 'absent': 'unassign', 'list': 'list'}
argument_spec = dict(
policy_profiles=dict(type='list'),
resource_name=dict(required=True, type='str'),
resource_type=dict(required=True, type='str',
choices=list(manageiq_entities().keys())),
state=dict(required=False, type='str',
choices=['present', 'absent', 'list'], default='present'),
)
# add the manageiq connection arguments to the arguments
argument_spec.update(manageiq_argument_spec())
module = AnsibleModule(
argument_spec=argument_spec,
required_if=[
('state', 'present', ['policy_profiles']),
('state', 'absent', ['policy_profiles'])
],
)
policy_profiles = module.params['policy_profiles']
resource_type_key = module.params['resource_type']
resource_name = module.params['resource_name']
state = module.params['state']
# get the action and resource type
action = actions[state]
resource_type = manageiq_entities()[resource_type_key]
manageiq = ManageIQ(module)
# query resource id, fail if resource does not exist
resource_id = manageiq.find_collection_resource_or_fail(resource_type, name=resource_name)['id']
manageiq_policies = ManageIQPolicies(manageiq, resource_type, resource_id)
if action == 'list':
# return a list of current profiles for this object
current_profiles = manageiq_policies.query_resource_profiles()
res_args = dict(changed=False, profiles=current_profiles)
else:
# assign or unassign the profiles
res_args = manageiq_policies.assign_or_unassign_profiles(policy_profiles, action)
module.exit_json(**res_args)
if __name__ == "__main__":
main()
|
[
"ansible.module_utils.basic.AnsibleModule",
"ansible_collections.community.general.plugins.module_utils.manageiq.manageiq_entities",
"ansible_collections.community.general.plugins.module_utils.manageiq.manageiq_argument_spec",
"ansible_collections.community.general.plugins.module_utils.manageiq.ManageIQ"
] |
[((10614, 10759), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'argument_spec', 'required_if': "[('state', 'present', ['policy_profiles']), ('state', 'absent', [\n 'policy_profiles'])]"}), "(argument_spec=argument_spec, required_if=[('state', 'present',\n ['policy_profiles']), ('state', 'absent', ['policy_profiles'])])\n", (10627, 10759), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((11153, 11169), 'ansible_collections.community.general.plugins.module_utils.manageiq.ManageIQ', 'ManageIQ', (['module'], {}), '(module)\n', (11161, 11169), False, 'from ansible_collections.community.general.plugins.module_utils.manageiq import ManageIQ, manageiq_argument_spec, manageiq_entities\n'), ((10574, 10598), 'ansible_collections.community.general.plugins.module_utils.manageiq.manageiq_argument_spec', 'manageiq_argument_spec', ([], {}), '()\n', (10596, 10598), False, 'from ansible_collections.community.general.plugins.module_utils.manageiq import ManageIQ, manageiq_argument_spec, manageiq_entities\n'), ((11098, 11117), 'ansible_collections.community.general.plugins.module_utils.manageiq.manageiq_entities', 'manageiq_entities', ([], {}), '()\n', (11115, 11117), False, 'from ansible_collections.community.general.plugins.module_utils.manageiq import ManageIQ, manageiq_argument_spec, manageiq_entities\n'), ((10327, 10346), 'ansible_collections.community.general.plugins.module_utils.manageiq.manageiq_entities', 'manageiq_entities', ([], {}), '()\n', (10344, 10346), False, 'from ansible_collections.community.general.plugins.module_utils.manageiq import ManageIQ, manageiq_argument_spec, manageiq_entities\n')]
|
from django.shortcuts import render
# Create your views here.
from django.http import HttpResponseRedirect
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from .forms import docform
from .models import Document
def upload(request):
if request.method == 'POST':
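        # Bind the submitted data and any uploaded files to the form before validating.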
        form = docform(request.POST, request.FILES)
if form.is_valid():
            newdoc = Document(newfile=request.FILES['newfile'])
newdoc.save()
return HttpResponseRedirect(reverse('upload'))
else:
form = docform()
    return render(request, 'upload.html', {'form': form})
|
[
"django.shortcuts.render",
"django.core.urlresolvers.reverse"
] |
[((597, 627), 'django.shortcuts.render', 'render', (['request', '"""upload.html"""'], {}), "(request, 'upload.html')\n", (603, 627), False, 'from django.shortcuts import render\n'), ((531, 548), 'django.core.urlresolvers.reverse', 'reverse', (['"""upload"""'], {}), "('upload')\n", (538, 548), False, 'from django.core.urlresolvers import reverse\n')]
|
# -*- coding: utf-8 -*-
# Copyright 2018 University of Groningen
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test PTM detection and canonicalisation.
"""
import networkx as nx
import pytest
import vermouth
import vermouth.processors.canonicalize_modifications as canmod
# pylint: disable=redefined-outer-name
@pytest.fixture
def known_ptm_graphs():
"""
Provide some known PTMs in a defined order
"""
ptm_graphs = []
nh_ptm = nx.Graph(name='NH')
nh_ptm.add_nodes_from([
(0, {'atomname': 'H', 'PTM_atom': True, 'element': 'H'}),
(1, {'atomname': 'N', 'PTM_atom': False, 'element': 'N'}),
])
nh_ptm.add_edge(0, 1)
ptm_graphs.append(nh_ptm)
cooc_ptm = nx.Graph(name='COOC')
cooc_ptm.add_nodes_from([
(0, {'atomname': 'C', 'PTM_atom': False, 'element': 'C'}),
(1, {'atomname': 'O', 'PTM_atom': True, 'element': 'O'}),
(2, {'atomname': 'O', 'PTM_atom': True, 'element': 'O'}),
(3, {'atomname': 'C', 'PTM_atom': False, 'element': 'C'}),
])
cooc_ptm.add_edges_from([(0, 1), (1, 2), (2, 3)])
ptm_graphs.append(cooc_ptm)
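    # Sorted largest-first so (presumably) bigger PTM fragments are tried
    # before any smaller ones they contain.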
return sorted(ptm_graphs, key=len, reverse=True)
def make_molecule(atoms, edges):
"""
Makes molecules from atoms and edges
"""
mol = vermouth.molecule.Molecule()
mol.add_nodes_from(atoms.items())
mol.add_edges_from(edges)
return mol
@pytest.mark.parametrize('atoms, edges, expected', [
({}, [], []),
(
{
0: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1},
1: {'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 1},
2: {'atomname': 'N', 'PTM_atom': True, 'element': 'N', 'resid': 1},
},
[(0, 1), (1, 2)],
[({1, 2}, {0})]
),
(
{
0: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1},
1: {'atomname': 'H', 'PTM_atom': False, 'element': 'H', 'resid': 1},
2: {'atomname': 'N', 'PTM_atom': True, 'element': 'N', 'resid': 1},
},
[(0, 1), (1, 2)],
[({2}, {1})]
),
(
{
0: {'atomname': 'N', 'PTM_atom': True, 'element': 'N', 'resid': 1},
1: {'atomname': 'H', 'PTM_atom': False, 'element': 'H', 'resid': 1},
2: {'atomname': 'N', 'PTM_atom': True, 'element': 'N', 'resid': 1},
},
[(0, 1), (1, 2)],
[({0}, {1}), ({2}, {1})]
),
(
{
0: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1},
1: {'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 1},
2: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 2},
},
[(0, 1), (1, 2)],
[({1}, {0, 2})]
),
(
{
0: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1},
1: {'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 1},
2: {'atomname': 'N', 'PTM_atom': True, 'element': 'N', 'resid': 2},
3: {'atomname': 'CA', 'PTM_atom': False, 'element': 'N', 'resid': 2},
},
[(0, 1), (1, 2), (2, 3)],
[({1, 2}, {0, 3})]
),
])
def test_ptm_groups(atoms, edges, expected):
"""
Make sure PTM atoms are grouped correctly with appropriate anchors
"""
molecule = make_molecule(atoms, edges)
found = canmod.find_ptm_atoms(molecule)
assert expected == found
@pytest.mark.parametrize('atoms, edges, expected', [
pytest.param(
# This needs to raise a KeyError, because not all the anchors are
# covered. This is the root of #140
{
0: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1},
1: {'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 1},
2: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 2},
},
[(0, 1), (1, 2)],
[('NH', {1: 0, 2: 1})],
marks=pytest.mark.xfail(raises=KeyError, strict=True)
),
(
# Simplest case: one PTM atom for 1 residue
{
0: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1},
1: {'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 1},
},
[(0, 1)],
[('NH', {0: 1, 1: 0})]
),
(
# Two PTM atoms with a shared anchor
{
0: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1},
1: {'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 1},
2: {'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 1},
},
[(0, 1), (0, 2)],
[('NH', {0: 1, 1: 0}), ('NH', {0: 1, 2: 0})]
),
(
# Two PTM atoms with two anchors covered (?) by 2 fragments
{
0: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1},
1: {'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 1},
2: {'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 2},
3: {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 2},
},
[(0, 1), (1, 2), (2, 3)],
[('NH', {0: 1, 1: 0}), ('NH', {2: 0, 3: 1})]
),
(
# Two PTM atoms with two anchors covered by 1 fragment
{
0: {'atomname': 'C', 'PTM_atom': False, 'element': 'C', 'resid': 1},
1: {'atomname': 'O', 'PTM_atom': True, 'element': 'O', 'resid': 1},
2: {'atomname': 'O', 'PTM_atom': True, 'element': 'O', 'resid': 2},
3: {'atomname': 'C', 'PTM_atom': False, 'element': 'C', 'resid': 2},
},
[(0, 1), (1, 2), (2, 3)],
[('COOC', {0: 0, 1: 1, 2: 2, 3: 3})]
),
])
def test_identify_ptms(known_ptm_graphs, atoms, edges, expected):
"""
Make sure PTMs are identified correctly.
"""
molecule = make_molecule(atoms, edges)
ptms = canmod.find_ptm_atoms(molecule)
known_ptms = [(ptm_graph, canmod.PTMGraphMatcher(molecule, ptm_graph))
for ptm_graph in known_ptm_graphs]
found = canmod.identify_ptms(molecule, ptms, known_ptms)
found = [(ptm.name, match) for ptm, match in found]
assert found == expected
|
[
"vermouth.molecule.Molecule",
"vermouth.processors.canonicalize_modifications.PTMGraphMatcher",
"vermouth.processors.canonicalize_modifications.identify_ptms",
"networkx.Graph",
"pytest.mark.parametrize",
"vermouth.processors.canonicalize_modifications.find_ptm_atoms",
"pytest.mark.xfail"
] |
[((1899, 3393), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""atoms, edges, expected"""', "[({}, [], []), ({(0): {'atomname': 'N', 'PTM_atom': False, 'element': 'N',\n 'resid': 1}, (1): {'atomname': 'H', 'PTM_atom': True, 'element': 'H',\n 'resid': 1}, (2): {'atomname': 'N', 'PTM_atom': True, 'element': 'N',\n 'resid': 1}}, [(0, 1), (1, 2)], [({1, 2}, {0})]), ({(0): {'atomname':\n 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1}, (1): {'atomname':\n 'H', 'PTM_atom': False, 'element': 'H', 'resid': 1}, (2): {'atomname':\n 'N', 'PTM_atom': True, 'element': 'N', 'resid': 1}}, [(0, 1), (1, 2)],\n [({2}, {1})]), ({(0): {'atomname': 'N', 'PTM_atom': True, 'element':\n 'N', 'resid': 1}, (1): {'atomname': 'H', 'PTM_atom': False, 'element':\n 'H', 'resid': 1}, (2): {'atomname': 'N', 'PTM_atom': True, 'element':\n 'N', 'resid': 1}}, [(0, 1), (1, 2)], [({0}, {1}), ({2}, {1})]), ({(0):\n {'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1}, (1):\n {'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 1}, (2): {\n 'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 2}}, [(0, \n 1), (1, 2)], [({1}, {0, 2})]), ({(0): {'atomname': 'N', 'PTM_atom': \n False, 'element': 'N', 'resid': 1}, (1): {'atomname': 'H', 'PTM_atom': \n True, 'element': 'H', 'resid': 1}, (2): {'atomname': 'N', 'PTM_atom': \n True, 'element': 'N', 'resid': 2}, (3): {'atomname': 'CA', 'PTM_atom': \n False, 'element': 'N', 'resid': 2}}, [(0, 1), (1, 2), (2, 3)], [({1, 2},\n {0, 3})])]"], {}), "('atoms, edges, expected', [({}, [], []), ({(0): {\n 'atomname': 'N', 'PTM_atom': False, 'element': 'N', 'resid': 1}, (1): {\n 'atomname': 'H', 'PTM_atom': True, 'element': 'H', 'resid': 1}, (2): {\n 'atomname': 'N', 'PTM_atom': True, 'element': 'N', 'resid': 1}}, [(0, 1\n ), (1, 2)], [({1, 2}, {0})]), ({(0): {'atomname': 'N', 'PTM_atom': \n False, 'element': 'N', 'resid': 1}, (1): {'atomname': 'H', 'PTM_atom': \n False, 'element': 'H', 'resid': 1}, (2): {'atomname': 'N', 'PTM_atom': \n True, 'element': 'N', 'resid': 1}}, [(0, 1), (1, 2)], [({2}, {1})]), ({\n (0): {'atomname': 'N', 'PTM_atom': True, 'element': 'N', 'resid': 1}, (\n 1): {'atomname': 'H', 'PTM_atom': False, 'element': 'H', 'resid': 1}, (\n 2): {'atomname': 'N', 'PTM_atom': True, 'element': 'N', 'resid': 1}}, [\n (0, 1), (1, 2)], [({0}, {1}), ({2}, {1})]), ({(0): {'atomname': 'N',\n 'PTM_atom': False, 'element': 'N', 'resid': 1}, (1): {'atomname': 'H',\n 'PTM_atom': True, 'element': 'H', 'resid': 1}, (2): {'atomname': 'N',\n 'PTM_atom': False, 'element': 'N', 'resid': 2}}, [(0, 1), (1, 2)], [({1\n }, {0, 2})]), ({(0): {'atomname': 'N', 'PTM_atom': False, 'element':\n 'N', 'resid': 1}, (1): {'atomname': 'H', 'PTM_atom': True, 'element':\n 'H', 'resid': 1}, (2): {'atomname': 'N', 'PTM_atom': True, 'element':\n 'N', 'resid': 2}, (3): {'atomname': 'CA', 'PTM_atom': False, 'element':\n 'N', 'resid': 2}}, [(0, 1), (1, 2), (2, 3)], [({1, 2}, {0, 3})])])\n", (1922, 3393), False, 'import pytest\n'), ((957, 976), 'networkx.Graph', 'nx.Graph', ([], {'name': '"""NH"""'}), "(name='NH')\n", (965, 976), True, 'import networkx as nx\n'), ((1217, 1238), 'networkx.Graph', 'nx.Graph', ([], {'name': '"""COOC"""'}), "(name='COOC')\n", (1225, 1238), True, 'import networkx as nx\n'), ((1784, 1812), 'vermouth.molecule.Molecule', 'vermouth.molecule.Molecule', ([], {}), '()\n', (1810, 1812), False, 'import vermouth\n'), ((3886, 3917), 'vermouth.processors.canonicalize_modifications.find_ptm_atoms', 'canmod.find_ptm_atoms', (['molecule'], {}), '(molecule)\n', (3907, 
3917), True, 'import vermouth.processors.canonicalize_modifications as canmod\n'), ((6417, 6448), 'vermouth.processors.canonicalize_modifications.find_ptm_atoms', 'canmod.find_ptm_atoms', (['molecule'], {}), '(molecule)\n', (6438, 6448), True, 'import vermouth.processors.canonicalize_modifications as canmod\n'), ((6590, 6638), 'vermouth.processors.canonicalize_modifications.identify_ptms', 'canmod.identify_ptms', (['molecule', 'ptms', 'known_ptms'], {}), '(molecule, ptms, known_ptms)\n', (6610, 6638), True, 'import vermouth.processors.canonicalize_modifications as canmod\n'), ((6479, 6522), 'vermouth.processors.canonicalize_modifications.PTMGraphMatcher', 'canmod.PTMGraphMatcher', (['molecule', 'ptm_graph'], {}), '(molecule, ptm_graph)\n', (6501, 6522), True, 'import vermouth.processors.canonicalize_modifications as canmod\n'), ((4473, 4520), 'pytest.mark.xfail', 'pytest.mark.xfail', ([], {'raises': 'KeyError', 'strict': '(True)'}), '(raises=KeyError, strict=True)\n', (4490, 4520), False, 'import pytest\n')]
|
import pickle
import time
import codecs
import logging
import numpy as np
import torch
import torch.nn as nn
import click
from itertools import chain
from pathlib import Path
from collections import OrderedDict
from tqdm import tqdm
from torch.utils.data import DataLoader
from crf import CRF
from utils import SIGHAN, build_vocab, add_id, get_feature, token2id, id2token, pad_sents
from model import CharWordSeg
unk_token = "<UNK>"
pad_token = "<PAD>"
logging.basicConfig(format='%(levelname)s: %(asctime)s %(message)s', level=logging.DEBUG)
def test(model_path, data_path, split, batch_size, max_sent_length, cuda):
device = torch.device("cuda:0" if cuda else "cpu")
    dataset = SIGHAN(split=split, root_path=data_path)
model = CharWordSeg.load(model_path)
model = model.to(device)
model.eval()
vocab_tag = model.vocab_tag
tok2idx, idx2tok = vocab_tag['token_to_index'], vocab_tag['index_to_token']
tag2idx, idx2tag = vocab_tag['tag_to_index'], vocab_tag['index_to_tag']
data_loader = DataLoader(dataset, batch_size=batch_size)
val_loss = 0
cum_cnt = 0
with torch.no_grad():
for iter_idx, data in enumerate(data_loader):
chars_batch, tags_batch = get_feature(data)
chars_batch, chars_mask = pad_sents(chars_batch, pad_token, max_len=max_sent_length)
tags_batch, tags_mask = pad_sents(tags_batch, pad_token, max_len=max_sent_length)
input_chars = torch.tensor(token2id(chars_batch, tok2idx, unk_id=tok2idx['<UNK>']), device=device)
target_tags = torch.tensor(token2id(tags_batch, tag2idx), device=device)
tags_mask = torch.tensor(tags_mask, dtype=torch.uint8, device=device)
loss = -model(input_chars, target_tags, tags_mask)
val_loss += loss
cum_cnt = cum_cnt + input_chars.shape[0]
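    # Average the accumulated loss over every evaluated sentence.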
val_loss = val_loss / cum_cnt
return val_loss
def create_output(chars_batch, tags_batch):
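    # Rebuild space-separated sentences from per-character tags: 'S' (single-
    # character word) and 'E' (end of word) both close a word, so a space follows.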
output = []
    for seq, tags in zip(chars_batch, tags_batch):
        new_sent = []
        for char, tag in zip(seq, tags):
            new_sent.append(char)
            if tag == 'S' or tag == 'E':
new_sent.append(" ")
output.append("".join(new_sent).strip())
return output
def predict(model_path, data_path, split, output_path, batch_size, max_sent_length, cuda):
device = torch.device("cuda:0" if cuda else "cpu")
dataset = SIGHAN(split=split, root_path=data_path)
output = []
model = CharWordSeg.load(model_path)
model = model.to(device)
model.eval()
vocab_tag = model.vocab_tag
tok2idx, idx2tok = vocab_tag['token_to_index'], vocab_tag['index_to_token']
tag2idx, idx2tag = vocab_tag['tag_to_index'], vocab_tag['index_to_tag']
data_loader = DataLoader(dataset, batch_size=batch_size, shuffle=False)
with torch.no_grad():
for iter_idx, data in enumerate(data_loader):
chars = get_feature(data, stage='predict')
chars_batch, chars_mask = pad_sents(chars, pad_token, max_len=max_sent_length)
input_chars = torch.tensor(token2id(chars_batch, tok2idx, unk_id=tok2idx['<UNK>']), device=device)
chars_mask = torch.tensor(chars_mask, dtype=torch.uint8, device=device)
pred_tags = id2token(model.decode(input_chars, chars_mask), idx2tag)
output.extend(create_output(chars, pred_tags))
with codecs.open(output_path, 'w', 'utf8') as f:
for sent in output:
print(sent, file=f)
@click.command()
@click.option('--mode', default='pred', help="eval - compute the test loss; pred - generate the output textfiles")
@click.option('--model_path', default="outputs/model_best", help="path of the trained model", required=True)
@click.option('--data_path', default="data/datasets/sighan2005-pku", help="path of the training data", required=True)
@click.option('--split', default="test")
@click.option('--output_path', default="test_output.txt")
@click.option('--batch_size', default=1024, type=int)
@click.option('--max_sent_length', default=200, type=int)
@click.option('--cuda', default=True, help="whether to use cuda", type=bool, required=True)
def main(mode, model_path, data_path, split, output_path, batch_size, max_sent_length, cuda):
if mode == 'eval':
print(f"model:{model_path}\t data:{data_path} split:{split}")
print(f"loss: {test(model_path, data_path, split, batch_size, max_sent_length, cuda)}")
elif mode == 'pred':
predict(model_path, data_path, split, output_path, batch_size, max_sent_length, cuda)
if __name__ == '__main__':
    main()  # pylint: disable=no-value-for-parameter
|
[
"codecs.open",
"logging.basicConfig",
"torch.utils.data.DataLoader",
"click.option",
"utils.SIGHAN",
"click.command",
"utils.get_feature",
"utils.pad_sents",
"utils.token2id",
"torch.device",
"model.CharWordSeg.load",
"torch.no_grad",
"torch.tensor"
] |
[((456, 550), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(levelname)s: %(asctime)s %(message)s"""', 'level': 'logging.DEBUG'}), "(format='%(levelname)s: %(asctime)s %(message)s', level=\n logging.DEBUG)\n", (475, 550), False, 'import logging\n'), ((3524, 3539), 'click.command', 'click.command', ([], {}), '()\n', (3537, 3539), False, 'import click\n'), ((3541, 3659), 'click.option', 'click.option', (['"""--mode"""'], {'default': '"""pred"""', 'help': '"""eval - compute the test loss; pred - generate the output textfiles"""'}), "('--mode', default='pred', help=\n 'eval - compute the test loss; pred - generate the output textfiles')\n", (3553, 3659), False, 'import click\n'), ((3656, 3768), 'click.option', 'click.option', (['"""--model_path"""'], {'default': '"""outputs/model_best"""', 'help': '"""path of the trained model"""', 'required': '(True)'}), "('--model_path', default='outputs/model_best', help=\n 'path of the trained model', required=True)\n", (3668, 3768), False, 'import click\n'), ((3765, 3886), 'click.option', 'click.option', (['"""--data_path"""'], {'default': '"""data/datasets/sighan2005-pku"""', 'help': '"""path of the training data"""', 'required': '(True)'}), "('--data_path', default='data/datasets/sighan2005-pku', help=\n 'path of the training data', required=True)\n", (3777, 3886), False, 'import click\n'), ((3883, 3922), 'click.option', 'click.option', (['"""--split"""'], {'default': '"""test"""'}), "('--split', default='test')\n", (3895, 3922), False, 'import click\n'), ((3924, 3980), 'click.option', 'click.option', (['"""--output_path"""'], {'default': '"""test_output.txt"""'}), "('--output_path', default='test_output.txt')\n", (3936, 3980), False, 'import click\n'), ((3982, 4034), 'click.option', 'click.option', (['"""--batch_size"""'], {'default': '(1024)', 'type': 'int'}), "('--batch_size', default=1024, type=int)\n", (3994, 4034), False, 'import click\n'), ((4036, 4092), 'click.option', 'click.option', (['"""--max_sent_length"""'], {'default': '(200)', 'type': 'int'}), "('--max_sent_length', default=200, type=int)\n", (4048, 4092), False, 'import click\n'), ((4094, 4188), 'click.option', 'click.option', (['"""--cuda"""'], {'default': '(True)', 'help': '"""whether to use cuda"""', 'type': 'bool', 'required': '(True)'}), "('--cuda', default=True, help='whether to use cuda', type=bool,\n required=True)\n", (4106, 4188), False, 'import click\n'), ((636, 677), 'torch.device', 'torch.device', (["('cuda:0' if cuda else 'cpu')"], {}), "('cuda:0' if cuda else 'cpu')\n", (648, 677), False, 'import torch\n'), ((692, 732), 'utils.SIGHAN', 'SIGHAN', ([], {'split': '"""dev"""', 'root_path': 'data_path'}), "(split='dev', root_path=data_path)\n", (698, 732), False, 'from utils import SIGHAN, build_vocab, add_id, get_feature, token2id, id2token, pad_sents\n'), ((748, 776), 'model.CharWordSeg.load', 'CharWordSeg.load', (['model_path'], {}), '(model_path)\n', (764, 776), False, 'from model import CharWordSeg\n'), ((1031, 1073), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size'}), '(dataset, batch_size=batch_size)\n', (1041, 1073), False, 'from torch.utils.data import DataLoader\n'), ((2371, 2412), 'torch.device', 'torch.device', (["('cuda:0' if cuda else 'cpu')"], {}), "('cuda:0' if cuda else 'cpu')\n", (2383, 2412), False, 'import torch\n'), ((2427, 2467), 'utils.SIGHAN', 'SIGHAN', ([], {'split': 'split', 'root_path': 'data_path'}), '(split=split, root_path=data_path)\n', (2433, 2467), False, 'from utils import SIGHAN, 
build_vocab, add_id, get_feature, token2id, id2token, pad_sents\n'), ((2500, 2528), 'model.CharWordSeg.load', 'CharWordSeg.load', (['model_path'], {}), '(model_path)\n', (2516, 2528), False, 'from model import CharWordSeg\n'), ((2783, 2840), 'torch.utils.data.DataLoader', 'DataLoader', (['dataset'], {'batch_size': 'batch_size', 'shuffle': '(False)'}), '(dataset, batch_size=batch_size, shuffle=False)\n', (2793, 2840), False, 'from torch.utils.data import DataLoader\n'), ((1116, 1131), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (1129, 1131), False, 'import torch\n'), ((2850, 2865), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2863, 2865), False, 'import torch\n'), ((3417, 3454), 'codecs.open', 'codecs.open', (['output_path', '"""w"""', '"""utf8"""'], {}), "(output_path, 'w', 'utf8')\n", (3428, 3454), False, 'import codecs\n'), ((1228, 1245), 'utils.get_feature', 'get_feature', (['data'], {}), '(data)\n', (1239, 1245), False, 'from utils import SIGHAN, build_vocab, add_id, get_feature, token2id, id2token, pad_sents\n'), ((1284, 1342), 'utils.pad_sents', 'pad_sents', (['chars_batch', 'pad_token'], {'max_len': 'max_sent_length'}), '(chars_batch, pad_token, max_len=max_sent_length)\n', (1293, 1342), False, 'from utils import SIGHAN, build_vocab, add_id, get_feature, token2id, id2token, pad_sents\n'), ((1379, 1436), 'utils.pad_sents', 'pad_sents', (['tags_batch', 'pad_token'], {'max_len': 'max_sent_length'}), '(tags_batch, pad_token, max_len=max_sent_length)\n', (1388, 1436), False, 'from utils import SIGHAN, build_vocab, add_id, get_feature, token2id, id2token, pad_sents\n'), ((1658, 1715), 'torch.tensor', 'torch.tensor', (['tags_mask'], {'dtype': 'torch.uint8', 'device': 'device'}), '(tags_mask, dtype=torch.uint8, device=device)\n', (1670, 1715), False, 'import torch\n'), ((2944, 2978), 'utils.get_feature', 'get_feature', (['data'], {'stage': '"""predict"""'}), "(data, stage='predict')\n", (2955, 2978), False, 'from utils import SIGHAN, build_vocab, add_id, get_feature, token2id, id2token, pad_sents\n'), ((3017, 3069), 'utils.pad_sents', 'pad_sents', (['chars', 'pad_token'], {'max_len': 'max_sent_length'}), '(chars, pad_token, max_len=max_sent_length)\n', (3026, 3069), False, 'from utils import SIGHAN, build_vocab, add_id, get_feature, token2id, id2token, pad_sents\n'), ((3207, 3265), 'torch.tensor', 'torch.tensor', (['chars_mask'], {'dtype': 'torch.uint8', 'device': 'device'}), '(chars_mask, dtype=torch.uint8, device=device)\n', (3219, 3265), False, 'import torch\n'), ((1477, 1532), 'utils.token2id', 'token2id', (['chars_batch', 'tok2idx'], {'unk_id': "tok2idx['<UNK>']"}), "(chars_batch, tok2idx, unk_id=tok2idx['<UNK>'])\n", (1485, 1532), False, 'from utils import SIGHAN, build_vocab, add_id, get_feature, token2id, id2token, pad_sents\n'), ((1588, 1617), 'utils.token2id', 'token2id', (['tags_batch', 'tag2idx'], {}), '(tags_batch, tag2idx)\n', (1596, 1617), False, 'from utils import SIGHAN, build_vocab, add_id, get_feature, token2id, id2token, pad_sents\n'), ((3110, 3165), 'utils.token2id', 'token2id', (['chars_batch', 'tok2idx'], {'unk_id': "tok2idx['<UNK>']"}), "(chars_batch, tok2idx, unk_id=tok2idx['<UNK>'])\n", (3118, 3165), False, 'from utils import SIGHAN, build_vocab, add_id, get_feature, token2id, id2token, pad_sents\n')]
|
from unittest.mock import Mock
from .context import tohu
from tohu.v7.primitive_generators import Integer, HashDigest, FakerGenerator
from tohu.v7.custom_generator.tohu_namespace import TohuNamespace
def test_add_field_generators():
ns = TohuNamespace("Quux")
assert ns.tohu_items_cls.__name__ == "Quux"
assert ns.tohu_items_cls.field_names == []
aa = Integer(100, 200)
ns.add_field_generator("aa", aa)
assert ns.field_names == ["aa"]
assert ns["aa"].is_clone_of(aa)
assert ns.tohu_items_cls.field_names == ["aa"]
bb = HashDigest(length=8)
ns.add_field_generator("bb", bb)
assert ns.field_names == ["aa", "bb"]
assert ns["aa"].is_clone_of(aa)
assert ns["bb"].is_clone_of(bb)
assert ns.tohu_items_cls.field_names == ["aa", "bb"]
cc = FakerGenerator(method="first_name")
ns.add_field_generator("cc", cc)
assert ns.field_names == ["aa", "bb", "cc"]
assert ns["aa"].is_clone_of(aa)
assert ns["bb"].is_clone_of(bb)
assert ns["cc"].is_clone_of(cc)
assert ns.tohu_items_cls.field_names == ["aa", "bb", "cc"]
def test_reset():
ns = TohuNamespace("Quux")
aa = Mock()
bb = Mock()
cc = Mock()
seed_generator = Mock()
seed_generator.__next__ = Mock(side_effect=["<seed_1>", "<seed_2>", "<seed_3>"])
ns.add_field_generator("bb", bb)
ns.add_field_generator("aa", aa)
ns.add_field_generator("cc", cc)
ns.seed_generator = seed_generator
ns.reset(seed="<master_seed>")
ns.seed_generator.reset.assert_called_once_with("<master_seed>")
ns["bb"].reset.assert_called_once_with("<seed_1>")
ns["aa"].reset.assert_called_once_with("<seed_2>")
ns["cc"].reset.assert_called_once_with("<seed_3>")
def test_next_item():
ns = TohuNamespace("Quux")
aa = Mock(__next__=Mock(side_effect=[11, 22, 33]))
bb = Mock(__next__=Mock(side_effect=["foo", "bar", "baz"]))
cc = Mock(__next__=Mock(side_effect=["z", "y", "x"]))
aa.clone.return_value = aa
bb.clone.return_value = bb
cc.clone.return_value = cc
ns.add_field_generator("bb", bb)
ns.add_field_generator("aa", aa)
ns.add_field_generator("cc", cc)
assert next(ns) == {"bb": "foo", "aa": 11, "cc": "z"}
assert next(ns) == {"bb": "bar", "aa": 22, "cc": "y"}
assert next(ns) == {"bb": "baz", "aa": 33, "cc": "x"}
# TODO: we should probably also check that the state of the generators
# is transferred correctly when spawning a TohuNamespace instance.
def test_spawn():
ns = TohuNamespace("Quux")
aa_first_clone = Mock()
bb_first_clone = Mock()
aa_second_clone = Mock()
bb_second_clone = Mock()
aa = Mock(clone=Mock(side_effect=[aa_first_clone, aa_second_clone]))
bb = Mock(clone=Mock(side_effect=[bb_first_clone, bb_second_clone]))
aa_first_clone.parent = aa_second_clone.parent = aa
bb_first_clone.parent = bb_second_clone.parent = bb
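    # add_field_generator() stores a clone of each generator, so the namespace
    # should hold the first clones and a spawned namespace the second ones.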
ns.add_field_generator("aa", aa)
ns.add_field_generator("bb", bb)
assert ns["aa"] is aa_first_clone
assert ns["bb"] is bb_first_clone
ns_spawned = ns.spawn()
assert ns_spawned["aa"] is aa_second_clone
assert ns_spawned["bb"] is bb_second_clone
def test_update_from_dict():
ns = TohuNamespace("Quux")
the_dict = {"aa": Integer(100, 200), "bb": "<this_is_not_a_tohu_generator", "cc": HashDigest(length=6)}
ns.update_from_dict(the_dict)
assert ns.field_names == ["aa", "cc"]
def test_find_existing_name():
g1 = Integer(100, 200)
g2 = HashDigest(length=6)
g3 = FakerGenerator(method="first_name")
ns = TohuNamespace("Quux")
ns.add_field_generator("aa", g1)
ns.add_field_generator("bb", g2)
assert ns.find_existing_name(g1) == "aa"
assert ns.find_existing_name(g2) == "bb"
assert ns.find_existing_name(g3) is None
def test_add_alias_for_another_field_generator():
ns = TohuNamespace("Quux")
g = Integer(100, 200)
ns.add_field_generator("aa", g)
ns.add_field_generator("bb", g)
field_gen_aa = ns.field_generators["aa"]
field_gen_bb = ns.field_generators["bb"]
assert field_gen_bb.is_clone_of(field_gen_aa)
|
[
"unittest.mock.Mock",
"tohu.v7.primitive_generators.HashDigest",
"tohu.v7.primitive_generators.FakerGenerator",
"tohu.v7.custom_generator.tohu_namespace.TohuNamespace",
"tohu.v7.primitive_generators.Integer"
] |
[((245, 266), 'tohu.v7.custom_generator.tohu_namespace.TohuNamespace', 'TohuNamespace', (['"""Quux"""'], {}), "('Quux')\n", (258, 266), False, 'from tohu.v7.custom_generator.tohu_namespace import TohuNamespace\n'), ((372, 389), 'tohu.v7.primitive_generators.Integer', 'Integer', (['(100)', '(200)'], {}), '(100, 200)\n', (379, 389), False, 'from tohu.v7.primitive_generators import Integer, HashDigest, FakerGenerator\n'), ((560, 580), 'tohu.v7.primitive_generators.HashDigest', 'HashDigest', ([], {'length': '(8)'}), '(length=8)\n', (570, 580), False, 'from tohu.v7.primitive_generators import Integer, HashDigest, FakerGenerator\n'), ((799, 834), 'tohu.v7.primitive_generators.FakerGenerator', 'FakerGenerator', ([], {'method': '"""first_name"""'}), "(method='first_name')\n", (813, 834), False, 'from tohu.v7.primitive_generators import Integer, HashDigest, FakerGenerator\n'), ((1120, 1141), 'tohu.v7.custom_generator.tohu_namespace.TohuNamespace', 'TohuNamespace', (['"""Quux"""'], {}), "('Quux')\n", (1133, 1141), False, 'from tohu.v7.custom_generator.tohu_namespace import TohuNamespace\n'), ((1151, 1157), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1155, 1157), False, 'from unittest.mock import Mock\n'), ((1167, 1173), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1171, 1173), False, 'from unittest.mock import Mock\n'), ((1183, 1189), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1187, 1189), False, 'from unittest.mock import Mock\n'), ((1211, 1217), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1215, 1217), False, 'from unittest.mock import Mock\n'), ((1248, 1302), 'unittest.mock.Mock', 'Mock', ([], {'side_effect': "['<seed_1>', '<seed_2>', '<seed_3>']"}), "(side_effect=['<seed_1>', '<seed_2>', '<seed_3>'])\n", (1252, 1302), False, 'from unittest.mock import Mock\n'), ((1758, 1779), 'tohu.v7.custom_generator.tohu_namespace.TohuNamespace', 'TohuNamespace', (['"""Quux"""'], {}), "('Quux')\n", (1771, 1779), False, 'from tohu.v7.custom_generator.tohu_namespace import TohuNamespace\n'), ((2497, 2518), 'tohu.v7.custom_generator.tohu_namespace.TohuNamespace', 'TohuNamespace', (['"""Quux"""'], {}), "('Quux')\n", (2510, 2518), False, 'from tohu.v7.custom_generator.tohu_namespace import TohuNamespace\n'), ((2540, 2546), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (2544, 2546), False, 'from unittest.mock import Mock\n'), ((2568, 2574), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (2572, 2574), False, 'from unittest.mock import Mock\n'), ((2597, 2603), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (2601, 2603), False, 'from unittest.mock import Mock\n'), ((2626, 2632), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (2630, 2632), False, 'from unittest.mock import Mock\n'), ((3205, 3226), 'tohu.v7.custom_generator.tohu_namespace.TohuNamespace', 'TohuNamespace', (['"""Quux"""'], {}), "('Quux')\n", (3218, 3226), False, 'from tohu.v7.custom_generator.tohu_namespace import TohuNamespace\n'), ((3453, 3470), 'tohu.v7.primitive_generators.Integer', 'Integer', (['(100)', '(200)'], {}), '(100, 200)\n', (3460, 3470), False, 'from tohu.v7.primitive_generators import Integer, HashDigest, FakerGenerator\n'), ((3480, 3500), 'tohu.v7.primitive_generators.HashDigest', 'HashDigest', ([], {'length': '(6)'}), '(length=6)\n', (3490, 3500), False, 'from tohu.v7.primitive_generators import Integer, HashDigest, FakerGenerator\n'), ((3510, 3545), 'tohu.v7.primitive_generators.FakerGenerator', 'FakerGenerator', ([], {'method': '"""first_name"""'}), "(method='first_name')\n", (3524, 3545), False, 'from 
tohu.v7.primitive_generators import Integer, HashDigest, FakerGenerator\n'), ((3555, 3576), 'tohu.v7.custom_generator.tohu_namespace.TohuNamespace', 'TohuNamespace', (['"""Quux"""'], {}), "('Quux')\n", (3568, 3576), False, 'from tohu.v7.custom_generator.tohu_namespace import TohuNamespace\n'), ((3847, 3868), 'tohu.v7.custom_generator.tohu_namespace.TohuNamespace', 'TohuNamespace', (['"""Quux"""'], {}), "('Quux')\n", (3860, 3868), False, 'from tohu.v7.custom_generator.tohu_namespace import TohuNamespace\n'), ((3877, 3894), 'tohu.v7.primitive_generators.Integer', 'Integer', (['(100)', '(200)'], {}), '(100, 200)\n', (3884, 3894), False, 'from tohu.v7.primitive_generators import Integer, HashDigest, FakerGenerator\n'), ((3249, 3266), 'tohu.v7.primitive_generators.Integer', 'Integer', (['(100)', '(200)'], {}), '(100, 200)\n', (3256, 3266), False, 'from tohu.v7.primitive_generators import Integer, HashDigest, FakerGenerator\n'), ((3313, 3333), 'tohu.v7.primitive_generators.HashDigest', 'HashDigest', ([], {'length': '(6)'}), '(length=6)\n', (3323, 3333), False, 'from tohu.v7.primitive_generators import Integer, HashDigest, FakerGenerator\n'), ((1803, 1833), 'unittest.mock.Mock', 'Mock', ([], {'side_effect': '[11, 22, 33]'}), '(side_effect=[11, 22, 33])\n', (1807, 1833), False, 'from unittest.mock import Mock\n'), ((1858, 1897), 'unittest.mock.Mock', 'Mock', ([], {'side_effect': "['foo', 'bar', 'baz']"}), "(side_effect=['foo', 'bar', 'baz'])\n", (1862, 1897), False, 'from unittest.mock import Mock\n'), ((1922, 1955), 'unittest.mock.Mock', 'Mock', ([], {'side_effect': "['z', 'y', 'x']"}), "(side_effect=['z', 'y', 'x'])\n", (1926, 1955), False, 'from unittest.mock import Mock\n'), ((2653, 2704), 'unittest.mock.Mock', 'Mock', ([], {'side_effect': '[aa_first_clone, aa_second_clone]'}), '(side_effect=[aa_first_clone, aa_second_clone])\n', (2657, 2704), False, 'from unittest.mock import Mock\n'), ((2726, 2777), 'unittest.mock.Mock', 'Mock', ([], {'side_effect': '[bb_first_clone, bb_second_clone]'}), '(side_effect=[bb_first_clone, bb_second_clone])\n', (2730, 2777), False, 'from unittest.mock import Mock\n')]
|
from typing import (
Any,
AsyncIterable,
Awaitable,
Callable,
Generic,
Optional,
Tuple,
TypeVar,
)
from typing_extensions import Protocol
from tickit.core.device import Device
from tickit.utils.configuration.configurable import as_tagged_union
#: Message type
T = TypeVar("T")
# https://github.com/python/mypy/issues/708#issuecomment-647124281
class RaiseInterrupt(Protocol):
"""A raise_interrupt function that should be passed to `Adapter`."""
async def __call__(self) -> None:
"""The actual call signature."""
pass
@as_tagged_union
class Adapter:
"""An interface for types which implement device adapters."""
device: Device
raise_interrupt: RaiseInterrupt
def __getattr__(self, name: str) -> Any:
"""Improve error message for getting attributes before `run_forever`."""
if name in ("device", "raise_interrupt"):
raise RuntimeError(
"Can't get self.device or self.raise_interrupt before run_forever()"
)
return super().__getattribute__(name)
async def run_forever(
self, device: Device, raise_interrupt: RaiseInterrupt
) -> None:
"""An asynchronous method allowing indefinite running of core adapter logic.
An asynchronous method allowing for indefinite running of core adapter logic
(typically the hosting of a protocol server and the interpretation of commands
which are supplied via it).
"""
self.device = device
self.raise_interrupt = raise_interrupt
def after_update(self):
"""A method which is called immediately after the device updates."""
@as_tagged_union
class Interpreter(Generic[T]):
"""An interface for types which handle messages recieved by an adapter."""
async def handle(
self, adapter: Adapter, message: T
) -> Tuple[AsyncIterable[T], bool]:
"""An asynchronous method which handles messages recieved by an adapter.
An asynchronous method which handles messages recieved by an adapter, replies
are sent as an asynchronous iterable to support setting of continious readback,
stand alone replies should be wrapped in an asynchronous iterable of length one.
Args:
adapter (Adapter): The adapter which is delegating message handling.
            message (T): The message received by the adapter.
Returns:
Tuple[AsyncIterable[T], bool]: A tuple containing both an asynchronous
iterable of reply messages and an interrupt flag.
"""
@as_tagged_union
class Server(Generic[T]):
"""An interface for types which implement an external messaging protocol."""
async def run_forever(
self,
on_connect: Callable[[], AsyncIterable[Optional[T]]],
handler: Callable[[T], Awaitable[AsyncIterable[Optional[T]]]],
) -> None:
"""An asynchronous method allowing indefinite running of core server logic.
Args:
on_connect (Callable[[], AsyncIterable[Optional[T]]]): An asynchronous
iterable of messages to be sent once a client connects.
handler (Callable[[T], Awaitable[AsyncIterable[Optional[T]]]]): An
                asynchronous method used to handle received messages, returning an
asynchronous iterable of replies.
"""
|
[
"typing.TypeVar"
] |
[((299, 311), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {}), "('T')\n", (306, 311), False, 'from typing import Any, AsyncIterable, Awaitable, Callable, Generic, Optional, Tuple, TypeVar\n')]
|
# Random tests
import math
if 1:
def isqrt(x):
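        # Classic digit-by-digit (bit-pair) integer square root: "one" is first
        # reduced to the largest power of four <= x, then each loop iteration
        # peels off one bit of the result -- no floating point involved.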
op = x
res = 0
one = 1 << 30
while one > op:
one >>= 2
while one != 0:
if op >= res + one:
op -= res + one
res += one << 1
res >>= 1
one >>= 2
return res
def isqrt64(x):
op = x
res = 0
one = 1 << 62
while one > op:
one >>= 2
while one != 0:
if op >= res + one:
op -= res + one
res += one << 1
res >>= 1
one >>= 2
return res
for x in range(0, 2 ** 64, 2 ** 48):
i = isqrt64(x)
s = int(math.sqrt(x))
if i != s:
print(x, i, s)
print("finish")
|
[
"math.sqrt"
] |
[((720, 732), 'math.sqrt', 'math.sqrt', (['x'], {}), '(x)\n', (729, 732), False, 'import math\n')]
|
# encoding: utf-8
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: <NAME> (<EMAIL>)
#
from __future__ import absolute_import, division, unicode_literals
import os
import sys
from unittest import TestCase
from moz_sql_parser import sql_parser
from moz_sql_parser.debugs import DEBUG
_ensure_imported = sql_parser
class TestSimple(TestCase):
"""
    THESE TESTS ARE FOR VERIFYING THE STATE OF THE REPO, NOT THE STATE OF THE CODE
"""
def test_recursion_limit(self):
if os.environ.get('TRAVIS_BRANCH') == 'master':
limit = sys.getrecursionlimit()
self.assertEqual(limit, 1500)
def test_debug_is_off(self):
self.assertFalse(DEBUG, "Turn off debugging")
|
[
"os.environ.get",
"sys.getrecursionlimit"
] |
[((657, 688), 'os.environ.get', 'os.environ.get', (['"""TRAVIS_BRANCH"""'], {}), "('TRAVIS_BRANCH')\n", (671, 688), False, 'import os\n'), ((722, 745), 'sys.getrecursionlimit', 'sys.getrecursionlimit', ([], {}), '()\n', (743, 745), False, 'import sys\n')]
|
from flask_restful import Resource, abort
recipes = []
# Handles the collection
class RecipesResource(Resource):
def get(self):
return {
'data': recipes
}
def post(self):
return
# Handles the items in collection
class Recipes(Resource):
def get(self, id):
if id < len(recipes):
return {
'data': recipes[id]
}
else:
            abort(404, message='ID not found.')
|
[
"flask_restful.abort"
] |
[((455, 490), 'flask_restful.abort', 'abort', (['(404)'], {'message': '"""ID not found."""'}), "(404, message='ID not found.')\n", (460, 490), False, 'from flask_restful import Resource, abort\n')]
|
from django.db import models
class Base(models.Model):
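    # Abstract base shared by the models below: audit timestamps plus an "active" flag.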
    createdAt = models.DateTimeField(auto_now_add=True)  # set once on creation
updatedAt = models.DateTimeField(auto_now=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ['id']
abstract = True
class Category(Base):
description = models.CharField(max_length=255)
class Meta:
verbose_name = "Categoria"
verbose_name_plural = "Categorias"
def __str__(self):
return self.description
class Product(Base):
name = models.CharField(max_length=255)
price = models.DecimalField(max_digits=9, decimal_places=2)
category = models.ForeignKey(Category, related_name="Product", on_delete=models.CASCADE)
class Meta:
verbose_name = "Produto"
verbose_name_plural = "Produtos"
def __str__(self):
return self.name
class ProductAvaliation(Base):
product = models.ForeignKey(Product, related_name="ProductAvaliation", on_delete=models.CASCADE)
evaluator = models.CharField(max_length=255)
email = models.EmailField()
comment = models.TextField(blank=True, default="")
grades = models.DecimalField(max_digits=2, decimal_places=1)
class Meta:
verbose_name = "Avaliação Do Produto"
verbose_name_plural = "Avaliação Dos Produtos"
unique_together = ["email", "product"]
def __str__(self):
return f"Avaliador: ${self.evaluator}, Produto: ${self.product}, Nota: ${self.grades}"
|
[
"django.db.models.TextField",
"django.db.models.CharField",
"django.db.models.ForeignKey",
"django.db.models.BooleanField",
"django.db.models.EmailField",
"django.db.models.DecimalField",
"django.db.models.DateTimeField"
] |
[((72, 107), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (92, 107), False, 'from django.db import models\n'), ((124, 159), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (144, 159), False, 'from django.db import models\n'), ((173, 206), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (192, 206), False, 'from django.db import models\n'), ((316, 348), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (332, 348), False, 'from django.db import models\n'), ((534, 566), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (550, 566), False, 'from django.db import models\n'), ((579, 630), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(9)', 'decimal_places': '(2)'}), '(max_digits=9, decimal_places=2)\n', (598, 630), False, 'from django.db import models\n'), ((646, 723), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Category'], {'related_name': '"""Product"""', 'on_delete': 'models.CASCADE'}), "(Category, related_name='Product', on_delete=models.CASCADE)\n", (663, 723), False, 'from django.db import models\n'), ((915, 1006), 'django.db.models.ForeignKey', 'models.ForeignKey', (['Product'], {'related_name': '"""ProductAvaliation"""', 'on_delete': 'models.CASCADE'}), "(Product, related_name='ProductAvaliation', on_delete=\n models.CASCADE)\n", (932, 1006), False, 'from django.db import models\n'), ((1018, 1050), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (1034, 1050), False, 'from django.db import models\n'), ((1063, 1082), 'django.db.models.EmailField', 'models.EmailField', ([], {}), '()\n', (1080, 1082), False, 'from django.db import models\n'), ((1097, 1137), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'default': '""""""'}), "(blank=True, default='')\n", (1113, 1137), False, 'from django.db import models\n'), ((1151, 1202), 'django.db.models.DecimalField', 'models.DecimalField', ([], {'max_digits': '(2)', 'decimal_places': '(1)'}), '(max_digits=2, decimal_places=1)\n', (1170, 1202), False, 'from django.db import models\n')]
|
import html
from unittest.mock import Mock
import numpy as np
import pytest
from napari.utils import nbscreenshot
def test_nbscreenshot(make_napari_viewer):
"""Test taking a screenshot."""
viewer = make_napari_viewer()
np.random.seed(0)
data = np.random.random((10, 15))
viewer.add_image(data)
rich_display_object = nbscreenshot(viewer)
assert hasattr(rich_display_object, '_repr_png_')
# Trigger method that would run in jupyter notebook cell automatically
rich_display_object._repr_png_()
assert rich_display_object.image is not None
@pytest.mark.parametrize(
"alt_text_input, expected_alt_text",
[
(None, None),
("Good alt text", "Good alt text"),
# Naughty strings https://github.com/minimaxir/big-list-of-naughty-strings
# ASCII punctuation
(r",./;'[]\-=", ',./;'[]\\-='), # noqa: W605
('>?:"{}|_+', '>?:"{}|_+'), # ASCII punctuation 2
("!@#$%^&*()`~", '!@#$%^&*()`~'), # ASCII punctuation 3
        # Emoji
("😍", "😍"), # emoji 1
("👨🦰 👨🏿🦰 👨🦱 👨🏿🦱 🦹🏿♂️", "👨🦰 👨🏿🦰 👨🦱 👨🏿🦱 🦹🏿♂️"), # emoji 2
(r"¯\_(ツ)_/¯", '¯\\_(ツ)_/¯'), # Japanese emoticon # noqa: W605
        # Special characters
("田中さんにあげて下さい", "田中さんにあげて下さい"), # two-byte characters
("表ポあA鷗ŒéB逍Üߪąñ丂㐀𠀀", "表ポあA鷗ŒéB逍Üߪąñ丂㐀𠀀"), # special unicode chars
("گچپژ", "گچپژ"), # Persian special characters
        # Script injection
("<script>alert(0)</script>", None), # script injection 1
("<script>alert('1');</script>", None),
("<svg><script>123<1>alert(3)</script>", None),
],
)
def test_safe_alt_text(alt_text_input, expected_alt_text):
display_obj = nbscreenshot(Mock(), alt_text=alt_text_input)
if not expected_alt_text:
assert not display_obj.alt_text
else:
assert html.escape(display_obj.alt_text) == expected_alt_text
|
[
"numpy.random.seed",
"napari.utils.nbscreenshot",
"unittest.mock.Mock",
"numpy.random.random",
"pytest.mark.parametrize",
"html.escape"
] |
[((585, 1229), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""alt_text_input, expected_alt_text"""', '[(None, None), (\'Good alt text\', \'Good alt text\'), (",./;\'[]\\\\-=",\n \',./;'[]\\\\-=\'), (\'>?:"{}|_+\', \'>?:"{}|_+\'), (\n \'!@#$%^&*()`~\', \'!@#$%^&*()`~\'), (\'😍\', \'😍\'), (\n \'👨\\u200d🦰 👨🏿\\u200d🦰 👨\\u200d🦱 👨🏿\\u200d🦱 🦹🏿\\u200d♂️\',\n \'👨\\u200d🦰 👨🏿\\u200d🦰 👨\\u200d🦱 👨🏿\\u200d🦱 🦹🏿\\u200d♂️\'), (\'¯\\\\_(ツ)_/¯\',\n \'¯\\\\_(ツ)_/¯\'), (\'田中さんにあげて下さい\', \'田中さんにあげて下さい\'), (\'表ポあA鷗ŒéB逍Üߪąñ丂㐀𠀀\',\n \'表ポあA鷗ŒéB逍Üߪąñ丂㐀𠀀\'), (\'گچپژ\', \'گچپژ\'), (\'<script>alert(0)</script>\',\n None), (\'<script>alert('1');</script>\', None), (\n \'<svg><script>123<1>alert(3)</script>\', None)]'], {}), '(\'alt_text_input, expected_alt_text\', [(None, None),\n (\'Good alt text\', \'Good alt text\'), (",./;\'[]\\\\-=", \',./;'[]\\\\-=\'),\n (\'>?:"{}|_+\', \'>?:"{}|_+\'), (\'!@#$%^&*()`~\', \'!@#$%^&*()`~\'\n ), (\'😍\', \'😍\'), (\'👨\\u200d🦰 👨🏿\\u200d🦰 👨\\u200d🦱 👨🏿\\u200d🦱 🦹🏿\\u200d♂️\',\n \'👨\\u200d🦰 👨🏿\\u200d🦰 👨\\u200d🦱 👨🏿\\u200d🦱 🦹🏿\\u200d♂️\'), (\'¯\\\\_(ツ)_/¯\',\n \'¯\\\\_(ツ)_/¯\'), (\'田中さんにあげて下さい\', \'田中さんにあげて下さい\'), (\'表ポあA鷗ŒéB逍Üߪąñ丂㐀𠀀\',\n \'表ポあA鷗ŒéB逍Üߪąñ丂㐀𠀀\'), (\'گچپژ\', \'گچپژ\'), (\'<script>alert(0)</script>\',\n None), (\'<script>alert('1');</script>\', None), (\n \'<svg><script>123<1>alert(3)</script>\', None)])\n', (608, 1229), False, 'import pytest\n'), ((236, 253), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (250, 253), True, 'import numpy as np\n'), ((265, 291), 'numpy.random.random', 'np.random.random', (['(10, 15)'], {}), '((10, 15))\n', (281, 291), True, 'import numpy as np\n'), ((346, 366), 'napari.utils.nbscreenshot', 'nbscreenshot', (['viewer'], {}), '(viewer)\n', (358, 366), False, 'from napari.utils import nbscreenshot\n'), ((1772, 1778), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1776, 1778), False, 'from unittest.mock import Mock\n'), ((1900, 1933), 'html.escape', 'html.escape', (['display_obj.alt_text'], {}), '(display_obj.alt_text)\n', (1911, 1933), False, 'import html\n')]
|
"""
A low level example:
This is how JenkinsAPI creates views
"""
from __future__ import print_function
import json
import requests
url = 'http://localhost:8080/createView'
str_view_name = "blahblah123"
params = {} # {'name': str_view_name}
headers = {'Content-Type': 'application/x-www-form-urlencoded'}
data = {
"name": str_view_name,
"mode": "hudson.model.ListView",
"Submit": "OK",
"json": json.dumps(
{"name": str_view_name, "mode": "hudson.model.ListView"}
)
}
# Try 1
result = requests.post(url, params=params, data=data, headers=headers)
print(result.text.encode('UTF-8'))
|
[
"requests.post",
"json.dumps"
] |
[((515, 576), 'requests.post', 'requests.post', (['url'], {'params': 'params', 'data': 'data', 'headers': 'headers'}), '(url, params=params, data=data, headers=headers)\n', (528, 576), False, 'import requests\n'), ((413, 481), 'json.dumps', 'json.dumps', (["{'name': str_view_name, 'mode': 'hudson.model.ListView'}"], {}), "({'name': str_view_name, 'mode': 'hudson.model.ListView'})\n", (423, 481), False, 'import json\n')]
|
print("Starting bookie server")
import praw
import os
from datetime import datetime
import io
# import boto3
from boto3 import session
from boto3.s3.transfer import S3Transfer
import json
import string
import requests
import re
print("Imports worked")
tickerMatch = re.compile(r"(?:\s|^)(?:\$([A-Za-z]{1,5})|([A-Z]{2,5}))(?=(?:[\s.,?!]|$))")
tickerData = {}
comments = {}
buckets = [{}]
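# One dict per hour, newest first; each maps ticker symbol -> mention count.
# Index 0 is the hour currently being filled.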
last_min = datetime.now().strftime("%M")
last_hour = datetime.now().strftime("%H")
# Initiate session
session = session.Session()
client = session.client('s3',
region_name='nyc3', #enter your own region_name
endpoint_url='https://nyc3.digitaloceanspaces.com', #enter your own endpoint url
aws_access_key_id=os.environ["SPACES_ACCESS"],
aws_secret_access_key=os.environ["SPACES_SECRET"])
print("Initialized s3 connection")
# Load reddit
reddit = praw.Reddit(
client_id=os.environ["REDDIT_CLIENT_ID"],
client_secret=os.environ["REDDIT_SECRET"],
# password="PASSWORD",
user_agent=os.environ["REDDIT_USER_AGENT"],
# username="USERNAME",
)
print("Initialized reddit connection")
# Returns a dict mapping ticker symbol -> mention count for the given comment.
def parseComment(comment: str):
output = {}
for match in tickerMatch.finditer(comment):
groups = [*match.groups()]
candidate = groups[0] if groups[0] else groups[1]
if candidate in tickerData:
if not candidate in output:
output[candidate] = 1
else:
output[candidate] += 1
return output
def getTickerData():
tickerDict = {}
payload = {'download': 'true'}
headers = {'Accept': 'application/json', 'Accept-Encoding': 'gzip', 'User-Agent': 'Bookie'}
r = requests.get('https://api.nasdaq.com/api/screener/stocks', params=payload, headers=headers)
nasDat = r.json()
for symbol in nasDat["data"]["rows"]:
tickerDict[symbol["symbol"]] = symbol
return tickerDict
tickerData = getTickerData()
print("Loaded NASDAQ data")
def createClientObject():
    today = buckets[0:24]
    aggregate = {}
    for bucket in today:
        for symbol in bucket:
            if symbol not in aggregate:
                aggregate[symbol] = {
                    "daily_mentions": bucket[symbol],
                    "prev_daily_mentions": 0
                }
            else:
                aggregate[symbol]["daily_mentions"] += bucket[symbol]
    yesterday = buckets[24:48]
    for bucket in yesterday:
        for symbol in bucket:
            if symbol not in aggregate:
                aggregate[symbol] = {
                    "daily_mentions": 0,
                    "prev_daily_mentions": bucket[symbol]
                }
            else:
                aggregate[symbol]["prev_daily_mentions"] += bucket[symbol]
for symbol in aggregate:
if symbol in tickerData:
aggregate[symbol]["last_sale"] = tickerData[symbol]["lastsale"]
return aggregate
print("Running primary loop")
while True:
try:
for comment in reddit.subreddit("wallstreetbets").stream.comments():
print(comment)
# Get the data from reddit
rawComment = comment.body
# comment de-dupe on reinitialize after error
if (comment.id in comments):
continue
else:
comments[comment.id] = True
# process data
mentioned = parseComment(rawComment)
for key in mentioned:
if not key in buckets[0]:
buckets[0][key] = 1
else:
buckets[0][key] += 1
# perform minute updates
minstr = datetime.now().strftime("%M")
if (minstr != last_min):
last_min = minstr
# pull new data every 5 minutes
if (int(minstr) % 5 == 0):
tickerData = getTickerData()
data = createClientObject()
print(data)
# upload to the space
datastream = io.BytesIO(bytes(json.dumps(data), "ascii"))
client.upload_fileobj(datastream, "ledger", "data.json", ExtraArgs={'ACL':'public-read'})
# perform hour updates
hourstr = datetime.now().strftime("%H")
if (hourstr != last_hour): #an hour has passed since the last update
last_hour = hourstr
buckets.insert(0, {})
                if len(buckets) > 48:
buckets.pop()
except KeyboardInterrupt:
print("Quitting :)")
break
    except Exception:
print("Unexpected error, retrying...")
|
[
"json.dumps",
"boto3.session.client",
"requests.get",
"boto3.session.Session",
"praw.Reddit",
"datetime.datetime.now",
"re.compile"
] |
[((269, 346), 're.compile', 're.compile', (['"""(?:\\\\s|^)(?:\\\\$([A-Za-z]{1,5})|([A-Z]{2,5}))(?=(?:[\\\\s.,?!]|$))"""'], {}), "('(?:\\\\s|^)(?:\\\\$([A-Za-z]{1,5})|([A-Z]{2,5}))(?=(?:[\\\\s.,?!]|$))')\n", (279, 346), False, 'import re\n'), ((504, 521), 'boto3.session.Session', 'session.Session', ([], {}), '()\n', (519, 521), False, 'from boto3 import session\n'), ((531, 731), 'boto3.session.client', 'session.client', (['"""s3"""'], {'region_name': '"""nyc3"""', 'endpoint_url': '"""https://nyc3.digitaloceanspaces.com"""', 'aws_access_key_id': "os.environ['SPACES_ACCESS']", 'aws_secret_access_key': "os.environ['SPACES_SECRET']"}), "('s3', region_name='nyc3', endpoint_url=\n 'https://nyc3.digitaloceanspaces.com', aws_access_key_id=os.environ[\n 'SPACES_ACCESS'], aws_secret_access_key=os.environ['SPACES_SECRET'])\n", (545, 731), False, 'from boto3 import session\n'), ((936, 1081), 'praw.Reddit', 'praw.Reddit', ([], {'client_id': "os.environ['REDDIT_CLIENT_ID']", 'client_secret': "os.environ['REDDIT_SECRET']", 'user_agent': "os.environ['REDDIT_USER_AGENT']"}), "(client_id=os.environ['REDDIT_CLIENT_ID'], client_secret=os.\n environ['REDDIT_SECRET'], user_agent=os.environ['REDDIT_USER_AGENT'])\n", (947, 1081), False, 'import praw\n'), ((1842, 1937), 'requests.get', 'requests.get', (['"""https://api.nasdaq.com/api/screener/stocks"""'], {'params': 'payload', 'headers': 'headers'}), "('https://api.nasdaq.com/api/screener/stocks', params=payload,\n headers=headers)\n", (1854, 1937), False, 'import requests\n'), ((401, 415), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (413, 415), False, 'from datetime import datetime\n'), ((443, 457), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (455, 457), False, 'from datetime import datetime\n'), ((3775, 3789), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3787, 3789), False, 'from datetime import datetime\n'), ((4367, 4381), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4379, 4381), False, 'from datetime import datetime\n'), ((4174, 4190), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (4184, 4190), False, 'import json\n')]
|
"""Collection of helpers methods that help to style tables in Jupyter Notebooks
"""
from typing import Union, Optional, List
from html import escape
import pandas as pd
import numpy as np
from pandas.io.formats.style import Styler
from pandas.api.types import is_list_like # noqa
from pandas._typing import Axis # noqa
from seaborn import color_palette
from helpsk import color
# pylint: disable=redefined-builtin,too-many-arguments
import helpsk.pandas as pandas # pylint: disable=consider-using-from-import
from helpsk.validation import any_none_nan
def format(styler: Union[pd.DataFrame, "pandas.io.formats.style.Styler"], # noqa
subset: Optional[List[str]] = None,
round_by: int = 2,
fill_missing_value: Optional[str] = '<NA>',
missing_color: Optional[str] = color.WARNING,
thousands: Optional[str] = ',',
hide_index: bool = False) -> Styler:
"""Applies basic formatting to pandas Dataframe.
Args:
styler:
either pd.Dataframe or pd.Dataframe.style
subset:
A valid 2d input to DataFrame.loc[<subset>], or, in the case of a 1d input or single key,
to DataFrame.loc[:, <subset>] where the columns are prioritised, to limit data to before applying
the function.
round_by:
number of digits to round numeric columns to
fill_missing_value:
the value to replace missing data (e.g. NaN)
missing_color:
The background color for cells that have missing values.
thousands:
the separator used for thousands e.g. `'` will result in `10,000` while ` ` will result in
`10 000`.
hide_index:
Hide the index of the dataframe.
Returns:
styler
"""
if isinstance(styler, pd.DataFrame):
styler = styler.style
if missing_color:
styler = styler.highlight_null(null_color=missing_color)
if hide_index:
styler = styler.hide_index()
return styler.format(subset=subset, # noqa
precision=round_by, # noqa
na_rep=escape(fill_missing_value), # noqa
thousands=thousands) # noqa
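# Minimal usage sketch (illustrative DataFrame):
#   df = pd.DataFrame({'price': [1234.567, None]})
#   format(df, round_by=1).to_html()  # rounds, applies the thousands separator, marks <NA>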
def background_color(styler: Union[pd.DataFrame, "pandas.io.formats.style.Styler"], # noqa,
palette: str = 'Blues',
**kwargs) -> Styler:
"""Applies a background color to pandas Dataframe.
Args:
styler:
either pd.Dataframe or pd.Dataframe.style
palette:
            name of the palette (value passed into seaborn `color_palette()`)
kwargs:
additional arguments that will be passed to the pandas `.background_gradient()` function.
Returns:
styler
"""
if isinstance(styler, pd.DataFrame):
styler = styler.style
# color_map = sns.light_palette("green", as_cmap=True)
# color_map = sns.color_palette("dark:salmon_r", as_cmap=True)
# color_map = sns.color_palette(['red', 'blue', 'green'], as_cmap=True)
# color_map = sns.color_palette("light:#5A9", as_cmap=True)
color_map = color_palette(palette, as_cmap=True)
return styler.background_gradient(cmap=color_map, **kwargs)
# pylint: disable=too-many-arguments
def __bar_inverse(style_object, align: str, colors: List[str], width: float = 100, min_value: float = None,
                  max_value: float = None):
"""
CODE MODIFIED FROM
https://github.com/pandas-dev/pandas/blob/v1.3.2/pandas/io/formats/style.py#L2178-L2258
Draw bar chart in dataframe cells.
"""
# Get input value range.
object_min = np.nanmin(style_object.to_numpy()) if min_value is None else min_value
object_max = np.nanmax(style_object.to_numpy()) if max_value is None else max_value
if align == "mid":
object_min = min(0, object_min) # noqa
object_max = max(0, object_max) # noqa
elif align == "zero":
# For "zero" mode, we want the range to be symmetrical around zero.
object_max = max(abs(object_min), abs(object_max))
object_min = -object_max
# Transform to percent-range of linear-gradient
normed = width * (style_object.to_numpy(dtype=float) - object_min) / (object_max - object_min + 1e-12)
zero = -width * object_min / (object_max - object_min + 1e-12)
# pylint: disable=redefined-outer-name
def css_bar(start: float, end: float, color: str) -> str: # noqa
"""
Generate CSS code to draw a bar from start to end.
"""
css = "width: 10em; height: 80%;" # noqa
if end > start:
css += "background: linear-gradient(90deg,"
if start > 0:
css += f" {color} {start:.1f}%, transparent {start:.1f}%, "
min_e = min(end, width)
css += f"transparent {min_e:.1f}%, {color} {min_e:.1f}%)"
elif end == start == 0:
css += "background: linear-gradient(90deg,"
css += f" {color} {100:.1f}%, transparent {100:.1f}%, "
css += f"transparent {100:.1f}%, {color} {100:.1f}%)"
return css
def css(row_item):
if pd.isna(row_item):
return ""
# avoid deprecated indexing `colors[x > zero]`
color_value = colors[1] if row_item > zero else colors[0]
if align == "left":
return css_bar(0, row_item, color_value)
return css_bar(min(row_item, zero), max(row_item, zero), color_value)
if style_object.ndim == 1:
# print(css(normed[10]))
return [css(x) for x in normed]
return pd.DataFrame(
[[css(x) for x in row] for row in normed],
index=style_object.index,
columns=style_object.columns,
)
def bar_inverse(
styler: Union[pd.DataFrame, "pandas.io.formats.style.Styler"], # noqa
subset: "Subset" = None, # noqa
axis: Axis = 0,
color="#d65f5f", # pylint: disable=redefined-outer-name # noqa
width: float = 100,
align: str = "left",
min_value: float = None,
max_value: float = None,
) -> Styler:
"""
CODE MODIFIED FROM
https://github.com/pandas-dev/pandas/blob/v1.3.2/pandas/io/formats/style.py#L2178-L2258
Draw (inverse) bar chart in the cell backgrounds.
Parameters
----------
styler: either a pandas DataFrame or object returned by pd.DataFrame.style
subset : label, array-like, IndexSlice, optional
A valid 2d input to `DataFrame.loc[<subset>]`, or, in the case of a 1d input
or single key, to `DataFrame.loc[:, <subset>]` where the columns are
prioritised, to limit ``data`` to *before* applying the function.
axis : {0 or 'index', 1 or 'columns', None}, default 0
Apply to each column (``axis=0`` or ``'index'``), to each row
(``axis=1`` or ``'columns'``), or to the entire DataFrame at once
with ``axis=None``.
color : str or 2-tuple/list
If a str is passed, the color is the same for both
negative and positive numbers. If 2-tuple/list is used, the
first element is the color_negative and the second is the
color_positive (eg: ['#d65f5f', '#5fba7d']).
width : float, default 100
A number between 0 or 100. The largest value will cover `width`
percent of the cell's width.
    align : {'left', 'zero', 'mid'}, default 'left'
How to align the bars with the cells.
- 'left' : the min value starts at the left of the cell.
- 'zero' : a value of zero is located at the center of the cell.
- 'mid' : the center of the cell is at (max-min)/2, or
if values are all negative (positive) the zero is aligned
at the right (left) of the cell.
min_value : float, optional
Minimum bar value, defining the left hand limit
of the bar drawing range, lower values are clipped to `min_value`.
When None (default): the minimum value of the data will be used.
max_value : float, optional
Maximum bar value, defining the right hand limit
of the bar drawing range, higher values are clipped to `max_value`.
When None (default): the maximum value of the data will be used.
Returns
-------
styler
"""
if isinstance(styler, pd.DataFrame):
styler = styler.style
if align not in ("left", "zero", "mid"):
        raise ValueError("`align` must be one of {'left', 'zero', 'mid'}")
if not is_list_like(color):
color = [color, color] # noqa
elif len(color) == 1:
color = [color[0], color[0]] # noqa
elif len(color) > 2:
raise ValueError(
"`color` must be string or a list-like "
"of length 2: [`color_neg`, `color_pos`] "
"(eg: color=['#d65f5f', '#5fba7d'])"
)
if subset is None:
subset = styler.data.select_dtypes(include=np.number).columns
# noqa
styler.apply(
__bar_inverse, # noqa
subset=subset,
axis=axis,
align=align, # noqa
colors=color,
width=width, # noqa
min_value=min_value, # noqa
max_value=max_value, # noqa
)
return styler
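# Minimal usage sketch (illustrative data): draw inverse bars over the numeric columns.
#   df = pd.DataFrame({'x': [1, 5, 10]})
#   bar_inverse(df, color=['#d65f5f', '#5fba7d'], align='left')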
def html_escape_dataframe(dataframe: pd.DataFrame):
"""HTML `escapes` all string and categorical columns and indexes in the `dataframe`.
This can be used when displaying pd.DataFrames in Jupyter notebook using `.style`;
e.g. `<XXX>` is displayed as blank because it is not encoded.
Args:
pd.DataFrame
Returns:
a copy of the `dataframe` with string values replaced after being html encoded via `html.escape()`
"""
def __escape(value):
if not any_none_nan([value]) and isinstance(value, str):
return escape(value)
return value
dataframe = dataframe.copy()
columns_to_escape = pandas.get_string_columns(dataframe) + pandas.get_categorical_columns(dataframe)
for column in columns_to_escape:
dataframe[column] = dataframe[column].apply(__escape)
if isinstance(dataframe.index, pd.MultiIndex):
index_tuples = [tuple([__escape(x) for x in index]) for index in dataframe.index] # pylint: disable=consider-using-generator
dataframe.index = pd.MultiIndex.from_tuples(index_tuples)
else:
dataframe.index = [__escape(x) for x in dataframe.index.values]
if isinstance(dataframe.columns, pd.MultiIndex):
index_tuples = [tuple([__escape(x) for x in columns]) for columns in dataframe.columns] # pylint: disable=consider-using-generator
dataframe.columns = pd.MultiIndex.from_tuples(index_tuples)
else:
dataframe.columns = [__escape(x) for x in dataframe.columns.values]
return dataframe
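# Usage sketch (illustrative): escape before styling so literal tags survive rendering.
#   escaped = html_escape_dataframe(df)
#   escaped.style.to_html()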
|
[
"helpsk.pandas.get_categorical_columns",
"pandas.MultiIndex.from_tuples",
"seaborn.color_palette",
"helpsk.pandas.get_string_columns",
"pandas.isna",
"helpsk.validation.any_none_nan",
"html.escape",
"pandas.api.types.is_list_like"
] |
[((3208, 3244), 'seaborn.color_palette', 'color_palette', (['palette'], {'as_cmap': '(True)'}), '(palette, as_cmap=True)\n', (3221, 3244), False, 'from seaborn import color_palette\n'), ((5230, 5247), 'pandas.isna', 'pd.isna', (['row_item'], {}), '(row_item)\n', (5237, 5247), True, 'import pandas as pd\n'), ((8550, 8569), 'pandas.api.types.is_list_like', 'is_list_like', (['color'], {}), '(color)\n', (8562, 8569), False, 'from pandas.api.types import is_list_like\n'), ((9935, 9971), 'helpsk.pandas.get_string_columns', 'pandas.get_string_columns', (['dataframe'], {}), '(dataframe)\n', (9960, 9971), True, 'import helpsk.pandas as pandas\n'), ((9974, 10015), 'helpsk.pandas.get_categorical_columns', 'pandas.get_categorical_columns', (['dataframe'], {}), '(dataframe)\n', (10004, 10015), True, 'import helpsk.pandas as pandas\n'), ((10327, 10366), 'pandas.MultiIndex.from_tuples', 'pd.MultiIndex.from_tuples', (['index_tuples'], {}), '(index_tuples)\n', (10352, 10366), True, 'import pandas as pd\n'), ((10671, 10710), 'pandas.MultiIndex.from_tuples', 'pd.MultiIndex.from_tuples', (['index_tuples'], {}), '(index_tuples)\n', (10696, 10710), True, 'import pandas as pd\n'), ((2159, 2185), 'html.escape', 'escape', (['fill_missing_value'], {}), '(fill_missing_value)\n', (2165, 2185), False, 'from html import escape\n'), ((9841, 9854), 'html.escape', 'escape', (['value'], {}), '(value)\n', (9847, 9854), False, 'from html import escape\n'), ((9772, 9793), 'helpsk.validation.any_none_nan', 'any_none_nan', (['[value]'], {}), '([value])\n', (9784, 9793), False, 'from helpsk.validation import any_none_nan\n')]
|
import numpy as np
def random_sum(*dimensions):
return np.random.rand(*dimensions).sum()
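# Example: random_sum(2, 3) sums a 2x3 array of uniform [0, 1) samples,
# so the result lies in [0, 6).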
|
[
"numpy.random.rand"
] |
[((61, 88), 'numpy.random.rand', 'np.random.rand', (['*dimensions'], {}), '(*dimensions)\n', (75, 88), True, 'import numpy as np\n')]
|
"""iBird URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from apps.utils.image_uploader import upload
from apps.utils import test
urlpatterns = [
path('admin/', admin.site.urls),
    path('api/test_get', test.test_get), # for testing only
    path('api/test_post', test.test_post), # for testing only
    path('api/test_patch', test.test_patch), # for testing only
path('api/upload', upload),
path('api/account/', include(('apps.account.urls', 'apps.account'), namespace='account')),
path('api/prediction/', include(('apps.prediction.urls', 'apps.prediction'), namespace='prediction')),
path('api/gallery/', include(('apps.gallery.urls', 'apps.gallery'), namespace='gallery')),
path('api/post/', include(('apps.post.urls', 'apps.post'), namespace='post'))
]
|
[
"django.urls.path",
"django.urls.include"
] |
[((794, 825), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (798, 825), False, 'from django.urls import path, include\n'), ((832, 867), 'django.urls.path', 'path', (['"""api/test_get"""', 'test.test_get'], {}), "('api/test_get', test.test_get)\n", (836, 867), False, 'from django.urls import path, include\n'), ((881, 918), 'django.urls.path', 'path', (['"""api/test_post"""', 'test.test_post'], {}), "('api/test_post', test.test_post)\n", (885, 918), False, 'from django.urls import path, include\n'), ((932, 971), 'django.urls.path', 'path', (['"""api/test_patch"""', 'test.test_patch'], {}), "('api/test_patch', test.test_patch)\n", (936, 971), False, 'from django.urls import path, include\n'), ((986, 1012), 'django.urls.path', 'path', (['"""api/upload"""', 'upload'], {}), "('api/upload', upload)\n", (990, 1012), False, 'from django.urls import path, include\n'), ((1040, 1107), 'django.urls.include', 'include', (["('apps.account.urls', 'apps.account')"], {'namespace': '"""account"""'}), "(('apps.account.urls', 'apps.account'), namespace='account')\n", (1047, 1107), False, 'from django.urls import path, include\n'), ((1138, 1214), 'django.urls.include', 'include', (["('apps.prediction.urls', 'apps.prediction')"], {'namespace': '"""prediction"""'}), "(('apps.prediction.urls', 'apps.prediction'), namespace='prediction')\n", (1145, 1214), False, 'from django.urls import path, include\n'), ((1242, 1309), 'django.urls.include', 'include', (["('apps.gallery.urls', 'apps.gallery')"], {'namespace': '"""gallery"""'}), "(('apps.gallery.urls', 'apps.gallery'), namespace='gallery')\n", (1249, 1309), False, 'from django.urls import path, include\n'), ((1334, 1392), 'django.urls.include', 'include', (["('apps.post.urls', 'apps.post')"], {'namespace': '"""post"""'}), "(('apps.post.urls', 'apps.post'), namespace='post')\n", (1341, 1392), False, 'from django.urls import path, include\n')]
|
"""
Copyright (c) 2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import Dict
from typing import List
from typing import Optional
from nncf.common.quantization.initialization.range import PerLayerRangeInitConfig
from nncf.common.quantization.initialization.range import RangeInitConfig
from nncf.common.utils.logger import logger
from nncf.config.config import NNCFConfig
from nncf.config.structures import BNAdaptationInitArgs
from nncf.config.structures import QuantizationRangeInitArgs
def extract_algorithm_names(config: NNCFConfig) -> List[str]:
retval = []
compression_config_json_section = config.get('compression', [])
if isinstance(compression_config_json_section, dict):
compression_config_json_section = [compression_config_json_section]
for algo_config in compression_config_json_section:
retval.append(algo_config['algorithm'])
return retval
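# Sketch (illustrative; assumes NNCFConfig can wrap a plain dict):
#   config = NNCFConfig({'compression': [{'algorithm': 'quantization'}]})
#   extract_algorithm_names(config)  # -> ['quantization']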
def extract_algo_specific_config(config: NNCFConfig, algo_name_to_match: str) -> Dict:
"""
Extracts a .json sub-dictionary for a given compression algorithm from the
common NNCFConfig.
:param config: An instance of the NNCFConfig.
:param algo_name_to_match: The name of the algorithm for which the algorithm-specific section
should be extracted.
:return: The sub-dictionary, exactly as it is specified in the NNCF configuration of the .json file,
that corresponds to the algorithm-specific data (i.e. {"algorithm": "quantization", ... })
"""
compression_section = config.get('compression', [])
if isinstance(compression_section, list):
algo_list = compression_section
else:
assert isinstance(compression_section, dict)
algo_list = [compression_section]
from nncf.common.compression import NO_COMPRESSION_ALGORITHM_NAME
if algo_name_to_match == NO_COMPRESSION_ALGORITHM_NAME:
if len(algo_list) > 0:
raise RuntimeError(f'No algorithm configuration should be specified '
f'when you try to extract {algo_name_to_match} from the NNCF config!')
return {}
matches = []
for compression_algo_dict in algo_list:
algo_name = compression_algo_dict['algorithm']
if algo_name == algo_name_to_match:
matches.append(compression_algo_dict)
if len(matches) > 1:
raise RuntimeError(f'Multiple algorithm configurations specified for the same '
f'algo {algo_name_to_match} in the NNCF config!')
if not matches:
raise RuntimeError(f'Did not find an algorithm configuration for '
f'algo {algo_name_to_match} in the NNCF config!')
return next(iter(matches))
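# With the illustrative config above, extract_algo_specific_config(config, 'quantization')
# returns {'algorithm': 'quantization'}; asking for an algorithm that is not configured
# raises RuntimeError.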
def extract_range_init_params(config: NNCFConfig) -> Optional[Dict[str, object]]:
"""
    Extracts parameters of the quantization range initialization algorithm from the
    compression section of the NNCFConfig.
:param config: An instance of the NNCFConfig.
:return: Parameters of the quantization range initialization algorithm.
"""
algo_config = extract_algo_specific_config(config, 'quantization')
init_range_config_dict_or_list = algo_config.get('initializer', {}).get('range', {})
range_init_args = None
try:
range_init_args = config.get_extra_struct(QuantizationRangeInitArgs)
except KeyError:
if not init_range_config_dict_or_list:
logger.warning('Initializer section not specified for quantization algorithm in NNCF config and '
'quantization init args not supplied - the necessary parameters are not specified '
'to run the quantizer range initialization algorithm')
return None
if not init_range_config_dict_or_list:
logger.warning('Enabling quantization range initialization with default parameters.')
init_range_config_dict_or_list = {'num_init_samples': 256}
max_num_init_samples = 0
global_range_init_config = None
scope_overrides = [] # type: List[PerLayerRangeInitConfig]
if isinstance(init_range_config_dict_or_list, dict):
global_range_init_config = RangeInitConfig.from_dict(init_range_config_dict_or_list)
max_num_init_samples = global_range_init_config.num_init_samples
else:
for sub_init_range_config_dict in init_range_config_dict_or_list:
scope_overrides.append(PerLayerRangeInitConfig.from_dict(sub_init_range_config_dict))
max_num_init_samples_config = max(scope_overrides, key=lambda x: x.num_init_samples)
max_num_init_samples = max_num_init_samples_config.num_init_samples
if max_num_init_samples == 0:
return None
if range_init_args is None:
raise ValueError(
'Should run range initialization as specified via config,'
'but the initializing data loader is not provided as an extra struct. '
'Refer to `NNCFConfig.register_extra_structs` and the `QuantizationRangeInitArgs` class')
params = {
'init_range_data_loader': range_init_args.data_loader,
'device': range_init_args.device,
'global_init_config': global_range_init_config,
'per_layer_range_init_configs': scope_overrides
}
return params
def extract_bn_adaptation_init_params(config: NNCFConfig, algo_name: str) -> Dict[str, object]:
"""
Extracts parameters for initialization of an object of the class `BatchnormAdaptationAlgorithm`
    from the compression section of the NNCFConfig.
:param config: An instance of the NNCFConfig.
:param algo_name: The name of the algorithm for which the params have to be extracted.
:return: Parameters for initialization of an object of the class `BatchnormAdaptationAlgorithm` specific
to the supplied algorithm.
"""
algo_config = extract_algo_specific_config(config, algo_name)
params = algo_config.get('initializer', {}).get('batchnorm_adaptation', {})
num_bn_adaptation_samples = params.get('num_bn_adaptation_samples', 2000)
try:
args = config.get_extra_struct(BNAdaptationInitArgs)
except KeyError:
raise RuntimeError(
'There is no possibility to create the batch-norm statistics adaptation algorithm '
'because the data loader is not provided as an extra struct. Refer to the '
'`NNCFConfig.register_extra_structs` method and the `BNAdaptationInitArgs` class.') from None
params = {
'num_bn_adaptation_samples': num_bn_adaptation_samples,
'data_loader': args.data_loader,
'device': args.device
}
return params
|
[
"nncf.common.quantization.initialization.range.PerLayerRangeInitConfig.from_dict",
"nncf.common.quantization.initialization.range.RangeInitConfig.from_dict",
"nncf.common.utils.logger.logger.warning"
] |
[((4288, 4378), 'nncf.common.utils.logger.logger.warning', 'logger.warning', (['"""Enabling quantization range initialization with default parameters."""'], {}), "(\n 'Enabling quantization range initialization with default parameters.')\n", (4302, 4378), False, 'from nncf.common.utils.logger import logger\n'), ((4663, 4720), 'nncf.common.quantization.initialization.range.RangeInitConfig.from_dict', 'RangeInitConfig.from_dict', (['init_range_config_dict_or_list'], {}), '(init_range_config_dict_or_list)\n', (4688, 4720), False, 'from nncf.common.quantization.initialization.range import RangeInitConfig\n'), ((3921, 4161), 'nncf.common.utils.logger.logger.warning', 'logger.warning', (['"""Initializer section not specified for quantization algorithm in NNCF config and quantization init args not supplied - the necessary parameters are not specified to run the quantizer range initialization algorithm"""'], {}), "(\n 'Initializer section not specified for quantization algorithm in NNCF config and quantization init args not supplied - the necessary parameters are not specified to run the quantizer range initialization algorithm'\n )\n", (3935, 4161), False, 'from nncf.common.utils.logger import logger\n'), ((4913, 4974), 'nncf.common.quantization.initialization.range.PerLayerRangeInitConfig.from_dict', 'PerLayerRangeInitConfig.from_dict', (['sub_init_range_config_dict'], {}), '(sub_init_range_config_dict)\n', (4946, 4974), False, 'from nncf.common.quantization.initialization.range import PerLayerRangeInitConfig\n')]
|
import numpy as np
import time
import sys
from ServoMotor import *
from fns import *
# Initialize motor control library & USB Port
filename = "/dev/ttyUSB0"
motor = ServoMotor(filename)
IO = motor.IO_Init()
if IO < 0:
print('IO exit')
sys.exit()
# Call corresponding function to convert sim2real/real2sim
def convFns(pos, convType):
conv = [left_armpit, left_elbow, left_shoulder, right_armpit, right_elbow, right_shoulder,
left_armpit, left_elbow, left_shoulder, right_armpit, right_elbow, right_shoulder]
targ = np.zeros(12)
for i in range(len(pos)):
if i==0:
targ[i] = conv[i](pos[i], convType, "front")
elif i==6:
targ[i] = conv[i](pos[i], convType, "back")
else:
targ[i] = conv[i](pos[i], convType)
return targ
'''
# Return target position
def act_shoulders&armpits(t, a, b, c, d, e):
# Calculate desired position
desired_p = np.zeros(12)
# Positive
pos_v_shoulder = a * np.sin(t * e) + b
pos_v_elbow = c * np.sin(t * e) + d
pos_shoulder = [2, 11]
pos_elbow = [1, 10]
# Negative
neg_v_shoulder = -a * np.sin(t * e) + b
neg_v_elbow = -c * np.sin(t * e) + d
neg_shoulder = [5, 8]
neg_elbow = [4, 7]
# Zero
zero = [0, 3, 6, 9]
# Assign
desired_p[pos_shoulder] = pos_v_shoulder
desired_p[pos_elbow] = pos_v_elbow
desired_p[neg_shoulder] = neg_v_shoulder
desired_p[neg_elbow] = neg_v_elbow
desired_p[zero] = 0
# Return desired new position
return convFns(desired_p, "sim2real")
'''
# Front and back legs use different parameters
# Return target position
def act(t, a, b, c, d, e, f):
# Calculate desired position
f_pos = a * np.sin(t * e) + b
f_neg = -a * np.sin(t * e) + b
b_pos = c * np.sin(t * e + f) + d
b_neg = -c * np.sin(t * e + f) + d
# Assign
desired_p = [0, f_pos, f_pos, 0, f_neg, f_neg, 0, b_pos, b_pos, 0, b_neg, b_neg]
# Return desired new position
return convFns(desired_p, "sim2real")
# Return position to take
def get_action(steps):
params = np.array(np.load('params/ROB/best_overall-2.npy'))
params[4]-=22
#params = np.array([0.24495851730947005, 0.18187873796178136, 0.2020333429029758, -0.3852743697870839, -0.2094960812992037]) # Trained sin_gait 7, Oct 11 19:01
#params = np.array([0.2980418533307479, 0.01878523690431866, 0.022546654023646796, -0.2685025304630598, -0.2080157428428239]) # Trained sin_gait 5, Oct 12 13:21
#params = np.array([0.15, 0.0, 0.2, 0.15, 0.2]) # Smooth Criminal
#params = np.array([0.15, 0.0, 0.19, 0.2, 0.23, 2.05])
return act(steps, *params)
# MOVE MOTOR TO GIVEN POSITION
def walk(pos):
h = 0
real_pos = []
for j in range(1,5):
u = 10*j
r = range(u, u+3)
for i in r:
real_pos.append(motor.readPosition(i))
motor.move(i, int(pos[h]), 0)
h+=1
time.sleep(0.005)
return real_pos
# Initialize motors as servos and set offset
offsets = [30, 0, 64, 0, 70, 50, 26, 100, 55, 80, 90, 35]
h = 0
# Set servo mode to all servos with their offset
for j in range(1,5):
u = 10*j
r = range(u, u+3)
for i in r:
motor.setServoMode(i)
if offsets[h]!=0:
motor.setPositionOffset(i,offsets[h])
h+=1
# RESET position and stand down & up before walking
pos = [500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500]
h = 0
for j in range(1,5):
u = 10*j
r = range(u, u+3)
for i in r:
motor.move(i, int(pos[h]), 1500)
h+=1
time.sleep(3)
pos = [500, 750, 583, 500, 250, 417, 500, 750, 583, 500, 250, 417]
#pos = get_action(0)
h = 0
for j in range(1,5):
u = 10*j
r = range(u, u+3)
for i in r:
if h>5:
motor.move(i, int(pos[h]), 1000)
else:
motor.move(i, int(pos[h]), 1500)
h+=1
time.sleep(3)
'''
# Determine need to smoothen transition to first position
pos_prev = [500, 750, 583, 500, 250, 417, 500, 750, 583, 500, 250, 417]
pos = get_action(0)
delta_pos = abs(pos-pos_prev)
steps = int(max(delta_pos)/15)
m = []
for i in range(len(pos)):
m.append(np.linspace(pos_prev[i], pos[i], steps))
m_t = np.array(m).T.tolist()
for i in range(len(m_t)):
for j in range(len(m_t[0])):
m_t[i][j] = int(round(m_t[i][j]))
# If smoothing is needed, perform actions
for i in m_t:
real_pos = walk(i)
# WALK
j = 1
while j < 100:
# Get target position
pos = get_action(j)
# Move robot to target position
real_pos = walk(pos)
j += 1
'''
# RESET position and stand down & up before walking
pos = [500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500]
h = 0
for j in range(1,5):
u = 10*j
r = range(u, u+3)
for i in r:
motor.move(i, int(pos[h]), 1500)
h+=1
|
[
"numpy.load",
"numpy.zeros",
"time.sleep",
"numpy.sin",
"sys.exit"
] |
[((3246, 3259), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (3256, 3259), False, 'import time\n'), ((3514, 3527), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (3524, 3527), False, 'import time\n'), ((238, 248), 'sys.exit', 'sys.exit', ([], {}), '()\n', (246, 248), False, 'import sys\n'), ((524, 536), 'numpy.zeros', 'np.zeros', (['(12)'], {}), '(12)\n', (532, 536), True, 'import numpy as np\n'), ((1914, 1954), 'numpy.load', 'np.load', (['"""params/ROB/best_overall-2.npy"""'], {}), "('params/ROB/best_overall-2.npy')\n", (1921, 1954), True, 'import numpy as np\n'), ((2666, 2683), 'time.sleep', 'time.sleep', (['(0.005)'], {}), '(0.005)\n', (2676, 2683), False, 'import time\n'), ((1559, 1572), 'numpy.sin', 'np.sin', (['(t * e)'], {}), '(t * e)\n', (1565, 1572), True, 'import numpy as np\n'), ((1591, 1604), 'numpy.sin', 'np.sin', (['(t * e)'], {}), '(t * e)\n', (1597, 1604), True, 'import numpy as np\n'), ((1622, 1639), 'numpy.sin', 'np.sin', (['(t * e + f)'], {}), '(t * e + f)\n', (1628, 1639), True, 'import numpy as np\n'), ((1658, 1675), 'numpy.sin', 'np.sin', (['(t * e + f)'], {}), '(t * e + f)\n', (1664, 1675), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
Atom and Bond features used in graph reaction representations
"""
import logging
from typing import List, Optional
from rdkit.Chem.rdchem import Bond, Atom
logger = logging.getLogger(__name__)
# values found on train+valid on USPTO-50k + USPTO-FULL + USPTO-MIT
ATOM_PROPS = {
'atomic_num': [1, 3, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 19, 22,
29, 30, 32, 33, 34, 35, 47, 50, 51, 52, 53, 82, 83],
'formal_charge': [-1, 0, 1, 2, 3],
'chiral_tag': [0, 1, 2],
'num_explicit_hs': [0, 1, 2, 4],
'is_aromatic': [0, 1],
'is_supernode': [0, 1], #
'is_edited': [0, 1], # we mark atoms that have been added/edited by the model
'is_reactant': [0, 1] # this feature is used to mark reactants in "SEPARATED" variant of forward prediction
}
BOND_PROPS = {
'bond_type': ['self', 'supernode', 1, 2, 3, 12],
'bond_stereo': [0, 2, 3],
'is_aromatic': [0, 1],
'is_edited': [0, 1],
}
ATOM_PROP2OH = dict((k, (dict((ap, i + 1) for i, ap in enumerate(vals)))) for k, vals in ATOM_PROPS.items())
BOND_PROP2OH = dict((k, (dict((ap, i + 1) for i, ap in enumerate(vals)))) for k, vals in BOND_PROPS.items())
def try_get_bond_feature(bond: Bond, feat_key: str):
try:
if feat_key == 'bond_type':
return int(bond.GetBondType())
elif feat_key == 'is_edited':
if bond.HasProp('is_edited') and bond.GetBoolProp('is_edited'):
return 1
return 0
elif feat_key == 'bond_stereo':
return int(bond.GetStereo())
elif feat_key == 'is_aromatic':
return int(bond.GetIsAromatic())
else:
raise KeyError(f"Unknown bond feature: {feat_key}")
except RuntimeError as e:
logger.warning(f'Runtime error while try_get_bond_feature: {str(e)}')
return None
def try_get_atom_feature(atom: Atom, feat_key: str):
try:
if feat_key == 'is_supernode':
return 0
elif feat_key == 'is_product':
return 0
elif feat_key == 'is_edited':
if atom.HasProp('is_edited') and atom.GetBoolProp('is_edited'):
return 1
return 0
elif feat_key == 'is_reactant':
if atom.HasProp('in_target') and atom.GetBoolProp('in_target'):
return 1
return 0
elif feat_key == 'atomic_num':
return atom.GetAtomicNum()
elif feat_key == 'chiral_tag':
return int(atom.GetChiralTag())
elif feat_key == 'formal_charge':
return atom.GetFormalCharge()
elif feat_key == 'is_aromatic':
return int(atom.GetIsAromatic())
elif feat_key == 'num_explicit_hs':
return atom.GetNumExplicitHs()
else:
raise KeyError(f"Unknown atom feature: {feat_key}")
except RuntimeError as e:
logger.warning(f'Runtime error while try_get_atom_feature: {str(e)}')
return None
def get_atom_features(atom: Atom, atom_oh_keys: List[str], used_oh_keys: Optional[List[str]] = None,
atom_prop2oh: dict = ATOM_PROP2OH) -> List[int]:
feat = [try_get_atom_feature(atom, key) if used_oh_keys is None or key in used_oh_keys else 0
for key in atom_oh_keys]
result = []
for key, val in zip(atom_oh_keys, feat):
if key not in atom_prop2oh:
continue
if val not in atom_prop2oh[key]:
logger.debug(f'Unknown {key} value: {val}')
result.append(0)
else:
result.append(atom_prop2oh[key][val])
return result
def get_bond_features(bond: Bond, bond_oh_keys: List[str], used_oh_keys: Optional[List[str]] = None,
bond_prop2oh: dict = BOND_PROP2OH) -> List[int]:
feat = [try_get_bond_feature(bond, key) if used_oh_keys is None or key in used_oh_keys else 0
for key in bond_oh_keys]
result = []
for key, val in zip(bond_oh_keys, feat):
if key not in bond_prop2oh:
continue
if val not in bond_prop2oh[key]:
logger.debug(f'Unknown {key} value: {val}')
result.append(0)
else:
result.append(bond_prop2oh[key][val])
return result
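# Minimal usage sketch (assumes RDKit is available; molecule is illustrative):
#   from rdkit import Chem
#   mol = Chem.MolFromSmiles('CCO')
#   get_atom_features(mol.GetAtomWithIdx(0), list(ATOM_PROPS.keys()))
#   get_bond_features(mol.GetBondWithIdx(0), list(BOND_PROPS.keys()))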
|
[
"logging.getLogger"
] |
[((196, 223), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (213, 223), False, 'import logging\n')]
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from marionette_driver import expected, By, Wait
from gaiatest.apps.base import Base
class SettingsForm(Base):
_settings_view_locator = (By.ID, 'view-settings')
_loading_overlay_locator = (By.ID, 'loading-overlay')
_settings_close_button_locator = (By.ID, 'settings-close')
_order_by_last_name_switch_locator = (By.CSS_SELECTOR, 'gaia-switch[name="order.lastname"]')
_import_from_sim_button_locator = (By.CSS_SELECTOR, "li[id*='import-sim-option'] button")
_import_from_sdcard_locator = (By.CSS_SELECTOR, 'button.icon-sd')
_import_from_gmail_button_locator = (By.CSS_SELECTOR, 'button.icon-gmail')
_import_from_windows_live_button_locator = (By.CSS_SELECTOR, 'button.icon-live')
_import_settings_header = (By.ID, 'import-settings-header')
_export_to_sd_button_locator = (By.CSS_SELECTOR, 'button[data-l10n-id="memoryCard"]')
_export_to_sim_button_locator = (By.CSS_SELECTOR, '#export-options button.icon-sim')
_import_contacts_locator = (By.CSS_SELECTOR, 'button[data-l10n-id="importContactsButton"]')
_export_contacts_locator = (By.CSS_SELECTOR, 'button[data-l10n-id="exportContactsButton"]')
_delete_contacts_locator = (By.ID, 'bulkDelete')
_gmail_contacts_imported_locator = (By.CSS_SELECTOR, '.icon.icon-gmail > p > span')
_gmail_import_option_locator = (By.ID, 'import-gmail-option')
_import_settings_locator = (By.ID, 'import-settings')
_select_contacts_locator = (By.ID, 'selectable-form')
_sync_friends_locator = (By.ID, 'settingsFb')
_import_error_message_locator = (By.CSS_SELECTOR, '#import-live-option > p.error-message')
_outlook_import_option_locator = (By.ID, 'import-live-option')
_import_from_outlook_button_locator = (By.CSS_SELECTOR, 'button.icon-live')
def __init__(self, marionette):
Base.__init__(self, marionette)
view = self.marionette.find_element(*self._settings_view_locator)
Wait(self.marionette).until(lambda m: view.location['y'] == 0)
def tap_order_by_last_name(self):
last_name = Wait(self.marionette).until(
expected.element_present(*self._order_by_last_name_switch_locator))
Wait(self.marionette).until(expected.element_displayed(last_name))
initial_state = self.is_custom_element_checked(last_name)
last_name.tap()
self.wait_for_custom_element_checked_state(last_name, checked=not(initial_state))
@property
def order_by_last_name(self):
return self.marionette.find_element(*self._order_by_last_name_switch_locator).is_selected()
def tap_import_contacts(self):
import_contacts = Wait(self.marionette).until(
expected.element_present(*self._import_contacts_locator))
Wait(self.marionette).until(expected.element_displayed(import_contacts))
import_contacts.tap()
import_settings = self.marionette.find_element(*self._import_settings_locator)
Wait(self.marionette).until(lambda m: import_settings.location['x'] == 0)
def tap_export_contacts(self):
export_contacts = Wait(self.marionette).until(
expected.element_present(*self._export_contacts_locator))
Wait(self.marionette).until(expected.element_displayed(export_contacts))
export_contacts.tap()
import_settings = self.marionette.find_element(*self._import_settings_locator)
Wait(self.marionette).until(lambda m: import_settings.location['x'] == 0)
def tap_delete_contacts(self):
delete_contacts = Wait(self.marionette).until(
expected.element_present(*self._delete_contacts_locator))
Wait(self.marionette).until(expected.element_displayed(delete_contacts))
delete_contacts.tap()
select_contacts = self.marionette.find_element(*self._select_contacts_locator)
Wait(self.marionette).until(lambda m: select_contacts.location['y'] == 0)
def tap_import_from_sim(self):
import_from_sim = Wait(self.marionette).until(
expected.element_present(*self._import_from_sim_button_locator))
Wait(self.marionette).until(expected.element_displayed(import_from_sim))
import_from_sim.tap()
from gaiatest.apps.contacts.app import Contacts
status_message = Wait(self.marionette).until(
expected.element_present(*Contacts._status_message_locator))
Wait(self.marionette).until(expected.element_displayed(status_message))
Wait(self.marionette).until(expected.element_not_displayed(status_message))
@property
def gmail_imported_contacts(self):
return self.marionette.find_element(*self._gmail_contacts_imported_locator).text
def tap_import_from_gmail(self):
import_from_gmail = Wait(self.marionette).until(
expected.element_present(*self._import_from_gmail_button_locator))
Wait(self.marionette).until(expected.element_displayed(import_from_gmail))
import_from_gmail.tap()
from gaiatest.apps.contacts.regions.gmail import GmailLogin
return GmailLogin(self.marionette)
def tap_sync_friends(self):
element = Wait(self.marionette).until(
expected.element_present(*self._sync_friends_locator))
Wait(self.marionette).until(expected.element_displayed(element))
element.tap()
Wait(self.marionette).until(
lambda m: m.find_element(*self._sync_friends_locator).location['x'] == 0)
from gaiatest.apps.system.regions.facebook import FacebookLogin
return FacebookLogin(self.marionette)
def tap_import_from_sdcard(self):
import_from_sdcard = Wait(self.marionette).until(
expected.element_present(*self._import_from_sdcard_locator))
Wait(self.marionette).until(expected.element_displayed(import_from_sdcard))
import_from_sdcard.tap()
from gaiatest.apps.contacts.app import Contacts
status_message = Wait(self.marionette).until(
expected.element_present(*Contacts._status_message_locator))
Wait(self.marionette).until(expected.element_displayed(status_message))
Wait(self.marionette).until(expected.element_not_displayed(status_message))
def tap_export_to_sd(self):
export_to_sdcard = Wait(self.marionette).until(
expected.element_present(*self._export_to_sd_button_locator))
Wait(self.marionette).until(expected.element_displayed(export_to_sdcard))
export_to_sdcard.tap()
select_contacts = self.marionette.find_element(*self._select_contacts_locator)
Wait(self.marionette).until(lambda m: select_contacts.location['y'] == 0)
def tap_export_to_sim(self):
export_to_sim = Wait(self.marionette).until(
expected.element_present(*self._export_to_sim_button_locator))
Wait(self.marionette).until(expected.element_displayed(export_to_sim))
export_to_sim.tap()
select_contacts = self.marionette.find_element(*self._select_contacts_locator)
Wait(self.marionette).until(
lambda m: select_contacts.rect['y'] == 0 and select_contacts.is_displayed())
def tap_done(self):
close = self.marionette.find_element(*self._settings_close_button_locator)
close.tap()
Wait(self.marionette).until(expected.element_not_displayed(close))
from gaiatest.apps.contacts.app import Contacts
return Contacts(self.marionette)
def tap_back_from_import_contacts(self):
header = self.marionette.find_element(*self._import_settings_header)
# TODO: remove tap with coordinates after Bug 1061698 is fixed
header.tap(25, 25)
Wait(self.marionette).until(expected.element_not_displayed(header))
@property
def is_gmail_import_service_in_error(self):
gmail_import_service = self.marionette.find_element(*self._gmail_import_option_locator)
return 'error' in gmail_import_service.get_attribute('class')
@property
def is_gmail_import_enabled(self):
return self.marionette.find_element(*self._import_from_gmail_button_locator).is_enabled()
@property
def is_error_message_displayed(self):
return self.is_element_displayed(*self._import_error_message_locator)
@property
def is_outlook_import_service_in_error(self):
outlook_import_service = self.marionette.find_element(*self._outlook_import_option_locator)
return 'error' in outlook_import_service.get_attribute('class')
@property
def is_outlook_import_enabled(self):
return self.marionette.find_element(*self._import_from_outlook_button_locator).is_enabled()
class ConfirmationView(Base):
_confirmation_locator = (By.ID, 'confirmation-message')
_error_message_locator = (By.CSS_SELECTOR, '#confirmation-message p')
def __init__(self, marionette):
Base.__init__(self, marionette)
view = self.marionette.find_element(*self._confirmation_locator)
Wait(self.marionette).until(lambda m: view.location['y'] == 0)
@property
def error_message(self):
return self.marionette.find_element(*self._error_message_locator).text
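# Usage sketch (inside a gaiatest run; the `marionette` session is assumed):
#   settings = SettingsForm(marionette)
#   settings.tap_order_by_last_name()
#   contacts = settings.tap_done()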
|
[
"gaiatest.apps.contacts.app.Contacts",
"marionette_driver.expected.element_not_displayed",
"gaiatest.apps.contacts.regions.gmail.GmailLogin",
"gaiatest.apps.base.Base.__init__",
"marionette_driver.Wait",
"marionette_driver.expected.element_displayed",
"gaiatest.apps.system.regions.facebook.FacebookLogin",
"marionette_driver.expected.element_present"
] |
[((2009, 2040), 'gaiatest.apps.base.Base.__init__', 'Base.__init__', (['self', 'marionette'], {}), '(self, marionette)\n', (2022, 2040), False, 'from gaiatest.apps.base import Base\n'), ((5222, 5249), 'gaiatest.apps.contacts.regions.gmail.GmailLogin', 'GmailLogin', (['self.marionette'], {}), '(self.marionette)\n', (5232, 5249), False, 'from gaiatest.apps.contacts.regions.gmail import GmailLogin\n'), ((5702, 5732), 'gaiatest.apps.system.regions.facebook.FacebookLogin', 'FacebookLogin', (['self.marionette'], {}), '(self.marionette)\n', (5715, 5732), False, 'from gaiatest.apps.system.regions.facebook import FacebookLogin\n'), ((7568, 7593), 'gaiatest.apps.contacts.app.Contacts', 'Contacts', (['self.marionette'], {}), '(self.marionette)\n', (7576, 7593), False, 'from gaiatest.apps.contacts.app import Contacts\n'), ((9012, 9043), 'gaiatest.apps.base.Base.__init__', 'Base.__init__', (['self', 'marionette'], {}), '(self, marionette)\n', (9025, 9043), False, 'from gaiatest.apps.base import Base\n'), ((2286, 2352), 'marionette_driver.expected.element_present', 'expected.element_present', (['*self._order_by_last_name_switch_locator'], {}), '(*self._order_by_last_name_switch_locator)\n', (2310, 2352), False, 'from marionette_driver import expected, By, Wait\n'), ((2390, 2427), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['last_name'], {}), '(last_name)\n', (2416, 2427), False, 'from marionette_driver import expected, By, Wait\n'), ((2861, 2917), 'marionette_driver.expected.element_present', 'expected.element_present', (['*self._import_contacts_locator'], {}), '(*self._import_contacts_locator)\n', (2885, 2917), False, 'from marionette_driver import expected, By, Wait\n'), ((2955, 2998), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['import_contacts'], {}), '(import_contacts)\n', (2981, 2998), False, 'from marionette_driver import expected, By, Wait\n'), ((3302, 3358), 'marionette_driver.expected.element_present', 'expected.element_present', (['*self._export_contacts_locator'], {}), '(*self._export_contacts_locator)\n', (3326, 3358), False, 'from marionette_driver import expected, By, Wait\n'), ((3396, 3439), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['export_contacts'], {}), '(export_contacts)\n', (3422, 3439), False, 'from marionette_driver import expected, By, Wait\n'), ((3743, 3799), 'marionette_driver.expected.element_present', 'expected.element_present', (['*self._delete_contacts_locator'], {}), '(*self._delete_contacts_locator)\n', (3767, 3799), False, 'from marionette_driver import expected, By, Wait\n'), ((3837, 3880), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['delete_contacts'], {}), '(delete_contacts)\n', (3863, 3880), False, 'from marionette_driver import expected, By, Wait\n'), ((4184, 4247), 'marionette_driver.expected.element_present', 'expected.element_present', (['*self._import_from_sim_button_locator'], {}), '(*self._import_from_sim_button_locator)\n', (4208, 4247), False, 'from marionette_driver import expected, By, Wait\n'), ((4285, 4328), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['import_from_sim'], {}), '(import_from_sim)\n', (4311, 4328), False, 'from marionette_driver import expected, By, Wait\n'), ((4482, 4541), 'marionette_driver.expected.element_present', 'expected.element_present', (['*Contacts._status_message_locator'], {}), '(*Contacts._status_message_locator)\n', (4506, 4541), False, 'from marionette_driver import expected, By, Wait\n'), ((4579, 4621), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['status_message'], {}), '(status_message)\n', (4605, 4621), False, 'from marionette_driver import expected, By, Wait\n'), ((4659, 4705), 'marionette_driver.expected.element_not_displayed', 'expected.element_not_displayed', (['status_message'], {}), '(status_message)\n', (4689, 4705), False, 'from marionette_driver import expected, By, Wait\n'), ((4957, 5022), 'marionette_driver.expected.element_present', 'expected.element_present', (['*self._import_from_gmail_button_locator'], {}), '(*self._import_from_gmail_button_locator)\n', (4981, 5022), False, 'from marionette_driver import expected, By, Wait\n'), ((5060, 5105), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['import_from_gmail'], {}), '(import_from_gmail)\n', (5086, 5105), False, 'from marionette_driver import expected, By, Wait\n'), ((5342, 5395), 'marionette_driver.expected.element_present', 'expected.element_present', (['*self._sync_friends_locator'], {}), '(*self._sync_friends_locator)\n', (5366, 5395), False, 'from marionette_driver import expected, By, Wait\n'), ((5433, 5468), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['element'], {}), '(element)\n', (5459, 5468), False, 'from marionette_driver import expected, By, Wait\n'), ((5842, 5901), 'marionette_driver.expected.element_present', 'expected.element_present', (['*self._import_from_sdcard_locator'], {}), '(*self._import_from_sdcard_locator)\n', (5866, 5901), False, 'from marionette_driver import expected, By, Wait\n'), ((5939, 5985), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['import_from_sdcard'], {}), '(import_from_sdcard)\n', (5965, 5985), False, 'from marionette_driver import expected, By, Wait\n'), ((6142, 6201), 'marionette_driver.expected.element_present', 'expected.element_present', (['*Contacts._status_message_locator'], {}), '(*Contacts._status_message_locator)\n', (6166, 6201), False, 'from marionette_driver import expected, By, Wait\n'), ((6239, 6281), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['status_message'], {}), '(status_message)\n', (6265, 6281), False, 'from marionette_driver import expected, By, Wait\n'), ((6319, 6365), 'marionette_driver.expected.element_not_displayed', 'expected.element_not_displayed', (['status_message'], {}), '(status_message)\n', (6349, 6365), False, 'from marionette_driver import expected, By, Wait\n'), ((6468, 6528), 'marionette_driver.expected.element_present', 'expected.element_present', (['*self._export_to_sd_button_locator'], {}), '(*self._export_to_sd_button_locator)\n', (6492, 6528), False, 'from marionette_driver import expected, By, Wait\n'), ((6566, 6610), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['export_to_sdcard'], {}), '(export_to_sdcard)\n', (6592, 6610), False, 'from marionette_driver import expected, By, Wait\n'), ((6911, 6972), 'marionette_driver.expected.element_present', 'expected.element_present', (['*self._export_to_sim_button_locator'], {}), '(*self._export_to_sim_button_locator)\n', (6935, 6972), False, 'from marionette_driver import expected, By, Wait\n'), ((7010, 7051), 'marionette_driver.expected.element_displayed', 'expected.element_displayed', (['export_to_sim'], {}), '(export_to_sim)\n', (7036, 7051), False, 'from marionette_driver import expected, By, Wait\n'), ((7458, 7495), 'marionette_driver.expected.element_not_displayed', 'expected.element_not_displayed', (['close'], {}), '(close)\n', (7488, 7495), False, 'from marionette_driver import expected, By, Wait\n'), ((7851, 7889), 'marionette_driver.expected.element_not_displayed', 'expected.element_not_displayed', (['header'], {}), '(header)\n', (7881, 7889), False, 'from marionette_driver import expected, By, Wait\n'), ((2123, 2144), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (2127, 2144), False, 'from marionette_driver import expected, By, Wait\n'), ((2245, 2266), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (2249, 2266), False, 'from marionette_driver import expected, By, Wait\n'), ((2362, 2383), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (2366, 2383), False, 'from marionette_driver import expected, By, Wait\n'), ((2820, 2841), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (2824, 2841), False, 'from marionette_driver import expected, By, Wait\n'), ((2927, 2948), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (2931, 2948), False, 'from marionette_driver import expected, By, Wait\n'), ((3125, 3146), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (3129, 3146), False, 'from marionette_driver import expected, By, Wait\n'), ((3261, 3282), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (3265, 3282), False, 'from marionette_driver import expected, By, Wait\n'), ((3368, 3389), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (3372, 3389), False, 'from marionette_driver import expected, By, Wait\n'), ((3566, 3587), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (3570, 3587), False, 'from marionette_driver import expected, By, Wait\n'), ((3702, 3723), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (3706, 3723), False, 'from marionette_driver import expected, By, Wait\n'), ((3809, 3830), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (3813, 3830), False, 'from marionette_driver import expected, By, Wait\n'), ((4007, 4028), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (4011, 4028), False, 'from marionette_driver import expected, By, Wait\n'), ((4143, 4164), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (4147, 4164), False, 'from marionette_driver import expected, By, Wait\n'), ((4257, 4278), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (4261, 4278), False, 'from marionette_driver import expected, By, Wait\n'), ((4441, 4462), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (4445, 4462), False, 'from marionette_driver import expected, By, Wait\n'), ((4551, 4572), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (4555, 4572), False, 'from marionette_driver import expected, By, Wait\n'), ((4631, 4652), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (4635, 4652), False, 'from marionette_driver import expected, By, Wait\n'), ((4916, 4937), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (4920, 4937), False, 'from marionette_driver import expected, By, Wait\n'), ((5032, 5053), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (5036, 5053), False, 'from marionette_driver import expected, By, Wait\n'), ((5301, 5322), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (5305, 5322), False, 'from marionette_driver import expected, By, Wait\n'), ((5405, 5426), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (5409, 5426), False, 'from marionette_driver import expected, By, Wait\n'), ((5500, 5521), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (5504, 5521), False, 'from marionette_driver import expected, By, Wait\n'), ((5801, 5822), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (5805, 5822), False, 'from marionette_driver import expected, By, Wait\n'), ((5911, 5932), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (5915, 5932), False, 'from marionette_driver import expected, By, Wait\n'), ((6101, 6122), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (6105, 6122), False, 'from marionette_driver import expected, By, Wait\n'), ((6211, 6232), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (6215, 6232), False, 'from marionette_driver import expected, By, Wait\n'), ((6291, 6312), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (6295, 6312), False, 'from marionette_driver import expected, By, Wait\n'), ((6427, 6448), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (6431, 6448), False, 'from marionette_driver import expected, By, Wait\n'), ((6538, 6559), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (6542, 6559), False, 'from marionette_driver import expected, By, Wait\n'), ((6738, 6759), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (6742, 6759), False, 'from marionette_driver import expected, By, Wait\n'), ((6870, 6891), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (6874, 6891), False, 'from marionette_driver import expected, By, Wait\n'), ((6982, 7003), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (6986, 7003), False, 'from marionette_driver import expected, By, Wait\n'), ((7176, 7197), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (7180, 7197), False, 'from marionette_driver import expected, By, Wait\n'), ((7430, 7451), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (7434, 7451), False, 'from marionette_driver import expected, By, Wait\n'), ((7823, 7844), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (7827, 7844), False, 'from marionette_driver import expected, By, Wait\n'), ((9125, 9146), 'marionette_driver.Wait', 'Wait', (['self.marionette'], {}), '(self.marionette)\n', (9129, 9146), False, 'from marionette_driver import expected, By, Wait\n')]
|
from flask import render_template, request, redirect, url_for, abort
from . import main
from .forms import UpdateProfile, BlogForm, CommentsForm, UpvoteForm  # forms referenced below
from ..models import User, Blog, Comment  # Blog and Comment are queried below
from flask_login import login_required, current_user
from .. import db, photos  # photos: Flask-Uploads UploadSet, assumed to be created in the app package
import requests
import json
import os
# import markdown2
@main.route('/')
#@login_required
def index():
title = 'CAREY-BLOG'
random = requests.get('http://quotes.stormconsultancy.co.uk/random.json').json()
#search_pitch = request.args.get('pitch_query')
#pitches = Pitch.get_all_pitches()
#categories = Category.get_categories()
return render_template("index.html",title = title, random = random)
@main.route('/blog/new/', methods=['GET', 'POST'])
@login_required
def new_blog():
form = BlogForm()
if form.validate_on_submit():
blog = form.content.data
new_blog = Blog(blog=blog)
new_blog.save_blog()
return redirect(url_for('main.index'))
return render_template('new_blog.html', new_blog_form=form)
@main.route('/blog/comments/new/<int:id>', methods=['GET', 'POST'])
#@login_required
def new_comment(id):
form = CommentsForm()
vote_form = UpvoteForm()
if form.validate_on_submit():
new_comment = Comment(pitch_id=id, comment=form.comment.data,
username=current_user.username, votes=form.vote.data)
new_comment.save_comment()
return redirect(url_for('main.index'))
#title = f'{pitch_result.id} review'
return render_template('comments.html', comment_form=form, vote_form=vote_form)
@main.route('/user/<uname>/update/pic', methods=['POST'])
#@login_required
def update_pic(uname):
user = User.query.filter_by(username=uname).first()
if 'photo' in request.files:
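        # NOTE: 'photos' is assumed to be a flask_uploads UploadSet configured
        # elsewhere in the app factory; it is not defined in this module.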
filename = photos.save(request.files['photo'])
path = 'photos/{}'.format(filename)
user.profile_pic_path = path
db.session.commit()
return redirect(url_for('main.profile', uname=uname))
@main.route('/blogs/<int:blog_id>',methods = ["GET","POST"])
def view_blog(blog_id):
blog = Blog.query.filter_by(id=blog_id).first()
    random = requests.get('http://quotes.stormconsultancy.co.uk/random.json').json()
    form = CommentsForm()
    if form.validate_on_submit():
name = form.name.data
description = form.description.data
new_comment = Comment(name=name, description=description,blog_id=blog.id)
new_comment.save_comment()
return redirect(url_for('main.view_blog', blog_id=blog.id))
comments = Comment.query.filter_by(blog_id=blog.id)
return render_template("index.html", form=form, blog=blog, comments=comments, random = random)
@main.route('/user/<uname>')
def profile(uname):
user = User.query.filter_by(username=uname).first()
if user is None:
return render_template('fourOwFour.html')
return render_template("profile/profile.html", user=user)
@main.route('/user/<uname>/update', methods=['GET', 'POST'])
@login_required
def update_profile(uname):
user = User.query.filter_by(username=uname).first()
if user is None:
abort(404)
form = UpdateProfile()
if form.validate_on_submit():
user.bio = form.bio.data
db.session.add(user)
db.session.commit()
return redirect(url_for('.profile', uname=user.username))
return render_template('profile/update.html', form=form)
@main.route('/view/comment/<int:id>')
def view_comments(id):
'''
Function that returs the comments belonging to a particular pitch
'''
comments = Comment.get_comments(id)
return render_template('view_comments.html', comments=comments, id=id)
|
[
"requests.get",
"flask.url_for",
"flask.abort",
"flask.render_template"
] |
[((592, 649), 'flask.render_template', 'render_template', (['"""index.html"""'], {'title': 'title', 'random': 'random'}), "('index.html', title=title, random=random)\n", (607, 649), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((958, 1010), 'flask.render_template', 'render_template', (['"""new_blog.html"""'], {'new_blog_form': 'form'}), "('new_blog.html', new_blog_form=form)\n", (973, 1010), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((1497, 1569), 'flask.render_template', 'render_template', (['"""comments.html"""'], {'comment_form': 'form', 'vote_form': 'vote_form'}), "('comments.html', comment_form=form, vote_form=vote_form)\n", (1512, 1569), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((2594, 2683), 'flask.render_template', 'render_template', (['"""index.html"""'], {'form': 'form', 'blog': 'blog', 'comments': 'comments', 'random': 'random'}), "('index.html', form=form, blog=blog, comments=comments,\n random=random)\n", (2609, 2683), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((2872, 2922), 'flask.render_template', 'render_template', (['"""profile/profile.html"""'], {'user': 'user'}), "('profile/profile.html', user=user)\n", (2887, 2922), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((3358, 3407), 'flask.render_template', 'render_template', (['"""profile/update.html"""'], {'form': 'form'}), "('profile/update.html', form=form)\n", (3373, 3407), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((3609, 3672), 'flask.render_template', 'render_template', (['"""view_comments.html"""'], {'comments': 'comments', 'id': 'id'}), "('view_comments.html', comments=comments, id=id)\n", (3624, 3672), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((1943, 1979), 'flask.url_for', 'url_for', (['"""main.profile"""'], {'uname': 'uname'}), "('main.profile', uname=uname)\n", (1950, 1979), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((2825, 2859), 'flask.render_template', 'render_template', (['"""fourOwFour.html"""'], {}), "('fourOwFour.html')\n", (2840, 2859), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((3114, 3124), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (3119, 3124), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((373, 437), 'requests.get', 'requests.get', (['"""http://quotes.stormconsultancy.co.uk/random.json"""'], {}), "('http://quotes.stormconsultancy.co.uk/random.json')\n", (385, 437), False, 'import requests\n'), ((923, 944), 'flask.url_for', 'url_for', (['"""main.index"""'], {}), "('main.index')\n", (930, 944), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((1422, 1443), 'flask.url_for', 'url_for', (['"""main.index"""'], {}), "('main.index')\n", (1429, 1443), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((2161, 2225), 'requests.get', 'requests.get', (['"""http://quotes.stormconsultancy.co.uk/random.json"""'], {}), "('http://quotes.stormconsultancy.co.uk/random.json')\n", (2173, 2225), False, 'import requests\n'), ((2482, 2524), 'flask.url_for', 'url_for', (['"""main.view_blog"""'], {'blog_id': 'blog.id'}), "('main.view_blog', blog_id=blog.id)\n", (2489, 2524), False, 'from flask import render_template, request, redirect, url_for, abort\n'), ((3304, 3344), 'flask.url_for', 'url_for', (['""".profile"""'], {'uname': 'user.username'}), "('.profile', uname=user.username)\n", (3311, 3344), False, 'from flask import render_template, request, redirect, url_for, abort\n')]
|
'''
Notes
TO DO
-Change the sleep timer back to 15 when done with testing.
'''
import pyautogui, time, random
# welcome message and explanation of program
print('Welcome to Mouse Wiggler by GrizzledLabs\n')
print('This program prevents computer sleeping')
print('...by wiggling your mouse every 15 seconds\n')
print('After entering desired time in minutes you will')
print('...have 10 seconds before Mouse Wiggler begins.')
# replay setup
replay = 'Y'
while replay == 'Y':
# sets up second counter
iteration = 0
# user input for length of program run in minutes
runtime = input('\nRun mouse wiggle for how many minutes?\n\n')
# enforces numbers only with digit check and while loop
numcheck = runtime.isdigit()
while numcheck == False:
runtime = input('\nINVALID RESPONSE\nRun mouse wiggle for how many minutes?\n\n')
numcheck = runtime.isdigit()
# takes minutes and multiplies by 4 as program runs in 15 second chunks
runtimeINT = int(runtime)
actualrun = runtimeINT * 4
    # 10 second prewiggle warning and countdown
print('\nStarting in...')
x = 11
while x != 1:
x = x-1
print(x)
time.sleep(1)
print('\nProgram started\n')
# engine of the program
for i in range(actualrun):
# sleeper until next run in seconds
time.sleep(15)
# for double zero move checker
doublezero = False
# iteration counter in seconds
iteration = iteration + 15
# creates random numbers to move by
randx = random.randint(-4,4)
randy = random.randint(-4,4)
# double zero checker to protect against timeout
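        # (a (0, 0) move would leave the cursor in place and not reset the OS
        # idle timer; the tiny back-and-forth below guarantees real movement)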
if randx == 0 and randy == 0:
doublezero = 'double zero avoided'
pyautogui.moveRel(-1, -1, duration=0.25)
pyautogui.moveRel(1, 1, duration=0.25)
# mouse mover
pyautogui.moveRel(randx, randy, duration=0.25)
# print messages
if doublezero == False:
print(f'{iteration} seconds {randx}, {randy}')
else:
print(f'{iteration} seconds {randx}, {randy}, {doublezero}')
# replay checker
replay = input('Program over.\nRun again? Y/N: ').upper()
while replay not in ('Y', 'N'):
replay = input('INVALID RESPONSE\nRun again? Y/N: ').upper()
|
[
"pyautogui.moveRel",
"random.randint",
"time.sleep"
] |
[((1113, 1126), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1123, 1126), False, 'import pyautogui, time, random\n'), ((1251, 1265), 'time.sleep', 'time.sleep', (['(15)'], {}), '(15)\n', (1261, 1265), False, 'import pyautogui, time, random\n'), ((1430, 1451), 'random.randint', 'random.randint', (['(-4)', '(4)'], {}), '(-4, 4)\n', (1444, 1451), False, 'import pyautogui, time, random\n'), ((1461, 1482), 'random.randint', 'random.randint', (['(-4)', '(4)'], {}), '(-4, 4)\n', (1475, 1482), False, 'import pyautogui, time, random\n'), ((1708, 1754), 'pyautogui.moveRel', 'pyautogui.moveRel', (['randx', 'randy'], {'duration': '(0.25)'}), '(randx, randy, duration=0.25)\n', (1725, 1754), False, 'import pyautogui, time, random\n'), ((1606, 1646), 'pyautogui.moveRel', 'pyautogui.moveRel', (['(-1)', '(-1)'], {'duration': '(0.25)'}), '(-1, -1, duration=0.25)\n', (1623, 1646), False, 'import pyautogui, time, random\n'), ((1650, 1688), 'pyautogui.moveRel', 'pyautogui.moveRel', (['(1)', '(1)'], {'duration': '(0.25)'}), '(1, 1, duration=0.25)\n', (1667, 1688), False, 'import pyautogui, time, random\n')]
|
from distutils.core import setup
from Cython.Build import cythonize
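# A typical build command for a setup.py like this (assuming the .pyx file
# sits next to it) would be: python setup.py build_ext --inplace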
setup(
ext_modules = cythonize("pyiArduinoI2Cencoder.pyx")
)
|
[
"Cython.Build.cythonize"
] |
[((93, 130), 'Cython.Build.cythonize', 'cythonize', (['"""pyiArduinoI2Cencoder.pyx"""'], {}), "('pyiArduinoI2Cencoder.pyx')\n", (102, 130), False, 'from Cython.Build import cythonize\n')]
|
import logging
from ..base.twilltestcase import common, ShedTwillTestCase
log = logging.getLogger(__name__)
category_name = 'Test 1460 Data Manager'
category_description = 'Test script 1460 for testing Data Managers'
data_manager_repository_name = 'data_manager_1460'
data_manager_repository_description = 'Repository that contains a Data Manager'
data_manager_repository_long_description = f'{data_manager_repository_name}: {data_manager_repository_description}'
data_manager_name = 'testing_data_manager'
data_manager_tar_file = '1460_files/data_manager_files/test_data_manager.tar'
'''
1. Add a Data Manager to the toolshed
2. Install the Data Manager
3. Check the Data Manager tool in Galaxy
'''
# TODO: Allow testing actual Execution of installed Data Manager Tool.
class TestDataManagers(ShedTwillTestCase):
'''Test installing a repository containing a Data Manager.'''
def test_0000_initiate_users_and_category(self):
"""Create necessary user accounts and login as an admin user."""
self.login(email=common.admin_email, username=common.admin_username)
admin_user = self.test_db_util.get_user(common.admin_email)
assert admin_user is not None, f'Problem retrieving user with email {common.admin_email} from the database'
self.test_db_util.get_private_role(admin_user)
self.create_category(name=category_name, description=category_description)
self.login(email=common.test_user_2_email, username=common.test_user_2_name)
test_user_2 = self.test_db_util.get_user(common.test_user_2_email)
assert test_user_2 is not None, f'Problem retrieving user with email {common.test_user_2_email} from the database'
self.test_db_util.get_private_role(test_user_2)
self.login(email=common.test_user_1_email, username=common.test_user_1_name)
test_user_1 = self.test_db_util.get_user(common.test_user_1_email)
assert test_user_1 is not None, f'Problem retrieving user with email {common.test_user_1_email} from the database'
self.test_db_util.get_private_role(test_user_1)
def test_0010_create_data_manager_repository(self):
'''Create and populate data_manager_1460.
This is step 1 - Create repository data_manager_1460.
Create and populate a repository that contains a Data manager.
'''
category = self.test_db_util.get_category_by_name(category_name)
repository = self.get_or_create_repository(name=data_manager_repository_name,
description=data_manager_repository_description,
long_description=data_manager_repository_long_description,
owner=common.test_user_1_name,
category_id=self.security.encode_id(category.id),
strings_displayed=[])
# Upload the data manager files to the repository.
self.upload_file(repository,
filename=data_manager_tar_file,
filepath=None,
valid_tools_only=True,
uncompress_file=True,
remove_repo_files_not_in_tar=False,
commit_message=f'Populate {data_manager_repository_name} with a data manager configuration.',
strings_displayed=[],
strings_not_displayed=[])
def test_0020_install_data_manager_repository(self):
'''Install the data_manager_1460 repository to galaxy.
This is step 3 - Attempt to install the repository into a galaxy instance, verify that it is installed.
'''
self.galaxy_login(email=common.admin_email, username=common.admin_username)
post_submit_strings_displayed = [data_manager_repository_name]
self.install_repository(data_manager_repository_name,
common.test_user_1_name,
category_name,
install_tool_dependencies=True,
post_submit_strings_displayed=post_submit_strings_displayed)
def test_0030_verify_data_manager_tool(self):
'''Verify that the data_manager_1460 repository is installed and Data Manager tool appears in list in Galaxy.'''
repository = self.test_db_util.get_installed_repository_by_name_owner(data_manager_repository_name, common.test_user_1_name)
strings_displayed = ['status', 'jobs', data_manager_name]
self.display_installed_jobs_list_page(repository, data_manager_names=data_manager_name, strings_displayed=strings_displayed)
def test_0040_verify_data_manager_data_table(self):
'''Verify that the installed repository populated shed_tool_data_table.xml and the sample files.'''
self.verify_installed_repository_data_table_entries(required_data_table_entries=['data_manager_test_data_table'])
|
[
"logging.getLogger"
] |
[((82, 109), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (99, 109), False, 'import logging\n')]
|
"""
Test program that:
connects to the xbee unit on COM3 (the exact port depends on the current computer)
sends test1.wav
stores all actions in a continuing log
"""
import logging
import sys
import time
import serial
import wave
from communication import Communication
def main():
# Setup logging
logger = logging.getLogger('control')
logger.setLevel(logging.DEBUG)
filehandler = logging.FileHandler('main.log')
filehandler.setLevel(logging.DEBUG)
console = logging.StreamHandler()
console.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
filehandler.setFormatter(formatter)
console.setFormatter(formatter)
logger.addHandler(filehandler)
logger.addHandler(console)
# Connect to xBee
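    # Communication is this project's wrapper around the serial/xbee link;
    # the second argument is assumed to be a timeout in seconds.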
com = Communication(COM_CONNECTION_STRING, 0.1)
logger.debug("Connected to wireless communication tansceiver")
#hardcoded file format
n_channels = 1
sample_width = 2
framerate = 44100
n_frames = 204800
comp_type = "NONE"
comp_name = "not compressed"
#Send file
logger.debug("Sending test file")
f = wave.open("test1.wav", "rb")
logger.debug("File opened")
bytesSent = 0
lastBytesSent = 0
f1 = f.readframes(n_frames)
#send file
for x in f1:
com.sendAudio(bytes([x]))
bytesSent = bytesSent + 1
if bytesSent >= lastBytesSent + 1024:
lastBytesSent = lastBytesSent + 1024
logger.debug(bytesSent)
#file sent
f.close()
logger.debug("File sent")
# Program end
logger.debug("Finished program.")
sys.exit(0)
if __name__ == "__main__":
COM_CONNECTION_STRING = 'COM3'
main()
|
[
"wave.open",
"logging.FileHandler",
"logging.StreamHandler",
"communication.Communication",
"logging.Formatter",
"sys.exit",
"logging.getLogger"
] |
[((307, 335), 'logging.getLogger', 'logging.getLogger', (['"""control"""'], {}), "('control')\n", (324, 335), False, 'import logging\n'), ((389, 420), 'logging.FileHandler', 'logging.FileHandler', (['"""main.log"""'], {}), "('main.log')\n", (408, 420), False, 'import logging\n'), ((475, 498), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (496, 498), False, 'import logging\n'), ((551, 624), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (568, 624), False, 'import logging\n'), ((800, 841), 'communication.Communication', 'Communication', (['COM_CONNECTION_STRING', '(0.1)'], {}), '(COM_CONNECTION_STRING, 0.1)\n', (813, 841), False, 'from communication import Communication\n'), ((1145, 1173), 'wave.open', 'wave.open', (['"""test1.wav"""', '"""rb"""'], {}), "('test1.wav', 'rb')\n", (1154, 1173), False, 'import wave\n'), ((1661, 1672), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1669, 1672), False, 'import sys\n')]
|
import json
import jsonschema
from jsonschema import validate
import abc
import error as error
from Estimator import Estimator
import sys
import numpy as np # vectors and matrices
import pandas as pd # tables and data manipulations
from dateutil.relativedelta import relativedelta # working with dates with style
from scipy.optimize import minimize # for function minimization
import statsmodels.formula.api as smf # statistics and econometrics
import statsmodels.tsa.api as smt
import statsmodels.api as sm
import scipy.stats as scs
from itertools import product # some useful functions
from tqdm import tqdm_notebook
import warnings # `do not disturbe` mode
warnings.filterwarnings('ignore')
# Describe what kind of json you expect.
tripleESSchema = {
"type": "object",
"properties": {
"estimator": {
"type": "string"
},
"season_length": {
"type": "number"
},
"scaling_factor": {
"type": "number"
}
},
"required": ["estimator", "season_length"],
"additionalProperties": False
}
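# Example of a config document this schema accepts (illustrative values only):
#   {"estimator": "TripleES", "season_length": 24, "scaling_factor": 2.5}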
class TripleES(Estimator):
def __init__(self, jsonData):
super().__init__()
self.nick = 'TripleES'
try:
validate(instance=jsonData, schema=tripleESSchema)
except jsonschema.exceptions.ValidationError as err:
template = "An exception of type {0} occurred. Arguments: {1!r}"
message = template.format(type(err).__name__, err.args)
print(message)
raise ValueError(error.errors['tripleES_config'])
self.parse(jsonData)
self.estimator = self
def parse(self, jsonData):
self.is_regr = True
if 'scaling_factor' in jsonData:
self.scaling_factor = jsonData['scaling_factor']
else:
self.scaling_factor = 1.96
self.season_length = jsonData['season_length']
sys.path.insert(1, 'output')
import TripleES_OM
self.output_manager = TripleES_OM.TripleES_OM(self)
def process(self, prep, cv, X_train, y_train):
# initializing model parameters alpha, beta and gamma
x = [0, 0, 0]
# Minimizing the loss function
from scipy.optimize import minimize
from sklearn.metrics import mean_squared_error, mean_squared_log_error
        # read cv.metrics; depending on its string value, pass the matching metric object
if 'mean_squared_log_error' in cv.metrics:
abg = minimize(timeseriesCVscore, x0=x,
args=(X_train, cv.cv, mean_squared_log_error, self.season_length),
method="TNC", bounds = ((0, 1), (0, 1), (0, 1))
)
elif 'mean_squared_error' in cv.metrics: #usable also for rootMSE
abg = minimize(timeseriesCVscore, x0=x,
args=(X_train, cv.cv, mean_squared_error, self.season_length),
method="TNC", bounds = ((0, 1), (0, 1), (0, 1))
)
else:
template = "An exception of type {0} occurred. Arguments: {1!r}"
message = template.format('Wrong metrics configuration parameter', cv.metrics)
raise ValueError(message)
# Take optimal values...
#best_estimator = abg.x
self.alpha = abg.x[0]
self.beta = abg.x[1]
self.gamma = abg.x[2]
self.X_train = X_train
        #return best_estimator  # the estimator must return alpha, beta and gamma
return self
def predict(self, X_test):
self.model = HoltWinters(self.X_train, self.season_length, self.alpha, self.beta, self.gamma, len(X_test), self.scaling_factor)
self.model.triple_exponential_smoothing()
predictions = self.model.result[-len(X_test):]
return predictions
def predict_from_series(self, series, n_preds):
res = []
for i in range(len(series)):
if len(series[i])-2*self.season_length<0:
print(f'Skipped series nr. {i}, as too short. A series should be long at least two times the season length')
continue
self.model = HoltWinters(series[i], self.season_length, self.alpha, self.beta, self.gamma, n_preds, self.scaling_factor)
self.model.triple_exponential_smoothing()
predictions = self.model.result[-n_preds:]
res.append(predictions)
return res
class HoltWinters:
"""
    Holt-Winters model with anomaly detection using the Brutlag method
    # series - initial time series
    # slen - length of a season <- parameter supplied in the est_tscv json
    # alpha, beta, gamma - Holt-Winters model coefficients <- output params
    # n_preds - predictions horizon <- parameter defined a priori
    # scaling_factor - sets the width of the confidence interval by Brutlag
    # (usually takes values from 2 to 3) <- could be a hyperparameter for CV
"""
def __init__(self, series, slen, alpha, beta, gamma, n_preds, scaling_factor=1.96):
self.series = series
self.slen = slen
self.alpha = alpha
self.beta = beta
self.gamma = gamma
self.n_preds = n_preds
self.scaling_factor = scaling_factor
def initial_trend(self):
sum = 0.0
for i in range(self.slen):
sum += float(self.series[i+self.slen] - self.series[i]) / self.slen
return sum / self.slen
def initial_seasonal_components(self):
seasonals = {}
season_averages = []
n_seasons = int(len(self.series)/self.slen)
# let's calculate season averages
for j in range(n_seasons):
season_averages.append(sum(self.series[self.slen*j:self.slen*j+self.slen])/float(self.slen))
# let's calculate initial values
for i in range(self.slen):
sum_of_vals_over_avg = 0.0
for j in range(n_seasons):
sum_of_vals_over_avg += self.series[self.slen*j+i]-season_averages[j]
seasonals[i] = sum_of_vals_over_avg/n_seasons
return seasonals
def triple_exponential_smoothing(self):
self.result = []
self.Smooth = []
self.Season = []
self.Trend = []
self.PredictedDeviation = []
seasonals = self.initial_seasonal_components()
for i in range(len(self.series)+self.n_preds):
if i == 0: # components initialization
smooth = self.series[0]
trend = self.initial_trend()
self.result.append(self.series[0])
self.Smooth.append(smooth)
self.Trend.append(trend)
self.Season.append(seasonals[i%self.slen])
self.PredictedDeviation.append(0)
continue
if i >= len(self.series): # predicting
m = i - len(self.series) + 1
self.result.append((smooth + m*trend) + seasonals[i%self.slen])
# when predicting we increase uncertainty on each step
self.PredictedDeviation.append(self.PredictedDeviation[-1]*1.01)
else:
val = self.series[i]
last_smooth, smooth = smooth, self.alpha*(val-seasonals[i%self.slen]) + (1-self.alpha)*(smooth+trend)
trend = self.beta * (smooth-last_smooth) + (1-self.beta)*trend
seasonals[i%self.slen] = self.gamma*(val-smooth) + (1-self.gamma)*seasonals[i%self.slen]
self.result.append(smooth+trend+seasonals[i%self.slen])
# Deviation is calculated according to Brutlag algorithm.
self.PredictedDeviation.append(self.gamma * np.abs(self.series[i] - self.result[i])
+ (1-self.gamma)*self.PredictedDeviation[-1])
self.Smooth.append(smooth)
self.Trend.append(trend)
self.Season.append(seasonals[i%self.slen])
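# Minimal usage sketch for HoltWinters (illustrative values; the series must
# span at least two seasons, i.e. len(series) >= 2 * slen):
#   model = HoltWinters(series=data, slen=24, alpha=0.3, beta=0.05, gamma=0.1,
#                       n_preds=24, scaling_factor=1.96)
#   model.triple_exponential_smoothing()
#   forecast = model.result[-24:]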
from sklearn.model_selection import TimeSeriesSplit
from sklearn.metrics import mean_squared_error
def timeseriesCVscore(params, series, cv, loss_function, slen):
"""
Returns error on CV
params - vector of parameters for optimization
series - dataset with timeseries
slen - season length for Holt-Winters model
"""
# errors array
errors_arr = []
values = series.values
alpha, beta, gamma = params
# set the number of folds for cross-validation
tscv = TimeSeriesSplit(n_splits=cv)
# iterating over folds, train model on each, forecast and calculate error
for train, test in tscv.split(values):
try:
n=len(train)-2*slen
assert n > 0
except AssertionError as err:
template = "An exception of type {0} occurred"
message = template.format(type(err).__name__)
print(message)
raise ValueError(error.errors['tripleES_wrong_nsplits'])
model = HoltWinters(series=values[train], slen=slen,
alpha=alpha, beta=beta, gamma=gamma, n_preds=len(test))
model.triple_exponential_smoothing()
predictions = model.result[-len(test):]
actual = values[test]
error_arr = loss_function(predictions, actual)
errors_arr.append(error_arr)
return np.mean(np.array(errors_arr))
|
[
"jsonschema.validate",
"scipy.optimize.minimize",
"numpy.abs",
"warnings.filterwarnings",
"sys.path.insert",
"sklearn.model_selection.TimeSeriesSplit",
"numpy.array",
"TripleES_OM.TripleES_OM"
] |
[((802, 835), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (825, 835), False, 'import warnings\n'), ((8825, 8853), 'sklearn.model_selection.TimeSeriesSplit', 'TimeSeriesSplit', ([], {'n_splits': 'cv'}), '(n_splits=cv)\n', (8840, 8853), False, 'from sklearn.model_selection import TimeSeriesSplit\n'), ((2094, 2122), 'sys.path.insert', 'sys.path.insert', (['(1)', '"""output"""'], {}), "(1, 'output')\n", (2109, 2122), False, 'import sys\n'), ((2180, 2209), 'TripleES_OM.TripleES_OM', 'TripleES_OM.TripleES_OM', (['self'], {}), '(self)\n', (2203, 2209), False, 'import TripleES_OM\n'), ((9712, 9732), 'numpy.array', 'np.array', (['errors_arr'], {}), '(errors_arr)\n', (9720, 9732), True, 'import numpy as np\n'), ((1384, 1434), 'jsonschema.validate', 'validate', ([], {'instance': 'jsonData', 'schema': 'tripleESSchema'}), '(instance=jsonData, schema=tripleESSchema)\n', (1392, 1434), False, 'from jsonschema import validate\n'), ((2680, 2836), 'scipy.optimize.minimize', 'minimize', (['timeseriesCVscore'], {'x0': 'x', 'args': '(X_train, cv.cv, mean_squared_log_error, self.season_length)', 'method': '"""TNC"""', 'bounds': '((0, 1), (0, 1), (0, 1))'}), "(timeseriesCVscore, x0=x, args=(X_train, cv.cv,\n mean_squared_log_error, self.season_length), method='TNC', bounds=((0, \n 1), (0, 1), (0, 1)))\n", (2688, 2836), False, 'from scipy.optimize import minimize\n'), ((2972, 3119), 'scipy.optimize.minimize', 'minimize', (['timeseriesCVscore'], {'x0': 'x', 'args': '(X_train, cv.cv, mean_squared_error, self.season_length)', 'method': '"""TNC"""', 'bounds': '((0, 1), (0, 1), (0, 1))'}), "(timeseriesCVscore, x0=x, args=(X_train, cv.cv, mean_squared_error,\n self.season_length), method='TNC', bounds=((0, 1), (0, 1), (0, 1)))\n", (2980, 3119), False, 'from scipy.optimize import minimize\n'), ((7999, 8038), 'numpy.abs', 'np.abs', (['(self.series[i] - self.result[i])'], {}), '(self.series[i] - self.result[i])\n', (8005, 8038), True, 'import numpy as np\n')]
|
import json
from typing import Any, Dict, Optional, Union
from django.db.models import Exists, OuterRef, Q
from posthog.utils import is_valid_regex
class Property:
key: str
operator: Optional[str]
value: str
type: str
def __init__(
self, key: str, value: str, operator: Optional[str] = None, type: Optional[str] = None, **kwargs
) -> None:
self.key = key
self.value = value
self.operator = operator
self.type = type if type else "event"
def __repr__(self):
return "Property({}: {}{}={})".format(
self.type, self.key, "__{}".format(self.operator) if self.operator else "", self.value,
)
def to_dict(self) -> Dict[str, Any]:
return {
"key": self.key,
"value": self.value,
"operator": self.operator,
"type": self.type,
}
def _parse_value(self, value: Union[int, str]) -> Union[int, str, bool]:
if value == "true":
return True
if value == "false":
return False
if isinstance(value, int):
return value
try:
return json.loads(value)
except (json.JSONDecodeError, TypeError):
return value
def property_to_Q(self) -> Q:
from .cohort import CohortPeople
value = self._parse_value(self.value)
if self.type == "cohort":
return Q(Exists(CohortPeople.objects.filter(cohort_id=int(value), person_id=OuterRef("id"),).only("id")))
if self.operator == "is_not":
return Q(~Q(**{"properties__{}".format(self.key): value}) | ~Q(properties__has_key=self.key))
if self.operator == "is_set":
return Q(**{"properties__{}__isnull".format(self.key): False})
if self.operator == "is_not_set":
return Q(**{"properties__{}__isnull".format(self.key): True})
if self.operator in ("regex", "not_regex") and not is_valid_regex(value):
# Return no data for invalid regexes
return Q(pk=-1)
if isinstance(self.operator, str) and self.operator.startswith("not_"):
return Q(
~Q(**{"properties__{}__{}".format(self.key, self.operator[4:]): value})
| ~Q(properties__has_key=self.key)
| Q(**{"properties__{}".format(self.key): None})
)
return Q(**{"properties__{}{}".format(self.key, f"__{self.operator}" if self.operator else ""): value})
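# Illustrative usage (the Event queryset below is hypothetical, not part of
# this module):
#   q = Property(key="$browser", value="Chrome").property_to_Q()
#   matching_events = Event.objects.filter(q)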
|
[
"django.db.models.Q",
"django.db.models.OuterRef",
"json.loads",
"posthog.utils.is_valid_regex"
] |
[((1165, 1182), 'json.loads', 'json.loads', (['value'], {}), '(value)\n', (1175, 1182), False, 'import json\n'), ((2057, 2065), 'django.db.models.Q', 'Q', ([], {'pk': '(-1)'}), '(pk=-1)\n', (2058, 2065), False, 'from django.db.models import Exists, OuterRef, Q\n'), ((1966, 1987), 'posthog.utils.is_valid_regex', 'is_valid_regex', (['value'], {}), '(value)\n', (1980, 1987), False, 'from posthog.utils import is_valid_regex\n'), ((1645, 1676), 'django.db.models.Q', 'Q', ([], {'properties__has_key': 'self.key'}), '(properties__has_key=self.key)\n', (1646, 1676), False, 'from django.db.models import Exists, OuterRef, Q\n'), ((2275, 2306), 'django.db.models.Q', 'Q', ([], {'properties__has_key': 'self.key'}), '(properties__has_key=self.key)\n', (2276, 2306), False, 'from django.db.models import Exists, OuterRef, Q\n'), ((1503, 1517), 'django.db.models.OuterRef', 'OuterRef', (['"""id"""'], {}), "('id')\n", (1511, 1517), False, 'from django.db.models import Exists, OuterRef, Q\n')]
|
# main file for l2race model server, run this class to start the model server
import sys
sys.path.insert(0, './commonroad-vehicle-models/PYTHON/')
import argparse
import atexit
import copy
import socket, pickle
from queue import Empty
from typing import Dict, Tuple, List, Optional
import argcomplete
from timeit import default_timer as timer
from time import sleep
import multiprocessing as mp
from src.car_state import car_state
from src.l2race_utils import set_logging_level, loop_timer, become_daemon, \
find_unbound_port_in_range
from src.my_args import server_args
from src.car_model import car_model
from src.globals import *
from src.track import track, list_tracks
from src.l2race_utils import my_logger
logger = my_logger(__name__)
SKIP_CHECK_SERVER_QUEUE = 0 # use to reduce checking queue, but causes timeout problems with adding car if too big. 0 to disable
MAX_TIMESTEP = 0.1 # Max timestep of car model simulation. We limit it to avoid instability
def get_args():
parser = argparse.ArgumentParser(
description='l2race client: run this if you are a racer.',
epilog='Run with no arguments to open dialog for server IP', allow_abbrev=True,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser = server_args(parser)
argcomplete.autocomplete(parser)
args = parser.parse_args()
return args
def send_message(socket: socket, lock: mp.Lock, client_addr: Tuple[str, int], msg: object):
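    """Pickle msg and send it to client_addr over the given UDP socket,
    holding lock (if one is supplied) around the send."""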
try:
logger.debug('sending msg {} to client {}'.format(msg, client_addr))
p = pickle.dumps(msg)
if lock: lock.acquire()
try:
socket.sendto(p, client_addr)
except OSError as e:
logger.error('failed sending msg {} to client {}: {}'.format(msg, client_addr, e))
finally:
if lock: lock.release()
class track_server_process(mp.Process):
''' The main process that runs each track.'''
def __init__(self,
queue_from_server: mp.Queue,
server_port_lock: mp.Lock(),
server_socket: socket,
track_name=None,
port: int = None,
allow_off_track=False):
super(track_server_process, self).__init__(name='track_server_process-{}'.format(track_name))
self.server_queue = queue_from_server
self.server_port_lock = server_port_lock
self.server_socket = server_socket # used for initial communication to client who has not yet sent anything to us on the new port
self.track_name = track_name
self.track = None # create after start since Process.spawn cannot pickle it
self.car_dict: Dict[Tuple[str, int], car_model] = None # maps from client_addr to car_model (or None if a spectator)
# each client process should bind it's own unique local port (on remote client) so should be unique in dict
self.car_states_list: List[car_state] = None # list of all car states, to send to clients and put in each car's state
self.spectator_list: List[Tuple[str, int]] = None # maps from client_addr to car_model (or None if a spectator)
self.track_socket: Optional[socket] = None # make a new datagram socket
self.local_port_number = port
self.track_socket_address = None # get the port info for our local port
self.exit = False
self.last_message_time = timer() # used to terminate ourselves if no messages for some time
self.skip_checking_server_queue_count = 0
self.allow_off_track = allow_off_track
atexit.register(self.cleanup)
def cleanup(self):
logger.info('cleaning up {} process'.format(self.track_name))
self.send_all_clients_string_message('track has shut down')
if self.car_dict:
for c in self.car_dict.keys():
self.send_client_msg(c,'track_shutdown', 'track server has shut down')
if self.spectator_list:
for s in self.spectator_list:
self.send_client_msg(s, 'track_shutdown', 'track server has shut down')
        # drain any items left in the server queue
        if self.server_queue:
            try:
                while not self.server_queue.empty():
                    self.server_queue.get(block=False)
            except Empty:
                pass
try:
sleep(1)
except Exception:
pass
self.server_queue.close()
if self.track_socket:
self.track_socket.close()
def run(self):
# logger.setLevel(logging.DEBUG)
logger.info("Starting track process track {}".format(self.track_name))
self.track = track(self.track_name)
self.car_dict = dict() # maps from client_addr to car_model (or None if a spectator)
self.car_states_list = list() # list of all car states, to send to clients and put in each car's state
self.spectator_list = list() # maps from client_addr to car_model (or None if a spectator)
self.track_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # make a new datagram socket
self.track_socket.settimeout(0) # put track socket in nonblocking mode to just poll for client messages
# find range of ports we can try to open for client to connect to
try:
self.track_socket.bind(('0.0.0.0', self.local_port_number))
except Exception as e:
logger.error('track process aborting: could not bind to the local port {} that server told us to use: got {}'.format(self.local_port_number, e))
raise e
self.track_socket_address = self.track_socket.getsockname() # get the port info for our local port
logger.info('for track {} bound free local UDP port address {}'.format(self.track_name, self.local_port_number))
last_time = timer()
# Track process makes a single socket bound to a single port for all the clients (cars and spectators).
# To handle multiple clients, when it gets a message from a client, it responds to the client using the client address.
looper = loop_timer(MODEL_UPDATE_RATE_HZ)
looper.LOG_INTERVAL_SEC=60
while not self.exit:
now = timer()
dt = now - last_time
last_time = now
if now - self.last_message_time > KILL_ZOMBIE_TRACK_TIMEOUT_S:
logger.warning('track process {} got no input for {}s, terminating'.format(self.track_name, KILL_ZOMBIE_TRACK_TIMEOUT_S))
self.exit = True
self.cleanup()
continue
self.process_server_queue() # 'add_car' 'add_spectator'
# Here we make the constrained real time from real time
# If requested timestep bigger than maximal timestep, make the update for maximal allowed timestep
# We limit timestep to avoid instability
if dt > MAX_TIMESTEP:
s = 'bounded real dt_sec={:.1f}ms to {:.2f}ms'.format(dt * 1000, MAX_TIMESTEP * 1000)
logger.info(s)
dt = MAX_TIMESTEP
# now we do main simulation/response
# update all the car models
for client, model in self.car_dict.items():
if isinstance(model, car_model):
model.update(dt) # car_state time updates already here
model.time += dt # car_model time updates here
# poll for UDP messages
# update the global list of car states that cars share
self.car_states_list.clear()
for model in self.car_dict.values():
# put copy of each state in list but strip off the contained list of other car states
model_copy: car_state = copy.copy(model.car_state)
self.car_states_list.append(model_copy)
# process incoming UDP messages from clients, e.g. to update command
while True:
try:
msg, payload, client = self.receive_msg()
self.handle_client_msg(msg, payload, client)
except socket.timeout:
break
except BlockingIOError:
break
except Exception as e:
logger.warning('caught Exception {} while processing UDP messages from client'.format(e))
break
try:
looper.sleep_leftover_time()
except KeyboardInterrupt:
logger.info('KeyboardInterrupt, stopping server')
self.exit = True
continue
self.cleanup()
logger.info('ended track {}'.format(self.track_name))
    def receive_msg(self) -> Tuple[str, object, Tuple[str, int]]:
"""
receives a message from client using track's socket
:returns msg, payload, client - msg is a str, payload is an object, and client is Tuple[str,int] """
p, client = self.track_socket.recvfrom(2048)
(msg, payload) = pickle.loads(p)
logger.debug('got msg={} with payload={} from client {}'.format(msg, payload, client))
return msg, payload, client
def send_client_msg(self, client, msg, payload):
""" sends message back to client using the track's socket"""
send_message(self.track_socket, None, client, (msg, payload))
def send_client_string_message(self,client, msg):
logger.info('sending client {} string_message {}'.format(client,msg))
self.send_client_msg(client, 'string_message',msg)
def send_all_clients_string_message(self, msg):
logger.info('sending all clients the string_message {}'.format(msg))
if self.car_dict:
for c in self.car_dict.keys():
self.send_client_string_message(c, msg)
if self.spectator_list:
for s in self.spectator_list:
self.send_client_string_message(s, msg)
def handle_client_msg(self, msg, payload, client):
""" handles the client messages """
logger.debug('handling msg={} with payload={} from client {}'.format(msg, payload, client))
self.last_message_time = timer()
# check if spectator or car
if msg == 'command':
car_model = self.car_dict.get(client)
if car_model is None:
logger.warning('car model=None for client {}'.format(client))
return
car_model.car_state.command = payload # update our car_state command input
# respond with complete state of all cars
self.send_states(client)
elif msg == 'send_states':
self.send_states(client)
elif msg == 'restart_car':
self.restart_car(client, payload)
elif msg == 'remove_car':
car_model = self.car_dict.get(client)
if not car_model is None:
logger.info('removing car {} from track {}'.format(car_model.car_state.static_info.name, self.track_name))
del self.car_dict[client]
elif msg == 'remove_spectator':
logger.info('removing spectator {} from track {}'.format(client, self.track_name))
self.spectator_list.remove(client)
else:
logger.warning('unknown cmd {} received; ignoring'.format(msg))
def send_states(self, client):
msg = 'state'
payload = self.car_states_list # client works out which one belongs to it from the client_ip
self.send_client_msg(client, msg, payload)
def add_car_to_track(self, car_name, client_addr):
""" adds a car to this track """
if len(self.car_dict)>=MAX_CARS_PER_TRACK:
self.send_client_string_message(client_addr, 'ERROR: already have maximum of {} cars'.format(MAX_CARS_PER_TRACK))
return
if self.car_dict.get(client_addr):
logger.warning('client at {} already has a car model, replacing it with a new model'.format(client_addr))
logger.info('adding car model for car named {} from client {} to track {}'.format(car_name, client_addr, self.track_name))
mod = car_model(track=self.track, car_name=car_name, client_ip=client_addr, allow_off_track=self.allow_off_track)
self.car_dict[client_addr] = mod
def add_spectator_to_track(self, client_addr):
""" adds a spectator to this track """
if len(self.spectator_list)>=MAX_SPECTATORS_PER_TRACK:
self.send_client_string_message(client_addr, 'ERROR: already have maximum of {} spectators'.format(MAX_SPECTATORS_PER_TRACK))
return
logger.debug('adding spectator from client {} to track {}'.format(client_addr, self.track_name))
self.spectator_list.append(client_addr)
def process_server_queue(self):
if SKIP_CHECK_SERVER_QUEUE > 0:
self.skip_checking_server_queue_count += 1
if self.skip_checking_server_queue_count % SKIP_CHECK_SERVER_QUEUE != 0: return
while not self.server_queue.empty():
(cmd, payload) = self.server_queue.get_nowait()
self.handle_server_msg(cmd, payload)
pass
def handle_server_msg(self, cmd, payload):
logger.debug('got queue message from server manager cmd={} payload={}'.format(cmd, payload))
self.last_message_time = timer()
if cmd == 'stop':
logger.info('track {} stopping'.format(self.track_name))
self.cleanup()
self.exit = True
elif cmd == 'add_car':
(car_name, client_addr) = payload
self.add_car_to_track(car_name, client_addr)
elif cmd == 'add_spectator':
client_addr = payload
self.add_spectator_to_track(client_addr)
else:
raise RuntimeWarning('unknown cmd {}'.format(cmd))
def restart_car(self, client,message):
model=self.car_dict.get(client)
if model:
name=model.car_state.static_info.name
logger.info('got request from client {} to restart its car named {} on track {} with message'
.format(client, name, self.track_name,message))
model.restart()
else:
            logger.warning('request to restart car from client {} has no car model'.format(client))
return
if __name__ == '__main__':
try:
from scripts.regsetup import description
from gooey import Gooey # pip install Gooey
except Exception:
logger.info('Gooey GUI builder not available, will use command line arguments.\n'
'Install with "pip install Gooey". See README')
try:
ga = Gooey(get_args, program_name="l2race server", default_size=(575, 600))
logger.info('Use --ignore-gooey to disable GUI and run with command line arguments')
ga()
except:
logger.info('Gooey GUI not available, using command line arguments. \n'
'You can try to install with "pip install Gooey"')
args = get_args()
set_logging_level(args)
server_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
server_socket.bind(('', args.port)) # bind to empty host, so we can receive from anyone on this port
logger.info("waiting on {}".format(str(server_socket)))
server_port_lock = mp.Lock() # processes get passed this lock to initiate connections using it (but only once, at start)
track_names = list_tracks()
track_processes: Dict[str, track_server_process] = {k: None for k in track_names} # each entry holds the track objects for each track name
track_queues: Dict[str, mp.Queue] = {k: None for k in track_names} # each entry is the queue to send to track process
def make_track_process(track_name, client_addr, allow_off_track=False) -> mp.Process:
p = track_processes.get(track_name)
        if p is not None and p.is_alive():
track_port_number = p.local_port_number
send_game_port_to_client(client_addr, track_port_number)
logger.info('track process {} already exists already and is alive'.format(p.name))
return track_processes.get(track_name)
else:
track_port_number = find_unbound_port_in_range(CLIENT_PORT_RANGE)
send_game_port_to_client(client_addr, track_port_number)
logger.info('starting a new track_server_process for track {} for client at {} using local port {}'
.format(track_name, client_addr, track_port_number))
q = mp.Queue()
track_queues[track_name] = q
track_process = track_server_process(queue_from_server=q,
server_port_lock=server_port_lock,
server_socket=server_socket,
track_name=track_name,
port=track_port_number,
allow_off_track=allow_off_track)
track_processes[track_name] = track_process
track_processes[track_name].start()
return track_process
def send_game_port_to_client(client_addr: Tuple[str, int], port: int):
logger.info('sending game_port message to client {} telling it to use our local port number {}'.format(client_addr, port))
# first message to client is the game port number
# send client the port they should use for this track
send_message(socket=server_socket, lock=server_port_lock,
client_addr=client_addr,
msg=('game_port', port))
def add_car_to_track(track_name, car_name, client_addr, allow_off_track=False):
make_track_process(track_name=track_name, client_addr=client_addr, allow_off_track=allow_off_track)
logger.info('putting message to track process for track {} to add car named {} for client {}'.format(track_name, car_name, client_addr))
q = track_queues.get(track_name)
if q:
q.put(('add_car', (car_name, client_addr)))
def add_spectator_to_track(track_name, client_addr):
make_track_process(track_name=track_name, client_addr=client_addr)
q = track_queues[track_name]
if q:
q.put(('add_spectator', client_addr))
def stop_all_track_processes():
for t, q in track_queues.items():
if q:
logger.info('telling track {} to stop'.format(t))
try:
q.put('stop')
except:
pass
sleep(1)
logger.info('joining processes')
for t, p in track_processes.items():
if p: p.join(1)
for t, p in track_processes.items():
if p and p.is_alive():
logger.info('terminating zombie track process {}'.format(p))
p.terminate()
track_processes.clear()
logger.info('closing queues')
for q in track_queues.values():
if q:
q.close()
q.join_thread()
track_queues.clear()
def cleanup_all():
logger.debug('cleaning up server main process')
stop_all_track_processes()
atexit.register(cleanup_all)
# We fork an mp process for each track that might have one or more cars and spectators.
# There is only a single instance of each track. Any clients that want to use that track share it.
# Each track also gets a Queue which main process uses to tell it when there are new cars for it.
# Each track runs single threaded to model all the cars in real time, and responds to commands (or spectate state requests) sent from clients with car states
# of all the cars on that track.
# Flow is like this:
# 1. Server waits on SERVER_PORT
# 2. Client sends newcar to SERVER_PORT
# 3. Server responds to same port on client with ack and new port
# 4. Client talks to track process on new port
# That way, client initiates communication on new port and should be able to receive on it
# handling processes based on https://www.cloudcity.io/blog/2019/02/27/things-i-wish-they-told-me-about-multiprocessing-in-python/
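    # Concretely: the client's first pickled datagram looks like
    # ('add_car', (track_name, car_name)); the server replies to the same
    # client address with ('game_port', port), and from then on the client
    # talks to the track process on that port.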
while True:
try:
server_port_lock.acquire()
data, client_addr = server_socket.recvfrom(1024) # buffer size is 1024 bytes
except KeyboardInterrupt:
logger.info('KeyboardInterrupt, stopping server')
break
finally:
server_port_lock.release()
try:
(cmd, payload) = pickle.loads(data)
except pickle.UnpicklingError as ex:
logger.warning('{}: garbled command, ignoring. \n'
'Client should send pickled 2-tuple (cmd, payload).\n '
'cmd="add_car|add_spectator"\n'
'payload (for add_car) =(track_name,car_name)\n'
'payload (for add_spectator) =(track_name)\n'
.format(ex))
continue
logger.info('received cmd "{}" with payload "{}" from {}'.format(cmd, payload, client_addr))
if cmd == 'ping':
msg = ('pong', None)
send_message(server_socket, server_port_lock, client_addr, msg)
elif cmd == 'add_car':
(track_name, car_name) = payload
add_car_to_track(track_name, car_name, client_addr, allow_off_track=args.allow_off_track)
elif cmd == 'add_spectator':
track_name = payload
add_spectator_to_track(track_name, client_addr)
else:
logger.warning('model server received unknown cmd={}'.format(cmd))
|
[
"atexit.register",
"argparse.ArgumentParser",
"multiprocessing.Lock",
"src.l2race_utils.set_logging_level",
"socket.socket",
"multiprocessing.Queue",
"gooey.Gooey",
"src.l2race_utils.find_unbound_port_in_range",
"pickle.dumps",
"pickle.loads",
"src.l2race_utils.my_logger",
"src.my_args.server_args",
"time.sleep",
"src.track.list_tracks",
"src.track.track",
"src.l2race_utils.loop_timer",
"timeit.default_timer",
"copy.copy",
"sys.path.insert",
"src.car_model.car_model",
"socket.sendto",
"argcomplete.autocomplete"
] |
[((89, 146), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""./commonroad-vehicle-models/PYTHON/"""'], {}), "(0, './commonroad-vehicle-models/PYTHON/')\n", (104, 146), False, 'import sys\n'), ((730, 749), 'src.l2race_utils.my_logger', 'my_logger', (['__name__'], {}), '(__name__)\n', (739, 749), False, 'from src.l2race_utils import my_logger\n'), ((1003, 1235), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""l2race client: run this if you are a racer."""', 'epilog': '"""Run with no arguments to open dialog for server IP"""', 'allow_abbrev': '(True)', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description=\n 'l2race client: run this if you are a racer.', epilog=\n 'Run with no arguments to open dialog for server IP', allow_abbrev=True,\n formatter_class=argparse.ArgumentDefaultsHelpFormatter)\n", (1026, 1235), False, 'import argparse\n'), ((1260, 1279), 'src.my_args.server_args', 'server_args', (['parser'], {}), '(parser)\n', (1271, 1279), False, 'from src.my_args import server_args\n'), ((1284, 1316), 'argcomplete.autocomplete', 'argcomplete.autocomplete', (['parser'], {}), '(parser)\n', (1308, 1316), False, 'import argcomplete\n'), ((15080, 15103), 'src.l2race_utils.set_logging_level', 'set_logging_level', (['args'], {}), '(args)\n', (15097, 15103), False, 'from src.l2race_utils import set_logging_level, loop_timer, become_daemon, find_unbound_port_in_range\n'), ((15125, 15173), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (15138, 15173), False, 'import socket, pickle\n'), ((15363, 15372), 'multiprocessing.Lock', 'mp.Lock', ([], {}), '()\n', (15370, 15372), True, 'import multiprocessing as mp\n'), ((15485, 15498), 'src.track.list_tracks', 'list_tracks', ([], {}), '()\n', (15496, 15498), False, 'from src.track import track, list_tracks\n'), ((19340, 19368), 'atexit.register', 'atexit.register', (['cleanup_all'], {}), '(cleanup_all)\n', (19355, 19368), False, 'import atexit\n'), ((1556, 1573), 'pickle.dumps', 'pickle.dumps', (['msg'], {}), '(msg)\n', (1568, 1573), False, 'import socket, pickle\n'), ((3394, 3401), 'timeit.default_timer', 'timer', ([], {}), '()\n', (3399, 3401), True, 'from timeit import default_timer as timer\n'), ((3569, 3598), 'atexit.register', 'atexit.register', (['self.cleanup'], {}), '(self.cleanup)\n', (3584, 3598), False, 'import atexit\n'), ((4713, 4735), 'src.track.track', 'track', (['self.track_name'], {}), '(self.track_name)\n', (4718, 4735), False, 'from src.track import track, list_tracks\n'), ((5070, 5118), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (5083, 5118), False, 'import socket, pickle\n'), ((5878, 5885), 'timeit.default_timer', 'timer', ([], {}), '()\n', (5883, 5885), True, 'from timeit import default_timer as timer\n'), ((6145, 6177), 'src.l2race_utils.loop_timer', 'loop_timer', (['MODEL_UPDATE_RATE_HZ'], {}), '(MODEL_UPDATE_RATE_HZ)\n', (6155, 6177), False, 'from src.l2race_utils import set_logging_level, loop_timer, become_daemon, find_unbound_port_in_range\n'), ((9091, 9106), 'pickle.loads', 'pickle.loads', (['p'], {}), '(p)\n', (9103, 9106), False, 'import socket, pickle\n'), ((10241, 10248), 'timeit.default_timer', 'timer', ([], {}), '()\n', (10246, 10248), True, 'from timeit import default_timer as timer\n'), ((12201, 12312), 'src.car_model.car_model', 'car_model', ([], {'track': 'self.track', 'car_name': 'car_name', 'client_ip': 'client_addr', 'allow_off_track': 'self.allow_off_track'}), '(track=self.track, car_name=car_name, client_ip=client_addr,\n allow_off_track=self.allow_off_track)\n', (12210, 12312), False, 'from src.car_model import car_model\n'), ((13395, 13402), 'timeit.default_timer', 'timer', ([], {}), '()\n', (13400, 13402), True, 'from timeit import default_timer as timer\n'), ((14711, 14781), 'gooey.Gooey', 'Gooey', (['get_args'], {'program_name': '"""l2race server"""', 'default_size': '(575, 600)'}), "(get_args, program_name='l2race server', default_size=(575, 600))\n", (14716, 14781), False, 'from gooey import Gooey\n'), ((18693, 18701), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (18698, 18701), False, 'from time import sleep\n'), ((1631, 1660), 'socket.sendto', 'socket.sendto', (['p', 'client_addr'], {}), '(p, client_addr)\n', (1644, 1660), False, 'import socket, pickle\n'), ((2026, 2035), 'multiprocessing.Lock', 'mp.Lock', ([], {}), '()\n', (2033, 2035), True, 'import multiprocessing as mp\n'), ((4398, 4406), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4403, 4406), False, 'from time import sleep\n'), ((6260, 6267), 'timeit.default_timer', 'timer', ([], {}), '()\n', (6265, 6267), True, 'from timeit import default_timer as timer\n'), ((16278, 16323), 'src.l2race_utils.find_unbound_port_in_range', 'find_unbound_port_in_range', (['CLIENT_PORT_RANGE'], {}), '(CLIENT_PORT_RANGE)\n', (16304, 16323), False, 'from src.l2race_utils import set_logging_level, loop_timer, become_daemon, find_unbound_port_in_range\n'), ((16598, 16608), 'multiprocessing.Queue', 'mp.Queue', ([], {}), '()\n', (16606, 16608), True, 'import multiprocessing as mp\n'), ((20699, 20717), 'pickle.loads', 'pickle.loads', (['data'], {}), '(data)\n', (20711, 20717), False, 'import socket, pickle\n'), ((7816, 7842), 'copy.copy', 'copy.copy', (['model.car_state'], {}), '(model.car_state)\n', (7825, 7842), False, 'import copy\n')]
|
# -*- coding: utf-8 -*-
# @Time : 2019/7/8 14:43
# @Author : <NAME>
from flask import render_template, session, redirect, url_for
from datetime import datetime
from . import main
from .forms import NameForm
@main.route('/', methods=['GET', 'POST'])
def index():
form = NameForm()
if form.validate_on_submit():
# ...
return redirect(url_for('.index'))
return render_template('index.html',
form=form, name=session.get('name'),
known=session.get('known', False),
current_time=datetime.utcnow())
|
[
"datetime.datetime.utcnow",
"flask.url_for",
"flask.session.get"
] |
[((363, 380), 'flask.url_for', 'url_for', (['""".index"""'], {}), "('.index')\n", (370, 380), False, 'from flask import render_template, session, redirect, url_for\n'), ((466, 485), 'flask.session.get', 'session.get', (['"""name"""'], {}), "('name')\n", (477, 485), False, 'from flask import render_template, session, redirect, url_for\n'), ((520, 547), 'flask.session.get', 'session.get', (['"""known"""', '(False)'], {}), "('known', False)\n", (531, 547), False, 'from flask import render_template, session, redirect, url_for\n'), ((589, 606), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (604, 606), False, 'from datetime import datetime\n')]
|
'''
Future Update Notes:
1) Non-supervisor mode is not coded.
2) Controller functions should be updated.
3) The robot_creator function takes too many arguments, and more
   arguments are necessary for options like robot tags, data transfer
   between the real robot and the player, etc. A new structure might
   be necessary.
4) The code is modified for admin permissions on Windows; for Linux it
   should still be arranged.
5) Robot tag creation not included.
6) Collaboration of game manager and world creator is still not
considered.
7) Controller paths should be arranged according to Linux system and
webots configuration.
'''
import numpy as np
from decimal import Decimal
import math
from pathlib import Path
import ctypes, sys #for Windows only
def is_admin():# Windows Admin Permissions
try:
return ctypes.windll.shell32.IsUserAnAdmin()
except:
return False
def grid_material(no):
'''
    A new texture can be added easily by giving the location of its
    texture file (which should be in jpg format); the corresponding
    number is then assigned to the texture in this function.
'''
if no == 0:
return 0
if no == 1:
return 'textures/earth_texture.jpg'
if no == 2:
return 'textures/water_texture.jpg'
if no == 3:
return 'textures/desert_texture.jpg'
if no == 4:
return 'textures/cell_texture.jpg'
def floor_text(x,z,i,grid_length,material,world_text):
'''
    Appends the world-file script for a single floor element of the
    floor matrix.
'''
world_text.append('Floor {\n')
world_text.append(' translation {0} 0 {1}\n'.format(x,z))
world_text.append(' name "floor({0})"\n'.format(i))
world_text.append(' size {0} {0}\n'.format(grid_length))
world_text.append(' tileSize {0} {0}\n'.format(grid_length))
world_text.append(' appearance Appearance {\n')
world_text.append(' texture ImageTexture {\n')
world_text.append(' url [\n')
world_text.append(' "{}"\n'.format(material))
world_text.append(' ]\n')
world_text.append(' filtering 0\n')
world_text.append(' }}}\n')
def arena_creator(floor_matrix, grid_length,world_text):
'''
    floor_matrix decides the shape of the arena, the number of grids and
    the grid textures: each matrix element selects the texture of one
    grid via the numbers defined in grid_material. grid_length is given
    in meters.
    For example:
    A = [1 3] Element value: 0 = box obstacle, 1 = earth, 2 = water,
        [3 4]                3 = sand, 4 = cell
'''
i = 0
for ix,iz in np.ndindex(floor_matrix.shape):
x = (grid_length / 2) + (iz * grid_length)
z = (grid_length / 2) + (ix * grid_length)
material = grid_material(floor_matrix[ix,iz])
if material != 0:
floor_text(x,z,i,grid_length,material,world_text)
if material == 0:
obstacle(x,z,i,grid_length,world_text)
i += 1
def distance_sensor(r_no,s_no,x,z,r,cov,res,s,body,world_text,main,loop):
'''
    x, z and r are the x-coordinate, z-coordinate and direction (rotation
    around the y axis), respectively. The values of x, z and r should be
    calculated w.r.t. the robot body. cov (coverage) is the range of the
    sensor and res (resolution) is the smallest change in distance the
    sensor can detect. x, z and coverage are in meters; r is in degrees.
    body is the imaginary body of the sensor device and its value can be
    True or False. r_no and s_no are the id of the robot that carries
    the sensor and the id of the sensor (there might be multiple sensors
    on a robot, so they must be identified distinctly), respectively.
    The imaginary body is black by default. s is the supervisor mode of
    the robot (True or False). main and loop are the main and loop parts
    of the controller.
'''
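    # Round pi to 5 decimals (presumably to keep the generated .wbt values short), then convert r from degrees to radians.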
pi_5f = float("{:.5f}".format(math.pi))
r = r / 180 * pi_5f
world_text.append(' DistanceSensor {\n')
world_text.append(' translation {0} 0 {1}\n'.format(x,z))
world_text.append(' rotation 0 1 0 {}\n'.format(r))
if body == True:
world_text.append(' children [\n')
world_text.append(' Shape {\n')
world_text.append(' appearance PBRAppearance {\n')
world_text.append(' baseColor 0 0 0\n')
world_text.append(' roughness 1\n')
world_text.append(' metalness 0\n')
world_text.append(' }\n')
world_text.append(' geometry Box {\n')
world_text.append(' size 0.001 0.001 0.001\n')
world_text.append(' }}]\n')
world_text.append(' name "ds_{0}_{1}"\n'.format(r_no,s_no))
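    # Webots DistanceSensor lookupTable rows are (distance, returned value, noise); these two rows give a linear response from 0 up to cov.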
world_text.append(' lookupTable [\n')
world_text.append(' 0 0 0\n')
world_text.append(' {0} {1} 0\n'.format(cov,res))
world_text.append(' ]}\n')
#Controller Part
if s == True:
main.append('ds_1 = supervisor.getDistanceSensor("ds_{0}_{1}")\n'.format(r_no,s_no))
main.append('ds_1.enable(timeStep)\n')
main.append('ds.append(ds_1)\n')
#if s == False:
def robot_controller(r_no,supervisor,main,loop):
'''
    r_no is the robot number. main and loop are the parts of the
    controller scripts that are necessary for devices and sensors.
'''
robot_controller_main = [] # main function of robot controller
robot_controller_loop = [] # loop function of robot controller
robot_controller_main.append('import json\n')
robot_controller_main.append('from pathlib import Path\n')
robot_controller_main.append('from controller import *\n')
robot_controller_main.append('\n')
robot_controller_main.append('timeStep = 32\n')#Default
robot_controller_main.append('ds = []\n')
    robot_controller_main.append('file_to_open = Path("C:/Users/Korcan/Desktop/ME462/l_r_of_Robot_1.txt")\n')  # TODO: edit this hard-coded path for your machine
if supervisor == True:
robot_controller_main.append('supervisor = Supervisor()\n')
robot_controller_main.append('robot_node = supervisor.getFromDef("Robot_{}")\n'.format(r_no))
robot_controller_main.append('trans_field = robot_node.getField("translation")\n')
robot_controller_main.append('rot_field = robot_node.getField("rotation")\n')
robot_controller_main.append('\n')
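        # Supervisor loop: read [translation, rotation] for this robot from the shared file and teleport the robot node accordingly.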
robot_controller_loop.append('while supervisor.step(timeStep) != -1:\n')
robot_controller_loop.append(' val_translation = trans_field.getSFVec3f()\n')
robot_controller_loop.append(' val_rotation = rot_field.getSFRotation()\n')
robot_controller_loop.append(" f = open(file_to_open, mode='r')\n")
robot_controller_loop.append(' data_as_string = f.readlines()\n')
robot_controller_loop.append(' f.close()\n')
robot_controller_loop.append(' try:\n')
robot_controller_loop.append(' if len(data_as_string[{}]) != 0:\n'.format(r_no-1))
robot_controller_loop.append(' data = json.loads(data_as_string[{}])\n'.format(r_no-1))
robot_controller_loop.append(' trans_field.setSFVec3f(data[0])\n')
robot_controller_loop.append(' rot_field.setSFRotation(data[1])\n')
robot_controller_loop.append(' except IndexError:\n')
robot_controller_loop.append(' pass\n')
robot_controller_loop.append('\n')
loop.append(' for e in ds:\n')
loop.append(' print(e.getValue())\n')
#if super == False:
robot_controller_main = robot_controller_main + main
robot_controller_loop = robot_controller_loop + loop
final_controller = robot_controller_main + robot_controller_loop
location = "C:/Program Files/Webots/projects/default/controllers/Robot_{}".format(r_no)# EDIT THISSS
if is_admin():#Windows admin permissions inside of this if part of the program
Path(location).mkdir(parents=False, exist_ok=True)
[f.unlink() for f in Path(location).glob("*") if f.is_file()]
f = open(location + "/Robot_{}.py".format(r_no), "w")
f.writelines(final_controller)
f.close()
else:
# Re-run the program with admin rights
ctypes.windll.shell32.ShellExecuteW(None, "runas", sys.executable, " ".join(sys.argv), None, 1)
def robot_creator(x,z,r_no,supervisor,world_text):
'''
    The value of supervisor can be True or False. If the value is False,
    then motors are enabled. r_no is the id of the robot. x and z are
    the start coordinates of the robot. r_no should not be 0; 0 is used
    for the Arena Top Camera.
'''
main = []
loop = []
world_text.append('DEF Robot_{} Robot '.format(r_no))
world_text.append('{\n')
world_text.append(' translation {0} 0.03 {1}\n'.format(x,z))
world_text.append(' children [\n')
#Below lines for robot body
world_text.append(' DEF robot_{}_body Shape '.format(r_no))
world_text.append('{\n')
world_text.append(' appearance PBRAppearance {\n')
world_text.append(' baseColor 0.917647 0.145098 0.145098\n')
world_text.append(' roughness 1\n')
world_text.append(' metalness 0\n')
world_text.append(' }\n')
world_text.append(' geometry Box {\n')
world_text.append(' size 0.09 0.06 0.07\n')
world_text.append(' }}\n')
#Below lines for sensor
distance_sensor(r_no,1,0.045,0,0,0.1,100,supervisor,False,world_text,main,loop)
distance_sensor(r_no,2,-0.045,0,180,0.1,100,supervisor,False,world_text,main,loop)
distance_sensor(r_no,3,0,0.035,-90,0.1,100,supervisor,False,world_text,main,loop)
distance_sensor(r_no,4,0,-0.035,90,0.1,100,supervisor,False,world_text,main,loop)
#Below lines for motor when no real robots exist
    if supervisor == False:
        motor(0, 0)  # placeholder coordinates; motor() is still a stub (see future update notes)
world_text.append(' ]\n')
#end of the children of robot
world_text.append(' name "robot_{}"\n'.format(r_no))
world_text.append(' boundingObject USE robot_{}_body\n'.format(r_no))
world_text.append(' controller "Robot_{}"\n'.format(r_no))
if supervisor == True:
world_text.append(' supervisor TRUE\n')
world_text.append('}\n')
#controller of robot
robot_controller(r_no,supervisor,main,loop)
def motor(x,z):
'''
    The x and z coordinates are w.r.t. the robot body, and their values
    are in meters.
'''
def obstacle(x,z,i,a,world_text):
'''
    Cubic obstacle with side length a, in meters. x and z are the
    coordinate values of the obstacle w.r.t. the general coordinate
    axes. baseColor is the color of the obstacle and is black by default.
'''
world_text.append('Solid {\n')
world_text.append(' translation {0} {1} {2}\n'.format(x,a/2,z))
world_text.append(' children [\n')
world_text.append(' DEF obstacle_{0} Shape '.format(i))
world_text.append('{\n')
world_text.append(' appearance PBRAppearance {\n')
world_text.append(' baseColor 0 0 0\n')
world_text.append(' roughness 1\n')
world_text.append(' metalness 0\n')
world_text.append(' }\n')
world_text.append(' geometry Box {\n')
world_text.append(' size {0} {0} {0}\n'.format(a))
world_text.append(' }}]\n')
world_text.append(' name "obstacle_{}"\n'.format(i))
world_text.append(' boundingObject USE obstacle_{}\n'.format(i))
world_text.append('}\n')
def arena_top_cam(a,grid_length,y,width,height,world_text):
'''
    The x, z coordinates of the middle point of the arena are computed
    from the arena matrix (a) and grid_length. The value of y should be
    a proper perpendicular distance from the floor, in meters. The
    values of width and height are the camera resolution, in pixels.
'''
x = a.shape[0]
x = x / 2 * grid_length
z = a.shape[1]
z = z / 2 * grid_length
y = y * (width / height) #Assumed width > height
world_text.append('DEF Arena_Cam Robot {\n')
world_text.append(' translation {0} {1} {2}\n'.format(x,y,z))
world_text.append(' rotation -1 0 0 1.5708\n')
world_text.append(' children [\n')
world_text.append(' Camera {\n')
world_text.append(' name "Arena_Top_Cam"\n')
world_text.append(' width {}\n'.format(width))
world_text.append(' height {}\n'.format(height))
world_text.append(' }]\n')
world_text.append(' name "robot_0"\n')
world_text.append(' controller "arena_top_cam"\n')
world_text.append(' supervisor TRUE\n')
world_text.append('}\n')
def world_creator(floor_matrix,grid_length,basic_time_step):
'''
    floor_matrix is a matrix that decides the shape of the arena, the
    number of grids and the grid colors. Each matrix element selects a
    grid texture by its corresponding number. The value of grid_length
    is in meters. basic_time_step is the time step increment used by
    Webots, expressed in milliseconds. The default value of the basic
    time step in Webots is 32 ms.
'''
contents = []
a = floor_matrix.shape
m = a[0]
n = a[1]
x = m / 2 * grid_length
z = n / 2 * grid_length
max_length = max(m,n) * grid_length
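    # Assumed: 0.748 is an empirical field-of-view factor, chosen so the whole arena stays visible from height y.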
y = max_length / 0.748 #Field of view calculations
#Main contents of world
contents.append('#VRML_SIM R2020a utf8\n')
contents.append('WorldInfo {\n')
contents.append(' basicTimeStep {}\n'.format(basic_time_step))
contents.append('}\n')
contents.append('Viewpoint {\n')
contents.append(' orientation -1 0 0 1.5708\n')
contents.append(' position {0} {1} {2}\n'.format(x,2*y,z))
contents.append('}\n')
contents.append('TexturedBackground {\n')
contents.append('}\n')
contents.append('TexturedBackgroundLight {\n')
contents.append('}\n')
#Element of world: Arena, Robots, Top Camera
arena_creator(floor_matrix, grid_length,contents)
robot_creator(-grid_length,grid_length,1,True,contents)
robot_creator(-grid_length,3*grid_length,2,True,contents)
arena_top_cam(floor_matrix,grid_length,y,1280,720,contents)
f = open("sample_world.wbt", "w")
f.writelines(contents)
f.close()
a = np.random.randint(0,5,size=(10,10))
print(a)
print(a.shape)
world_creator(a, 0.15, 32)
|
[
"numpy.ndindex",
"numpy.random.randint",
"ctypes.windll.shell32.IsUserAnAdmin",
"pathlib.Path"
] |
[((13388, 13426), 'numpy.random.randint', 'np.random.randint', (['(0)', '(5)'], {'size': '(10, 10)'}), '(0, 5, size=(10, 10))\n', (13405, 13426), True, 'import numpy as np\n'), ((2466, 2496), 'numpy.ndindex', 'np.ndindex', (['floor_matrix.shape'], {}), '(floor_matrix.shape)\n', (2476, 2496), True, 'import numpy as np\n'), ((820, 857), 'ctypes.windll.shell32.IsUserAnAdmin', 'ctypes.windll.shell32.IsUserAnAdmin', ([], {}), '()\n', (855, 857), False, 'import ctypes, sys\n'), ((7421, 7435), 'pathlib.Path', 'Path', (['location'], {}), '(location)\n', (7425, 7435), False, 'from pathlib import Path\n'), ((7496, 7510), 'pathlib.Path', 'Path', (['location'], {}), '(location)\n', (7500, 7510), False, 'from pathlib import Path\n')]
|
# coding: utf-8
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class PingProbe(object):
"""
This model contains all of the mutable and immutable properties for a ping probe.
"""
#: A constant which can be used with the protocol property of a PingProbe.
#: This constant has a value of "ICMP"
PROTOCOL_ICMP = "ICMP"
#: A constant which can be used with the protocol property of a PingProbe.
#: This constant has a value of "TCP"
PROTOCOL_TCP = "TCP"
def __init__(self, **kwargs):
"""
Initializes a new PingProbe object with values from keyword arguments.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param id:
The value to assign to the id property of this PingProbe.
:type id: str
:param results_url:
The value to assign to the results_url property of this PingProbe.
:type results_url: str
:param home_region:
The value to assign to the home_region property of this PingProbe.
:type home_region: str
:param time_created:
The value to assign to the time_created property of this PingProbe.
:type time_created: datetime
:param compartment_id:
The value to assign to the compartment_id property of this PingProbe.
:type compartment_id: str
:param targets:
The value to assign to the targets property of this PingProbe.
:type targets: list[str]
:param vantage_point_names:
The value to assign to the vantage_point_names property of this PingProbe.
:type vantage_point_names: list[str]
:param port:
The value to assign to the port property of this PingProbe.
:type port: int
:param timeout_in_seconds:
The value to assign to the timeout_in_seconds property of this PingProbe.
:type timeout_in_seconds: int
:param protocol:
The value to assign to the protocol property of this PingProbe.
Allowed values for this property are: "ICMP", "TCP", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:type protocol: str
"""
self.swagger_types = {
'id': 'str',
'results_url': 'str',
'home_region': 'str',
'time_created': 'datetime',
'compartment_id': 'str',
'targets': 'list[str]',
'vantage_point_names': 'list[str]',
'port': 'int',
'timeout_in_seconds': 'int',
'protocol': 'str'
}
self.attribute_map = {
'id': 'id',
'results_url': 'resultsUrl',
'home_region': 'homeRegion',
'time_created': 'timeCreated',
'compartment_id': 'compartmentId',
'targets': 'targets',
'vantage_point_names': 'vantagePointNames',
'port': 'port',
'timeout_in_seconds': 'timeoutInSeconds',
'protocol': 'protocol'
}
self._id = None
self._results_url = None
self._home_region = None
self._time_created = None
self._compartment_id = None
self._targets = None
self._vantage_point_names = None
self._port = None
self._timeout_in_seconds = None
self._protocol = None
@property
def id(self):
"""
Gets the id of this PingProbe.
The OCID of the resource.
:return: The id of this PingProbe.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""
Sets the id of this PingProbe.
The OCID of the resource.
:param id: The id of this PingProbe.
:type: str
"""
self._id = id
@property
def results_url(self):
"""
Gets the results_url of this PingProbe.
A URL for fetching the probe results.
:return: The results_url of this PingProbe.
:rtype: str
"""
return self._results_url
@results_url.setter
def results_url(self, results_url):
"""
Sets the results_url of this PingProbe.
A URL for fetching the probe results.
:param results_url: The results_url of this PingProbe.
:type: str
"""
self._results_url = results_url
@property
def home_region(self):
"""
Gets the home_region of this PingProbe.
The region where updates must be made and where results must be fetched from.
:return: The home_region of this PingProbe.
:rtype: str
"""
return self._home_region
@home_region.setter
def home_region(self, home_region):
"""
Sets the home_region of this PingProbe.
The region where updates must be made and where results must be fetched from.
:param home_region: The home_region of this PingProbe.
:type: str
"""
self._home_region = home_region
@property
def time_created(self):
"""
Gets the time_created of this PingProbe.
The RFC 3339-formatted creation date and time of the probe.
:return: The time_created of this PingProbe.
:rtype: datetime
"""
return self._time_created
@time_created.setter
def time_created(self, time_created):
"""
Sets the time_created of this PingProbe.
The RFC 3339-formatted creation date and time of the probe.
:param time_created: The time_created of this PingProbe.
:type: datetime
"""
self._time_created = time_created
@property
def compartment_id(self):
"""
Gets the compartment_id of this PingProbe.
The OCID of the compartment.
:return: The compartment_id of this PingProbe.
:rtype: str
"""
return self._compartment_id
@compartment_id.setter
def compartment_id(self, compartment_id):
"""
Sets the compartment_id of this PingProbe.
The OCID of the compartment.
:param compartment_id: The compartment_id of this PingProbe.
:type: str
"""
self._compartment_id = compartment_id
@property
def targets(self):
"""
Gets the targets of this PingProbe.
A list of targets (hostnames or IP addresses) of the probe.
:return: The targets of this PingProbe.
:rtype: list[str]
"""
return self._targets
@targets.setter
def targets(self, targets):
"""
Sets the targets of this PingProbe.
A list of targets (hostnames or IP addresses) of the probe.
:param targets: The targets of this PingProbe.
:type: list[str]
"""
self._targets = targets
@property
def vantage_point_names(self):
"""
Gets the vantage_point_names of this PingProbe.
A list of names of vantage points from which to execute the probe.
:return: The vantage_point_names of this PingProbe.
:rtype: list[str]
"""
return self._vantage_point_names
@vantage_point_names.setter
def vantage_point_names(self, vantage_point_names):
"""
Sets the vantage_point_names of this PingProbe.
A list of names of vantage points from which to execute the probe.
:param vantage_point_names: The vantage_point_names of this PingProbe.
:type: list[str]
"""
self._vantage_point_names = vantage_point_names
@property
def port(self):
"""
Gets the port of this PingProbe.
The port on which to probe endpoints. If unspecified, probes will use the
default port of their protocol.
:return: The port of this PingProbe.
:rtype: int
"""
return self._port
@port.setter
def port(self, port):
"""
Sets the port of this PingProbe.
The port on which to probe endpoints. If unspecified, probes will use the
default port of their protocol.
:param port: The port of this PingProbe.
:type: int
"""
self._port = port
@property
def timeout_in_seconds(self):
"""
Gets the timeout_in_seconds of this PingProbe.
The probe timeout in seconds. Valid values: 10, 20, 30, and 60.
The probe timeout must be less than or equal to `intervalInSeconds` for monitors.
:return: The timeout_in_seconds of this PingProbe.
:rtype: int
"""
return self._timeout_in_seconds
@timeout_in_seconds.setter
def timeout_in_seconds(self, timeout_in_seconds):
"""
Sets the timeout_in_seconds of this PingProbe.
The probe timeout in seconds. Valid values: 10, 20, 30, and 60.
The probe timeout must be less than or equal to `intervalInSeconds` for monitors.
:param timeout_in_seconds: The timeout_in_seconds of this PingProbe.
:type: int
"""
self._timeout_in_seconds = timeout_in_seconds
@property
def protocol(self):
"""
Gets the protocol of this PingProbe.
Allowed values for this property are: "ICMP", "TCP", 'UNKNOWN_ENUM_VALUE'.
Any unrecognized values returned by a service will be mapped to 'UNKNOWN_ENUM_VALUE'.
:return: The protocol of this PingProbe.
:rtype: str
"""
return self._protocol
@protocol.setter
def protocol(self, protocol):
"""
Sets the protocol of this PingProbe.
:param protocol: The protocol of this PingProbe.
:type: str
"""
allowed_values = ["ICMP", "TCP"]
if not value_allowed_none_or_none_sentinel(protocol, allowed_values):
protocol = 'UNKNOWN_ENUM_VALUE'
self._protocol = protocol
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
|
[
"oci.util.formatted_flat_dict",
"oci.util.value_allowed_none_or_none_sentinel"
] |
[((10536, 10561), 'oci.util.formatted_flat_dict', 'formatted_flat_dict', (['self'], {}), '(self)\n', (10555, 10561), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n'), ((10355, 10416), 'oci.util.value_allowed_none_or_none_sentinel', 'value_allowed_none_or_none_sentinel', (['protocol', 'allowed_values'], {}), '(protocol, allowed_values)\n', (10390, 10416), False, 'from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel\n')]
|
""" Demonstrates the DC system from the client side, using a
DistributedNode-based avatar. """
from direct.directbase.DirectStart import *
from direct.distributed.ClientRepository import ClientRepository
from direct.gui.OnscreenText import OnscreenText
from direct.showbase.DirectObject import DirectObject
from pandac.PandaModules import *
from DistributedPellet import DistributedPellet
import sys
import random
helpText = """[Left Arrow]: Rotate Left
[Right Arrow]: Rotate Right
[Up Arrow]: Move Forward
[Down Arrow]: Move Backwards
[Tab]: Change color
[Space]: Drop pellet
[c]: Clear pellets
[1 - 9]: Set zone
[Escape]: exit"""
class MyClientRepository(ClientRepository):
def __init__(self):
dcFileNames = ['direct.dc', 'net.dc']
ClientRepository.__init__(self, dcFileNames = dcFileNames)
class World(DirectObject):
# Degrees per second of rotation
rotateSpeed = 90
# Units per second of motion
moveSpeed = 8
def __init__(self):
DirectObject.__init__(self)
# No avatar yet.
self.av = None
# No pellets either.
self.pellets = []
# The list of keys that we will be monitoring.
self.moveKeyList = [
'arrow_left', 'arrow_right', 'arrow_up', 'arrow_down'
]
# Initially, all keys are up. Construct a dictionary that
# maps each of the above keys to False, and hang the event to
# manage that state.
self.moveKeys = {}
for key in self.moveKeyList:
self.moveKeys[key] = False
self.accept(key, self.moveKeyStateChanged, extraArgs = [key, True])
self.accept(key + '-up', self.moveKeyStateChanged, extraArgs = [key, False])
tcpPort = base.config.GetInt('server-port', 4400)
hostname = base.config.GetString('server-host', '127.0.0.1')
self.url = URLSpec('http://%s:%s' % (hostname, tcpPort))
self.cr = MyClientRepository()
self.cr.connect([self.url],
successCallback = self.connectSuccess,
failureCallback = self.connectFailure)
self.waitingText = OnscreenText(
'Connecting to %s.\nPress ESC to cancel.' % (self.url),
scale = 0.1, fg = (1, 1, 1, 1), shadow = (0, 0, 0, 1))
self.accept('escape', self.escape)
# Oobe mode is handy to have on all the time. Why not?
base.oobe()
def moveKeyStateChanged(self, key, newState):
""" A key event has been received. Change the key state in
the dictionary. """
self.moveKeys[key] = newState
def escape(self):
""" The user pressed escape. Exit the client. """
sys.exit()
def connectFailure(self, statusCode, statusString):
self.waitingText.destroy()
self.failureText = OnscreenText(
'Failed to connect to %s: %s.\nPress ESC to quit.' % (self.url, statusString),
scale = 0.15, fg = (1, 0, 0, 1), shadow = (0, 0, 0, 1))
def connectSuccess(self):
""" Successfully connected. But we still can't really do
anything until we've got the doID range. """
self.waitingText.destroy()
self.waitingText = OnscreenText(
'Waiting for server.',
scale = 0.1, fg = (1, 1, 1, 1), shadow = (0, 0, 0, 1))
self.acceptOnce('createReady', self.createReady)
def createReady(self):
""" Now we're ready to go! """
self.waitingText.destroy()
# Manifest an avatar for ourselves.
self.av = self.cr.createDistributedObject(
className = 'DistributedAvatar', zoneId = 1)
# The tab key changes your color.
self.accept('tab', self.changeAvColor)
# The space bar drops a new pellet.
self.accept('space', self.dropPellet)
# The 'c' key clears the pellets you've dropped.
self.accept('c', self.clearPellets)
# A number key (other than zero) changes your zone.
for zoneId in range(1, 10):
self.accept(str(zoneId), self.changeAvZone, extraArgs = [zoneId])
# Pop up some help text.
self.title = OnscreenText(
parent = base.a2dBottomRight,
text = 'DistributedNode client demo',
fg = (1, 1, 1, 1),
pos = (-0.03, 0.03), align = TextNode.ARight, scale = 0.1)
self.help = OnscreenText(
parent = base.a2dTopLeft,
text = helpText,
fg = (1, 1, 1, 1),
pos=(0.03, -0.1), align = TextNode.ALeft, scale = 0.07)
# Update the local avatar's position every frame.
self.moveTask = taskMgr.add(self.moveAvatar, 'moveAvatar')
# Send position updates only several times a second, instead
# of every frame, or we will flood the network.
self.lastBroadcastTransform = self.av.getTransform()
self.updateTask = taskMgr.doMethodLater(0.2, self.updateAvatar, 'updateAvatar')
def dropPellet(self):
# Create a new DistributedPellet, and put it right where the
# avatar is.
pellet = DistributedPellet(self.cr)
x, y, z = self.av.getPos()
pellet.setInitialPos(x, y, z)
self.cr.createDistributedObject(
distObj = pellet, zoneId = self.av.zoneId)
self.pellets.append(pellet)
def clearPellets(self):
        # Remove all of the DistributedPellets we've created, and
        # forget their doIds so a second clear doesn't resend deletes.
        for p in self.pellets:
            self.cr.sendDeleteMsg(p.doId)
        self.pellets = []
def changeAvColor(self):
""" The user pressed the tab key. Change the color of the
local avatar to a new random color. """
r = random.uniform(0, 1)
g = random.uniform(0, 1)
b = random.uniform(0, 1)
self.av.b_setAvColor(r, g, b)
def changeAvZone(self, zoneId):
""" The user pressed one of the number keys to change zones.
Move the avatar into the indicated zone. """
# Move our avatar into the indicated zone
self.cr.setObjectZone(self.av, zoneId)
def moveAvatar(self, task):
""" This task runs each frame to move the avatar according to
the set of arrow keys that are being held. """
dt = globalClock.getDt()
if self.moveKeys['arrow_left']:
self.av.setH(self.av, dt * self.rotateSpeed)
elif self.moveKeys['arrow_right']:
self.av.setH(self.av, -dt * self.rotateSpeed)
if self.moveKeys['arrow_up']:
self.av.setY(self.av, dt * self.moveSpeed)
elif self.moveKeys['arrow_down']:
self.av.setY(self.av, -dt * self.moveSpeed)
return task.cont
def updateAvatar(self, task):
""" This task runs five times a second to broadcast the
avatar's position to all of the other clients. """
currentTransform = self.av.getTransform()
if self.lastBroadcastTransform == currentTransform:
# No change since last broadcast.
return task.again
self.lastBroadcastTransform = currentTransform
x, y, z = self.av.getPos()
h, p, r = self.av.getHpr()
self.av.d_setPosHpr(x, y, z, h, p, r)
return task.again
w = World()
run()
|
[
"random.uniform",
"direct.distributed.ClientRepository.ClientRepository.__init__",
"direct.showbase.DirectObject.DirectObject.__init__",
"DistributedPellet.DistributedPellet",
"sys.exit",
"direct.gui.OnscreenText.OnscreenText"
] |
[((767, 823), 'direct.distributed.ClientRepository.ClientRepository.__init__', 'ClientRepository.__init__', (['self'], {'dcFileNames': 'dcFileNames'}), '(self, dcFileNames=dcFileNames)\n', (792, 823), False, 'from direct.distributed.ClientRepository import ClientRepository\n'), ((1002, 1029), 'direct.showbase.DirectObject.DirectObject.__init__', 'DirectObject.__init__', (['self'], {}), '(self)\n', (1023, 1029), False, 'from direct.showbase.DirectObject import DirectObject\n'), ((2171, 2294), 'direct.gui.OnscreenText.OnscreenText', 'OnscreenText', (['("""Connecting to %s.\nPress ESC to cancel.""" % self.url)'], {'scale': '(0.1)', 'fg': '(1, 1, 1, 1)', 'shadow': '(0, 0, 0, 1)'}), '("""Connecting to %s.\nPress ESC to cancel.""" % self.url, scale\n =0.1, fg=(1, 1, 1, 1), shadow=(0, 0, 0, 1))\n', (2183, 2294), False, 'from direct.gui.OnscreenText import OnscreenText\n'), ((2724, 2734), 'sys.exit', 'sys.exit', ([], {}), '()\n', (2732, 2734), False, 'import sys\n'), ((2862, 3011), 'direct.gui.OnscreenText.OnscreenText', 'OnscreenText', (['("""Failed to connect to %s: %s.\nPress ESC to quit.""" % (self.url,\n statusString))'], {'scale': '(0.15)', 'fg': '(1, 0, 0, 1)', 'shadow': '(0, 0, 0, 1)'}), '("""Failed to connect to %s: %s.\nPress ESC to quit.""" % (self.\n url, statusString), scale=0.15, fg=(1, 0, 0, 1), shadow=(0, 0, 0, 1))\n', (2874, 3011), False, 'from direct.gui.OnscreenText import OnscreenText\n'), ((3247, 3336), 'direct.gui.OnscreenText.OnscreenText', 'OnscreenText', (['"""Waiting for server."""'], {'scale': '(0.1)', 'fg': '(1, 1, 1, 1)', 'shadow': '(0, 0, 0, 1)'}), "('Waiting for server.', scale=0.1, fg=(1, 1, 1, 1), shadow=(0, \n 0, 0, 1))\n", (3259, 3336), False, 'from direct.gui.OnscreenText import OnscreenText\n'), ((4196, 4346), 'direct.gui.OnscreenText.OnscreenText', 'OnscreenText', ([], {'parent': 'base.a2dBottomRight', 'text': '"""DistributedNode client demo"""', 'fg': '(1, 1, 1, 1)', 'pos': '(-0.03, 0.03)', 'align': 'TextNode.ARight', 'scale': '(0.1)'}), "(parent=base.a2dBottomRight, text='DistributedNode client demo',\n fg=(1, 1, 1, 1), pos=(-0.03, 0.03), align=TextNode.ARight, scale=0.1)\n", (4208, 4346), False, 'from direct.gui.OnscreenText import OnscreenText\n'), ((4425, 4550), 'direct.gui.OnscreenText.OnscreenText', 'OnscreenText', ([], {'parent': 'base.a2dTopLeft', 'text': 'helpText', 'fg': '(1, 1, 1, 1)', 'pos': '(0.03, -0.1)', 'align': 'TextNode.ALeft', 'scale': '(0.07)'}), '(parent=base.a2dTopLeft, text=helpText, fg=(1, 1, 1, 1), pos=(\n 0.03, -0.1), align=TextNode.ALeft, scale=0.07)\n', (4437, 4550), False, 'from direct.gui.OnscreenText import OnscreenText\n'), ((5140, 5166), 'DistributedPellet.DistributedPellet', 'DistributedPellet', (['self.cr'], {}), '(self.cr)\n', (5157, 5166), False, 'from DistributedPellet import DistributedPellet\n'), ((5693, 5713), 'random.uniform', 'random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (5707, 5713), False, 'import random\n'), ((5726, 5746), 'random.uniform', 'random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (5740, 5746), False, 'import random\n'), ((5759, 5779), 'random.uniform', 'random.uniform', (['(0)', '(1)'], {}), '(0, 1)\n', (5773, 5779), False, 'import random\n')]
|
# -*- coding: utf-8 -*-
import scrapy
from scrapy.loader import ItemLoader
from knowledge.items import CommonItem
from knowledge.consts import consts
class GushiCommonSpider(scrapy.Spider):
def is_root_url(self, url):
for item in self.start_urls:
if url.find(item) != -1:
return True
return False
def gushi365_parse(self, response):
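        # List pages yield requests for each story page and for the next list page; story pages yield a populated CommonItem.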
is_root = self.is_root_url(response.url)
if is_root:
try:
for item in response.xpath('//main[@id="main"]/article//h2/a'):
                    # Filter the URL: if it has already been crawled, skip it
next_page_url = item.xpath('@href').extract_first()
if next_page_url is not None:
yield scrapy.Request(response.urljoin(next_page_url))
                # Data for the next page of the list
next_page = response.xpath('//span[@class="next"]/a/@href').extract_first()
if next_page is not None:
yield scrapy.Request(response.urljoin(next_page))
except Exception:
pass
else:
try:
ci = CommonItem()
title = response.xpath('//main[@id="main"]/article//h1/text()').extract_first()
                title = title.replace('\r\n', '')
ci['title'] = title
content = "".join(response.xpath('//main[@id="main"]/article//span[@class="STYLE1"]/p/text()').extract())
content = content.replace('\r\n', '')
content = content.replace('\n', '')
content = content.replace(' ', '')
content = content.replace('\xa0', '')
content = content.replace('\u3000', '')
ci['content'] = content
ci['url'] = response.url
ci['desc'] = self.desc
ci['type'] = self.type
yield ci
except Exception:
pass
def parse(self, response):
        # parse is a generator, so return the generator produced by each site-specific parse method
if response.url.find('gushi365') != -1:
return self.gushi365_parse(response)
class YouerSpider(GushiCommonSpider):
name = "youer-spider"
start_urls = [
'http://www.gushi365.com/youergushi/',
]
type = consts.YOUER_TYPE
desc = consts.YOUER_DESC
class ErtongSpider(GushiCommonSpider):
name = "ertong-spider"
start_urls = [
'http://www.gushi365.com/xiaogushi/',
]
type = consts.ERTONG_TYPE
desc = consts.ERTONG_DESC
class ShuiqianSpider(GushiCommonSpider):
name = "shuiqian-spider"
start_urls = [
'http://www.gushi365.com/shuiqiangushi/',
]
type = consts.SHUIQIAN_TYPE
desc = consts.SHUIQIAN_DESC
class YizhiSpider(GushiCommonSpider):
name = "yizhi-spider"
start_urls = [
'http://www.gushi365.com/yizhigushi/',
]
type = consts.YIZHI_TYPE
desc = consts.YIZHI_DESC
class YuyanSpider(GushiCommonSpider):
name = "yuyan-spider"
start_urls = [
'http://www.gushi365.com/yuyangushi/',
]
type = consts.YUYAN_TYPE
desc = consts.YUYAN_DESC
class MinjianSpider(GushiCommonSpider):
name = "minjian-spider"
start_urls = [
'http://www.gushi365.com/minjiangushi/',
]
type = consts.MINJIAN_TYPE
desc = consts.MINJIAN_DESC
|
[
"knowledge.items.CommonItem"
] |
[((1128, 1140), 'knowledge.items.CommonItem', 'CommonItem', ([], {}), '()\n', (1138, 1140), False, 'from knowledge.items import CommonItem\n')]
|
from __future__ import print_function
import requests
from cloudmesh_client.shell.console import Console
from cloudmesh_client.common.Printer import Printer
from cloudmesh_client.db import CloudmeshDatabase
from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider
from cloudmesh_client.cloud.ListResource import ListResource
from cloudmesh_client.common.LibcloudDict import LibcloudDict
from cloudmesh_client.common.dotdict import dotdict
from pprint import pprint
from cloudmesh_client.common.ConfigDict import ConfigDict
from cloudmesh_client.default import Default
requests.packages.urllib3.disable_warnings()
class SecGroup(ListResource):
cm = CloudmeshDatabase()
"""
NOT USED
@classmethod
def convert_list_to_dict(cls, os_result):
d = {}
for i, obj in enumerate(os_result):
d[i] = {}
d[i]["Id"] = obj.id
d[i]["Name"] = obj.name
d[i]["Description"] = obj.description
return d
"""
# noinspection PyPep8
@classmethod
def convert_rules_to_dict(cls, os_result):
d = {}
for i, obj in enumerate(os_result):
if obj["ip_range"]["cidr"]:
ip_range = obj["ip_range"]["cidr"]
else:
ip_range = "0.0.0.0/0"
d[i] = {
"IP Protocol": obj["ip_protocol"],
"From Port": obj["from_port"],
"To Port": obj["to_port"],
"IP Range": ip_range
}
return d
@classmethod
def refresh(cls, cloud):
"""
This method would refresh the secgroup list by first clearing
the database, then inserting new data
:param cloud: the cloud name
"""
return cls.cm.refresh('secgroup', cloud)
@classmethod
def add_rule_to_db(cls, group=None, name=None, from_port=None, to_port=None, protocol=None, cidr=None):
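        # Replace any existing rule with the same group/name in the local db before inserting the new one.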
cls.delete_rule_from_db(group=group, name=name)
try:
rule = {
"category": "general",
"kind": "secgrouprule",
"group": group,
"name": name,
'protocol': protocol,
'fromPort': from_port,
'toPort': to_port,
'cidr': cidr
}
cls.cm.add(rule, replace=False)
except Exception as ex:
Console.error("Problem adding rule")
@classmethod
def delete_rule_from_db(cls, group=None, name=None):
old_rule = {
"category": "general",
"kind": "secgrouprule",
"name": name,
"group": group
}
cls.cm.delete(**old_rule)
@classmethod
def upload(cls, cloud=None, group=None):
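        # Push locally stored groups/rules to the given cloud, or to every active cloud when cloud is None or 'all'.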
        if cloud is None or cloud == 'all':
clouds = ConfigDict("cloudmesh.yaml")["cloudmesh"]["active"]
else:
clouds = [cloud]
if group is None:
rules = cls.list(output='dict')
groups = set()
for g in rules:
r = rules[g]
groups.add(r["group"])
groups = list(groups)
else:
groups = [group]
for cloud in clouds:
Console.msg("Uploading the groups/rules to cloud - {}...".format(cloud))
for g in groups:
cls.delete_all_rules_cloud(cloud, g)
group = cls.get(name=g, cloud=cloud)
group_cloud = cls.get_group_cloud(cloud, g)
if not group_cloud:
cls.add_group_cloud(cloud, g)
rules = cls.list_rules(group=g, output="dict")
if rules:
for ruleid in rules:
rule = rules[ruleid]
rulename = rule["name"]
cls.add_rule_cloud(cloud, g, rulename)
'''
SecGroup.delete(category=c, group=g)
uuid = SecGroup.create(category=c, group=g)
for key in rules:
r = rules[key]
if r["group"] == g:
SecGroup.add_rule(c,uuid,r["fromPort"],r["toPort"] , r['protocol'],r['cidr'])
# create group
'''
Console.msg("...done")
Console.info("All completed")
@classmethod
def create(cls, group=None, category=None):
"""
Method creates a new security group in database
& returns the uuid of the created group
:param group:
:param category:
:return:
"""
# Create the security group in given cloud
try:
cloud_provider = CloudProvider(category).provider
secgroup = cloud_provider.create_secgroup(group)
if secgroup:
uuid = secgroup.id
return uuid
else:
print("Failed to create security group, {}".format(secgroup))
except Exception as e:
print(
"Exception creating security group in cloud, {}".format(e))
return None
@classmethod
def list(cls,
group=None,
name=None,
category='general',
output='table',
scope='all'):
"""
This method queries the database to fetch list of secgroups
filtered by cloud.
:param cloud:
:return:
"""
query = dotdict({
"kind": "secgrouprule",
"scope": "all"
})
if category is "general":
if group is not None:
query.group = group
if name is not None:
query.name = name
query.category = category
elements = cls.cm.find(**query)
else:
elements = CloudProvider(category).provider.list_secgroup_rules(category)
if elements is None:
return None
else:
# pprint(elements)
#
# BUG this should not depend on cloud, but on "general"
#
# (order, header) = CloudProvider(cloud).get_attributes("secgroup")
order = ['name', 'group', 'fromPort', 'toPort', 'cidr', 'protocol']
header = None
return Printer.write(elements,
order=order,
header=header,
output=output)
@classmethod
def list_rules(cls, group=None, output='table'):
"""
This method gets the security group rules
from the cloudmesh database
:param uuid:
:return:
"""
try:
if group is None:
rules = cls.cm.find(kind="secgrouprule")
else:
args = {
"group": group
}
rules = cls.cm.find(kind="secgrouprule", **args)
# check if rules exist
if rules is None:
return "No rules for security group={} in the database. Try cm secgroup refresh.".format(group)
# return table
return (Printer.write(rules,
order=["user",
"group",
"category",
"name",
"fromPort",
"toPort",
"protocol",
"cidr"],
output=output))
except Exception as ex:
Console.error("Listing Security group rules")
return None
@classmethod
def enable_ssh(cls, secgroup_name='default', cloud="general"):
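        # Ensure the group exists and contains a tcp/22 rule open to 0.0.0.0/0, creating the rule only when it is missing.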
ret = False
if cloud in LibcloudDict.Libcloud_category_list:
Console.info("Creating and adding security group for libcloud")
cloud_provider = CloudProvider(cloud).provider
cloud_provider.create_sec_group(cloud, secgroup_name)
cloud_provider.enable_ssh(cloud, secgroup_name)
else:
cloud_provider = CloudProvider(cloud).provider.provider
secgroups = cloud_provider.security_groups.list()
for asecgroup in secgroups:
if asecgroup.name == secgroup_name:
rules = asecgroup.rules
rule_exists = False
# structure of a secgroup rule:
# {u'from_port': 22, u'group': {}, u'ip_protocol': u'tcp', u'to_port': 22, u'parent_group_id': u'UUIDHERE', u'ip_range': {u'cidr': u'0.0.0.0/0'}, u'id': u'UUIDHERE'}
for arule in rules:
if arule["from_port"] == 22 and \
arule["to_port"] == 22 and \
arule["ip_protocol"] == 'tcp' and \
arule["ip_range"] == {'cidr': '0.0.0.0/0'}:
# print (arule["id"])
rule_exists = True
break
if not rule_exists:
cloud_provider.security_group_rules.create(
asecgroup.id,
ip_protocol='tcp',
from_port=22,
to_port=22,
cidr='0.0.0.0/0')
# else:
# print ("The rule allowing ssh login did exist!")
ret = True
break
# print ("*" * 80)
# d = SecGroup.convert_list_to_dict(secgroups)
# print (d)
return ret
@classmethod
def get(cls, name=None, cloud="general"):
"""
This method queries the database to fetch secgroup
with given name filtered by cloud.
:param name:
:param cloud:
:return:
"""
try:
args = {
"name": name,
                'scope': 'first',
'kind': "secgroup",
"output": "object",
}
            if cloud is not None and cloud != 'general':
args["category"] = cloud
secgroup = cls.cm.find(**args)
if secgroup is None:
return None
else:
return secgroup[0]
except Exception as ex:
Console.error("get secgroup")
return None
@classmethod
def add_rule(cls, cloud, secgroup_uuid, from_port, to_port, protocol, cidr):
try:
# Get the nova client object
cloud_provider = CloudProvider(cloud).provider
# Create add secgroup rules to the cloud
args = {
'uuid': secgroup_uuid,
'protocol': protocol,
'from_port': from_port,
'to_port': to_port,
'cidr': cidr
}
rule_id = cloud_provider.add_secgroup_rule(**args)
# create local db record
rule = {"kind": "secgrouprule",
"uuid": str(rule_id),
"category": cloud,
"fromPort": from_port,
"toPort": to_port,
"protocol": protocol,
"cidr": cidr}
"""
cls.cm.add(**rule)
cls.cm.save()
"""
Console.ok("Added rule {category} {uuid} {fromPort} {toPort} {protocol} {cidr}"
.format(**rule))
except Exception as ex:
if "This rule already exists" in ex.message:
Console.ok("Rule already exists. Added rule.")
return
else:
Console.error(ex.message, ex)
return
@classmethod
def reset_defaults(cls):
#secgroup = "{}-default".format(Default.user)
secgroup = "default"
Default.set_secgroup(secgroup)
# nova secgroup-add-rule default icmp -1 -1 0.0.0.0/0
SecGroup.add_rule_to_db(group=secgroup,
name="ssh",
from_port="22",
to_port="22",
protocol="tcp",
cidr="0.0.0.0/0")
SecGroup.add_rule_to_db(group=secgroup,
name="http",
from_port="80",
to_port="80",
protocol="tcp",
cidr="0.0.0.0/0")
SecGroup.add_rule_to_db(group=secgroup,
name="https",
from_port="443",
to_port="443",
protocol="tcp",
cidr="0.0.0.0/0")
SecGroup.add_rule_to_db(group=secgroup,
name="icmp",
from_port="-1",
to_port="-1",
protocol="icmp",
cidr="0.0.0.0/0")
@classmethod
def delete(cls,
category='general',
group=None,
name=None):
# name is anme of the rule
        if category == 'general':
if name is None and group is not None:
# delete the entire group
cls.cm.delete(kind="secgrouprule", group=group)
elif name is not None and group is not None:
# delete specific rule
cls.cm.delete(name=name, kind="secgrouprule", group=group)
elif name is None and group is None:
# delete all groups
cls.cm.delete(kind="secgrouprule")
if group == Default.secgroup or Default.secgroup is None:
cls.reset_defaults()
else:
provider = CloudProvider(category).provider
# delete on cloud
if group is not None:
provider.delete_secgroup(name)
# delete the entire group
elif group is None:
# delete all groups
pass
@classmethod
def delete_secgroup(cls, name=None, cloud=None):
try:
# Find the secgroup from the cloud
cloud_provider = CloudProvider(cloud).provider
result = cloud_provider.delete_secgroup(name)
return result
except Exception as ex:
Console.error("delete group")
@classmethod
def delete_rule(cls, cloud, secgroup, from_port, to_port, protocol, cidr):
try:
args = {
"group": secgroup["uuid"],
"fromPort": from_port,
"toPort": to_port,
"protocol": protocol,
"cidr": cidr
}
rule = cls.cm.find(kind="secgrouprule",
output="object",
scope="first",
**args)
if rule is not None:
# get the nova client for cloud
cloud_provider = CloudProvider(cloud).provider
# delete the rule from the cloud
cloud_provider.delete_secgroup_rule(rule.uuid)
# delete the local db record
cls.cm.delete(rule)
return "Rule [{fromPort} | {toPort} | {protocol} | {cidr}] deleted" \
.format(**args)
else:
return None
except Exception as ex:
Console.error("delete rule")
return
@classmethod
def delete_all_rules(cls, secgroup):
try:
args = {
"group": secgroup["uuid"]
}
rules = cls.cm.find(kind="secgrouprule", output="object", **args)
if rules is not None:
for rule in rules:
cls.cm.delete(rule)
Console.ok("Rule [{fromPort} | {toPort} | {protocol} | {cidr}] deleted"
.format(**rule))
else:
pass
except Exception as ex:
Console.error("delete all rules")
return
# new methods moved from the test_secgroup:3
# the operations are from the perspective on the cloud
# and does not make any change on local db
#
@classmethod
def add_group_cloud(cls, cloud, groupname):
provider = CloudProvider(cloud).provider
return provider.create_secgroup(groupname)
@classmethod
def delete_group_cloud(cls, cloud, groupname):
provider = CloudProvider(cloud).provider
return provider.delete_secgroup(groupname)
@classmethod
def add_rule_cloud(cls, cloud, groupname, rulename):
ret = None
provider = CloudProvider(cloud).provider
# fetch rule from db
db_rule = cls.cm.find(kind="secgrouprule",
category="general",
group=groupname,
name=rulename,
scope='first',
output='dict')
kwargs = {}
kwargs["protocol"] = db_rule["protocol"]
kwargs["cidr"] = db_rule["cidr"]
kwargs["from_port"] = db_rule["fromPort"]
kwargs["to_port"] = db_rule["toPort"]
group = cls.get_group_cloud(cloud, groupname)
if group:
groupid = group["id"]
kwargs["uuid"] = groupid
ret = provider.add_secgroup_rule(**kwargs)
return ret
@classmethod
def delete_rule_cloud(cls, cloud, groupname, rulename):
ret = None
provider = CloudProvider(cloud).provider
ruleid = cls.get_rule_cloud(cloud, groupname, rulename)
if ruleid:
ret = provider.delete_secgroup_rule(ruleid)
#else:
# Console.error("Rule does not exist - Rule:{}, Group:{}"\
# .format(rulename, groupname), traceflag=False)
return ret
@classmethod
def delete_all_rules_cloud(cls, cloud, groupname):
rules = cls.list_rules_cloud(cloud, groupname)
provider = CloudProvider(cloud).provider
if rules:
for rule in rules:
ruleid = rule['id']
# only refresh those defined with a protocol
# This leaves the default rule defined by
# allowing the same secgroup untouched
if rule['ip_protocol']:
provider.delete_secgroup_rule(ruleid)
return
@classmethod
def list_groups_cloud(cls, cloud):
provider = CloudProvider(cloud).provider
groups = provider.list_secgroup(cloud)
return groups
@classmethod
def get_group_cloud(cls, cloud, groupname):
provider = CloudProvider(cloud).provider
groups = provider.list_secgroup(cloud)
ret = None
for groupkey in groups:
group = groups[groupkey]
if group["name"] == groupname:
ret = group
break
return ret
@classmethod
def list_rules_cloud(cls, cloud, groupname):
provider = CloudProvider(cloud).provider
groups = provider.list_secgroup(cloud)
for id in groups:
group = groups[id]
if groupname == group["name"]:
return group["rules"]
return None
@classmethod
def get_rule_cloud(cls, cloud, groupname, rulename):
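        # Match the locally stored rule against the cloud's rule list to recover the cloud-side rule id.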
rules = cls.list_rules_cloud(cloud, groupname)
# find properties for db rule
db_rule = cls.cm.find(kind="secgrouprule",
category="general",
group=groupname,
name=rulename,
scope='first',
output='dict')
ruleid = None
for rule in rules:
if 'cidr' in rule['ip_range']:
if (db_rule["fromPort"] == str(rule['from_port']) and
db_rule["toPort"] == str(rule['to_port']) and
db_rule["protocol"] == rule['ip_protocol'] and
db_rule["cidr"] == rule['ip_range']['cidr']
):
ruleid = rule['id'] #uuid for the rule
return ruleid
if __name__ == '__main__':
nova = CloudProvider.set("kilo")
# groups = nova.security_groups.list()
# print(groups)
# print("\n\n")
# d = SecGroup.convert_list_to_dict(groups)
# print(d)
# security_group = nova.security_groups.create(name="oct17_secgroup", description="Created by Gourav")
print("Created sec group\n")
# rule = nova.security_group_rules.create(security_group.id, ip_protocol="icmp",
# from_port=-1, to_port=-1, cidr="0.0.0.0/0")
print("Created sec group rules\n")
# print(rule)
security_group = nova.security_groups.find(name="oct17_secgroup")
rules = security_group.rules
print(rules)
d = SecGroup.convert_rules_to_dict(rules)
print(d)
nova.security_group_rules.delete('6220f8a4-e4fb-4340-bfe7-ffa028a7c6af')
print("Deleted Sec Group Rule")
|
[
"requests.packages.urllib3.disable_warnings",
"cloudmesh_client.shell.console.Console.info",
"cloudmesh_client.shell.console.Console.ok",
"cloudmesh_client.db.CloudmeshDatabase",
"cloudmesh_client.shell.console.Console.msg",
"cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider",
"cloudmesh_client.shell.console.Console.error",
"cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider.set",
"cloudmesh_client.common.ConfigDict.ConfigDict",
"cloudmesh_client.common.dotdict.dotdict",
"cloudmesh_client.common.Printer.Printer.write",
"cloudmesh_client.default.Default.set_secgroup"
] |
[((582, 626), 'requests.packages.urllib3.disable_warnings', 'requests.packages.urllib3.disable_warnings', ([], {}), '()\n', (624, 626), False, 'import requests\n'), ((668, 687), 'cloudmesh_client.db.CloudmeshDatabase', 'CloudmeshDatabase', ([], {}), '()\n', (685, 687), False, 'from cloudmesh_client.db import CloudmeshDatabase\n'), ((20698, 20723), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider.set', 'CloudProvider.set', (['"""kilo"""'], {}), "('kilo')\n", (20715, 20723), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((4314, 4343), 'cloudmesh_client.shell.console.Console.info', 'Console.info', (['"""All completed"""'], {}), "('All completed')\n", (4326, 4343), False, 'from cloudmesh_client.shell.console import Console\n'), ((5469, 5518), 'cloudmesh_client.common.dotdict.dotdict', 'dotdict', (["{'kind': 'secgrouprule', 'scope': 'all'}"], {}), "({'kind': 'secgrouprule', 'scope': 'all'})\n", (5476, 5518), False, 'from cloudmesh_client.common.dotdict import dotdict\n'), ((12103, 12133), 'cloudmesh_client.default.Default.set_secgroup', 'Default.set_secgroup', (['secgroup'], {}), '(secgroup)\n', (12123, 12133), False, 'from cloudmesh_client.default import Default\n'), ((4283, 4305), 'cloudmesh_client.shell.console.Console.msg', 'Console.msg', (['"""...done"""'], {}), "('...done')\n", (4294, 4305), False, 'from cloudmesh_client.shell.console import Console\n'), ((6314, 6380), 'cloudmesh_client.common.Printer.Printer.write', 'Printer.write', (['elements'], {'order': 'order', 'header': 'header', 'output': 'output'}), '(elements, order=order, header=header, output=output)\n', (6327, 6380), False, 'from cloudmesh_client.common.Printer import Printer\n'), ((7189, 7315), 'cloudmesh_client.common.Printer.Printer.write', 'Printer.write', (['rules'], {'order': "['user', 'group', 'category', 'name', 'fromPort', 'toPort', 'protocol', 'cidr']", 'output': 'output'}), "(rules, order=['user', 'group', 'category', 'name', 'fromPort',\n 'toPort', 'protocol', 'cidr'], output=output)\n", (7202, 7315), False, 'from cloudmesh_client.common.Printer import Printer\n'), ((7954, 8017), 'cloudmesh_client.shell.console.Console.info', 'Console.info', (['"""Creating and adding security group for libcloud"""'], {}), "('Creating and adding security group for libcloud')\n", (7966, 8017), False, 'from cloudmesh_client.shell.console import Console\n'), ((16744, 16764), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (16757, 16764), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((16913, 16933), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (16926, 16933), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((17107, 17127), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (17120, 17127), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((17988, 18008), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (18001, 18008), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((18482, 18502), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (18495, 18502), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((18960, 18980), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (18973, 18980), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((19144, 19164), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (19157, 19164), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((19507, 19527), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (19520, 19527), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((2401, 2437), 'cloudmesh_client.shell.console.Console.error', 'Console.error', (['"""Problem adding rule"""'], {}), "('Problem adding rule')\n", (2414, 2437), False, 'from cloudmesh_client.shell.console import Console\n'), ((4695, 4718), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['category'], {}), '(category)\n', (4708, 4718), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((7713, 7758), 'cloudmesh_client.shell.console.Console.error', 'Console.error', (['"""Listing Security group rules"""'], {}), "('Listing Security group rules')\n", (7726, 7758), False, 'from cloudmesh_client.shell.console import Console\n'), ((8047, 8067), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (8060, 8067), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((10566, 10595), 'cloudmesh_client.shell.console.Console.error', 'Console.error', (['"""get secgroup"""'], {}), "('get secgroup')\n", (10579, 10595), False, 'from cloudmesh_client.shell.console import Console\n'), ((10803, 10823), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (10816, 10823), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((14148, 14171), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['category'], {}), '(category)\n', (14161, 14171), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((14588, 14608), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (14601, 14608), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((14746, 14775), 'cloudmesh_client.shell.console.Console.error', 'Console.error', (['"""delete group"""'], {}), "('delete group')\n", (14759, 14775), False, 'from cloudmesh_client.shell.console import Console\n'), ((15842, 15870), 'cloudmesh_client.shell.console.Console.error', 'Console.error', (['"""delete rule"""'], {}), "('delete rule')\n", (15855, 15870), False, 'from cloudmesh_client.shell.console import Console\n'), ((16448, 16481), 'cloudmesh_client.shell.console.Console.error', 'Console.error', (['"""delete all rules"""'], {}), "('delete all rules')\n", (16461, 16481), False, 'from cloudmesh_client.shell.console import Console\n'), ((2829, 2857), 'cloudmesh_client.common.ConfigDict.ConfigDict', 'ConfigDict', (['"""cloudmesh.yaml"""'], {}), "('cloudmesh.yaml')\n", (2839, 2857), False, 'from cloudmesh_client.common.ConfigDict import ConfigDict\n'), ((8246, 8266), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (8259, 8266), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((11815, 11861), 'cloudmesh_client.shell.console.Console.ok', 'Console.ok', (['"""Rule already exists. Added rule."""'], {}), "('Rule already exists. Added rule.')\n", (11825, 11861), False, 'from cloudmesh_client.shell.console import Console\n'), ((11919, 11948), 'cloudmesh_client.shell.console.Console.error', 'Console.error', (['ex.message', 'ex'], {}), '(ex.message, ex)\n', (11932, 11948), False, 'from cloudmesh_client.shell.console import Console\n'), ((15406, 15426), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['cloud'], {}), '(cloud)\n', (15419, 15426), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n'), ((5846, 5869), 'cloudmesh_client.cloud.iaas.CloudProvider.CloudProvider', 'CloudProvider', (['category'], {}), '(category)\n', (5859, 5869), False, 'from cloudmesh_client.cloud.iaas.CloudProvider import CloudProvider\n')]
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class DescribeFileRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'cloudmarketing', '2018-09-10', 'DescribeFile')
self.set_method('POST')
def get_FileName(self):
return self.get_query_params().get('FileName')
def set_FileName(self,FileName):
self.add_query_param('FileName',FileName)
def get_DataSchemaStatusLists(self):
return self.get_query_params().get('DataSchemaStatusLists')
def set_DataSchemaStatusLists(self, DataSchemaStatusLists):
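		# Repeated list values are sent as indexed query parameters: DataSchemaStatusList.1, DataSchemaStatusList.2, ...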
for depth1 in range(len(DataSchemaStatusLists)):
if DataSchemaStatusLists[depth1] is not None:
self.add_query_param('DataSchemaStatusList.' + str(depth1 + 1) , DataSchemaStatusLists[depth1])
def get_PageNo(self):
return self.get_query_params().get('PageNo')
def set_PageNo(self,PageNo):
self.add_query_param('PageNo',PageNo)
def get_PageSize(self):
return self.get_query_params().get('PageSize')
def set_PageSize(self,PageSize):
self.add_query_param('PageSize',PageSize)
def get_FileId(self):
return self.get_query_params().get('FileId')
def set_FileId(self,FileId):
self.add_query_param('FileId',FileId)
|
[
"aliyunsdkcore.request.RpcRequest.__init__"
] |
[((901, 974), 'aliyunsdkcore.request.RpcRequest.__init__', 'RpcRequest.__init__', (['self', '"""cloudmarketing"""', '"""2018-09-10"""', '"""DescribeFile"""'], {}), "(self, 'cloudmarketing', '2018-09-10', 'DescribeFile')\n", (920, 974), False, 'from aliyunsdkcore.request import RpcRequest\n')]
|
import curses
from typing import Union, Callable
from unittest.mock import Mock, MagicMock
import pytest
from pytest_mock import MockerFixture
from precon.remote_control import steer_vehicle, Screen
@pytest.fixture()
def patch_screen() -> Callable[[Union[str, int]], Mock]:
def inner(key: Union[str, int]) -> Mock:
screen = MagicMock(Screen)
if isinstance(key, str):
key = ord(key)
screen.get_pressed_char = Mock(side_effect=[key, ord("q")])
return screen
return inner
@pytest.fixture()
def distance_ahead_to_stop() -> int:
return 3
@pytest.fixture()
def patch_gpio(mocker: MockerFixture) -> None:
mocker.patch("precon.remote_control.fake_rpi.RPi.GPIO.output")
@pytest.fixture()
def patch_print(mocker: MockerFixture) -> None:
mocker.patch("builtins.print")
@pytest.fixture()
def patch_time(mocker: MockerFixture) -> None:
mocker.patch("precon.devices_handlers.driving_engines.time")
@pytest.mark.parametrize(
"key, called_function",
[
(curses.KEY_DOWN, "drive_backward"),
("s", "drive_backward"),
(curses.KEY_LEFT, "turn_left"),
("a", "turn_left"),
(curses.KEY_RIGHT, "turn_right"),
("d", "turn_right"),
],
)
@pytest.mark.asyncio
async def test_drive_on_pressed_keys(
mocker: MockerFixture,
patch_screen: Callable[[Union[str, int]], Mock],
patch_gpio: None,
patch_print: None,
    patch_time: None,
    distance_ahead_to_stop: int,
    key: Union[int, str],
called_function: str,
) -> None:
drive_func = mocker.patch(f"precon.remote_control.{called_function}")
mocker.patch("precon.remote_control.get_distance", return_value=distance_ahead_to_stop)
screen = patch_screen(key)
await steer_vehicle(screen)
drive_func.assert_called_once()
@pytest.mark.parametrize(
"key",
[
curses.KEY_UP,
"w",
],
)
@pytest.mark.asyncio
async def test_drive_forward_on_pressed_keys(
mocker: MockerFixture,
patch_screen: Callable[[Union[str, int]], Mock],
patch_gpio: None,
patch_print: None,
patch_time: None,
key: Union[int, str],
distance_ahead_to_stop: int,
) -> None:
drive_func = mocker.patch("precon.remote_control.drive_forward")
distance_allows_to_drive = distance_ahead_to_stop + 1
mocker.patch("precon.remote_control.get_distance", return_value=distance_allows_to_drive)
screen = patch_screen(key)
await steer_vehicle(screen)
drive_func.assert_called_once()
@pytest.mark.parametrize(
"key",
[
curses.KEY_UP,
"w",
curses.KEY_DOWN,
"s",
curses.KEY_LEFT,
"a",
curses.KEY_RIGHT,
"d",
],
)
@pytest.mark.asyncio
async def test_stop_after_each_drive(
mocker: MockerFixture,
patch_screen: Callable[[Union[str, int]], Mock],
distance_ahead_to_stop: int,
patch_gpio: None,
patch_print: None,
patch_time: None,
key: Union[int, str],
) -> None:
mocker.patch("precon.remote_control.get_distance", return_value=distance_ahead_to_stop + 1)
stop_func = mocker.patch("precon.devices_handlers.driving_engines.stop_driving")
screen = patch_screen(key)
await steer_vehicle(screen)
stop_func.assert_called_once()
@pytest.mark.parametrize(
"key",
[
curses.KEY_UP,
"w",
],
)
@pytest.mark.asyncio
async def test_stop_when_distance_ahead_is_equal_or_less_than_3_and_robot_is_driving_forward(
mocker: MockerFixture,
patch_screen: Callable[[Union[str, int]], Mock],
distance_ahead_to_stop: int,
patch_gpio: None,
patch_print: None,
patch_time: None,
key: Union[int, str],
) -> None:
drive_func = mocker.patch("precon.remote_control.drive_forward")
stop_func = mocker.patch("precon.remote_control.stop_driving")
mocker.patch("precon.remote_control.get_distance", return_value=distance_ahead_to_stop)
screen = patch_screen(key)
await steer_vehicle(screen)
stop_func.assert_called_once()
drive_func.assert_not_called()
@pytest.mark.parametrize(
"key, called_function",
[
(curses.KEY_DOWN, "drive_backward"),
("s", "drive_backward"),
(curses.KEY_LEFT, "turn_left"),
("a", "turn_left"),
(curses.KEY_RIGHT, "turn_right"),
("d", "turn_right"),
],
)
@pytest.mark.asyncio
async def test_not_stop_when_distance_ahead_is_equal_or_less_than_3(
mocker: MockerFixture,
patch_screen: Callable[[Union[str, int]], Mock],
distance_ahead_to_stop: int,
patch_gpio: None,
patch_print: None,
patch_time: None,
key: Union[int, str],
called_function: str,
) -> None:
drive_func = mocker.patch(f"precon.remote_control.{called_function}")
mocker.patch("precon.remote_control.get_distance", return_value=distance_ahead_to_stop)
screen = patch_screen(key)
await steer_vehicle(screen)
drive_func.assert_called_once()
|
[
"pytest.mark.parametrize",
"pytest.fixture",
"unittest.mock.MagicMock",
"precon.remote_control.steer_vehicle"
] |
[((204, 220), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (218, 220), False, 'import pytest\n'), ((529, 545), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (543, 545), False, 'import pytest\n'), ((599, 615), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (613, 615), False, 'import pytest\n'), ((733, 749), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (747, 749), False, 'import pytest\n'), ((836, 852), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (850, 852), False, 'import pytest\n'), ((968, 1199), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""key, called_function"""', "[(curses.KEY_DOWN, 'drive_backward'), ('s', 'drive_backward'), (curses.\n KEY_LEFT, 'turn_left'), ('a', 'turn_left'), (curses.KEY_RIGHT,\n 'turn_right'), ('d', 'turn_right')]"], {}), "('key, called_function', [(curses.KEY_DOWN,\n 'drive_backward'), ('s', 'drive_backward'), (curses.KEY_LEFT,\n 'turn_left'), ('a', 'turn_left'), (curses.KEY_RIGHT, 'turn_right'), (\n 'd', 'turn_right')])\n", (991, 1199), False, 'import pytest\n'), ((1792, 1844), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""key"""', "[curses.KEY_UP, 'w']"], {}), "('key', [curses.KEY_UP, 'w'])\n", (1815, 1844), False, 'import pytest\n'), ((2488, 2611), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""key"""', "[curses.KEY_UP, 'w', curses.KEY_DOWN, 's', curses.KEY_LEFT, 'a', curses.\n KEY_RIGHT, 'd']"], {}), "('key', [curses.KEY_UP, 'w', curses.KEY_DOWN, 's',\n curses.KEY_LEFT, 'a', curses.KEY_RIGHT, 'd'])\n", (2511, 2611), False, 'import pytest\n'), ((3250, 3302), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""key"""', "[curses.KEY_UP, 'w']"], {}), "('key', [curses.KEY_UP, 'w'])\n", (3273, 3302), False, 'import pytest\n'), ((4035, 4266), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""key, called_function"""', "[(curses.KEY_DOWN, 'drive_backward'), ('s', 'drive_backward'), (curses.\n KEY_LEFT, 'turn_left'), ('a', 'turn_left'), (curses.KEY_RIGHT,\n 'turn_right'), ('d', 'turn_right')]"], {}), "('key, called_function', [(curses.KEY_DOWN,\n 'drive_backward'), ('s', 'drive_backward'), (curses.KEY_LEFT,\n 'turn_left'), ('a', 'turn_left'), (curses.KEY_RIGHT, 'turn_right'), (\n 'd', 'turn_right')])\n", (4058, 4266), False, 'import pytest\n'), ((340, 357), 'unittest.mock.MagicMock', 'MagicMock', (['Screen'], {}), '(Screen)\n', (349, 357), False, 'from unittest.mock import Mock, MagicMock\n'), ((1730, 1751), 'precon.remote_control.steer_vehicle', 'steer_vehicle', (['screen'], {}), '(screen)\n', (1743, 1751), False, 'from precon.remote_control import steer_vehicle, Screen\n'), ((2426, 2447), 'precon.remote_control.steer_vehicle', 'steer_vehicle', (['screen'], {}), '(screen)\n', (2439, 2447), False, 'from precon.remote_control import steer_vehicle, Screen\n'), ((3189, 3210), 'precon.remote_control.steer_vehicle', 'steer_vehicle', (['screen'], {}), '(screen)\n', (3202, 3210), False, 'from precon.remote_control import steer_vehicle, Screen\n'), ((3939, 3960), 'precon.remote_control.steer_vehicle', 'steer_vehicle', (['screen'], {}), '(screen)\n', (3952, 3960), False, 'from precon.remote_control import steer_vehicle, Screen\n'), ((4861, 4882), 'precon.remote_control.steer_vehicle', 'steer_vehicle', (['screen'], {}), '(screen)\n', (4874, 4882), False, 'from precon.remote_control import steer_vehicle, Screen\n')]
|
import json
from typing import Any, Dict
import boto3
import uuid
import os
from aws_lambda_powertools.utilities.typing import LambdaContext
from aws_lambda_powertools import Logger, Metrics, Tracer
_queue_name = os.getenv("QUEUE_NAME")
_bucket_name = os.getenv("BUCKET_NAME")
_s3 = boto3.resource("s3")
_sqs = boto3.resource("sqs")
_queue = _sqs.get_queue_by_name(QueueName=_queue_name)
logger = Logger()
tracer = Tracer()
metrics = Metrics()
@metrics.log_metrics(capture_cold_start_metric=True)
@logger.inject_lambda_context
@tracer.capture_lambda_handler
def main(event: Dict[str, Any], context: LambdaContext):
request_id = context.aws_request_id or uuid.uuid4()
logger.structure_logs(append=True, requestId=request_id)
try:
# write body to s3 bucket
body = json.loads(event["body"])
logger.info(f"producer lambda called with requestId {request_id}")
filename = f"{uuid.uuid4()}.json"
s3object = _s3.Object(_bucket_name, filename)
s3object.put(Body=(bytes(json.dumps(body).encode("UTF-8"))))
logger.info(f"put {filename} into s3 bucket.")
# send filename to sqs.
_queue.send_message(MessageBody=json.dumps({"file_name": filename}))
logger.info(f"sent {filename} to sqs")
return {"statusCode": 200, "body": "published message to SQS"}
except Exception as e:
logger.error(str(e))
return {"statusCode": 500, "body": "something went wrong"}
|
[
"uuid.uuid4",
"json.loads",
"json.dumps",
"boto3.resource",
"aws_lambda_powertools.Tracer",
"aws_lambda_powertools.Logger",
"aws_lambda_powertools.Metrics",
"os.getenv"
] |
[((214, 237), 'os.getenv', 'os.getenv', (['"""QUEUE_NAME"""'], {}), "('QUEUE_NAME')\n", (223, 237), False, 'import os\n'), ((253, 277), 'os.getenv', 'os.getenv', (['"""BUCKET_NAME"""'], {}), "('BUCKET_NAME')\n", (262, 277), False, 'import os\n'), ((285, 305), 'boto3.resource', 'boto3.resource', (['"""s3"""'], {}), "('s3')\n", (299, 305), False, 'import boto3\n'), ((313, 334), 'boto3.resource', 'boto3.resource', (['"""sqs"""'], {}), "('sqs')\n", (327, 334), False, 'import boto3\n'), ((400, 408), 'aws_lambda_powertools.Logger', 'Logger', ([], {}), '()\n', (406, 408), False, 'from aws_lambda_powertools import Logger, Metrics, Tracer\n'), ((418, 426), 'aws_lambda_powertools.Tracer', 'Tracer', ([], {}), '()\n', (424, 426), False, 'from aws_lambda_powertools import Logger, Metrics, Tracer\n'), ((437, 446), 'aws_lambda_powertools.Metrics', 'Metrics', ([], {}), '()\n', (444, 446), False, 'from aws_lambda_powertools import Logger, Metrics, Tracer\n'), ((663, 675), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (673, 675), False, 'import uuid\n'), ((795, 820), 'json.loads', 'json.loads', (["event['body']"], {}), "(event['body'])\n", (805, 820), False, 'import json\n'), ((918, 930), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (928, 930), False, 'import uuid\n'), ((1189, 1224), 'json.dumps', 'json.dumps', (["{'file_name': filename}"], {}), "({'file_name': filename})\n", (1199, 1224), False, 'import json\n'), ((1025, 1041), 'json.dumps', 'json.dumps', (['body'], {}), '(body)\n', (1035, 1041), False, 'import json\n')]
|
#!/usr/bin/env python3
import json
import os
import sys
from classes import Graph
if 'C3NAVPROJECT' in os.environ:
project = os.environ['C3NAVPROJECT']
elif len(sys.argv) > 1:
project = sys.argv[1]
else:
print('Please specify project: run.py <project> or environment variable C3NAVPROJECT')
sys.exit(1)
if len(sys.argv) != 3:
print('select language!')
sys.exit(1)
lang = sys.argv[2]
print('translating into %s…' % lang)
data = Graph(project, auto_connect=False).data
rooms = list(data['rooms'].keys())
pois = list(data['pois'].keys())
superrooms = [room['superroom'] for room in data['rooms'].values() if 'superroom' in room]
roomgroups = list(sum((room.get('groups', []) for room in data['rooms'].values()), []))
roomgroups += [(':'+s) for s in roomgroups]
poigroups = list(sum((poi.get('groups', []) for poi in data['pois'].values()), []))
poigroups += [(':'+s) for s in poigroups]
for name in set(pois+roomgroups+rooms+superrooms+poigroups):
data = json.load(open('projects/'+project+'/titles.json'))
titles = data.get(name, {})
if lang in titles:
continue
for l, t in titles.items():
print('%s: %s' % (l, t))
newtitle = input('%s [%s]: ' % (name, titles.get(lang, name))).strip()
if not newtitle.strip():
newtitle = titles.get(lang, name)
titles[lang] = newtitle
data[name] = titles
json.dump(data, open('projects/'+project+'/titles.json', 'w'), indent=4, sort_keys=True)
print('')
print('all done.')
|
[
"sys.exit",
"classes.Graph"
] |
[((379, 390), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (387, 390), False, 'import sys\n'), ((455, 489), 'classes.Graph', 'Graph', (['project'], {'auto_connect': '(False)'}), '(project, auto_connect=False)\n', (460, 489), False, 'from classes import Graph\n'), ((309, 320), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (317, 320), False, 'import sys\n')]
|
from botnet.config import Config
from botnet.modules.builtin.mumble import Mumble
from botnet.modules.builtin.mumble import mumble_pb2
from botnet.modules.builtin.mumble.mumble import encode, decode_header, \
message_types, Decoder
def test_encode_decode():
ver = mumble_pb2.Version()
ver.version = 1
ver.release = 'dev'
ver.os = 'gnu'
ver.os_version = 'linux'
b = encode(ver)
decoder = Decoder
typ, length = decode_header(b[:6])
assert message_types[typ] == type(ver)
assert length
def test_decoder():
ver = mumble_pb2.Version()
ver.version = 1
ver.release = 'dev'
ver.os = 'gnu'
ver.os_version = 'linux'
b = encode(ver)
def tester(msg):
assert msg.version == 1
        assert msg.release == 'dev'
assert msg.os == 'gnu'
assert msg.os_version == 'linux'
decoder = Decoder(tester)
decoder.write(b)
|
[
"botnet.modules.builtin.mumble.mumble.encode",
"botnet.modules.builtin.mumble.mumble.Decoder",
"botnet.modules.builtin.mumble.mumble_pb2.Version",
"botnet.modules.builtin.mumble.mumble.decode_header"
] |
[((274, 294), 'botnet.modules.builtin.mumble.mumble_pb2.Version', 'mumble_pb2.Version', ([], {}), '()\n', (292, 294), False, 'from botnet.modules.builtin.mumble import mumble_pb2\n'), ((395, 406), 'botnet.modules.builtin.mumble.mumble.encode', 'encode', (['ver'], {}), '(ver)\n', (401, 406), False, 'from botnet.modules.builtin.mumble.mumble import encode, decode_header, message_types, Decoder\n'), ((448, 468), 'botnet.modules.builtin.mumble.mumble.decode_header', 'decode_header', (['b[:6]'], {}), '(b[:6])\n', (461, 468), False, 'from botnet.modules.builtin.mumble.mumble import encode, decode_header, message_types, Decoder\n'), ((563, 583), 'botnet.modules.builtin.mumble.mumble_pb2.Version', 'mumble_pb2.Version', ([], {}), '()\n', (581, 583), False, 'from botnet.modules.builtin.mumble import mumble_pb2\n'), ((684, 695), 'botnet.modules.builtin.mumble.mumble.encode', 'encode', (['ver'], {}), '(ver)\n', (690, 695), False, 'from botnet.modules.builtin.mumble.mumble import encode, decode_header, message_types, Decoder\n'), ((872, 887), 'botnet.modules.builtin.mumble.mumble.Decoder', 'Decoder', (['tester'], {}), '(tester)\n', (879, 887), False, 'from botnet.modules.builtin.mumble.mumble import encode, decode_header, message_types, Decoder\n')]
|
"""
This package includes my constraints/utilities/etc for cpmpy.
This cpmpy model was written by <NAME> (<EMAIL>)
See also my cpmpy page: http://hakank.org/cpmpy/
"""
import sys, math, re
import itertools
import numpy as np
from functools import reduce
from cpmpy import *
from cpmpy.expressions.globalconstraints import GlobalConstraint
from cpmpy.solvers import *
from ortools.sat.python import cp_model as ort
from cpmpy.transformations.flatten_model import flatten_constraint, flatten_model
from cpmpy.transformations.get_variables import print_variables
def AllDifferent_except_0(args):
"""
    Ensure that all arguments that are != 0 have distinct values.
    """
    # Note: The parentheses around (var1 != 0) are needed!
return [ ((var1!= 0) & (var2 != 0)).implies(var1 != var2) for var1, var2 in all_pairs(args)]
def all_different_except_0(args):
"""
Alias for AllDifferent_except_0(args).
"""
return AllDifferent_except_0(args)
def to_num(a,n,base):
"""
to_num(a, n, base)
    Ensure that the digits in array `a` correspond to the number `n` in base `base`.
"""
tlen = len(a)
return n == sum([(base ** (tlen - i - 1)) * a[i] for i in range(tlen)])
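# Illustrative usage sketch (added; not part of the original module). It
# assumes the cpmpy imports at the top of this file. to_num(a,n,10) links
# the digit array a to the number n in base 10.
def _demo_to_num():
    a = intvar(0, 9, shape=4)
    n = intvar(0, 9999)
    model = Model(to_num(a, n, 10), n == 1984)
    if model.solve():
        print(a.value())  # -> [1 9 8 4]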
def increasing(args):
"""
Ensure that the values in args are increasing.
"""
return [args[i-1] <= args[i] for i in range(1,len(args))]
def increasing_strict(args):
"""
    Ensure that the values in args are strictly increasing.
"""
return [args[i-1] < args[i] for i in range(1,len(args))]
def decreasing(args):
"""
Ensure that the values in args are decreasing.
"""
return [args[i-1] >= args[i] for i in range(1,len(args))]
def decreasing_strict(args):
"""
    Ensure that the values in args are strictly decreasing.
    """
    return [args[i-1] > args[i] for i in range(1,len(args))]
def all_pairs(args):
"""
Generate all pairs from the list of lists args.
    (stolen from cpmpy/globalconstraints.py)
"""
return list(itertools.combinations(args, 2))
def get_different_solution(m,x):
"""
    Add the current solution (x) to the model to generate
other solutions.
Usage:
# ...
ss = CPM_ortools(model)
if ss.solve():
print(x.value())
get_different_solution(ss, x)
Note: The array in x must be a flattened array. If there are
many decision variables, use flatten_lists(a) to
flatten out the array. E.g.
# ...
ss = CPM_ortools(model)
while ss.solve():
print(x.value()) # an array
print(y.value()) # a variable
print(z.value()) # another variable
get_different_solution(ss,flatten_lists([x,[y,z]])
Note that this might be slow for larger models or models with
many solutions. If so, try to use
- ortools_wrapper()
or the simple solution printers such as
- ORT_simple_printer
- ORT_arrays_printer
- ORT_simple_printer_matrix
- ORT_simple_function_printer
or define a similiar solution printer.
"""
# n = len(x)
# m += [any([x[i].value() != x[i] for i in range(n)])]
m += [any([t.value() != t for t in x])]
def flatten_lists(a):
"""
Flatten a list of lists.
Note: a must be an array of arrays (list of lists).
See get_different_solution for examples.
"""
return [item for sublist in a for item in sublist]
class ORT_simple_printer(ort.CpSolverSolutionCallback):
"""
A simple printer callback for single array printing.
"""
def __init__(self, varmap, a, num_solutions=0):
super().__init__()
self.solcount = 0
self.varmap = varmap
self.vars = (a)
self.num_solutions=num_solutions
def on_solution_callback(self):
self.solcount += 1 # I always start at 1. :-)
# populate values before printing
# For array of arrays (Tias' original)
# for wm in self.vars:
# for cpm_var in wm:
# cpm_var._value = self.Value(self.varmap[cpm_var])
# For single arrays:
for cpm_var in self.vars:
cpm_var._value = self.Value(self.varmap[cpm_var])
(a) = self.vars
print(f"#{self.solcount}: {a.value()}")
if self.num_solutions > 0 and self.solcount >= self.num_solutions:
self.StopSearch()
class ORT_arrays_printer(ort.CpSolverSolutionCallback):
"""
A simple printer callback for array of arrays.
"""
def __init__(self, varmap, a, num_solutions=0):
super().__init__()
self.solcount = 0
self.varmap = varmap
self.vars = (a)
self.num_solutions=num_solutions
def on_solution_callback(self):
self.solcount += 1 # I always start at 1. :-)
# populate values before printing
# For array of arrays (Tias' original)
for wm in self.vars:
for cpm_var in wm:
cpm_var._value = self.Value(self.varmap[cpm_var])
(a) = self.vars
print(f"#{self.solcount}: {a.value()}")
if self.num_solutions > 0 and self.solcount >= self.num_solutions:
self.StopSearch()
class ORT_simple_printer_matrix(ort.CpSolverSolutionCallback):
"""
A simple printer callback for printing a matrix.
"""
def __init__(self, varmap, a, rows,cols, num_solutions=0):
super().__init__()
self.solcount = 0
self.varmap = varmap
self.vars = (a)
self.rows = rows
self.cols = cols
self.num_solutions=num_solutions
def on_solution_callback(self):
self.solcount += 1
for cpm_var in self.vars:
cpm_var._value = self.Value(self.varmap[cpm_var])
(a) = self.vars
print(f"#{self.solcount}:")
for i in range(self.rows):
for j in range(self.cols):
print("%3d" % a[i*self.cols+j].value(), end=" ")
print()
print()
if self.num_solutions > 0 and self.solcount >= self.num_solutions:
self.StopSearch()
class ORT_simple_function_printer(ort.CpSolverSolutionCallback):
"""
A printer callback with a callback (cb_fun) for printing
the array a, which should be structured by the user and
including .value() for the variables.
Note that the data array a must be a flattening array
to be used with this printer callback.
Example of a printer function:
def f(a):
print(a[0].value(),"+",a[1].value(),"=",a[2].value())
which will print a solution such as
2 + 3 = 5
"""
def __init__(self, varmap, a, cb_fun,num_solutions=0):
super().__init__()
self.solcount = 0
self.varmap = varmap
self.vars = (a)
self.cb_fun = cb_fun
self.num_solutions=num_solutions
def on_solution_callback(self):
self.solcount += 1
# For single arrays:
for cpm_var in self.vars:
cpm_var._value = self.Value(self.varmap[cpm_var])
(a) = self.vars
print(f"\n#{self.solcount}:")
self.cb_fun(a)
if self.num_solutions > 0 and self.solcount >= self.num_solutions:
self.StopSearch()
class ORT_simple_solution_counter(ort.CpSolverSolutionCallback):
"""
This is a solution 'printer' that just count the solutions.
"""
def __init__(self, varmap, a):
super().__init__()
self.solcount = 0
self.varmap = varmap
self.vars = (a)
def on_solution_callback(self):
self.solcount += 1
for wm in self.vars:
for cpm_var in wm:
cpm_var._value = self.Value(self.varmap[cpm_var])
(a) = self.vars
class ORT_function_printer_arrays(ort.CpSolverSolutionCallback):
"""
A printer callback with a callback (cb_fun) for printing
the array of arrays a, which should be structured by the user and
including .value() for the variables.
    This version prints the solution number.
Example of a printer function:
def print_solution(a):
print('x:', a[0].value())
print('y:', a[1].value())
"""
def __init__(self, varmap, a, cb_fun,num_solutions=0):
super().__init__()
self.solcount = 0
self.varmap = varmap
self.vars = (a)
self.cb_fun = cb_fun
self.num_solutions=num_solutions
def on_solution_callback(self):
self.solcount += 1
for wm in self.vars:
for cpm_var in wm:
cpm_var._value = self.Value(self.varmap[cpm_var])
(a) = self.vars
print(f"sol #{self.solcount}")
self.cb_fun(a)
print()
if self.num_solutions > 0 and self.solcount >= self.num_solutions:
self.StopSearch()
class ORT_function_printer_arrays2(ort.CpSolverSolutionCallback):
"""
A printer callback with a callback (cb_fun) for printing
the array of arrays a, which should be structured by the user and
including .value() for the variables.
    This version doesn't print the solution number.
Example of a printer function:
def print_solution(a):
print('x:', a[0].value())
print('y:', a[1].value())
"""
def __init__(self, varmap, a, cb_fun,num_solutions=0):
super().__init__()
self.solcount = 0
self.varmap = varmap
self.vars = (a)
self.cb_fun = cb_fun
self.num_solutions=num_solutions
def on_solution_callback(self):
self.solcount += 1
for wm in self.vars:
for cpm_var in wm:
cpm_var._value = self.Value(self.varmap[cpm_var])
(a) = self.vars
self.cb_fun(a)
if self.num_solutions > 0 and self.solcount >= self.num_solutions:
self.StopSearch()
def print_solution(a):
"""
print_solution(a)
Default callback method for printing the solution in a printer callback.
Note: a must be an array of arrays to be used with ortools_wrapper
(defined below).
"""
for x in a:
print(x.value())
def ortools_wrapper(model,var_array,print_solution=print_solution,num_sols=0):
"""
ortools_wrapper((model,var_array,print_solution=print_solution,num_sols=0)
This is a simple wrapper for printing the solutions of a model and tends
to be (significantly) faster than using
ss = CPM_ortools(model)
while ss.solve():
# ...
get_different_solution(ss,flatten_lists(var_array))
Parameters:
- model : the model
- var_array: the array of arrays of the decision variables to be printed
with print_solution(var_array)
- print_solution: the method used to do the actual printing of the solution.
Default is print_solution(a) defined above. The function
can be overwritten / defined in the current constraint model.
- num_sols : number of solutions. Default 0, all solutions.
Note: For optimality problems, use ortools_wrapper_opt(.) instead.
"""
ss = CPM_ortools(model)
cb = ORT_function_printer_arrays(ss.varmap,var_array,print_solution,num_sols)
# Flags to experiment with
    # ss.ort_solver.parameters.num_search_workers = 8 # Doesn't work together with SearchForAllSolutions
# ss.ort_solver.parameters.search_branching = ort.PORTFOLIO_SEARCH
# ss.ort_solver.parameters.cp_model_presolve = False
ss.ort_solver.parameters.linearization_level = 0
ss.ort_solver.parameters.cp_model_probing_level = 0
ort_status = ss.ort_solver.SearchForAllSolutions(ss.ort_model, cb)
ss._after_solve(ort_status)
print(ss.status())
print("Nr solutions:", cb.solcount)
print("Num conflicts:", ss.ort_solver.NumConflicts())
print("NumBranches:", ss.ort_solver.NumBranches())
print("WallTime:", ss.ort_solver.WallTime())
print()
def ortools_wrapper2(model,var_array,print_solution=print_solution,num_sols=0):
"""
ortools_wrapper((model,var_array,print_solution=print_solution,num_sols=0)
This is a simple wrapper for printing the solutions of a model and tends
to be (significantly) faster than using
ss = CPM_ortools(model)
while ss.solve():
# ...
get_different_solution(ss,flatten_lists(var_array))
    This version doesn't print the solution number.
Parameters:
- model : the model
- var_array: the array of arrays of the decision variables to be printed
with print_solution(var_array)
- print_solution: the method used to do the actual printing of the solution.
Default is print_solution(a) defined above. The function
can be overwritten / defined in the current constraint model.
- num_sols : number of solutions. Default 0, all solutions.
Note: For optimality problems, use ortools_wrapper_opt(.) instead.
"""
ss = CPM_ortools(model)
cb = ORT_function_printer_arrays2(ss.varmap,var_array,print_solution,num_sols)
# Flags to experiment with
    # ss.ort_solver.parameters.num_search_workers = 8 # Doesn't work together with SearchForAllSolutions
# ss.ort_solver.parameters.search_branching = ort.PORTFOLIO_SEARCH
# ss.ort_solver.parameters.cp_model_presolve = False
ss.ort_solver.parameters.linearization_level = 0
ss.ort_solver.parameters.cp_model_probing_level = 0
ort_status = ss.ort_solver.SearchForAllSolutions(ss.ort_model, cb)
print()
ss._after_solve(ort_status) # post-process after solve() call...
print(ss.status())
print("Nr solutions:", cb.solcount)
print("Num conflicts:", ss.ort_solver.NumConflicts())
print("NumBranches:", ss.ort_solver.NumBranches())
print("WallTime:", ss.ort_solver.WallTime())
print()
def ortools_wrapper_opt(model,var_array,print_solution=print_solution,num_sols=1,num_procs=1):
"""
ortools_wrapper_opt((model,var_array,print_solution=print_solution,num_sols=0)
This is a simple wrapper for printing the _optimal_ solution of a model.
This tends to be (significantly) faster than using
if model.solve():
# ...
Parameters:
- model : the model
- var_array: the array of arrays of the decision variables to be printed
with print_solution(var_array)
- print_solution: the method used to do the actual printing of the solution.
Default is print_solution(a) defined above. The function
can be overwritten / defined in the current constraint model.
    - num_sols   : number of solutions. Default 1 (the optimal solution).
"""
ss = CPM_ortools(model)
cb = ORT_function_printer_arrays(ss.varmap,var_array,print_solution,1)
# Flags to experiment with
if num_procs > 1:
ss.ort_solver.parameters.num_search_workers = num_procs
# ss.ort_solver.parameters.search_branching = ort.PORTFOLIO_SEARCH
# ss.ort_solver.parameters.cp_model_presolve = False
ss.ort_solver.parameters.linearization_level = 0
ss.ort_solver.parameters.cp_model_probing_level = 0
# Note: This is the real difference between this method and ortool_wrapper.
# For optimal problems one cannot use SearchForAllSolutions. Instead
# one must use ss.ort_solver.Solve(,)
# ort_status = ss.ort_solver.SearchForAllSolutions(ss.ort_model, cb)
ort_status = ss.ort_solver.Solve(ss.ort_model, cb)
ss._after_solve(ort_status) # post-process after solve() call...
print(ss.status())
print("Nr solutions:", cb.solcount)
print("Num conflicts:", ss.ort_solver.NumConflicts())
print("NumBranches:", ss.ort_solver.NumBranches())
print("WallTime:", ss.ort_solver.WallTime())
print()
def ortools_wrapper_count_solutions(model,var_array):
"""
ortools_wrapper((model,var_array,print_solution=print_solution,num_sols=0)
This is a simple wrapper for just counting the solutions of a model.
Parameters:
- model : the model
- var_array: the array of arrays of the decision variables to be printed
with print_solution(var_array)
"""
ss = CPM_ortools(model)
cb = ORT_simple_solution_counter(ss.varmap,var_array)
# Flags to experiment with
    # ss.ort_solver.parameters.num_search_workers = 8 # Doesn't work together with SearchForAllSolutions
# ss.ort_solver.parameters.search_branching = ort.PORTFOLIO_SEARCH
# ss.ort_solver.parameters.cp_model_presolve = False
ss.ort_solver.parameters.linearization_level = 0
ss.ort_solver.parameters.cp_model_probing_level = 0
ort_status = ss.ort_solver.SearchForAllSolutions(ss.ort_model, cb)
ss._after_solve(ort_status)
return cb.solcount
def base_array(n):
"""
Returns an array of length `n` with base coefficients.
Example: `base_array(4)` returns the array [1000,100,10,1]
"""
return np.array([10**i for i in range(n-1,-1,-1)])
def scalar_product(a,b):
"""
`scalar_product(a,b)`
Returns the scalar product of the arrays `a` and `b`.
Assumption: `len(a) == len(b)`
"""
    assert len(a) == len(b), "len(a) == len(b)"
# return np.dot(a,b)
return sum(a*b)
def scalar_product1(a):
"""
`scalar_product1(a)`
    Returns the scalar product of the array `a` and a base_array of the same length.
    """
# return np.dot(a,base_array(len(a)))
return sum(a*base_array(len(a)))
def my_circuit(x):
"""
circuit(x)
    Ensures that x is a circuit.
Note: This assumes that x is has the domain 0..len(x)-1,
i.e. 0-based.
"""
assert x[0].lb == 0, f"circuit: lb is {x[0].lb}, but must be 0"
n = len(x)
z = intvar(0, n-1,shape=n,name='z')
constraints = [
AllDifferent(x),
AllDifferent(z),
# put the orbit of x[0] in in z[1..n]
z[0] == x[0],
[ z[i] == x[z[i-1]] for i in range(1, n-1)],
# may not be 0 for i < n-1
[ z[i] != 0 for i in range(1, n-1)],
# when i = n-1 it must be 0
z[n-1] == 0
]
return constraints
def my_circuit_path(x,z):
"""
circuit(x,z)
    Ensures that x is a circuit and z is the path.
Note: This assumes that x is has the domain 0..len(x)-1,
i.e. 0-based.
"""
assert x[0].lb == 0, f"circuit: x[0].lb is {x[0].lb}, but must be 0"
n = len(x)
constraints = [
AllDifferent(x),
AllDifferent(z),
# put the orbit of x[0] in in z[1..n]
z[0] == x[0],
[ z[i] == x[z[i-1]] for i in range(1, n-1)],
# may not be 0 for i < n-1
[ z[i] != 0 for i in range(1, n-1)],
# when i = n-1 it must be 0
z[n-1] == 0
]
return constraints
def count(a,val,c):
"""
count(a,val,c)
c is the number of occurrences of val in array a.
"""
    return [c == sum([a[i] == val for i in range(len(a))])]
def atmost(a,val,c):
"""
atmost(a,val,c)
    Ensure that the number of occurrences of val in a is at most c.
"""
return [sum([a[i] == val for i in range(len(a))]) <= c]
def atleast(a,val,c):
"""
atleast(a,val,c)
    Ensure that the number of occurrences of val in a is at least c.
"""
return [sum([a[i] == val for i in range(len(a))]) >= c]
def exactly(a,val,c):
"""
exactly(a,val,c)
Ensure that the number of occurrences of val in a is exactly c.
"""
return [sum([a[i] == val for i in range(len(a))]) == c]
def global_cardinality_count(a,gcc):
"""
global_cardinality_count(a,gcc)
Global cardinality count: Collect the number of occurrences of each value 0..a.ub
    in gcc. The array gcc must have length ub+1.
"""
n = len(a)
ub = max([a[i].ub for i in range(n)])
constraints = []
for i in range(ub+1):
constraints += [count(a,i,gcc[i])]
return constraints
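# Hedged usage sketch (added): gcc[v] is constrained to the number of
# occurrences of value v in a; the gcc array must have length max(ub)+1.
def _demo_gcc():
    a = intvar(0, 2, shape=6)
    gcc = intvar(0, 6, shape=3)
    model = Model(global_cardinality_count(a, gcc), gcc[0] == 1, gcc[1] == 2)
    if model.solve():
        print(a.value(), gcc.value())  # one 0, two 1s, hence three 2s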
def inverse(x,y):
"""
inverse(x,y)
Ensures that:
       x[i] == j <=> y[j] == i
Note: inverse(x,y) is sometimes called assignment(x,y).
There is an alternative version: inverse(x) which can
be simulated by inverse(x,x)
"""
n = len(x)
assert n == len(y), "x and y must be of equal length"
constraints = []
for i in range(n):
for j in range(n):
constraints += [(x[i] == j) == (y[j] == i)]
return constraints
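# Usage sketch (added): the channeling x[i] == j <=> y[j] == i makes y the
# inverse permutation of x.
def _demo_inverse():
    x = intvar(0, 3, shape=4)
    y = intvar(0, 3, shape=4)
    model = Model(inverse(x, y),
                  [x[i] == v for i, v in enumerate([2, 0, 3, 1])])
    if model.solve():
        print(y.value())  # -> [1 3 0 2]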
def my_cumulative(s, d, r, b):
"""
    Decomposition of cumulative.
Inspired by the MiniZinc implementation.
The MiniZinc decomposition is discussed in the paper:
<NAME>, <NAME>, <NAME>, and <NAME>.
'Why cumulative decomposition is not as bad as it sounds.'
Parameters:
s: start_times assumption: array of varint
d: durations assumption: array of int
r: resources assumption: array of int
b: resource limit assumption: varint or int
"""
constraints = []
max_d = max(d)
tasks = [i for i in range(len(s)) if r[i] > 0 and d[i] > 0]
times_min = min([s[i].lb for i in tasks])
times_max = max([s[i].ub + max_d for i in tasks])
for t in range(times_min, times_max + 1):
constraints += [ b >= sum([((s[i] <= t) & (t < s[i] + d[i])) * r[i] for i in tasks])]
# Somewhat experimental:
    # This constraint is needed to constrain the upper limit of b.
if not isinstance(b, int):
constraints += [b <= sum(r)]
return constraints
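# Usage sketch (added; toy data assumed): three tasks with fixed durations d
# and resource demands r must fit under capacity 2 at every time point.
def _demo_cumulative():
    s = intvar(0, 10, shape=3)  # start times
    d = [3, 4, 2]               # durations
    r = [1, 2, 1]               # resource demands
    model = Model(my_cumulative(s, d, r, 2))
    if model.solve():
        print(s.value())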
def member_of(x, val):
"""
member_of(x, val)
Ensures that the value `val` is in the array `x`.
"""
n = len(x)
# cc = intvar(0,n)
# constraints = [count(x, val, cc), cc > 0]
constraints = [sum([x[i] == val for i in range(n)]) > 0]
return constraints
def regular(x, Q, S, d, q0, F):
"""
Global constraint regular
This is a translation of MiniZinc's regular constraint (defined in
lib/zinc/globals.mzn), via the Comet code refered above.
All comments are from the MiniZinc code.
'''
The sequence of values in array 'x' (which must all be in the range 1..S)
is accepted by the DFA of 'Q' states with input 1..S and transition
function 'd' (which maps (1..Q, 1..S) -> 0..Q)) and initial state 'q0'
(which must be in 1..Q) and accepting states 'F' (which all must be in
1..Q). We reserve state 0 to be an always failing state.
'''
x : IntVar array
Q : number of states
S : input_max
d : transition matrix
q0: initial state
F : accepting states
Note: As mentioned above the states must start at 1 since 0 is
represents a failed state.
Note: Compare with regular_table which use the Table constraints
instead of Element constraint in the main loop.
"""
assert Q > 0, 'regular: "Q" must be greater than zero'
assert S > 0, 'regular: "S" must be greater than zero'
# d2 is the same as d, except we add one extra transition for
# each possible input; each extra transition is from state zero
# to state zero. This allows us to continue even if we hit a
# non-accepted input.
d2 = []
for i in range(Q + 1):
row = []
for j in range(S):
if i == 0:
row.append(0)
else:
row.append(d[i - 1][j])
d2.append(row)
d2_flatten = [d2[i][j] for i in range(Q + 1) for j in range(S)]
# If x has index set m..n, then a[m-1] holds the initial state
# (q0), and a[i+1] holds the state we're in after processing
# x[i]. If a[n] is in F, then we succeed (ie. accept the
# string).
x_range = list(range(0, len(x)))
m = 0
n = len(x)
a = [intvar(0, Q + 1) for i in range(m, n + 1)]
constraints = []
# Check that the final state is in F
constraints += [member_of(F,a[-1])]
# First state is q0
constraints += [a[m] == q0]
for i in x_range:
constraints += [x[i] >= 1]
constraints += [x[i] <= S]
# Determine a[i+1]: a[i+1] == d2[a[i], x[i]]
constraints += [
a[i + 1] == Element(d2_flatten,(a[i]) * S + (x[i] - 1))
]
return constraints
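# Usage sketch (added; toy DFA assumed): two states over inputs {1,2}.
# State 1 loops on input 1 and moves to state 2 on input 2; state 2 accepts
# only further 2s (state 0 is the implicit failure state). The accepted
# language is 1*2+.
def _demo_regular():
    d = [[1, 2],
         [0, 2]]
    x = intvar(1, 2, shape=4)
    model = Model(regular(x, 2, 2, d, 1, [2]))
    if model.solve():
        print(x.value())  # some sequence of 1s followed by at least one 2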
def regular_table(x, Q, S, d, q0, F):
"""
Global constraint regular_table
This is a translation of MiniZinc's regular constraint (defined in
lib/zinc/globals.mzn), via the Comet code refered above.
All comments are from the MiniZinc code.
'''
The sequence of values in array 'x' (which must all be in the range 1..S)
is accepted by the DFA of 'Q' states with input 1..S and transition
function 'd' (which maps (1..Q, 1..S) -> 0..Q)) and initial state 'q0'
(which must be in 1..Q) and accepting states 'F' (which all must be in
1..Q). We reserve state 0 to be an always failing state.
'''
x : IntVar array
Q : number of states
S : input_max
d : transition matrix
q0: initial state
F : accepting states
Note: As mentioned above the states must start at 1 since 0 is
represents a failed state.
The difference between this version (regular_table) and
regular is that this version use Table constraint instead
of Element constraint.
"""
assert Q > 0, 'regular: "Q" must be greater than zero'
assert S > 0, 'regular: "S" must be greater than zero'
# d2 is the same as d, except we add one extra transition for
# each possible input; each extra transition is from state zero
# to state zero. This allows us to continue even if we hit a
# non-accepted input.
d2 = []
for i in range(Q + 1):
row = []
for j in range(S):
if i == 0:
# This is different from regular(.)
row.append((0,j,0))
else:
# This is different from regular(.)
row.append((i,j, d[i - 1][j]))
d2.append(row)
d2_flatten = [d2[i][j] for i in range(Q + 1) for j in range(S)]
# If x has index set m..n, then a[m-1] holds the initial state
# (q0), and a[i+1] holds the state we're in after processing
# x[i]. If a[n] is in F, then we succeed (ie. accept the
# string).
x_range = list(range(0, len(x)))
m = 0
n = len(x)
a = [intvar(0, Q + 1) for i in range(m, n + 1)]
constraints = []
# Check that the final state is in F
constraints += [member_of(F,a[-1])]
# First state is q0
constraints += [a[m] == q0]
x_lb, x_ub = get_min_max_domain(x)
for i in x_range:
constraints += [x[i] >= 1]
constraints += [x[i] <= S]
# Determine a[i+1]: a[i+1] == d2[a[i], x[i]]
xi1 = intvar(0,x_ub)
constraints += [
# These two constraints are different
# from regular(.)
xi1 == x[i]-1,
Table((a[i], xi1, a[i + 1]), d2_flatten)
]
return constraints
def lex_less(x,y):
"""
lex_less(x,y)
Ensures that the array 'x' is strictly lexicographically less than array 'y'.
Compares them from first to last element, regardless of indices
This is a port of MiniZinc's definition lex_less_int
https://github.com/MiniZinc/libminizinc/blob/master/share/minizinc/std/fzn_lex_less_int.mzn
Note that we simplify the calculation of lx and ly since cpmpy has start index 0 (in MiniZinc
the start index can be user defined).
"""
xlen = len(x)
ylen = len(y)
ux = xlen
uy = ylen
size = min([ux,uy])
# Do not name variables in global constraints
# since then the variables are not unique.
# b = boolvar(shape=size+1,name="b")
b = boolvar(shape=size+1)
constraints = []
constraints += [b[0] == 1 ]
for i in range(size):
constraints += [b[i] == ((x[i] <= y[i]) &
((x[i] < y[i]) | (b[i+1] == 1)) )]
constraints += [b[size] == (ux < uy)]
return constraints
def lex_greater(x,y):
"""
lex_greater(x,y)
Ensures that the array 'x' is strictly lexicographically greater than array 'y'.
Compares them from first to last element, regardless of indices.
This constraint is defined by lex_less(y,x) defined above .
"""
return lex_less(y,x)
def lex2(x):
"""
lex2(x)
Ensures that the rows and columns in the matrix `x` are increasing,
using lex_less.
"""
x_t = x.transpose()
return [[lex_less(x[i],x[i+1]) for i in range(len(x)-1)],
[lex_less(x_t[i],x_t[i+1]) for i in range(len(x_t)-1)]]
#
# Somewhat general definition of knapsack.
#
def knapsack(values, weights, n):
"""
knapsack(values, weights, n)
Creates a model for the knapsack problem with the values, weights and limit n.
See knapsack.py for usage of this.
"""
z = intvar(0, 10000,name="z")
x = intvar(0,1,shape=len(values),name="x")
model = Model(
[
z >= 0,
z == sum(x*values),
sum(x*weights) <= n,
],
maximize=z
)
return [model, x, z]
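# Usage sketch (added; toy data assumed): maximize the packed value under a
# weight limit of 8.
def _demo_knapsack():
    values = np.array([15, 10, 9, 5])
    weights = np.array([1, 5, 3, 4])
    model, x, z = knapsack(values, weights, 8)
    if model.solve():
        print("picked:", x.value(), "total value:", z.value())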
def my_abs(x,y,d):
"""
A decomposition of abs() for experimentation.
"""
constraints = []
b = boolvar()
constraints += [b == (x >= y)]
constraints += [(b).implies(d == x - y)]
constraints += [(~b).implies(d == y - x)]
return constraints
def my_abs2(x,y):
"""
A decomposition of abs() for experimentation.
"""
constraints = []
b = boolvar()
d = intvar(0,1000000)
constraints += [b == (x >= y)]
constraints += [(b).implies(d == x - y)]
constraints += [(~b).implies(d == y - x)]
return d
def prod(x,res):
"""
prod(x,res)
res is the product of the values in x.
"""
return [reduce(lambda a, b: a * b, x) == res]
def prod1(x):
"""
prod1(x)
return the product of the values in x.
"""
return reduce(lambda a, b: a * b, x)
def among(m,x,v):
"""
among(m,x,v)
Requires exactly m variables in x to take one of the values in v.
"""
return [m == sum([x[i] == j for i in range(len(x)) for j in v])]
#
# Symmetry breaking
#
# From
# http://en.wikipedia.org/wiki/Fr#C3#A9nicle_standard_form
# """
# A magic square is in Frénicle standard form, named for
# <NAME>, if the following two conditions apply:
# - the element at position [1,1] (top left corner) is the smallest
# of the four corner elements; and
# - the element at position [1,2] (top edge, second from left) is
# smaller than the element in [2,1].
# """
#
def frenicle(x,n):
constraints = [x[(0,0)] == min([x[0,0], x[0,n-1], x[n-1,0], x[n-1,n-1]])]
constraints += [x[0,1] < x[1,0]]
return constraints
def distribute(card, value, base):
"""
distribute(card, value, base)
Requires that 'card[i]' is the number of occurences of 'value[i]' in 'base'.
Note: card, value, and base are assumed to be intvar arrays.
"""
card_len = len(card)
value_len = len(value)
assert card_len == value_len, "`card` and `value` must have the same length"
base_len = len(base)
constraints = []
constraints += [AllDifferent(value)]
for i in range(card_len):
constraints += [
card[i] == sum([value[i] == base[j] for j in range(base_len)])
]
return constraints
def fill_array(x,x_val):
"""
fill_array(x,x_val)
If x_val[i] != None then x[i] == x_val[i].
"""
constraints = []
for i in range(len(x)):
if x_val[i] != None:
constraints += [x[i] == x_val[i]]
return constraints
def all_different_pairs(a, s):
"""
all_different_pairs(a, s)
all pairs must be different
"""
return [AllDifferent([p for p in pairs(a,s)])]
def increasing_pairs(a, s):
"""
increasing_pairs(a, s)
Ensure that the pairs are in increasing order.
"""
return [increasing(pairs(a,s))]
def decreasing_pairs(a, s):
"""
decreasing_pairs(a, s)
Ensure that the pairs are in decreasing order.
"""
return [decreasing(pairs(a,s))]
def pairs(a, s):
"""
return the pairs of a in the 'integer representation': a[k,0]*(n-1) + a[k,1]
    s is the maximum value, used to encode each pair as a single integer
"""
n = len(a)
return [ a[(k,0)]*(s-1) + a[(k,1)] for k in range(n)]
def all_min_dist(min_dist, x, n):
"""
all_min_dist(min_dist, x, n)
    Ensures that the differences of all pairs (i != j) are >= min_dist.
"""
constraints = []
for i in range(n):
for j in range(i):
            constraints += [abs(x[i]-x[j]) >= min_dist]  # relies on cpmpy overloading abs()
return constraints
def all_different_on_intersection(x, y):
"""
all_different_on_intersection(x, y)
Ensure that the values that are common in x and y are distinct (in each array).
"""
return [count_a_in_b(x,y), count_a_in_b(y,x)]
def count_a_in_b(ass,bss):
"""
count_a_in_b(ass,bss)
helper for all_different_on_intersection
"""
constraints = []
for a in ass:
constraints += [sum([a == b for b in bss]) <= 1]
return constraints
def all_different_modulo(x, m):
"""
all_different_modulo(x, m)
Ensure that all elements in x (modulo m) are distinct
"""
n = len(x)
constraints = []
mods = intvar(0,m-1,shape=n)
for i in range(n):
constraints += [mods[i] == x[i] % m]
constraints += [AllDifferent(mods)]
return constraints
def all_different_cst(xs, cst):
"""
all_different_cst(xs, cst)
Ensure that all elements in xs + cst are distinct
"""
return [AllDifferent([(x + c) for (x,c) in zip(xs,cst)])]
def arith(x, relop, val):
"""
arith(x, relop, val)
Ensure that all elements in x are <relop> val.
"""
constraints = []
for i in range(len(x)):
constraints += [arith_relop(x[i],relop, val)]
return constraints
def arith_relop(a, t, b):
"""
arith_relop(a, t, b)
This is (arguably) a hack.
    Represents each relation as an integer 0..5: <, <=, ==, >=, >, !=.
"""
return [(t == 0).implies(a < b),
(t == 1).implies(a <= b),
(t == 2).implies(a == b),
(t == 3).implies(a >= b),
(t == 4).implies(a > b),
(t == 5).implies(a != b)
]
#
# diffn ported from MiniZinc's fzn_diffn:
#
def diffn(x,y,dx,dy):
"""
diffn(x,y,dx,dy)
Constrains rectangles i, given by their origins x[i], y[i])
and sizes (dx[i], dy[i]), to be non-overlapping. Zero-width
rectangles can still not overlap with any other rectangle.
"""
n = len(x)
constraints = []
for i in range(n):
for j in range(i+1,n):
constraints += [(x[i] + dx[i] <= x[j]) |
(y[i] + dy[i] <= y[j]) |
(x[j] + dx[j] <= x[i]) |
(y[j] + dy[j] <= y[i])
]
return constraints
def nvalue(m, x):
"""
nvalue(m, x)
Requires that there is exactly m distinct values in x
(min_val and max_val are the minimum and maximum value
in x, respectively)
"""
n = len(x)
min_val = min([x[i].lb for i in range(n)])
max_val = max([x[i].ub for i in range(n)])
return (m == sum([ sum([ x[j] == i for j in range(n)]) > 0 for i in range(min_val, max_val+1)]))
#
# nvalues(x,op,n)
#
# Requires that the number of distinct values in the array x is
# op n
# where
# op is one of
#   <, <=, ==, >=, >, != (encoded as 0..5, see arith_relop)
#
def nvalues(x, op, n):
xlen = len(x)
m = intvar(1,xlen)
return [nvalue(m,x),
arith_relop(m,op,n)
]
def clique(g, clique, card):
"""
clique(g, clique, card)
Ensure that the boolean array 'clique' (of Integer Array type)
represents a clique in the graph g with the cardinality card.
    Note: The encoding works backward:
    if two nodes I and J (I != J) are both in the clique then there
    must be an edge between I and J in g. If there is no edge, then
    c1 and c2 cannot both be in the clique.
"""
n = len(g)
constraints = []
constraints += [card == sum([clique[i] for i in range(n)])]
for (c1,i) in zip(clique, range(n)):
for (c2,j) in zip(clique, range(n)):
if i != j and g[i][j] == 0:
constraints += [(c1 == 0) | (c2 == 0)]
return constraints
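# Usage sketch (added; toy graph assumed): nodes 0,1,2 form the only triangle.
def _demo_clique():
    g = [[0, 1, 1, 0],
         [1, 0, 1, 0],
         [1, 1, 0, 1],
         [0, 0, 1, 0]]
    b = boolvar(shape=4)
    card = intvar(0, 4)
    model = Model(clique(g, b, card), card == 3)
    if model.solve():
        print(b.value())  # the triangle {0,1,2}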
def assignment_model(cost, tasks=None,people=None,print_solution=None,opt="min"):
"""
    assignment_model(cost, tasks=None,people=None,print_solution=None,opt='min')
    Fairly general implementation of the assignment problem:
    Minimize the total cost of assigning all tasks to one person each, given
    the cost of assigning a person to the tasks.
    For problems where 'tasks' and 'people' do not apply, a user-defined
    method 'print_solution' can be used.
For maximization problems, use opt='max'.
"""
rows = len(cost)
cols = len(cost[0])
max_cost = np.sum(np.array(cost))
total_cost = intvar(0,max_cost,name='cost')
x = boolvar(shape=(rows,cols),name="x")
model = Model(
total_cost >= 0,
total_cost == np.sum([ x_row*cost_row for (x_row, cost_row) in zip(x, cost)]),
        # exactly one assignment per row, all rows (tasks) must be assigned.
[sum(row) == 1 for row in x],
# zero or one assignments per column (people)
[sum(col) <= 1 for col in x.transpose()],
)
if opt == "max":
model.maximize(total_cost)
else:
model.minimize(total_cost)
ss = CPM_ortools(model)
if ss.solve():
print("total_cost: ", total_cost.value())
print("x:")
print(x.value())
print()
if tasks == None and people == None:
for i in range(rows):
print("Task", i, end="")
for j in range(cols):
if x[i][j].value() == 1:
print(" is done by ", j)
print()
else:
if print_solution != None:
print_solution(x.value(),tasks,people)
else:
for i in range(rows):
print("Task", tasks[i], end="")
for j in range(cols):
if x[i][j].value() == 1:
print(" is done by", people[j])
print()
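# Usage sketch (added; toy cost matrix assumed): three tasks, four people;
# prints the minimum-cost assignment.
def _demo_assignment():
    cost = [[14, 5, 8, 7],
            [2, 12, 6, 5],
            [7, 8, 3, 9]]
    assignment_model(cost)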
def latin_square(x):
"""
latin_square(x)
The matrix x is a Latin square.
"""
return [[AllDifferent(row) for row in x],
[AllDifferent(col) for col in x.transpose()]]
#
# reverses an array from -> to
#
def reverse(xfrom, xto):
"""
reverse(xfrom, xto)
xto is reverse of xfrom.
"""
n = len(xfrom)
return [xto[i] == xfrom[n-i-1] for i in range(n)]
def print_model_and_variables(model):
"""
print_model_and_variables(model)
Prints the following:
- the unflattened model (via print(model))
- the flattened model
- the variables and the domains in the flattened model
(From <NAME> when he debugged one of my models. Thanks, Tias!)
"""
print("Model:")
print(model)
print("\nFlattened model and variables:")
mf = flatten_model(model)
print_variables(mf)
print(mf)
print()
def argmax(x,p):
"""
argmax(x,p)
Ensure that p is the argmax, i.e. the position of the maximum value
in x.
Note: If there are many maximum values then argmax(x,p) will find
all these values.
"""
n = len(x)
constraints = []
for i in range(n):
constraints += [(p != i).implies(x[p] > x[i]) ]
return constraints
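# Usage sketch (added): p must point at the (here unique) maximum of x.
def _demo_argmax():
    x = intvar(0, 9, shape=4)
    p = intvar(0, 3)
    model = Model(argmax(x, p),
                  [x[i] == v for i, v in enumerate([3, 7, 2, 5])])
    if model.solve():
        print(p.value())  # -> 1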
def argmin(x,p):
"""
argmin(x,p)
Ensure that p is the argmin, i.e. the position of the minimum value
in x.
Note: If there are many minimum values then argmin(x,p) will find
all these values.
"""
n = len(x)
constraints = []
for i in range(n):
constraints += [(p != i).implies(x[p] < x[i]) ]
return constraints
def argmin_except_c(x,p,c):
"""
argmin_except_c(x,p,c)
Ensure that p is the argmin, i.e. the position of the minimum value
in x, but ignores any value of c.
Note:
- If there are many minimum values then argmin_except_c(x,p,c) will find
all these values.
- We assume that there are at least one value != c.
"""
n = len(x)
constraints = [x[p] != c]
for i in range(n):
constraints += [(p != i).implies((x[i] == c) | (x[p] < x[i])) ]
return constraints
def argmin_except_0(x,p):
"""
argmin_except_0(x,p)
Ensure that p is the argmin, i.e. the position of the minimum value
in x, but ignores any value of 0.
Note:
- If there are many minimum values then argmin_except_0(x,p) will find
all these values.
- We assume that there are at least one value > 0.
"""
return argmin_except_c(x,p,0)
def argmax_except_c(x,p,c):
"""
argmax_except_c(x,p,c)
Ensure that p is the argmax, i.e. the position of the minimum value
in x, but ignores any value of c.
Note:
- If there are many maximum values then argmax_except_c(x,p,c) will find
all these values.
- We assume that there are at least one value != c.
"""
n = len(x)
constraints = [x[p] != c]
for i in range(n):
constraints += [(p != i).implies((x[i] == c) | (x[p] > x[i])) ]
return constraints
def permutation3(x,p,y):
"""
permutation(x,p,y)
Ensure that the array y is a permutation of array x with the permutation
operations in array p.
Example:
x = [2,0,1,3]
p = [2,1,3,0]
What is y?
y[0] = x[p[0]] = x[2] = 1
y[1] = x[p[1]] = x[1] = 0
y[2] = x[p[2]] = x[3] = 3
y[3] = x[p[3]] = x[0] = 2
Thus:
y = [1,0,3,2]
Assumptions:
- We assume that x, p, and y has distinct values, i.e. constrained by
AllDifferent.
We check that:
- p has the domain of 0..len(p)-1
"""
n = len(x)
assert n == len(p) and n == len(y), f"Length of x, p, and y must be the same"
p_lb, p_ub = get_min_max_domain(p)
assert p_lb == 0 and p_ub == n-1, "Domain value of p must be 0..n-1"
constraints = []
for i in range(n):
constraints += [y[i] == x[p[i]] ]
return constraints
def permutation(x,y):
"""
permutation(x,y)
Ensure that the array y is a permutation of array x,
connected with some unknown permutation.
permutation3(x,p,y) is used (which see).
"""
n = len(x)
p = intvar(0,n-1,shape=n)
return permutation3(x,p,y)
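# Usage sketch (added), mirroring the permutation3 docstring example above.
def _demo_permutation3():
    x = intvar(0, 3, shape=4)
    p = intvar(0, 3, shape=4)
    y = intvar(0, 3, shape=4)
    model = Model(AllDifferent(x), AllDifferent(p), AllDifferent(y),
                  [x[i] == v for i, v in enumerate([2, 0, 1, 3])],
                  [p[i] == v for i, v in enumerate([2, 1, 3, 0])],
                  permutation3(x, p, y))
    if model.solve():
        print(y.value())  # -> [1 0 3 2]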
def get_min_max_domain(x):
"""
get_min_max_domain(x)
Return the minimum and maximum domain of an array x.
"""
n = len(x)
x_lb = min([x[i].lb for i in range(n)])
x_ub = max([x[i].ub for i in range(n)])
return [x_lb,x_ub]
def chain(op,x):
"""
chain(op,x)
Ensure that all elements pairwise satisfies the binary operator op.
Note: In order for this to work the operator must be from the
operator library, e.g. operator.lt, operator.ne, e.g:
chain(operator.lt,x)
Note: Many of the binary operator.* has a definition already, e.g.
(from cpmpy_hakank.py):
      increasing, increasing_strict, decreasing, decreasing_strict
and
AllDifferent, AllEqual
"""
n = len(x)
constraints = []
for i in range(1,n):
constraints += [ op(x[i-1], x[i]) ]
return constraints
def minimum_except_c(x,min_val,c,allow_all_c=False):
"""
minimum_except_c(x,min_val,c,allow_all_c)
Ensures that min_val is the minimum value in array x, ignoring the value of c.
The flag allow_all_c:
- If True: allow an array with only c values: min_val is thus c.
- If False: assume that there is at least one non c value. min_val must be != c.
"""
n = len(x)
ix = intvar(0,n-1)
# Ensure that min_val is in x
constraints = [min_val == x[ix]]
for j in range(n):
        constraints += [(min_val <= x[j]) | (x[j] == c)]
    if allow_all_c:
        max_val = max(x) # To be able to handle the case when there are only c values
constraints += [(max_val == c)==(min_val == c)]
else:
constraints += [min_val != c]
return constraints
def minimum_except_0(x,min_val,allow_all_0s=False):
"""
minimum_except_0(x,min_val,allow_all_0s)
Ensures that min_val is the minimum value in array x, ignoring 0s.
The flag allow_all_0s:
- If True: allow an array with only 0 values: min_val is thus 0.
- If False: assume that there is at least one non 0 value. min_val must be != 0.
"""
    return minimum_except_c(x,min_val,0,allow_all_0s)
def value_precede(s,t,x):
"""
value_precede(s,t, x)
Ensures that the (first occurrence) of the value s precedes
the (first occurrence) of the value t in array x if both
s and t are in x.
This means that for t to occur in x then s has to precede t.
This definition is inspired by MiniZinc's definition
value_precede.mzn
"""
n = len(x)
bs = boolvar(shape=n+1)
constraints = []
for i in range(n):
xis = boolvar()
constraints += [(xis ==1)==(x[i] == s),
(xis ==1).implies(bs[i+1]==1),
(xis == 0).implies(bs[i]==bs[i+1]),
(bs[i] == 0).implies(x[i] != t)
]
constraints += [bs[0] == 0]
return constraints
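# Usage sketch (added): a 2 may never appear before the first 1, so fixing
# x[0] == 2 makes the model unsatisfiable.
def _demo_value_precede():
    x = intvar(0, 2, shape=4)
    model = Model(value_precede(1, 2, x), x[0] == 2)
    print(model.solve())  # -> False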
def value_precede_chain(c,x):
"""
value_precede_chain(c, x)
    Ensures that the value c[i-1] precedes the value c[i] in the array x
if both c[i-1] and c[i] are in x.
See value_precede().
"""
n=len(c)
constraints = []
for i in range(1,n):
constraints += [value_precede(c[i-1],c[i],x)]
return constraints
def sliding_sum(low, up, seq, x):
"""
sliding_sum(low, up, seq, x)
    Ensure that every sequence of length seq in x sums to between low and up.
"""
vlen = len(x)
constraints = []
for i in range(vlen-seq+1):
s = intvar(low,up)
constraints += [s == sum([x[j] for j in range(i,i+seq)])]
return constraints
def no_overlap(s1, d1, s2, d2):
"""
no_overlap(s1, d1, s2, d2)
Ensures that task 1 (start time s1 with duration d1) does not overlap with
task2 (start time s2 with duration d2)
"""
return [(s1 + d1 <= s2) | (s2 + d2 <= s1)]
def is_prime(n):
"""
is_prime(n)
Returns True if the number n is a prime number, otherwise return False.
"""
if n < 2: return False
if n == 2: return True
if not n & 1:
return False
for i in range(3, 1+int(math.sqrt(n)), 2):
if n % i == 0:
return False
return True
def primes(limit):
"""
primes(limit)
Returns the prime numbers below limit.
"""
primes = [2]
i = 3
for i in range(3, limit, 2):
if is_prime(i):
primes.append(i)
return primes
def all_different_reif(x,b):
"""
all_different_reif(x,b)
b == 1 if all values in x are different, else 0.
"""
n = len(x)
m = intvar(1,n)
return [nvalue(m,x),
(m==n)==(b==1)
]
def all_different_reif_m(model,x):
"""
all_different_reif(x,b)
b == 1 if all values in x are different, else 0.
This version returns b.
Note that the model is a parameter so it must be
created first:
x = intvar(...)
b = boolvar()
model = Model(...)
model += [b == all_different_reif_m(model,x)]
"""
n = len(x)
m = intvar(1,n)
b = boolvar()
model += [nvalue(m,x),
(m==n)==(b==1)]
return b
def lex_chain_less(x):
"""
lex_chain_less(x)
Require that all the rows are lexicographically sorted
(but not the columns as in lex2).
See: http://www.emn.fr/z-info/sdemasse/gccat/Clex_chain_less.html
"""
n = len(x)
m = len(x[0])
constraints = []
for i in range(1,n):
constraints += [lex_less([x[i-1,j] for j in range(m)], [x[i,j] for j in range(m)])]
return constraints
def soft_alldifferent(x,p):
"""
soft_alldifferent(x,p)
p is the number of pairs that have the same value.
See http://www.emn.fr/z-info/sdemasse/gccat/Csoft_alldifferent_ctr.html
"""
n = len(x)
return [p == sum([x[i] == x[j] for i in range(n) for j in range(i+1,n)])]
def among_seq(low,high,seqlen,x,v):
"""
among_seq(low, high, seqlen, x, v)
    Ensures that every sequence of length seqlen in the list x
    contains at least low and at most high occurrences of v.
"""
n = len(x)
size = n-seqlen+1
constraints = []
for i in range(size):
seq = [x[j] for j in range(i,i+seqlen)]
constraints += [among_range(low, high, seq, v)]
return constraints
def among_range(low, high,x,v):
"""
among_range(low, high, x, v)
    Ensures that the list x contains at least low and at most high
    occurrences of the values in v.
Used by among_seq.
"""
xs = intvar(0,len(x))
vlen = len(v)
return [
xs == sum([sum([el == v[i] for i in range(vlen)])>0 for el in x]),
xs >= low,
xs <= high]
def sequence(x,seq_length, lbound,ubound):
"""
    sequence(x, seq_length, lbound, ubound)
    Ensures that the sum of every subsequence of length seq_length
    in array x is between lbound and ubound.
"""
n = len(x)
xs = intvar(lbound.lb,ubound.ub)
constraints = []
for i in range(n-seq_length+1):
constraints += [xs == sum([x[j] for j in range(i,i+seq_length)]),
xs >= lbound,
xs <= ubound
]
return constraints
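# Editor's sketch (assumes cpmpy's Model/intvar in scope). Note that
# sequence() reads lbound.lb and ubound.ub, so the bounds must be passed as
# (possibly fixed) intvars rather than plain ints.
def _example_sequence():
    x = intvar(0, 3, shape=5)
    lbound, ubound = intvar(2, 2), intvar(6, 6)
    return Model(sequence(x, 3, lbound, ubound)).solve()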
|
[
"math.sqrt",
"itertools.combinations",
"numpy.array",
"functools.reduce",
"cpmpy.transformations.flatten_model.flatten_model",
"cpmpy.transformations.get_variables.print_variables"
] |
[((29382, 29411), 'functools.reduce', 'reduce', (['(lambda a, b: a * b)', 'x'], {}), '(lambda a, b: a * b, x)\n', (29388, 29411), False, 'from functools import reduce\n'), ((38373, 38393), 'cpmpy.transformations.flatten_model.flatten_model', 'flatten_model', (['model'], {}), '(model)\n', (38386, 38393), False, 'from cpmpy.transformations.flatten_model import flatten_constraint, flatten_model\n'), ((38396, 38415), 'cpmpy.transformations.get_variables.print_variables', 'print_variables', (['mf'], {}), '(mf)\n', (38411, 38415), False, 'from cpmpy.transformations.get_variables import print_variables\n'), ((1950, 1981), 'itertools.combinations', 'itertools.combinations', (['args', '(2)'], {}), '(args, 2)\n', (1972, 1981), False, 'import itertools\n'), ((36209, 36223), 'numpy.array', 'np.array', (['cost'], {}), '(cost)\n', (36217, 36223), True, 'import numpy as np\n'), ((29255, 29284), 'functools.reduce', 'reduce', (['(lambda a, b: a * b)', 'x'], {}), '(lambda a, b: a * b, x)\n', (29261, 29284), False, 'from functools import reduce\n'), ((45503, 45515), 'math.sqrt', 'math.sqrt', (['n'], {}), '(n)\n', (45512, 45515), False, 'import sys, math, re\n')]
|
# --------------
# Importing header files
import numpy as np
import warnings
warnings.filterwarnings('ignore')
#New record
new_record=[[50, 9, 4, 1, 0, 0, 40, 0]]
#Reading file
data = np.genfromtxt(path, delimiter=",", skip_header=1)
print(data)
#Code starts here
census = np.concatenate((new_record,data),axis=0)
print(census)
age = np.array(census[0:,0])
print(age)
max_age = np.max(age)
min_age = np.min(age)
age_mean = age.mean()
age_std = np.std(age)
print(max_age,min_age,age_mean,age_std, sep='\n')
race = np.array(census[0:,2])
race_0=census[census[:,2]==0]
race_1=census[census[:,2]==1]
race_2=census[census[:,2]==2]
race_3=census[census[:,2]==3]
race_4=census[census[:,2]==4]
len_0=len(race_0)
len_1=len(race_1)
len_2=len(race_2)
len_3=len(race_3)
len_4=len(race_4)
print(len_0,len_1,len_2,len_3,len_4)
minority_race = min(len_0,len_1,len_2,len_3,len_4)
print(minority_race)
senior_citizens =census[census[:,0]>60]
#working_hours_sum = 0
working_hours = np.array(senior_citizens[0:,6])
working_hours_sum= np.sum(working_hours)
#for i in range(0,len(senior_citizens)):
# working_hours_sum = working_hours_sum + senior_citizens[i][6]
print(working_hours_sum)
print(len(senior_citizens))
avg_working_hours = np.mean(working_hours)
print(avg_working_hours)
high=census[census[:,1]>10]
low=census[census[:,1]<=10]
avg_pay_high = np.mean(np.array(high[0:,7]))
avg_pay_low = np.mean(np.array(low[0:,7]))
if(avg_pay_high>avg_pay_low):
    print("yes: higher education means higher pay")
else:
    print("no: higher education does not mean higher pay")
|
[
"numpy.sum",
"warnings.filterwarnings",
"numpy.std",
"numpy.genfromtxt",
"numpy.max",
"numpy.min",
"numpy.array",
"numpy.mean",
"numpy.concatenate"
] |
[((82, 115), 'warnings.filterwarnings', 'warnings.filterwarnings', (['"""ignore"""'], {}), "('ignore')\n", (105, 115), False, 'import warnings\n'), ((203, 252), 'numpy.genfromtxt', 'np.genfromtxt', (['path'], {'delimiter': '""","""', 'skip_header': '(1)'}), "(path, delimiter=',', skip_header=1)\n", (216, 252), True, 'import numpy as np\n'), ((295, 337), 'numpy.concatenate', 'np.concatenate', (['(new_record, data)'], {'axis': '(0)'}), '((new_record, data), axis=0)\n', (309, 337), True, 'import numpy as np\n'), ((358, 381), 'numpy.array', 'np.array', (['census[0:, 0]'], {}), '(census[0:, 0])\n', (366, 381), True, 'import numpy as np\n'), ((404, 415), 'numpy.max', 'np.max', (['age'], {}), '(age)\n', (410, 415), True, 'import numpy as np\n'), ((427, 438), 'numpy.min', 'np.min', (['age'], {}), '(age)\n', (433, 438), True, 'import numpy as np\n'), ((473, 484), 'numpy.std', 'np.std', (['age'], {}), '(age)\n', (479, 484), True, 'import numpy as np\n'), ((544, 567), 'numpy.array', 'np.array', (['census[0:, 2]'], {}), '(census[0:, 2])\n', (552, 567), True, 'import numpy as np\n'), ((1023, 1055), 'numpy.array', 'np.array', (['senior_citizens[0:, 6]'], {}), '(senior_citizens[0:, 6])\n', (1031, 1055), True, 'import numpy as np\n'), ((1075, 1096), 'numpy.sum', 'np.sum', (['working_hours'], {}), '(working_hours)\n', (1081, 1096), True, 'import numpy as np\n'), ((1287, 1309), 'numpy.mean', 'np.mean', (['working_hours'], {}), '(working_hours)\n', (1294, 1309), True, 'import numpy as np\n'), ((1422, 1443), 'numpy.array', 'np.array', (['high[0:, 7]'], {}), '(high[0:, 7])\n', (1430, 1443), True, 'import numpy as np\n'), ((1467, 1487), 'numpy.array', 'np.array', (['low[0:, 7]'], {}), '(low[0:, 7])\n', (1475, 1487), True, 'import numpy as np\n')]
|
# coding=utf-8
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import tensorflow as tf
from tensorflow.python.ops import data_flow_ops
import re
import os
from operator import itemgetter
class PrefillStagingAreasHook(tf.train.SessionRunHook):
def after_create_session(self, session, coord):
enqueue_ops = tf.get_collection('STAGING_AREA_PUTS')
for i in range(len(enqueue_ops)):
session.run(enqueue_ops[:i + 1])
def stage(tensors):
"""Stages the given tensors in a StagingArea for asynchronous put/get.
"""
stage_area = data_flow_ops.StagingArea(
dtypes=[tensor.dtype for tensor in tensors],
shapes=[tensor.get_shape() for tensor in tensors])
put_op = stage_area.put(tensors)
get_tensors = stage_area.get()
tf.add_to_collection('STAGING_AREA_PUTS', put_op)
return put_op, get_tensors
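# Editor's sketch (not in the original source; names below are illustrative):
# stage() is typically wired into a TF1-style input pipeline so the next batch
# is copied to the device while the current one is consumed, e.g.:
#   put_op, (staged_images, staged_labels) = stage([images, labels])
#   with tf.control_dependencies([put_op]):
#       logits = model_fn(staged_images)
# PrefillStagingAreasHook above then primes the staging areas before step 0.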
def sort_and_load_ckpts(log_dir):
ckpts = []
for f in os.listdir(log_dir):
        m = re.match(r'model\.ckpt-([0-9]+)\.index', f)
if m is None:
continue
fullpath = os.path.join(log_dir, f)
ckpts.append({'step': int(m.group(1)),
'path': os.path.splitext(fullpath)[0],
'mtime': os.stat(fullpath).st_mtime,
})
ckpts.sort(key=itemgetter('step'))
return ckpts
|
[
"os.stat",
"tensorflow.get_collection",
"re.match",
"tensorflow.add_to_collection",
"os.path.splitext",
"operator.itemgetter",
"os.path.join",
"os.listdir"
] |
[((2087, 2136), 'tensorflow.add_to_collection', 'tf.add_to_collection', (['"""STAGING_AREA_PUTS"""', 'put_op'], {}), "('STAGING_AREA_PUTS', put_op)\n", (2107, 2136), True, 'import tensorflow as tf\n'), ((2232, 2251), 'os.listdir', 'os.listdir', (['log_dir'], {}), '(log_dir)\n', (2242, 2251), False, 'import os\n'), ((1625, 1663), 'tensorflow.get_collection', 'tf.get_collection', (['"""STAGING_AREA_PUTS"""'], {}), "('STAGING_AREA_PUTS')\n", (1642, 1663), True, 'import tensorflow as tf\n'), ((2265, 2305), 're.match', 're.match', (['"""model.ckpt-([0-9]+).index"""', 'f'], {}), "('model.ckpt-([0-9]+).index', f)\n", (2273, 2305), False, 'import re\n'), ((2369, 2393), 'os.path.join', 'os.path.join', (['log_dir', 'f'], {}), '(log_dir, f)\n', (2381, 2393), False, 'import os\n'), ((2605, 2623), 'operator.itemgetter', 'itemgetter', (['"""step"""'], {}), "('step')\n", (2615, 2623), False, 'from operator import itemgetter\n'), ((2471, 2497), 'os.path.splitext', 'os.path.splitext', (['fullpath'], {}), '(fullpath)\n', (2487, 2497), False, 'import os\n'), ((2533, 2550), 'os.stat', 'os.stat', (['fullpath'], {}), '(fullpath)\n', (2540, 2550), False, 'import os\n')]
|
"""
sentry.web.frontend.projects
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from sentry.constants import (
PLATFORM_LIST, PLATFORM_TITLES, PLATFORM_ROOTS)
from sentry.models import ProjectKey
from sentry.web.decorators import has_access
from sentry.web.helpers import render_to_response, render_to_string, get_raven_js_url
def get_key_context(user, project):
try:
key = ProjectKey.objects.get(user=user, project=project)
except ProjectKey.DoesNotExist:
try:
key = ProjectKey.objects.filter(
project=project,
user__isnull=True,
)[0]
except IndexError:
key = None
if key is None:
dsn = 'SENTRY_DSN'
dsn_public = 'SENTRY_PUBLIC_DSN'
else:
dsn = key.dsn_private
dsn_public = key.dsn_public
return {
'key': key,
'dsn': dsn,
'dsn_public': dsn_public,
'raven_js_url': get_raven_js_url(),
}
@has_access
def client_help(request, team, project):
context = {
'page': 'client_help',
'project': project,
'team': project.team,
'SUBSECTION': 'projects',
'SECTION': 'team',
}
context.update(get_key_context(request.user, project))
return render_to_response('sentry/projects/client_help.html', context, request)
@has_access
def client_guide(request, team, project, platform):
if platform not in PLATFORM_LIST:
return HttpResponseRedirect(reverse('sentry'))
template = 'sentry/partial/client_config/%s.html' % (platform,)
context = {
'platform': platform,
'platform_title': PLATFORM_TITLES.get(platform, platform.title()),
'project': project,
'page': 'client_help_%s' % (PLATFORM_ROOTS.get(platform, platform),),
'team': project.team,
'SUBSECTION': 'projects',
'SECTION': 'team',
}
context.update(get_key_context(request.user, project))
if request.is_ajax():
return render_to_response(template, context, request)
context['template'] = render_to_string(template, context, request)
return render_to_response('sentry/projects/docs/client_config.html', context, request)
|
[
"sentry.web.helpers.render_to_string",
"django.core.urlresolvers.reverse",
"sentry.models.ProjectKey.objects.get",
"sentry.web.helpers.get_raven_js_url",
"sentry.constants.PLATFORM_ROOTS.get",
"sentry.web.helpers.render_to_response",
"sentry.models.ProjectKey.objects.filter"
] |
[((1467, 1539), 'sentry.web.helpers.render_to_response', 'render_to_response', (['"""sentry/projects/client_help.html"""', 'context', 'request'], {}), "('sentry/projects/client_help.html', context, request)\n", (1485, 1539), False, 'from sentry.web.helpers import render_to_response, render_to_string, get_raven_js_url\n'), ((2268, 2312), 'sentry.web.helpers.render_to_string', 'render_to_string', (['template', 'context', 'request'], {}), '(template, context, request)\n', (2284, 2312), False, 'from sentry.web.helpers import render_to_response, render_to_string, get_raven_js_url\n'), ((2325, 2404), 'sentry.web.helpers.render_to_response', 'render_to_response', (['"""sentry/projects/docs/client_config.html"""', 'context', 'request'], {}), "('sentry/projects/docs/client_config.html', context, request)\n", (2343, 2404), False, 'from sentry.web.helpers import render_to_response, render_to_string, get_raven_js_url\n'), ((586, 636), 'sentry.models.ProjectKey.objects.get', 'ProjectKey.objects.get', ([], {'user': 'user', 'project': 'project'}), '(user=user, project=project)\n', (608, 636), False, 'from sentry.models import ProjectKey\n'), ((1143, 1161), 'sentry.web.helpers.get_raven_js_url', 'get_raven_js_url', ([], {}), '()\n', (1159, 1161), False, 'from sentry.web.helpers import render_to_response, render_to_string, get_raven_js_url\n'), ((2194, 2240), 'sentry.web.helpers.render_to_response', 'render_to_response', (['template', 'context', 'request'], {}), '(template, context, request)\n', (2212, 2240), False, 'from sentry.web.helpers import render_to_response, render_to_string, get_raven_js_url\n'), ((1680, 1697), 'django.core.urlresolvers.reverse', 'reverse', (['"""sentry"""'], {}), "('sentry')\n", (1687, 1697), False, 'from django.core.urlresolvers import reverse\n'), ((1954, 1992), 'sentry.constants.PLATFORM_ROOTS.get', 'PLATFORM_ROOTS.get', (['platform', 'platform'], {}), '(platform, platform)\n', (1972, 1992), False, 'from sentry.constants import PLATFORM_LIST, PLATFORM_TITLES, PLATFORM_ROOTS\n'), ((704, 765), 'sentry.models.ProjectKey.objects.filter', 'ProjectKey.objects.filter', ([], {'project': 'project', 'user__isnull': '(True)'}), '(project=project, user__isnull=True)\n', (729, 765), False, 'from sentry.models import ProjectKey\n')]
|
# Generated by Django 4.0.1 on 2022-03-14 10:45
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('VulnerableScan', '0004_remove_exploitregister_file_object_and_more'),
]
operations = [
migrations.AddField(
model_name='vulnerablescantasks',
name='notice',
field=models.BooleanField(db_column='notice', default=False, verbose_name='是否钉钉通知'),
),
]
|
[
"django.db.models.BooleanField"
] |
[((379, 456), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'db_column': '"""notice"""', 'default': '(False)', 'verbose_name': '"""是否钉钉通知"""'}), "(db_column='notice', default=False, verbose_name='是否钉钉通知')\n", (398, 456), False, 'from django.db import migrations, models\n')]
|
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import sys
try:
from Cython.Distutils import build_ext
except ImportError:
def build_ext(*args, **kwargs):
from Cython.Distutils import build_ext
return build_ext(*args, **kwargs)
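# Editor's note: the list subclass below defers calling its callback (here
# extensions()) until setuptools actually iterates ext_modules, i.e. after
# setup_requires has installed numpy and cython.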
class lazy_extlist(list):
def __init__(self, callback):
self._list, self.callback = None, callback
def c_list(self):
if self._list is None:
self._list = self.callback()
return self._list
def __iter__(self):
for e in self.c_list():
yield e
def __getitem__(self, ii):
return self.c_list()[ii]
def __len__(self):
return len(self.c_list())
def extensions():
__builtins__.__NUMPY_SETUP__ = False
from Cython.Distutils import Extension
import numpy as np
extra_compile_args = ["-O3"]
extra_link_args = []
if sys.platform == "darwin":
extra_compile_args.append("-mmacosx-version-min=10.9")
extra_compile_args.append('-stdlib=libc++')
extra_link_args.append('-stdlib=libc++')
return [Extension(
'pydtw.dtw',
["pydtw/dtw.pyx"],
cython_directives={'language_level': sys.version_info[0]},
extra_compile_args=extra_compile_args,
extra_link_args=extra_link_args,
include_dirs=[np.get_include()],
language="c++")
]
setup(
name="pydtw",
description='Fast Implementation of Dynamic Time Warping',
version="2.0.3",
long_description=open('README.rst').read(),
packages=find_packages(),
setup_requires=["numpy", 'cython'],
ext_modules=lazy_extlist(extensions),
cmdclass={'build_ext': build_ext},
author='<NAME>',
author_email="<EMAIL>",
url='https://github.com/shunsukeaihara/pydtw',
license="MIT License",
include_package_data=True,
test_suite='nose.collector',
tests_require=['nose', 'numpy', 'cython'],
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 2",
]
)
|
[
"Cython.Distutils.build_ext",
"numpy.get_include",
"setuptools.find_packages"
] |
[((1559, 1574), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1572, 1574), False, 'from setuptools import setup, find_packages\n'), ((247, 273), 'Cython.Distutils.build_ext', 'build_ext', (['*args'], {}), '(*args, **kwargs)\n', (256, 273), False, 'from Cython.Distutils import build_ext\n'), ((1338, 1354), 'numpy.get_include', 'np.get_include', ([], {}), '()\n', (1352, 1354), True, 'import numpy as np\n')]
|
import signal, subprocess, sys
# On Linux this causes os.waitpid to fail with OSError as the OS has already
# reaped our child process. The wait() passing the OSError on to the caller
# and causing us to exit with an error is what we are testing against.
signal.signal(signal.SIGCHLD, signal.SIG_IGN)
subprocess.Popen([sys.executable, '-c', 'print("albatross")']).wait()
|
[
"signal.signal",
"subprocess.Popen"
] |
[((256, 301), 'signal.signal', 'signal.signal', (['signal.SIGCHLD', 'signal.SIG_IGN'], {}), '(signal.SIGCHLD, signal.SIG_IGN)\n', (269, 301), False, 'import signal, subprocess, sys\n'), ((302, 364), 'subprocess.Popen', 'subprocess.Popen', (['[sys.executable, \'-c\', \'print("albatross")\']'], {}), '([sys.executable, \'-c\', \'print("albatross")\'])\n', (318, 364), False, 'import signal, subprocess, sys\n')]
|
from django.contrib import admin
from models import *
admin.site.register(Company)
admin.site.register(Job)
|
[
"django.contrib.admin.site.register"
] |
[((56, 84), 'django.contrib.admin.site.register', 'admin.site.register', (['Company'], {}), '(Company)\n', (75, 84), False, 'from django.contrib import admin\n'), ((85, 109), 'django.contrib.admin.site.register', 'admin.site.register', (['Job'], {}), '(Job)\n', (104, 109), False, 'from django.contrib import admin\n')]
|
#!/usr/bin/env python3
import tkinter as tk
from tkinter.filedialog import askopenfilenames
import os
import csv
class ScanFtpShare(tk.Tk):
def __init__(self):
super(ScanFtpShare,self).__init__()
self.path=tk.StringVar()
self.title("Scan")
self.setup_Ui()
def selectPath(self):
pathfile = askopenfilenames()
self.path.set(pathfile)
def setup_Ui(self):
self.label_lj=tk.Label(self,text="导入文件").grid(row = 0, column = 0)
self.entry_lj=tk.Entry(self, textvariable = self.path).grid(row = 0, column = 1)
self.button_lj=tk.Button(self, text = "导入路径", command = self.selectPath).grid(row = 0, column = 2)
self.button_dc=tk.Button(self, text = "导出文件", command = self.editFile).grid(row = 1, column = 2)
def editFile(self):
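        # Editor's note: parses the exported scan report (tab-separated,
        # UTF-16) and splits service rows into FTP.csv and SHARE.csv; the
        # header marker "狀態" is Chinese for "Status".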
file=self.path.get()
file=eval(file)
dcfile=os.path.abspath(os.path.dirname(file[0]))
with open(file[0],'rt',encoding='utf-16') as input_file:
filereader = csv.reader(input_file,delimiter='\t')
global b
global c
b=0
c=0
for row_list in filereader:
if not row_list[6].strip()=='' and row_list[0] != "狀態" and b==1:
with open(os.path.join(dcfile,"FTP.csv"),'a',encoding='utf-16',newline="") as f:
f_csv = csv.writer(f,delimiter='\t')
list_line=[row_list[0],row_list[1],row_list[2],'','','','',row_list[3],\
'','',row_list[6],'',row_list[8],row_list[9],\
'',row_list[11],row_list[12],'','','','5']
f_csv.writerow(list_line)
f.close()
elif b==0 and row_list[0] == "狀態":
with open(os.path.join(dcfile,"FTP.csv"),'a',encoding='utf-16',newline="") as f:
f_csv = csv.writer(f,delimiter='\t')
list_line=['狀態','名稱','IP','who','why/purpose','passwd','簽承諾書','Radmin',\
'http','https','Ftp','Rdp','共用資料夾','共用印表機',\
'NetBIOS 群組','製造商','MAC 位址','使用者','日期','註解','位址']
f_csv.writerow(list_line)
f.close()
b=1
if not row_list[8].strip()==''and row_list[0] != "狀態" and c==1:
with open(os.path.join(dcfile,"SHARE.csv"),'a',encoding='utf-16',newline="") as f:
f_csv = csv.writer(f,delimiter='\t')
list_line=[row_list[0],row_list[1],row_list[2],'','','','',row_list[3],\
'','',row_list[6],'',row_list[8],row_list[9],\
'',row_list[11],row_list[12],'','','','5']
f_csv.writerow(list_line)
f.close()
elif c==0 and row_list[0] == "狀態":
with open(os.path.join(dcfile,"SHARE.csv"),'a',encoding='utf-16',newline="") as f:
f_csv = csv.writer(f,delimiter='\t')
list_line=['狀態','名稱','IP','who','why/purpose','passwd','簽承諾書','Radmin',\
'http','https','Ftp','Rdp','共用資料夾','共用印表機',\
'NetBIOS 群組','製造商','MAC 位址','使用者','日期','註解','位址']
f_csv.writerow(list_line)
f.close()
c=1
a=ScanFtpShare()
a.mainloop()
|
[
"tkinter.StringVar",
"csv.reader",
"csv.writer",
"os.path.join",
"tkinter.Button",
"tkinter.filedialog.askopenfilenames",
"os.path.dirname",
"tkinter.Entry",
"tkinter.Label"
] |
[((227, 241), 'tkinter.StringVar', 'tk.StringVar', ([], {}), '()\n', (239, 241), True, 'import tkinter as tk\n'), ((348, 366), 'tkinter.filedialog.askopenfilenames', 'askopenfilenames', ([], {}), '()\n', (364, 366), False, 'from tkinter.filedialog import askopenfilenames\n'), ((930, 954), 'os.path.dirname', 'os.path.dirname', (['file[0]'], {}), '(file[0])\n', (945, 954), False, 'import os\n'), ((1046, 1084), 'csv.reader', 'csv.reader', (['input_file'], {'delimiter': '"""\t"""'}), "(input_file, delimiter='\\t')\n", (1056, 1084), False, 'import csv\n'), ((461, 488), 'tkinter.Label', 'tk.Label', (['self'], {'text': '"""导入文件"""'}), "(self, text='导入文件')\n", (469, 488), True, 'import tkinter as tk\n'), ((536, 574), 'tkinter.Entry', 'tk.Entry', (['self'], {'textvariable': 'self.path'}), '(self, textvariable=self.path)\n', (544, 574), True, 'import tkinter as tk\n'), ((626, 679), 'tkinter.Button', 'tk.Button', (['self'], {'text': '"""导入路径"""', 'command': 'self.selectPath'}), "(self, text='导入路径', command=self.selectPath)\n", (635, 679), True, 'import tkinter as tk\n'), ((733, 784), 'tkinter.Button', 'tk.Button', (['self'], {'text': '"""导出文件"""', 'command': 'self.editFile'}), "(self, text='导出文件', command=self.editFile)\n", (742, 784), True, 'import tkinter as tk\n'), ((1413, 1442), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (1423, 1442), False, 'import csv\n'), ((2610, 2639), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (2620, 2639), False, 'import csv\n'), ((1310, 1341), 'os.path.join', 'os.path.join', (['dcfile', '"""FTP.csv"""'], {}), "(dcfile, 'FTP.csv')\n", (1322, 1341), False, 'import os\n'), ((1967, 1996), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (1977, 1996), False, 'import csv\n'), ((2505, 2538), 'os.path.join', 'os.path.join', (['dcfile', '"""SHARE.csv"""'], {}), "(dcfile, 'SHARE.csv')\n", (2517, 2538), False, 'import os\n'), ((3166, 3195), 'csv.writer', 'csv.writer', (['f'], {'delimiter': '"""\t"""'}), "(f, delimiter='\\t')\n", (3176, 3195), False, 'import csv\n'), ((1864, 1895), 'os.path.join', 'os.path.join', (['dcfile', '"""FTP.csv"""'], {}), "(dcfile, 'FTP.csv')\n", (1876, 1895), False, 'import os\n'), ((3061, 3094), 'os.path.join', 'os.path.join', (['dcfile', '"""SHARE.csv"""'], {}), "(dcfile, 'SHARE.csv')\n", (3073, 3094), False, 'import os\n')]
|
import unittest
from app.user import Blog, Quote, User,Comment
class QuoteTest(unittest.TestCase):
'''
Test Class to test the behaviour of the Quote class
'''
def setUp(self):
'''
Set up method that will run before every Test
'''
self.new_quote = Quote(1, 'mango', 'try me even now',
'http://quotes.stormconsultancy.co.uk/quotes/7')
def test_instance(self):
self.assertTrue(isinstance(self.new_quote, Quote,))
class UserModelTest(unittest.TestCase):
def setUp(self):
self.new_user = User(password='<PASSWORD>')
def test_password_setter(self):
self.assertTrue(self.new_user.pass_secure is not None)
class BlogModelTest(unittest.TestCase):
def setUp(self):
self.new_blog = Blog(1, "blog1", "power is power", "", 'come on')
    def test_instance(self):
self.assertTrue(self.new_blog is not None)
class CommentsModelTest(unittest.TestCase):
def setUp(self):
self.new_comment = Comment(1, "blog1", "1", 'come all')
    def test_instance(self):
self.assertTrue(self.new_comment is not None)
if __name__ == '__main__':
unittest.main()
|
[
"unittest.main",
"app.user.User",
"app.user.Quote",
"app.user.Comment",
"app.user.Blog"
] |
[((1214, 1229), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1227, 1229), False, 'import unittest\n'), ((299, 388), 'app.user.Quote', 'Quote', (['(1)', '"""mango"""', '"""try me even now"""', '"""http://quotes.stormconsultancy.co.uk/quotes/7"""'], {}), "(1, 'mango', 'try me even now',\n 'http://quotes.stormconsultancy.co.uk/quotes/7')\n", (304, 388), False, 'from app.user import Blog, Quote, User, Comment\n'), ((594, 621), 'app.user.User', 'User', ([], {'password': '"""<PASSWORD>"""'}), "(password='<PASSWORD>')\n", (598, 621), False, 'from app.user import Blog, Quote, User, Comment\n'), ((810, 859), 'app.user.Blog', 'Blog', (['(1)', '"""blog1"""', '"""power is power"""', '""""""', '"""come on"""'], {}), "(1, 'blog1', 'power is power', '', 'come on')\n", (814, 859), False, 'from app.user import Blog, Quote, User, Comment\n'), ((1054, 1090), 'app.user.Comment', 'Comment', (['(1)', '"""blog1"""', '"""1"""', '"""come all"""'], {}), "(1, 'blog1', '1', 'come all')\n", (1061, 1090), False, 'from app.user import Blog, Quote, User, Comment\n')]
|
try:
from . import generic as g
except BaseException:
import generic as g
class CollisionTest(g.unittest.TestCase):
def test_collision(self):
# Ensure that FCL is importable
try:
g.trimesh.collision.CollisionManager()
except ValueError:
g.log.warning('skipping collision tests, no FCL installed')
return
cube = g.get_mesh('unit_cube.STL')
tf1 = g.np.eye(4)
tf1[:3, 3] = g.np.array([5, 0, 0])
tf2 = g.np.eye(4)
tf2[:3, 3] = g.np.array([-5, 0, 0])
# Test one-to-many collision checking
m = g.trimesh.collision.CollisionManager()
m.add_object('cube0', cube)
m.add_object('cube1', cube, tf1)
ret = m.in_collision_single(cube)
assert ret == True
ret, names, data = m.in_collision_single(cube,
tf1,
return_names=True,
return_data=True)
assert ret == True
assert all(len(i.point) == 3 for i in data)
if 'cube1' not in names:
print('\n\n', m._objs.keys(), names)
assert 'cube1' in names
ret, names, data = m.in_collision_single(cube,
tf2,
return_names=True,
return_data=True)
assert ret == False
assert len(names) == 0
assert all(len(i.point) == 3 for i in data)
# Test internal collision checking and object
# addition/removal/modification
ret = m.in_collision_internal()
assert ret == False
m.add_object('cube2', cube, tf1)
ret, names = m.in_collision_internal(return_names=True)
assert ret == True
assert ('cube1', 'cube2') in names
assert ('cube0', 'cube1') not in names
assert ('cube2', 'cube1') not in names
m.set_transform('cube2', tf2)
ret = m.in_collision_internal()
assert ret == False
m.set_transform('cube2', tf1)
ret = m.in_collision_internal()
assert ret == True
m.remove_object('cube2')
ret = m.in_collision_internal()
assert ret == False
# Test manager-to-manager collision checking
m = g.trimesh.collision.CollisionManager()
m.add_object('cube0', cube)
m.add_object('cube1', cube, tf1)
n = g.trimesh.collision.CollisionManager()
n.add_object('cube0', cube, tf2)
ret = m.in_collision_other(n)
assert ret == False
n.add_object('cube3', cube, tf1)
ret = m.in_collision_other(n)
assert ret == True
ret, names = m.in_collision_other(n, return_names=True)
assert ret == True
assert ('cube1', 'cube3') in names
assert ('cube3', 'cube1') not in names
def test_distance(self):
# Ensure that FCL is importable
try:
g.trimesh.collision.CollisionManager()
except ValueError:
g.log.warning('skipping collision tests, no FCL installed')
return
cube = g.get_mesh('unit_cube.STL')
tf1 = g.np.eye(4)
tf1[:3, 3] = g.np.array([5, 0, 0])
tf2 = g.np.eye(4)
tf2[:3, 3] = g.np.array([-5, 0, 0])
tf3 = g.np.eye(4)
tf3[:3, 3] = g.np.array([2, 0, 0])
tf4 = g.np.eye(4)
tf4[:3, 3] = g.np.array([-2, 0, 0])
# Test one-to-many distance checking
m = g.trimesh.collision.CollisionManager()
m.add_object('cube1', cube, tf1)
dist = m.min_distance_single(cube)
assert g.np.isclose(dist, 4.0)
dist, name = m.min_distance_single(cube, return_name=True)
assert g.np.isclose(dist, 4.0)
assert name == 'cube1'
m.add_object('cube2', cube, tf2)
dist, name = m.min_distance_single(cube, tf3, return_name=True)
assert g.np.isclose(dist, 2.0)
assert name == 'cube1'
dist, name = m.min_distance_single(cube, tf4, return_name=True)
assert g.np.isclose(dist, 2.0)
assert name == 'cube2'
# Test internal distance checking and object
# addition/removal/modification
dist = m.min_distance_internal()
assert g.np.isclose(dist, 9.0)
dist, names = m.min_distance_internal(return_names=True)
assert g.np.isclose(dist, 9.0)
assert names == ('cube1', 'cube2')
m.add_object('cube3', cube, tf3)
dist, names = m.min_distance_internal(return_names=True)
assert g.np.isclose(dist, 2.0)
assert names == ('cube1', 'cube3')
m.set_transform('cube3', tf4)
dist, names = m.min_distance_internal(return_names=True)
assert g.np.isclose(dist, 2.0)
assert names == ('cube2', 'cube3')
# Test manager-to-manager distance checking
m = g.trimesh.collision.CollisionManager()
m.add_object('cube0', cube)
m.add_object('cube1', cube, tf1)
n = g.trimesh.collision.CollisionManager()
n.add_object('cube0', cube, tf2)
dist, names = m.min_distance_other(n, return_names=True)
assert g.np.isclose(dist, 4.0)
assert names == ('cube0', 'cube0')
n.add_object('cube4', cube, tf4)
dist, names = m.min_distance_other(n, return_names=True)
assert g.np.isclose(dist, 1.0)
assert names == ('cube0', 'cube4')
def test_scene(self):
try:
import fcl
except ImportError:
return
scene = g.get_mesh('cycloidal.3DXML')
manager, objects = g.trimesh.collision.scene_to_collision(scene)
assert manager.in_collision_internal()
if __name__ == '__main__':
g.trimesh.util.attach_to_log()
g.unittest.main()
|
[
"generic.unittest.main",
"generic.trimesh.collision.scene_to_collision",
"generic.trimesh.collision.CollisionManager",
"generic.np.array",
"generic.np.isclose",
"generic.get_mesh",
"generic.log.warning",
"generic.trimesh.util.attach_to_log",
"generic.np.eye"
] |
[((5872, 5902), 'generic.trimesh.util.attach_to_log', 'g.trimesh.util.attach_to_log', ([], {}), '()\n', (5900, 5902), True, 'import generic as g\n'), ((5907, 5924), 'generic.unittest.main', 'g.unittest.main', ([], {}), '()\n', (5922, 5924), True, 'import generic as g\n'), ((395, 422), 'generic.get_mesh', 'g.get_mesh', (['"""unit_cube.STL"""'], {}), "('unit_cube.STL')\n", (405, 422), True, 'import generic as g\n'), ((438, 449), 'generic.np.eye', 'g.np.eye', (['(4)'], {}), '(4)\n', (446, 449), True, 'import generic as g\n'), ((471, 492), 'generic.np.array', 'g.np.array', (['[5, 0, 0]'], {}), '([5, 0, 0])\n', (481, 492), True, 'import generic as g\n'), ((508, 519), 'generic.np.eye', 'g.np.eye', (['(4)'], {}), '(4)\n', (516, 519), True, 'import generic as g\n'), ((541, 563), 'generic.np.array', 'g.np.array', (['[-5, 0, 0]'], {}), '([-5, 0, 0])\n', (551, 563), True, 'import generic as g\n'), ((623, 661), 'generic.trimesh.collision.CollisionManager', 'g.trimesh.collision.CollisionManager', ([], {}), '()\n', (659, 661), True, 'import generic as g\n'), ((2419, 2457), 'generic.trimesh.collision.CollisionManager', 'g.trimesh.collision.CollisionManager', ([], {}), '()\n', (2455, 2457), True, 'import generic as g\n'), ((2548, 2586), 'generic.trimesh.collision.CollisionManager', 'g.trimesh.collision.CollisionManager', ([], {}), '()\n', (2584, 2586), True, 'import generic as g\n'), ((3253, 3280), 'generic.get_mesh', 'g.get_mesh', (['"""unit_cube.STL"""'], {}), "('unit_cube.STL')\n", (3263, 3280), True, 'import generic as g\n'), ((3296, 3307), 'generic.np.eye', 'g.np.eye', (['(4)'], {}), '(4)\n', (3304, 3307), True, 'import generic as g\n'), ((3329, 3350), 'generic.np.array', 'g.np.array', (['[5, 0, 0]'], {}), '([5, 0, 0])\n', (3339, 3350), True, 'import generic as g\n'), ((3366, 3377), 'generic.np.eye', 'g.np.eye', (['(4)'], {}), '(4)\n', (3374, 3377), True, 'import generic as g\n'), ((3399, 3421), 'generic.np.array', 'g.np.array', (['[-5, 0, 0]'], {}), '([-5, 0, 0])\n', (3409, 3421), True, 'import generic as g\n'), ((3437, 3448), 'generic.np.eye', 'g.np.eye', (['(4)'], {}), '(4)\n', (3445, 3448), True, 'import generic as g\n'), ((3470, 3491), 'generic.np.array', 'g.np.array', (['[2, 0, 0]'], {}), '([2, 0, 0])\n', (3480, 3491), True, 'import generic as g\n'), ((3507, 3518), 'generic.np.eye', 'g.np.eye', (['(4)'], {}), '(4)\n', (3515, 3518), True, 'import generic as g\n'), ((3540, 3562), 'generic.np.array', 'g.np.array', (['[-2, 0, 0]'], {}), '([-2, 0, 0])\n', (3550, 3562), True, 'import generic as g\n'), ((3621, 3659), 'generic.trimesh.collision.CollisionManager', 'g.trimesh.collision.CollisionManager', ([], {}), '()\n', (3657, 3659), True, 'import generic as g\n'), ((3760, 3783), 'generic.np.isclose', 'g.np.isclose', (['dist', '(4.0)'], {}), '(dist, 4.0)\n', (3772, 3783), True, 'import generic as g\n'), ((3867, 3890), 'generic.np.isclose', 'g.np.isclose', (['dist', '(4.0)'], {}), '(dist, 4.0)\n', (3879, 3890), True, 'import generic as g\n'), ((4052, 4075), 'generic.np.isclose', 'g.np.isclose', (['dist', '(2.0)'], {}), '(dist, 2.0)\n', (4064, 4075), True, 'import generic as g\n'), ((4195, 4218), 'generic.np.isclose', 'g.np.isclose', (['dist', '(2.0)'], {}), '(dist, 2.0)\n', (4207, 4218), True, 'import generic as g\n'), ((4400, 4423), 'generic.np.isclose', 'g.np.isclose', (['dist', '(9.0)'], {}), '(dist, 9.0)\n', (4412, 4423), True, 'import generic as g\n'), ((4505, 4528), 'generic.np.isclose', 'g.np.isclose', (['dist', '(9.0)'], {}), '(dist, 9.0)\n', (4517, 4528), True, 'import generic as g\n'), ((4695, 4718), 'generic.np.isclose', 'g.np.isclose', (['dist', '(2.0)'], {}), '(dist, 2.0)\n', (4707, 4718), True, 'import generic as g\n'), ((4882, 4905), 'generic.np.isclose', 'g.np.isclose', (['dist', '(2.0)'], {}), '(dist, 2.0)\n', (4894, 4905), True, 'import generic as g\n'), ((5014, 5052), 'generic.trimesh.collision.CollisionManager', 'g.trimesh.collision.CollisionManager', ([], {}), '()\n', (5050, 5052), True, 'import generic as g\n'), ((5143, 5181), 'generic.trimesh.collision.CollisionManager', 'g.trimesh.collision.CollisionManager', ([], {}), '()\n', (5179, 5181), True, 'import generic as g\n'), ((5304, 5327), 'generic.np.isclose', 'g.np.isclose', (['dist', '(4.0)'], {}), '(dist, 4.0)\n', (5316, 5327), True, 'import generic as g\n'), ((5494, 5517), 'generic.np.isclose', 'g.np.isclose', (['dist', '(1.0)'], {}), '(dist, 1.0)\n', (5506, 5517), True, 'import generic as g\n'), ((5687, 5716), 'generic.get_mesh', 'g.get_mesh', (['"""cycloidal.3DXML"""'], {}), "('cycloidal.3DXML')\n", (5697, 5716), True, 'import generic as g\n'), ((5745, 5790), 'generic.trimesh.collision.scene_to_collision', 'g.trimesh.collision.scene_to_collision', (['scene'], {}), '(scene)\n', (5783, 5790), True, 'import generic as g\n'), ((222, 260), 'generic.trimesh.collision.CollisionManager', 'g.trimesh.collision.CollisionManager', ([], {}), '()\n', (258, 260), True, 'import generic as g\n'), ((3080, 3118), 'generic.trimesh.collision.CollisionManager', 'g.trimesh.collision.CollisionManager', ([], {}), '()\n', (3116, 3118), True, 'import generic as g\n'), ((300, 359), 'generic.log.warning', 'g.log.warning', (['"""skipping collision tests, no FCL installed"""'], {}), "('skipping collision tests, no FCL installed')\n", (313, 359), True, 'import generic as g\n'), ((3158, 3217), 'generic.log.warning', 'g.log.warning', (['"""skipping collision tests, no FCL installed"""'], {}), "('skipping collision tests, no FCL installed')\n", (3171, 3217), True, 'import generic as g\n')]
|
import openpnm as op
import numpy as np
import matplotlib.pyplot as plt
pn = op.network.Cubic(shape=[10, 10, 10], spacing=1e-4)
geo = op.geometry.SpheresAndCylinders(network=pn, pores=pn.Ps, throats=pn.Ts)
air = op.phases.Air(network=pn, name='air')
water = op.phases.Water(network=pn, name='h2o')
phys_air = op.physics.Standard(network=pn, phase=air, geometry=geo)
phys_water = op.physics.Standard(network=pn, phase=water, geometry=geo)
ip = op.algorithms.InvasionPercolation(network=pn, phase=water)
ip.set_inlets(pores=pn.pores('left'))
ip.run()
Krel = []
for s in np.linspace(0, pn.Nt, 10):
inv = ip['throat.invasion_sequence'] < s
phys_air['throat.hydraulic_conductance'][inv] *= 1e-5
perm_a = op.algorithms.StokesFlow(network=pn, phase=air)
perm_a.set_value_BC(pores=pn.pores('top'), values=1)
perm_a.set_value_BC(pores=pn.pores('bottom'), values=0)
perm_a.run()
Krel.append(perm_a.rate(pores=pn.pores('top')))
plt.plot(np.linspace(0, pn.Nt, 10)/pn.Nt, Krel)
# Export to Statoil format.
# Add reservoir pores on each end
op.io.Statoil.add_reservoir_pore(network=pn,
pores=pn.pores('left'),
offset=0.25)
op.io.Statoil.add_reservoir_pore(network=pn,
pores=pn.pores('right'),
offset=0.25)
op.io.Statoil.export_data(network=pn, shape=[10, 10, 10])
|
[
"openpnm.algorithms.InvasionPercolation",
"openpnm.phases.Air",
"openpnm.network.Cubic",
"openpnm.algorithms.StokesFlow",
"openpnm.geometry.SpheresAndCylinders",
"openpnm.physics.Standard",
"openpnm.phases.Water",
"numpy.linspace",
"openpnm.io.Statoil.export_data"
] |
[((78, 130), 'openpnm.network.Cubic', 'op.network.Cubic', ([], {'shape': '[10, 10, 10]', 'spacing': '(0.0001)'}), '(shape=[10, 10, 10], spacing=0.0001)\n', (94, 130), True, 'import openpnm as op\n'), ((135, 206), 'openpnm.geometry.SpheresAndCylinders', 'op.geometry.SpheresAndCylinders', ([], {'network': 'pn', 'pores': 'pn.Ps', 'throats': 'pn.Ts'}), '(network=pn, pores=pn.Ps, throats=pn.Ts)\n', (166, 206), True, 'import openpnm as op\n'), ((213, 250), 'openpnm.phases.Air', 'op.phases.Air', ([], {'network': 'pn', 'name': '"""air"""'}), "(network=pn, name='air')\n", (226, 250), True, 'import openpnm as op\n'), ((259, 298), 'openpnm.phases.Water', 'op.phases.Water', ([], {'network': 'pn', 'name': '"""h2o"""'}), "(network=pn, name='h2o')\n", (274, 298), True, 'import openpnm as op\n'), ((310, 366), 'openpnm.physics.Standard', 'op.physics.Standard', ([], {'network': 'pn', 'phase': 'air', 'geometry': 'geo'}), '(network=pn, phase=air, geometry=geo)\n', (329, 366), True, 'import openpnm as op\n'), ((380, 438), 'openpnm.physics.Standard', 'op.physics.Standard', ([], {'network': 'pn', 'phase': 'water', 'geometry': 'geo'}), '(network=pn, phase=water, geometry=geo)\n', (399, 438), True, 'import openpnm as op\n'), ((446, 504), 'openpnm.algorithms.InvasionPercolation', 'op.algorithms.InvasionPercolation', ([], {'network': 'pn', 'phase': 'water'}), '(network=pn, phase=water)\n', (479, 504), True, 'import openpnm as op\n'), ((573, 598), 'numpy.linspace', 'np.linspace', (['(0)', 'pn.Nt', '(10)'], {}), '(0, pn.Nt, 10)\n', (584, 598), True, 'import numpy as np\n'), ((1358, 1415), 'openpnm.io.Statoil.export_data', 'op.io.Statoil.export_data', ([], {'network': 'pn', 'shape': '[10, 10, 10]'}), '(network=pn, shape=[10, 10, 10])\n', (1383, 1415), True, 'import openpnm as op\n'), ((716, 763), 'openpnm.algorithms.StokesFlow', 'op.algorithms.StokesFlow', ([], {'network': 'pn', 'phase': 'air'}), '(network=pn, phase=air)\n', (740, 763), True, 'import openpnm as op\n'), ((959, 984), 'numpy.linspace', 'np.linspace', (['(0)', 'pn.Nt', '(10)'], {}), '(0, pn.Nt, 10)\n', (970, 984), True, 'import numpy as np\n')]
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import warnings
import weakref
import six
class UnhashableKeyWarning(RuntimeWarning):
"""Raised when trying to memoize a function with an unhashable argument."""
def _try_weakref(arg, remove_callback):
"""Return a weak reference to arg if possible, or arg itself if not."""
try:
arg = weakref.ref(arg, remove_callback)
except TypeError:
# Not all types can have a weakref. That includes strings
# and floats and such, so just pass them through directly.
pass
return arg
def _get_key(args, kwargs, remove_callback):
"""Calculate the cache key, using weak references where possible."""
# Use tuples, because lists are not hashable.
weak_args = tuple(_try_weakref(arg, remove_callback) for arg in args)
# Use a tuple of (key, values) pairs, because dict is not hashable.
# Sort it, so that we don't depend on the order of keys.
weak_kwargs = tuple(sorted(
(key, _try_weakref(value, remove_callback))
for (key, value) in six.iteritems(kwargs)))
return weak_args, weak_kwargs
def memoized(func):
"""Decorator that caches function calls.
Caches the decorated function's return value the first time it is called
with the given arguments. If called later with the same arguments, the
cached value is returned instead of calling the decorated function again.
The cache uses weak references to the passed arguments, so it doesn't keep
them alive in memory forever.
"""
# The dictionary in which all the data will be cached. This is a separate
# instance for every decorated function, and it's stored in a closure of
# the wrapped function.
cache = {}
@functools.wraps(func)
def wrapped(*args, **kwargs):
# We need to have defined key early, to be able to use it in the
# remove() function, but we calculate the actual value of the key
# later on, because we need the remove() function for that.
key = None
def remove(ref):
"""A callback to remove outdated items from cache."""
try:
# The key here is from closure, and is calculated later.
del cache[key]
except KeyError:
# Some other weak reference might have already removed that
# key -- in that case we don't need to do anything.
pass
key = _get_key(args, kwargs, remove)
try:
# We want cache hit to be as fast as possible, and don't really
# care much about the speed of a cache miss, because it will only
# happen once and likely calls some external API, database, or
# some other slow thing. That's why the hit is in straightforward
# code, and the miss is in an exception.
value = cache[key]
except KeyError:
value = cache[key] = func(*args, **kwargs)
except TypeError:
# The calculated key may be unhashable when an unhashable object,
# such as a list, is passed as one of the arguments. In that case,
# we can't cache anything and simply always call the decorated
# function.
warnings.warn(
"The key %r is not hashable and cannot be memoized." % (key,),
UnhashableKeyWarning, 2)
value = func(*args, **kwargs)
return value
return wrapped
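# Editor's sketch (not in the original source): typical use of @memoized --
# repeated calls with the same (weakly referenced) arguments return the
# cached value instead of re-running the body.
#   @memoized
#   def get_flavors(api_client):
#       return api_client.flavors.list()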
# We can use @memoized for methods now too, because it uses weakref and so
# it doesn't keep the instances in memory forever. We might want to separate
# them in the future, however.
memoized_method = memoized
def memoized_with_request(request_func, request_index=0):
"""Decorator for caching functions which receive a request argument
memoized functions with a request argument are memoized only during the
rendering of a single view because the request argument is a new request
instance on each view.
If you want a function to be memoized for multiple views use this
decorator.
It replaces the request argument in the call to the decorated function
with the result of calling request_func on that request object.
request_function is a function which will receive the request argument.
request_index indicates which argument of the decorated function is the
request object to pass into request_func, which will also be replaced
by the result of request_func being called.
your memoized function will instead receive request_func(request)
passed as argument at the request_index.
The intent of that function is to extract the information needed from the
request, and thus the memoizing will operate just on that part of the
request that is relevant to the function being memoized.
short example:
@memoized
    def _get_api_client(username, token_id, project_id, auth_url):
return api_client.Client(username, token_id, project_id, auth_url)
def get_api_client(request):
        return _get_api_client(request.user.username,
                               request.user.token.id,
                               request.user.tenant_id)
@memoized_with_request(get_api_client)
def some_api_function(api_client, *args, **kwargs):
# is like returning get_api_client(
# request).some_method(*args, **kwargs)
# but with memoization.
return api_client.some_method(*args, **kwargs)
@memoized_with_request(get_api_client, 1)
    def some_other_func(param, api_client, other_param):
        # The decorated function will be called this way:
        # some_other_func(param, request, other_param)
        # but will be called behind the scenes this way:
        # some_other_func(param, get_api_client(request), other_param)
return api_client.some_method(param, other_param)
See openstack_dashboard.api.nova for a complete example.
"""
def wrapper(func):
memoized_func = memoized(func)
@functools.wraps(func)
def wrapped(*args, **kwargs):
args = list(args)
request = args.pop(request_index)
args.insert(request_index, request_func(request))
return memoized_func(*args, **kwargs)
return wrapped
return wrapper
|
[
"warnings.warn",
"weakref.ref",
"six.iteritems",
"functools.wraps"
] |
[((2321, 2342), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (2336, 2342), False, 'import functools\n'), ((932, 965), 'weakref.ref', 'weakref.ref', (['arg', 'remove_callback'], {}), '(arg, remove_callback)\n', (943, 965), False, 'import weakref\n'), ((6695, 6716), 'functools.wraps', 'functools.wraps', (['func'], {}), '(func)\n', (6710, 6716), False, 'import functools\n'), ((3842, 3947), 'warnings.warn', 'warnings.warn', (["('The key %r is not hashable and cannot be memoized.' % (key,))", 'UnhashableKeyWarning', '(2)'], {}), "('The key %r is not hashable and cannot be memoized.' % (key,),\n UnhashableKeyWarning, 2)\n", (3855, 3947), False, 'import warnings\n'), ((1638, 1659), 'six.iteritems', 'six.iteritems', (['kwargs'], {}), '(kwargs)\n', (1651, 1659), False, 'import six\n')]
|
__author__ = 'NoNotCar'
import pygame
import os
from random import choice, randint
import math
import colorsys
from itertools import count
from collections import defaultdict
tau = math.pi * 2
hpi = math.pi / 2
np = os.path.normpath
loc = os.getcwd() + "/Assets/"
pygame.mixer.init()
from . import Colour
class ScaledImage(object):
def __init__(self,img):
self.imgs=img if isinstance(img,list) else (img,)+tuple(xn(img,n) for n in (2,3,4))
self.img=self.imgs[0]
self.h,self.w=self.img.get_height(),self.img.get_width()
def blit(self,other,tpos,**kwargs):
for n,i in enumerate(self.imgs):
i.blit(other.imgs[n],(tpos[0]*(n+1),tpos[1]*(n+1)),**kwargs)
def copy(self):
return ScaledImage(self.img.copy())
def __getitem__(self, item):
return self.imgs[item]
def path(fil):
return np(loc+fil)
def convertx(i):
return i.convert_alpha()
"""px=pygame.PixelArray(i)
for p in px:
for n in p:
if i.unmap_rgb(n)[3]!=255:
del px
return i.convert_alpha()
else:
del px
return i.convert()"""
def img(fil):
return convertx(pygame.image.load(np(loc + fil + ".png")))
def imgx(fil):
i=img(fil)
return ScaledImage(i)
def imgn(fil,n):
return xn(img(fil),n)
def xn(img,n):
return pygame.transform.scale(img,(int(img.get_width()*n),int(img.get_height()*n))).convert_alpha()
def ftrans(f,folder):
return lambda x: f(folder+"/"+x)
def imgsz(fil, sz):
return pygame.transform.scale(pygame.image.load(np(loc + fil + ".png")), sz).convert_alpha()
def imgstripx(fil):
i = img(fil)
imgs = []
h=i.get_height()
for n in range(i.get_width() // h):
imgs.append(ScaledImage(i.subsurface(pygame.Rect(n * h, 0, h, h))))
return imgs
def imgstripxf(fil,w=16):
img = pygame.image.load(np(loc + fil + ".png"))
imgs = []
h=img.get_height()
for n in range(img.get_width() // w):
imgs.append(ScaledImage(img.subsurface(pygame.Rect(n * w, 0, w, h))))
return imgs
def tilemapx(fil):
if isinstance(fil,str):
i = img(fil)
else:
i=fil
imgs = []
sz=16
h=i.get_height()
w=i.get_width()
for y in range(h // sz):
for x in range(w // sz):
imgs.append(ScaledImage(i.subsurface(pygame.Rect(x * sz, y*sz, sz, sz))))
return imgs
def tilesplit(tile):
img=tile[0]
sss=[]
for y in range(2):
for x in range(2):
sss.append(ScaledImage(img.subsurface(pygame.Rect(x*8,y*8,8,8))))
return sss
def imgstrip(fil):
i = img(fil)
imgs = []
h=i.get_height()
for n in range(i.get_width() // h):
imgs.append(i.subsurface(pygame.Rect(n * h, 0, h, h)).convert_alpha())
return imgs
def imgstrip4f(fil,w):
i = img(fil)
imgs = []
h=i.get_height()
for n in range(i.get_width() // w):
imgs.append(pygame.transform.scale(i.subsurface(pygame.Rect(n * w, 0, w, h)), (w*4, h*4)).convert_alpha())
return imgs
def imgstripxfs(fil,ws):
i = img(fil)
imgs = []
h = i.get_height()
cw=0
for w in ws:
imgs.append(ScaledImage(i.subsurface(pygame.Rect(cw, 0, w, h))))
cw+=w
return imgs
def imgrot(i,r=4):
if isinstance(i,str):
i=imgx(i)
imgs=[i]
for n in range(r-1):
imgs.append(ScaledImage(pygame.transform.rotate(i[0],-90*n-90)))
return imgs
def imgstriprot(fil,r=4):
return [imgrot(i,r) for i in imgstripx(fil)]
def irot(i,n):
return ScaledImage(pygame.transform.rotate(i.img,-90*n))
def bcentre(font, text, surface, offset=0, col=(0, 0, 0), xoffset=0):
render = font.render(str(text), True, col, )
textrect = render.get_rect()
textrect.centerx = surface.get_rect().centerx + xoffset
textrect.centery = surface.get_rect().centery + offset
return surface.blit(render, textrect)
def bcentrex(font, text, surface, y, col=(0, 0, 0), xoffset=0):
render = font.render(str(text), True, col, )
textrect = render.get_rect()
textrect.centerx = surface.get_rect().centerx + xoffset
textrect.top = y
return surface.blit(render, textrect)
def bcentrerect(font, text, surface, rect, col=(0, 0, 0),yoff=0):
render = font.render(str(text), True, col, )
textrect = render.get_rect()
textrect.centerx = rect.centerx
textrect.centery = rect.centery+yoff
return surface.blit(render, textrect)
def cxblit(source, dest, y, xoff=0):
srect=source.get_rect()
drect=dest.get_rect()
srect.centerx=drect.centerx+xoff
srect.top=y
return dest.blit(source,srect)
def bcentrepos(font,text,surface,cpos,col=(0,0,0)):
render = font.render(str(text), True, col, )
textrect = render.get_rect()
textrect.center=cpos
return surface.blit(render, textrect)
def sndget(fil):
return pygame.mixer.Sound(np(loc+"Sounds/"+fil+".wav"))
def hflip(img):
return [img,ScaledImage(pygame.transform.flip(img.img,1,0))]
def vflip(img):
return [img,ScaledImage(pygame.transform.flip(img.img,0,1))]
def ixn(img,n):
return pygame.transform.scale(img,(img.get_width()*n,img.get_height()*n))
def x4(img):
return xn(img,4)
def colswap(img,sc,ec):
if isinstance(img,pygame.Surface):
px=pygame.PixelArray(img)
px.replace(sc,ec)
else:
for i in img.imgs:
px = pygame.PixelArray(i)
px.replace(sc, ec)
return img
def colcopy(i,sc,ec):
if isinstance(i,list):
return [colcopy(img,sc,ec) for img in i]
i=i.imgs[0].copy()
colswap(i,sc,ec)
return ScaledImage(i)
def multicolcopy(img,*args):
img=colcopy(img,*args[0])
for s,e in args[1:]:
colswap(img,s,e)
return img
def supercolcopy(img,col):
if isinstance(img,list):
return [supercolcopy(i,col) for i in img]
return multicolcopy(img,((255,255,255),col),((192,192,192),Colour.darker(col,0.75)),((191,191,191),Colour.darker(col,0.75)),((128,128,128),Colour.darker(col)),((64,64,64),Colour.darker(col,0.25)))
def new_bot(fil, col):
imgs=imgstripx(fil)
for i in imgs:
colswap(i,(128,128,128),col)
return imgs
def conv_imgs(fil):
src=img(fil)
conv=src.subsurface(pygame.Rect(2,0,12,16))
imgs=[]
for n in range(16):
new=src.copy()
new.blit(conv,(2,-n))
new.blit(conv,(2,16-n))
imgs.append(ScaledImage(new))
return [imgrot(i) for i in imgs]
def musplay(song,loops=-1):
pygame.mixer.music.stop()
pygame.mixer.music.load(np(loc+"Music/"+song+".ogg"))
pygame.mixer.music.play(loops)
def polplus(pos,ang,l):
return tuple(map(sum, zip(pos, (l*math.cos(ang),l*math.sin(ang)))))
def draw_rotor(screen,center,radius,arms,angle,col,w=4):
for n in range(arms):
#magic
pygame.draw.polygon(screen,col,(polplus(center,angle-hpi,w),polplus(center,angle+hpi,w),polplus(polplus(center,angle+hpi,w),angle,radius),polplus(polplus(center,angle-hpi,w),angle,radius)))
angle+=tau/arms
def rot_center(image, angle):
"""rotate an image while keeping its center and size"""
rots=[]
for i in image.imgs:
orig_rect = i.get_rect()
rot_image = pygame.transform.rotate(i, angle)
rot_rect = orig_rect.copy()
rot_rect.center = rot_image.get_rect().center
rots.append(rot_image.subsurface(rot_rect).copy())
return ScaledImage(rots)
def lotsrots(img,degscale,sym=1):
return [rot_center(img,ang) for ang in range(0,360//sym,degscale)]
imss=[]
class ImageManager(object):
def __init__(self):
self.imgs={}
imss.append(self)
def register(self):
used=self.imgs.keys()
new= next((n for n in count() if n not in used))
self[new]
return new
def gen_img(self):
return None
def __getitem__(self, item):
try:
return self.imgs[item]
except KeyError:
ni=self.gen_img()
self.imgs[item]=ni
return ni
def reload(self):
self.imgs={}
class RandomImageManager(ImageManager):
def __init__(self,imgs,cf,sc=(128,128,128)):
self.i=imgs
self.cf=cf
self.sc=sc
ImageManager.__init__(self)
def gen_img(self):
return colcopy(choice(self.i),self.sc,self.cf())
class KeyedImageManager(object):
def __init__(self):
self.imgs={}
def gen_img(self,args):
return None
def __getitem__(self, args):
try:
return self.imgs[args]
except KeyError:
ni=self.gen_img(args)
self.imgs[args]=ni
return ni
class SuperImageManager(KeyedImageManager):
def __init__(self,base):
self.base=base
KeyedImageManager.__init__(self)
def gen_img(self,args):
return supercolcopy(self.base,args)
class ColourImageManager(KeyedImageManager):
def __init__(self,base,sc=(128,128,128)):
self.base=base
self.sc=sc
KeyedImageManager.__init__(self)
def gen_img(self,args):
return colcopy(self.base,self.sc,args)
class ColourGenerator(object):
def __init__(self,min_sat,cd=None):
self.ms=min_sat
self.cd=cd
self.gen_cols=set()
def __call__(self, *args, **kwargs):
while True:
nc=tuple(randint(0,255) for _ in range(3))
if max(nc)-min(nc)>=self.ms:
if self.cd is None:
return nc
for c in self.gen_cols:
cd=sum(abs(c[n]-nc[n]) for n in range(3))
if cd<=self.cd:
break
else:
self.cd+=1
self.gen_cols.add(nc)
return nc
self.cd-=1
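# Editor's note on the class below: it composes 16x16 tiles from four 8x8
# corner quadrants (selected per corner via __getitem__) and caches each
# combination -- apparently an autotiling helper.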
class UltraTiles(object):
blank=imgx("Blank")
def __init__(self,fil,*ccs):
tiles=imgstripx(fil)
for n,cc in enumerate(ccs):
if n:
[colswap(t, cc[0], cc[1]) for t in tiles]
else:
[colswap(t, (128,)*3, cc) for t in tiles]
self.tiles=[tilesplit(t) for t in tiles]
self.cache={}
def __getitem__(self, item):
try:
return self.cache[item]
except KeyError:
tile=self.blank.copy()
for n,t in enumerate(item):
tile.blit(self.tiles[t][n],(n%2*8,n//2*8))
self.cache[item]=tile
return tile
def fload(fil,sz=16):
return pygame.font.Font(np(loc+fil+".ttf"),sz)
buttimg=imgx("MenuButton")[3]
def button(text,font):
img=buttimg.copy()
bcentre(font,text,img,-4)
return img
# prog=imgx("Progress")
# def draw_progress(world,pos,p,col=(0,255,0)):
# world.blit(prog,pos,oy=-4)
# pygame.draw.rect(world.screen,col,pygame.Rect(world.screen_space(pos,ox=1,oy=-3),(world.cam_scale*p*14//16,world.cam_scale//8)))
numerals=imgstripxfs("Numbers",[5,3]+[5]*8)
def draw_num(surf,n,pos,rscale,draw_one=False):
if n<1+(not draw_one):
return
n=str(n)
x=15*rscale
for d in reversed(n):
x-=2*rscale if int(d)==1 else 4*rscale
surf.blit(numerals[int(d)][rscale-1], (pos[0]+x, pos[1]+9*rscale))
def draw_with_num(surf,scimg,n,pos,rscale):
surf.blit(scimg[rscale-1],pos)
draw_num(surf,n,pos,rscale)
def music_mix(dir):
return [dir+"/"+m[:-4] for m in os.listdir(np(loc+"Music/"+dir)) if m[-4:]==".ogg"]
def trans_rect(sz,col):
surf=pygame.Surface(sz,pygame.SRCALPHA,32)
surf.fill(col)
return ScaledImage(surf)
class DJ(object):
state="BLEEEEEEEEEEEEEEEEEEERGH"
def __init__(self,state="Title"):
self.switch(state)
def switch(self,d):
if self.state!=d:
self.state=d
self.songs=music_mix(d)
pygame.mixer.music.stop()
def update(self):
if not pygame.mixer.music.get_busy():
musplay(choice(self.songs),1)
|
[
"pygame.transform.flip",
"pygame.transform.rotate",
"pygame.Surface",
"random.randint",
"os.getcwd",
"pygame.mixer.init",
"pygame.Rect",
"pygame.mixer.music.play",
"random.choice",
"itertools.count",
"math.sin",
"pygame.mixer.music.get_busy",
"math.cos",
"pygame.PixelArray",
"pygame.mixer.music.stop"
] |
[((266, 285), 'pygame.mixer.init', 'pygame.mixer.init', ([], {}), '()\n', (283, 285), False, 'import pygame\n'), ((241, 252), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (250, 252), False, 'import os\n'), ((6455, 6480), 'pygame.mixer.music.stop', 'pygame.mixer.music.stop', ([], {}), '()\n', (6478, 6480), False, 'import pygame\n'), ((6543, 6573), 'pygame.mixer.music.play', 'pygame.mixer.music.play', (['loops'], {}), '(loops)\n', (6566, 6573), False, 'import pygame\n'), ((11409, 11448), 'pygame.Surface', 'pygame.Surface', (['sz', 'pygame.SRCALPHA', '(32)'], {}), '(sz, pygame.SRCALPHA, 32)\n', (11423, 11448), False, 'import pygame\n'), ((3544, 3583), 'pygame.transform.rotate', 'pygame.transform.rotate', (['i.img', '(-90 * n)'], {}), '(i.img, -90 * n)\n', (3567, 3583), False, 'import pygame\n'), ((5254, 5276), 'pygame.PixelArray', 'pygame.PixelArray', (['img'], {}), '(img)\n', (5271, 5276), False, 'import pygame\n'), ((6203, 6228), 'pygame.Rect', 'pygame.Rect', (['(2)', '(0)', '(12)', '(16)'], {}), '(2, 0, 12, 16)\n', (6214, 6228), False, 'import pygame\n'), ((7170, 7203), 'pygame.transform.rotate', 'pygame.transform.rotate', (['i', 'angle'], {}), '(i, angle)\n', (7193, 7203), False, 'import pygame\n'), ((4934, 4970), 'pygame.transform.flip', 'pygame.transform.flip', (['img.img', '(1)', '(0)'], {}), '(img.img, 1, 0)\n', (4955, 4970), False, 'import pygame\n'), ((5015, 5051), 'pygame.transform.flip', 'pygame.transform.flip', (['img.img', '(0)', '(1)'], {}), '(img.img, 0, 1)\n', (5036, 5051), False, 'import pygame\n'), ((5357, 5377), 'pygame.PixelArray', 'pygame.PixelArray', (['i'], {}), '(i)\n', (5374, 5377), False, 'import pygame\n'), ((8246, 8260), 'random.choice', 'choice', (['self.i'], {}), '(self.i)\n', (8252, 8260), False, 'from random import choice, randint\n'), ((11738, 11763), 'pygame.mixer.music.stop', 'pygame.mixer.music.stop', ([], {}), '()\n', (11761, 11763), False, 'import pygame\n'), ((11801, 11830), 'pygame.mixer.music.get_busy', 'pygame.mixer.music.get_busy', ([], {}), '()\n', (11828, 11830), False, 'import pygame\n'), ((3374, 3417), 'pygame.transform.rotate', 'pygame.transform.rotate', (['i[0]', '(-90 * n - 90)'], {}), '(i[0], -90 * n - 90)\n', (3397, 3417), False, 'import pygame\n'), ((11852, 11870), 'random.choice', 'choice', (['self.songs'], {}), '(self.songs)\n', (11858, 11870), False, 'from random import choice, randint\n'), ((1771, 1798), 'pygame.Rect', 'pygame.Rect', (['(n * h)', '(0)', 'h', 'h'], {}), '(n * h, 0, h, h)\n', (1782, 1798), False, 'import pygame\n'), ((2022, 2049), 'pygame.Rect', 'pygame.Rect', (['(n * w)', '(0)', 'w', 'h'], {}), '(n * w, 0, w, h)\n', (2033, 2049), False, 'import pygame\n'), ((3183, 3207), 'pygame.Rect', 'pygame.Rect', (['cw', '(0)', 'w', 'h'], {}), '(cw, 0, w, h)\n', (3194, 3207), False, 'import pygame\n'), ((7678, 7685), 'itertools.count', 'count', ([], {}), '()\n', (7683, 7685), False, 'from itertools import count\n'), ((9281, 9296), 'random.randint', 'randint', (['(0)', '(255)'], {}), '(0, 255)\n', (9288, 9296), False, 'from random import choice, randint\n'), ((2337, 2372), 'pygame.Rect', 'pygame.Rect', (['(x * sz)', '(y * sz)', 'sz', 'sz'], {}), '(x * sz, y * sz, sz, sz)\n', (2348, 2372), False, 'import pygame\n'), ((2538, 2569), 'pygame.Rect', 'pygame.Rect', (['(x * 8)', '(y * 8)', '(8)', '(8)'], {}), '(x * 8, y * 8, 8, 8)\n', (2549, 2569), False, 'import pygame\n'), ((2725, 2752), 'pygame.Rect', 'pygame.Rect', (['(n * h)', '(0)', 'h', 'h'], {}), '(n * h, 0, h, h)\n', (2736, 2752), False, 'import pygame\n'), ((6636, 6649), 'math.cos', 'math.cos', (['ang'], {}), '(ang)\n', (6644, 6649), False, 'import math\n'), ((6652, 6665), 'math.sin', 'math.sin', (['ang'], {}), '(ang)\n', (6660, 6665), False, 'import math\n'), ((2958, 2985), 'pygame.Rect', 'pygame.Rect', (['(n * w)', '(0)', 'w', 'h'], {}), '(n * w, 0, w, h)\n', (2969, 2985), False, 'import pygame\n')]
|
import os
import preshed
import pytest
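# Run preshed's bundled test suite by pointing pytest at the installed package directory.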
PACKAGE_DIR = os.path.abspath(os.path.dirname(preshed.__file__))
pytest.main([PACKAGE_DIR])
|
[
"os.path.dirname",
"pytest.main"
] |
[((105, 131), 'pytest.main', 'pytest.main', (['[PACKAGE_DIR]'], {}), '([PACKAGE_DIR])\n', (116, 131), False, 'import pytest\n'), ((70, 103), 'os.path.dirname', 'os.path.dirname', (['preshed.__file__'], {}), '(preshed.__file__)\n', (85, 103), False, 'import os\n')]
|
import torch
import numpy as np
from torchvision import models
from utils.misc import *
from utils.process_fp import process_inputs_fp
def compute_features(tg_model, free_model, tg_feature_model, is_start_iteration, evalloader, num_samples, num_features, device=None):
if device is None:
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
tg_feature_model.eval()
tg_model.eval()
if free_model is not None:
free_model.eval()
features = np.zeros([num_samples, num_features])
start_idx = 0
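    # Fill the preallocated feature matrix batch by batch, with gradients disabled.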
with torch.no_grad():
for inputs, targets in evalloader:
inputs = inputs.to(device)
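            # On the first incremental step, use the plain feature extractor;
            # later steps go through process_inputs_fp in feature mode.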
if is_start_iteration:
the_feature = tg_feature_model(inputs)
else:
the_feature = process_inputs_fp(tg_model, free_model, inputs, feature_mode=True)
features[start_idx:start_idx+inputs.shape[0], :] = np.squeeze(the_feature.cpu().numpy())
start_idx = start_idx+inputs.shape[0]
    assert start_idx == num_samples
return features
|
[
"utils.process_fp.process_inputs_fp",
"torch.no_grad",
"numpy.zeros",
"torch.cuda.is_available"
] |
[((493, 530), 'numpy.zeros', 'np.zeros', (['[num_samples, num_features]'], {}), '([num_samples, num_features])\n', (501, 530), True, 'import numpy as np\n'), ((558, 573), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (571, 573), False, 'import torch\n'), ((335, 360), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (358, 360), False, 'import torch\n'), ((795, 861), 'utils.process_fp.process_inputs_fp', 'process_inputs_fp', (['tg_model', 'free_model', 'inputs'], {'feature_mode': '(True)'}), '(tg_model, free_model, inputs, feature_mode=True)\n', (812, 861), False, 'from utils.process_fp import process_inputs_fp\n')]
|
from pathlib import Path
from typing import Union, Dict, Any, Tuple
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.common_layers import CBHG
from utils.text.symbols import phonemes
class Encoder(nn.Module):
def __init__(self, embed_dims, num_chars, cbhg_channels, K, num_highways, dropout):
super().__init__()
self.embedding = nn.Embedding(num_chars, embed_dims)
self.pre_net = PreNet(embed_dims)
self.cbhg = CBHG(K=K, in_channels=cbhg_channels, channels=cbhg_channels,
proj_channels=[cbhg_channels, cbhg_channels],
num_highways=num_highways)
def forward(self, x):
x = self.embedding(x)
x = self.pre_net(x)
x.transpose_(1, 2)
x = self.cbhg(x)
return x
class PreNet(nn.Module):
def __init__(self, in_dims, fc1_dims=256, fc2_dims=128, dropout=0.5):
super().__init__()
self.fc1 = nn.Linear(in_dims, fc1_dims)
self.fc2 = nn.Linear(fc1_dims, fc2_dims)
self.p = dropout
def forward(self, x):
x = self.fc1(x)
x = F.relu(x)
x = F.dropout(x, self.p, training=self.training)
x = self.fc2(x)
x = F.relu(x)
x = F.dropout(x, self.p, training=self.training)
return x
class Attention(nn.Module):
def __init__(self, attn_dims):
super().__init__()
self.W = nn.Linear(attn_dims, attn_dims, bias=False)
self.v = nn.Linear(attn_dims, 1, bias=False)
def forward(self, encoder_seq_proj, query, t):
# print(encoder_seq_proj.shape)
# Transform the query vector
query_proj = self.W(query).unsqueeze(1)
# Compute the scores
u = self.v(torch.tanh(encoder_seq_proj + query_proj))
scores = F.softmax(u, dim=1)
return scores.transpose(1, 2)
class LSA(nn.Module):
def __init__(self, attn_dim, kernel_size=31, filters=32):
super().__init__()
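        # Location-sensitive attention: a 1-D conv over the previous and
        # cumulative attention weights lets the model track where it has attended.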
self.conv = nn.Conv1d(2, filters, padding=(kernel_size - 1) // 2, kernel_size=kernel_size, bias=False)
self.L = nn.Linear(filters, attn_dim, bias=True)
self.W = nn.Linear(attn_dim, attn_dim, bias=True)
self.v = nn.Linear(attn_dim, 1, bias=False)
self.cumulative = None
self.attention = None
def init_attention(self, encoder_seq_proj):
device = next(self.parameters()).device # use same device as parameters
b, t, c = encoder_seq_proj.size()
self.cumulative = torch.zeros(b, t, device=device)
self.attention = torch.zeros(b, t, device=device)
def forward(self, encoder_seq_proj, query, t):
if t == 0: self.init_attention(encoder_seq_proj)
processed_query = self.W(query).unsqueeze(1)
location = torch.cat([self.cumulative.unsqueeze(1), self.attention.unsqueeze(1)], dim=1)
processed_loc = self.L(self.conv(location).transpose(1, 2))
u = self.v(torch.tanh(processed_query + encoder_seq_proj + processed_loc))
u = u.squeeze(-1)
# Smooth Attention
#scores = torch.sigmoid(u) / torch.sigmoid(u).sum(dim=1, keepdim=True)
scores = F.softmax(u, dim=1)
self.attention = scores
self.cumulative += self.attention
return scores.unsqueeze(-1).transpose(1, 2)
class Decoder(nn.Module):
    # Class variable because its value doesn't change between instances,
    # yet it ought to be scoped by class because it's a property of a Decoder
max_r = 20
def __init__(self, n_mels, decoder_dims, lstm_dims):
super().__init__()
self.register_buffer('r', torch.tensor(1, dtype=torch.int))
self.n_mels = n_mels
self.prenet = PreNet(n_mels)
self.attn_net = LSA(decoder_dims)
self.attn_rnn = nn.GRUCell(decoder_dims + decoder_dims // 2, decoder_dims)
self.rnn_input = nn.Linear(2 * decoder_dims, lstm_dims)
self.res_rnn1 = nn.LSTMCell(lstm_dims, lstm_dims)
self.res_rnn2 = nn.LSTMCell(lstm_dims, lstm_dims)
self.mel_proj = nn.Linear(lstm_dims, n_mels * self.max_r, bias=False)
def zoneout(self, prev, current, p=0.1):
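        # Zoneout regularization: with probability p, keep each element of the
        # previous hidden state instead of the freshly computed one.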
device = next(self.parameters()).device # Use same device as parameters
mask = torch.zeros(prev.size(), device=device).bernoulli_(p)
return prev * mask + current * (1 - mask)
def forward(self, encoder_seq, encoder_seq_proj, prenet_in,
hidden_states, cell_states, context_vec, t):
# Need this for reshaping mels
batch_size = encoder_seq.size(0)
# Unpack the hidden and cell states
attn_hidden, rnn1_hidden, rnn2_hidden = hidden_states
rnn1_cell, rnn2_cell = cell_states
# PreNet for the Attention RNN
prenet_out = self.prenet(prenet_in)
# Compute the Attention RNN hidden state
attn_rnn_in = torch.cat([context_vec, prenet_out], dim=-1)
attn_hidden = self.attn_rnn(attn_rnn_in.squeeze(1), attn_hidden)
# Compute the attention scores
scores = self.attn_net(encoder_seq_proj, attn_hidden, t)
# Dot product to create the context vector
context_vec = scores @ encoder_seq
context_vec = context_vec.squeeze(1)
# Concat Attention RNN output w. Context Vector & project
x = torch.cat([context_vec, attn_hidden], dim=1)
x = self.rnn_input(x)
# Compute first Residual RNN
rnn1_hidden_next, rnn1_cell = self.res_rnn1(x, (rnn1_hidden, rnn1_cell))
if self.training:
rnn1_hidden = self.zoneout(rnn1_hidden, rnn1_hidden_next)
else:
rnn1_hidden = rnn1_hidden_next
x = x + rnn1_hidden
# Compute second Residual RNN
rnn2_hidden_next, rnn2_cell = self.res_rnn2(x, (rnn2_hidden, rnn2_cell))
if self.training:
rnn2_hidden = self.zoneout(rnn2_hidden, rnn2_hidden_next)
else:
rnn2_hidden = rnn2_hidden_next
x = x + rnn2_hidden
# Project Mels
mels = self.mel_proj(x)
mels = mels.view(batch_size, self.n_mels, self.max_r)[:, :, :self.r]
hidden_states = (attn_hidden, rnn1_hidden, rnn2_hidden)
cell_states = (rnn1_cell, rnn2_cell)
return mels, scores, hidden_states, cell_states, context_vec
class Tacotron(nn.Module):
def __init__(self,
embed_dims: int,
num_chars: int,
encoder_dims: int,
decoder_dims: int,
n_mels: int,
postnet_dims: int,
encoder_k: int,
lstm_dims: int,
postnet_k: int,
num_highways: int,
dropout: float,
stop_threshold: float) -> None:
super().__init__()
self.n_mels = n_mels
self.lstm_dims = lstm_dims
self.decoder_dims = decoder_dims
self.encoder = Encoder(embed_dims, num_chars, encoder_dims,
encoder_k, num_highways, dropout)
self.encoder_proj = nn.Linear(decoder_dims, decoder_dims, bias=False)
self.decoder = Decoder(n_mels, decoder_dims, lstm_dims)
self.postnet = CBHG(postnet_k, n_mels, postnet_dims, [256, 80], num_highways)
self.post_proj = nn.Linear(postnet_dims * 2, n_mels, bias=False)
self.init_model()
self.register_buffer('step', torch.zeros(1, dtype=torch.long))
self.register_buffer('stop_threshold', torch.tensor(stop_threshold, dtype=torch.float32))
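    # r is the reduction factor: the number of mel frames generated per decoder step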
@property
def r(self) -> int:
return self.decoder.r.item()
@r.setter
def r(self, value: int) -> None:
self.decoder.r = self.decoder.r.new_tensor(value, requires_grad=False)
    def forward(self, x: torch.Tensor, m: torch.Tensor) -> torch.Tensor:
device = next(self.parameters()).device # use same device as parameters
if self.training:
self.step += 1
batch_size, _, steps = m.size()
# Initialise all hidden states and pack into tuple
attn_hidden = torch.zeros(batch_size, self.decoder_dims, device=device)
rnn1_hidden = torch.zeros(batch_size, self.lstm_dims, device=device)
rnn2_hidden = torch.zeros(batch_size, self.lstm_dims, device=device)
hidden_states = (attn_hidden, rnn1_hidden, rnn2_hidden)
# Initialise all lstm cell states and pack into tuple
rnn1_cell = torch.zeros(batch_size, self.lstm_dims, device=device)
rnn2_cell = torch.zeros(batch_size, self.lstm_dims, device=device)
cell_states = (rnn1_cell, rnn2_cell)
# <GO> Frame for start of decoder loop
go_frame = torch.zeros(batch_size, self.n_mels, device=device)
# Need an initial context vector
context_vec = torch.zeros(batch_size, self.decoder_dims, device=device)
# Project the encoder outputs to avoid
# unnecessary matmuls in the decoder loop
encoder_seq = self.encoder(x)
encoder_seq_proj = self.encoder_proj(encoder_seq)
# Need a couple of lists for outputs
mel_outputs, attn_scores = [], []
# Run the decoder loop
for t in range(0, steps, self.r):
prenet_in = m[:, :, t - 1] if t > 0 else go_frame
mel_frames, scores, hidden_states, cell_states, context_vec = \
self.decoder(encoder_seq, encoder_seq_proj, prenet_in,
hidden_states, cell_states, context_vec, t)
mel_outputs.append(mel_frames)
attn_scores.append(scores)
# Concat the mel outputs into sequence
mel_outputs = torch.cat(mel_outputs, dim=2)
# Post-Process for Linear Spectrograms
postnet_out = self.postnet(mel_outputs)
linear = self.post_proj(postnet_out)
linear = linear.transpose(1, 2)
# For easy visualisation
attn_scores = torch.cat(attn_scores, 1)
# attn_scores = attn_scores.cpu().data.numpy()
return mel_outputs, linear, attn_scores
    def generate(self, x: torch.Tensor, steps=2000) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]:
self.eval()
device = next(self.parameters()).device # use same device as parameters
batch_size = 1
        # Need to initialise all hidden states and pack into tuple for tidiness
attn_hidden = torch.zeros(batch_size, self.decoder_dims, device=device)
rnn1_hidden = torch.zeros(batch_size, self.lstm_dims, device=device)
rnn2_hidden = torch.zeros(batch_size, self.lstm_dims, device=device)
hidden_states = (attn_hidden, rnn1_hidden, rnn2_hidden)
        # Need to initialise all lstm cell states and pack into tuple for tidiness
rnn1_cell = torch.zeros(batch_size, self.lstm_dims, device=device)
rnn2_cell = torch.zeros(batch_size, self.lstm_dims, device=device)
cell_states = (rnn1_cell, rnn2_cell)
# Need a <GO> Frame for start of decoder loop
go_frame = torch.zeros(batch_size, self.n_mels, device=device)
# Need an initial context vector
context_vec = torch.zeros(batch_size, self.decoder_dims, device=device)
# Project the encoder outputs to avoid
# unnecessary matmuls in the decoder loop
encoder_seq = self.encoder(x)
encoder_seq_proj = self.encoder_proj(encoder_seq)
# Need a couple of lists for outputs
mel_outputs, attn_scores = [], []
# Run the decoder loop
for t in range(0, steps, self.r):
prenet_in = mel_outputs[-1][:, :, -1] if t > 0 else go_frame
mel_frames, scores, hidden_states, cell_states, context_vec = \
self.decoder(encoder_seq, encoder_seq_proj, prenet_in,
hidden_states, cell_states, context_vec, t)
mel_outputs.append(mel_frames)
attn_scores.append(scores)
# Stop the loop if silent frames present
if (mel_frames < self.stop_threshold).all() and t > 10: break
# Concat the mel outputs into sequence
mel_outputs = torch.cat(mel_outputs, dim=2)
# Post-Process for Linear Spectrograms
postnet_out = self.postnet(mel_outputs)
linear = self.post_proj(postnet_out)
linear = linear.transpose(1, 2)[0].cpu().data.numpy()
mel_outputs = mel_outputs[0].cpu().data.numpy()
# For easy visualisation
attn_scores = torch.cat(attn_scores, 1)
attn_scores = attn_scores.cpu().data.numpy()[0]
self.train()
return mel_outputs, linear, attn_scores
def init_model(self):
for p in self.parameters():
if p.dim() > 1: nn.init.xavier_uniform_(p)
def get_step(self):
return self.step.data.item()
def reset_step(self):
# assignment to parameters or buffers is overloaded, updates internal dict entry
self.step = self.step.data.new_tensor(1)
@classmethod
def from_config(cls, config: Dict[str, Any]) -> 'Tacotron':
model_config = config['tacotron']['model']
model_config['num_chars'] = len(phonemes)
model_config['n_mels'] = config['dsp']['num_mels']
return Tacotron(**model_config)
@classmethod
def from_checkpoint(cls, path: Union[Path, str]) -> 'Tacotron':
checkpoint = torch.load(path, map_location=torch.device('cpu'))
model = Tacotron.from_config(checkpoint['config'])
model.load_state_dict(checkpoint['model'])
return model
|
[
"torch.nn.Embedding",
"torch.nn.Conv1d",
"torch.nn.functional.dropout",
"torch.nn.LSTMCell",
"torch.cat",
"torch.nn.functional.softmax",
"torch.nn.init.xavier_uniform_",
"torch.nn.Linear",
"models.common_layers.CBHG",
"torch.device",
"torch.nn.functional.relu",
"torch.zeros",
"torch.nn.GRUCell",
"torch.tensor",
"torch.tanh"
] |
[((383, 418), 'torch.nn.Embedding', 'nn.Embedding', (['num_chars', 'embed_dims'], {}), '(num_chars, embed_dims)\n', (395, 418), True, 'import torch.nn as nn\n'), ((481, 619), 'models.common_layers.CBHG', 'CBHG', ([], {'K': 'K', 'in_channels': 'cbhg_channels', 'channels': 'cbhg_channels', 'proj_channels': '[cbhg_channels, cbhg_channels]', 'num_highways': 'num_highways'}), '(K=K, in_channels=cbhg_channels, channels=cbhg_channels, proj_channels=\n [cbhg_channels, cbhg_channels], num_highways=num_highways)\n', (485, 619), False, 'from models.common_layers import CBHG\n'), ((966, 994), 'torch.nn.Linear', 'nn.Linear', (['in_dims', 'fc1_dims'], {}), '(in_dims, fc1_dims)\n', (975, 994), True, 'import torch.nn as nn\n'), ((1014, 1043), 'torch.nn.Linear', 'nn.Linear', (['fc1_dims', 'fc2_dims'], {}), '(fc1_dims, fc2_dims)\n', (1023, 1043), True, 'import torch.nn as nn\n'), ((1132, 1141), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (1138, 1141), True, 'import torch.nn.functional as F\n'), ((1154, 1198), 'torch.nn.functional.dropout', 'F.dropout', (['x', 'self.p'], {'training': 'self.training'}), '(x, self.p, training=self.training)\n', (1163, 1198), True, 'import torch.nn.functional as F\n'), ((1235, 1244), 'torch.nn.functional.relu', 'F.relu', (['x'], {}), '(x)\n', (1241, 1244), True, 'import torch.nn.functional as F\n'), ((1257, 1301), 'torch.nn.functional.dropout', 'F.dropout', (['x', 'self.p'], {'training': 'self.training'}), '(x, self.p, training=self.training)\n', (1266, 1301), True, 'import torch.nn.functional as F\n'), ((1428, 1471), 'torch.nn.Linear', 'nn.Linear', (['attn_dims', 'attn_dims'], {'bias': '(False)'}), '(attn_dims, attn_dims, bias=False)\n', (1437, 1471), True, 'import torch.nn as nn\n'), ((1489, 1524), 'torch.nn.Linear', 'nn.Linear', (['attn_dims', '(1)'], {'bias': '(False)'}), '(attn_dims, 1, bias=False)\n', (1498, 1524), True, 'import torch.nn as nn\n'), ((1812, 1831), 'torch.nn.functional.softmax', 'F.softmax', (['u'], {'dim': '(1)'}), '(u, dim=1)\n', (1821, 1831), True, 'import torch.nn.functional as F\n'), ((2004, 2099), 'torch.nn.Conv1d', 'nn.Conv1d', (['(2)', 'filters'], {'padding': '((kernel_size - 1) // 2)', 'kernel_size': 'kernel_size', 'bias': '(False)'}), '(2, filters, padding=(kernel_size - 1) // 2, kernel_size=\n kernel_size, bias=False)\n', (2013, 2099), True, 'import torch.nn as nn\n'), ((2112, 2151), 'torch.nn.Linear', 'nn.Linear', (['filters', 'attn_dim'], {'bias': '(True)'}), '(filters, attn_dim, bias=True)\n', (2121, 2151), True, 'import torch.nn as nn\n'), ((2169, 2209), 'torch.nn.Linear', 'nn.Linear', (['attn_dim', 'attn_dim'], {'bias': '(True)'}), '(attn_dim, attn_dim, bias=True)\n', (2178, 2209), True, 'import torch.nn as nn\n'), ((2227, 2261), 'torch.nn.Linear', 'nn.Linear', (['attn_dim', '(1)'], {'bias': '(False)'}), '(attn_dim, 1, bias=False)\n', (2236, 2261), True, 'import torch.nn as nn\n'), ((2521, 2553), 'torch.zeros', 'torch.zeros', (['b', 't'], {'device': 'device'}), '(b, t, device=device)\n', (2532, 2553), False, 'import torch\n'), ((2579, 2611), 'torch.zeros', 'torch.zeros', (['b', 't'], {'device': 'device'}), '(b, t, device=device)\n', (2590, 2611), False, 'import torch\n'), ((3176, 3195), 'torch.nn.functional.softmax', 'F.softmax', (['u'], {'dim': '(1)'}), '(u, dim=1)\n', (3185, 3195), True, 'import torch.nn.functional as F\n'), ((3795, 3853), 'torch.nn.GRUCell', 'nn.GRUCell', (['(decoder_dims + decoder_dims // 2)', 'decoder_dims'], {}), '(decoder_dims + decoder_dims // 2, decoder_dims)\n', (3805, 3853), True, 'import torch.nn as nn\n'), ((3879, 3917), 'torch.nn.Linear', 'nn.Linear', (['(2 * decoder_dims)', 'lstm_dims'], {}), '(2 * decoder_dims, lstm_dims)\n', (3888, 3917), True, 'import torch.nn as nn\n'), ((3942, 3975), 'torch.nn.LSTMCell', 'nn.LSTMCell', (['lstm_dims', 'lstm_dims'], {}), '(lstm_dims, lstm_dims)\n', (3953, 3975), True, 'import torch.nn as nn\n'), ((4000, 4033), 'torch.nn.LSTMCell', 'nn.LSTMCell', (['lstm_dims', 'lstm_dims'], {}), '(lstm_dims, lstm_dims)\n', (4011, 4033), True, 'import torch.nn as nn\n'), ((4058, 4111), 'torch.nn.Linear', 'nn.Linear', (['lstm_dims', '(n_mels * self.max_r)'], {'bias': '(False)'}), '(lstm_dims, n_mels * self.max_r, bias=False)\n', (4067, 4111), True, 'import torch.nn as nn\n'), ((4875, 4919), 'torch.cat', 'torch.cat', (['[context_vec, prenet_out]'], {'dim': '(-1)'}), '([context_vec, prenet_out], dim=-1)\n', (4884, 4919), False, 'import torch\n'), ((5317, 5361), 'torch.cat', 'torch.cat', (['[context_vec, attn_hidden]'], {'dim': '(1)'}), '([context_vec, attn_hidden], dim=1)\n', (5326, 5361), False, 'import torch\n'), ((7073, 7122), 'torch.nn.Linear', 'nn.Linear', (['decoder_dims', 'decoder_dims'], {'bias': '(False)'}), '(decoder_dims, decoder_dims, bias=False)\n', (7082, 7122), True, 'import torch.nn as nn\n'), ((7210, 7272), 'models.common_layers.CBHG', 'CBHG', (['postnet_k', 'n_mels', 'postnet_dims', '[256, 80]', 'num_highways'], {}), '(postnet_k, n_mels, postnet_dims, [256, 80], num_highways)\n', (7214, 7272), False, 'from models.common_layers import CBHG\n'), ((7298, 7345), 'torch.nn.Linear', 'nn.Linear', (['(postnet_dims * 2)', 'n_mels'], {'bias': '(False)'}), '(postnet_dims * 2, n_mels, bias=False)\n', (7307, 7345), True, 'import torch.nn as nn\n'), ((8083, 8140), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.decoder_dims'], {'device': 'device'}), '(batch_size, self.decoder_dims, device=device)\n', (8094, 8140), False, 'import torch\n'), ((8163, 8217), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.lstm_dims'], {'device': 'device'}), '(batch_size, self.lstm_dims, device=device)\n', (8174, 8217), False, 'import torch\n'), ((8240, 8294), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.lstm_dims'], {'device': 'device'}), '(batch_size, self.lstm_dims, device=device)\n', (8251, 8294), False, 'import torch\n'), ((8442, 8496), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.lstm_dims'], {'device': 'device'}), '(batch_size, self.lstm_dims, device=device)\n', (8453, 8496), False, 'import torch\n'), ((8517, 8571), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.lstm_dims'], {'device': 'device'}), '(batch_size, self.lstm_dims, device=device)\n', (8528, 8571), False, 'import torch\n'), ((8684, 8735), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.n_mels'], {'device': 'device'}), '(batch_size, self.n_mels, device=device)\n', (8695, 8735), False, 'import torch\n'), ((8800, 8857), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.decoder_dims'], {'device': 'device'}), '(batch_size, self.decoder_dims, device=device)\n', (8811, 8857), False, 'import torch\n'), ((9648, 9677), 'torch.cat', 'torch.cat', (['mel_outputs'], {'dim': '(2)'}), '(mel_outputs, dim=2)\n', (9657, 9677), False, 'import torch\n'), ((9915, 9940), 'torch.cat', 'torch.cat', (['attn_scores', '(1)'], {}), '(attn_scores, 1)\n', (9924, 9940), False, 'import torch\n'), ((10378, 10435), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.decoder_dims'], {'device': 'device'}), '(batch_size, self.decoder_dims, device=device)\n', (10389, 10435), False, 'import torch\n'), ((10458, 10512), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.lstm_dims'], {'device': 'device'}), '(batch_size, self.lstm_dims, device=device)\n', (10469, 10512), False, 'import torch\n'), ((10535, 10589), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.lstm_dims'], {'device': 'device'}), '(batch_size, self.lstm_dims, device=device)\n', (10546, 10589), False, 'import torch\n'), ((10758, 10812), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.lstm_dims'], {'device': 'device'}), '(batch_size, self.lstm_dims, device=device)\n', (10769, 10812), False, 'import torch\n'), ((10833, 10887), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.lstm_dims'], {'device': 'device'}), '(batch_size, self.lstm_dims, device=device)\n', (10844, 10887), False, 'import torch\n'), ((11007, 11058), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.n_mels'], {'device': 'device'}), '(batch_size, self.n_mels, device=device)\n', (11018, 11058), False, 'import torch\n'), ((11123, 11180), 'torch.zeros', 'torch.zeros', (['batch_size', 'self.decoder_dims'], {'device': 'device'}), '(batch_size, self.decoder_dims, device=device)\n', (11134, 11180), False, 'import torch\n'), ((12101, 12130), 'torch.cat', 'torch.cat', (['mel_outputs'], {'dim': '(2)'}), '(mel_outputs, dim=2)\n', (12110, 12130), False, 'import torch\n'), ((12448, 12473), 'torch.cat', 'torch.cat', (['attn_scores', '(1)'], {}), '(attn_scores, 1)\n', (12457, 12473), False, 'import torch\n'), ((1752, 1793), 'torch.tanh', 'torch.tanh', (['(encoder_seq_proj + query_proj)'], {}), '(encoder_seq_proj + query_proj)\n', (1762, 1793), False, 'import torch\n'), ((2962, 3024), 'torch.tanh', 'torch.tanh', (['(processed_query + encoder_seq_proj + processed_loc)'], {}), '(processed_query + encoder_seq_proj + processed_loc)\n', (2972, 3024), False, 'import torch\n'), ((3629, 3661), 'torch.tensor', 'torch.tensor', (['(1)'], {'dtype': 'torch.int'}), '(1, dtype=torch.int)\n', (3641, 3661), False, 'import torch\n'), ((7411, 7443), 'torch.zeros', 'torch.zeros', (['(1)'], {'dtype': 'torch.long'}), '(1, dtype=torch.long)\n', (7422, 7443), False, 'import torch\n'), ((7492, 7541), 'torch.tensor', 'torch.tensor', (['stop_threshold'], {'dtype': 'torch.float32'}), '(stop_threshold, dtype=torch.float32)\n', (7504, 7541), False, 'import torch\n'), ((12692, 12718), 'torch.nn.init.xavier_uniform_', 'nn.init.xavier_uniform_', (['p'], {}), '(p)\n', (12715, 12718), True, 'import torch.nn as nn\n'), ((13365, 13384), 'torch.device', 'torch.device', (['"""cpu"""'], {}), "('cpu')\n", (13377, 13384), False, 'import torch\n')]
|
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
from airflow.exceptions import AirflowException
from botocore.config import Config
import boto3
import json
import logging
import base64
class ExecuteLambdaOperator(BaseOperator):
@apply_defaults
def __init__(
self,
lambda_function_name,
airflow_context_to_lambda_payload=None,
            additional_payload=None,
*args,
**kwargs
):
"""
Trigger AWS Lambda function
:param airflow_context_to_lambda_payload: function extracting fields from Airflow context to Lambda payload
:param additional_payload: additional parameters for Lambda payload
:param lambda_function_name: name of Lambda function
"""
super(ExecuteLambdaOperator, self).__init__(*args, **kwargs)
self.airflow_context_to_lambda_payload = airflow_context_to_lambda_payload
        self.additional_payload = additional_payload or {}
self.lambda_function_name = lambda_function_name
self.lambda_client = boto3.client(
'lambda', config=Config(read_timeout=300, connect_timeout=300))
def execute(self, context):
request_payload = self.__create_lambda_payload(context)
logging.info('Executing AWS Lambda {} with payload {}'.format(
self.lambda_function_name, request_payload))
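        # Invoke synchronously (RequestResponse); LogType='Tail' also returns the
        # base64-encoded tail of the function's execution log.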
response = self.lambda_client.invoke(
FunctionName=self.lambda_function_name,
InvocationType='RequestResponse',
Payload=json.dumps(request_payload),
LogType='Tail'
)
response_log_tail = base64.b64decode(response.get('LogResult'))
response_payload = json.loads(response.get('Payload').read())
response_code = response.get('StatusCode')
log_msg_logs = 'Tail of logs from AWS Lambda:\n{logs}'.format(
logs=response_log_tail)
log_msg_payload = 'Response payload from AWS Lambda:\n{resp}'.format(
resp=response_payload)
if response_code == 200:
logging.info(log_msg_logs)
logging.info(log_msg_payload)
return response_code
else:
logging.error(log_msg_logs)
logging.error(log_msg_payload)
raise AirflowException('Lambda invoke failed')
def __create_lambda_payload(self, context):
payload = self.airflow_context_to_lambda_payload(
context) if self.airflow_context_to_lambda_payload is not None else {}
payload.update(self.additional_payload)
return payload
|
[
"logging.error",
"json.dumps",
"botocore.config.Config",
"logging.info",
"airflow.exceptions.AirflowException"
] |
[((2102, 2128), 'logging.info', 'logging.info', (['log_msg_logs'], {}), '(log_msg_logs)\n', (2114, 2128), False, 'import logging\n'), ((2141, 2170), 'logging.info', 'logging.info', (['log_msg_payload'], {}), '(log_msg_payload)\n', (2153, 2170), False, 'import logging\n'), ((2230, 2257), 'logging.error', 'logging.error', (['log_msg_logs'], {}), '(log_msg_logs)\n', (2243, 2257), False, 'import logging\n'), ((2270, 2300), 'logging.error', 'logging.error', (['log_msg_payload'], {}), '(log_msg_payload)\n', (2283, 2300), False, 'import logging\n'), ((2319, 2359), 'airflow.exceptions.AirflowException', 'AirflowException', (['"""Lambda invoke failed"""'], {}), "('Lambda invoke failed')\n", (2335, 2359), False, 'from airflow.exceptions import AirflowException\n'), ((1137, 1182), 'botocore.config.Config', 'Config', ([], {'read_timeout': '(300)', 'connect_timeout': '(300)'}), '(read_timeout=300, connect_timeout=300)\n', (1143, 1182), False, 'from botocore.config import Config\n'), ((1575, 1602), 'json.dumps', 'json.dumps', (['request_payload'], {}), '(request_payload)\n', (1585, 1602), False, 'import json\n')]
|
import beluga
import logging
ocp = beluga.Problem('hanging_chain')
ocp.independent('s', 'ft')
ocp.state('x', 'cos(theta)', 'ft')
ocp.state('y', 'sin(theta)', 'ft')
ocp.control('theta', 'rad')
ocp.constant('y_floor', -10, 'ft')
ocp.constant('x_0', 0, 'ft')
ocp.constant('y_0', 0, 'ft')
ocp.constant('s_0', 0, 'ft')
ocp.constant('x_f', 1, 'ft')
ocp.constant('y_f', 0, 'ft')
ocp.constant('s_f', 1.1, 'ft')
ocp.constant('eps', 0.001, '1')
ocp.path_cost('y', 'ft')
ocp.initial_constraint('x - x_0', 'ft')
ocp.initial_constraint('y - y_0', 'ft')
ocp.initial_constraint('s', 'ft')
ocp.terminal_constraint('x - x_f', 'ft')
ocp.terminal_constraint('y - y_f', 'ft')
ocp.terminal_constraint('s - s_f', 'ft')
ocp.path_constraint('y', 'ft', 'y_floor', '-y_floor', 'eps', 'utm')
ocp.scale(ft='s', rad=1)
bvp_solver = beluga.bvp_algorithm('spbvp')
guess_maker = beluga.guess_generator('auto', start=[0, 0], costate_guess=-0.1, time_integrate=1.1,
control_guess=[0], use_control_guess=True)
continuation_steps = beluga.init_continuation()
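# Continuation strategy: first solve with the nominal chain, then lengthen the
# chain to s_f = 3, then raise the floor in stages from -10 up to -1.25.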
continuation_steps.add_step('bisection') \
.num_cases(6) \
.const('x_f', 1) \
.const('y_f', 0) \
.const('s_f', 1.1)
continuation_steps.add_step('bisection') \
.num_cases(6) \
.const('x_f', 1) \
.const('y_f', 0) \
.const('s_f', 3)
continuation_steps.add_step('bisection') \
.num_cases(21) \
.const('y_floor', -1.4)
continuation_steps.add_step('bisection') \
.num_cases(41) \
.const('y_floor', -1.25)
beluga.add_logger(display_level=logging.INFO)
sol_set = beluga.solve(ocp=ocp, method='traditional', bvp_algorithm=bvp_solver, steps=continuation_steps,
guess_generator=guess_maker, autoscale=True, initial_helper=True, save_sols='chain.beluga')
|
[
"beluga.solve",
"beluga.bvp_algorithm",
"beluga.init_continuation",
"beluga.add_logger",
"beluga.guess_generator",
"beluga.Problem"
] |
[((36, 67), 'beluga.Problem', 'beluga.Problem', (['"""hanging_chain"""'], {}), "('hanging_chain')\n", (50, 67), False, 'import beluga\n'), ((817, 846), 'beluga.bvp_algorithm', 'beluga.bvp_algorithm', (['"""spbvp"""'], {}), "('spbvp')\n", (837, 846), False, 'import beluga\n'), ((862, 993), 'beluga.guess_generator', 'beluga.guess_generator', (['"""auto"""'], {'start': '[0, 0]', 'costate_guess': '(-0.1)', 'time_integrate': '(1.1)', 'control_guess': '[0]', 'use_control_guess': '(True)'}), "('auto', start=[0, 0], costate_guess=-0.1,\n time_integrate=1.1, control_guess=[0], use_control_guess=True)\n", (884, 993), False, 'import beluga\n'), ((1049, 1075), 'beluga.init_continuation', 'beluga.init_continuation', ([], {}), '()\n', (1073, 1075), False, 'import beluga\n'), ((1672, 1717), 'beluga.add_logger', 'beluga.add_logger', ([], {'display_level': 'logging.INFO'}), '(display_level=logging.INFO)\n', (1689, 1717), False, 'import beluga\n'), ((1729, 1925), 'beluga.solve', 'beluga.solve', ([], {'ocp': 'ocp', 'method': '"""traditional"""', 'bvp_algorithm': 'bvp_solver', 'steps': 'continuation_steps', 'guess_generator': 'guess_maker', 'autoscale': '(True)', 'initial_helper': '(True)', 'save_sols': '"""chain.beluga"""'}), "(ocp=ocp, method='traditional', bvp_algorithm=bvp_solver, steps\n =continuation_steps, guess_generator=guess_maker, autoscale=True,\n initial_helper=True, save_sols='chain.beluga')\n", (1741, 1925), False, 'import beluga\n')]
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
# Export this package's modules as members:
from ._enums import *
from .get_peer_asn import *
from .get_peering import *
from .get_peering_service import *
from .get_prefix import *
from .get_registered_asn import *
from .get_registered_prefix import *
from .peer_asn import *
from .peering import *
from .peering_service import *
from .prefix import *
from .registered_asn import *
from .registered_prefix import *
from ._inputs import *
from . import outputs
# Make subpackages available:
from . import (
latest,
v20190801preview,
v20190901preview,
v20200101preview,
v20200401,
v20201001,
)
def _register_module():
import pulumi
from .. import _utilities
class Module(pulumi.runtime.ResourceModule):
_version = _utilities.get_semver_version()
def version(self):
return Module._version
def construct(self, name: str, typ: str, urn: str) -> pulumi.Resource:
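            # Resolve a fully-qualified resource type token back to its Python
            # class when the Pulumi runtime rehydrates a resource from its URN.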
if typ == "azure-nextgen:peering:PeerAsn":
return PeerAsn(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-nextgen:peering:Peering":
return Peering(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-nextgen:peering:PeeringService":
return PeeringService(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-nextgen:peering:Prefix":
return Prefix(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-nextgen:peering:RegisteredAsn":
return RegisteredAsn(name, pulumi.ResourceOptions(urn=urn))
elif typ == "azure-nextgen:peering:RegisteredPrefix":
return RegisteredPrefix(name, pulumi.ResourceOptions(urn=urn))
else:
raise Exception(f"unknown resource type {typ}")
_module_instance = Module()
pulumi.runtime.register_resource_module("azure-nextgen", "peering", _module_instance)
_register_module()
|
[
"pulumi.ResourceOptions",
"pulumi.runtime.register_resource_module"
] |
[((2027, 2116), 'pulumi.runtime.register_resource_module', 'pulumi.runtime.register_resource_module', (['"""azure-nextgen"""', '"""peering"""', '_module_instance'], {}), "('azure-nextgen', 'peering',\n _module_instance)\n", (2066, 2116), False, 'import pulumi\n'), ((1197, 1228), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1219, 1228), False, 'import pulumi\n'), ((1324, 1355), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1346, 1355), False, 'import pulumi\n'), ((1465, 1496), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1487, 1496), False, 'import pulumi\n'), ((1590, 1621), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1612, 1621), False, 'import pulumi\n'), ((1729, 1760), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1751, 1760), False, 'import pulumi\n'), ((1874, 1905), 'pulumi.ResourceOptions', 'pulumi.ResourceOptions', ([], {'urn': 'urn'}), '(urn=urn)\n', (1896, 1905), False, 'import pulumi\n')]
|
import unittest
from caldera.app.logic.logic import Term, convert_to_action
from caldera.app.operation.operation_steps import all_steps
import itertools
class TestActionConversion(unittest.TestCase):
# We want to detect situations such as this:
# + has_property(user_id, CREDENTIAL_G, USER_ID_18)
# + has_property(user_id, CREDENTIAL_G, USER_ID_19)
i = 0
@classmethod
def unique_count(cls):
cls.i += 1
return cls.i
def test_duplicate_conditions(self):
for step in all_steps:
new_action = convert_to_action(step, self.unique_count)
terms_only = [x for x in new_action.requirements + new_action.add if isinstance(x, Term)]
for statement, other_statement in itertools.combinations(terms_only, 2):
# We only care about has_property statements
if statement.predicate == "has_property" and other_statement.predicate == "has_property" and \
other_statement.literals[0] == statement.literals[0] and \
other_statement.literals[1] == statement.literals[1]:
self.assertEqual(other_statement.literals[2], statement.literals[2])
# We want to detect situations where an object is defined in the post-conditions but a has_property predicate
# in the preconditions references a field of that object. This should never happen
def test_object_defined_in_post_conditions(self):
for step in all_steps:
new_action = convert_to_action(step, self.unique_count)
objects_in_post = []
for term in new_action.add:
if isinstance(term, Term) and len(term.literals) == 1:
objects_in_post.append(term.literals[0])
for term in new_action.requirements:
if isinstance(term, Term) and term.predicate == "has_property":
self.assertNotIn(term.literals[1], objects_in_post)
|
[
"itertools.combinations",
"caldera.app.logic.logic.convert_to_action"
] |
[((572, 614), 'caldera.app.logic.logic.convert_to_action', 'convert_to_action', (['step', 'self.unique_count'], {}), '(step, self.unique_count)\n', (589, 614), False, 'from caldera.app.logic.logic import Term, convert_to_action\n'), ((763, 800), 'itertools.combinations', 'itertools.combinations', (['terms_only', '(2)'], {}), '(terms_only, 2)\n', (785, 800), False, 'import itertools\n'), ((1538, 1580), 'caldera.app.logic.logic.convert_to_action', 'convert_to_action', (['step', 'self.unique_count'], {}), '(step, self.unique_count)\n', (1555, 1580), False, 'from caldera.app.logic.logic import Term, convert_to_action\n')]
|
"Unit tests for Constraint, MonomialEquality and SignomialInequality"
import unittest
import numpy as np
from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable
from gpkit.nomials import SignomialInequality, PosynomialInequality
from gpkit.nomials import MonomialEquality
from gpkit import Model, ConstraintSet
from gpkit.constraints.costed import CostedConstraintSet
from gpkit.constraints.tight import Tight
from gpkit.constraints.loose import Loose
from gpkit.tests.helpers import run_tests
from gpkit.exceptions import (InvalidGPConstraint, PrimalInfeasible,
DimensionalityError)
from gpkit.constraints.relax import (ConstraintsRelaxed, ConstantsRelaxed,
ConstraintsRelaxedEqually)
from gpkit.constraints.bounded import Bounded
from gpkit.globals import NamedVariables
import gpkit
class TestConstraint(unittest.TestCase):
"Tests for Constraint class"
def test_uninited_element(self):
x = Variable("x")
class SelfPass(Model):
"A model which contains itself!"
def setup(self):
ConstraintSet([self, x <= 1])
self.assertRaises(ValueError, SelfPass)
def test_bad_elements(self):
x = Variable("x")
with self.assertRaises(ValueError):
_ = Model(x, [x == "A"])
with self.assertRaises(ValueError):
_ = Model(x, [x >= 1, x == "A"])
with self.assertRaises(ValueError):
_ = Model(x, [x >= 1, x == "A", x >= 1, ])
with self.assertRaises(ValueError):
_ = Model(x, [x == "A", x >= 1])
v = VectorVariable(2, "v")
with self.assertRaises(ValueError):
_ = Model(x, [v == "A"])
with self.assertRaises(TypeError):
_ = Model(x, [v <= ["A", "B"]])
with self.assertRaises(TypeError):
_ = Model(x, [v >= ["A", "B"]])
def test_evalfn(self):
x = Variable("x")
x2 = Variable("x^2", evalfn=lambda solv: solv[x]**2)
m = Model(x, [x >= 2])
m.unique_varkeys = set([x2.key])
sol = m.solve(verbosity=0)
self.assertAlmostEqual(sol(x2), sol(x)**2)
def test_relax_list(self):
x = Variable("x")
x_max = Variable("x_max", 1)
x_min = Variable("x_min", 2)
constraints = [x_min <= x, x <= x_max]
ConstraintsRelaxed(constraints)
ConstantsRelaxed(constraints)
ConstraintsRelaxedEqually(constraints)
def test_relax_linked(self):
x = Variable("x")
x_max = Variable("x_max", 1)
x_min = Variable("x_min", lambda c: 2*c[x_max])
zero = Variable("zero", lambda c: 0*c[x_max])
constraints = ConstraintSet([x_min + zero <= x, x + zero <= x_max])
_ = ConstantsRelaxed(constraints)
NamedVariables.reset_modelnumbers()
include_min = ConstantsRelaxed(constraints, include_only=["x_min"])
NamedVariables.reset_modelnumbers()
exclude_max = ConstantsRelaxed(constraints, exclude=["x_max"])
self.assertEqual(str(include_min), str(exclude_max))
def test_equality_relaxation(self):
x = Variable("x")
m = Model(x, [x == 3, x == 4])
rc = ConstraintsRelaxed(m)
m2 = Model(rc.relaxvars.prod() * x**0.01, rc)
self.assertAlmostEqual(m2.solve(verbosity=0)(x), 3, places=3)
def test_constraintget(self):
x = Variable("x")
x_ = Variable("x", lineage=[("_", 0)])
xv = VectorVariable(2, "x")
xv_ = VectorVariable(2, "x", lineage=[("_", 0)])
self.assertEqual(Model(x, [x >= 1])["x"], x)
with self.assertRaises(ValueError):
_ = Model(x, [x >= 1, x_ >= 1])["x"]
with self.assertRaises(ValueError):
_ = Model(x, [x >= 1, xv >= 1])["x"]
self.assertTrue(all(Model(xv.prod(), [xv >= 1])["x"] == xv))
with self.assertRaises(ValueError):
_ = Model(xv.prod(), [xv >= 1, xv_ >= 1])["x"]
with self.assertRaises(ValueError):
_ = Model(xv.prod(), [xv >= 1, x_ >= 1])["x"]
def test_additive_scalar(self):
"Make sure additive scalars simplify properly"
x = Variable('x')
c1 = 1 >= 10*x
c2 = 1 >= 5*x + 0.5
self.assertEqual(type(c1), PosynomialInequality)
self.assertEqual(type(c2), PosynomialInequality)
c1hmap, = c1.as_hmapslt1({})
c2hmap, = c2.as_hmapslt1({})
self.assertEqual(c1hmap, c2hmap)
def test_additive_scalar_gt1(self):
"1 can't be greater than (1 + something positive)"
x = Variable('x')
def constr():
"method that should raise a ValueError"
return 1 >= 5*x + 1.1
self.assertRaises(PrimalInfeasible, constr)
def test_init(self):
"Test Constraint __init__"
x = Variable('x')
y = Variable('y')
c = PosynomialInequality(x, ">=", y**2)
self.assertEqual(c.as_hmapslt1({}), [(y**2/x).hmap])
self.assertEqual(c.left, x)
self.assertEqual(c.right, y**2)
c = PosynomialInequality(x, "<=", y**2)
self.assertEqual(c.as_hmapslt1({}), [(x/y**2).hmap])
self.assertEqual(c.left, x)
self.assertEqual(c.right, y**2)
self.assertEqual(type((1 >= x).latex()), str)
def test_oper_overload(self):
"Test Constraint initialization by operator overloading"
x = Variable('x')
y = Variable('y')
c = (y >= 1 + x**2)
self.assertEqual(c.as_hmapslt1({}), [(1/y + x**2/y).hmap])
self.assertEqual(c.left, y)
self.assertEqual(c.right, 1 + x**2)
# same constraint, switched operator direction
c2 = (1 + x**2 <= y) # same as c
self.assertEqual(c2.as_hmapslt1({}), c.as_hmapslt1({}))
def test_sub_tol(self):
" Test PosyIneq feasibility tolerance under substitutions"
x = Variable('x')
y = Variable('y')
z = Variable('z')
PosynomialInequality.feastol = 1e-5
m = Model(z, [x == z, x >= y], {x: 1, y: 1.0001})
self.assertRaises(PrimalInfeasible, m.solve, verbosity=0)
PosynomialInequality.feastol = 1e-3
self.assertEqual(m.substitutions('x'), m.solve(verbosity=0)('x'))
class TestCostedConstraint(unittest.TestCase):
"Tests for Costed Constraint class"
def test_vector_cost(self):
x = VectorVariable(2, "x")
self.assertRaises(ValueError, CostedConstraintSet, x, [])
_ = CostedConstraintSet(np.array(x[0]), [])
class TestMonomialEquality(unittest.TestCase):
"Test monomial equality constraint class"
def test_init(self):
"Test initialization via both operator overloading and __init__"
x = Variable('x')
y = Variable('y')
mono = y**2/x
# operator overloading
mec = (x == y**2)
# __init__
mec2 = MonomialEquality(x, y**2)
self.assertTrue(mono.hmap in mec.as_hmapslt1({}))
self.assertTrue(mono.hmap in mec2.as_hmapslt1({}))
x = Variable("x", "ft")
y = Variable("y")
if gpkit.units:
self.assertRaises(DimensionalityError, MonomialEquality, x, y)
self.assertRaises(DimensionalityError, MonomialEquality, y, x)
def test_vector(self):
"Monomial Equalities with VectorVariables"
x = VectorVariable(3, "x")
self.assertFalse(x == 3)
self.assertTrue(x == x) # pylint: disable=comparison-with-itself
def test_inheritance(self):
"Make sure MonomialEquality inherits from the right things"
F = Variable('F')
m = Variable('m')
a = Variable('a')
mec = (F == m*a)
self.assertTrue(isinstance(mec, MonomialEquality))
def test_non_monomial(self):
"Try to initialize a MonomialEquality with non-monomial args"
x = Variable('x')
y = Variable('y')
def constr():
"method that should raise a TypeError"
MonomialEquality(x*y, x+y)
self.assertRaises(TypeError, constr)
def test_str(self):
"Test that MonomialEquality.__str__ returns a string"
x = Variable('x')
y = Variable('y')
mec = (x == y)
self.assertEqual(type(mec.str_without()), str)
def test_united_dimensionless(self):
"Check dimensionless unit-ed variables work"
x = Variable('x')
y = Variable('y', 'hr/day')
c = MonomialEquality(x, y)
self.assertTrue(isinstance(c, MonomialEquality))
class TestSignomialInequality(unittest.TestCase):
"Test Signomial constraints"
def test_becomes_posy_sensitivities(self):
# pylint: disable=invalid-name
# model from #1165
ujet = Variable("ujet")
PK = Variable("PK")
Dp = Variable("Dp", 0.662)
fBLI = Variable("fBLI", 0.4)
fsurf = Variable("fsurf", 0.836)
mdot = Variable("mdot", 1/0.7376)
with SignomialsEnabled():
m = Model(PK, [mdot*ujet + fBLI*Dp >= 1,
PK >= 0.5*mdot*ujet*(2 + ujet) + fBLI*fsurf*Dp])
var_senss = m.solve(verbosity=0)["sensitivities"]["variables"]
self.assertAlmostEqual(var_senss[Dp], -0.16, 2)
self.assertAlmostEqual(var_senss[fBLI], -0.16, 2)
self.assertAlmostEqual(var_senss[fsurf], 0.19, 2)
self.assertAlmostEqual(var_senss[mdot], -0.17, 2)
# Linked variable
Dp = Variable("Dp", 0.662)
mDp = Variable("-Dp", lambda c: -c[Dp])
fBLI = Variable("fBLI", 0.4)
fsurf = Variable("fsurf", 0.836)
mdot = Variable("mdot", 1/0.7376)
m = Model(PK, [mdot*ujet >= 1 + fBLI*mDp,
PK >= 0.5*mdot*ujet*(2 + ujet) + fBLI*fsurf*Dp])
var_senss = m.solve(verbosity=0)["sensitivities"]["variables"]
self.assertAlmostEqual(var_senss[Dp], -0.16, 2)
self.assertAlmostEqual(var_senss[fBLI], -0.16, 2)
self.assertAlmostEqual(var_senss[fsurf], 0.19, 2)
self.assertAlmostEqual(var_senss[mdot], -0.17, 2)
# fixed negative variable
Dp = Variable("Dp", 0.662)
mDp = Variable("-Dp", -0.662)
fBLI = Variable("fBLI", 0.4)
fsurf = Variable("fsurf", 0.836)
mdot = Variable("mdot", 1/0.7376)
m = Model(PK, [mdot*ujet >= 1 + fBLI*mDp,
PK >= 0.5*mdot*ujet*(2 + ujet) + fBLI*fsurf*Dp])
var_senss = m.solve(verbosity=0)["sensitivities"]["variables"]
self.assertAlmostEqual(var_senss[Dp] + var_senss[mDp], -0.16, 2)
self.assertAlmostEqual(var_senss[fBLI], -0.16, 2)
self.assertAlmostEqual(var_senss[fsurf], 0.19, 2)
self.assertAlmostEqual(var_senss[mdot], -0.17, 2)
def test_init(self):
"Test initialization and types"
D = Variable('D', units="N")
x1, x2, x3 = (Variable("x_%s" % i, units="N") for i in range(3))
with self.assertRaises(TypeError):
sc = (D >= x1 + x2 - x3)
with SignomialsEnabled():
sc = (D >= x1 + x2 - x3)
self.assertTrue(isinstance(sc, SignomialInequality))
self.assertFalse(isinstance(sc, Posynomial))
def test_posyslt1(self):
x = Variable("x")
y = Variable("y")
with SignomialsEnabled():
sc = (x + y >= x*y)
# make sure that the error type doesn't change on our users
with self.assertRaises(InvalidGPConstraint):
_ = sc.as_hmapslt1({})
class TestLoose(unittest.TestCase):
"Test loose constraint set"
def test_raiseerror(self):
x = Variable('x')
x_min = Variable('x_{min}', 2)
m = Model(x, [Loose([x >= x_min]),
x >= 1])
Loose.raiseerror = True
self.assertRaises(RuntimeWarning, m.solve, verbosity=0)
Loose.raiseerror = False
def test_posyconstr_in_gp(self):
"Tests loose constraint set with solve()"
x = Variable('x')
x_min = Variable('x_{min}', 2)
m = Model(x, [Loose([x >= x_min]),
x >= 1])
sol = m.solve(verbosity=0)
warndata = sol["warnings"]["Unexpectedly Tight Constraints"][0][1]
self.assertIs(warndata[-1], m[0][0])
self.assertAlmostEqual(warndata[0], +1, 3)
m.substitutions[x_min] = 0.5
self.assertAlmostEqual(m.solve(verbosity=0)["cost"], 1)
def test_posyconstr_in_sp(self):
x = Variable('x')
y = Variable('y')
x_min = Variable('x_min', 1)
y_min = Variable('y_min', 2)
with SignomialsEnabled():
sig_constraint = (x + y >= 3.5)
m = Model(x*y, [Loose([x >= y]),
x >= x_min, y >= y_min, sig_constraint])
sol = m.localsolve(verbosity=0)
warndata = sol["warnings"]["Unexpectedly Tight Constraints"][0][1]
self.assertIs(warndata[-1], m[0][0])
self.assertAlmostEqual(warndata[0], +1, 3)
m.substitutions[x_min] = 2
m.substitutions[y_min] = 1
self.assertAlmostEqual(m.localsolve(verbosity=0)["cost"], 2.5, 5)
class TestTight(unittest.TestCase):
"Test tight constraint set"
def test_posyconstr_in_gp(self):
"Tests tight constraint set with solve()"
x = Variable('x')
x_min = Variable('x_{min}', 2)
m = Model(x, [Tight([x >= 1]),
x >= x_min])
sol = m.solve(verbosity=0)
warndata = sol["warnings"]["Unexpectedly Loose Constraints"][0][1]
self.assertIs(warndata[-1], m[0][0])
self.assertAlmostEqual(warndata[0], 1, 3)
m.substitutions[x_min] = 0.5
self.assertAlmostEqual(m.solve(verbosity=0)["cost"], 1)
def test_posyconstr_in_sp(self):
x = Variable('x')
y = Variable('y')
with SignomialsEnabled():
sig_constraint = (x + y >= 0.1)
m = Model(x*y, [Tight([x >= y]),
x >= 2, y >= 1, sig_constraint])
sol = m.localsolve(verbosity=0)
warndata = sol["warnings"]["Unexpectedly Loose Constraints"][0][1]
self.assertIs(warndata[-1], m[0][0])
self.assertAlmostEqual(warndata[0], 1, 3)
m.pop(1)
self.assertAlmostEqual(m.localsolve(verbosity=0)["cost"], 1, 5)
def test_sigconstr_in_sp(self):
"Tests tight constraint set with localsolve()"
x = Variable('x')
y = Variable('y')
x_min = Variable('x_{min}', 2)
y_max = Variable('y_{max}', 0.5)
with SignomialsEnabled():
m = Model(x, [Tight([x + y >= 1]),
x >= x_min,
y <= y_max])
sol = m.localsolve(verbosity=0)
warndata = sol["warnings"]["Unexpectedly Loose Constraints"][0][1]
self.assertIs(warndata[-1], m[0][0])
self.assertGreater(warndata[0], 0.5)
m.substitutions[x_min] = 0.5
self.assertAlmostEqual(m.localsolve(verbosity=0)["cost"], 0.5, 5)
class TestBounded(unittest.TestCase):
"Test bounded constraint set"
def test_substitution_issue905(self):
x = Variable("x")
y = Variable("y")
m = Model(x, [x >= y], {"y": 1})
bm = Model(m.cost, Bounded(m))
sol = bm.solve(verbosity=0)
self.assertAlmostEqual(sol["cost"], 1.0)
bm = Model(m.cost, Bounded(m, lower=1e-10))
sol = bm.solve(verbosity=0)
self.assertAlmostEqual(sol["cost"], 1.0)
bm = Model(m.cost, Bounded(m, upper=1e10))
sol = bm.solve(verbosity=0)
self.assertAlmostEqual(sol["cost"], 1.0)
TESTS = [TestConstraint, TestMonomialEquality, TestSignomialInequality,
TestTight, TestLoose, TestBounded, TestCostedConstraint]
if __name__ == "__main__": # pragma: no cover
run_tests(TESTS)
|
[
"gpkit.VectorVariable",
"gpkit.constraints.loose.Loose",
"gpkit.ConstraintSet",
"gpkit.constraints.relax.ConstraintsRelaxedEqually",
"gpkit.globals.NamedVariables.reset_modelnumbers",
"gpkit.nomials.PosynomialInequality",
"gpkit.tests.helpers.run_tests",
"gpkit.constraints.relax.ConstantsRelaxed",
"gpkit.Model",
"gpkit.constraints.bounded.Bounded",
"gpkit.constraints.tight.Tight",
"numpy.array",
"gpkit.constraints.relax.ConstraintsRelaxed",
"gpkit.SignomialsEnabled",
"gpkit.Variable",
"gpkit.nomials.MonomialEquality"
] |
[((15753, 15769), 'gpkit.tests.helpers.run_tests', 'run_tests', (['TESTS'], {}), '(TESTS)\n', (15762, 15769), False, 'from gpkit.tests.helpers import run_tests\n'), ((997, 1010), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (1005, 1010), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((1258, 1271), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (1266, 1271), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((1642, 1664), 'gpkit.VectorVariable', 'VectorVariable', (['(2)', '"""v"""'], {}), "(2, 'v')\n", (1656, 1664), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((1960, 1973), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (1968, 1973), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((1987, 2036), 'gpkit.Variable', 'Variable', (['"""x^2"""'], {'evalfn': '(lambda solv: solv[x] ** 2)'}), "('x^2', evalfn=lambda solv: solv[x] ** 2)\n", (1995, 2036), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((2047, 2065), 'gpkit.Model', 'Model', (['x', '[x >= 2]'], {}), '(x, [x >= 2])\n', (2052, 2065), False, 'from gpkit import Model, ConstraintSet\n'), ((2237, 2250), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (2245, 2250), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((2267, 2287), 'gpkit.Variable', 'Variable', (['"""x_max"""', '(1)'], {}), "('x_max', 1)\n", (2275, 2287), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((2304, 2324), 'gpkit.Variable', 'Variable', (['"""x_min"""', '(2)'], {}), "('x_min', 2)\n", (2312, 2324), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((2380, 2411), 'gpkit.constraints.relax.ConstraintsRelaxed', 'ConstraintsRelaxed', (['constraints'], {}), '(constraints)\n', (2398, 2411), False, 'from gpkit.constraints.relax import ConstraintsRelaxed, ConstantsRelaxed, ConstraintsRelaxedEqually\n'), ((2420, 2449), 'gpkit.constraints.relax.ConstantsRelaxed', 'ConstantsRelaxed', (['constraints'], {}), '(constraints)\n', (2436, 2449), False, 'from gpkit.constraints.relax import ConstraintsRelaxed, ConstantsRelaxed, ConstraintsRelaxedEqually\n'), ((2458, 2496), 'gpkit.constraints.relax.ConstraintsRelaxedEqually', 'ConstraintsRelaxedEqually', (['constraints'], {}), '(constraints)\n', (2483, 2496), False, 'from gpkit.constraints.relax import ConstraintsRelaxed, ConstantsRelaxed, ConstraintsRelaxedEqually\n'), ((2543, 2556), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (2551, 2556), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((2573, 2593), 'gpkit.Variable', 'Variable', (['"""x_max"""', '(1)'], {}), "('x_max', 1)\n", (2581, 2593), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((2610, 2651), 'gpkit.Variable', 'Variable', (['"""x_min"""', '(lambda c: 2 * c[x_max])'], {}), "('x_min', lambda c: 2 * c[x_max])\n", (2618, 2651), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((2665, 2705), 'gpkit.Variable', 'Variable', (['"""zero"""', '(lambda c: 0 * c[x_max])'], {}), "('zero', lambda c: 0 * c[x_max])\n", (2673, 2705), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((2726, 2779), 'gpkit.ConstraintSet', 'ConstraintSet', (['[x_min + zero <= x, x + zero <= x_max]'], {}), '([x_min + zero <= x, x + zero <= x_max])\n', (2739, 2779), False, 'from gpkit import Model, ConstraintSet\n'), ((2792, 2821), 'gpkit.constraints.relax.ConstantsRelaxed', 'ConstantsRelaxed', (['constraints'], {}), '(constraints)\n', (2808, 2821), False, 'from gpkit.constraints.relax import ConstraintsRelaxed, ConstantsRelaxed, ConstraintsRelaxedEqually\n'), ((2830, 2865), 'gpkit.globals.NamedVariables.reset_modelnumbers', 'NamedVariables.reset_modelnumbers', ([], {}), '()\n', (2863, 2865), False, 'from gpkit.globals import NamedVariables\n'), ((2888, 2941), 'gpkit.constraints.relax.ConstantsRelaxed', 'ConstantsRelaxed', (['constraints'], {'include_only': "['x_min']"}), "(constraints, include_only=['x_min'])\n", (2904, 2941), False, 'from gpkit.constraints.relax import ConstraintsRelaxed, ConstantsRelaxed, ConstraintsRelaxedEqually\n'), ((2950, 2985), 'gpkit.globals.NamedVariables.reset_modelnumbers', 'NamedVariables.reset_modelnumbers', ([], {}), '()\n', (2983, 2985), False, 'from gpkit.globals import NamedVariables\n'), ((3008, 3056), 'gpkit.constraints.relax.ConstantsRelaxed', 'ConstantsRelaxed', (['constraints'], {'exclude': "['x_max']"}), "(constraints, exclude=['x_max'])\n", (3024, 3056), False, 'from gpkit.constraints.relax import ConstraintsRelaxed, ConstantsRelaxed, ConstraintsRelaxedEqually\n'), ((3171, 3184), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (3179, 3184), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((3197, 3223), 'gpkit.Model', 'Model', (['x', '[x == 3, x == 4]'], {}), '(x, [x == 3, x == 4])\n', (3202, 3223), False, 'from gpkit import Model, ConstraintSet\n'), ((3237, 3258), 'gpkit.constraints.relax.ConstraintsRelaxed', 'ConstraintsRelaxed', (['m'], {}), '(m)\n', (3255, 3258), False, 'from gpkit.constraints.relax import ConstraintsRelaxed, ConstantsRelaxed, ConstraintsRelaxedEqually\n'), ((3430, 3443), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (3438, 3443), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((3457, 3490), 'gpkit.Variable', 'Variable', (['"""x"""'], {'lineage': "[('_', 0)]"}), "('x', lineage=[('_', 0)])\n", (3465, 3490), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((3504, 3526), 'gpkit.VectorVariable', 'VectorVariable', (['(2)', '"""x"""'], {}), "(2, 'x')\n", (3518, 3526), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((3541, 3583), 'gpkit.VectorVariable', 'VectorVariable', (['(2)', '"""x"""'], {'lineage': "[('_', 0)]"}), "(2, 'x', lineage=[('_', 0)])\n", (3555, 3583), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((4201, 4214), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (4209, 4214), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((4607, 4620), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (4615, 4620), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((4855, 4868), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (4863, 4868), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((4881, 4894), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (4889, 4894), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((4907, 4944), 'gpkit.nomials.PosynomialInequality', 'PosynomialInequality', (['x', '""">="""', '(y ** 2)'], {}), "(x, '>=', y ** 2)\n", (4927, 4944), False, 'from gpkit.nomials import SignomialInequality, PosynomialInequality\n'), ((5092, 5129), 'gpkit.nomials.PosynomialInequality', 'PosynomialInequality', (['x', '"""<="""', '(y ** 2)'], {}), "(x, '<=', y ** 2)\n", (5112, 5129), False, 'from gpkit.nomials import SignomialInequality, PosynomialInequality\n'), ((5431, 5444), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (5439, 5444), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((5457, 5470), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (5465, 5470), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((5915, 5928), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (5923, 5928), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((5941, 5954), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (5949, 5954), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((5967, 5980), 'gpkit.Variable', 'Variable', (['"""z"""'], {}), "('z')\n", (5975, 5980), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((6037, 6082), 'gpkit.Model', 'Model', (['z', '[x == z, x >= y]', '{x: 1, y: 1.0001}'], {}), '(z, [x == z, x >= y], {x: 1, y: 1.0001})\n', (6042, 6082), False, 'from gpkit import Model, ConstraintSet\n'), ((6400, 6422), 'gpkit.VectorVariable', 'VectorVariable', (['(2)', '"""x"""'], {}), "(2, 'x')\n", (6414, 6422), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((6746, 6759), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (6754, 6759), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((6772, 6785), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (6780, 6785), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((6899, 6926), 'gpkit.nomials.MonomialEquality', 'MonomialEquality', (['x', '(y ** 2)'], {}), '(x, y ** 2)\n', (6915, 6926), False, 'from gpkit.nomials import MonomialEquality\n'), ((7054, 7073), 'gpkit.Variable', 'Variable', (['"""x"""', '"""ft"""'], {}), "('x', 'ft')\n", (7062, 7073), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((7086, 7099), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (7094, 7099), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((7365, 7387), 'gpkit.VectorVariable', 'VectorVariable', (['(3)', '"""x"""'], {}), "(3, 'x')\n", (7379, 7387), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((7608, 7621), 'gpkit.Variable', 'Variable', (['"""F"""'], {}), "('F')\n", (7616, 7621), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((7634, 7647), 'gpkit.Variable', 'Variable', (['"""m"""'], {}), "('m')\n", (7642, 7647), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((7660, 7673), 'gpkit.Variable', 'Variable', (['"""a"""'], {}), "('a')\n", (7668, 7673), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((7874, 7887), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (7882, 7887), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((7900, 7913), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (7908, 7913), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((8171, 8184), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (8179, 8184), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((8197, 8210), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (8205, 8210), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((8396, 8409), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (8404, 8409), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((8422, 8445), 'gpkit.Variable', 'Variable', (['"""y"""', '"""hr/day"""'], {}), "('y', 'hr/day')\n", (8430, 8445), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((8458, 8480), 'gpkit.nomials.MonomialEquality', 'MonomialEquality', (['x', 'y'], {}), '(x, y)\n', (8474, 8480), False, 'from gpkit.nomials import MonomialEquality\n'), ((8752, 8768), 'gpkit.Variable', 'Variable', (['"""ujet"""'], {}), "('ujet')\n", (8760, 8768), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((8782, 8796), 'gpkit.Variable', 'Variable', (['"""PK"""'], {}), "('PK')\n", (8790, 8796), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((8810, 8831), 'gpkit.Variable', 'Variable', (['"""Dp"""', '(0.662)'], {}), "('Dp', 0.662)\n", (8818, 8831), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((8847, 8868), 'gpkit.Variable', 'Variable', (['"""fBLI"""', '(0.4)'], {}), "('fBLI', 0.4)\n", (8855, 8868), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((8885, 8909), 'gpkit.Variable', 'Variable', (['"""fsurf"""', '(0.836)'], {}), "('fsurf', 0.836)\n", (8893, 8909), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((8925, 8953), 'gpkit.Variable', 'Variable', (['"""mdot"""', '(1 / 0.7376)'], {}), "('mdot', 1 / 0.7376)\n", (8933, 8953), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((9456, 9477), 'gpkit.Variable', 'Variable', (['"""Dp"""', '(0.662)'], {}), "('Dp', 0.662)\n", (9464, 9477), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((9492, 9525), 'gpkit.Variable', 'Variable', (['"""-Dp"""', '(lambda c: -c[Dp])'], {}), "('-Dp', lambda c: -c[Dp])\n", (9500, 9525), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((9541, 9562), 'gpkit.Variable', 'Variable', (['"""fBLI"""', '(0.4)'], {}), "('fBLI', 0.4)\n", (9549, 9562), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((9579, 9603), 'gpkit.Variable', 'Variable', (['"""fsurf"""', '(0.836)'], {}), "('fsurf', 0.836)\n", (9587, 9603), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((9619, 9647), 'gpkit.Variable', 'Variable', (['"""mdot"""', '(1 / 0.7376)'], {}), "('mdot', 1 / 0.7376)\n", (9627, 9647), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((9658, 9762), 'gpkit.Model', 'Model', (['PK', '[mdot * ujet >= 1 + fBLI * mDp, PK >= 0.5 * mdot * ujet * (2 + ujet) + fBLI *\n fsurf * Dp]'], {}), '(PK, [mdot * ujet >= 1 + fBLI * mDp, PK >= 0.5 * mdot * ujet * (2 +\n ujet) + fBLI * fsurf * Dp])\n', (9663, 9762), False, 'from gpkit import Model, ConstraintSet\n'), ((10117, 10138), 'gpkit.Variable', 'Variable', (['"""Dp"""', '(0.662)'], {}), "('Dp', 0.662)\n", (10125, 10138), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((10153, 10176), 'gpkit.Variable', 'Variable', (['"""-Dp"""', '(-0.662)'], {}), "('-Dp', -0.662)\n", (10161, 10176), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((10192, 10213), 'gpkit.Variable', 'Variable', (['"""fBLI"""', '(0.4)'], {}), "('fBLI', 0.4)\n", (10200, 10213), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((10230, 10254), 'gpkit.Variable', 'Variable', (['"""fsurf"""', '(0.836)'], {}), "('fsurf', 0.836)\n", (10238, 10254), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((10270, 10298), 'gpkit.Variable', 'Variable', (['"""mdot"""', '(1 / 0.7376)'], {}), "('mdot', 1 / 0.7376)\n", (10278, 10298), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((10309, 10413), 'gpkit.Model', 'Model', (['PK', '[mdot * ujet >= 1 + fBLI * mDp, PK >= 0.5 * mdot * ujet * (2 + ujet) + fBLI *\n fsurf * Dp]'], {}), '(PK, [mdot * ujet >= 1 + fBLI * mDp, PK >= 0.5 * mdot * ujet * (2 +\n ujet) + fBLI * fsurf * Dp])\n', (10314, 10413), False, 'from gpkit import Model, ConstraintSet\n'), ((10815, 10839), 'gpkit.Variable', 'Variable', (['"""D"""'], {'units': '"""N"""'}), "('D', units='N')\n", (10823, 10839), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((11220, 11233), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (11228, 11233), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((11246, 11259), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (11254, 11259), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((11596, 11609), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (11604, 11609), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((11626, 11648), 'gpkit.Variable', 'Variable', (['"""x_{min}"""', '(2)'], {}), "('x_{min}', 2)\n", (11634, 11648), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((11952, 11965), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (11960, 11965), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((11982, 12004), 'gpkit.Variable', 'Variable', (['"""x_{min}"""', '(2)'], {}), "('x_{min}', 2)\n", (11990, 12004), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((12436, 12449), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (12444, 12449), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((12462, 12475), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (12470, 12475), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((12492, 12512), 'gpkit.Variable', 'Variable', (['"""x_min"""', '(1)'], {}), "('x_min', 1)\n", (12500, 12512), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((12529, 12549), 'gpkit.Variable', 'Variable', (['"""y_min"""', '(2)'], {}), "('y_min', 2)\n", (12537, 12549), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((13259, 13272), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (13267, 13272), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((13289,
13311), 'gpkit.Variable', 'Variable', (['"""x_{min}"""', '(2)'], {}), "('x_{min}', 2)\n", (13297, 13311), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((13742, 13755), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (13750, 13755), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((13768, 13781), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (13776, 13781), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((14361, 14374), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (14369, 14374), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((14387, 14400), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (14395, 14400), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((14417, 14439), 'gpkit.Variable', 'Variable', (['"""x_{min}"""', '(2)'], {}), "('x_{min}', 2)\n", (14425, 14439), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((14456, 14480), 'gpkit.Variable', 'Variable', (['"""y_{max}"""', '(0.5)'], {}), "('y_{max}', 0.5)\n", (14464, 14480), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((15084, 15097), 'gpkit.Variable', 'Variable', (['"""x"""'], {}), "('x')\n", (15092, 15097), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((15110, 15123), 'gpkit.Variable', 'Variable', (['"""y"""'], {}), "('y')\n", (15118, 15123), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((15136, 15164), 'gpkit.Model', 'Model', (['x', '[x >= y]', "{'y': 1}"], {}), "(x, [x >= y], {'y': 1})\n", (15141, 15164), False, 'from gpkit import Model, ConstraintSet\n'), ((1332, 1352), 'gpkit.Model', 'Model', (['x', "[x == 'A']"], {}), "(x, [x == 'A'])\n", (1337, 1352), False, 'from gpkit import Model, ConstraintSet\n'), ((1413, 1441), 'gpkit.Model', 'Model', (['x', "[x >= 1, x == 'A']"], {}), "(x, [x >= 1, x == 'A'])\n", (1418, 1441), False, 'from gpkit import Model, ConstraintSet\n'), ((1502, 1538), 'gpkit.Model', 'Model', (['x', "[x >= 1, x == 'A', x >= 1]"], {}), "(x, [x >= 1, x == 'A', x >= 1])\n", (1507, 1538), False, 'from gpkit import Model, ConstraintSet\n'), ((1601, 1629), 'gpkit.Model', 'Model', (['x', "[x == 'A', x >= 1]"], {}), "(x, [x == 'A', x >= 1])\n", (1606, 1629), False, 'from gpkit import Model, ConstraintSet\n'), ((1725, 1745), 'gpkit.Model', 'Model', (['x', "[v == 'A']"], {}), "(x, [v == 'A'])\n", (1730, 1745), False, 'from gpkit import Model, ConstraintSet\n'), ((1805, 1832), 'gpkit.Model', 'Model', (['x', "[v <= ['A', 'B']]"], {}), "(x, [v <= ['A', 'B']])\n", (1810, 1832), False, 'from gpkit import Model, ConstraintSet\n'), ((1892, 1919), 'gpkit.Model', 'Model', (['x', "[v >= ['A', 'B']]"], {}), "(x, [v >= ['A', 'B']])\n", (1897, 1919), False, 'from gpkit import Model, ConstraintSet\n'), ((6521, 6535), 'numpy.array', 'np.array', (['x[0]'], {}), '(x[0])\n', (6529, 6535), True, 'import numpy as np\n'), ((8000, 8030), 'gpkit.nomials.MonomialEquality', 'MonomialEquality', (['(x * y)', '(x + y)'], {}), '(x * y, x + y)\n', (8016, 8030), False, 'from gpkit.nomials import MonomialEquality\n'), ((8965, 8984), 'gpkit.SignomialsEnabled', 'SignomialsEnabled', ([], {}), '()\n', (8982, 8984), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((9002, 9105), 'gpkit.Model', 'Model', (['PK', 
'[mdot * ujet + fBLI * Dp >= 1, PK >= 0.5 * mdot * ujet * (2 + ujet) + fBLI *\n fsurf * Dp]'], {}), '(PK, [mdot * ujet + fBLI * Dp >= 1, PK >= 0.5 * mdot * ujet * (2 +\n ujet) + fBLI * fsurf * Dp])\n', (9007, 9105), False, 'from gpkit import Model, ConstraintSet\n'), ((10862, 10893), 'gpkit.Variable', 'Variable', (["('x_%s' % i)"], {'units': '"""N"""'}), "('x_%s' % i, units='N')\n", (10870, 10893), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((11006, 11025), 'gpkit.SignomialsEnabled', 'SignomialsEnabled', ([], {}), '()\n', (11023, 11025), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((11273, 11292), 'gpkit.SignomialsEnabled', 'SignomialsEnabled', ([], {}), '()\n', (11290, 11292), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((12563, 12582), 'gpkit.SignomialsEnabled', 'SignomialsEnabled', ([], {}), '()\n', (12580, 12582), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((13795, 13814), 'gpkit.SignomialsEnabled', 'SignomialsEnabled', ([], {}), '()\n', (13812, 13814), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((14494, 14513), 'gpkit.SignomialsEnabled', 'SignomialsEnabled', ([], {}), '()\n', (14511, 14513), False, 'from gpkit import Variable, SignomialsEnabled, Posynomial, VectorVariable\n'), ((15192, 15202), 'gpkit.constraints.bounded.Bounded', 'Bounded', (['m'], {}), '(m)\n', (15199, 15202), False, 'from gpkit.constraints.bounded import Bounded\n'), ((15316, 15339), 'gpkit.constraints.bounded.Bounded', 'Bounded', (['m'], {'lower': '(1e-10)'}), '(m, lower=1e-10)\n', (15323, 15339), False, 'from gpkit.constraints.bounded import Bounded\n'), ((15453, 15484), 'gpkit.constraints.bounded.Bounded', 'Bounded', (['m'], {'upper': '(10000000000.0)'}), '(m, upper=10000000000.0)\n', (15460, 15484), False, 'from gpkit.constraints.bounded import Bounded\n'), ((1133, 1162), 'gpkit.ConstraintSet', 'ConstraintSet', (['[self, x <= 1]'], {}), '([self, x <= 1])\n', (1146, 1162), False, 'from gpkit import Model, ConstraintSet\n'), ((3609, 3627), 'gpkit.Model', 'Model', (['x', '[x >= 1]'], {}), '(x, [x >= 1])\n', (3614, 3627), False, 'from gpkit import Model, ConstraintSet\n'), ((3697, 3724), 'gpkit.Model', 'Model', (['x', '[x >= 1, x_ >= 1]'], {}), '(x, [x >= 1, x_ >= 1])\n', (3702, 3724), False, 'from gpkit import Model, ConstraintSet\n'), ((3790, 3817), 'gpkit.Model', 'Model', (['x', '[x >= 1, xv >= 1]'], {}), '(x, [x >= 1, xv >= 1])\n', (3795, 3817), False, 'from gpkit import Model, ConstraintSet\n'), ((11671, 11690), 'gpkit.constraints.loose.Loose', 'Loose', (['[x >= x_min]'], {}), '([x >= x_min])\n', (11676, 11690), False, 'from gpkit.constraints.loose import Loose\n'), ((12027, 12046), 'gpkit.constraints.loose.Loose', 'Loose', (['[x >= x_min]'], {}), '([x >= x_min])\n', (12032, 12046), False, 'from gpkit.constraints.loose import Loose\n'), ((12652, 12667), 'gpkit.constraints.loose.Loose', 'Loose', (['[x >= y]'], {}), '([x >= y])\n', (12657, 12667), False, 'from gpkit.constraints.loose import Loose\n'), ((13334, 13349), 'gpkit.constraints.tight.Tight', 'Tight', (['[x >= 1]'], {}), '([x >= 1])\n', (13339, 13349), False, 'from gpkit.constraints.tight import Tight\n'), ((13884, 13899), 'gpkit.constraints.tight.Tight', 'Tight', (['[x >= y]'], {}), '([x >= y])\n', (13889, 13899), False, 'from gpkit.constraints.tight import Tight\n'), ((14541, 14560), 'gpkit.constraints.tight.Tight', 'Tight', (['[x + y >= 
1]'], {}), '([x + y >= 1])\n', (14546, 14560), False, 'from gpkit.constraints.tight import Tight\n')]
|
from bokeh.models import ColumnDataSource, HoverTool, Range1d, Plot, LinearAxis, Grid, Paragraph,TapTool,Div
from bokeh.plotting import figure, show, output_file
from bokeh.io import curdoc
from bokeh.layouts import widgetbox , layout
from bokeh.models.widgets import Select, Slider, Button
import dim_reduction
import numpy as np
import clustering
import random
import sys
import os
import pickle
from bokeh.models.glyphs import ImageURL
import re
from bokeh.models.callbacks import CustomJS
from sklearn.preprocessing import MinMaxScaler
src_path = os.path.abspath("./src/")
if src_path not in sys.path:
sys.path.insert(0, src_path)
import data_format
from wcloud_standalone import get_wcloud
import heatmap as hmap
from lrp import get_lrp_timedata
def button_callback():
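    # Reset the banner to the selected review's raw text (drops any highlight markup).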
text_review,words,word_embeddings = get_rawText_data(rawInput_selections.value,keys_raw,data_raw,testX,embed_mat)
text_banner.text = text_review
def get_wc_colourGroups(rawInput_source):
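    # Group the review's words by cluster colour so the word cloud can colour words from the same embedding cluster alike.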
words = np.array(rawInput_source.data['w'])
colors = rawInput_source.data['z']
color_dict = dict()
for color in sorted(data_format.list_duplicates(colors)):
color_dict[color[0]] = list(words[color[1]])
return color_dict
def get_selections():
gates = ["IN - what to add on","NOT IMPORTANT - what to drop off","IMPORTANT - where to focus on"]
select_gate = Select(title="Gate", value="IN - what to add on", options=gates)
if select_gate.value == "IN - what to add on":
select_gate.value = "input_gate"
elif select_gate.value == "NOT IMPORTANT - what to drop off":
select_gate.value = "forget_gate"
elif select_gate.value == "IMPORTANT - where to focus on":
select_gate.value = "output_gate"
return select_gate
def get_clustering_selections(algorithms_neurons):
algorithm_select_neuron = Select(value="KMeans - selected gate",title="Select clustering option for neurons:",width=250, options=algorithms_neurons)
cluster_slider = Slider(title="Number of clusters (use in kmeans,hierarchical clustering)",value=2.0,start=2.0,end=4.0,step=1,width=400)
return (algorithm_select_neuron,cluster_slider)
def get_rawInput_selections(keys_raw):
review = [str(r) for r in list(keys_raw)]
select_rawInput = Select(title="Input review", value=review[0], options=review)
return select_rawInput
def get_projection_selections(algorithms):
algorithm_select = Select(value="PCA",title="Select projection algorithm:",width=250, options=algorithms)
return algorithm_select
def get_rawText_data(rawInput_selections,keys_raw,data_raw,feed,embed_mat):
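    # Decode one stored review into its raw text, token list, and per-token embedding vectors (rows of embed_mat).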
text_review = np.array_str(data_raw[int(rawInput_selections)])
txt_rev = text_review.replace('<UNK>','UNK')
words = text_review.split()
word_embeddings = [embed_mat[i,:] for i in list(feed[int(rawInput_selections),:].astype(int))]
return txt_rev,words,word_embeddings
"""
-------------------------------------------------------------------------------------------------------
UPDATE SOURCE
-------------------------------------------------------------------------------------------------------
"""
def update_source(attrname, old, new):
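    # Widget callback: rebuilds every data source (projection, neuron clusters, word cloud, LRP highlights) from the current selections.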
gate_value = gate_selections.value
if gate_value == "IN - what to add on":
gate_value = "input_gate"
elif gate_value == "NOT IMPORTANT - what to drop off":
gate_value = "forget_gate"
elif gate_value == "IMPORTANT - where to focus on":
gate_value = "output_gate"
x = data[lstm_layer_name][gate_value]
text_review,words,word_embeddings = get_rawText_data(rawInput_selections.value,keys_raw,data_raw,testX,embed_mat)
#update raw input
text_banner.text = text_review
text_banner2.text = text_review
label_banner.text = "Network decision : POSITIVE" if predicted_tgs[int(rawInput_selections.value)][0] == 0 else "Network decision : NEGATIVE"
#update dimension reduction source
algorithm = projection_selections.value
knn = 5
x_pr,performance_metric = dim_reduction.project(x, algorithm, knn, labels)
#update clustering
algorithm_cl_neurons = clustering_selections[0].value
n_clusters = int(clustering_selections[1].value)
if algorithm_cl_neurons=="Internal state clustering (LSTM's outputs)":
text_set.text = "Internal state clustering - selected review: Clusters representation of input review at every timestep as learned by the LSTM layer."
lstm_hidVal = np.array(lstm_hidden[int(rawInput_selections.value)])
x_pr,performance_metric = dim_reduction.project(np.transpose(lstm_hidVal), algorithm, knn, labels)
cluster_labels, colors, _ = clustering.apply_cluster(data=np.transpose(lstm_hidVal),algorithm=algorithm_cl_neurons,n_clusters=n_clusters,review=None,neuronData=None,mode="nn")
elif algorithm_cl_neurons=="DBSCAN - all reviews" or algorithm_cl_neurons== "AgglomerativeClustering - all reviews":
if algorithm_cl_neurons=="DBSCAN - all reviews":
text_set.text = "DBSCAN - all reviews: Clusters neurons based on how related their most activating words are. List of activating words generated from all reviews."
elif algorithm_cl_neurons== "AgglomerativeClustering - all reviews":
text_set.text = "AgglomerativeClustering - all reviews: Hierarchical clustering of neurons based on how related their most activating words are. List of activating words generated from all reviews."
neuronData = similarityMatrix_AllReviews
cluster_labels, colors, _ = clustering.apply_cluster(x,algorithm_cl_neurons,n_clusters,review=rawInput_selections.value,neuronData=neuronData,mode="nn")
elif algorithm_cl_neurons=="Positive-Negative neuron clustering (LSTM's predictions)":
text_set.text = "Positive-Negative neuron clustering: Clusters neurons based on how much they contributed to classifying the review as positive or negative."
neuronData = neuron_types
cluster_labels, colors, spectr = clustering.apply_cluster(x,algorithm_cl_neurons,n_clusters,review=rawInput_selections.value,neuronData=neuronData,mode="nn")
        neutral = tuple(int((spectr[0].lstrip('#'))[i:i+2], 16) for i in (0, 2, 4))
        positive = tuple(int((spectr[1].lstrip('#'))[i:i+2], 16) for i in (0, 2, 4))
        negative = tuple(int((spectr[2].lstrip('#'))[i:i+2], 16) for i in (0, 2, 4))
neu = "<span style='background-color: rgb("+str(neutral[0])+","+str(neutral[1])+","+str(neutral[2])+")'>Neutral</span>"
pos = "<span style='background-color: rgb("+str(positive[0])+","+str(positive[1])+","+str(positive[2])+")'>Positive</span>"
neg = "<span style='background-color: rgb("+str(negative[0])+","+str(negative[1])+","+str(negative[2])+")'>Negative</span>"
text_set.text = "Positive-Negative neuron clustering: Clusters neurons based on how much they contributed to classifying the review as positive or negative:"+neu+" "+pos+" "+neg
else:
if algorithm_cl_neurons=="KMeans - selected gate":
text_set.text = "KMeans: Clusters neurons based on their gate values after training."
elif algorithm_cl_neurons=="DBSCAN - selected review":
text_set.text = "DBSCAN - selected review: Clusters neurons based on how related their most activating words are. List of activating words generated from selected review."
neuronData = similarityMatrix_PerReview
cluster_labels, colors, _ = clustering.apply_cluster(x,algorithm_cl_neurons,n_clusters,review=int(rawInput_selections.value),neuronData=neuronData,mode="nn")
proj_source.data = dict(x=x_pr[:, 0], y=x_pr[:, 1], z=colors)
w2v_labels, w2v_colors, _ = clustering.apply_cluster(np.array(word_embeddings),"KMeans - selected gate",n_clusters,mode="wc")
rawInput_source.data = dict(z=w2v_colors, w=words)
color_dict = get_wc_colourGroups(rawInput_source)
if gate_value=="input_gate":
wc_filename,wc_img,wc_words = get_wcloud(LRP,int(rawInput_selections.value),load_dir,color_dict=color_dict,gate="in",text=text_banner.text)
elif gate_value=="forget_gate":
wc_filename,wc_img,wc_words = get_wcloud(LRP,int(rawInput_selections.value),load_dir,color_dict=color_dict,gate="forget")
elif gate_value=="output_gate":
wc_filename,wc_img,wc_words = get_wcloud(LRP,int(rawInput_selections.value),load_dir,color_dict=color_dict,gate="out")
words_to_be_highlighted = list(set(wc_words).intersection(totalLRP[int(rawInput_selections.value)]['words']))
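    # Rescale this review's per-word LRP relevance to [-1, 1]; the tap callback shades positive scores red and negative ones green.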
lrp_source.data['lrp'] = scaler.fit_transform(np.array(totalLRP[int(rawInput_selections.value)]['lrp'].tolist()).reshape(-1,1))
tap_source.data['wc_words'] = words_to_be_highlighted
wc_plot.add_glyph(img_source, ImageURL(url=dict(value=load_dir+wc_filename), x=0, y=0, anchor="bottom_left"))
"""
------------------------------------------------------------------------------------------------------------------------
MAIN APP CODE
------------------------------------------------------------------------------------------------------------------------
"""
# Provide data paths and files
load_dir = "./bokeh_vis/static/"
lstm_layer_name = "lstm"
#Get trained model parameters: weights and gate values
keys,data = data_format.get_data(load_dir+"model.json")
#Get raw input
keys_raw,data_raw = data_format.get_data(load_dir+"test_data_text.pickle")
#Load auxiliary data
with open(load_dir+"lstm_predictions.pickle","rb") as handle:
predicted_tgs = pickle.load(handle)
with open(load_dir+"exploratoryDataFull.pickle", 'rb') as f:
(testX,embed_mat,excitingWords_fullSet,similarityMatrix_AllReviews,similarityMatrix_PerReview,neuron_types,totalLRP,LRP) = pickle.load(f)
_,lstm_hidden = data_format.get_data(load_dir+"test_model_internals_lstm_hidden.pickle")
#Get preset buttons' selections
#LSTM gates
gate_selections = get_selections()
#Dimensionality reduction
projection_selections = get_projection_selections(dim_reduction.get_dimReduction_algorithms())
#Clustering
algorithm_neurons = clustering.get_cluster_algorithms()
clustering_selections = get_clustering_selections(algorithm_neurons)
#Raw input clustering
rawInput_selections = get_rawInput_selections(keys_raw)
tools = "pan,wheel_zoom,box_zoom,reset"
#Dimensionality reduction
labels = None
data_pr = data[lstm_layer_name][gate_selections.value]
X, performance_metric = dim_reduction.project(data_pr, "PCA", n_neighbors=5, labels=labels)
X_cluster_labels, X_colors, _ = clustering.apply_cluster(data_pr,algorithm=clustering_selections[0].value,n_clusters=int(clustering_selections[1].value),mode="nn")
proj_source = ColumnDataSource(dict(x=X[:,0],y=X[:,1],z=X_colors))
project_plot = figure(title=projection_selections.value,tools=tools,plot_width=300, plot_height=300)
scatter_tap = project_plot.scatter('x', 'y', marker='circle', size=10, fill_color='z', alpha=0.5, source=proj_source, legend=None)
project_plot.xaxis.axis_label = 'Dim 1'
project_plot.yaxis.axis_label = 'Dim 2'
taptool = TapTool()
project_plot.add_tools(taptool)
#Input text
text_review,words,word_embeddings = get_rawText_data(rawInput_selections.value,keys_raw,data_raw,testX,embed_mat)
w2v_labels, w2v_colors, _ = clustering.apply_cluster(np.array(word_embeddings),algorithm="KMeans - selected gate",n_clusters=int(clustering_selections[1].value),mode="wc")
rawInput_source = ColumnDataSource(dict(z=w2v_colors,w=words))
text_banner = Div(text=text_review, width=1300, height=100)
text_banner2 = Div(text=text_review, width=1300, height=100)
label_banner = Paragraph(text="Network decision : POSITIVE" if predicted_tgs[int(rawInput_selections.value)][0] == 0 else "Network decision : NEGATIVE", width=200, height=30)
button = Button(label="Reset text")
button.on_click(button_callback)
#WordCloud
color_dict = get_wc_colourGroups(rawInput_source) #Colors based on similarity in embedding space
wc_filename,wc_img,wc_words = get_wcloud(LRP,int(rawInput_selections.value),load_dir,color_dict=color_dict,gate="in",text=text_banner.text)
words_to_be_highlighted = list(set(wc_words).intersection(totalLRP[int(rawInput_selections.value)]['words']))
highlight_source = ColumnDataSource(dict(scores=[]))
tap_source = ColumnDataSource(dict(wc_words=words_to_be_highlighted))
scaler = MinMaxScaler(copy=True, feature_range=(-1, 1))
lrp_source = ColumnDataSource(dict(lrp=scaler.fit_transform(np.array(totalLRP[int(rawInput_selections.value)]['lrp'].tolist()).reshape(-1,1))))
#totalLRP : how relevant is each LSTM neuron
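#Tapping a point (neuron) in the projection plot runs this JS callback: it highlights the word-cloud words inside the review text, shaded by that neuron's LRP relevance.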
taptool.callback = CustomJS(args=dict(source=tap_source,lrp=lrp_source,high=highlight_source,div=text_banner,div_orig=text_banner2),
code="""
cell = cb_obj.selected['1d']['indices'][0]
var d = high.data;
d['scores'] = []
for(var i=0; i<source.data['wc_words'].length; i++){
d['scores'].push(lrp.data['lrp'][cell])
}
high.change.emit();
ws = div_orig.text.split(" ");
ws_out = [];
for(var j=0; j<ws.length; j++){
w_idx = source.data['wc_words'].indexOf(ws[j])
if (w_idx>=0){
if (d['scores'][w_idx]>0){
ws_out.push("<span style='background-color: rgba(255,0,0,"+d['scores'][w_idx]+")'>"+ws[j]+"</span>")
}
else if (d['scores'][w_idx]<0){
ws_out.push("<span style='background-color: rgba(0,255,0,"+Math.abs(d['scores'][w_idx])+")'>"+ws[j]+"</span>")
}
}
else {
ws_out.push(ws[j])
}
}
div.text = ws_out.join(" ")
console.log(ws_out)
""")
img_source = ColumnDataSource(dict(url = [load_dir+wc_filename]))
xdr = Range1d(start=0, end=600)
ydr = Range1d(start=0, end=600)
wc_plot = Plot(title=None, x_range=xdr, y_range=ydr, plot_width=500, plot_height=550, min_border=0)
image = ImageURL(url=dict(value=load_dir+wc_filename), x=0, y=0, anchor="bottom_left", retry_attempts=3, retry_timeout=1000)
wc_plot.add_glyph(img_source, image)
text_0 = Paragraph(text="Clustering option:", width=200, height=20)
text_set = Div(text="KMeans: Clusters neurons based on their gate values after training.", width=250, height=100)
lrp_timedata = get_lrp_timedata(LRP)
time = [i for i in range(len(lrp_timedata))]
lrptime_source = ColumnDataSource(dict(lrptime = lrp_timedata,time=time))
lrp_plot = figure(title="Network focus per timestep",plot_width=300, plot_height=50)
lrp_plot.scatter('time','lrptime', marker='circle', size=5, alpha=0.5, source=lrptime_source)
lrp_plot.xaxis.axis_label = 'Time'
lrp_plot.yaxis.axis_label = 'Normalized relevance score'
#Layout
gate_selections.on_change('value', update_source)
projection_selections.on_change('value', update_source)
for attr in clustering_selections:
attr.on_change('value', update_source)
rawInput_selections.on_change('value', update_source)
gp = layout([project_plot, wc_plot, widgetbox(rawInput_selections,gate_selections,projection_selections,clustering_selections[0],clustering_selections[1],text_0,text_set,label_banner,button)],
[lrp_plot],
[text_banner],
responsive=True)
curdoc().add_root(gp)
curdoc().title = "tRustNN"
|
[
"bokeh.models.Plot",
"bokeh.layouts.widgetbox",
"sklearn.preprocessing.MinMaxScaler",
"dim_reduction.project",
"pickle.load",
"data_format.get_data",
"bokeh.models.widgets.Slider",
"bokeh.models.widgets.Select",
"os.path.abspath",
"numpy.transpose",
"bokeh.io.curdoc",
"bokeh.models.Div",
"clustering.get_cluster_algorithms",
"dim_reduction.get_dimReduction_algorithms",
"bokeh.models.Range1d",
"data_format.list_duplicates",
"bokeh.models.widgets.Button",
"bokeh.models.Paragraph",
"bokeh.models.TapTool",
"bokeh.plotting.figure",
"lrp.get_lrp_timedata",
"sys.path.insert",
"clustering.apply_cluster",
"numpy.array"
] |
[((552, 577), 'os.path.abspath', 'os.path.abspath', (['"""./src/"""'], {}), "('./src/')\n", (567, 577), False, 'import os\n'), ((9359, 9404), 'data_format.get_data', 'data_format.get_data', (["(load_dir + 'model.json')"], {}), "(load_dir + 'model.json')\n", (9379, 9404), False, 'import data_format\n'), ((9438, 9494), 'data_format.get_data', 'data_format.get_data', (["(load_dir + 'test_data_text.pickle')"], {}), "(load_dir + 'test_data_text.pickle')\n", (9458, 9494), False, 'import data_format\n'), ((9837, 9911), 'data_format.get_data', 'data_format.get_data', (["(load_dir + 'test_model_internals_lstm_hidden.pickle')"], {}), "(load_dir + 'test_model_internals_lstm_hidden.pickle')\n", (9857, 9911), False, 'import data_format\n'), ((10152, 10187), 'clustering.get_cluster_algorithms', 'clustering.get_cluster_algorithms', ([], {}), '()\n', (10185, 10187), False, 'import clustering\n'), ((10496, 10563), 'dim_reduction.project', 'dim_reduction.project', (['data_pr', '"""PCA"""'], {'n_neighbors': '(5)', 'labels': 'labels'}), "(data_pr, 'PCA', n_neighbors=5, labels=labels)\n", (10517, 10563), False, 'import dim_reduction\n'), ((10810, 10901), 'bokeh.plotting.figure', 'figure', ([], {'title': 'projection_selections.value', 'tools': 'tools', 'plot_width': '(300)', 'plot_height': '(300)'}), "(title=projection_selections.value, tools=tools, plot_width=300,\n plot_height=300)\n", (10816, 10901), False, 'from bokeh.plotting import figure, show, output_file\n'), ((11117, 11126), 'bokeh.models.TapTool', 'TapTool', ([], {}), '()\n', (11124, 11126), False, 'from bokeh.models import ColumnDataSource, HoverTool, Range1d, Plot, LinearAxis, Grid, Paragraph, TapTool, Div\n'), ((11536, 11581), 'bokeh.models.Div', 'Div', ([], {'text': 'text_review', 'width': '(1300)', 'height': '(100)'}), '(text=text_review, width=1300, height=100)\n', (11539, 11581), False, 'from bokeh.models import ColumnDataSource, HoverTool, Range1d, Plot, LinearAxis, Grid, Paragraph, TapTool, Div\n'), ((11597, 11642), 'bokeh.models.Div', 'Div', ([], {'text': 'text_review', 'width': '(1300)', 'height': '(100)'}), '(text=text_review, width=1300, height=100)\n', (11600, 11642), False, 'from bokeh.models import ColumnDataSource, HoverTool, Range1d, Plot, LinearAxis, Grid, Paragraph, TapTool, Div\n'), ((11828, 11854), 'bokeh.models.widgets.Button', 'Button', ([], {'label': '"""Reset text"""'}), "(label='Reset text')\n", (11834, 11854), False, 'from bokeh.models.widgets import Select, Slider, Button\n'), ((12380, 12426), 'sklearn.preprocessing.MinMaxScaler', 'MinMaxScaler', ([], {'copy': '(True)', 'feature_range': '(-1, 1)'}), '(copy=True, feature_range=(-1, 1))\n', (12392, 12426), False, 'from sklearn.preprocessing import MinMaxScaler\n'), ((13737, 13762), 'bokeh.models.Range1d', 'Range1d', ([], {'start': '(0)', 'end': '(600)'}), '(start=0, end=600)\n', (13744, 13762), False, 'from bokeh.models import ColumnDataSource, HoverTool, Range1d, Plot, LinearAxis, Grid, Paragraph, TapTool, Div\n'), ((13769, 13794), 'bokeh.models.Range1d', 'Range1d', ([], {'start': '(0)', 'end': '(600)'}), '(start=0, end=600)\n', (13776, 13794), False, 'from bokeh.models import ColumnDataSource, HoverTool, Range1d, Plot, LinearAxis, Grid, Paragraph, TapTool, Div\n'), ((13805, 13898), 'bokeh.models.Plot', 'Plot', ([], {'title': 'None', 'x_range': 'xdr', 'y_range': 'ydr', 'plot_width': '(500)', 'plot_height': '(550)', 'min_border': '(0)'}), '(title=None, x_range=xdr, y_range=ydr, plot_width=500, plot_height=550,\n min_border=0)\n', (13809, 13898), False, 'from bokeh.models import ColumnDataSource, HoverTool, Range1d, Plot, LinearAxis, Grid, Paragraph, TapTool, Div\n'), ((14068, 14126), 'bokeh.models.Paragraph', 'Paragraph', ([], {'text': '"""Clustering option:"""', 'width': '(200)', 'height': '(20)'}), "(text='Clustering option:', width=200, height=20)\n", (14077, 14126), False, 'from bokeh.models import ColumnDataSource, HoverTool, Range1d, Plot, LinearAxis, Grid, Paragraph, TapTool, Div\n'), ((14138, 14249), 'bokeh.models.Div', 'Div', ([], {'text': '"""KMeans: Clusters neurons based on their gate values after training."""', 'width': '(250)', 'height': '(100)'}), "(text=\n 'KMeans: Clusters neurons based on their gate values after training.',\n width=250, height=100)\n", (14141, 14249), False, 'from bokeh.models import ColumnDataSource, HoverTool, Range1d, Plot, LinearAxis, Grid, Paragraph, TapTool, Div\n'), ((14258, 14279), 'lrp.get_lrp_timedata', 'get_lrp_timedata', (['LRP'], {}), '(LRP)\n', (14274, 14279), False, 'from lrp import get_lrp_timedata\n'), ((14410, 14484), 'bokeh.plotting.figure', 'figure', ([], {'title': '"""Network focus per timestep"""', 'plot_width': '(300)', 'plot_height': '(50)'}), "(title='Network focus per timestep', plot_width=300, plot_height=50)\n", (14416, 14484), False, 'from bokeh.plotting import figure, show, output_file\n'), ((611, 639), 'sys.path.insert', 'sys.path.insert', (['(0)', 'src_path'], {}), '(0, src_path)\n', (626, 639), False, 'import sys\n'), ((992, 1027), 'numpy.array', 'np.array', (["rawInput_source.data['w']"], {}), "(rawInput_source.data['w'])\n", (1000, 1027), True, 'import numpy as np\n'), ((1379, 1443), 'bokeh.models.widgets.Select', 'Select', ([], {'title': '"""Gate"""', 'value': '"""IN - what to add on"""', 'options': 'gates'}), "(title='Gate', value='IN - what to add on', options=gates)\n", (1385, 1443), False, 'from bokeh.models.widgets import Select, Slider, Button\n'), ((1858, 1992), 'bokeh.models.widgets.Select', 'Select', ([], {'value': '"""KMeans - selected gate"""', 'title': '"""Select clustering option for neurons:"""', 'width': '(250)', 'options': 'algorithms_neurons'}), "(value='KMeans - selected gate', title=\n 'Select clustering option for neurons:', width=250, options=\n algorithms_neurons)\n", (1864, 1992), False, 'from bokeh.models.widgets import Select, Slider, Button\n'), ((2002, 2130), 'bokeh.models.widgets.Slider', 'Slider', ([], {'title': '"""Number of clusters (use in kmeans,hierarchical clustering)"""', 'value': '(2.0)', 'start': '(2.0)', 'end': '(4.0)', 'step': '(1)', 'width': '(400)'}), "(title='Number of clusters (use in kmeans,hierarchical clustering)',\n value=2.0, start=2.0, end=4.0, step=1, width=400)\n", (2008, 2130), False, 'from bokeh.models.widgets import Select, Slider, Button\n'), ((2285, 2346), 'bokeh.models.widgets.Select', 'Select', ([], {'title': '"""Input review"""', 'value': 'review[0]', 'options': 'review'}), "(title='Input review', value=review[0], options=review)\n", (2291, 2346), False, 'from bokeh.models.widgets import Select, Slider, Button\n'), ((2444, 2536), 'bokeh.models.widgets.Select', 'Select', ([], {'value': '"""PCA"""', 'title': '"""Select projection algorithm:"""', 'width': '(250)', 'options': 'algorithms'}), "(value='PCA', title='Select projection algorithm:', width=250,\n options=algorithms)\n", (2450, 2536), False, 'from bokeh.models.widgets import Select, Slider, Button\n'), ((4087, 4135), 'dim_reduction.project', 'dim_reduction.project', (['x', 'algorithm', 'knn', 'labels'], {}), '(x, algorithm, knn, labels)\n', (4108, 4135), False, 'import dim_reduction\n'), ((9597, 9616), 'pickle.load', 'pickle.load', (['handle'], {}), '(handle)\n', (9608, 9616), False, 'import pickle\n'), ((9805, 9819), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (9816, 9819), False, 'import pickle\n'), ((10075, 10118), 'dim_reduction.get_dimReduction_algorithms', 'dim_reduction.get_dimReduction_algorithms', ([], {}), '()\n', (10116, 10118), False, 'import dim_reduction\n'), ((11339, 11364), 'numpy.array', 'np.array', (['word_embeddings'], {}), '(word_embeddings)\n', (11347, 11364), True, 'import numpy as np\n'), ((15214, 15222), 'bokeh.io.curdoc', 'curdoc', ([], {}), '()\n', (15220, 15222), False, 'from bokeh.io import curdoc\n'), ((1116, 1151), 'data_format.list_duplicates', 'data_format.list_duplicates', (['colors'], {}), '(colors)\n', (1143, 1151), False, 'import data_format\n'), ((7793, 7818), 'numpy.array', 'np.array', (['word_embeddings'], {}), '(word_embeddings)\n', (7801, 7818), True, 'import numpy as np\n'), ((14955, 15125), 'bokeh.layouts.widgetbox', 'widgetbox', (['rawInput_selections', 'gate_selections', 'projection_selections', 'clustering_selections[0]', 'clustering_selections[1]', 'text_0', 'text_set', 'label_banner', 'button'], {}), '(rawInput_selections, gate_selections, projection_selections,\n clustering_selections[0], clustering_selections[1], text_0, text_set,\n label_banner, button)\n', (14964, 15125), False, 'from bokeh.layouts import widgetbox, layout\n'), ((15192, 15200), 'bokeh.io.curdoc', 'curdoc', ([], {}), '()\n', (15198, 15200), False, 'from bokeh.io import curdoc\n'), ((4640, 4665), 'numpy.transpose', 'np.transpose', (['lstm_hidVal'], {}), '(lstm_hidVal)\n', (4652, 4665), True, 'import numpy as np\n'), ((5607, 5741), 'clustering.apply_cluster', 'clustering.apply_cluster', (['x', 'algorithm_cl_neurons', 'n_clusters'], {'review': 'rawInput_selections.value', 'neuronData': 'neuronData', 'mode': '"""nn"""'}), "(x, algorithm_cl_neurons, n_clusters, review=\n rawInput_selections.value, neuronData=neuronData, mode='nn')\n", (5631, 5741), False, 'import clustering\n'), ((4757, 4782), 'numpy.transpose', 'np.transpose', (['lstm_hidVal'], {}), '(lstm_hidVal)\n', (4769, 4782), True, 'import numpy as np\n'), ((6069, 6203), 'clustering.apply_cluster', 'clustering.apply_cluster', (['x', 'algorithm_cl_neurons', 'n_clusters'], {'review': 'rawInput_selections.value', 'neuronData': 'neuronData', 'mode': '"""nn"""'}), "(x, algorithm_cl_neurons, n_clusters, review=\n rawInput_selections.value, neuronData=neuronData, mode='nn')\n", (6093, 6203), False, 'import clustering\n')]
|
#!/usr/bin/env -S conda run -n tf python
import numpy as np
import cv2
import onnxruntime
import json
import requests
from PIL import Image
from tqdm import tqdm
from pathlib import Path
from fire import Fire
from typing import List, Union, Tuple
def preprocess_image(
image_path,
min_side = 800,
max_side = 1333,
):
image = read_image_bgr(image_path)
image = _preprocess_image(image)
image, scale = resize_image(
image, min_side = min_side, max_side = max_side
)
return image, scale
def read_image_bgr(path):
""" Read an image in BGR format.
Args
path: Path to the image.
"""
if isinstance(path, (str, Path)):
image = np.array(Image.open(path).convert("RGB"))
else:
path = cv2.cvtColor(path, cv2.COLOR_BGR2RGB)
image = np.array(Image.fromarray(path))
return image[:, :, ::-1]
def _preprocess_image(x, mode = "caffe"):
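    # "tf" mode rescales pixels to [-1, 1]; "caffe" mode subtracts the ImageNet per-channel means (BGR order).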
x = x.astype(np.float32)
if mode == "tf":
x /= 127.5
x -= 1.0
elif mode == "caffe":
x -= np.array([103.939, 116.779, 123.68])
return x
def compute_resize_scale(image_shape, min_side = 800, max_side = 1333):
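    # Scale so the shorter side becomes min_side, unless that would push the longer side beyond max_side.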
(rows, cols, _) = image_shape
smallest_side = min(rows, cols)
scale = min_side / smallest_side
largest_side = max(rows, cols)
if largest_side * scale > max_side:
scale = max_side / largest_side
return scale
def resize_image(img, min_side = 800, max_side = 1333):
scale = compute_resize_scale(
img.shape, min_side = min_side, max_side = max_side
)
img = cv2.resize(img, None, fx = scale, fy = scale)
return img, scale
def detect(model, classes, image_path, fast: bool = False) -> List[dict]:
image, scale = preprocess_image(image_path)
args_o = [s.name for s in model.get_outputs()]
args_i = {model.get_inputs()[0].name: image[None, :, :, :]}
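    # Run the ONNX detector on the batched image and unpack its three outputs.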
boxes, labels, scores = model.run(args_o, args_i)
boxes, labels, scores = boxes[0], labels[0], scores[0]
min_prob = 0.6
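    # Normalise pixel box coordinates by the resized image's dimensions (shape[:2] repeated to cover all four box values).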
boxes /= (image.shape[:2] * 2)
results = [
dict(
box = box.tolist(),
score = score.item(),
label = classes[label.item()],
path = str(image_path),
) for box, score, label in zip(boxes, scores, labels)
if score > min_prob
]
return results
def download_file(url: str, to: Union[str, Path])->None:
to = Path(to)
if to.exists():
print('using cached file', to)
return
tmp = to.with_suffix('.tmp')
resp = requests.get(url, stream = True)
block_size = 1024*1024
total_length = int(resp.headers.get('content-length', 0))
progress_bar = tqdm(total = total_length, unit = 'iB', unit_scale = True)
with tmp.open('wb') as file:
for data in resp.iter_content(block_size):
progress_bar.update(len(data))
file.write(data)
progress_bar.close()
if progress_bar.n == total_length:
tmp.rename(to)
def prepare_weight(model_name: str) -> Tuple[str, List[str]]:
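    # Download (or reuse a cached copy of) the ONNX checkpoint and class list for this model under ~/.NudeNet.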
root = Path.home() / '.NudeNet'
url = 'https://github.com/notAI-tech/NudeNet/releases/download/v0'
w = root / f'{model_name}_checkpoint.onnx'
wurl = f'{url}/{w.name}'
download_file(wurl, w)
c = root / f'{model_name}_classes'
curl = f'{url}/{c.name}'
download_file(curl, c)
return str(w), c.read_text().splitlines()
def prepare_weights() -> Tuple[str, List[str]]:
# prepare_weight('detector_v2_base')
return prepare_weight('detector_v2_default')
def main(file_or_dir: str = None, out: str = None):
ckpt, classes = prepare_weights()
model = onnxruntime.InferenceSession(
ckpt,
providers = [
'CUDAExecutionProvider',
# 'TensorrtExecutionProvider',
'CPUExecutionProvider',
]
)
if file_or_dir is None:
file_or_dir = 'whats.train/psed.a/3C49E51BF3B55D51B9582CE4735DDE3CDA0523C7-t.jpg'
file_or_dir = Path(file_or_dir)
if file_or_dir.is_file():
images = [file_or_dir]
else:
images = [
image for image in file_or_dir.rglob('*.*')
if image.suffix.lower() in {'.jpg', '.jpeg', '.png'}
]
if not out:
out = file_or_dir.name
out = Path(out).with_suffix('.json')
out.parent.mkdir(parents = True, exist_ok = True)
with out.open('w', encoding = 'utf-8') as out:
for image_path in tqdm(images):
for r in detect(model, classes, image_path):
json.dump(r, out, ensure_ascii = False)
out.write('\n')
if __name__ == '__main__':
# run as standalone script:
# nu-detect.py --file_or_dir=... --out=...
Fire(main)
|
[
"json.dump",
"tqdm.tqdm",
"fire.Fire",
"pathlib.Path.home",
"cv2.cvtColor",
"PIL.Image.open",
"onnxruntime.InferenceSession",
"pathlib.Path",
"numpy.array",
"requests.get",
"PIL.Image.fromarray",
"cv2.resize"
] |
[((1586, 1627), 'cv2.resize', 'cv2.resize', (['img', 'None'], {'fx': 'scale', 'fy': 'scale'}), '(img, None, fx=scale, fy=scale)\n', (1596, 1627), False, 'import cv2\n'), ((2423, 2431), 'pathlib.Path', 'Path', (['to'], {}), '(to)\n', (2427, 2431), False, 'from pathlib import Path\n'), ((2551, 2581), 'requests.get', 'requests.get', (['url'], {'stream': '(True)'}), '(url, stream=True)\n', (2563, 2581), False, 'import requests\n'), ((2693, 2745), 'tqdm.tqdm', 'tqdm', ([], {'total': 'total_length', 'unit': '"""iB"""', 'unit_scale': '(True)'}), "(total=total_length, unit='iB', unit_scale=True)\n", (2697, 2745), False, 'from tqdm import tqdm\n'), ((3657, 3756), 'onnxruntime.InferenceSession', 'onnxruntime.InferenceSession', (['ckpt'], {'providers': "['CUDAExecutionProvider', 'CPUExecutionProvider']"}), "(ckpt, providers=['CUDAExecutionProvider',\n 'CPUExecutionProvider'])\n", (3685, 3756), False, 'import onnxruntime\n'), ((3992, 4009), 'pathlib.Path', 'Path', (['file_or_dir'], {}), '(file_or_dir)\n', (3996, 4009), False, 'from pathlib import Path\n'), ((4725, 4735), 'fire.Fire', 'Fire', (['main'], {}), '(main)\n', (4729, 4735), False, 'from fire import Fire\n'), ((763, 800), 'cv2.cvtColor', 'cv2.cvtColor', (['path', 'cv2.COLOR_BGR2RGB'], {}), '(path, cv2.COLOR_BGR2RGB)\n', (775, 800), False, 'import cv2\n'), ((3071, 3082), 'pathlib.Path.home', 'Path.home', ([], {}), '()\n', (3080, 3082), False, 'from pathlib import Path\n'), ((4454, 4466), 'tqdm.tqdm', 'tqdm', (['images'], {}), '(images)\n', (4458, 4466), False, 'from tqdm import tqdm\n'), ((826, 847), 'PIL.Image.fromarray', 'Image.fromarray', (['path'], {}), '(path)\n', (841, 847), False, 'from PIL import Image\n'), ((1049, 1085), 'numpy.array', 'np.array', (['[103.939, 116.779, 123.68]'], {}), '([103.939, 116.779, 123.68])\n', (1057, 1085), True, 'import numpy as np\n'), ((4291, 4300), 'pathlib.Path', 'Path', (['out'], {}), '(out)\n', (4295, 4300), False, 'from pathlib import Path\n'), ((4541, 4578), 'json.dump', 'json.dump', (['r', 'out'], {'ensure_ascii': '(False)'}), '(r, out, ensure_ascii=False)\n', (4550, 4578), False, 'import json\n'), ((705, 721), 'PIL.Image.open', 'Image.open', (['path'], {}), '(path)\n', (715, 721), False, 'from PIL import Image\n')]
|
import time
from functools import partial
import torch
def timer(fxn, max_time=5):
N = 0
total_time = 0
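    # Warm-up call below keeps one-off setup cost (e.g. CUDA initialisation) out of the timed loop.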
fxn()
while total_time < max_time:
start = time.perf_counter()
fxn()
total_time += time.perf_counter() - start
N += 1
return total_time / N
def task(A, target):
result = 0
i = 0
N = 0
while result < target:
r = A[i]
result += r
i = A[i]
N += 1
return N
if __name__ == "__main__":
N = 1000
print(f"Testing with array of length {N}")
A_py = (torch.rand(N) * N).type(torch.int).to("cuda:0")
A_np = A_py.cpu().numpy()
t_py = timer(partial(task, A_py, 500))
t_np = timer(partial(task, A_np, 500))
print(f"PyTorch took: {t_py:0.3e}s")
print(f"Numpy took: {t_np:0.3e}s")
print(f"Numpy is {100 - t_np/t_py*100:0.2f}% faster")
|
[
"functools.partial",
"time.perf_counter",
"torch.rand"
] |
[((174, 193), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (191, 193), False, 'import time\n'), ((664, 688), 'functools.partial', 'partial', (['task', 'A_py', '(500)'], {}), '(task, A_py, 500)\n', (671, 688), False, 'from functools import partial\n'), ((707, 731), 'functools.partial', 'partial', (['task', 'A_np', '(500)'], {}), '(task, A_np, 500)\n', (714, 731), False, 'from functools import partial\n'), ((230, 249), 'time.perf_counter', 'time.perf_counter', ([], {}), '()\n', (247, 249), False, 'import time\n'), ((568, 581), 'torch.rand', 'torch.rand', (['N'], {}), '(N)\n', (578, 581), False, 'import torch\n')]
|
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.ads.google_ads.v3.proto.resources import ad_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_ad__pb2
from google.ads.google_ads.v3.proto.services import ad_service_pb2 as google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2
class AdServiceStub(object):
"""Proto file describing the Ad service.
Service to manage ads.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.GetAd = channel.unary_unary(
'/google.ads.googleads.v3.services.AdService/GetAd',
request_serializer=google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2.GetAdRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_ad__pb2.Ad.FromString,
)
self.MutateAds = channel.unary_unary(
'/google.ads.googleads.v3.services.AdService/MutateAds',
request_serializer=google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2.MutateAdsRequest.SerializeToString,
response_deserializer=google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2.MutateAdsResponse.FromString,
)
class AdServiceServicer(object):
"""Proto file describing the Ad service.
Service to manage ads.
"""
def GetAd(self, request, context):
"""Returns the requested ad in full detail.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def MutateAds(self, request, context):
"""Updates ads. Operation statuses are returned.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_AdServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'GetAd': grpc.unary_unary_rpc_method_handler(
servicer.GetAd,
request_deserializer=google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2.GetAdRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_ad__pb2.Ad.SerializeToString,
),
'MutateAds': grpc.unary_unary_rpc_method_handler(
servicer.MutateAds,
request_deserializer=google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2.MutateAdsRequest.FromString,
response_serializer=google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2.MutateAdsResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'google.ads.googleads.v3.services.AdService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
|
[
"grpc.method_handlers_generic_handler",
"grpc.unary_unary_rpc_method_handler"
] |
[((2776, 2884), 'grpc.method_handlers_generic_handler', 'grpc.method_handlers_generic_handler', (['"""google.ads.googleads.v3.services.AdService"""', 'rpc_method_handlers'], {}), "(\n 'google.ads.googleads.v3.services.AdService', rpc_method_handlers)\n", (2812, 2884), False, 'import grpc\n'), ((2050, 2355), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.GetAd'], {'request_deserializer': 'google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2.GetAdRequest.FromString', 'response_serializer': 'google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_ad__pb2.Ad.SerializeToString'}), '(servicer.GetAd, request_deserializer=\n google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2\n .GetAdRequest.FromString, response_serializer=\n google_dot_ads_dot_googleads__v3_dot_proto_dot_resources_dot_ad__pb2.Ad\n .SerializeToString)\n', (2085, 2355), False, 'import grpc\n'), ((2395, 2735), 'grpc.unary_unary_rpc_method_handler', 'grpc.unary_unary_rpc_method_handler', (['servicer.MutateAds'], {'request_deserializer': 'google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2.MutateAdsRequest.FromString', 'response_serializer': 'google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2.MutateAdsResponse.SerializeToString'}), '(servicer.MutateAds,\n request_deserializer=\n google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2\n .MutateAdsRequest.FromString, response_serializer=\n google_dot_ads_dot_googleads__v3_dot_proto_dot_services_dot_ad__service__pb2\n .MutateAdsResponse.SerializeToString)\n', (2430, 2735), False, 'import grpc\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 1 21:08:48 2019
@author: <NAME>
This file is for producing the table for the validation results (NN).
"""
import random
import pandas as pd
from tabulate import tabulate
def fmt(avg_, min_, max_, n):
return "{:.3f} ({:.3f}, {:.3f})".format(avg_[n], min_[n], max_[n])
def reg_fmt(avg_, n):
return "{:.3f}".format(avg_[n])
def sample(df, n = 5):
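    # Randomly sample n runs; track the best/worst run by test F1 and the mean of each metric over the sample.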
if len(df) == 0:
raise Exception("DF is empty.")
    num = list(range(len(df)))
    random.shuffle(num)
    to_keep = num[:n]
max_, min_ = df.iloc[to_keep[0]], df.iloc[to_keep[0]]
f1, prec, recall, token_auc, mass = 0, 0, 0, 0, 0
for k in to_keep:
row = df.iloc[k]
f1 += row['test_f1'] / n
prec += row['test_p'] / n
        recall += row['test_r'] / n
token_auc += row['test_auc'] / n
mass += row['test_evidence_token_mass'] / n
if row['test_f1'] > max_['test_f1']:
max_ = row
if row['test_f1'] < min_['test_f1']:
min_ = row
return max_, min_, {'test_f1': f1, 'test_p': prec, 'test_r': recall, 'test_auc': token_auc, 'test_evidence_token_mass': mass}
def gen_latex_table(table, names):
""" Generate latex table based on table. """
table = table_preprocess(table, names)
content = "\\begin{table*}\n"
content += tabulate(table[1:], headers = table[0], tablefmt="latex")
content += "\n\\end{table*}"
return content
def table_preprocess(table, names):
new_table = [['Model', 'Precision', 'Recall', 'F1', 'Token AUC / Mass']]
def one_row(max_, min_, avg_, n):
return [n, fmt(avg_, min_, max_, 'test_p'),
fmt(avg_, min_, max_, 'test_r'),
fmt(avg_, min_, max_, 'test_f1'),
"{} / {}".format(reg_fmt(avg_, 'test_auc'), reg_fmt(avg_, 'test_evidence_token_mass'))]
i = 0
for row in table:
max_, min_, avg_ = row
new_table.append(one_row(max_, min_, avg_, names[i]))
i += 1
return new_table
def get_headers(header_row):
""" Takes array of headers and sets up a dictionary corresponding to columns. """
ans = {}
for i in range(len(header_row)):
ans[header_row[i]] = i
return ans
def load(f = './attn_logs_val.csv'):
""" Loads the data, and gets a proper extraction of the header and data. """
df = pd.read_csv(f, header = 0)
df['attention_acceptance'].fillna("auc", inplace = True)
loh = list(df.columns)
header = get_headers(loh)
return df, header
def get_results(df, restraints):
new_df = df
for key in restraints.keys():
new_df = new_df[new_df[key] == restraints[key]]
val_f1_rows = new_df[pd.notnull(new_df['best_val_f1'])]
l_max, l_min, avg = sample(val_f1_rows)
return l_max, l_min, avg
# Mix with 3 styles of attention acceptance
attn1 = {'attn': True, 'cond_attn': False, 'attention_acceptance': 'auc', 'data_config': 'vanilla', 'pretrain_attention': 'False'}
pre_attn1 = {'attn': True, 'cond_attn': False, 'attention_acceptance': 'auc', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention'}
pre_attn2 = {'attn': True, 'cond_attn': False, 'attention_acceptance': 'entropy', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention'}
pre_attn3 = {'attn': True, 'cond_attn': False, 'attention_acceptance': 'evidence_mass', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention'}
pre_attn4 = {'attn': True, 'cond_attn': False, 'attention_acceptance': 'auc', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention_balanced'}
pre_attn5 = {'attn': True, 'cond_attn': False, 'attention_acceptance': 'entropy', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention_balanced'}
pre_attn6 = {'attn': True, 'cond_attn': False, 'attention_acceptance': 'evidence_mass', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention_balanced'}
pre_attn7 = {'attn': True, 'cond_attn': False, 'attention_acceptance': 'auc', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_max_evidence_attention'}
pre_attn8 = {'attn': True, 'cond_attn': False, 'attention_acceptance': 'entropy', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_max_evidence_attention'}
pre_attn9 = {'attn': True, 'cond_attn': False, 'attention_acceptance': 'evidence_mass', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_max_evidence_attention'}
cond_attn1 = {'attn': True, 'cond_attn': True, 'attention_acceptance': 'auc', 'data_config': 'vanilla', 'pretrain_attention': 'False'}
pre_cond_attn1 = {'attn': True, 'cond_attn': True, 'attention_acceptance': 'auc', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention'}
pre_cond_attn2 = {'attn': True, 'cond_attn': True, 'attention_acceptance': 'entropy', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention'}
pre_cond_attn3 = {'attn': True, 'cond_attn': True, 'attention_acceptance': 'evidence_mass', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention'}
pre_cond_attn4 = {'attn': True, 'cond_attn': True, 'attention_acceptance': 'auc', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention_balanced'}
pre_cond_attn5 = {'attn': True, 'cond_attn': True, 'attention_acceptance': 'entropy', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention_balanced'}
pre_cond_attn6 = {'attn': True, 'cond_attn': True, 'attention_acceptance': 'evidence_mass', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_tokenwise_attention_balanced'}
pre_cond_attn7 = {'attn': True, 'cond_attn': True, 'attention_acceptance': 'auc', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_max_evidence_attention'}
pre_cond_attn8 = {'attn': True, 'cond_attn': True, 'attention_acceptance': 'entropy', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_max_evidence_attention'}
pre_cond_attn9 = {'attn': True, 'cond_attn': True, 'attention_acceptance': 'evidence_mass', 'data_config': 'vanilla', 'pretrain_attention': 'pretrain_max_evidence_attention'}
config = [attn1,
pre_attn1, pre_attn2, pre_attn3, pre_attn4, pre_attn5, pre_attn6, pre_attn7, pre_attn8, pre_attn9,
cond_attn1,
pre_cond_attn1, pre_cond_attn2, pre_cond_attn3, pre_cond_attn4, pre_cond_attn5, pre_cond_attn6, pre_cond_attn7, pre_cond_attn8, pre_cond_attn9]
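# NOTE: the order of `config` must line up with the row labels in `names` below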
df, _ = load()
table = []
for c in config:
df_row = get_results(df, c)
table.append(df_row)
names = ['+ Attn.',
         '+ Pretrain attn. [AUC] (Tokenwise attn.)',
         '+ Pretrain attn. [Entropy] (Tokenwise attn.)',
         '+ Pretrain attn. [Evidence Mass] (Tokenwise attn.)',
         '+ Pretrain attn. [AUC] (Tokenwise attn. balanced)',
         '+ Pretrain attn. [Entropy] (Tokenwise attn. balanced)',
         '+ Pretrain attn. [Evidence Mass] (Tokenwise attn. balanced)',
         '+ Pretrain attn. [AUC] (Max evidence attn.)',
         '+ Pretrain attn. [Entropy] (Max evidence attn.)',
         '+ Pretrain attn. [Evidence Mass] (Max evidence attn.)',
         '+ Cond. attn.',
         '+ Pretrain cond. attn. [AUC] (Tokenwise attn.)',
         '+ Pretrain cond. attn. [Entropy] (Tokenwise attn.)',
         '+ Pretrain cond. attn. [Evidence Mass] (Tokenwise attn.)',
         '+ Pretrain cond. attn. [AUC] (Tokenwise attn. balanced)',
         '+ Pretrain cond. attn. [Entropy] (Tokenwise attn. balanced)',
         '+ Pretrain cond. attn. [Evidence Mass] (Tokenwise attn. balanced)',
         '+ Pretrain cond. attn. [AUC] (Max evidence attn.)',
         '+ Pretrain cond. attn. [Entropy] (Max evidence attn.)',
         '+ Pretrain cond. attn. [Evidence Mass] (Max evidence attn.)']
print(gen_latex_table(table, names))
|
[
"pandas.read_csv",
"random.shuffle",
"tabulate.tabulate",
"pandas.notnull"
] |
[((507, 526), 'random.shuffle', 'random.shuffle', (['num'], {}), '(num)\n', (521, 526), False, 'import random\n'), ((1377, 1432), 'tabulate.tabulate', 'tabulate', (['table[1:]'], {'headers': 'table[0]', 'tablefmt': '"""latex"""'}), "(table[1:], headers=table[0], tablefmt='latex')\n", (1385, 1432), False, 'from tabulate import tabulate\n'), ((2422, 2446), 'pandas.read_csv', 'pd.read_csv', (['f'], {'header': '(0)'}), '(f, header=0)\n', (2433, 2446), True, 'import pandas as pd\n'), ((2782, 2815), 'pandas.notnull', 'pd.notnull', (["new_df['best_val_f1']"], {}), "(new_df['best_val_f1'])\n", (2792, 2815), True, 'import pandas as pd\n')]
|
#########################################
#Created by <NAME> on 12 January 2021
#NOTE: This is not a finalised version
#########################################
from labelbox import Client
from LabelboxConstants import API_KEY
import wx
from wx import App, Frame
from wx import TextCtrl, StaticText, Button, ListCtrl
from wx import Point, Size, FileDialog, MessageBox
from os.path import abspath
def create_project_in_labelbox(proj_name, data_name, files):
'''
Inputs:
proj_name - String representing the name of the project
data_name - String representing the name of the dataset
files - List of strings representing a list of filenames
Returns:
Nothing. Instead, this function creates the given project in Labelbox.
'''
client = Client(API_KEY)
project = client.create_project(name = proj_name)
dataset = client.create_dataset(name = data_name, projects = project)
task = dataset.create_data_rows(files)
task.wait_till_done()
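# Example usage (hypothetical project/dataset names and file paths; requires a
# valid API_KEY in LabelboxConstants):
#   create_project_in_labelbox('Traffic Signs', 'Dashcam Frames',
#                              ['/data/frame_001.png', '/data/frame_002.png'])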
class Interface(Frame):
def __init__(self, parent, title):
super(Interface, self).__init__(parent, title=title, size=(500, 500))
# Show the window on screen
self.filepaths = []
self.setup()
self.Show()
def setup(self):
'''
Sets up the window for the user to input data
'''
#Create the text boxes for our project name and dataset name
self.project_text = StaticText(self, label = 'Project Name', pos = Point(5,15))
self.dataset_text = StaticText(self, label = 'Dataset Name', pos = Point(5,45))
self.textbox_project = TextCtrl(self, pos = Point(100,10), size = Size(150,25))
self.textbox_dataset = TextCtrl(self, pos = Point(100,40), size = Size(150,25))
#Create the buttons and windows to upload, delete, and view files
self.upload_button = Button(self, label = 'Upload Files', pos = Point(5,75))
self.delete_button = Button(self, label = 'Delete Selected Files', pos = Point(100,75))
self.delete_all_button = Button(self, label = 'Delete All Files', pos = Point(250,75))
self.file_list = ListCtrl(self, style=wx.LC_REPORT|wx.BORDER_SUNKEN, pos = Point(5,100), size = Size(450,300))
self.file_list.InsertColumn(0, 'Filename')
self.upload_button.Bind(wx.EVT_BUTTON, self.upload_files)
self.delete_button.Bind(wx.EVT_BUTTON, self.delete_files)
self.delete_all_button.Bind(wx.EVT_BUTTON, self.delete_all)
#Lastly, set up our "Create Project" button
self.create_project_button = Button(self, label = 'Create Project', size = Size(200,50), pos = Point(150,400))
self.create_project_button.Bind(wx.EVT_BUTTON, self.create_project)
def upload_files(self, event):
wildcard = "PNG files (*.png)|*.png|JPG files (*.jpg)|*.jpg|JPEG files (*.jpeg)|*.jpg"
with FileDialog(self, 'Open Photos', wildcard = wildcard, style=wx.FD_OPEN | wx.FD_FILE_MUST_EXIST | wx.FD_MULTIPLE) as fileDialog:
if fileDialog.ShowModal() == wx.ID_CANCEL:
return
pathnames = fileDialog.GetPaths()
self.update_display([abspath(path) for path in pathnames])
def delete_files(self, event):
length = self.file_list.GetItemCount()
if length == 0:
resp = MessageBox('There are no files to delete', 'Error',
wx.OK)
return
        # Collect the files that are NOT selected; these survive the deletion
        remaining = []
        for i in range(length):
            if not self.file_list.IsSelected(i):
                remaining.append(self.file_list.GetItemText(i))
        if len(remaining) == length:
            resp = MessageBox('You haven\'t selected any files to delete', 'Error',
                              wx.OK)
            return
        # Rebuild the list and path cache from the surviving files
        self.clear()
        self.update_display(remaining)
def delete_all(self, event):
if self.file_list.GetItemCount() == 0:
resp = MessageBox('There are no files to delete', 'Error',
wx.OK)
return
self.clear()
def clear(self):
self.filepaths = []
self.file_list.DeleteAllItems()
def update_display(self, paths):
for index, path in enumerate(paths):
self.filepaths.append(path)
self.file_list.InsertItem(index, path)
def create_project(self, event):
self.project = self.textbox_project.GetValue()
self.dataset = self.textbox_dataset.GetValue()
if self.project and self.dataset and self.filepaths:
try:
create_project_in_labelbox(self.project, self.dataset, self.filepaths)
resp = MessageBox('Your project was successfully created in Labelbox! Log in to set up your ontology!', 'Success!', wx.OK)
self.Destroy()
            except Exception:
resp = MessageBox('Hmm, an error was encountered while setting up your project. Try again.', 'Error', wx.OK)
else:
resp = MessageBox('You must input a project name, a dataset name, and at least one file to upload.', 'Error',
wx.OK)
if __name__ == '__main__':
# Create the application object
app = App()
Interface(None, title='Labelbox Project Creator')
app.MainLoop()
|
[
"os.path.abspath",
"wx.Point",
"labelbox.Client",
"wx.App",
"wx.FileDialog",
"wx.MessageBox",
"wx.Size"
] |
[((774, 789), 'labelbox.Client', 'Client', (['API_KEY'], {}), '(API_KEY)\n', (780, 789), False, 'from labelbox import Client\n'), ((5294, 5299), 'wx.App', 'App', ([], {}), '()\n', (5297, 5299), False, 'from wx import App, Frame\n'), ((2903, 3017), 'wx.FileDialog', 'FileDialog', (['self', '"""Open Photos"""'], {'wildcard': 'wildcard', 'style': '(wx.FD_OPEN | wx.FD_FILE_MUST_EXIST | wx.FD_MULTIPLE)'}), "(self, 'Open Photos', wildcard=wildcard, style=wx.FD_OPEN | wx.\n FD_FILE_MUST_EXIST | wx.FD_MULTIPLE)\n", (2913, 3017), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((3355, 3413), 'wx.MessageBox', 'MessageBox', (['"""There are no files to delete"""', '"""Error"""', 'wx.OK'], {}), "('There are no files to delete', 'Error', wx.OK)\n", (3365, 3413), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((3691, 3761), 'wx.MessageBox', 'MessageBox', (['"""You haven\'t selected any files to delete"""', '"""Error"""', 'wx.OK'], {}), '("You haven\'t selected any files to delete", \'Error\', wx.OK)\n', (3701, 3761), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((3991, 4049), 'wx.MessageBox', 'MessageBox', (['"""There are no files to delete"""', '"""Error"""', 'wx.OK'], {}), "('There are no files to delete', 'Error', wx.OK)\n", (4001, 4049), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((5073, 5192), 'wx.MessageBox', 'MessageBox', (['"""You must input a project name, a dataset name, and at least one file to upload."""', '"""Error"""', 'wx.OK'], {}), "(\n 'You must input a project name, a dataset name, and at least one file to upload.'\n , 'Error', wx.OK)\n", (5083, 5192), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((1493, 1505), 'wx.Point', 'Point', (['(5)', '(15)'], {}), '(5, 15)\n', (1498, 1505), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((1581, 1593), 'wx.Point', 'Point', (['(5)', '(45)'], {}), '(5, 45)\n', (1586, 1593), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((1646, 1660), 'wx.Point', 'Point', (['(100)', '(10)'], {}), '(100, 10)\n', (1651, 1660), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((1668, 1681), 'wx.Size', 'Size', (['(150)', '(25)'], {}), '(150, 25)\n', (1672, 1681), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((1734, 1748), 'wx.Point', 'Point', (['(100)', '(40)'], {}), '(100, 40)\n', (1739, 1748), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((1756, 1769), 'wx.Size', 'Size', (['(150)', '(25)'], {}), '(150, 25)\n', (1760, 1769), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((1925, 1937), 'wx.Point', 'Point', (['(5)', '(75)'], {}), '(5, 75)\n', (1930, 1937), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((2019, 2033), 'wx.Point', 'Point', (['(100)', '(75)'], {}), '(100, 75)\n', (2024, 2033), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((2114, 2128), 'wx.Point', 'Point', (['(250)', '(75)'], {}), '(250, 75)\n', (2119, 2128), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((2212, 2225), 'wx.Point', 'Point', (['(5)', '(100)'], {}), '(5, 100)\n', (2217, 2225), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((2233, 2247), 'wx.Size', 'Size', (['(450)', '(300)'], {}), '(450, 300)\n', (2237, 2247), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((2643, 2656), 'wx.Size', 'Size', (['(200)', '(50)'], {}), '(200, 50)\n', (2647, 2656), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((2663, 2678), 'wx.Point', 'Point', (['(150)', '(400)'], {}), '(150, 400)\n', (2668, 2678), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((4748, 4873), 'wx.MessageBox', 'MessageBox', (['"""Your project was successfully created in Labelbox! Log in to set up your ontology!"""', '"""Success!"""', 'wx.OK'], {}), "(\n 'Your project was successfully created in Labelbox! Log in to set up your ontology!'\n , 'Success!', wx.OK)\n", (4758, 4873), False, 'from wx import Point, Size, FileDialog, MessageBox\n'), ((3187, 3200), 'os.path.abspath', 'abspath', (['path'], {}), '(path)\n', (3194, 3200), False, 'from os.path import abspath\n'), ((4938, 5048), 'wx.MessageBox', 'MessageBox', (['"""Hmm, an error was encountered while setting up your project. Try again."""', '"""Error"""', 'wx.OK'], {}), "(\n 'Hmm, an error was encountered while setting up your project. Try again.',\n 'Error', wx.OK)\n", (4948, 5048), False, 'from wx import Point, Size, FileDialog, MessageBox\n')]
|
# -*- coding:utf-8 -*-
import logging
# Configure logging: DEBUG and above is written to the log file
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m-%d %H:%M',
filename='demo/error_exception/myapp.log',
filemode='w')
# Define a handler that prints INFO-and-above records to sys.stderr
console = logging.StreamHandler()
console.setLevel(logging.INFO)
# Set the console log format
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
console.setFormatter(formatter)
# Attach the console handler to the root logger
logging.getLogger('').addHandler(console)
logging.info('Jackdaws love my big sphinx of quartz.')
logger1 = logging.getLogger('myapp.area1')
logger2 = logging.getLogger('myapp.area2')
logger1.debug('Quick zephyrs blow, vexing daft Jim.')
logger1.info('How quickly daft jumping zebras vex.')
logger2.warning('Jail zesty vixen who grabbed pay from quack.')
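# Expected console output (the console handler filters at INFO, so the DEBUG
# record from logger1 appears only in myapp.log):
#   root        : INFO     Jackdaws love my big sphinx of quartz.
#   myapp.area1 : INFO     How quickly daft jumping zebras vex.
#   myapp.area2 : WARNING  Jail zesty vixen who grabbed pay from quack.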
|
[
"logging.basicConfig",
"logging.StreamHandler",
"logging.Formatter",
"logging.info",
"logging.getLogger"
] |
[((48, 237), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG', 'format': '"""%(asctime)s%(name)-12s%(levelname)-8s%(message)s"""', 'datefmt': '"""%m-%d %H:%M"""', 'filename': '"""demo/error_exception/myapp.log"""', 'filemode': '"""w"""'}), "(level=logging.DEBUG, format=\n '%(asctime)s%(name)-12s%(levelname)-8s%(message)s', datefmt=\n '%m-%d %H:%M', filename='demo/error_exception/myapp.log', filemode='w')\n", (67, 237), False, 'import logging\n'), ((357, 380), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (378, 380), False, 'import logging\n'), ((435, 496), 'logging.Formatter', 'logging.Formatter', (['"""%(name)-12s: %(levelname)-8s %(message)s"""'], {}), "('%(name)-12s: %(levelname)-8s %(message)s')\n", (452, 496), False, 'import logging\n'), ((610, 664), 'logging.info', 'logging.info', (['"""Jackdaws love my big sphinx of quartz."""'], {}), "('Jackdaws love my big sphinx of quartz.')\n", (622, 664), False, 'import logging\n'), ((676, 708), 'logging.getLogger', 'logging.getLogger', (['"""myapp.area1"""'], {}), "('myapp.area1')\n", (693, 708), False, 'import logging\n'), ((719, 751), 'logging.getLogger', 'logging.getLogger', (['"""myapp.area2"""'], {}), "('myapp.area2')\n", (736, 751), False, 'import logging\n'), ((567, 588), 'logging.getLogger', 'logging.getLogger', (['""""""'], {}), "('')\n", (584, 588), False, 'import logging\n')]
|
"""Class to run MR on SIMBAD results"""
from __future__ import division
__author__ = "<NAME>"
__date__ = "09 Mar 2017"
__version__ = "1.0"
import logging
import os
from pyjob.script import ScriptCollector, Script
from pyjob.exception import PyJobExecutionError
from simbad.mr import anomalous_util
from simbad.mr.options import MrPrograms, RefPrograms
from simbad.parsers import molrep_parser
from simbad.parsers import mtz_parser
from simbad.parsers import phaser_parser
from simbad.parsers import refmac_parser
from simbad.util import source_ccp4
from simbad.util import submit_chunk
from simbad.util.pdb_util import PdbStructure
from simbad.util.matthews_prob import MatthewsProbability, SolventContent
from simbad.core.lattice_score import LatticeSearchResult
from simbad.core.amore_score import AmoreRotationScore
from simbad.core.phaser_score import PhaserRotationScore
from simbad.core.mr_score import MrScore
logger = logging.getLogger(__name__)
EXPORT = "SET" if os.name == "nt" else "export"
CMD_PREFIX = "call" if os.name == "nt" else ""
class MrSubmit(object):
"""Class to run MR on a defined set of models
Attributes
----------
mtz : str
Path to the input MTZ file
mr_program : str
Name of the molecular replacement program to use
refine_program : str
Name of the refinement program to use
refine_type : str
Type of refinement to run (None | jelly)
refine_cycles : int
The number of refinement cycles (default: 30)
output_dir : str
Path to the directory to output results
    sgalternative : str
        Specify whether to try alternative space groups (all | enant)
    nmol : int
        The number of molecules to search for (0 = estimate via the Matthews probability)
    tmp_dir : str
        Directory set as CCP4_SCR while the jobs run
    timeout : int
        The time in minutes before phaser is killed
Examples
--------
>>> from simbad.mr import MrSubmit
    >>> MR = MrSubmit('<mtz>', '<mr_program>', '<refine_program>', '<refine_type>',
    ...                '<refine_cycles>', '<output_dir>', '<tmp_dir>', '<timeout>')
    >>> MR.submit_jobs('<results>', '<nproc>', '<process_all>', '<submit_qtype>',
    ...                '<submit_queue>', '<monitor>')
If a solution is found and process_all is not set, the queued jobs will be terminated.
"""
def __init__(self, mtz, mr_program, refine_program, refine_type, refine_cycles, output_dir, tmp_dir, timeout,
nmol=0, sgalternative=None):
"""Initialise MrSubmit class"""
self.input_file = None
self._process_all = None
self._sgalternative = None
self._mtz = None
self._mtz_obj = None
self._mr_program = None
self._nmol = None
self._output_dir = None
self._refine_program = None
self._refine_type = None
self._refine_cycles = None
self._search_results = []
self._timeout = None
self.dano_columns = []
self.sgalternative = sgalternative
self.mat_prob = None
self.mtz = mtz
self.mr_program = mr_program
self.mute = False
self.nmol = nmol
self.output_dir = output_dir
self.refine_program = refine_program
self.refine_type = refine_type
self.refine_cycles = refine_cycles
self.sol_cont = None
self.tmp_dir = tmp_dir
self.timeout = timeout
@property
def mtz(self):
"""The input MTZ file"""
return self._mtz
@mtz.setter
def mtz(self, mtz):
"""Define the input MTZ file"""
self._mtz = os.path.abspath(mtz)
self._mtz_obj = mtz_parser.MtzParser(mtz)
self._mtz_obj.parse()
@property
def mtz_obj(self):
"""Column object containing info on input mtz"""
return self._mtz_obj
@property
def nmol(self):
"""The number of molecules to look for"""
return self._nmol
@nmol.setter
def nmol(self, nmol):
"""Define the number of molecules to look for"""
self._nmol = nmol
@property
def search_results(self):
"""The results from the amore rotation search"""
return sorted(self._search_results, key=lambda x: float(x.final_r_free), reverse=False)
@property
def sgalternative(self):
"""Whether to check for alternative space groups"""
return self._sgalternative
@sgalternative.setter
def sgalternative(self, sgalternative):
"""Define whether to check for alternative space groups"""
if sgalternative:
self._sgalternative = sgalternative.lower()
else:
self._sgalternative = sgalternative
@property
def mr_python_module(self):
"""The MR python module"""
return MrPrograms[self.mr_program].value
@property
def mr_program(self):
"""The molecular replacement program to use"""
return self._mr_program
@mr_program.setter
def mr_program(self, mr_program):
"""Define the molecular replacement program to use"""
if mr_program.lower() in MrPrograms.__members__:
self._mr_program = mr_program.lower()
else:
msg = "Unknown MR program!"
raise RuntimeError(msg)
@property
def refine_python_module(self):
"""The Refinement python module"""
return RefPrograms[self.refine_program].value
@property
def refine_program(self):
"""The refinement program to use"""
return self._refine_program
@refine_program.setter
def refine_program(self, refine_program):
"""Define the refinement program to use"""
if refine_program.lower() in RefPrograms.__members__:
            self._refine_program = refine_program.lower()
else:
msg = "Unknown Refinement program!"
raise RuntimeError(msg)
@property
def refine_type(self):
"""The refinement type to use"""
return self._refine_type
@refine_type.setter
def refine_type(self, refine_type):
"""Define the refinement type to use"""
self._refine_type = refine_type
@property
def refine_cycles(self):
"""The number of refinement cycles to use"""
return self._refine_cycles
@refine_cycles.setter
def refine_cycles(self, refine_cycles):
"""Define the number of refinement cycles to use"""
self._refine_cycles = refine_cycles
@property
def output_dir(self):
"""The path to the output directory"""
return self._output_dir
@output_dir.setter
def output_dir(self, output_dir):
"""Define the output directory"""
self._output_dir = output_dir
@property
def timeout(self):
"""The time in minutes before phaser is killed"""
return self._timeout
@timeout.setter
def timeout(self, timeout):
"""Define the time in minutes before phaser should be killed"""
self._timeout = timeout
def submit_jobs(self, results, nproc=1, process_all=False, submit_qtype=None, submit_queue=False, monitor=None):
"""Submit jobs to run in serial or on a cluster
Parameters
----------
results : class
Results from :obj: '_LatticeParameterScore' or :obj: '_AmoreRotationScore'
nproc : int, optional
Number of processors to use [default: 1]
        process_all : bool, optional
            Run MR on all models instead of stopping at the first success [default: False]
        submit_qtype : str
            The cluster submission queue type
        submit_queue : str
            The queue to submit to on the cluster
        monitor : str
            Monitor for the cluster submission [default: None]
Returns
-------
file
Output pdb from mr
file
Output hkl from mr - if using phaser
file
Output log file from mr program
file
Output pdb from refinement
file
Output hkl from refinement
file
Output log file from refinement program
"""
if not os.path.isdir(self.output_dir):
os.mkdir(self.output_dir)
if self.existing_solution(results):
return
self.sol_cont = SolventContent(self.mtz_obj.cell.volume_per_image())
self.mat_prob = MatthewsProbability(self.mtz_obj.cell.volume_per_image())
run_files = []
collector = ScriptCollector(None)
for result in results:
script, run_file = self.generate_script(result)
collector.add(script)
run_files.append(run_file)
if not self.mute:
logger.info("Running %s Molecular Replacement", self.mr_program)
input_arguments = [collector, self.output_dir, nproc, "simbad_mr", submit_qtype, submit_queue, True, monitor]
if process_all:
input_arguments.append(None)
else:
input_arguments.append(mr_succeeded_log)
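        # mr_succeeded_log acts as an early-stop callback: once one job passes the
        # R-factor criteria, the remaining queued jobs are terminated.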
submit_chunk(*input_arguments, timeout=self.timeout)
mr_results = []
mr_pdbouts, mr_logfiles, ref_logfiles = zip(*run_files)
for result, mr_logfile, mr_pdbout, ref_logfile in zip(results, mr_logfiles, mr_pdbouts, ref_logfiles):
if not os.path.isfile(mr_logfile):
logger.debug("Cannot find %s MR log file: %s", self.mr_program, mr_logfile)
continue
elif not os.path.isfile(ref_logfile):
logger.debug("Cannot find %s refine log file: %s", self.mr_program, ref_logfile)
continue
elif not os.path.isfile(mr_pdbout):
logger.debug("Cannot find %s output file: %s", self.mr_program, mr_pdbout)
continue
score = MrScore(pdb_code=result.pdb_code)
if self.mr_program == "molrep":
mp = molrep_parser.MolrepParser(mr_logfile)
score.molrep_score = mp.score
score.molrep_tfscore = mp.tfscore
elif self.mr_program == "phaser":
pp = phaser_parser.PhaserParser(mr_logfile)
score.phaser_tfz = pp.tfz
score.phaser_llg = pp.llg
score.phaser_rfz = pp.rfz
if self.anomalous_data_present():
try:
work_dir = os.path.join(self.output_dir, result.pdb_code, "anomalous")
anode = anomalous_util.AnodeSearch(self.mtz, work_dir)
input_model = os.path.join(self.output_dir, result.pdb_code, "mr",
self.mr_program, "{0}_mr_output.pdb".format(result.pdb_code))
anode.run(input_model)
a = anode.search_results()
score.dano_peak_height = a.dano_peak_height
score.nearest_atom = a.nearest_atom
self.dano_columns = ["dano_peak_height", "nearest_atom"]
except RuntimeError:
logger.debug("RuntimeError: Unable to create DANO map for: %s", result.pdb_code)
except PyJobExecutionError:
logger.debug("PyJobExecutionError: Unable to run exectute anode for: %s", result.pdb_code)
if os.path.isfile(ref_logfile):
rp = refmac_parser.RefmacParser(ref_logfile)
score.final_r_free = rp.final_r_free
score.final_r_fact = rp.final_r_fact
else:
logger.debug("Cannot find %s log file: %s", self.refine_program, ref_logfile)
mr_results += [score]
self._search_results = mr_results
def generate_script(self, result):
mr_workdir = os.path.join(self.output_dir, result.pdb_code, "mr", self.mr_program)
mr_logfile = os.path.join(mr_workdir, "{0}_mr.log".format(result.pdb_code))
mr_pdbout = os.path.join(mr_workdir, "{0}_mr_output.pdb".format(result.pdb_code))
mr_hklout = os.path.join(mr_workdir, "{0}_mr_output.mtz".format(result.pdb_code))
ref_workdir = os.path.join(mr_workdir, "refine")
ref_hklout = os.path.join(ref_workdir, "{0}_refinement_output.mtz".format(result.pdb_code))
ref_logfile = os.path.join(ref_workdir, "{0}_ref.log".format(result.pdb_code))
ref_pdbout = os.path.join(ref_workdir, "{0}_refinement_output.pdb".format(result.pdb_code))
if isinstance(result, (AmoreRotationScore, PhaserRotationScore)):
pdb_struct = PdbStructure.from_file(result.dat_path)
mr_pdbin = os.path.join(self.output_dir, result.pdb_code + ".pdb")
elif isinstance(result, LatticeSearchResult):
pdb_struct = PdbStructure.from_file(result.pdb_path)
mr_pdbin = result.pdb_path
else:
raise ValueError("Do not recognize result container")
if self.nmol > 0:
solvent_content = 0.5
pdb_struct.save(mr_pdbin)
else:
solvent_content = self.sol_cont.calculate_from_struct(pdb_struct)
if solvent_content > 0.3:
solvent_content, self.nmol = self.mat_prob.calculate_from_struct(pdb_struct)
pdb_struct.save(mr_pdbin)
else:
pdb_struct.keep_first_chain_only()
pdb_struct.save(mr_pdbin)
solvent_content, self.nmol = self.mat_prob.calculate_from_struct(pdb_struct)
msg = (
"%s is predicted to be too large to fit in the unit "
+ "cell with a solvent content of at least 30 percent, "
+ "therefore MR will use only the first chain"
)
logger.debug(msg, result.pdb_code)
            if solvent_content < 0.2:
                msg = (
                    "%s is predicted to have a solvent content below 20 percent, "
                    + "and therefore will be removed from the search"
                )
                raise ValueError(msg % result.pdb_code)
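        # nmol and solvent_content are now set, either user-supplied or estimated
        # from the Matthews probability for this model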
mr_cmd = [
CMD_PREFIX,
"ccp4-python",
"-m",
self.mr_python_module,
"-hklin",
self.mtz,
"-hklout",
mr_hklout,
"-pdbin",
mr_pdbin,
"-pdbout",
mr_pdbout,
"-logfile",
mr_logfile,
"-work_dir",
mr_workdir,
"-nmol",
self.nmol,
"-sgalternative",
self.sgalternative,
]
if self.mr_program == "molrep":
mr_cmd += ["-space_group", "".join(self.mtz_obj.spacegroup_symbol.split())]
elif self.mr_program == "phaser":
mr_cmd += [
"-i",
self.mtz_obj.i,
"-sigi",
self.mtz_obj.sigi,
"-f",
self.mtz_obj.f,
"-sigf",
self.mtz_obj.sigf,
"-solvent",
solvent_content,
"-timeout",
self.timeout,
]
if isinstance(result, LatticeSearchResult):
mr_cmd += ["-autohigh", 4.0, "-hires", 5.0]
ref_cmd = [
CMD_PREFIX,
"ccp4-python",
"-m",
self.refine_python_module,
"-pdbin",
mr_pdbout,
"-pdbout",
ref_pdbout,
"-hklin",
mr_hklout,
"-hklout",
ref_hklout,
"-logfile",
ref_logfile,
"-work_dir",
ref_workdir,
"-ncyc",
self.refine_cycles,
]
if self.refine_program == "refmac5":
ref_cmd += ["-refinement_type", self.refine_type]
# ====
# Create a run script - prefix __needs__ to contain mr_program so we can find log
# Leave order of this as SGE does not like scripts with numbers as first char
# ====
prefix, stem = self.mr_program + "_", result.pdb_code
ccp4_scr = os.environ["CCP4_SCR"]
if self.tmp_dir:
tmp_dir = os.path.join(self.tmp_dir)
else:
tmp_dir = os.path.join(self.output_dir)
source = source_ccp4()
cmd = [
[source],
[EXPORT, "CCP4_SCR=" + tmp_dir],
mr_cmd + [os.linesep],
ref_cmd + [os.linesep],
[EXPORT, "CCP4_SCR=" + ccp4_scr],
]
run_script = Script(directory=self.output_dir, prefix=prefix, stem=stem)
for c in cmd:
run_script.append(" ".join(map(str, c)))
run_files = (mr_pdbout, mr_logfile, ref_logfile)
return run_script, run_files
def existing_solution(self, results):
"""Function to check if a solution is has already been found
Parameters
----------
results : class
Results from :obj: '_LatticeParameterScore' or :obj: '_AmoreRotationScore'
Returns
-------
bool
True/False depending on whether a solution is found amongst results
"""
for result in results:
mr_workdir = os.path.join(self.output_dir, result.pdb_code, "mr", self.mr_program)
mr_logfile = os.path.join(mr_workdir, "{0}_mr.log".format(result.pdb_code))
ref_workdir = os.path.join(mr_workdir, "refine")
ref_logfile = os.path.join(ref_workdir, "{0}_ref.log".format(result.pdb_code))
if os.path.isfile(ref_logfile):
rp = refmac_parser.RefmacParser(ref_logfile)
if _mr_job_succeeded(rp.final_r_fact, rp.final_r_free):
score = MrScore(pdb_code=result.pdb_code)
if self.mr_program == "molrep":
mp = molrep_parser.MolrepParser(mr_logfile)
score.molrep_score = mp.score
score.molrep_tfscore = mp.tfscore
elif self.mr_program == "phaser":
pp = phaser_parser.PhaserParser(mr_logfile)
score.phaser_tfz = pp.tfz
score.phaser_llg = pp.llg
score.phaser_rfz = pp.rfz
score.final_r_free = rp.final_r_free
score.final_r_fact = rp.final_r_fact
self._search_results = [score]
return True
return False
def anomalous_data_present(self):
"""Function to check if there is anomalous data present in the input MTZ
Returns
-------
bool
True/False depending on whether anomalous data is present
"""
        # Anomalous signal is indicated by an anomalous-difference (dp) column or
        # Friedel-pair (i_plus / f_plus) columns in the MTZ
        return bool(self.mtz_obj.dp or self.mtz_obj.i_plus or self.mtz_obj.f_plus)
def summarize(self, csv_file):
"""Summarize the search results
Parameters
----------
csv_file : str
The path for a backup CSV file
        Raises
        ------
        RuntimeError
            If no results were found
        """
from simbad.util import summarize_result
columns = []
if self.mr_program == "molrep":
columns += ["molrep_score", "molrep_tfscore"]
elif self.mr_program == "phaser":
columns += ["phaser_tfz", "phaser_llg", "phaser_rfz"]
columns += ["final_r_fact", "final_r_free"]
if self.anomalous_data_present():
columns += self.dano_columns
summarize_result(self.search_results, csv_file=csv_file, columns=columns)
def _mr_job_succeeded(r_fact, r_free):
"""Check values for job success"""
return r_fact < 0.45 and r_free < 0.45
def _refinement_succeeded(r_fact, r_free):
"""Check values for job success"""
return r_fact < 0.45 and r_free < 0.45
def _phaser_succeeded(llg, tfz):
"""Check values for job success"""
return llg > 120 and tfz > 8
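# NOTE: the R-factor (< 0.45) and phaser (LLG > 120, TFZ > 8) cut-offs above are
# heuristic success criteria; TFZ > 8 is the usual "definitely solved" guideline.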
def mr_succeeded_log(log):
"""Check a Molecular Replacement job for it's success
Parameters
----------
log : str
The path to a log file
Returns
-------
bool
Success status of the MR run
"""
mr_prog, pdb = os.path.basename(log).replace(".log", "").split("_", 1)
refmac_log = os.path.join(os.path.dirname(log), pdb, "mr", mr_prog, "refine", pdb + "_ref.log")
if os.path.isfile(refmac_log):
rp = refmac_parser.RefmacParser(refmac_log)
return _mr_job_succeeded(rp.final_r_fact, rp.final_r_free)
return False
def mr_succeeded_csvfile(f):
"""Check a Molecular Replacement job for it's success
Parameters
----------
f : str
        The path to a backup CSV file of MR results
Returns
-------
bool
Success status of the MR run
"""
import pandas as pd
df = pd.read_csv(f)
try:
data = zip(df.final_r_fact.tolist(), df.final_r_free.tolist(), df.phaser_llg.tolist(), df.phaser_tfz.tolist())
return any(_refinement_succeeded(rfact, rfree) or
_phaser_succeeded(llg, tfz) for rfact, rfree, llg, tfz in data)
except AttributeError:
data = zip(df.final_r_fact.tolist(), df.final_r_free.tolist())
return any(_refinement_succeeded(rfact, rfree) for rfact, rfree in data)
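# Example (hypothetical path): check a backup results file for any successful run
#   if mr_succeeded_csvfile('simbad_mr_results.csv'):
#       print('A molecular replacement solution was found')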
|
[
"os.mkdir",
"pandas.read_csv",
"simbad.core.mr_score.MrScore",
"simbad.util.pdb_util.PdbStructure.from_file",
"os.path.isfile",
"simbad.parsers.refmac_parser.RefmacParser",
"os.path.join",
"simbad.parsers.molrep_parser.MolrepParser",
"simbad.util.source_ccp4",
"os.path.abspath",
"pyjob.script.Script",
"os.path.dirname",
"simbad.mr.anomalous_util.AnodeSearch",
"simbad.parsers.phaser_parser.PhaserParser",
"simbad.parsers.mtz_parser.MtzParser",
"os.path.basename",
"pyjob.script.ScriptCollector",
"simbad.util.summarize_result",
"simbad.util.submit_chunk",
"os.path.isdir",
"logging.getLogger"
] |
[((933, 960), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (950, 960), False, 'import logging\n'), ((20288, 20314), 'os.path.isfile', 'os.path.isfile', (['refmac_log'], {}), '(refmac_log)\n', (20302, 20314), False, 'import os\n'), ((20719, 20733), 'pandas.read_csv', 'pd.read_csv', (['f'], {}), '(f)\n', (20730, 20733), True, 'import pandas as pd\n'), ((3439, 3459), 'os.path.abspath', 'os.path.abspath', (['mtz'], {}), '(mtz)\n', (3454, 3459), False, 'import os\n'), ((3484, 3509), 'simbad.parsers.mtz_parser.MtzParser', 'mtz_parser.MtzParser', (['mtz'], {}), '(mtz)\n', (3504, 3509), False, 'from simbad.parsers import mtz_parser\n'), ((8233, 8254), 'pyjob.script.ScriptCollector', 'ScriptCollector', (['None'], {}), '(None)\n', (8248, 8254), False, 'from pyjob.script import ScriptCollector, Script\n'), ((8784, 8836), 'simbad.util.submit_chunk', 'submit_chunk', (['*input_arguments'], {'timeout': 'self.timeout'}), '(*input_arguments, timeout=self.timeout)\n', (8796, 8836), False, 'from simbad.util import submit_chunk\n'), ((11497, 11566), 'os.path.join', 'os.path.join', (['self.output_dir', 'result.pdb_code', '"""mr"""', 'self.mr_program'], {}), "(self.output_dir, result.pdb_code, 'mr', self.mr_program)\n", (11509, 11566), False, 'import os\n'), ((11854, 11888), 'os.path.join', 'os.path.join', (['mr_workdir', '"""refine"""'], {}), "(mr_workdir, 'refine')\n", (11866, 11888), False, 'import os\n'), ((16032, 16045), 'simbad.util.source_ccp4', 'source_ccp4', ([], {}), '()\n', (16043, 16045), False, 'from simbad.util import source_ccp4\n'), ((16278, 16337), 'pyjob.script.Script', 'Script', ([], {'directory': 'self.output_dir', 'prefix': 'prefix', 'stem': 'stem'}), '(directory=self.output_dir, prefix=prefix, stem=stem)\n', (16284, 16337), False, 'from pyjob.script import ScriptCollector, Script\n'), ((19434, 19507), 'simbad.util.summarize_result', 'summarize_result', (['self.search_results'], {'csv_file': 'csv_file', 'columns': 'columns'}), '(self.search_results, csv_file=csv_file, columns=columns)\n', (19450, 19507), False, 'from simbad.util import summarize_result\n'), ((20211, 20231), 'os.path.dirname', 'os.path.dirname', (['log'], {}), '(log)\n', (20226, 20231), False, 'import os\n'), ((20329, 20367), 'simbad.parsers.refmac_parser.RefmacParser', 'refmac_parser.RefmacParser', (['refmac_log'], {}), '(refmac_log)\n', (20355, 20367), False, 'from simbad.parsers import refmac_parser\n'), ((7895, 7925), 'os.path.isdir', 'os.path.isdir', (['self.output_dir'], {}), '(self.output_dir)\n', (7908, 7925), False, 'import os\n'), ((7939, 7964), 'os.mkdir', 'os.mkdir', (['self.output_dir'], {}), '(self.output_dir)\n', (7947, 7964), False, 'import os\n'), ((9558, 9591), 'simbad.core.mr_score.MrScore', 'MrScore', ([], {'pdb_code': 'result.pdb_code'}), '(pdb_code=result.pdb_code)\n', (9565, 9591), False, 'from simbad.core.mr_score import MrScore\n'), ((11051, 11078), 'os.path.isfile', 'os.path.isfile', (['ref_logfile'], {}), '(ref_logfile)\n', (11065, 11078), False, 'import os\n'), ((12276, 12315), 'simbad.util.pdb_util.PdbStructure.from_file', 'PdbStructure.from_file', (['result.dat_path'], {}), '(result.dat_path)\n', (12298, 12315), False, 'from simbad.util.pdb_util import PdbStructure\n'), ((12339, 12394), 'os.path.join', 'os.path.join', (['self.output_dir', "(result.pdb_code + '.pdb')"], {}), "(self.output_dir, result.pdb_code + '.pdb')\n", (12351, 12394), False, 'import os\n'), ((15921, 15947), 'os.path.join', 'os.path.join', (['self.tmp_dir'], {}), '(self.tmp_dir)\n', (15933, 15947), False, 'import os\n'), ((15984, 16013), 'os.path.join', 'os.path.join', (['self.output_dir'], {}), '(self.output_dir)\n', (15996, 16013), False, 'import os\n'), ((16965, 17034), 'os.path.join', 'os.path.join', (['self.output_dir', 'result.pdb_code', '"""mr"""', 'self.mr_program'], {}), "(self.output_dir, result.pdb_code, 'mr', self.mr_program)\n", (16977, 17034), False, 'import os\n'), ((17149, 17183), 'os.path.join', 'os.path.join', (['mr_workdir', '"""refine"""'], {}), "(mr_workdir, 'refine')\n", (17161, 17183), False, 'import os\n'), ((17290, 17317), 'os.path.isfile', 'os.path.isfile', (['ref_logfile'], {}), '(ref_logfile)\n', (17304, 17317), False, 'import os\n'), ((9056, 9082), 'os.path.isfile', 'os.path.isfile', (['mr_logfile'], {}), '(mr_logfile)\n', (9070, 9082), False, 'import os\n'), ((9658, 9696), 'simbad.parsers.molrep_parser.MolrepParser', 'molrep_parser.MolrepParser', (['mr_logfile'], {}), '(mr_logfile)\n', (9684, 9696), False, 'from simbad.parsers import molrep_parser\n'), ((11101, 11140), 'simbad.parsers.refmac_parser.RefmacParser', 'refmac_parser.RefmacParser', (['ref_logfile'], {}), '(ref_logfile)\n', (11127, 11140), False, 'from simbad.parsers import refmac_parser\n'), ((12474, 12513), 'simbad.util.pdb_util.PdbStructure.from_file', 'PdbStructure.from_file', (['result.pdb_path'], {}), '(result.pdb_path)\n', (12496, 12513), False, 'from simbad.util.pdb_util import PdbStructure\n'), ((17340, 17379), 'simbad.parsers.refmac_parser.RefmacParser', 'refmac_parser.RefmacParser', (['ref_logfile'], {}), '(ref_logfile)\n', (17366, 17379), False, 'from simbad.parsers import refmac_parser\n'), ((9222, 9249), 'os.path.isfile', 'os.path.isfile', (['ref_logfile'], {}), '(ref_logfile)\n', (9236, 9249), False, 'import os\n'), ((9860, 9898), 'simbad.parsers.phaser_parser.PhaserParser', 'phaser_parser.PhaserParser', (['mr_logfile'], {}), '(mr_logfile)\n', (9886, 9898), False, 'from simbad.parsers import phaser_parser\n'), ((10124, 10183), 'os.path.join', 'os.path.join', (['self.output_dir', 'result.pdb_code', '"""anomalous"""'], {}), "(self.output_dir, result.pdb_code, 'anomalous')\n", (10136, 10183), False, 'import os\n'), ((10212, 10258), 'simbad.mr.anomalous_util.AnodeSearch', 'anomalous_util.AnodeSearch', (['self.mtz', 'work_dir'], {}), '(self.mtz, work_dir)\n', (10238, 10258), False, 'from simbad.mr import anomalous_util\n'), ((17480, 17513), 'simbad.core.mr_score.MrScore', 'MrScore', ([], {'pdb_code': 'result.pdb_code'}), '(pdb_code=result.pdb_code)\n', (17487, 17513), False, 'from simbad.core.mr_score import MrScore\n'), ((18045, 18084), 'simbad.parsers.refmac_parser.RefmacParser', 'refmac_parser.RefmacParser', (['ref_logfile'], {}), '(ref_logfile)\n', (18071, 18084), False, 'from simbad.parsers import refmac_parser\n'), ((20125, 20146), 'os.path.basename', 'os.path.basename', (['log'], {}), '(log)\n', (20141, 20146), False, 'import os\n'), ((9394, 9419), 'os.path.isfile', 'os.path.isfile', (['mr_pdbout'], {}), '(mr_pdbout)\n', (9408, 9419), False, 'import os\n'), ((17596, 17634), 'simbad.parsers.molrep_parser.MolrepParser', 'molrep_parser.MolrepParser', (['mr_logfile'], {}), '(mr_logfile)\n', (17622, 17634), False, 'from simbad.parsers import molrep_parser\n'), ((17830, 17868), 'simbad.parsers.phaser_parser.PhaserParser', 'phaser_parser.PhaserParser', (['mr_logfile'], {}), '(mr_logfile)\n', (17856, 17868), False, 'from simbad.parsers import phaser_parser\n')]
|
"""Visualize the Fibonacci sequence in binary.
This script plots the Fibonacci sequence in binary form; the idea comes from
https://mathworld.wolfram.com/FibonacciNumber.html
and
https://www.maa.org/editorial/mathgames
This script depends on the dataset `fibonacci.dat`, which is hosted on Zenodo:
https://zenodo.org/record/5187276/files/fibonacci.dat
The instructions for downloading this file are specified in the `Snakefile`
at the top level of the repository.
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib
from pathlib import Path
# Path to the "data" directory
DATA = Path(__file__).parents[1].absolute() / "data"
# Read the Fibonacci numbers
with open(DATA / "fibonacci.dat", "r") as f:
n = [int(l) for l in f.readlines()]
# The dimensions of the image we'll plot
N = len(n)
B = len("{:b}".format(n[-1]))
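# B is the bit-width of the largest (last) Fibonacci number in the file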
# Cast each number to binary and then to an array of bits
b = np.zeros((N, B), dtype=int)
b[0] = np.zeros(B)
b[1] = np.zeros(B)
b[1, -1] = 1
for i in range(2, N):
bi = list("{:b}".format(n[i]))
b[i, -len(bi) :] = bi
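# e.g. n[i] = 13 -> "1101" -> the last four columns of row i become [1, 1, 0, 1]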
# Plot the Fibonacci sequence in binary; idea from
# https://mathworld.wolfram.com/FibonacciNumber.html and
# https://www.maa.org/editorial/mathgames
fig, ax = plt.subplots(figsize=(6, 6))
cmap = matplotlib.colors.ListedColormap(["white", "C0"])
ax.imshow(b, interpolation="nearest", cmap=cmap, aspect="auto")
ax.axis("off")
fig.savefig("fibonacci.pdf", bbox_inches="tight")
|
[
"pathlib.Path",
"numpy.zeros",
"matplotlib.colors.ListedColormap",
"matplotlib.pyplot.subplots"
] |
[((916, 943), 'numpy.zeros', 'np.zeros', (['(N, B)'], {'dtype': 'int'}), '((N, B), dtype=int)\n', (924, 943), True, 'import numpy as np\n'), ((951, 962), 'numpy.zeros', 'np.zeros', (['B'], {}), '(B)\n', (959, 962), True, 'import numpy as np\n'), ((970, 981), 'numpy.zeros', 'np.zeros', (['B'], {}), '(B)\n', (978, 981), True, 'import numpy as np\n'), ((1239, 1267), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': '(6, 6)'}), '(figsize=(6, 6))\n', (1251, 1267), True, 'import matplotlib.pyplot as plt\n'), ((1275, 1324), 'matplotlib.colors.ListedColormap', 'matplotlib.colors.ListedColormap', (["['white', 'C0']"], {}), "(['white', 'C0'])\n", (1307, 1324), False, 'import matplotlib\n'), ((609, 623), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (613, 623), False, 'from pathlib import Path\n')]
|