code
stringlengths 13
6.09M
| order_type
stringclasses 2
values | original_example
dict | step_ids
listlengths 1
5
|
|---|---|---|---|
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@dp.message_handler(commands='upload', user_id=ADMINS, state='*')
async def upload_profile(command_msg: Message, state: FSMContext):
profile_msg = command_msg.reply_to_message
admin = command_msg.from_user
param = command_msg.get_args()
if not profile_msg:
await command_msg.answer('Чтобы загрузить анкету сделай на неё REPLY')
return
elif param != 'g' and param != 'b':
await command_msg.answer(
'Чтобы воспользоваться командой /upload нужно добавить параметры <b>b | g</b>'
)
return
other_bot = profile_msg.forward_from
if not other_bot or other_bot.id != 1234060895:
await profile_msg.reply(
'Загружать анкеты можно только из нашего БотаX :)')
return
elif not profile_msg.photo and not profile_msg.video or not profile_msg.caption:
await profile_msg.reply(
'Загружать нужно именно анкету, а не части анкеты')
return
profile_data = text.get_parse_data(profile_msg.caption)
if profile_msg.photo:
media_id = profile_msg.photo[-1].file_id
with_video = False
else:
media_id = profile_msg.video.file_id
with_video = True
profile_data.update(id=random.randint(1, 100000), username='f',
media_id=media_id, with_video=with_video, sex=1 if param == 'g' else 2)
await db.add_user(**profile_data)
await profile_msg.reply('Пользователь {}-{} успешно добавлен ✅'.format(
profile_data['user_nick'], profile_data['id']))
logging.info(
f"Admin @{admin.username}-{admin.id} successfully added fake {profile_data['user_nick']}-{profile_data['id']} "
)
@dp.message_handler(commands='get_msg_info', user_id=ADMINS, state='*')
async def get_msg_info(command_msg: Message, state: FSMContext):
msg = command_msg.reply_to_message
await command_msg.delete()
if not msg:
await command_msg.answer('Нужно делать реплай на сообщение.')
return
state = await state.get_state()
await msg.reply(
f"""Эхо в состоянии <code>{state}</code>.
Содержание сообщения:
<code>{msg}</code>
content_type = {msg.content_type}
entities={msg.entities}"""
)
@dp.message_handler(commands='ban_user', user_id=ADMINS, state='*')
async def ban_user(command_msg: Message, state: FSMContext):
ban_user_id = command_msg.get_args()
admin = command_msg.from_user
await command_msg.delete()
if not ban_user_id or not ban_user_id.isdecimal():
await command_msg.answer(f'Формат команды: /ban_user user_id')
return
ban_user_id = int(ban_user_id)
is_banned = await db.ban_user(ban_user_id)
if not is_banned:
await command_msg.answer(
f'Пользователя с таким <user_id> не существует')
return
await redis_commands.ban_user(ban_user_id)
await command_msg.answer('Пользователь({}) успешно забанен 😎'.format(
ban_user_id))
logging.info(f'Admin @{admin.username}-{admin.id} BAN USER-{ban_user_id}')
@dp.message_handler(commands='unban_user', user_id=ADMINS, state='*')
async def unban_user(command_msg: Message, state: FSMContext):
unban_user_id = command_msg.get_args()
admin = command_msg.from_user
await command_msg.delete()
if not unban_user_id or not unban_user_id.isdecimal():
await command_msg.answer(f'Формат команды: /unban_user user_id')
return
unban_user_id = int(unban_user_id)
is_unbanned = await db.unban_user(unban_user_id)
if not is_unbanned:
await command_msg.answer(
f'Пользователя с таким <user_id> не существует')
return
await redis_commands.unban_user(unban_user_id)
await command_msg.answer('Пользователь({}) успешно разбанен 👻'.format(
unban_user_id))
logging.info(
f'Admin @{admin.username}-{admin.id} UNBAN USER-{unban_user_id}')
@dp.message_handler(commands='clean_old_likes', user_id=ADMINS, state='*')
async def clean_old_likes(command_msg: Message, state: FSMContext):
admin = command_msg.from_user
await command_msg.delete()
count = await db.clean_old_likes(interval=24)
await command_msg.answer(
'Было успешно удалено {} старых лайков(за {} hours)'.format(count, 24))
logging.info(
f'Admin @{admin.username}-{admin.id} delete old likes(count={count})')
@dp.message_handler(commands='say_to_all_now_go', user_id=ADMINS, state='*')
async def say_to_all(command_msg: Message, state: FSMContext):
admin = command_msg.from_user
msg = command_msg.reply_to_message
await command_msg.delete()
if not msg:
await command_msg.answer(
'Чтобы воспользоваться этой командой сделай REPLY')
return
active_user_ids = await db.get_all_users(active=True)
delete_bot_count = 0
for user_id in active_user_ids:
try:
await dp.bot.copy_message(chat_id=user_id, from_chat_id=
command_msg.chat.id, message_id=msg.message_id)
await asyncio.sleep(0.05)
except BotBlocked as exc:
await db.update_user(user_id, active=False)
await redis_commands.clear_user(user_id)
await redis_commands.clear_search_ids(user_id)
delete_bot_count += 1
await msg.reply(
'Сообщение успешно отправлено: оставили бот({}), заблокировали бот({})'
.format(len(active_user_ids) - delete_bot_count, delete_bot_count))
logging.info(
f'Admin @{admin.username}-{admin.id} SAY TO ALL MSG(id={msg.message_id})'
)
@dp.message_handler(commands='show_state_statistic', user_id=ADMINS, state='*')
async def show_state_statistic(command_msg: Message, state: FSMContext):
admin = command_msg.from_user
statistic = dict()
await command_msg.delete()
states_list = await storage.get_states_list()
for states_item in states_list:
chat_id, user_id = states_item
state_text = await storage.get_state(chat=chat_id, user=user_id,
default='Deactivate bot')
try:
statistic[state_text] += 1
except KeyError:
statistic.update({state_text: 1})
out_text = '<b>Статичктика по пользователям:</b>\n\n'
for state_text, count_users in statistic.items():
out_text += (
f'В состоянии {state_text} — {count_users} пользователей\n\n')
await command_msg.answer(out_text)
logging.info(f'For Admin @{admin.username}-{admin.id} show state statistic'
)
@rate_limit(3)
@dp.message_handler(commands='show_info', user_id=ADMINS, state='*')
async def show_info(command_msg: Message, state: FSMContext):
admin = command_msg.from_user
await command_msg.delete()
await cur_bot_info(for_chat_id=command_msg.chat.id)
logging.info(f'For admin @{admin.username}-{admin.id} SHOW INFO(command)')
@dp.callback_query_handler(active_menu_callback.filter(), chat_id=
ADMIN_CHAT_ID, state='*')
async def change_active(call: CallbackQuery, state: FSMContext,
callback_data: dict):
active = not bool(int(callback_data['active']))
user_id = int(callback_data['user_id'])
admin = call.from_user
profile_msg = call.message
if active:
await db.unban_user(user_id)
await redis_commands.unban_user(user_id)
else:
await db.ban_user(user_id)
await redis_commands.ban_user(user_id)
await profile_msg.edit_reply_markup(keyboards.inline.get_activate_menu(
user_id=user_id, active=active))
await call.answer()
logging.info(
f'Admin @{admin.username}-{admin.id} CHANGE ACTIVE FOR USER-{user_id} TO {active}'
)
<|reserved_special_token_1|>
import asyncio
import logging
import random
from aiogram.dispatcher import FSMContext
from aiogram.types import ContentTypes, Message, CallbackQuery
from aiogram.utils.exceptions import BotBlocked
import keyboards
from data.config import ADMINS, ADMIN_CHAT_ID
from keyboards.inline.activate_menu import active_menu_callback
from loader import dp, db, storage
from utils import text
from utils.db_api import redis_commands
from utils.jobs import cur_bot_info
from utils.misc import rate_limit
@dp.message_handler(commands='upload', user_id=ADMINS, state='*')
async def upload_profile(command_msg: Message, state: FSMContext):
    """Upload a fake profile forwarded from the partner bot.

    Usage: reply to a profile message forwarded from the source bot with
    ``/upload g`` or ``/upload b`` (g = girl, b = boy).
    """
    profile_msg = command_msg.reply_to_message
    admin = command_msg.from_user
    param = command_msg.get_args()
    if not profile_msg:
        await command_msg.answer('Чтобы загрузить анкету сделай на неё REPLY')
        return
    # Membership test instead of the chained `!= and !=` comparison.
    if param not in ('g', 'b'):
        await command_msg.answer(
            'Чтобы воспользоваться командой /upload нужно добавить параметры <b>b | g</b>'
        )
        return
    other_bot = profile_msg.forward_from
    # Only profiles forwarded from the trusted source bot are accepted.
    if not other_bot or other_bot.id != 1234060895:
        await profile_msg.reply(
            'Загружать анкеты можно только из нашего БотаX :)')
        return
    # Explicit parentheses: a profile needs media (photo OR video) AND a caption.
    if (not profile_msg.photo and not profile_msg.video) or not profile_msg.caption:
        await profile_msg.reply(
            'Загружать нужно именно анкету, а не части анкеты')
        return
    profile_data = text.get_parse_data(profile_msg.caption)
    if profile_msg.photo:
        media_id = profile_msg.photo[-1].file_id  # last entry = largest size
        with_video = False
    else:
        media_id = profile_msg.video.file_id
        with_video = True
    # NOTE(review): random id may collide with an existing user — confirm
    # db.add_user rejects or handles duplicate ids.
    profile_data.update(id=random.randint(1, 100000), username='f',
                        media_id=media_id, with_video=with_video,
                        sex=1 if param == 'g' else 2)
    await db.add_user(**profile_data)
    await profile_msg.reply('Пользователь {}-{} успешно добавлен ✅'.format(
        profile_data['user_nick'], profile_data['id']))
    logging.info(
        f"Admin @{admin.username}-{admin.id} successfully added fake {profile_data['user_nick']}-{profile_data['id']} "
    )
@dp.message_handler(commands='get_msg_info', user_id=ADMINS, state='*')
async def get_msg_info(command_msg: Message, state: FSMContext):
    # Debug helper: echoes the replied-to message's FSM state, repr,
    # content_type and entities back to the admin.
    msg = command_msg.reply_to_message
    await command_msg.delete()
    if not msg:
        await command_msg.answer('Нужно делать реплай на сообщение.')
        return
    # NOTE(review): rebinding shadows the FSMContext parameter `state`
    # with its string value — consider a separate local name.
    state = await state.get_state()
    await msg.reply(
        f"""Эхо в состоянии <code>{state}</code>.
Содержание сообщения:
<code>{msg}</code>
content_type = {msg.content_type}
entities={msg.entities}"""
    )
@dp.message_handler(commands='ban_user', user_id=ADMINS, state='*')
async def ban_user(command_msg: Message, state: FSMContext):
    """Ban a user by numeric id: ``/ban_user <user_id>``.

    Bans in the DB first; the Redis flag is only set when the DB
    reported an existing user.
    """
    ban_user_id = command_msg.get_args()
    admin = command_msg.from_user
    await command_msg.delete()
    if not ban_user_id or not ban_user_id.isdecimal():
        # Plain literal: the original used an f-string with no placeholders.
        await command_msg.answer('Формат команды: /ban_user user_id')
        return
    ban_user_id = int(ban_user_id)
    is_banned = await db.ban_user(ban_user_id)
    if not is_banned:
        await command_msg.answer(
            'Пользователя с таким <user_id> не существует')
        return
    await redis_commands.ban_user(ban_user_id)
    await command_msg.answer('Пользователь({}) успешно забанен 😎'.format(
        ban_user_id))
    logging.info(f'Admin @{admin.username}-{admin.id} BAN USER-{ban_user_id}')
@dp.message_handler(commands='unban_user', user_id=ADMINS, state='*')
async def unban_user(command_msg: Message, state: FSMContext):
    """Unban a user by numeric id: ``/unban_user <user_id>``.

    Mirror of /ban_user: DB first, Redis flag only on success.
    """
    unban_user_id = command_msg.get_args()
    admin = command_msg.from_user
    await command_msg.delete()
    if not unban_user_id or not unban_user_id.isdecimal():
        # Plain literal: the original used an f-string with no placeholders.
        await command_msg.answer('Формат команды: /unban_user user_id')
        return
    unban_user_id = int(unban_user_id)
    is_unbanned = await db.unban_user(unban_user_id)
    if not is_unbanned:
        await command_msg.answer(
            'Пользователя с таким <user_id> не существует')
        return
    await redis_commands.unban_user(unban_user_id)
    await command_msg.answer('Пользователь({}) успешно разбанен 👻'.format(
        unban_user_id))
    logging.info(
        f'Admin @{admin.username}-{admin.id} UNBAN USER-{unban_user_id}')
@dp.message_handler(commands='clean_old_likes', user_id=ADMINS, state='*')
async def clean_old_likes(command_msg: Message, state: FSMContext):
    """Purge likes older than 24 hours and report how many were removed."""
    admin = command_msg.from_user
    await command_msg.delete()
    hours = 24
    removed = await db.clean_old_likes(interval=hours)
    await command_msg.answer(
        'Было успешно удалено {} старых лайков(за {} hours)'.format(removed, hours))
    logging.info(
        f'Admin @{admin.username}-{admin.id} delete old likes(count={removed})')
@dp.message_handler(commands='say_to_all_now_go', user_id=ADMINS, state='*')
async def say_to_all(command_msg: Message, state: FSMContext):
    """Broadcast a replied-to message to every active user.

    Users who blocked the bot are deactivated in the DB, purged from
    the Redis caches, and counted; a summary is reported to the admin.
    """
    admin = command_msg.from_user
    msg = command_msg.reply_to_message
    await command_msg.delete()
    if not msg:
        await command_msg.answer(
            'Чтобы воспользоваться этой командой сделай REPLY')
        return
    active_user_ids = await db.get_all_users(active=True)
    delete_bot_count = 0
    for user_id in active_user_ids:
        try:
            await dp.bot.copy_message(chat_id=user_id,
                                      from_chat_id=command_msg.chat.id,
                                      message_id=msg.message_id)
            # Small delay to stay under Telegram's per-second send limits.
            await asyncio.sleep(0.05)
        # Drop the unused `as exc` binding; only the exception type matters.
        # NOTE(review): only BotBlocked is handled — other send errors
        # (e.g. deactivated accounts) would abort the whole broadcast; verify.
        except BotBlocked:
            await db.update_user(user_id, active=False)
            await redis_commands.clear_user(user_id)
            await redis_commands.clear_search_ids(user_id)
            delete_bot_count += 1
    await msg.reply(
        'Сообщение успешно отправлено: оставили бот({}), заблокировали бот({})'
        .format(len(active_user_ids) - delete_bot_count, delete_bot_count))
    logging.info(
        f'Admin @{admin.username}-{admin.id} SAY TO ALL MSG(id={msg.message_id})'
    )
@dp.message_handler(commands='show_state_statistic', user_id=ADMINS, state='*')
async def show_state_statistic(command_msg: Message, state: FSMContext):
    """Count users per FSM state and post the aggregated statistics."""
    admin = command_msg.from_user
    statistic = dict()
    await command_msg.delete()
    states_list = await storage.get_states_list()
    # Unpack (chat_id, user_id) directly in the loop header.
    for chat_id, user_id in states_list:
        state_text = await storage.get_state(chat=chat_id, user=user_id,
                                             default='Deactivate bot')
        # dict.get with a default replaces the try/except KeyError counter.
        statistic[state_text] = statistic.get(state_text, 0) + 1
    # Fixed user-facing typo: was 'Статичктика'.
    out_text = '<b>Статистика по пользователям:</b>\n\n'
    for state_text, count_users in statistic.items():
        out_text += f'В состоянии {state_text} — {count_users} пользователей\n\n'
    await command_msg.answer(out_text)
    logging.info(f'For Admin @{admin.username}-{admin.id} show state statistic')
@rate_limit(3)
@dp.message_handler(commands='show_info', user_id=ADMINS, state='*')
async def show_info(command_msg: Message, state: FSMContext):
    """Delete the command message and post current bot info to this chat."""
    target_chat = command_msg.chat.id
    admin = command_msg.from_user
    await command_msg.delete()
    await cur_bot_info(for_chat_id=target_chat)
    logging.info(f'For admin @{admin.username}-{admin.id} SHOW INFO(command)')
@dp.callback_query_handler(active_menu_callback.filter(), chat_id=
    ADMIN_CHAT_ID, state='*')
async def change_active(call: CallbackQuery, state: FSMContext,
    callback_data: dict):
    """Toggle a user's banned/unbanned status from the inline admin menu."""
    # The button carries the CURRENT flag; the new state is its inverse.
    new_active = not bool(int(callback_data['active']))
    target_id = int(callback_data['user_id'])
    admin = call.from_user
    profile_msg = call.message
    if new_active:
        await db.unban_user(target_id)
        await redis_commands.unban_user(target_id)
    else:
        await db.ban_user(target_id)
        await redis_commands.ban_user(target_id)
    refreshed_markup = keyboards.inline.get_activate_menu(user_id=target_id,
                                                          active=new_active)
    await profile_msg.edit_reply_markup(refreshed_markup)
    await call.answer()
    logging.info(
        f'Admin @{admin.username}-{admin.id} CHANGE ACTIVE FOR USER-{target_id} TO {new_active}'
    )
<|reserved_special_token_1|>
import asyncio
import logging
import random
from aiogram.dispatcher import FSMContext
from aiogram.types import ContentTypes, Message, CallbackQuery
from aiogram.utils.exceptions import BotBlocked
import keyboards
from data.config import ADMINS, ADMIN_CHAT_ID
from keyboards.inline.activate_menu import active_menu_callback
from loader import dp, db, storage
from utils import text
from utils.db_api import redis_commands
from utils.jobs import cur_bot_info
from utils.misc import rate_limit
@dp.message_handler(commands="upload", user_id=ADMINS, state="*")
async def upload_profile(command_msg: Message, state: FSMContext):
profile_msg = command_msg.reply_to_message
admin = command_msg.from_user
param = command_msg.get_args()
if not profile_msg:
await command_msg.answer("Чтобы загрузить анкету сделай на неё REPLY")
return
elif param != "g" and param != "b":
await command_msg.answer("Чтобы воспользоваться командой /upload нужно добавить параметры <b>b | g</b>")
return
other_bot = profile_msg.forward_from
if not other_bot or other_bot.id != 1234060895:
await profile_msg.reply("Загружать анкеты можно только из нашего БотаX :)")
return
elif (not profile_msg.photo and not profile_msg.video) or not profile_msg.caption:
await profile_msg.reply("Загружать нужно именно анкету, а не части анкеты")
return
profile_data = text.get_parse_data(profile_msg.caption)
if profile_msg.photo:
media_id = profile_msg.photo[-1].file_id
with_video = False
else:
media_id = profile_msg.video.file_id
with_video = True
profile_data.update(
id=random.randint(1, 100000),
username="f",
media_id=media_id,
with_video=with_video,
sex=1 if param == "g" else 2
)
await db.add_user(**profile_data)
await profile_msg.reply("Пользователь {}-{} успешно добавлен ✅"
"".format(profile_data["user_nick"], profile_data["id"]))
logging.info(f"Admin @{admin.username}-{admin.id} successfully "
f"added fake {profile_data['user_nick']}-{profile_data['id']} ")
@dp.message_handler(commands="get_msg_info", user_id=ADMINS, state="*")
async def get_msg_info(command_msg: Message, state: FSMContext):
msg = command_msg.reply_to_message
await command_msg.delete()
if not msg:
await command_msg.answer("Нужно делать реплай на сообщение.")
return
state = await state.get_state()
await msg.reply(f"Эхо в состоянии <code>{state}</code>.\n"
f"\nСодержание сообщения:\n"
f"\n<code>{msg}</code>\n"
f"\ncontent_type = {msg.content_type}\n"
f"\nentities={msg.entities}")
@dp.message_handler(commands="ban_user", user_id=ADMINS, state="*")
async def ban_user(command_msg: Message, state: FSMContext):
ban_user_id = command_msg.get_args()
admin = command_msg.from_user
await command_msg.delete()
if not ban_user_id or not ban_user_id.isdecimal():
await command_msg.answer(f"Формат команды: /ban_user user_id")
return
ban_user_id = int(ban_user_id)
is_banned = await db.ban_user(ban_user_id)
if not is_banned:
await command_msg.answer(f"Пользователя с таким <user_id> не существует")
return
await redis_commands.ban_user(ban_user_id)
await command_msg.answer("Пользователь({}) успешно забанен 😎".format(ban_user_id))
logging.info(f"Admin @{admin.username}-{admin.id} BAN USER-{ban_user_id}")
@dp.message_handler(commands="unban_user", user_id=ADMINS, state="*")
async def unban_user(command_msg: Message, state: FSMContext):
unban_user_id = command_msg.get_args()
admin = command_msg.from_user
await command_msg.delete()
if not unban_user_id or not unban_user_id.isdecimal():
await command_msg.answer(f"Формат команды: /unban_user user_id")
return
unban_user_id = int(unban_user_id)
is_unbanned = await db.unban_user(unban_user_id)
if not is_unbanned:
await command_msg.answer(f"Пользователя с таким <user_id> не существует")
return
await redis_commands.unban_user(unban_user_id)
await command_msg.answer("Пользователь({}) успешно разбанен 👻".format(unban_user_id))
logging.info(f"Admin @{admin.username}-{admin.id} UNBAN USER-{unban_user_id}")
@dp.message_handler(commands="clean_old_likes", user_id=ADMINS, state="*")
async def clean_old_likes(command_msg: Message, state: FSMContext):
admin = command_msg.from_user
await command_msg.delete()
count = await db.clean_old_likes(interval=24)
await command_msg.answer("Было успешно удалено {} старых лайков(за {} hours)".format(count, 24))
logging.info(f"Admin @{admin.username}-{admin.id} delete old likes(count={count})")
@dp.message_handler(commands="say_to_all_now_go", user_id=ADMINS, state="*")
async def say_to_all(command_msg: Message, state: FSMContext):
admin = command_msg.from_user
msg = command_msg.reply_to_message
await command_msg.delete()
if not msg:
await command_msg.answer("Чтобы воспользоваться этой командой сделай REPLY")
return
active_user_ids = await db.get_all_users(active=True) # [375766905, 997319478]
delete_bot_count = 0
for user_id in active_user_ids:
try:
await dp.bot.copy_message(
chat_id=user_id,
from_chat_id=command_msg.chat.id,
message_id=msg.message_id
)
await asyncio.sleep(0.05)
except BotBlocked as exc:
await db.update_user(user_id, active=False)
await redis_commands.clear_user(user_id)
await redis_commands.clear_search_ids(user_id)
delete_bot_count += 1
await msg.reply("Сообщение успешно отправлено: оставили бот({}), заблокировали бот({})"
"".format(len(active_user_ids) - delete_bot_count, delete_bot_count))
logging.info(f"Admin @{admin.username}-{admin.id} SAY TO ALL MSG(id={msg.message_id})")
@dp.message_handler(commands="show_state_statistic", user_id=ADMINS, state="*")
async def show_state_statistic(command_msg: Message, state: FSMContext):
admin = command_msg.from_user
statistic = dict()
await command_msg.delete()
states_list = await storage.get_states_list()
for states_item in states_list:
chat_id, user_id = states_item
state_text = await storage.get_state(chat=chat_id, user=user_id, default="Deactivate bot")
try:
statistic[state_text] += 1
except KeyError:
statistic.update({state_text: 1})
out_text = "<b>Статичктика по пользователям:</b>\n\n"
for state_text, count_users in statistic.items():
out_text += f"В состоянии {state_text} — {count_users} пользователей\n\n"
await command_msg.answer(out_text)
logging.info(f"For Admin @{admin.username}-{admin.id} show state statistic")
@rate_limit(3)
@dp.message_handler(commands="show_info", user_id=ADMINS, state="*")
async def show_info(command_msg: Message, state: FSMContext):
admin = command_msg.from_user
await command_msg.delete()
await cur_bot_info(for_chat_id=command_msg.chat.id)
logging.info(f"For admin @{admin.username}-{admin.id} SHOW INFO(command)")
@dp.callback_query_handler(active_menu_callback.filter(), chat_id=ADMIN_CHAT_ID, state="*")
async def change_active(call: CallbackQuery, state: FSMContext, callback_data: dict):
active = not bool(int(callback_data["active"]))
user_id = int(callback_data["user_id"])
admin = call.from_user
profile_msg = call.message
if active:
await db.unban_user(user_id)
await redis_commands.unban_user(user_id)
else:
await db.ban_user(user_id)
await redis_commands.ban_user(user_id)
await profile_msg.edit_reply_markup(keyboards.inline.get_activate_menu(user_id=user_id, active=active))
await call.answer()
logging.info(f"Admin @{admin.username}-{admin.id} CHANGE ACTIVE FOR USER-{user_id} TO {active}")
|
flexible
|
{
"blob_id": "302accfd5001a27c7bbe6081856d43dbec704168",
"index": 339,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@dp.message_handler(commands='upload', user_id=ADMINS, state='*')\nasync def upload_profile(command_msg: Message, state: FSMContext):\n profile_msg = command_msg.reply_to_message\n admin = command_msg.from_user\n param = command_msg.get_args()\n if not profile_msg:\n await command_msg.answer('Чтобы загрузить анкету сделай на неё REPLY')\n return\n elif param != 'g' and param != 'b':\n await command_msg.answer(\n 'Чтобы воспользоваться командой /upload нужно добавить параметры <b>b | g</b>'\n )\n return\n other_bot = profile_msg.forward_from\n if not other_bot or other_bot.id != 1234060895:\n await profile_msg.reply(\n 'Загружать анкеты можно только из нашего БотаX :)')\n return\n elif not profile_msg.photo and not profile_msg.video or not profile_msg.caption:\n await profile_msg.reply(\n 'Загружать нужно именно анкету, а не части анкеты')\n return\n profile_data = text.get_parse_data(profile_msg.caption)\n if profile_msg.photo:\n media_id = profile_msg.photo[-1].file_id\n with_video = False\n else:\n media_id = profile_msg.video.file_id\n with_video = True\n profile_data.update(id=random.randint(1, 100000), username='f',\n media_id=media_id, with_video=with_video, sex=1 if param == 'g' else 2)\n await db.add_user(**profile_data)\n await profile_msg.reply('Пользователь {}-{} успешно добавлен ✅'.format(\n profile_data['user_nick'], profile_data['id']))\n logging.info(\n f\"Admin @{admin.username}-{admin.id} successfully added fake {profile_data['user_nick']}-{profile_data['id']} \"\n )\n\n\n@dp.message_handler(commands='get_msg_info', user_id=ADMINS, state='*')\nasync def get_msg_info(command_msg: Message, state: FSMContext):\n msg = command_msg.reply_to_message\n await command_msg.delete()\n if not msg:\n await command_msg.answer('Нужно делать реплай на сообщение.')\n return\n state = await state.get_state()\n await msg.reply(\n f\"\"\"Эхо в состоянии <code>{state}</code>.\n\nСодержание сообщения:\n\n<code>{msg}</code>\n\ncontent_type = 
{msg.content_type}\n\nentities={msg.entities}\"\"\"\n )\n\n\n@dp.message_handler(commands='ban_user', user_id=ADMINS, state='*')\nasync def ban_user(command_msg: Message, state: FSMContext):\n ban_user_id = command_msg.get_args()\n admin = command_msg.from_user\n await command_msg.delete()\n if not ban_user_id or not ban_user_id.isdecimal():\n await command_msg.answer(f'Формат команды: /ban_user user_id')\n return\n ban_user_id = int(ban_user_id)\n is_banned = await db.ban_user(ban_user_id)\n if not is_banned:\n await command_msg.answer(\n f'Пользователя с таким <user_id> не существует')\n return\n await redis_commands.ban_user(ban_user_id)\n await command_msg.answer('Пользователь({}) успешно забанен 😎'.format(\n ban_user_id))\n logging.info(f'Admin @{admin.username}-{admin.id} BAN USER-{ban_user_id}')\n\n\n@dp.message_handler(commands='unban_user', user_id=ADMINS, state='*')\nasync def unban_user(command_msg: Message, state: FSMContext):\n unban_user_id = command_msg.get_args()\n admin = command_msg.from_user\n await command_msg.delete()\n if not unban_user_id or not unban_user_id.isdecimal():\n await command_msg.answer(f'Формат команды: /unban_user user_id')\n return\n unban_user_id = int(unban_user_id)\n is_unbanned = await db.unban_user(unban_user_id)\n if not is_unbanned:\n await command_msg.answer(\n f'Пользователя с таким <user_id> не существует')\n return\n await redis_commands.unban_user(unban_user_id)\n await command_msg.answer('Пользователь({}) успешно разбанен 👻'.format(\n unban_user_id))\n logging.info(\n f'Admin @{admin.username}-{admin.id} UNBAN USER-{unban_user_id}')\n\n\n@dp.message_handler(commands='clean_old_likes', user_id=ADMINS, state='*')\nasync def clean_old_likes(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n await command_msg.delete()\n count = await db.clean_old_likes(interval=24)\n await command_msg.answer(\n 'Было успешно удалено {} старых лайков(за {} hours)'.format(count, 24))\n logging.info(\n f'Admin 
@{admin.username}-{admin.id} delete old likes(count={count})')\n\n\n@dp.message_handler(commands='say_to_all_now_go', user_id=ADMINS, state='*')\nasync def say_to_all(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n msg = command_msg.reply_to_message\n await command_msg.delete()\n if not msg:\n await command_msg.answer(\n 'Чтобы воспользоваться этой командой сделай REPLY')\n return\n active_user_ids = await db.get_all_users(active=True)\n delete_bot_count = 0\n for user_id in active_user_ids:\n try:\n await dp.bot.copy_message(chat_id=user_id, from_chat_id=\n command_msg.chat.id, message_id=msg.message_id)\n await asyncio.sleep(0.05)\n except BotBlocked as exc:\n await db.update_user(user_id, active=False)\n await redis_commands.clear_user(user_id)\n await redis_commands.clear_search_ids(user_id)\n delete_bot_count += 1\n await msg.reply(\n 'Сообщение успешно отправлено: оставили бот({}), заблокировали бот({})'\n .format(len(active_user_ids) - delete_bot_count, delete_bot_count))\n logging.info(\n f'Admin @{admin.username}-{admin.id} SAY TO ALL MSG(id={msg.message_id})'\n )\n\n\n@dp.message_handler(commands='show_state_statistic', user_id=ADMINS, state='*')\nasync def show_state_statistic(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n statistic = dict()\n await command_msg.delete()\n states_list = await storage.get_states_list()\n for states_item in states_list:\n chat_id, user_id = states_item\n state_text = await storage.get_state(chat=chat_id, user=user_id,\n default='Deactivate bot')\n try:\n statistic[state_text] += 1\n except KeyError:\n statistic.update({state_text: 1})\n out_text = '<b>Статичктика по пользователям:</b>\\n\\n'\n for state_text, count_users in statistic.items():\n out_text += (\n f'В состоянии {state_text} — {count_users} пользователей\\n\\n')\n await command_msg.answer(out_text)\n logging.info(f'For Admin @{admin.username}-{admin.id} show state statistic'\n 
)\n\n\n@rate_limit(3)\n@dp.message_handler(commands='show_info', user_id=ADMINS, state='*')\nasync def show_info(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n await command_msg.delete()\n await cur_bot_info(for_chat_id=command_msg.chat.id)\n logging.info(f'For admin @{admin.username}-{admin.id} SHOW INFO(command)')\n\n\n@dp.callback_query_handler(active_menu_callback.filter(), chat_id=\n ADMIN_CHAT_ID, state='*')\nasync def change_active(call: CallbackQuery, state: FSMContext,\n callback_data: dict):\n active = not bool(int(callback_data['active']))\n user_id = int(callback_data['user_id'])\n admin = call.from_user\n profile_msg = call.message\n if active:\n await db.unban_user(user_id)\n await redis_commands.unban_user(user_id)\n else:\n await db.ban_user(user_id)\n await redis_commands.ban_user(user_id)\n await profile_msg.edit_reply_markup(keyboards.inline.get_activate_menu(\n user_id=user_id, active=active))\n await call.answer()\n logging.info(\n f'Admin @{admin.username}-{admin.id} CHANGE ACTIVE FOR USER-{user_id} TO {active}'\n )\n",
"step-3": "import asyncio\nimport logging\nimport random\nfrom aiogram.dispatcher import FSMContext\nfrom aiogram.types import ContentTypes, Message, CallbackQuery\nfrom aiogram.utils.exceptions import BotBlocked\nimport keyboards\nfrom data.config import ADMINS, ADMIN_CHAT_ID\nfrom keyboards.inline.activate_menu import active_menu_callback\nfrom loader import dp, db, storage\nfrom utils import text\nfrom utils.db_api import redis_commands\nfrom utils.jobs import cur_bot_info\nfrom utils.misc import rate_limit\n\n\n@dp.message_handler(commands='upload', user_id=ADMINS, state='*')\nasync def upload_profile(command_msg: Message, state: FSMContext):\n profile_msg = command_msg.reply_to_message\n admin = command_msg.from_user\n param = command_msg.get_args()\n if not profile_msg:\n await command_msg.answer('Чтобы загрузить анкету сделай на неё REPLY')\n return\n elif param != 'g' and param != 'b':\n await command_msg.answer(\n 'Чтобы воспользоваться командой /upload нужно добавить параметры <b>b | g</b>'\n )\n return\n other_bot = profile_msg.forward_from\n if not other_bot or other_bot.id != 1234060895:\n await profile_msg.reply(\n 'Загружать анкеты можно только из нашего БотаX :)')\n return\n elif not profile_msg.photo and not profile_msg.video or not profile_msg.caption:\n await profile_msg.reply(\n 'Загружать нужно именно анкету, а не части анкеты')\n return\n profile_data = text.get_parse_data(profile_msg.caption)\n if profile_msg.photo:\n media_id = profile_msg.photo[-1].file_id\n with_video = False\n else:\n media_id = profile_msg.video.file_id\n with_video = True\n profile_data.update(id=random.randint(1, 100000), username='f',\n media_id=media_id, with_video=with_video, sex=1 if param == 'g' else 2)\n await db.add_user(**profile_data)\n await profile_msg.reply('Пользователь {}-{} успешно добавлен ✅'.format(\n profile_data['user_nick'], profile_data['id']))\n logging.info(\n f\"Admin @{admin.username}-{admin.id} successfully added fake 
{profile_data['user_nick']}-{profile_data['id']} \"\n )\n\n\n@dp.message_handler(commands='get_msg_info', user_id=ADMINS, state='*')\nasync def get_msg_info(command_msg: Message, state: FSMContext):\n msg = command_msg.reply_to_message\n await command_msg.delete()\n if not msg:\n await command_msg.answer('Нужно делать реплай на сообщение.')\n return\n state = await state.get_state()\n await msg.reply(\n f\"\"\"Эхо в состоянии <code>{state}</code>.\n\nСодержание сообщения:\n\n<code>{msg}</code>\n\ncontent_type = {msg.content_type}\n\nentities={msg.entities}\"\"\"\n )\n\n\n@dp.message_handler(commands='ban_user', user_id=ADMINS, state='*')\nasync def ban_user(command_msg: Message, state: FSMContext):\n ban_user_id = command_msg.get_args()\n admin = command_msg.from_user\n await command_msg.delete()\n if not ban_user_id or not ban_user_id.isdecimal():\n await command_msg.answer(f'Формат команды: /ban_user user_id')\n return\n ban_user_id = int(ban_user_id)\n is_banned = await db.ban_user(ban_user_id)\n if not is_banned:\n await command_msg.answer(\n f'Пользователя с таким <user_id> не существует')\n return\n await redis_commands.ban_user(ban_user_id)\n await command_msg.answer('Пользователь({}) успешно забанен 😎'.format(\n ban_user_id))\n logging.info(f'Admin @{admin.username}-{admin.id} BAN USER-{ban_user_id}')\n\n\n@dp.message_handler(commands='unban_user', user_id=ADMINS, state='*')\nasync def unban_user(command_msg: Message, state: FSMContext):\n unban_user_id = command_msg.get_args()\n admin = command_msg.from_user\n await command_msg.delete()\n if not unban_user_id or not unban_user_id.isdecimal():\n await command_msg.answer(f'Формат команды: /unban_user user_id')\n return\n unban_user_id = int(unban_user_id)\n is_unbanned = await db.unban_user(unban_user_id)\n if not is_unbanned:\n await command_msg.answer(\n f'Пользователя с таким <user_id> не существует')\n return\n await redis_commands.unban_user(unban_user_id)\n await command_msg.answer('Пользователь({}) 
успешно разбанен 👻'.format(\n unban_user_id))\n logging.info(\n f'Admin @{admin.username}-{admin.id} UNBAN USER-{unban_user_id}')\n\n\n@dp.message_handler(commands='clean_old_likes', user_id=ADMINS, state='*')\nasync def clean_old_likes(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n await command_msg.delete()\n count = await db.clean_old_likes(interval=24)\n await command_msg.answer(\n 'Было успешно удалено {} старых лайков(за {} hours)'.format(count, 24))\n logging.info(\n f'Admin @{admin.username}-{admin.id} delete old likes(count={count})')\n\n\n@dp.message_handler(commands='say_to_all_now_go', user_id=ADMINS, state='*')\nasync def say_to_all(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n msg = command_msg.reply_to_message\n await command_msg.delete()\n if not msg:\n await command_msg.answer(\n 'Чтобы воспользоваться этой командой сделай REPLY')\n return\n active_user_ids = await db.get_all_users(active=True)\n delete_bot_count = 0\n for user_id in active_user_ids:\n try:\n await dp.bot.copy_message(chat_id=user_id, from_chat_id=\n command_msg.chat.id, message_id=msg.message_id)\n await asyncio.sleep(0.05)\n except BotBlocked as exc:\n await db.update_user(user_id, active=False)\n await redis_commands.clear_user(user_id)\n await redis_commands.clear_search_ids(user_id)\n delete_bot_count += 1\n await msg.reply(\n 'Сообщение успешно отправлено: оставили бот({}), заблокировали бот({})'\n .format(len(active_user_ids) - delete_bot_count, delete_bot_count))\n logging.info(\n f'Admin @{admin.username}-{admin.id} SAY TO ALL MSG(id={msg.message_id})'\n )\n\n\n@dp.message_handler(commands='show_state_statistic', user_id=ADMINS, state='*')\nasync def show_state_statistic(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n statistic = dict()\n await command_msg.delete()\n states_list = await storage.get_states_list()\n for states_item in states_list:\n chat_id, user_id = states_item\n 
state_text = await storage.get_state(chat=chat_id, user=user_id,\n default='Deactivate bot')\n try:\n statistic[state_text] += 1\n except KeyError:\n statistic.update({state_text: 1})\n out_text = '<b>Статичктика по пользователям:</b>\\n\\n'\n for state_text, count_users in statistic.items():\n out_text += (\n f'В состоянии {state_text} — {count_users} пользователей\\n\\n')\n await command_msg.answer(out_text)\n logging.info(f'For Admin @{admin.username}-{admin.id} show state statistic'\n )\n\n\n@rate_limit(3)\n@dp.message_handler(commands='show_info', user_id=ADMINS, state='*')\nasync def show_info(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n await command_msg.delete()\n await cur_bot_info(for_chat_id=command_msg.chat.id)\n logging.info(f'For admin @{admin.username}-{admin.id} SHOW INFO(command)')\n\n\n@dp.callback_query_handler(active_menu_callback.filter(), chat_id=\n ADMIN_CHAT_ID, state='*')\nasync def change_active(call: CallbackQuery, state: FSMContext,\n callback_data: dict):\n active = not bool(int(callback_data['active']))\n user_id = int(callback_data['user_id'])\n admin = call.from_user\n profile_msg = call.message\n if active:\n await db.unban_user(user_id)\n await redis_commands.unban_user(user_id)\n else:\n await db.ban_user(user_id)\n await redis_commands.ban_user(user_id)\n await profile_msg.edit_reply_markup(keyboards.inline.get_activate_menu(\n user_id=user_id, active=active))\n await call.answer()\n logging.info(\n f'Admin @{admin.username}-{admin.id} CHANGE ACTIVE FOR USER-{user_id} TO {active}'\n )\n",
"step-4": "import asyncio\nimport logging\nimport random\n\nfrom aiogram.dispatcher import FSMContext\nfrom aiogram.types import ContentTypes, Message, CallbackQuery\nfrom aiogram.utils.exceptions import BotBlocked\n\nimport keyboards\nfrom data.config import ADMINS, ADMIN_CHAT_ID\nfrom keyboards.inline.activate_menu import active_menu_callback\nfrom loader import dp, db, storage\nfrom utils import text\nfrom utils.db_api import redis_commands\nfrom utils.jobs import cur_bot_info\nfrom utils.misc import rate_limit\n\n\n@dp.message_handler(commands=\"upload\", user_id=ADMINS, state=\"*\")\nasync def upload_profile(command_msg: Message, state: FSMContext):\n profile_msg = command_msg.reply_to_message\n admin = command_msg.from_user\n param = command_msg.get_args()\n\n if not profile_msg:\n await command_msg.answer(\"Чтобы загрузить анкету сделай на неё REPLY\")\n return\n elif param != \"g\" and param != \"b\":\n await command_msg.answer(\"Чтобы воспользоваться командой /upload нужно добавить параметры <b>b | g</b>\")\n return\n\n other_bot = profile_msg.forward_from\n if not other_bot or other_bot.id != 1234060895:\n await profile_msg.reply(\"Загружать анкеты можно только из нашего БотаX :)\")\n return\n elif (not profile_msg.photo and not profile_msg.video) or not profile_msg.caption:\n await profile_msg.reply(\"Загружать нужно именно анкету, а не части анкеты\")\n return\n\n profile_data = text.get_parse_data(profile_msg.caption)\n if profile_msg.photo:\n media_id = profile_msg.photo[-1].file_id\n with_video = False\n else:\n media_id = profile_msg.video.file_id\n with_video = True\n\n profile_data.update(\n id=random.randint(1, 100000),\n username=\"f\",\n media_id=media_id,\n with_video=with_video,\n sex=1 if param == \"g\" else 2\n )\n\n await db.add_user(**profile_data)\n await profile_msg.reply(\"Пользователь {}-{} успешно добавлен ✅\"\n \"\".format(profile_data[\"user_nick\"], profile_data[\"id\"]))\n logging.info(f\"Admin @{admin.username}-{admin.id} 
successfully \"\n f\"added fake {profile_data['user_nick']}-{profile_data['id']} \")\n\n\n@dp.message_handler(commands=\"get_msg_info\", user_id=ADMINS, state=\"*\")\nasync def get_msg_info(command_msg: Message, state: FSMContext):\n msg = command_msg.reply_to_message\n\n await command_msg.delete()\n\n if not msg:\n await command_msg.answer(\"Нужно делать реплай на сообщение.\")\n return\n\n state = await state.get_state()\n await msg.reply(f\"Эхо в состоянии <code>{state}</code>.\\n\"\n f\"\\nСодержание сообщения:\\n\"\n f\"\\n<code>{msg}</code>\\n\"\n f\"\\ncontent_type = {msg.content_type}\\n\"\n f\"\\nentities={msg.entities}\")\n\n\n@dp.message_handler(commands=\"ban_user\", user_id=ADMINS, state=\"*\")\nasync def ban_user(command_msg: Message, state: FSMContext):\n ban_user_id = command_msg.get_args()\n admin = command_msg.from_user\n\n await command_msg.delete()\n\n if not ban_user_id or not ban_user_id.isdecimal():\n await command_msg.answer(f\"Формат команды: /ban_user user_id\")\n return\n ban_user_id = int(ban_user_id)\n\n is_banned = await db.ban_user(ban_user_id)\n if not is_banned:\n await command_msg.answer(f\"Пользователя с таким <user_id> не существует\")\n return\n\n await redis_commands.ban_user(ban_user_id)\n\n await command_msg.answer(\"Пользователь({}) успешно забанен 😎\".format(ban_user_id))\n\n logging.info(f\"Admin @{admin.username}-{admin.id} BAN USER-{ban_user_id}\")\n\n\n@dp.message_handler(commands=\"unban_user\", user_id=ADMINS, state=\"*\")\nasync def unban_user(command_msg: Message, state: FSMContext):\n unban_user_id = command_msg.get_args()\n admin = command_msg.from_user\n\n await command_msg.delete()\n\n if not unban_user_id or not unban_user_id.isdecimal():\n await command_msg.answer(f\"Формат команды: /unban_user user_id\")\n return\n unban_user_id = int(unban_user_id)\n\n is_unbanned = await db.unban_user(unban_user_id)\n if not is_unbanned:\n await command_msg.answer(f\"Пользователя с таким <user_id> не существует\")\n 
return\n\n await redis_commands.unban_user(unban_user_id)\n\n await command_msg.answer(\"Пользователь({}) успешно разбанен 👻\".format(unban_user_id))\n\n logging.info(f\"Admin @{admin.username}-{admin.id} UNBAN USER-{unban_user_id}\")\n\n\n@dp.message_handler(commands=\"clean_old_likes\", user_id=ADMINS, state=\"*\")\nasync def clean_old_likes(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n\n await command_msg.delete()\n\n count = await db.clean_old_likes(interval=24)\n\n await command_msg.answer(\"Было успешно удалено {} старых лайков(за {} hours)\".format(count, 24))\n\n logging.info(f\"Admin @{admin.username}-{admin.id} delete old likes(count={count})\")\n\n\n@dp.message_handler(commands=\"say_to_all_now_go\", user_id=ADMINS, state=\"*\")\nasync def say_to_all(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n msg = command_msg.reply_to_message\n\n await command_msg.delete()\n\n if not msg:\n await command_msg.answer(\"Чтобы воспользоваться этой командой сделай REPLY\")\n return\n\n active_user_ids = await db.get_all_users(active=True) # [375766905, 997319478]\n delete_bot_count = 0\n\n for user_id in active_user_ids:\n try:\n await dp.bot.copy_message(\n chat_id=user_id,\n from_chat_id=command_msg.chat.id,\n message_id=msg.message_id\n )\n await asyncio.sleep(0.05)\n except BotBlocked as exc:\n await db.update_user(user_id, active=False)\n await redis_commands.clear_user(user_id)\n await redis_commands.clear_search_ids(user_id)\n delete_bot_count += 1\n\n await msg.reply(\"Сообщение успешно отправлено: оставили бот({}), заблокировали бот({})\"\n \"\".format(len(active_user_ids) - delete_bot_count, delete_bot_count))\n\n logging.info(f\"Admin @{admin.username}-{admin.id} SAY TO ALL MSG(id={msg.message_id})\")\n\n\n@dp.message_handler(commands=\"show_state_statistic\", user_id=ADMINS, state=\"*\")\nasync def show_state_statistic(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n 
statistic = dict()\n\n await command_msg.delete()\n\n states_list = await storage.get_states_list()\n for states_item in states_list:\n chat_id, user_id = states_item\n state_text = await storage.get_state(chat=chat_id, user=user_id, default=\"Deactivate bot\")\n try:\n statistic[state_text] += 1\n except KeyError:\n statistic.update({state_text: 1})\n\n out_text = \"<b>Статичктика по пользователям:</b>\\n\\n\"\n for state_text, count_users in statistic.items():\n out_text += f\"В состоянии {state_text} — {count_users} пользователей\\n\\n\"\n\n await command_msg.answer(out_text)\n\n logging.info(f\"For Admin @{admin.username}-{admin.id} show state statistic\")\n\n\n@rate_limit(3)\n@dp.message_handler(commands=\"show_info\", user_id=ADMINS, state=\"*\")\nasync def show_info(command_msg: Message, state: FSMContext):\n admin = command_msg.from_user\n\n await command_msg.delete()\n\n await cur_bot_info(for_chat_id=command_msg.chat.id)\n\n logging.info(f\"For admin @{admin.username}-{admin.id} SHOW INFO(command)\")\n\n\n@dp.callback_query_handler(active_menu_callback.filter(), chat_id=ADMIN_CHAT_ID, state=\"*\")\nasync def change_active(call: CallbackQuery, state: FSMContext, callback_data: dict):\n active = not bool(int(callback_data[\"active\"]))\n user_id = int(callback_data[\"user_id\"])\n admin = call.from_user\n profile_msg = call.message\n\n if active:\n await db.unban_user(user_id)\n await redis_commands.unban_user(user_id)\n else:\n await db.ban_user(user_id)\n await redis_commands.ban_user(user_id)\n\n await profile_msg.edit_reply_markup(keyboards.inline.get_activate_menu(user_id=user_id, active=active))\n await call.answer()\n\n logging.info(f\"Admin @{admin.username}-{admin.id} CHANGE ACTIVE FOR USER-{user_id} TO {active}\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class City(BaseModel):
name = models.CharField(max_length=255, db_index=True)
def __str__(self):
return self.name
class Article(BaseModel):
created_by = models.ForeignKey(User, related_name='articles', on_delete
=models.SET_NULL, null=True)
title = models.CharField(max_length=200)
description = models.TextField()
image = models.ImageField(upload_to=get_upload_path, blank=True)
is_archived = models.BooleanField(default=False)
def __str__(self):
return self.title
class UserNotification(BaseModel):
title = models.CharField(max_length=150)
sent_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,
related_name='sent_notifications')
sent_to = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,
related_name='notifications')
content = models.TextField(blank=True)
is_read = models.BooleanField(default=False)
notification_type = models.CharField(max_length=15, choices=
NOTIFICATION_TYPE_CHOICES, default=INFO)
def __str__(self):
if self.sent_by:
return (
f'Sent by {str(self.sent_by)} to {str(self.sent_to)} content {self.content}'
)
return f'{str(self.sent_to)} content {self.content}'
class Meta:
ordering = 'is_read', '-created_at'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class City(BaseModel):
name = models.CharField(max_length=255, db_index=True)
def __str__(self):
return self.name
class Article(BaseModel):
created_by = models.ForeignKey(User, related_name='articles', on_delete
=models.SET_NULL, null=True)
title = models.CharField(max_length=200)
description = models.TextField()
image = models.ImageField(upload_to=get_upload_path, blank=True)
is_archived = models.BooleanField(default=False)
def __str__(self):
return self.title
class UserNotification(BaseModel):
title = models.CharField(max_length=150)
sent_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,
related_name='sent_notifications')
sent_to = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,
related_name='notifications')
content = models.TextField(blank=True)
is_read = models.BooleanField(default=False)
notification_type = models.CharField(max_length=15, choices=
NOTIFICATION_TYPE_CHOICES, default=INFO)
def __str__(self):
if self.sent_by:
return (
f'Sent by {str(self.sent_by)} to {str(self.sent_to)} content {self.content}'
)
return f'{str(self.sent_to)} content {self.content}'
class Meta:
ordering = 'is_read', '-created_at'
<|reserved_special_token_0|>
def send_article_notifications(sender, instance, created, **kwargs):
if created:
UserNotification.objects.bulk_create([UserNotification(**{'title':
instance.title, 'sent_to': user, 'notification_type': INFO,
'content': instance.description}) for user in User.objects.all()])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class City(BaseModel):
name = models.CharField(max_length=255, db_index=True)
def __str__(self):
return self.name
class Article(BaseModel):
created_by = models.ForeignKey(User, related_name='articles', on_delete
=models.SET_NULL, null=True)
title = models.CharField(max_length=200)
description = models.TextField()
image = models.ImageField(upload_to=get_upload_path, blank=True)
is_archived = models.BooleanField(default=False)
def __str__(self):
return self.title
class UserNotification(BaseModel):
title = models.CharField(max_length=150)
sent_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,
related_name='sent_notifications')
sent_to = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,
related_name='notifications')
content = models.TextField(blank=True)
is_read = models.BooleanField(default=False)
notification_type = models.CharField(max_length=15, choices=
NOTIFICATION_TYPE_CHOICES, default=INFO)
def __str__(self):
if self.sent_by:
return (
f'Sent by {str(self.sent_by)} to {str(self.sent_to)} content {self.content}'
)
return f'{str(self.sent_to)} content {self.content}'
class Meta:
ordering = 'is_read', '-created_at'
def send_push_notification(sender, instance, created, **kwargs):
if created:
receiver = instance.sent_to
receiver_device = receiver.devices.filter(is_active=True).first()
if receiver_device:
send_push_message(receiver_device.registration_id, title=
instance.title, body=instance.content)
def send_article_notifications(sender, instance, created, **kwargs):
if created:
UserNotification.objects.bulk_create([UserNotification(**{'title':
instance.title, 'sent_to': user, 'notification_type': INFO,
'content': instance.description}) for user in User.objects.all()])
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class City(BaseModel):
name = models.CharField(max_length=255, db_index=True)
def __str__(self):
return self.name
class Article(BaseModel):
created_by = models.ForeignKey(User, related_name='articles', on_delete
=models.SET_NULL, null=True)
title = models.CharField(max_length=200)
description = models.TextField()
image = models.ImageField(upload_to=get_upload_path, blank=True)
is_archived = models.BooleanField(default=False)
def __str__(self):
return self.title
class UserNotification(BaseModel):
title = models.CharField(max_length=150)
sent_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,
related_name='sent_notifications')
sent_to = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,
related_name='notifications')
content = models.TextField(blank=True)
is_read = models.BooleanField(default=False)
notification_type = models.CharField(max_length=15, choices=
NOTIFICATION_TYPE_CHOICES, default=INFO)
def __str__(self):
if self.sent_by:
return (
f'Sent by {str(self.sent_by)} to {str(self.sent_to)} content {self.content}'
)
return f'{str(self.sent_to)} content {self.content}'
class Meta:
ordering = 'is_read', '-created_at'
def send_push_notification(sender, instance, created, **kwargs):
if created:
receiver = instance.sent_to
receiver_device = receiver.devices.filter(is_active=True).first()
if receiver_device:
send_push_message(receiver_device.registration_id, title=
instance.title, body=instance.content)
def send_article_notifications(sender, instance, created, **kwargs):
if created:
UserNotification.objects.bulk_create([UserNotification(**{'title':
instance.title, 'sent_to': user, 'notification_type': INFO,
'content': instance.description}) for user in User.objects.all()])
post_save.connect(send_push_notification, sender=UserNotification)
post_save.connect(send_article_notifications, sender=Article)
<|reserved_special_token_1|>
from django.contrib.auth import get_user_model
from django.db import models
from django.db.models.signals import post_save
from apps.common.constants import NOTIFICATION_TYPE_CHOICES, INFO
from apps.core.models import BaseModel
from apps.core.utils.helpers import get_upload_path
from apps.core.utils.push_notification import send_push_message
User = get_user_model()
class City(BaseModel):
name = models.CharField(max_length=255, db_index=True)
def __str__(self):
return self.name
class Article(BaseModel):
created_by = models.ForeignKey(User, related_name='articles', on_delete=models.SET_NULL, null=True)
title = models.CharField(max_length=200)
description = models.TextField()
# Below fields are optional
image = models.ImageField(
upload_to=get_upload_path,
blank=True
)
is_archived = models.BooleanField(default=False)
def __str__(self):
return self.title
class UserNotification(BaseModel):
title = models.CharField(max_length=150)
sent_by = models.ForeignKey(
User, on_delete=models.SET_NULL, null=True, related_name='sent_notifications')
sent_to = models.ForeignKey(
User, on_delete=models.SET_NULL, null=True, related_name='notifications')
content = models.TextField(blank=True)
is_read = models.BooleanField(default=False) # To mark notification as read
notification_type = models.CharField(
max_length=15,
choices=NOTIFICATION_TYPE_CHOICES,
default=INFO
)
def __str__(self):
if self.sent_by:
return f'Sent by {str(self.sent_by)} to {str(self.sent_to)} content {self.content}'
return f'{str(self.sent_to)} content {self.content}'
class Meta:
ordering = ('is_read', '-created_at')
def send_push_notification(sender, instance, created, **kwargs):
if created:
receiver = instance.sent_to
receiver_device = receiver.devices.filter(is_active=True).first()
if receiver_device:
send_push_message(
receiver_device.registration_id,
title=instance.title,
body=instance.content
)
def send_article_notifications(sender, instance, created, **kwargs):
if created:
UserNotification.objects.bulk_create([
UserNotification(**{
'title': instance.title,
'sent_to': user,
'notification_type': INFO,
'content': instance.description
}) for user in User.objects.all()
])
post_save.connect(send_push_notification, sender=UserNotification)
post_save.connect(send_article_notifications, sender=Article)
|
flexible
|
{
"blob_id": "c2260278c8dfb353f55ee9ea3495049b08169447",
"index": 4115,
"step-1": "<mask token>\n\n\nclass City(BaseModel):\n name = models.CharField(max_length=255, db_index=True)\n\n def __str__(self):\n return self.name\n\n\nclass Article(BaseModel):\n created_by = models.ForeignKey(User, related_name='articles', on_delete\n =models.SET_NULL, null=True)\n title = models.CharField(max_length=200)\n description = models.TextField()\n image = models.ImageField(upload_to=get_upload_path, blank=True)\n is_archived = models.BooleanField(default=False)\n\n def __str__(self):\n return self.title\n\n\nclass UserNotification(BaseModel):\n title = models.CharField(max_length=150)\n sent_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,\n related_name='sent_notifications')\n sent_to = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,\n related_name='notifications')\n content = models.TextField(blank=True)\n is_read = models.BooleanField(default=False)\n notification_type = models.CharField(max_length=15, choices=\n NOTIFICATION_TYPE_CHOICES, default=INFO)\n\n def __str__(self):\n if self.sent_by:\n return (\n f'Sent by {str(self.sent_by)} to {str(self.sent_to)} content {self.content}'\n )\n return f'{str(self.sent_to)} content {self.content}'\n\n\n class Meta:\n ordering = 'is_read', '-created_at'\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass City(BaseModel):\n name = models.CharField(max_length=255, db_index=True)\n\n def __str__(self):\n return self.name\n\n\nclass Article(BaseModel):\n created_by = models.ForeignKey(User, related_name='articles', on_delete\n =models.SET_NULL, null=True)\n title = models.CharField(max_length=200)\n description = models.TextField()\n image = models.ImageField(upload_to=get_upload_path, blank=True)\n is_archived = models.BooleanField(default=False)\n\n def __str__(self):\n return self.title\n\n\nclass UserNotification(BaseModel):\n title = models.CharField(max_length=150)\n sent_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,\n related_name='sent_notifications')\n sent_to = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,\n related_name='notifications')\n content = models.TextField(blank=True)\n is_read = models.BooleanField(default=False)\n notification_type = models.CharField(max_length=15, choices=\n NOTIFICATION_TYPE_CHOICES, default=INFO)\n\n def __str__(self):\n if self.sent_by:\n return (\n f'Sent by {str(self.sent_by)} to {str(self.sent_to)} content {self.content}'\n )\n return f'{str(self.sent_to)} content {self.content}'\n\n\n class Meta:\n ordering = 'is_read', '-created_at'\n\n\n<mask token>\n\n\ndef send_article_notifications(sender, instance, created, **kwargs):\n if created:\n UserNotification.objects.bulk_create([UserNotification(**{'title':\n instance.title, 'sent_to': user, 'notification_type': INFO,\n 'content': instance.description}) for user in User.objects.all()])\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass City(BaseModel):\n name = models.CharField(max_length=255, db_index=True)\n\n def __str__(self):\n return self.name\n\n\nclass Article(BaseModel):\n created_by = models.ForeignKey(User, related_name='articles', on_delete\n =models.SET_NULL, null=True)\n title = models.CharField(max_length=200)\n description = models.TextField()\n image = models.ImageField(upload_to=get_upload_path, blank=True)\n is_archived = models.BooleanField(default=False)\n\n def __str__(self):\n return self.title\n\n\nclass UserNotification(BaseModel):\n title = models.CharField(max_length=150)\n sent_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,\n related_name='sent_notifications')\n sent_to = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,\n related_name='notifications')\n content = models.TextField(blank=True)\n is_read = models.BooleanField(default=False)\n notification_type = models.CharField(max_length=15, choices=\n NOTIFICATION_TYPE_CHOICES, default=INFO)\n\n def __str__(self):\n if self.sent_by:\n return (\n f'Sent by {str(self.sent_by)} to {str(self.sent_to)} content {self.content}'\n )\n return f'{str(self.sent_to)} content {self.content}'\n\n\n class Meta:\n ordering = 'is_read', '-created_at'\n\n\ndef send_push_notification(sender, instance, created, **kwargs):\n if created:\n receiver = instance.sent_to\n receiver_device = receiver.devices.filter(is_active=True).first()\n if receiver_device:\n send_push_message(receiver_device.registration_id, title=\n instance.title, body=instance.content)\n\n\ndef send_article_notifications(sender, instance, created, **kwargs):\n if created:\n UserNotification.objects.bulk_create([UserNotification(**{'title':\n instance.title, 'sent_to': user, 'notification_type': INFO,\n 'content': instance.description}) for user in User.objects.all()])\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass City(BaseModel):\n name = models.CharField(max_length=255, db_index=True)\n\n def __str__(self):\n return self.name\n\n\nclass Article(BaseModel):\n created_by = models.ForeignKey(User, related_name='articles', on_delete\n =models.SET_NULL, null=True)\n title = models.CharField(max_length=200)\n description = models.TextField()\n image = models.ImageField(upload_to=get_upload_path, blank=True)\n is_archived = models.BooleanField(default=False)\n\n def __str__(self):\n return self.title\n\n\nclass UserNotification(BaseModel):\n title = models.CharField(max_length=150)\n sent_by = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,\n related_name='sent_notifications')\n sent_to = models.ForeignKey(User, on_delete=models.SET_NULL, null=True,\n related_name='notifications')\n content = models.TextField(blank=True)\n is_read = models.BooleanField(default=False)\n notification_type = models.CharField(max_length=15, choices=\n NOTIFICATION_TYPE_CHOICES, default=INFO)\n\n def __str__(self):\n if self.sent_by:\n return (\n f'Sent by {str(self.sent_by)} to {str(self.sent_to)} content {self.content}'\n )\n return f'{str(self.sent_to)} content {self.content}'\n\n\n class Meta:\n ordering = 'is_read', '-created_at'\n\n\ndef send_push_notification(sender, instance, created, **kwargs):\n if created:\n receiver = instance.sent_to\n receiver_device = receiver.devices.filter(is_active=True).first()\n if receiver_device:\n send_push_message(receiver_device.registration_id, title=\n instance.title, body=instance.content)\n\n\ndef send_article_notifications(sender, instance, created, **kwargs):\n if created:\n UserNotification.objects.bulk_create([UserNotification(**{'title':\n instance.title, 'sent_to': user, 'notification_type': INFO,\n 'content': instance.description}) for user in User.objects.all()])\n\n\npost_save.connect(send_push_notification, sender=UserNotification)\npost_save.connect(send_article_notifications, sender=Article)\n",
"step-5": "from django.contrib.auth import get_user_model\nfrom django.db import models\nfrom django.db.models.signals import post_save\n\nfrom apps.common.constants import NOTIFICATION_TYPE_CHOICES, INFO\nfrom apps.core.models import BaseModel\nfrom apps.core.utils.helpers import get_upload_path\nfrom apps.core.utils.push_notification import send_push_message\n\nUser = get_user_model()\n\n\nclass City(BaseModel):\n name = models.CharField(max_length=255, db_index=True)\n\n def __str__(self):\n return self.name\n\n\nclass Article(BaseModel):\n created_by = models.ForeignKey(User, related_name='articles', on_delete=models.SET_NULL, null=True)\n title = models.CharField(max_length=200)\n description = models.TextField()\n # Below fields are optional\n image = models.ImageField(\n upload_to=get_upload_path,\n blank=True\n )\n is_archived = models.BooleanField(default=False)\n\n def __str__(self):\n return self.title\n\n\nclass UserNotification(BaseModel):\n title = models.CharField(max_length=150)\n sent_by = models.ForeignKey(\n User, on_delete=models.SET_NULL, null=True, related_name='sent_notifications')\n sent_to = models.ForeignKey(\n User, on_delete=models.SET_NULL, null=True, related_name='notifications')\n content = models.TextField(blank=True)\n is_read = models.BooleanField(default=False) # To mark notification as read\n notification_type = models.CharField(\n max_length=15,\n choices=NOTIFICATION_TYPE_CHOICES,\n default=INFO\n )\n\n def __str__(self):\n if self.sent_by:\n return f'Sent by {str(self.sent_by)} to {str(self.sent_to)} content {self.content}'\n return f'{str(self.sent_to)} content {self.content}'\n\n class Meta:\n ordering = ('is_read', '-created_at')\n\n\ndef send_push_notification(sender, instance, created, **kwargs):\n if created:\n receiver = instance.sent_to\n receiver_device = receiver.devices.filter(is_active=True).first()\n if receiver_device:\n send_push_message(\n receiver_device.registration_id,\n title=instance.title,\n 
body=instance.content\n )\n\n\ndef send_article_notifications(sender, instance, created, **kwargs):\n if created:\n UserNotification.objects.bulk_create([\n UserNotification(**{\n 'title': instance.title,\n 'sent_to': user,\n 'notification_type': INFO,\n 'content': instance.description\n }) for user in User.objects.all()\n ])\n\n\npost_save.connect(send_push_notification, sender=UserNotification)\npost_save.connect(send_article_notifications, sender=Article)\n",
"step-ids": [
9,
10,
11,
12,
15
]
}
|
[
9,
10,
11,
12,
15
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def calc(x):
return str(math.log(abs(12 * math.sin(int(x)))))
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def calc(x):
return str(math.log(abs(12 * math.sin(int(x)))))
try:
br = webdriver.Chrome()
lk = 'http://suninjuly.github.io/get_attribute.html'
br.get(lk)
treasure = br.find_element_by_id('treasure')
valuex = treasure.get_attribute('valuex')
radio_button = br.find_element_by_id('robotsRule')
check_box = br.find_element_by_id('robotCheckbox')
input_text = br.find_element_by_id('answer')
button = br.find_element_by_css_selector('button.btn')
answer = calc(valuex)
check_box.click()
radio_button.click()
input_text.send_keys(answer)
button.click()
finally:
time.sleep(10)
br.quit()
<|reserved_special_token_1|>
from selenium import webdriver
import time
import math
def calc(x):
return str(math.log(abs(12 * math.sin(int(x)))))
try:
br = webdriver.Chrome()
lk = 'http://suninjuly.github.io/get_attribute.html'
br.get(lk)
treasure = br.find_element_by_id('treasure')
valuex = treasure.get_attribute('valuex')
radio_button = br.find_element_by_id('robotsRule')
check_box = br.find_element_by_id('robotCheckbox')
input_text = br.find_element_by_id('answer')
button = br.find_element_by_css_selector('button.btn')
answer = calc(valuex)
check_box.click()
radio_button.click()
input_text.send_keys(answer)
button.click()
finally:
time.sleep(10)
br.quit()
<|reserved_special_token_1|>
from selenium import webdriver
import time
import math
def calc(x):
return str(math.log(abs(12*math.sin(int(x)))))
try:
br = webdriver.Chrome();
lk = 'http://suninjuly.github.io/get_attribute.html'
br.get(lk)
#собираю
treasure=br.find_element_by_id('treasure')
valuex = treasure.get_attribute('valuex')
radio_button = br.find_element_by_id('robotsRule')
check_box = br.find_element_by_id('robotCheckbox')
input_text = br.find_element_by_id('answer')
button = br.find_element_by_css_selector('button.btn')
#раздаю
answer = calc(valuex)
check_box.click()
radio_button.click()
input_text.send_keys(answer)
button.click()
finally:
time.sleep(10)
br.quit()
|
flexible
|
{
"blob_id": "2a92c47231b75a441660fed80a9bce9a35695af5",
"index": 1222,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef calc(x):\n return str(math.log(abs(12 * math.sin(int(x)))))\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef calc(x):\n return str(math.log(abs(12 * math.sin(int(x)))))\n\n\ntry:\n br = webdriver.Chrome()\n lk = 'http://suninjuly.github.io/get_attribute.html'\n br.get(lk)\n treasure = br.find_element_by_id('treasure')\n valuex = treasure.get_attribute('valuex')\n radio_button = br.find_element_by_id('robotsRule')\n check_box = br.find_element_by_id('robotCheckbox')\n input_text = br.find_element_by_id('answer')\n button = br.find_element_by_css_selector('button.btn')\n answer = calc(valuex)\n check_box.click()\n radio_button.click()\n input_text.send_keys(answer)\n button.click()\nfinally:\n time.sleep(10)\n br.quit()\n",
"step-4": "from selenium import webdriver\nimport time\nimport math\n\n\ndef calc(x):\n return str(math.log(abs(12 * math.sin(int(x)))))\n\n\ntry:\n br = webdriver.Chrome()\n lk = 'http://suninjuly.github.io/get_attribute.html'\n br.get(lk)\n treasure = br.find_element_by_id('treasure')\n valuex = treasure.get_attribute('valuex')\n radio_button = br.find_element_by_id('robotsRule')\n check_box = br.find_element_by_id('robotCheckbox')\n input_text = br.find_element_by_id('answer')\n button = br.find_element_by_css_selector('button.btn')\n answer = calc(valuex)\n check_box.click()\n radio_button.click()\n input_text.send_keys(answer)\n button.click()\nfinally:\n time.sleep(10)\n br.quit()\n",
"step-5": "from selenium import webdriver\nimport time\nimport math\n\ndef calc(x):\n\treturn str(math.log(abs(12*math.sin(int(x)))))\n\n\ntry:\n\tbr = webdriver.Chrome();\n\tlk = 'http://suninjuly.github.io/get_attribute.html'\n\tbr.get(lk)\n\n#собираю\n\ttreasure=br.find_element_by_id('treasure')\n\tvaluex = treasure.get_attribute('valuex')\n\tradio_button = br.find_element_by_id('robotsRule')\n\tcheck_box = br.find_element_by_id('robotCheckbox')\n\tinput_text = br.find_element_by_id('answer')\n\tbutton = br.find_element_by_css_selector('button.btn')\t\n#раздаю\n\tanswer = calc(valuex)\n\tcheck_box.click()\n\tradio_button.click()\n\tinput_text.send_keys(answer)\n\tbutton.click()\n\t\nfinally:\n\ttime.sleep(10)\n\tbr.quit()",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import os
import torch
from data_loader import FER
from torch.utils.data import DataLoader
from tqdm import tqdm
# from tensorboardX import SummaryWriter
import model as md
# train_writer = SummaryWriter(log_dir="log_last_last_last/train")
# valid_writer = SummaryWriter(log_dir="log_last_last_last/valid")
# os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
# os.environ["CUDA_VISIBLE_DEVICES"] = "2"
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
# Hyper-parameters
lr = 1e-5
epochs = 500
batch_size = 2
train_data_path = '../../../data/face_data'
train_dataset = FER(train_data_path , image_size=64, mode='train')
train_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle = True)
valid_data_path = '../../../data/face_data'
valid_dataset = FER(valid_data_path,image_size=64, mode='val')
valid_dataloader = DataLoader(valid_dataset, batch_size=batch_size, shuffle = False)
model = md.vgg16_bn(num_classes = 3).to(device)
# model_name = 'vgg16'
# feature_extract = True
# num_classes = 3
# model = md.init_pretrained_models(model_name, num_classes, feature_extract, use_pretrained=True)
model.to(device)
criterion = torch.nn.CrossEntropyLoss().to(device)
optimizer = torch.optim.Adam(params = model.parameters(), lr = lr)
...
for epoch in range(epochs):
running_loss = 0
running_acc = 0
train_loss = 0
model.train()
# ================== Training ==================
for image, label in tqdm(train_dataloader, desc="Epoch [%d/%d]" % (epoch + 1, epochs)):
optimizer.zero_grad() # Optimizer를 0으로 초기화
image = image / 255.
pred = model(image.float().transpose(3,2).transpose(2,1).to(device))
loss = criterion(pred, label.to(device))
loss.backward()
optimizer.step()
Softmax = torch.nn.Softmax(dim=1)
_, prediction_tr = torch.max(Softmax(pred), 1)
y_true_tr = label.cpu().detach().numpy()
y_pred_tr = prediction_tr.cpu().detach().numpy()
# acc = confusion_matrix(y_true, y_pred)
acc_tr = ((label == prediction_tr.cpu()).sum().item() / pred.shape[0]) * 100
# running_loss += loss.item()
running_loss += loss * image.size(0)
running_acc += acc_tr * image.size(0)
train_loss = running_loss / len(train_dataset)
train_acc = running_acc / len(train_dataset)
# loss_sum = tf.summary.scalar("train_loss", train_loss)
# acc_sum = tf.summary.scalar("train_accuracy", train_acc)
# writer = tf.summary.FileWriter("./abc")
# summary, _ = sess.run([loss_sum, epochs], feed_dict={x: loss_sum, y: epochs})
print('>>> Train loss : %.4f - Train acc : %.4f'% (train_loss, train_acc))
# train_acc = running_acc / len(train_dataloader)
# =================== Validation ===================
running_loss = 0
running_acc = 0
model.eval()
# model.load_state_dict(torch.load('filenname'))
with torch.no_grad():
# val_st ep = 0
for image, label in valid_dataloader:
image = image / 255.
pred = model(image.float().transpose(3,2).transpose(1,2).to(device))
loss = criterion(pred, label.to(device))
Softmax = torch.nn.Softmax(dim=1)
_, prediction = torch.max(Softmax(pred), 1)
y_true = label.cpu().detach().numpy()
y_pred = prediction.cpu().detach().numpy()
# acc = confusion_matrix(y_true, y_pred)
acc_tr = ((label == prediction.cpu()).sum().item() / pred.shape[0]) * 100
# running_acc += acc_tr
# running_loss += loss.item()
# val_step +=1
running_loss += loss.item() * image.size(0)
running_acc += acc_tr * image.size(0)
valid_loss = running_loss / len(valid_dataset)
valid_acc = running_acc / len(valid_dataset)
print(">>> Valid loss : %.4f - Valid acc : %.4f\n" % (valid_loss, valid_acc))
print(prediction)
print(label)
print()
# train_writer.add_scalar('loss', train_loss, epoch)
# train_writer.add_scalar('accuracy', train_acc, epoch)
# valid_writer.add_scalar('loss', valid_loss, epoch)
# valid_writer.add_scalar('accuracy', valid_acc, epoch)
if (epoch+1) % 5 == 0 :
save_path = os.path.join('.', 'save_')
if not os.path.exists(save_path):
os.makedirs(save_path)
torch.save(model, os.path.join(save_path, 'model_epoch%04d_loss_%.4f_acc_%.4f.ckpt'%(epoch, valid_loss, valid_acc)))
|
normal
|
{
"blob_id": "c3aee5d822d48c9dc826f8f2f8d4a56e11513b9c",
"index": 2882,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nmodel.to(device)\n<mask token>\n...\nfor epoch in range(epochs):\n running_loss = 0\n running_acc = 0\n train_loss = 0\n model.train()\n for image, label in tqdm(train_dataloader, desc='Epoch [%d/%d]' % (\n epoch + 1, epochs)):\n optimizer.zero_grad()\n image = image / 255.0\n pred = model(image.float().transpose(3, 2).transpose(2, 1).to(device))\n loss = criterion(pred, label.to(device))\n loss.backward()\n optimizer.step()\n Softmax = torch.nn.Softmax(dim=1)\n _, prediction_tr = torch.max(Softmax(pred), 1)\n y_true_tr = label.cpu().detach().numpy()\n y_pred_tr = prediction_tr.cpu().detach().numpy()\n acc_tr = (label == prediction_tr.cpu()).sum().item() / pred.shape[0\n ] * 100\n running_loss += loss * image.size(0)\n running_acc += acc_tr * image.size(0)\n train_loss = running_loss / len(train_dataset)\n train_acc = running_acc / len(train_dataset)\n print('>>> Train loss : %.4f - Train acc : %.4f' % (train_loss, train_acc))\n running_loss = 0\n running_acc = 0\n model.eval()\n with torch.no_grad():\n for image, label in valid_dataloader:\n image = image / 255.0\n pred = model(image.float().transpose(3, 2).transpose(1, 2).to(\n device))\n loss = criterion(pred, label.to(device))\n Softmax = torch.nn.Softmax(dim=1)\n _, prediction = torch.max(Softmax(pred), 1)\n y_true = label.cpu().detach().numpy()\n y_pred = prediction.cpu().detach().numpy()\n acc_tr = (label == prediction.cpu()).sum().item() / pred.shape[0\n ] * 100\n running_loss += loss.item() * image.size(0)\n running_acc += acc_tr * image.size(0)\n valid_loss = running_loss / len(valid_dataset)\n valid_acc = running_acc / len(valid_dataset)\n print('>>> Valid loss : %.4f - Valid acc : %.4f\\n' % (valid_loss,\n valid_acc))\n print(prediction)\n print(label)\n print()\n if (epoch + 1) % 5 == 0:\n save_path = os.path.join('.', 'save_')\n if not os.path.exists(save_path):\n os.makedirs(save_path)\n torch.save(model, os.path.join(save_path, \n 'model_epoch%04d_loss_%.4f_acc_%.4f.ckpt' % 
(epoch, valid_loss,\n valid_acc)))\n",
"step-3": "<mask token>\ndevice = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\nlr = 1e-05\nepochs = 500\nbatch_size = 2\ntrain_data_path = '../../../data/face_data'\ntrain_dataset = FER(train_data_path, image_size=64, mode='train')\ntrain_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle\n =True)\nvalid_data_path = '../../../data/face_data'\nvalid_dataset = FER(valid_data_path, image_size=64, mode='val')\nvalid_dataloader = DataLoader(valid_dataset, batch_size=batch_size, shuffle\n =False)\nmodel = md.vgg16_bn(num_classes=3).to(device)\nmodel.to(device)\ncriterion = torch.nn.CrossEntropyLoss().to(device)\noptimizer = torch.optim.Adam(params=model.parameters(), lr=lr)\n...\nfor epoch in range(epochs):\n running_loss = 0\n running_acc = 0\n train_loss = 0\n model.train()\n for image, label in tqdm(train_dataloader, desc='Epoch [%d/%d]' % (\n epoch + 1, epochs)):\n optimizer.zero_grad()\n image = image / 255.0\n pred = model(image.float().transpose(3, 2).transpose(2, 1).to(device))\n loss = criterion(pred, label.to(device))\n loss.backward()\n optimizer.step()\n Softmax = torch.nn.Softmax(dim=1)\n _, prediction_tr = torch.max(Softmax(pred), 1)\n y_true_tr = label.cpu().detach().numpy()\n y_pred_tr = prediction_tr.cpu().detach().numpy()\n acc_tr = (label == prediction_tr.cpu()).sum().item() / pred.shape[0\n ] * 100\n running_loss += loss * image.size(0)\n running_acc += acc_tr * image.size(0)\n train_loss = running_loss / len(train_dataset)\n train_acc = running_acc / len(train_dataset)\n print('>>> Train loss : %.4f - Train acc : %.4f' % (train_loss, train_acc))\n running_loss = 0\n running_acc = 0\n model.eval()\n with torch.no_grad():\n for image, label in valid_dataloader:\n image = image / 255.0\n pred = model(image.float().transpose(3, 2).transpose(1, 2).to(\n device))\n loss = criterion(pred, label.to(device))\n Softmax = torch.nn.Softmax(dim=1)\n _, prediction = torch.max(Softmax(pred), 1)\n y_true = 
label.cpu().detach().numpy()\n y_pred = prediction.cpu().detach().numpy()\n acc_tr = (label == prediction.cpu()).sum().item() / pred.shape[0\n ] * 100\n running_loss += loss.item() * image.size(0)\n running_acc += acc_tr * image.size(0)\n valid_loss = running_loss / len(valid_dataset)\n valid_acc = running_acc / len(valid_dataset)\n print('>>> Valid loss : %.4f - Valid acc : %.4f\\n' % (valid_loss,\n valid_acc))\n print(prediction)\n print(label)\n print()\n if (epoch + 1) % 5 == 0:\n save_path = os.path.join('.', 'save_')\n if not os.path.exists(save_path):\n os.makedirs(save_path)\n torch.save(model, os.path.join(save_path, \n 'model_epoch%04d_loss_%.4f_acc_%.4f.ckpt' % (epoch, valid_loss,\n valid_acc)))\n",
"step-4": "import os\nimport torch\nfrom data_loader import FER\nfrom torch.utils.data import DataLoader\nfrom tqdm import tqdm\nimport model as md\ndevice = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\nlr = 1e-05\nepochs = 500\nbatch_size = 2\ntrain_data_path = '../../../data/face_data'\ntrain_dataset = FER(train_data_path, image_size=64, mode='train')\ntrain_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle\n =True)\nvalid_data_path = '../../../data/face_data'\nvalid_dataset = FER(valid_data_path, image_size=64, mode='val')\nvalid_dataloader = DataLoader(valid_dataset, batch_size=batch_size, shuffle\n =False)\nmodel = md.vgg16_bn(num_classes=3).to(device)\nmodel.to(device)\ncriterion = torch.nn.CrossEntropyLoss().to(device)\noptimizer = torch.optim.Adam(params=model.parameters(), lr=lr)\n...\nfor epoch in range(epochs):\n running_loss = 0\n running_acc = 0\n train_loss = 0\n model.train()\n for image, label in tqdm(train_dataloader, desc='Epoch [%d/%d]' % (\n epoch + 1, epochs)):\n optimizer.zero_grad()\n image = image / 255.0\n pred = model(image.float().transpose(3, 2).transpose(2, 1).to(device))\n loss = criterion(pred, label.to(device))\n loss.backward()\n optimizer.step()\n Softmax = torch.nn.Softmax(dim=1)\n _, prediction_tr = torch.max(Softmax(pred), 1)\n y_true_tr = label.cpu().detach().numpy()\n y_pred_tr = prediction_tr.cpu().detach().numpy()\n acc_tr = (label == prediction_tr.cpu()).sum().item() / pred.shape[0\n ] * 100\n running_loss += loss * image.size(0)\n running_acc += acc_tr * image.size(0)\n train_loss = running_loss / len(train_dataset)\n train_acc = running_acc / len(train_dataset)\n print('>>> Train loss : %.4f - Train acc : %.4f' % (train_loss, train_acc))\n running_loss = 0\n running_acc = 0\n model.eval()\n with torch.no_grad():\n for image, label in valid_dataloader:\n image = image / 255.0\n pred = model(image.float().transpose(3, 2).transpose(1, 2).to(\n device))\n loss = criterion(pred, 
label.to(device))\n Softmax = torch.nn.Softmax(dim=1)\n _, prediction = torch.max(Softmax(pred), 1)\n y_true = label.cpu().detach().numpy()\n y_pred = prediction.cpu().detach().numpy()\n acc_tr = (label == prediction.cpu()).sum().item() / pred.shape[0\n ] * 100\n running_loss += loss.item() * image.size(0)\n running_acc += acc_tr * image.size(0)\n valid_loss = running_loss / len(valid_dataset)\n valid_acc = running_acc / len(valid_dataset)\n print('>>> Valid loss : %.4f - Valid acc : %.4f\\n' % (valid_loss,\n valid_acc))\n print(prediction)\n print(label)\n print()\n if (epoch + 1) % 5 == 0:\n save_path = os.path.join('.', 'save_')\n if not os.path.exists(save_path):\n os.makedirs(save_path)\n torch.save(model, os.path.join(save_path, \n 'model_epoch%04d_loss_%.4f_acc_%.4f.ckpt' % (epoch, valid_loss,\n valid_acc)))\n",
"step-5": "import os\r\nimport torch\r\nfrom data_loader import FER\r\nfrom torch.utils.data import DataLoader\r\nfrom tqdm import tqdm\r\n# from tensorboardX import SummaryWriter\r\nimport model as md\r\n\r\n\r\n\r\n# train_writer = SummaryWriter(log_dir=\"log_last_last_last/train\")\r\n# valid_writer = SummaryWriter(log_dir=\"log_last_last_last/valid\")\r\n\r\n# os.environ[\"CUDA_DEVICE_ORDER\"] = \"PCI_BUS_ID\"\r\n# os.environ[\"CUDA_VISIBLE_DEVICES\"] = \"2\"\r\ndevice = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\r\n\r\n# Hyper-parameters\r\nlr = 1e-5\r\nepochs = 500\r\nbatch_size = 2\r\n\r\ntrain_data_path = '../../../data/face_data'\r\ntrain_dataset = FER(train_data_path , image_size=64, mode='train')\r\ntrain_dataloader = DataLoader(train_dataset, batch_size=batch_size, shuffle = True)\r\n\r\nvalid_data_path = '../../../data/face_data'\r\nvalid_dataset = FER(valid_data_path,image_size=64, mode='val')\r\nvalid_dataloader = DataLoader(valid_dataset, batch_size=batch_size, shuffle = False)\r\n\r\n\r\nmodel = md.vgg16_bn(num_classes = 3).to(device)\r\n\r\n\r\n\r\n# model_name = 'vgg16'\r\n# feature_extract = True\r\n# num_classes = 3\r\n# model = md.init_pretrained_models(model_name, num_classes, feature_extract, use_pretrained=True)\r\n\r\n\r\nmodel.to(device)\r\ncriterion = torch.nn.CrossEntropyLoss().to(device)\r\noptimizer = torch.optim.Adam(params = model.parameters(), lr = lr)\r\n\r\n...\r\nfor epoch in range(epochs):\r\n running_loss = 0\r\n running_acc = 0\r\n train_loss = 0\r\n model.train()\r\n\r\n # ================== Training ==================\r\n for image, label in tqdm(train_dataloader, desc=\"Epoch [%d/%d]\" % (epoch + 1, epochs)):\r\n optimizer.zero_grad() # Optimizer를 0으로 초기화\r\n image = image / 255.\r\n pred = model(image.float().transpose(3,2).transpose(2,1).to(device))\r\n loss = criterion(pred, label.to(device))\r\n\r\n loss.backward()\r\n optimizer.step()\r\n\r\n Softmax = torch.nn.Softmax(dim=1)\r\n _, prediction_tr = 
torch.max(Softmax(pred), 1)\r\n\r\n y_true_tr = label.cpu().detach().numpy()\r\n y_pred_tr = prediction_tr.cpu().detach().numpy()\r\n # acc = confusion_matrix(y_true, y_pred)\r\n\r\n acc_tr = ((label == prediction_tr.cpu()).sum().item() / pred.shape[0]) * 100\r\n\r\n\r\n # running_loss += loss.item()\r\n running_loss += loss * image.size(0)\r\n running_acc += acc_tr * image.size(0)\r\n\r\n train_loss = running_loss / len(train_dataset)\r\n train_acc = running_acc / len(train_dataset)\r\n\r\n # loss_sum = tf.summary.scalar(\"train_loss\", train_loss)\r\n # acc_sum = tf.summary.scalar(\"train_accuracy\", train_acc)\r\n\r\n\r\n # writer = tf.summary.FileWriter(\"./abc\")\r\n\r\n # summary, _ = sess.run([loss_sum, epochs], feed_dict={x: loss_sum, y: epochs})\r\n\r\n\r\n print('>>> Train loss : %.4f - Train acc : %.4f'% (train_loss, train_acc))\r\n # train_acc = running_acc / len(train_dataloader)\r\n\r\n\r\n # =================== Validation ===================\r\n running_loss = 0\r\n running_acc = 0\r\n model.eval()\r\n # model.load_state_dict(torch.load('filenname'))\r\n\r\n\r\n\r\n with torch.no_grad():\r\n # val_st ep = 0\r\n for image, label in valid_dataloader:\r\n image = image / 255.\r\n\r\n pred = model(image.float().transpose(3,2).transpose(1,2).to(device))\r\n loss = criterion(pred, label.to(device))\r\n\r\n\r\n Softmax = torch.nn.Softmax(dim=1)\r\n _, prediction = torch.max(Softmax(pred), 1)\r\n\r\n y_true = label.cpu().detach().numpy()\r\n y_pred = prediction.cpu().detach().numpy()\r\n # acc = confusion_matrix(y_true, y_pred)\r\n acc_tr = ((label == prediction.cpu()).sum().item() / pred.shape[0]) * 100\r\n # running_acc += acc_tr\r\n\r\n # running_loss += loss.item()\r\n # val_step +=1\r\n running_loss += loss.item() * image.size(0)\r\n running_acc += acc_tr * image.size(0)\r\n\r\n valid_loss = running_loss / len(valid_dataset)\r\n valid_acc = running_acc / len(valid_dataset)\r\n\r\n print(\">>> Valid loss : %.4f - Valid acc : %.4f\\n\" % (valid_loss, 
valid_acc))\r\n print(prediction)\r\n print(label)\r\n print()\r\n\r\n # train_writer.add_scalar('loss', train_loss, epoch)\r\n # train_writer.add_scalar('accuracy', train_acc, epoch)\r\n # valid_writer.add_scalar('loss', valid_loss, epoch)\r\n # valid_writer.add_scalar('accuracy', valid_acc, epoch)\r\n\r\n if (epoch+1) % 5 == 0 :\r\n save_path = os.path.join('.', 'save_')\r\n if not os.path.exists(save_path):\r\n os.makedirs(save_path)\r\n torch.save(model, os.path.join(save_path, 'model_epoch%04d_loss_%.4f_acc_%.4f.ckpt'%(epoch, valid_loss, valid_acc)))\r\n\r\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
#HOW TO BUILD A SIMPLE CALCULATOR
#1.ADD
#2.SUBTRACT
#3.MULTIPLY
#4.DIVIDE
print("Select an operation to perform: ")
print("1.ADD")
print("2.SUBTRACT")
print("3.MULTIPLY")
print("4.DIVIDE")
print("5.SQUARE ROOT")
operation=input()
if operation=="1":
a=input("Enter first number: ")
b=input("Enter second number: ")
result=int(a)+int(b)
print("The sum is "+str(result))
elif operation=="2":
a=input("Enter first number: ")
b=input("Enter second number: ")
result=int(a)-int(b)
print("The difference is "+str(result))
elif operation=="3":
a=input("Enter first number: ")
b=input("Enter second number: ")
result=int(a)*int(b)
print("The product is "+str(result))
elif operation=="4":
a=input("Enter first number: ")
b=input("Enter second number: ")
result=int(a)/int(b)
print("The result is "+str(result))
elif operation=="5":
a=input("Enter number:")
result=int(a)*int(a)
print("The square of "+a+ " is "+str(result))
else:
print("Invalid entry!")
|
normal
|
{
"blob_id": "ea35180daecb8ca4b9bd351a949a4757b97322ec",
"index": 2819,
"step-1": "<mask token>\n",
"step-2": "print('Select an operation to perform: ')\nprint('1.ADD')\nprint('2.SUBTRACT')\nprint('3.MULTIPLY')\nprint('4.DIVIDE')\nprint('5.SQUARE ROOT')\n<mask token>\nif operation == '1':\n a = input('Enter first number: ')\n b = input('Enter second number: ')\n result = int(a) + int(b)\n print('The sum is ' + str(result))\nelif operation == '2':\n a = input('Enter first number: ')\n b = input('Enter second number: ')\n result = int(a) - int(b)\n print('The difference is ' + str(result))\nelif operation == '3':\n a = input('Enter first number: ')\n b = input('Enter second number: ')\n result = int(a) * int(b)\n print('The product is ' + str(result))\nelif operation == '4':\n a = input('Enter first number: ')\n b = input('Enter second number: ')\n result = int(a) / int(b)\n print('The result is ' + str(result))\nelif operation == '5':\n a = input('Enter number:')\n result = int(a) * int(a)\n print('The square of ' + a + ' is ' + str(result))\nelse:\n print('Invalid entry!')\n",
"step-3": "print('Select an operation to perform: ')\nprint('1.ADD')\nprint('2.SUBTRACT')\nprint('3.MULTIPLY')\nprint('4.DIVIDE')\nprint('5.SQUARE ROOT')\noperation = input()\nif operation == '1':\n a = input('Enter first number: ')\n b = input('Enter second number: ')\n result = int(a) + int(b)\n print('The sum is ' + str(result))\nelif operation == '2':\n a = input('Enter first number: ')\n b = input('Enter second number: ')\n result = int(a) - int(b)\n print('The difference is ' + str(result))\nelif operation == '3':\n a = input('Enter first number: ')\n b = input('Enter second number: ')\n result = int(a) * int(b)\n print('The product is ' + str(result))\nelif operation == '4':\n a = input('Enter first number: ')\n b = input('Enter second number: ')\n result = int(a) / int(b)\n print('The result is ' + str(result))\nelif operation == '5':\n a = input('Enter number:')\n result = int(a) * int(a)\n print('The square of ' + a + ' is ' + str(result))\nelse:\n print('Invalid entry!')\n",
"step-4": "#HOW TO BUILD A SIMPLE CALCULATOR\r\n#1.ADD\r\n#2.SUBTRACT\r\n#3.MULTIPLY\r\n#4.DIVIDE\r\n\r\nprint(\"Select an operation to perform: \")\r\nprint(\"1.ADD\")\r\nprint(\"2.SUBTRACT\")\r\nprint(\"3.MULTIPLY\")\r\nprint(\"4.DIVIDE\")\r\nprint(\"5.SQUARE ROOT\")\r\n\r\noperation=input()\r\nif operation==\"1\":\r\n\ta=input(\"Enter first number: \")\r\n\tb=input(\"Enter second number: \")\r\n\tresult=int(a)+int(b)\r\n\tprint(\"The sum is \"+str(result))\r\nelif operation==\"2\":\r\n\ta=input(\"Enter first number: \")\r\n\tb=input(\"Enter second number: \")\r\n\tresult=int(a)-int(b)\r\n\tprint(\"The difference is \"+str(result))\r\nelif operation==\"3\":\r\n\ta=input(\"Enter first number: \")\r\n\tb=input(\"Enter second number: \")\r\n\tresult=int(a)*int(b)\r\n\tprint(\"The product is \"+str(result))\r\nelif operation==\"4\":\r\n\ta=input(\"Enter first number: \")\r\n\tb=input(\"Enter second number: \")\r\n\tresult=int(a)/int(b)\r\n\tprint(\"The result is \"+str(result))\r\nelif operation==\"5\":\r\n a=input(\"Enter number:\")\r\n result=int(a)*int(a)\r\n print(\"The square of \"+a+ \" is \"+str(result))\r\nelse:\r\n\tprint(\"Invalid entry!\")\r\n\r\n\r\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#----------- writing our for loop
""" number = [1,2,3,4,5]
friends = ['ahmet', 'mehmet','ayşe']
# for n in number:
# print(n)
# for n in friends:
# print(n)
def my_for_loop(my_iterable):
my_iterator = iter(my_iterable)
while True:
try:
print(next(my_iterator))
except StopIteration:
break
my_for_loop(number)
my_for_loop(friends) """
#--------------to show thirth power of given range numbers with iterator class
""" class CubeNumbers:
def __init__(self, start, end):
self.start = start
self.end = end
def __iter__(self):
return self
def __next__(self):
if self.start <= self.end:
result = self.start ** 3
self.start += 1
return result
else:
raise StopIteration
cubed = CubeNumbers(0, 5)
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed)) """
#--------to show thirth power of given range numbers with generator
""" cubed = (x**3 for x in range(0, 5))
print(type(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed))
print(next(cubed)) """
#---------------fibonacci numbers with generator function
""" def fibo(limit):
x = 0
y = 1
while x < limit:
yield x
x, y = y, x + y
my_fib = fibo(1000)
for fib in my_fib:
print(fib) """
#-------------to show index and value together
""" friends = ['john', 'walter', 'henry']
# i = 0
# while i < len(friends):
# v = friends[i]
# print(i, v)
# i += 1
# for n in range(len(friends)):
# v = friends[n]
# print(n, v)
for i, v in enumerate(friends):
print(i, v) """
|
normal
|
{
"blob_id": "70325d0e5eb9dcd7a065f83eaf14647bc30bd7f3",
"index": 9053,
"step-1": "<mask token>\n",
"step-2": "\n#----------- writing our for loop\n\"\"\" number = [1,2,3,4,5]\nfriends = ['ahmet', 'mehmet','ayşe']\n\n# for n in number:\n# print(n)\n# for n in friends:\n# print(n)\n\ndef my_for_loop(my_iterable):\n my_iterator = iter(my_iterable)\n while True:\n try:\n print(next(my_iterator))\n except StopIteration:\n break\n\nmy_for_loop(number)\nmy_for_loop(friends) \"\"\"\n\n\n#--------------to show thirth power of given range numbers with iterator class\n\n\n\"\"\" class CubeNumbers:\n def __init__(self, start, end):\n self.start = start\n self.end = end\n \n def __iter__(self):\n return self\n \n def __next__(self):\n if self.start <= self.end:\n result = self.start ** 3\n self.start += 1\n return result\n else:\n raise StopIteration\n \ncubed = CubeNumbers(0, 5)\nprint(next(cubed))\nprint(next(cubed))\nprint(next(cubed))\nprint(next(cubed))\nprint(next(cubed))\nprint(next(cubed))\nprint(next(cubed)) \"\"\"\n\n\n#--------to show thirth power of given range numbers with generator \n\n\"\"\" cubed = (x**3 for x in range(0, 5))\nprint(type(cubed))\nprint(next(cubed))\nprint(next(cubed))\nprint(next(cubed))\nprint(next(cubed))\nprint(next(cubed))\nprint(next(cubed))\nprint(next(cubed)) \"\"\"\n\n\n#---------------fibonacci numbers with generator function\n\n\"\"\" def fibo(limit):\n x = 0\n y = 1\n while x < limit:\n yield x\n x, y = y, x + y\n \nmy_fib = fibo(1000)\nfor fib in my_fib:\n print(fib) \"\"\"\n \n\n#-------------to show index and value together\n\n\"\"\" friends = ['john', 'walter', 'henry']\n\n# i = 0\n# while i < len(friends):\n# v = friends[i]\n# print(i, v)\n# i += 1\n\n# for n in range(len(friends)):\n# v = friends[n]\n# print(n, v)\n\nfor i, v in enumerate(friends):\n print(i, v) \"\"\"",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
# This script reads through a Voyager import log and outputs duplicate bib IDs
# as well as the IDs of bibs, mfhds, and items created.
import re
import openpyxl

# prompt for file names
fname = input("Enter input file, including extension: ")
fout = input("Enter output file, without extension: ")

# set up lists; the first entry of each is that column's spreadsheet header
duplicates = [["Duplicate Bib ID"]]
bibs = [["Bib ID"]]
mfhds = [["MFHD ID"]]
items = [["Item ID"]]

# create a workbook with two sheets
wb1 = openpyxl.Workbook()
ws1 = wb1.active
ws1.title = "Duplicate Bibs"
ws2 = wb1.create_sheet(index=1, title="IDs Added")

# Read the whole log, then scan it line by line.  The ID of a duplicate
# appears on the line AFTER the "BibID & rank" marker, so we index ahead.
# (The original also tested lines[i + 2].startswith("") here, which is
# vacuously true and has been dropped.)
with open(fname, "r") as f:
    lines = f.readlines()
n_lines = len(lines)
for i, line in enumerate(lines):
    line = line.rstrip()
    if line.startswith("\tBibID & rank") and n_lines > i + 2:
        # the duplicate's ID appears on the next line as "<digits> - "
        bibline = re.findall(r'\d+\s-\s', lines[i + 1])
        dupeid = re.findall(r'\d+', str(bibline))
        duplicates.append(dupeid)
    elif line.startswith("\tAdding Bib"):
        bibs.append(re.findall(r'\d+', line))
    elif line.startswith("MFHD_ID "):
        mfhds.append(re.findall(r'\d+', line))
    elif line.startswith("ITEM_ID "):
        items.append(re.findall(r'\d+', line))

# write the lists to columns in the spreadsheet and save
for row in duplicates:
    ws1.append(row)
for column, ids in enumerate((bibs, mfhds, items), start=1):
    for r, values in enumerate(ids, start=1):
        ws2.cell(row=r, column=column).value = values[0]
wb1.save(fout + ".xlsx")
|
normal
|
{
"blob_id": "fc06d8a26a99c16a4b38ad0b4bbb28a1dc522991",
"index": 6902,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith fh as f:\n lines = f.readlines()\n n_lines = len(lines)\n for i, line in enumerate(lines):\n line = line.rstrip()\n if line.startswith('\\tBibID & rank') and n_lines > i + 2 and lines[\n i + 2].startswith(''):\n bibline = re.findall('\\\\d+\\\\s-\\\\s', lines[i + 1])\n dupeid = re.findall('\\\\d+', str(bibline))\n duplicates.append(dupeid)\n elif line.startswith('\\tAdding Bib'):\n line = re.findall('\\\\d+', str(line))\n bibs.append(line)\n elif line.startswith('MFHD_ID '):\n line = re.findall('\\\\d+', str(line))\n mfhds.append(line)\n elif line.startswith('ITEM_ID '):\n line = re.findall('\\\\d+', str(line))\n items.append(line)\n else:\n continue\nfor row in duplicates:\n ws1.append(row)\nfor r in range(0, len(bibs)):\n ws2.cell(row=r + 1, column=1).value = bibs[r][0]\nfor r in range(0, len(mfhds)):\n ws2.cell(row=r + 1, column=2).value = mfhds[r][0]\nfor r in range(0, len(items)):\n ws2.cell(row=r + 1, column=3).value = items[r][0]\nwb1.save(fout + '.xlsx')\n",
"step-3": "<mask token>\nfname = input('Enter input file, including extension: ')\nfout = input('Enter output file, without extension: ')\nfh = open(fname, 'r')\nduplicates = [['Duplicate Bib ID']]\nbibs = [['Bib ID']]\nmfhds = [['MFHD ID']]\nitems = [['Item ID']]\nwb1 = openpyxl.Workbook()\nws1 = wb1.active\nws1.title = 'Duplicate Bibs'\nws2 = wb1.create_sheet(index=1, title='IDs Added')\nwith fh as f:\n lines = f.readlines()\n n_lines = len(lines)\n for i, line in enumerate(lines):\n line = line.rstrip()\n if line.startswith('\\tBibID & rank') and n_lines > i + 2 and lines[\n i + 2].startswith(''):\n bibline = re.findall('\\\\d+\\\\s-\\\\s', lines[i + 1])\n dupeid = re.findall('\\\\d+', str(bibline))\n duplicates.append(dupeid)\n elif line.startswith('\\tAdding Bib'):\n line = re.findall('\\\\d+', str(line))\n bibs.append(line)\n elif line.startswith('MFHD_ID '):\n line = re.findall('\\\\d+', str(line))\n mfhds.append(line)\n elif line.startswith('ITEM_ID '):\n line = re.findall('\\\\d+', str(line))\n items.append(line)\n else:\n continue\nfor row in duplicates:\n ws1.append(row)\nfor r in range(0, len(bibs)):\n ws2.cell(row=r + 1, column=1).value = bibs[r][0]\nfor r in range(0, len(mfhds)):\n ws2.cell(row=r + 1, column=2).value = mfhds[r][0]\nfor r in range(0, len(items)):\n ws2.cell(row=r + 1, column=3).value = items[r][0]\nwb1.save(fout + '.xlsx')\n",
"step-4": "import re\nimport openpyxl\nfname = input('Enter input file, including extension: ')\nfout = input('Enter output file, without extension: ')\nfh = open(fname, 'r')\nduplicates = [['Duplicate Bib ID']]\nbibs = [['Bib ID']]\nmfhds = [['MFHD ID']]\nitems = [['Item ID']]\nwb1 = openpyxl.Workbook()\nws1 = wb1.active\nws1.title = 'Duplicate Bibs'\nws2 = wb1.create_sheet(index=1, title='IDs Added')\nwith fh as f:\n lines = f.readlines()\n n_lines = len(lines)\n for i, line in enumerate(lines):\n line = line.rstrip()\n if line.startswith('\\tBibID & rank') and n_lines > i + 2 and lines[\n i + 2].startswith(''):\n bibline = re.findall('\\\\d+\\\\s-\\\\s', lines[i + 1])\n dupeid = re.findall('\\\\d+', str(bibline))\n duplicates.append(dupeid)\n elif line.startswith('\\tAdding Bib'):\n line = re.findall('\\\\d+', str(line))\n bibs.append(line)\n elif line.startswith('MFHD_ID '):\n line = re.findall('\\\\d+', str(line))\n mfhds.append(line)\n elif line.startswith('ITEM_ID '):\n line = re.findall('\\\\d+', str(line))\n items.append(line)\n else:\n continue\nfor row in duplicates:\n ws1.append(row)\nfor r in range(0, len(bibs)):\n ws2.cell(row=r + 1, column=1).value = bibs[r][0]\nfor r in range(0, len(mfhds)):\n ws2.cell(row=r + 1, column=2).value = mfhds[r][0]\nfor r in range(0, len(items)):\n ws2.cell(row=r + 1, column=3).value = items[r][0]\nwb1.save(fout + '.xlsx')\n",
"step-5": "#This script reads through a Voyager import log and outputs duplicate bib IDs as well as the IDs of bibs, mfhds, and items created.\n\n#import regular expressions and openpyxl\nimport re\nimport openpyxl\n\n# prompt for file names\nfname = input(\"Enter input file, including extension: \")\nfout = input(\"Enter output file, without extension: \")\nfh = open(fname, \"r\")\n\n# set up lists\nduplicates = [[\"Duplicate Bib ID\"]]\nbibs = [[\"Bib ID\"]]\nmfhds = [[\"MFHD ID\"]]\nitems = [[\"Item ID\"]]\n\n# create and open workbook with two sheets\nwb1=openpyxl.Workbook()\nws1=wb1.active\nws1.title = \"Duplicate Bibs\"\nws2 = wb1.create_sheet(index=1, title=\"IDs Added\")\n\n# read through file, extract the line after the line starting with BibID & rank and write to lists\nwith fh as f:\n lines = f.readlines()\n n_lines = len(lines)\n for i, line in enumerate (lines) :\n line = line.rstrip()\n if line.startswith(\"\tBibID & rank\") and \\\n n_lines > i + 2 and lines[i + 2].startswith(\"\") :\n bibline = re.findall(r'\\d+\\s-\\s', lines[i+1])\n dupeid = re.findall(r'\\d+', str(bibline))\n duplicates.append(dupeid)\n elif line.startswith(\"\tAdding Bib\") :\n line = re.findall(r'\\d+',str(line))\n bibs.append(line)\n elif line.startswith(\"MFHD_ID \") :\n line = re.findall(r'\\d+',str(line))\n mfhds.append(line)\n elif line.startswith(\"ITEM_ID \") :\n line = re.findall(r'\\d+',str(line))\n items.append(line)\n else :\n continue\n\n# write the lists to columns in the spreadsheet and save\nfor row in duplicates:\n ws1.append(row)\nfor r in range(0,len(bibs)):\n ws2.cell(row=r+1,column=1).value=bibs[r][0]\nfor r in range(0,len(mfhds)):\n ws2.cell(row=r+1,column=2).value=mfhds[r][0]\nfor r in range(0,len(items)):\n ws2.cell(row=r+1,column=3).value=items[r][0]\nwb1.save(fout + \".xlsx\")\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import django.dispatch

# Signal sent when a property is viewed; receivers get the property, the
# viewing user, and the request/response pair.
# NOTE(review): ``providing_args`` is purely documentational and was removed
# from django.dispatch.Signal in Django 4.0 — confirm the project's Django
# version before upgrading.
property_viewed = django.dispatch.Signal(providing_args=["property","user", "request", "response"])
|
normal
|
{
"blob_id": "00099cab0c816c76fc0fa94d7905175feb6919cf",
"index": 9795,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nproperty_viewed = django.dispatch.Signal(providing_args=['property', 'user',\n 'request', 'response'])\n",
"step-3": "import django.dispatch\nproperty_viewed = django.dispatch.Signal(providing_args=['property', 'user',\n 'request', 'response'])\n",
"step-4": "import django.dispatch\n\nproperty_viewed = django.dispatch.Signal(providing_args=[\"property\",\"user\", \"request\", \"response\"])",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def resolve_data(raw_data, derivatives_prefix):
    """Recursively flatten *raw_data* into a single-level dict.

    Keys are built by joining nested keys with '_' onto
    *derivatives_prefix*.  Every list contributes a '<prefix>cnt' entry;
    a multi-element list of uniform dicts additionally contributes
    '<prefix><field>_sum' / '<prefix><field>_avg' entries for its
    numeric fields.
    """
    flat = {}
    if isinstance(raw_data, dict):
        for key, value in raw_data.items():
            if isinstance(value, (dict, list)):
                flat.update(resolve_data(value, derivatives_prefix + key + '_'))
            else:
                flat[derivatives_prefix + key] = value
        return flat
    if not isinstance(raw_data, list):
        # Scalar leaf: store it directly under the accumulated prefix.
        flat[derivatives_prefix] = raw_data
        return flat
    flat[derivatives_prefix + 'cnt'] = len(raw_data)
    if len(raw_data) <= 1:
        # Zero or one element: just recurse into any nested containers.
        for item in raw_data:
            if isinstance(item, (dict, list)):
                flat.update(resolve_data(item, derivatives_prefix))
        return flat
    first = raw_data[0]
    if isinstance(first, dict):
        if first.keys() == raw_data[1].keys():
            # Uniform list of records: aggregate numeric fields and
            # recurse into nested containers of every record.
            for field, sample in first.items():
                if isinstance(sample, (dict, list)):
                    for record in raw_data:
                        if record.get(field) is not None:
                            flat.update(resolve_data(record[field],
                                                     derivatives_prefix + field + '_'))
                elif isinstance(sample, (float, int, bool)):
                    # NOTE: falsy values are skipped in the sum (harmless
                    # numerically) and the average divides by the full
                    # record count, as in the original contract.
                    total = sum(record.get(field) for record in raw_data
                                if record.get(field))
                    flat[derivatives_prefix + field + '_' + 'sum'] = total
                    flat[derivatives_prefix + field + '_' + 'avg'] = float(total) / len(raw_data)
        else:
            # Heterogeneous records: flatten each one independently.
            for item in raw_data:
                if isinstance(item, (dict, list)):
                    flat.update(resolve_data(item, derivatives_prefix))
    # A multi-element list of scalars contributes only the count.
    return flat
|
flexible
|
{
"blob_id": "31b109d992a1b64816f483e870b00c703643f514",
"index": 6577,
"step-1": "<mask token>\n",
"step-2": "def resolve_data(raw_data, derivatives_prefix):\n derivatives = {}\n if isinstance(raw_data, dict):\n for k, v in raw_data.items():\n if isinstance(v, dict):\n derivatives.update(resolve_data(v, derivatives_prefix + k +\n '_'))\n elif isinstance(v, list):\n derivatives.update(resolve_data(v, derivatives_prefix + k +\n '_'))\n else:\n derivatives[derivatives_prefix + k] = v\n elif isinstance(raw_data, list):\n derivatives[derivatives_prefix + 'cnt'] = len(raw_data)\n if len(raw_data) > 1:\n if isinstance(raw_data[0], dict):\n if raw_data[0].keys() == raw_data[1].keys():\n for ke, va in raw_data[0].items():\n if isinstance(va, dict):\n for r in raw_data:\n if r.get(ke) is not None:\n derivatives.update(resolve_data(r[ke], \n derivatives_prefix + ke + '_'))\n elif isinstance(va, list):\n for r in raw_data:\n if r.get(ke) is not None:\n derivatives.update(resolve_data(r[ke], \n derivatives_prefix + ke + '_'))\n elif isinstance(va, (float, int, bool)):\n derivatives[derivatives_prefix + ke + '_' + 'sum'\n ] = sum([r.get(ke) for r in raw_data if r.\n get(ke)])\n derivatives[derivatives_prefix + ke + '_' + 'avg'\n ] = float(sum([r.get(ke) for r in raw_data if\n r.get(ke)])) / len(raw_data)\n else:\n pass\n else:\n for li in raw_data:\n if isinstance(li, dict):\n derivatives.update(resolve_data(li,\n derivatives_prefix))\n elif isinstance(li, list):\n derivatives.update(resolve_data(li,\n derivatives_prefix))\n else:\n pass\n else:\n pass\n else:\n for li in raw_data:\n if isinstance(li, dict):\n derivatives.update(resolve_data(li, derivatives_prefix))\n elif isinstance(li, list):\n derivatives.update(resolve_data(li, derivatives_prefix))\n else:\n pass\n else:\n derivatives[derivatives_prefix] = raw_data\n return derivatives\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
__import__('pkg_resources').require('Django==2.1.dev20180209010235')
<|reserved_special_token_0|>
exec(compile(open(__file__).read(), __file__, 'exec'))
<|reserved_special_token_1|>
__requires__ = 'Django==2.1.dev20180209010235'
__import__('pkg_resources').require('Django==2.1.dev20180209010235')
__file__ = 'D:\\python_projects\\ENV2\\django\\django\\bin\\django-admin.py'
exec(compile(open(__file__).read(), __file__, 'exec'))
<|reserved_special_token_1|>
#!d:\python_projects\env2\scripts\python.exe
# EASY-INSTALL-DEV-SCRIPT: 'Django==2.1.dev20180209010235','django-admin.py'
__requires__ = 'Django==2.1.dev20180209010235'
__import__('pkg_resources').require('Django==2.1.dev20180209010235')
__file__ = 'D:\\python_projects\\ENV2\\django\\django\\bin\\django-admin.py'
exec(compile(open(__file__).read(), __file__, 'exec'))
|
flexible
|
{
"blob_id": "4bbf0a0fadc506ad3674912f1885525a94b5b1e9",
"index": 2807,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__import__('pkg_resources').require('Django==2.1.dev20180209010235')\n<mask token>\nexec(compile(open(__file__).read(), __file__, 'exec'))\n",
"step-3": "__requires__ = 'Django==2.1.dev20180209010235'\n__import__('pkg_resources').require('Django==2.1.dev20180209010235')\n__file__ = 'D:\\\\python_projects\\\\ENV2\\\\django\\\\django\\\\bin\\\\django-admin.py'\nexec(compile(open(__file__).read(), __file__, 'exec'))\n",
"step-4": "#!d:\\python_projects\\env2\\scripts\\python.exe\n# EASY-INSTALL-DEV-SCRIPT: 'Django==2.1.dev20180209010235','django-admin.py'\n__requires__ = 'Django==2.1.dev20180209010235'\n__import__('pkg_resources').require('Django==2.1.dev20180209010235')\n__file__ = 'D:\\\\python_projects\\\\ENV2\\\\django\\\\django\\\\bin\\\\django-admin.py'\nexec(compile(open(__file__).read(), __file__, 'exec'))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
'''
Given an infinite sorted array (or an array with unknown size), find if a given number ‘key’ is present in the array. Write a function to return the index of the ‘key’ if it is present in the array, otherwise return -1.
Since it is not possible to define an array with infinite (unknown) size, you will be provided with an interface ArrayReader to read elements of the array. ArrayReader.get(index) will return the number at index; if the array’s size is smaller than the index, it will return Integer.MAX_VALUE.
Example 1:
Input: [4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30], key = 16
Output: 6
Explanation: The key is present at index '6' in the array.
Example 2:
Input: [4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30], key = 11
Output: -1
Explanation: The key is not present in the array.
Example 3:
Input: [1, 3, 8, 10, 15], key = 15
Output: 4
Explanation: The key is present at index '4' in the array.
Example 4:
Input: [1, 3, 8, 10, 15], key = 200
Output: -1
Explanation: The key is not present in the array.
'''
import math
class ArrayReader:
    """Wraps a list to simulate an 'infinite' sorted array.

    ``get`` returns ``math.inf`` for any index past the end of the
    backing list instead of raising, mimicking Integer.MAX_VALUE from
    the problem statement.
    """

    def __init__(self, arr):
        self.arr = arr

    def get(self, index):
        """Return the element at *index*, or infinity if out of range."""
        # Fix: use >= — index == len(self.arr) is already out of range
        # and previously raised IndexError instead of returning infinity.
        if index >= len(self.arr):
            return math.inf
        return self.arr[index]
def search_in_infinite_array(reader, key):
    """Search an 'infinite' sorted array exposed via *reader* for *key*.

    Returns the index of *key*, or -1 if it is absent.  *reader* must
    return an out-of-range sentinel larger than any element (e.g. inf)
    past the end of the data.
    """
    # First find a window [low, high] guaranteed to contain `key` if it is
    # present: advance the window until reader.get(high) >= key.
    low = 0
    high = 1
    while reader.get(high) < key:
        # key lies beyond `high`: slide the window past it and roughly
        # double the window size each iteration (exponential probing).
        new_low = high + 1
        high = (high - low + 1)*2
        low = new_low
    # Loop invariant on exit: reader.get(low - 1) < key <= reader.get(high),
    # so an ordinary binary search over [low, high] suffices.
    return binary_search_array(reader, key, low, high)
def binary_search_array(reader, key, low, high):
    """Standard binary search over reader indices [low, high] inclusive.

    Returns the index holding *key*, or -1 when *key* is absent.
    """
    lo, hi = low, high
    while lo <= hi:
        middle = lo + (hi - lo) // 2
        probed = reader.get(middle)
        if probed == key:
            return middle
        if probed < key:
            # key lies to the right of the probe
            lo = middle + 1
        else:
            # key lies to the left of the probe
            hi = middle - 1
    return -1
# Demo: 16 lives at index 6 of the sample array, so this prints 6.
reader = ArrayReader([4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30])
print(search_in_infinite_array(reader, 16))
|
normal
|
{
"blob_id": "a9efa258c223460b2b79861acdde89161706ad9a",
"index": 8770,
"step-1": "<mask token>\n\n\nclass ArrayReader:\n\n def __init__(self, arr):\n self.arr = arr\n\n def get(self, index):\n if index > len(self.arr):\n return math.inf\n return self.arr[index]\n\n\n<mask token>\n\n\ndef binary_search_array(reader, key, low, high):\n while low <= high:\n mid = (low + high) // 2\n if key == reader.get(mid):\n return mid\n if key > reader.get(mid):\n low = mid + 1\n else:\n high = mid - 1\n return -1\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ArrayReader:\n\n def __init__(self, arr):\n self.arr = arr\n\n def get(self, index):\n if index > len(self.arr):\n return math.inf\n return self.arr[index]\n\n\ndef search_in_infinite_array(reader, key):\n low = 0\n high = 1\n while reader.get(high) < key:\n new_low = high + 1\n high = (high - low + 1) * 2\n low = new_low\n return binary_search_array(reader, key, low, high)\n\n\ndef binary_search_array(reader, key, low, high):\n while low <= high:\n mid = (low + high) // 2\n if key == reader.get(mid):\n return mid\n if key > reader.get(mid):\n low = mid + 1\n else:\n high = mid - 1\n return -1\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass ArrayReader:\n\n def __init__(self, arr):\n self.arr = arr\n\n def get(self, index):\n if index > len(self.arr):\n return math.inf\n return self.arr[index]\n\n\ndef search_in_infinite_array(reader, key):\n low = 0\n high = 1\n while reader.get(high) < key:\n new_low = high + 1\n high = (high - low + 1) * 2\n low = new_low\n return binary_search_array(reader, key, low, high)\n\n\ndef binary_search_array(reader, key, low, high):\n while low <= high:\n mid = (low + high) // 2\n if key == reader.get(mid):\n return mid\n if key > reader.get(mid):\n low = mid + 1\n else:\n high = mid - 1\n return -1\n\n\nreader = ArrayReader([4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30])\nprint(search_in_infinite_array(reader, 16))\n",
"step-4": "<mask token>\nimport math\n\n\nclass ArrayReader:\n\n def __init__(self, arr):\n self.arr = arr\n\n def get(self, index):\n if index > len(self.arr):\n return math.inf\n return self.arr[index]\n\n\ndef search_in_infinite_array(reader, key):\n low = 0\n high = 1\n while reader.get(high) < key:\n new_low = high + 1\n high = (high - low + 1) * 2\n low = new_low\n return binary_search_array(reader, key, low, high)\n\n\ndef binary_search_array(reader, key, low, high):\n while low <= high:\n mid = (low + high) // 2\n if key == reader.get(mid):\n return mid\n if key > reader.get(mid):\n low = mid + 1\n else:\n high = mid - 1\n return -1\n\n\nreader = ArrayReader([4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30])\nprint(search_in_infinite_array(reader, 16))\n",
"step-5": "'''\nGiven an infinite sorted array (or an array with unknown size), find if a given number ‘key’ is present in the array. Write a function to return the index of the ‘key’ if it is present in the array, otherwise return -1.\n\nSince it is not possible to define an array with infinite (unknown) size, you will be provided with an interface ArrayReader to read elements of the array. ArrayReader.get(index) will return the number at index; if the array’s size is smaller than the index, it will return Integer.MAX_VALUE.\n\nExample 1:\n\nInput: [4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30], key = 16\nOutput: 6\nExplanation: The key is present at index '6' in the array.\nExample 2:\n\nInput: [4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30], key = 11\nOutput: -1\nExplanation: The key is not present in the array.\nExample 3:\n\nInput: [1, 3, 8, 10, 15], key = 15\nOutput: 4\nExplanation: The key is present at index '4' in the array.\nExample 4:\n\nInput: [1, 3, 8, 10, 15], key = 200\nOutput: -1\nExplanation: The key is not present in the array.\n'''\n\nimport math\n\n\nclass ArrayReader:\n def __init__(self, arr):\n self.arr = arr\n\n def get(self, index):\n if index > len(self.arr):\n return math.inf\n\n return self.arr[index]\n\n\ndef search_in_infinite_array(reader, key):\n # first find the bounds\n\n low = 0\n high = 1\n\n while reader.get(high) < key:\n new_low = high + 1\n high = (high - low + 1)*2\n low = new_low\n\n return binary_search_array(reader, key, low, high)\n\n\ndef binary_search_array(reader, key, low, high):\n\n while low <= high:\n\n mid = (low + high) // 2\n\n if key == reader.get(mid):\n return mid\n\n if key > reader.get(mid):\n low = mid + 1\n\n else:\n high = mid - 1\n\n return - 1\n\n\nreader = ArrayReader([4, 6, 8, 10, 12, 14, 16, 18, 20, 22, 24, 26, 28, 30])\nprint(search_in_infinite_array(reader, 16))\n",
"step-ids": [
4,
5,
7,
8,
9
]
}
|
[
4,
5,
7,
8,
9
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def squirrel_play(temp, is_summer):
    """Return True when squirrels play: 60 <= temp <= 90, with the upper
    limit raised to 100 in summer.

    Fix: the original compared ``is_summer == True`` / ``== False`` and
    implicitly returned ``None`` for truthy non-bool values; this version
    treats ``is_summer`` by truthiness and always returns a bool.
    """
    upper = 100 if is_summer else 90
    return 60 <= temp <= upper
|
flexible
|
{
"blob_id": "48755cf48c6696259d0c319d382021f33751ac01",
"index": 9497,
"step-1": "<mask token>\n",
"step-2": "def squirrel_play(temp, is_summer):\n if is_summer == True:\n if 60 <= temp <= 100:\n return True\n else:\n return False\n if is_summer == False:\n if 60 <= temp <= 90:\n return True\n else:\n return False\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
import datetime
from collections import defaultdict
from django.db.models import Prefetch
from urnik.models import Termin, Rezervacija, Ucilnica, DNEVI, MIN_URA, MAX_URA, Srecanje, Semester, RezervacijaQuerySet
class ProsteUcilniceTermin(Termin):
    """A single timetable slot annotated with free/occupied/reserved rooms."""

    HUE_PRAZEN = 120  # green: at least one suitable room is free
    HUE_POLN = 0  # red: every suitable room is taken

    def __init__(self, dan, ura, ustrezne_ucilnice, zasedene_ucilnice, rezervirane_ucilnice):
        super().__init__(dan, ura)
        taken_pks = {room.pk for room in zasedene_ucilnice}
        reserved_pks = {room.pk for room in rezervirane_ucilnice}
        # Suitable rooms that are neither occupied by a meeting nor reserved.
        self.proste = [room for room in ustrezne_ucilnice
                       if room.pk not in taken_pks and room.pk not in reserved_pks]
        # Rooms occupied by a regular meeting (value = the meeting); a room
        # that is also reserved is reported as reserved instead.
        self.zasedene = [(room, why) for room, why in zasedene_ucilnice.items()
                         if room.pk not in reserved_pks]
        # Rooms blocked by a reservation (value = the reservation).
        self.rezervirane = list(rezervirane_ucilnice.items())
        # Rooms to display, as (state, room, reason) triples; populated by
        # filtriraj_ucilnice().
        self.prikazane_ucilnice = []

    def filtriraj_ucilnice(self, pokazi_zasedene):
        """Fill prikazane_ucilnice, optionally including non-free rooms."""
        shown = [('prosta', room, None) for room in self.proste]
        if pokazi_zasedene:
            shown += [('rezervirana', room, why) for room, why in self.rezervirane]
            shown += [('zasedena', room, why) for room, why in self.zasedene]
        self.prikazane_ucilnice = sorted(shown, key=lambda entry: entry[1])

    def hue(self):
        """Return the slot's display hue as a string: green iff a room is free."""
        return "{:.0f}".format(self.HUE_PRAZEN if self.proste else self.HUE_POLN)
class ProsteUcilnice(object):
    """Builds a fast per-(weekday, hour) lookup of room availability."""

    def __init__(self, ucilnice):
        self.ucilnice = set(ucilnice)
        # (day, hour) -> {room: meeting} / {room: reservation}
        self.zasedenost_ucilnic = defaultdict(dict)
        self.rezerviranost_ucilnic = defaultdict(dict)

    def dodaj_srecanja_semestra(self, semester, teden=None):
        """Record the semester's scheduled meetings held in our rooms.

        When *teden* (a week's Monday) is given, only meetings whose
        concrete date falls inside the semester are recorded.
        """
        pks = [room.pk for room in self.ucilnice]
        srecanja = (semester.srecanja
                    .select_related('ucilnica', 'predmet')
                    .prefetch_related('ucitelji')
                    .filter(ucilnica__in=pks)
                    .exclude(ura__isnull=True))
        for srecanje in srecanja:
            if teden is not None:
                datum = teden + datetime.timedelta(days=srecanje.dan - 1)
                if not (semester.od <= datum <= semester.do):
                    continue
            for zamik in range(srecanje.trajanje):
                self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + zamik][srecanje.ucilnica] = srecanje

    def upostevaj_rezervacije_za_teden(self, teden):
        """Shortcut: account for every reservation falling in week *teden*."""
        self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))

    def upostevaj_rezervacije(self, rezervacije):
        """Record the reservations for every room/day/hour they cover."""
        rezervacije = rezervacije.prefetch_related(
            Prefetch(
                'ucilnice',
                queryset=Ucilnica.objects.filter(pk__in=[room.pk for room in self.ucilnice]),
                to_attr='ustrezne_ucilnice'),
            'osebe')
        for rezervacija in rezervacije:
            for ucilnica in rezervacija.ustrezne_ucilnice:
                for dan in rezervacija.dnevi():
                    for ura in range(rezervacija.od, rezervacija.do):
                        self.rezerviranost_ucilnic[dan.isoweekday(), ura][ucilnica] = rezervacija

    def dobi_termine(self):
        """Build a ProsteUcilniceTermin for every (day, hour) grid cell."""
        return [ProsteUcilniceTermin(dan, ura, self.ucilnice,
                                     self.zasedenost_ucilnic[dan, ura],
                                     self.rezerviranost_ucilnic[dan, ura])
                for dan in range(1, len(DNEVI) + 1)
                for ura in range(MIN_URA, MAX_URA)]
class Konflikt(object):
    """Accumulates the meetings and reservations that clash with a slot."""

    def __init__(self):
        self.srecanja = []
        self.rezervacije = []

    @property
    def st_konfliktov(self):
        """Total number of conflicting meetings plus reservations."""
        return len(self.rezervacije) + len(self.srecanja)

    def __bool__(self):
        # Truthy exactly when at least one conflict was collected.
        return bool(self.st_konfliktov)

    def __str__(self):
        return "Konflikti:\n rezervacije:\n{}\n predmeti:\n{}".format(
            "\n ".join(map(str, self.rezervacije)),
            "\n ".join(map(str, self.srecanja)))
class IskalnikKonfliktov(object):
    """Fast lookup of overlapping meetings/reservations by date and room."""

    def __init__(self, ucilnice, min_datum, max_datum):
        self.ucilnice = set(ucilnice)
        self.min_datum = min_datum
        self.max_datum = max_datum
        # (room pk, date) -> meetings / reservations held there that day
        self.zasedenost_ucilnic = defaultdict(list)
        self.rezerviranost_ucilnic = defaultdict(list)

    def dodaj_srecanja(self):
        """Index the meetings of every semester overlapping our date span."""
        semestri = Semester.objects.v_obdobju(self.min_datum, self.max_datum)
        self.dodaj_srecanja_semestrov(semestri)

    def dodaj_srecanja_semestrov(self, semestri):
        """Index all scheduled meetings of *semestri* held in our rooms."""
        srecanja = (Srecanje.objects
                    .filter(semester__in=semestri, ucilnica__in=self.ucilnice)
                    .exclude(ura__isnull=True)
                    .select_related('semester', 'predmet', 'ucilnica'))
        for srecanje in srecanja:
            for datum in srecanje.dnevi_med(self.min_datum, self.max_datum):
                self.zasedenost_ucilnic[srecanje.ucilnica_id, datum].append(srecanje)

    def dodaj_rezervacije(self, rezervacije):
        """Index reservations.

        The queryset must be prefetched so that each reservation carries
        the attribute ``seznam_ucilnic``.
        """
        for rezervacija in rezervacije:
            for ucilnica in rezervacija.seznam_ucilnic:
                for datum in rezervacija.dnevi_med(self.min_datum, self.max_datum):
                    self.rezerviranost_ucilnic[ucilnica.pk, datum].append(rezervacija)

    @staticmethod
    def za_rezervacije(rezervacije: RezervacijaQuerySet):
        """Build an iskalnik spanning exactly the given reservations.

        The queryset must be prefetched so that ``seznam_ucilnic`` exists.
        """
        min_datum = datetime.date.max
        max_datum = datetime.date.min
        ucilnice = set()
        for rezervacija in rezervacije:
            min_datum = min(min_datum, rezervacija.zacetek)
            max_datum = max(max_datum, rezervacija.konec)
            ucilnice.update(rezervacija.seznam_ucilnic)
        iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)
        iskalnik.dodaj_srecanja()
        iskalnik.dodaj_rezervacije(rezervacije)
        return iskalnik

    def konflikti_z_rezervacijo(self, r: Rezervacija):
        """Yield (room, date, Konflikt) triples where *r* clashes."""
        if not hasattr(r, 'seznam_ucilnic'):
            r.seznam_ucilnic = r.ucilnice.all()
        for ucilnica in r.seznam_ucilnic:
            for datum in r.dnevi():
                konflikt = self.konflikti(ucilnica, datum, r.od, r.do, r)
                if konflikt:
                    yield ucilnica, datum, konflikt

    def konflikti(self, ucilnica, datum, od, do, ignore=None):
        """Return conflicts with an activity in *ucilnica* on *datum*
        lasting from hour *od* to *do*; *ignore* is excluded from the
        results (used when re-checking an existing reservation)."""
        if ucilnica not in self.ucilnice:
            raise ValueError("Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}".format(ucilnica))
        if not (self.min_datum <= datum <= self.max_datum):
            raise ValueError("Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}".format(datum))
        konflikti = Konflikt()
        for srecanje in self.zasedenost_ucilnic[ucilnica.pk, datum]:
            if srecanje != ignore and srecanje.se_po_urah_prekriva(od, do):
                konflikti.srecanja.append(srecanje)
        for rezervacija in self.rezerviranost_ucilnic[ucilnica.pk, datum]:
            if rezervacija != ignore and rezervacija.se_po_urah_prekriva(od, do):
                konflikti.rezervacije.append(rezervacija)
        return konflikti
|
normal
|
{
"blob_id": "3ce9c0aeb6b4e575fbb3fced52a86a1dcec44706",
"index": 4713,
"step-1": "<mask token>\n\n\nclass ProsteUcilnice(object):\n <mask token>\n\n def __init__(self, ucilnice):\n self.ucilnice = set(ucilnice)\n self.zasedenost_ucilnic = defaultdict(dict)\n self.rezerviranost_ucilnic = defaultdict(dict)\n\n def dodaj_srecanja_semestra(self, semester, teden=None):\n for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'\n ).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in\n self.ucilnice]).exclude(ura__isnull=True):\n if teden is None or semester.od <= teden + datetime.timedelta(days\n =srecanje.dan - 1) <= semester.do:\n for i in range(srecanje.trajanje):\n self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][\n srecanje.ucilnica] = srecanje\n\n def upostevaj_rezervacije_za_teden(self, teden):\n self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))\n\n def upostevaj_rezervacije(self, rezervacije):\n for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',\n queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.\n ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):\n for ucilnica in rezervacija.ustrezne_ucilnice:\n for dan in rezervacija.dnevi():\n for ura in range(rezervacija.od, rezervacija.do):\n self.rezerviranost_ucilnic[dan.isoweekday(), ura][\n ucilnica] = rezervacija\n\n def dobi_termine(self):\n termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.\n zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for\n d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]\n return termini\n\n\nclass Konflikt(object):\n\n def __init__(self):\n self.srecanja = []\n self.rezervacije = []\n\n @property\n def st_konfliktov(self):\n return len(self.srecanja) + len(self.rezervacije)\n\n def __bool__(self):\n return self.st_konfliktov > 0\n\n def __str__(self):\n return 'Konflikti:\\n rezervacije:\\n{}\\n predmeti:\\n{}'.format(\n '\\n '.join(map(str, self.rezervacije)), '\\n '.join(map(\n str, self.srecanja)))\n\n\nclass IskalnikKonfliktov(object):\n 
\"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico.\"\"\"\n\n def __init__(self, ucilnice, min_datum, max_datum):\n self.ucilnice = set(ucilnice)\n self.min_datum = min_datum\n self.max_datum = max_datum\n self.zasedenost_ucilnic = defaultdict(list)\n self.rezerviranost_ucilnic = defaultdict(list)\n\n def dodaj_srecanja(self):\n self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.\n min_datum, self.max_datum))\n\n def dodaj_srecanja_semestrov(self, semestri):\n for s in Srecanje.objects.filter(semester__in=semestri,\n ucilnica__in=self.ucilnice).exclude(ura__isnull=True\n ).select_related('semester', 'predmet', 'ucilnica'):\n for d in s.dnevi_med(self.min_datum, self.max_datum):\n self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)\n\n def dodaj_rezervacije(self, rezervacije):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n for r in rezervacije:\n for u in r.seznam_ucilnic:\n for d in r.dnevi_med(self.min_datum, self.max_datum):\n self.rezerviranost_ucilnic[u.pk, d].append(r)\n\n @staticmethod\n def za_rezervacije(rezervacije: RezervacijaQuerySet):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n min_datum = datetime.date.max\n max_datum = datetime.date.min\n ucilnice = set()\n for r in rezervacije:\n if r.zacetek < min_datum:\n min_datum = r.zacetek\n if r.konec > max_datum:\n max_datum = r.konec\n ucilnice.update(r.seznam_ucilnic)\n iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)\n iskalnik.dodaj_srecanja()\n iskalnik.dodaj_rezervacije(rezervacije)\n return iskalnik\n\n def konflikti_z_rezervacijo(self, r: Rezervacija):\n if not hasattr(r, 'seznam_ucilnic'):\n r.seznam_ucilnic = r.ucilnice.all()\n for u in r.seznam_ucilnic:\n for d in r.dnevi():\n k = self.konflikti(u, d, r.od, r.do, r)\n if k:\n yield u, d, k\n\n def konflikti(self, ucilnica, datum, od, do, ignore=None):\n \"\"\"Vrne konflikte z 
dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`.\"\"\"\n konflikti = Konflikt()\n if ucilnica not in self.ucilnice:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'\n .format(ucilnica))\n if not self.min_datum <= datum <= self.max_datum:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'\n .format(datum))\n for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:\n if s != ignore and s.se_po_urah_prekriva(od, do):\n konflikti.srecanja.append(s)\n for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:\n if r != ignore and r.se_po_urah_prekriva(od, do):\n konflikti.rezervacije.append(r)\n return konflikti\n",
"step-2": "<mask token>\n\n\nclass ProsteUcilniceTermin(Termin):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass ProsteUcilnice(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj za dane ucilnice glede na uro in dan v tednu.\"\"\"\n\n def __init__(self, ucilnice):\n self.ucilnice = set(ucilnice)\n self.zasedenost_ucilnic = defaultdict(dict)\n self.rezerviranost_ucilnic = defaultdict(dict)\n\n def dodaj_srecanja_semestra(self, semester, teden=None):\n for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'\n ).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in\n self.ucilnice]).exclude(ura__isnull=True):\n if teden is None or semester.od <= teden + datetime.timedelta(days\n =srecanje.dan - 1) <= semester.do:\n for i in range(srecanje.trajanje):\n self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][\n srecanje.ucilnica] = srecanje\n\n def upostevaj_rezervacije_za_teden(self, teden):\n self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))\n\n def upostevaj_rezervacije(self, rezervacije):\n for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',\n queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.\n ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):\n for ucilnica in rezervacija.ustrezne_ucilnice:\n for dan in rezervacija.dnevi():\n for ura in range(rezervacija.od, rezervacija.do):\n self.rezerviranost_ucilnic[dan.isoweekday(), ura][\n ucilnica] = rezervacija\n\n def dobi_termine(self):\n termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.\n zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for\n d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]\n return termini\n\n\nclass Konflikt(object):\n\n def __init__(self):\n self.srecanja = []\n self.rezervacije = []\n\n @property\n def st_konfliktov(self):\n return len(self.srecanja) + len(self.rezervacije)\n\n def __bool__(self):\n return self.st_konfliktov > 0\n\n def 
__str__(self):\n return 'Konflikti:\\n rezervacije:\\n{}\\n predmeti:\\n{}'.format(\n '\\n '.join(map(str, self.rezervacije)), '\\n '.join(map(\n str, self.srecanja)))\n\n\nclass IskalnikKonfliktov(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico.\"\"\"\n\n def __init__(self, ucilnice, min_datum, max_datum):\n self.ucilnice = set(ucilnice)\n self.min_datum = min_datum\n self.max_datum = max_datum\n self.zasedenost_ucilnic = defaultdict(list)\n self.rezerviranost_ucilnic = defaultdict(list)\n\n def dodaj_srecanja(self):\n self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.\n min_datum, self.max_datum))\n\n def dodaj_srecanja_semestrov(self, semestri):\n for s in Srecanje.objects.filter(semester__in=semestri,\n ucilnica__in=self.ucilnice).exclude(ura__isnull=True\n ).select_related('semester', 'predmet', 'ucilnica'):\n for d in s.dnevi_med(self.min_datum, self.max_datum):\n self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)\n\n def dodaj_rezervacije(self, rezervacije):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n for r in rezervacije:\n for u in r.seznam_ucilnic:\n for d in r.dnevi_med(self.min_datum, self.max_datum):\n self.rezerviranost_ucilnic[u.pk, d].append(r)\n\n @staticmethod\n def za_rezervacije(rezervacije: RezervacijaQuerySet):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n min_datum = datetime.date.max\n max_datum = datetime.date.min\n ucilnice = set()\n for r in rezervacije:\n if r.zacetek < min_datum:\n min_datum = r.zacetek\n if r.konec > max_datum:\n max_datum = r.konec\n ucilnice.update(r.seznam_ucilnic)\n iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)\n iskalnik.dodaj_srecanja()\n iskalnik.dodaj_rezervacije(rezervacije)\n return iskalnik\n\n def konflikti_z_rezervacijo(self, r: Rezervacija):\n if not hasattr(r, 'seznam_ucilnic'):\n r.seznam_ucilnic = r.ucilnice.all()\n for 
u in r.seznam_ucilnic:\n for d in r.dnevi():\n k = self.konflikti(u, d, r.od, r.do, r)\n if k:\n yield u, d, k\n\n def konflikti(self, ucilnica, datum, od, do, ignore=None):\n \"\"\"Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`.\"\"\"\n konflikti = Konflikt()\n if ucilnica not in self.ucilnice:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'\n .format(ucilnica))\n if not self.min_datum <= datum <= self.max_datum:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'\n .format(datum))\n for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:\n if s != ignore and s.se_po_urah_prekriva(od, do):\n konflikti.srecanja.append(s)\n for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:\n if r != ignore and r.se_po_urah_prekriva(od, do):\n konflikti.rezervacije.append(r)\n return konflikti\n",
"step-3": "<mask token>\n\n\nclass ProsteUcilniceTermin(Termin):\n HUE_PRAZEN = 120\n HUE_POLN = 0\n\n def __init__(self, dan, ura, ustrezne_ucilnice, zasedene_ucilnice,\n rezervirane_ucilnice):\n super().__init__(dan, ura)\n zasedene_pks = {u.pk for u in zasedene_ucilnice}\n rezervirane_pks = {u.pk for u in rezervirane_ucilnice}\n self.proste = [u for u in ustrezne_ucilnice if u.pk not in\n zasedene_pks and u.pk not in rezervirane_pks]\n self.zasedene = [(u, r) for u, r in zasedene_ucilnice.items() if u.\n pk not in rezervirane_pks]\n self.rezervirane = list(rezervirane_ucilnice.items())\n self.prikazane_ucilnice = []\n\n def filtriraj_ucilnice(self, pokazi_zasedene):\n vse = [('prosta', u, None) for u in self.proste]\n if pokazi_zasedene:\n vse.extend([('rezervirana', u, r) for u, r in self.rezervirane])\n vse.extend([('zasedena', u, r) for u, r in self.zasedene])\n self.prikazane_ucilnice = sorted(vse, key=lambda x: x[1])\n\n def hue(self):\n h = self.HUE_PRAZEN if self.proste else self.HUE_POLN\n return '{:.0f}'.format(h)\n\n\nclass ProsteUcilnice(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj za dane ucilnice glede na uro in dan v tednu.\"\"\"\n\n def __init__(self, ucilnice):\n self.ucilnice = set(ucilnice)\n self.zasedenost_ucilnic = defaultdict(dict)\n self.rezerviranost_ucilnic = defaultdict(dict)\n\n def dodaj_srecanja_semestra(self, semester, teden=None):\n for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'\n ).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in\n self.ucilnice]).exclude(ura__isnull=True):\n if teden is None or semester.od <= teden + datetime.timedelta(days\n =srecanje.dan - 1) <= semester.do:\n for i in range(srecanje.trajanje):\n self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][\n srecanje.ucilnica] = srecanje\n\n def upostevaj_rezervacije_za_teden(self, teden):\n self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))\n\n def upostevaj_rezervacije(self, 
rezervacije):\n for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',\n queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.\n ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):\n for ucilnica in rezervacija.ustrezne_ucilnice:\n for dan in rezervacija.dnevi():\n for ura in range(rezervacija.od, rezervacija.do):\n self.rezerviranost_ucilnic[dan.isoweekday(), ura][\n ucilnica] = rezervacija\n\n def dobi_termine(self):\n termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.\n zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for\n d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]\n return termini\n\n\nclass Konflikt(object):\n\n def __init__(self):\n self.srecanja = []\n self.rezervacije = []\n\n @property\n def st_konfliktov(self):\n return len(self.srecanja) + len(self.rezervacije)\n\n def __bool__(self):\n return self.st_konfliktov > 0\n\n def __str__(self):\n return 'Konflikti:\\n rezervacije:\\n{}\\n predmeti:\\n{}'.format(\n '\\n '.join(map(str, self.rezervacije)), '\\n '.join(map(\n str, self.srecanja)))\n\n\nclass IskalnikKonfliktov(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico.\"\"\"\n\n def __init__(self, ucilnice, min_datum, max_datum):\n self.ucilnice = set(ucilnice)\n self.min_datum = min_datum\n self.max_datum = max_datum\n self.zasedenost_ucilnic = defaultdict(list)\n self.rezerviranost_ucilnic = defaultdict(list)\n\n def dodaj_srecanja(self):\n self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.\n min_datum, self.max_datum))\n\n def dodaj_srecanja_semestrov(self, semestri):\n for s in Srecanje.objects.filter(semester__in=semestri,\n ucilnica__in=self.ucilnice).exclude(ura__isnull=True\n ).select_related('semester', 'predmet', 'ucilnica'):\n for d in s.dnevi_med(self.min_datum, self.max_datum):\n self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)\n\n def dodaj_rezervacije(self, rezervacije):\n \"\"\"Queryset rezervacije mora biti 
prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n for r in rezervacije:\n for u in r.seznam_ucilnic:\n for d in r.dnevi_med(self.min_datum, self.max_datum):\n self.rezerviranost_ucilnic[u.pk, d].append(r)\n\n @staticmethod\n def za_rezervacije(rezervacije: RezervacijaQuerySet):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n min_datum = datetime.date.max\n max_datum = datetime.date.min\n ucilnice = set()\n for r in rezervacije:\n if r.zacetek < min_datum:\n min_datum = r.zacetek\n if r.konec > max_datum:\n max_datum = r.konec\n ucilnice.update(r.seznam_ucilnic)\n iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)\n iskalnik.dodaj_srecanja()\n iskalnik.dodaj_rezervacije(rezervacije)\n return iskalnik\n\n def konflikti_z_rezervacijo(self, r: Rezervacija):\n if not hasattr(r, 'seznam_ucilnic'):\n r.seznam_ucilnic = r.ucilnice.all()\n for u in r.seznam_ucilnic:\n for d in r.dnevi():\n k = self.konflikti(u, d, r.od, r.do, r)\n if k:\n yield u, d, k\n\n def konflikti(self, ucilnica, datum, od, do, ignore=None):\n \"\"\"Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`.\"\"\"\n konflikti = Konflikt()\n if ucilnica not in self.ucilnice:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'\n .format(ucilnica))\n if not self.min_datum <= datum <= self.max_datum:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'\n .format(datum))\n for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:\n if s != ignore and s.se_po_urah_prekriva(od, do):\n konflikti.srecanja.append(s)\n for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:\n if r != ignore and r.se_po_urah_prekriva(od, do):\n konflikti.rezervacije.append(r)\n return konflikti\n",
"step-4": "import datetime\nfrom collections import defaultdict\nfrom django.db.models import Prefetch\nfrom urnik.models import Termin, Rezervacija, Ucilnica, DNEVI, MIN_URA, MAX_URA, Srecanje, Semester, RezervacijaQuerySet\n\n\nclass ProsteUcilniceTermin(Termin):\n HUE_PRAZEN = 120\n HUE_POLN = 0\n\n def __init__(self, dan, ura, ustrezne_ucilnice, zasedene_ucilnice,\n rezervirane_ucilnice):\n super().__init__(dan, ura)\n zasedene_pks = {u.pk for u in zasedene_ucilnice}\n rezervirane_pks = {u.pk for u in rezervirane_ucilnice}\n self.proste = [u for u in ustrezne_ucilnice if u.pk not in\n zasedene_pks and u.pk not in rezervirane_pks]\n self.zasedene = [(u, r) for u, r in zasedene_ucilnice.items() if u.\n pk not in rezervirane_pks]\n self.rezervirane = list(rezervirane_ucilnice.items())\n self.prikazane_ucilnice = []\n\n def filtriraj_ucilnice(self, pokazi_zasedene):\n vse = [('prosta', u, None) for u in self.proste]\n if pokazi_zasedene:\n vse.extend([('rezervirana', u, r) for u, r in self.rezervirane])\n vse.extend([('zasedena', u, r) for u, r in self.zasedene])\n self.prikazane_ucilnice = sorted(vse, key=lambda x: x[1])\n\n def hue(self):\n h = self.HUE_PRAZEN if self.proste else self.HUE_POLN\n return '{:.0f}'.format(h)\n\n\nclass ProsteUcilnice(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj za dane ucilnice glede na uro in dan v tednu.\"\"\"\n\n def __init__(self, ucilnice):\n self.ucilnice = set(ucilnice)\n self.zasedenost_ucilnic = defaultdict(dict)\n self.rezerviranost_ucilnic = defaultdict(dict)\n\n def dodaj_srecanja_semestra(self, semester, teden=None):\n for srecanje in semester.srecanja.select_related('ucilnica', 'predmet'\n ).prefetch_related('ucitelji').filter(ucilnica__in=[u.pk for u in\n self.ucilnice]).exclude(ura__isnull=True):\n if teden is None or semester.od <= teden + datetime.timedelta(days\n =srecanje.dan - 1) <= semester.do:\n for i in range(srecanje.trajanje):\n self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + 
i][\n srecanje.ucilnica] = srecanje\n\n def upostevaj_rezervacije_za_teden(self, teden):\n self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))\n\n def upostevaj_rezervacije(self, rezervacije):\n for rezervacija in rezervacije.prefetch_related(Prefetch('ucilnice',\n queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.\n ucilnice]), to_attr='ustrezne_ucilnice'), 'osebe'):\n for ucilnica in rezervacija.ustrezne_ucilnice:\n for dan in rezervacija.dnevi():\n for ura in range(rezervacija.od, rezervacija.do):\n self.rezerviranost_ucilnic[dan.isoweekday(), ura][\n ucilnica] = rezervacija\n\n def dobi_termine(self):\n termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.\n zasedenost_ucilnic[d, u], self.rezerviranost_ucilnic[d, u]) for\n d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]\n return termini\n\n\nclass Konflikt(object):\n\n def __init__(self):\n self.srecanja = []\n self.rezervacije = []\n\n @property\n def st_konfliktov(self):\n return len(self.srecanja) + len(self.rezervacije)\n\n def __bool__(self):\n return self.st_konfliktov > 0\n\n def __str__(self):\n return 'Konflikti:\\n rezervacije:\\n{}\\n predmeti:\\n{}'.format(\n '\\n '.join(map(str, self.rezervacije)), '\\n '.join(map(\n str, self.srecanja)))\n\n\nclass IskalnikKonfliktov(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico.\"\"\"\n\n def __init__(self, ucilnice, min_datum, max_datum):\n self.ucilnice = set(ucilnice)\n self.min_datum = min_datum\n self.max_datum = max_datum\n self.zasedenost_ucilnic = defaultdict(list)\n self.rezerviranost_ucilnic = defaultdict(list)\n\n def dodaj_srecanja(self):\n self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.\n min_datum, self.max_datum))\n\n def dodaj_srecanja_semestrov(self, semestri):\n for s in Srecanje.objects.filter(semester__in=semestri,\n ucilnica__in=self.ucilnice).exclude(ura__isnull=True\n ).select_related('semester', 'predmet', 'ucilnica'):\n for d in 
s.dnevi_med(self.min_datum, self.max_datum):\n self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)\n\n def dodaj_rezervacije(self, rezervacije):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n for r in rezervacije:\n for u in r.seznam_ucilnic:\n for d in r.dnevi_med(self.min_datum, self.max_datum):\n self.rezerviranost_ucilnic[u.pk, d].append(r)\n\n @staticmethod\n def za_rezervacije(rezervacije: RezervacijaQuerySet):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n min_datum = datetime.date.max\n max_datum = datetime.date.min\n ucilnice = set()\n for r in rezervacije:\n if r.zacetek < min_datum:\n min_datum = r.zacetek\n if r.konec > max_datum:\n max_datum = r.konec\n ucilnice.update(r.seznam_ucilnic)\n iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)\n iskalnik.dodaj_srecanja()\n iskalnik.dodaj_rezervacije(rezervacije)\n return iskalnik\n\n def konflikti_z_rezervacijo(self, r: Rezervacija):\n if not hasattr(r, 'seznam_ucilnic'):\n r.seznam_ucilnic = r.ucilnice.all()\n for u in r.seznam_ucilnic:\n for d in r.dnevi():\n k = self.konflikti(u, d, r.od, r.do, r)\n if k:\n yield u, d, k\n\n def konflikti(self, ucilnica, datum, od, do, ignore=None):\n \"\"\"Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`.\"\"\"\n konflikti = Konflikt()\n if ucilnica not in self.ucilnice:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}'\n .format(ucilnica))\n if not self.min_datum <= datum <= self.max_datum:\n raise ValueError(\n 'Struktura iskanja ni bila pripravljena za iskanje konfliktov dne {}'\n .format(datum))\n for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:\n if s != ignore and s.se_po_urah_prekriva(od, do):\n konflikti.srecanja.append(s)\n for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:\n if r != ignore and r.se_po_urah_prekriva(od, do):\n 
konflikti.rezervacije.append(r)\n return konflikti\n",
"step-5": "import datetime\nfrom collections import defaultdict\n\nfrom django.db.models import Prefetch\n\nfrom urnik.models import Termin, Rezervacija, Ucilnica, DNEVI, MIN_URA, MAX_URA, Srecanje, Semester, RezervacijaQuerySet\n\n\nclass ProsteUcilniceTermin(Termin):\n HUE_PRAZEN = 120 # zelena\n HUE_POLN = 0 # rdeca\n\n def __init__(self, dan, ura, ustrezne_ucilnice, zasedene_ucilnice, rezervirane_ucilnice):\n super().__init__(dan, ura)\n zasedene_pks = {u.pk for u in zasedene_ucilnice}\n rezervirane_pks = {u.pk for u in rezervirane_ucilnice}\n # Vse ustrezne proste ucilnice.\n self.proste = [u for u in ustrezne_ucilnice if u.pk not in zasedene_pks and u.pk not in rezervirane_pks]\n # Vse ustrezne ucilnice, ki so pa zasedene, ker je tam stalno srečanje. Vrednosti so razlogi za zasedenost.\n self.zasedene = [(u, r) for u, r in zasedene_ucilnice.items() if u.pk not in rezervirane_pks]\n # Vse ustrezne ucilnice, ki so pa zasedene, ker so rezervirane. Vrednosti so razlogi za zasedenost.\n self.rezervirane = list(rezervirane_ucilnice.items())\n # ucilnice, ki bodo prikazane, skupaj s stanjem in razlogom\n self.prikazane_ucilnice = []\n\n def filtriraj_ucilnice(self, pokazi_zasedene):\n vse = [('prosta', u, None) for u in self.proste]\n if pokazi_zasedene:\n vse.extend([('rezervirana', u, r) for u, r in self.rezervirane])\n vse.extend([('zasedena', u, r) for u, r in self.zasedene])\n self.prikazane_ucilnice = sorted(vse, key=lambda x: x[1])\n\n def hue(self):\n h = self.HUE_PRAZEN if self.proste else self.HUE_POLN\n return \"{:.0f}\".format(h)\n\n\nclass ProsteUcilnice(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj za dane ucilnice glede na uro in dan v tednu.\"\"\"\n def __init__(self, ucilnice):\n self.ucilnice = set(ucilnice)\n self.zasedenost_ucilnic = defaultdict(dict)\n self.rezerviranost_ucilnic = defaultdict(dict)\n\n def dodaj_srecanja_semestra(self, semester, teden=None):\n for srecanje in semester.srecanja.select_related('ucilnica', 
'predmet').prefetch_related('ucitelji'\n ).filter(ucilnica__in=[u.pk for u in self.ucilnice]).exclude(ura__isnull=True):\n if teden is None or semester.od <= teden + datetime.timedelta(days=srecanje.dan-1) <= semester.do:\n for i in range(srecanje.trajanje):\n self.zasedenost_ucilnic[srecanje.dan, srecanje.ura + i][srecanje.ucilnica] = srecanje\n\n def upostevaj_rezervacije_za_teden(self, teden):\n self.upostevaj_rezervacije(Rezervacija.objects.v_tednu(teden))\n\n def upostevaj_rezervacije(self, rezervacije):\n for rezervacija in rezervacije.prefetch_related(\n Prefetch(\n 'ucilnice',\n queryset=Ucilnica.objects.filter(pk__in=[u.pk for u in self.ucilnice]),\n to_attr='ustrezne_ucilnice'),\n 'osebe'):\n for ucilnica in rezervacija.ustrezne_ucilnice:\n for dan in rezervacija.dnevi():\n for ura in range(rezervacija.od, rezervacija.do):\n self.rezerviranost_ucilnic[dan.isoweekday(), ura][ucilnica] = rezervacija\n\n def dobi_termine(self):\n termini = [ProsteUcilniceTermin(d, u, self.ucilnice, self.zasedenost_ucilnic[d, u],\n self.rezerviranost_ucilnic[d, u])\n for d in range(1, len(DNEVI) + 1) for u in range(MIN_URA, MAX_URA)]\n return termini\n\n\nclass Konflikt(object):\n def __init__(self):\n self.srecanja = []\n self.rezervacije = []\n\n @property\n def st_konfliktov(self):\n return len(self.srecanja) + len(self.rezervacije)\n\n def __bool__(self):\n return self.st_konfliktov > 0\n\n def __str__(self):\n return \"Konflikti:\\n rezervacije:\\n{}\\n predmeti:\\n{}\".format(\"\\n \".join(map(str, self.rezervacije)),\n \"\\n \".join(map(str, self.srecanja)))\n\n\nclass IskalnikKonfliktov(object):\n \"\"\"Zgradi strukturo, ki omogoca hitro iskanje prekrivanj glede na datum in učilnico.\"\"\"\n\n def __init__(self, ucilnice, min_datum, max_datum):\n self.ucilnice = set(ucilnice)\n self.min_datum = min_datum\n self.max_datum = max_datum\n self.zasedenost_ucilnic = defaultdict(list)\n self.rezerviranost_ucilnic = defaultdict(list)\n\n def dodaj_srecanja(self):\n 
self.dodaj_srecanja_semestrov(Semester.objects.v_obdobju(self.min_datum, self.max_datum))\n\n def dodaj_srecanja_semestrov(self, semestri):\n for s in Srecanje.objects.filter(semester__in=semestri, ucilnica__in=self.ucilnice\n ).exclude(ura__isnull=True).select_related('semester', 'predmet', 'ucilnica'):\n for d in s.dnevi_med(self.min_datum, self.max_datum):\n self.zasedenost_ucilnic[s.ucilnica_id, d].append(s)\n\n def dodaj_rezervacije(self, rezervacije):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n for r in rezervacije:\n for u in r.seznam_ucilnic:\n for d in r.dnevi_med(self.min_datum, self.max_datum):\n self.rezerviranost_ucilnic[u.pk, d].append(r)\n\n @staticmethod\n def za_rezervacije(rezervacije: RezervacijaQuerySet):\n \"\"\"Queryset rezervacije mora biti prefetchan tako, da obstaja atribut seznam_ucilnic\"\"\"\n min_datum = datetime.date.max\n max_datum = datetime.date.min\n ucilnice = set()\n for r in rezervacije:\n if r.zacetek < min_datum:\n min_datum = r.zacetek\n if r.konec > max_datum:\n max_datum = r.konec\n ucilnice.update(r.seznam_ucilnic)\n\n iskalnik = IskalnikKonfliktov(ucilnice, min_datum, max_datum)\n iskalnik.dodaj_srecanja()\n iskalnik.dodaj_rezervacije(rezervacije)\n return iskalnik\n\n def konflikti_z_rezervacijo(self, r: Rezervacija):\n if not hasattr(r, 'seznam_ucilnic'):\n r.seznam_ucilnic = r.ucilnice.all()\n for u in r.seznam_ucilnic:\n for d in r.dnevi():\n k = self.konflikti(u, d, r.od, r.do, r)\n if k:\n yield u, d, k\n\n def konflikti(self, ucilnica, datum, od, do, ignore=None):\n \"\"\"Vrne konflikte z dejavnostjo, ki bi v ucilnici `ucilnica` potekala dne `datum` od ure `od` do `do`.\"\"\"\n konflikti = Konflikt()\n if ucilnica not in self.ucilnice:\n raise ValueError(\"Struktura iskanja ni bila pripravljena za iskanje konfliktov v učilnici {}\".format(ucilnica))\n if not (self.min_datum <= datum <= self.max_datum):\n raise ValueError(\"Struktura iskanja ni bila pripravljena 
za iskanje konfliktov dne {}\".format(datum))\n\n for s in self.zasedenost_ucilnic[ucilnica.pk, datum]:\n if s != ignore and s.se_po_urah_prekriva(od, do):\n konflikti.srecanja.append(s)\n\n for r in self.rezerviranost_ucilnic[ucilnica.pk, datum]:\n if r != ignore and r.se_po_urah_prekriva(od, do):\n konflikti.rezervacije.append(r)\n\n return konflikti\n",
"step-ids": [
20,
22,
26,
27,
28
]
}
|
[
20,
22,
26,
27,
28
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def remove(string_input):
    """Return *string_input* with every space character removed."""
    return ''.join(string_input.split(' '))
<|reserved_special_token_1|>
# Remove every space character from a string.
def remove(string_input):
    """Return *string_input* with all ' ' characters removed."""
    return string_input.replace(" ", "")
|
flexible
|
{
"blob_id": "f327f408ae2759407ac9f01ad4feff5c6a0845f1",
"index": 9524,
"step-1": "<mask token>\n",
"step-2": "def remove(string_input):\n return string_input.replace(' ', '')\n",
"step-3": "#Function to remove spaces in a string\n\ndef remove(string_input):\n return string_input.replace(\" \", \"\")\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
def test(name, message):
    """Print the user name, then the welcome message."""
    for label, value in (('用户是:', name), ('欢迎消息是:', message)):
        print(label, value)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def test(name, message):
    """Print who the user is and what the welcome message says."""
    fields = [('用户是:', name), ('欢迎消息是:', message)]
    for prefix, value in fields:
        print(prefix, value)
<|reserved_special_token_0|>
def foo(name, *nums):
    """Print the `name` argument and the varargs tuple `nums`."""
    labelled = (('name参数:', name), ('nums参数:', nums))
    for label, value in labelled:
        print(label, value)
<|reserved_special_token_0|>
def bar(book, price, desc):
    """Print a book's price line followed by its description line."""
    price_line = (book, '这本书的价格是:', price)
    print(*price_line)
    print('描述信息是:', desc)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def test(name, message):
    """Print the user name and the welcome message on separate lines."""
    output = (('用户是:', name), ('欢迎消息是:', message))
    for tag, val in output:
        print(tag, val)
<|reserved_special_token_0|>
test(*my_list)
print('*****')
def foo(name, *nums):
    """Print the fixed argument and the collected varargs tuple."""
    rows = [('name参数:', name), ('nums参数:', nums)]
    for tag, val in rows:
        print(tag, val)
<|reserved_special_token_0|>
foo('fkit', *my_tuple)
print('********')
foo(*my_tuple)
print('*******')
foo(my_tuple)
def bar(book, price, desc):
    """Print the price and the description of a book."""
    for line in ((book, '这本书的价格是:', price), ('描述信息是:', desc)):
        print(*line)
print('********')
<|reserved_special_token_0|>
bar(**my_dict)
print('*******')
bar(my_dict)
<|reserved_special_token_1|>
def test(name, message):
    """Print the user name and the welcome message."""
    print('用户是:', name)
    print('欢迎消息是:', message)
my_list = ['孙悟空', '欢迎来疯狂软件']
# Unpack the list into test's two positional parameters.
test(*my_list)
print('*****')
def foo(name, *nums):
    """Print `name` and the varargs tuple collected into `nums`."""
    print('name参数:', name)
    print('nums参数:', nums)
my_tuple = 1, 2, 3
# 'fkit' binds to name; the unpacked tuple fills *nums.
foo('fkit', *my_tuple)
print('********')
# Unpacked: 1 binds to name, nums becomes (2, 3).
foo(*my_tuple)
print('*******')
# Not unpacked: the whole tuple binds to name; nums is empty.
foo(my_tuple)
def bar(book, price, desc):
    """Print a book's price line and description line."""
    print(book, '这本书的价格是:', price)
    print('描述信息是:', desc)
print('********')
my_dict = {'price': 89, 'book': '疯狂python讲义', 'desc': '这是一本系统全面的python学习图书'}
# Unpack the dict into keyword arguments matching bar's parameter names.
bar(**my_dict)
print('*******')
# NOTE: the dict is passed as a single positional argument here, so this
# raises TypeError: bar() missing 2 required positional arguments: 'price' and 'desc'.
bar(my_dict)
<|reserved_special_token_1|>
def test(name,message):
    """Print the user and the welcome message."""
    print("用户是:" , name)
    print("欢迎消息是:",message)
my_list = ['孙悟空','欢迎来疯狂软件']
# Unpack the list into the two positional parameters of test().
test(*my_list)
print('*****')
# ###########################
def foo(name,*nums):
    """Print `name` and the varargs tuple collected into `nums`."""
    print("name参数:",name)
    print("nums参数:",nums)
my_tuple = (1,2,3)
# 'fkit' binds to name; the unpacked tuple fills *nums.
foo('fkit',*my_tuple)
print('********')
# Unpacked: 1 binds to name, nums becomes (2, 3).
foo(*my_tuple)
print('*******')
# Not unpacked: the whole tuple binds to name; nums is empty.
foo(my_tuple)
#############################
def bar(book,price,desc):
    """Print a book's price line and description line."""
    print(book,'这本书的价格是:',price)
    print('描述信息是:',desc)
print('********')
my_dict = {'price':89,'book':'疯狂python讲义','desc':'这是一本系统全面的python学习图书'}
# Unpack the dict into keyword arguments matching bar's parameter names.
bar(**my_dict)
print('*******')
# The call below fails without ** unpacking (reverse parameter collection):
# TypeError: bar() missing 2 required positional arguments: 'price' and 'desc'
bar(my_dict)
|
flexible
|
{
"blob_id": "64fb006ea5ff0d101000dd4329b3d957a326ed1a",
"index": 2387,
"step-1": "def test(name, message):\n print('用户是:', name)\n print('欢迎消息是:', message)\n\n\n<mask token>\n",
"step-2": "def test(name, message):\n print('用户是:', name)\n print('欢迎消息是:', message)\n\n\n<mask token>\n\n\ndef foo(name, *nums):\n print('name参数:', name)\n print('nums参数:', nums)\n\n\n<mask token>\n\n\ndef bar(book, price, desc):\n print(book, '这本书的价格是:', price)\n print('描述信息是:', desc)\n\n\n<mask token>\n",
"step-3": "def test(name, message):\n print('用户是:', name)\n print('欢迎消息是:', message)\n\n\n<mask token>\ntest(*my_list)\nprint('*****')\n\n\ndef foo(name, *nums):\n print('name参数:', name)\n print('nums参数:', nums)\n\n\n<mask token>\nfoo('fkit', *my_tuple)\nprint('********')\nfoo(*my_tuple)\nprint('*******')\nfoo(my_tuple)\n\n\ndef bar(book, price, desc):\n print(book, '这本书的价格是:', price)\n print('描述信息是:', desc)\n\n\nprint('********')\n<mask token>\nbar(**my_dict)\nprint('*******')\nbar(my_dict)\n",
"step-4": "def test(name, message):\n print('用户是:', name)\n print('欢迎消息是:', message)\n\n\nmy_list = ['孙悟空', '欢迎来疯狂软件']\ntest(*my_list)\nprint('*****')\n\n\ndef foo(name, *nums):\n print('name参数:', name)\n print('nums参数:', nums)\n\n\nmy_tuple = 1, 2, 3\nfoo('fkit', *my_tuple)\nprint('********')\nfoo(*my_tuple)\nprint('*******')\nfoo(my_tuple)\n\n\ndef bar(book, price, desc):\n print(book, '这本书的价格是:', price)\n print('描述信息是:', desc)\n\n\nprint('********')\nmy_dict = {'price': 89, 'book': '疯狂python讲义', 'desc': '这是一本系统全面的python学习图书'}\nbar(**my_dict)\nprint('*******')\nbar(my_dict)\n",
"step-5": "def test(name,message):\n print(\"用户是:\" , name)\n print(\"欢迎消息是:\",message)\n\nmy_list = ['孙悟空','欢迎来疯狂软件']\ntest(*my_list)\nprint('*****')\n# ###########################\ndef foo(name,*nums):\n print(\"name参数:\",name)\n print(\"nums参数:\",nums)\nmy_tuple = (1,2,3)\n\nfoo('fkit',*my_tuple)\nprint('********')\nfoo(*my_tuple)\nprint('*******')\nfoo(my_tuple)\n#############################\n\ndef bar(book,price,desc):\n print(book,'这本书的价格是:',price)\n print('描述信息是:',desc)\n\nprint('********')\nmy_dict = {'price':89,'book':'疯狂python讲义','desc':'这是一本系统全面的python学习图书'}\nbar(**my_dict)\nprint('*******')\n#如果是下面的调用形式,不采用逆向参数收集将报错\n# TypeError: bar() missing 2 required positional arguments: 'price' and 'desc'\nbar(my_dict)",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app_name = 'gymapp'
urlpatterns = [path('', ClientHomeView.as_view(), name='clienthome'), path(
'about/', ClientAboutView.as_view(), name='clientabout'), path(
'contact/', ClientContactCreateView.as_view(), name='clientcontact'),
path('products/', ClientProductListView.as_view(), name=
'clientproductlist'), path('product/<int:pk>/detail/',
ClientProductDetailView.as_view(), name='clientproductdetail'), path(
'trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),
path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),
name='clienttrainerdetail'), path('services/', ClientServiceListView.
as_view(), name='clientservicelist'), path(
'services/<slug:slug>/detail/', ClientServiceDetailView.as_view(), name
='clientservicedetail'), path('schedule/<slug:slug>/detail/',
ClientScheduleDetailView.as_view(), name='clientscheduledetail'), path(
'testimonial/', TestimonialListView.as_view(), name='testimoniallist'),
path('facilities/', ClientFacilityListView.as_view(), name=
'clientfacilitylist'), path('facilities/<slug:slug>/details',
ClientFacilityDetailView.as_view(), name='clientfacilitydetail'), path(
'events/', ClientEventListView.as_view(), name='clienteventlist'), path
('events/<slug:slug>/details', ClientEventDetailView.as_view(), name=
'clienteventdetail'), path('notices/', ClientNoticeListView.as_view(),
name='clientnoticelist'), path('notices/<slug:slug>/details',
ClientNoticeDetailView.as_view(), name='clientnoticedetail'), path(
'pages/<slug:slug>/details', ClientPageDetailView.as_view(), name=
'clientpagedetail'), path('images/', ClientImageListView.as_view(),
name='clientimagelist'), path('videos/', ClientVideoListView.as_view(),
name='clientvideolist'), path('blogs/', ClientBlogListView.as_view(),
name='clientbloglist'), path('blogs/<slug:slug>/details',
ClientBlogDetailView.as_view(), name='clientblogdetail'), path(
'schedules/', ClientScheduleListView.as_view(), name=
'clientschedulelist'), path('404/', ClientPageNotFoundView.as_view(),
name='clientpagenotfound'), path('subscribe/',
ClientSubscriberCreateView.as_view(), name='clientsubscribercreate'),
path('search/result/', SearchResultView.as_view(), name='searchresult'),
path('login/', ClientLoginView.as_view(), name='clientlogin'), path(
'logout/', ClientLogoutView.as_view(), name='clientlogout'), path(
'register/', ClientRegistrationView.as_view(), name='clientcreate'),
path('cart_update', cart_update, name='cart_update'), path(
'carts/<int:pk>/items/total/', ClientCartTotalView.as_view(), name=
'clientcarttotal')]
<|reserved_special_token_1|>
from django.urls import path
from .views import *
from .utils import *
app_name = 'gymapp'
urlpatterns = [path('', ClientHomeView.as_view(), name='clienthome'), path(
'about/', ClientAboutView.as_view(), name='clientabout'), path(
'contact/', ClientContactCreateView.as_view(), name='clientcontact'),
path('products/', ClientProductListView.as_view(), name=
'clientproductlist'), path('product/<int:pk>/detail/',
ClientProductDetailView.as_view(), name='clientproductdetail'), path(
'trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),
path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),
name='clienttrainerdetail'), path('services/', ClientServiceListView.
as_view(), name='clientservicelist'), path(
'services/<slug:slug>/detail/', ClientServiceDetailView.as_view(), name
='clientservicedetail'), path('schedule/<slug:slug>/detail/',
ClientScheduleDetailView.as_view(), name='clientscheduledetail'), path(
'testimonial/', TestimonialListView.as_view(), name='testimoniallist'),
path('facilities/', ClientFacilityListView.as_view(), name=
'clientfacilitylist'), path('facilities/<slug:slug>/details',
ClientFacilityDetailView.as_view(), name='clientfacilitydetail'), path(
'events/', ClientEventListView.as_view(), name='clienteventlist'), path
('events/<slug:slug>/details', ClientEventDetailView.as_view(), name=
'clienteventdetail'), path('notices/', ClientNoticeListView.as_view(),
name='clientnoticelist'), path('notices/<slug:slug>/details',
ClientNoticeDetailView.as_view(), name='clientnoticedetail'), path(
'pages/<slug:slug>/details', ClientPageDetailView.as_view(), name=
'clientpagedetail'), path('images/', ClientImageListView.as_view(),
name='clientimagelist'), path('videos/', ClientVideoListView.as_view(),
name='clientvideolist'), path('blogs/', ClientBlogListView.as_view(),
name='clientbloglist'), path('blogs/<slug:slug>/details',
ClientBlogDetailView.as_view(), name='clientblogdetail'), path(
'schedules/', ClientScheduleListView.as_view(), name=
'clientschedulelist'), path('404/', ClientPageNotFoundView.as_view(),
name='clientpagenotfound'), path('subscribe/',
ClientSubscriberCreateView.as_view(), name='clientsubscribercreate'),
path('search/result/', SearchResultView.as_view(), name='searchresult'),
path('login/', ClientLoginView.as_view(), name='clientlogin'), path(
'logout/', ClientLogoutView.as_view(), name='clientlogout'), path(
'register/', ClientRegistrationView.as_view(), name='clientcreate'),
path('cart_update', cart_update, name='cart_update'), path(
'carts/<int:pk>/items/total/', ClientCartTotalView.as_view(), name=
'clientcarttotal')]
<|reserved_special_token_1|>
from django.urls import path
from .views import *
from .utils import *
# Namespace used when reversing these routes (e.g. 'gymapp:clienthome').
app_name = 'gymapp'
urlpatterns = [
    # CLIENT PATHS ##
    # CLIENT PATHS ##
    # CLIENT PATHS ##
    # CLIENT PATHS ##
    # general pages
    path('', ClientHomeView.as_view(), name='clienthome'),
    path('about/', ClientAboutView.as_view(), name='clientabout'),
    path('contact/', ClientContactCreateView.as_view(), name='clientcontact'),
    # path('makeanappointment/', ClientAppointmentCreateView.as_view(),
    # name='clientappointmentcreate'),
    # catalog: products and trainers
    path('products/', ClientProductListView.as_view(), name='clientproductlist'),
    path('product/<int:pk>/detail/',ClientProductDetailView.as_view(),
    name='clientproductdetail'),
    path('trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),
    path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),
         name='clienttrainerdetail'),
    path('services/', ClientServiceListView.as_view(),
         name='clientservicelist'),
    path('services/<slug:slug>/detail/',
         ClientServiceDetailView.as_view(), name='clientservicedetail'),
    path('schedule/<slug:slug>/detail/',
         ClientScheduleDetailView.as_view(), name='clientscheduledetail'),
    path('testimonial/',
         TestimonialListView.as_view(), name='testimoniallist'),
    # path('slider/',
    #      SliderListView.as_view(), name='sliderlist'),
    path('facilities/', ClientFacilityListView.as_view(),
         name='clientfacilitylist'),
    path('facilities/<slug:slug>/details',
         ClientFacilityDetailView.as_view(), name='clientfacilitydetail'),
    path('events/', ClientEventListView.as_view(),
         name='clienteventlist'),
    path('events/<slug:slug>/details',
         ClientEventDetailView.as_view(), name='clienteventdetail'),
    path('notices/', ClientNoticeListView.as_view(), name='clientnoticelist'),
    path('notices/<slug:slug>/details',
         ClientNoticeDetailView.as_view(), name='clientnoticedetail'),
    path('pages/<slug:slug>/details',
         ClientPageDetailView.as_view(), name='clientpagedetail'),
    # media and editorial content
    path('images/', ClientImageListView.as_view(), name='clientimagelist'),
    path('videos/', ClientVideoListView.as_view(), name='clientvideolist'),
    path('blogs/', ClientBlogListView.as_view(), name='clientbloglist'),
    path('blogs/<slug:slug>/details',
         ClientBlogDetailView.as_view(), name='clientblogdetail'),
    path('schedules/', ClientScheduleListView.as_view(), name='clientschedulelist'),
    path('404/', ClientPageNotFoundView.as_view(), name='clientpagenotfound'),
    path('subscribe/', ClientSubscriberCreateView.as_view(),
         name='clientsubscribercreate'),
    path('search/result/', SearchResultView.as_view(), name="searchresult"),
    # authentication
    path('login/', ClientLoginView.as_view(), name='clientlogin'),
    path('logout/', ClientLogoutView.as_view(), name='clientlogout'),
    path('register/', ClientRegistrationView.as_view(), name='clientcreate'),
    # cart endpoints (cart_update is a function view, not a CBV)
    path('cart_update',cart_update,name = 'cart_update'),
    path('carts/<int:pk>/items/total/',ClientCartTotalView.as_view(), name='clientcarttotal'),
]
|
flexible
|
{
"blob_id": "48a4331e4b26ea81f1c52ae76db1e92a57cb378c",
"index": 2654,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'gymapp'\nurlpatterns = [path('', ClientHomeView.as_view(), name='clienthome'), path(\n 'about/', ClientAboutView.as_view(), name='clientabout'), path(\n 'contact/', ClientContactCreateView.as_view(), name='clientcontact'),\n path('products/', ClientProductListView.as_view(), name=\n 'clientproductlist'), path('product/<int:pk>/detail/',\n ClientProductDetailView.as_view(), name='clientproductdetail'), path(\n 'trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),\n path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),\n name='clienttrainerdetail'), path('services/', ClientServiceListView.\n as_view(), name='clientservicelist'), path(\n 'services/<slug:slug>/detail/', ClientServiceDetailView.as_view(), name\n ='clientservicedetail'), path('schedule/<slug:slug>/detail/',\n ClientScheduleDetailView.as_view(), name='clientscheduledetail'), path(\n 'testimonial/', TestimonialListView.as_view(), name='testimoniallist'),\n path('facilities/', ClientFacilityListView.as_view(), name=\n 'clientfacilitylist'), path('facilities/<slug:slug>/details',\n ClientFacilityDetailView.as_view(), name='clientfacilitydetail'), path(\n 'events/', ClientEventListView.as_view(), name='clienteventlist'), path\n ('events/<slug:slug>/details', ClientEventDetailView.as_view(), name=\n 'clienteventdetail'), path('notices/', ClientNoticeListView.as_view(),\n name='clientnoticelist'), path('notices/<slug:slug>/details',\n ClientNoticeDetailView.as_view(), name='clientnoticedetail'), path(\n 'pages/<slug:slug>/details', ClientPageDetailView.as_view(), name=\n 'clientpagedetail'), path('images/', ClientImageListView.as_view(),\n name='clientimagelist'), path('videos/', ClientVideoListView.as_view(),\n name='clientvideolist'), path('blogs/', ClientBlogListView.as_view(),\n name='clientbloglist'), path('blogs/<slug:slug>/details',\n ClientBlogDetailView.as_view(), name='clientblogdetail'), path(\n 'schedules/', 
ClientScheduleListView.as_view(), name=\n 'clientschedulelist'), path('404/', ClientPageNotFoundView.as_view(),\n name='clientpagenotfound'), path('subscribe/',\n ClientSubscriberCreateView.as_view(), name='clientsubscribercreate'),\n path('search/result/', SearchResultView.as_view(), name='searchresult'),\n path('login/', ClientLoginView.as_view(), name='clientlogin'), path(\n 'logout/', ClientLogoutView.as_view(), name='clientlogout'), path(\n 'register/', ClientRegistrationView.as_view(), name='clientcreate'),\n path('cart_update', cart_update, name='cart_update'), path(\n 'carts/<int:pk>/items/total/', ClientCartTotalView.as_view(), name=\n 'clientcarttotal')]\n",
"step-3": "from django.urls import path\nfrom .views import *\nfrom .utils import *\napp_name = 'gymapp'\nurlpatterns = [path('', ClientHomeView.as_view(), name='clienthome'), path(\n 'about/', ClientAboutView.as_view(), name='clientabout'), path(\n 'contact/', ClientContactCreateView.as_view(), name='clientcontact'),\n path('products/', ClientProductListView.as_view(), name=\n 'clientproductlist'), path('product/<int:pk>/detail/',\n ClientProductDetailView.as_view(), name='clientproductdetail'), path(\n 'trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),\n path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),\n name='clienttrainerdetail'), path('services/', ClientServiceListView.\n as_view(), name='clientservicelist'), path(\n 'services/<slug:slug>/detail/', ClientServiceDetailView.as_view(), name\n ='clientservicedetail'), path('schedule/<slug:slug>/detail/',\n ClientScheduleDetailView.as_view(), name='clientscheduledetail'), path(\n 'testimonial/', TestimonialListView.as_view(), name='testimoniallist'),\n path('facilities/', ClientFacilityListView.as_view(), name=\n 'clientfacilitylist'), path('facilities/<slug:slug>/details',\n ClientFacilityDetailView.as_view(), name='clientfacilitydetail'), path(\n 'events/', ClientEventListView.as_view(), name='clienteventlist'), path\n ('events/<slug:slug>/details', ClientEventDetailView.as_view(), name=\n 'clienteventdetail'), path('notices/', ClientNoticeListView.as_view(),\n name='clientnoticelist'), path('notices/<slug:slug>/details',\n ClientNoticeDetailView.as_view(), name='clientnoticedetail'), path(\n 'pages/<slug:slug>/details', ClientPageDetailView.as_view(), name=\n 'clientpagedetail'), path('images/', ClientImageListView.as_view(),\n name='clientimagelist'), path('videos/', ClientVideoListView.as_view(),\n name='clientvideolist'), path('blogs/', ClientBlogListView.as_view(),\n name='clientbloglist'), path('blogs/<slug:slug>/details',\n ClientBlogDetailView.as_view(), 
name='clientblogdetail'), path(\n 'schedules/', ClientScheduleListView.as_view(), name=\n 'clientschedulelist'), path('404/', ClientPageNotFoundView.as_view(),\n name='clientpagenotfound'), path('subscribe/',\n ClientSubscriberCreateView.as_view(), name='clientsubscribercreate'),\n path('search/result/', SearchResultView.as_view(), name='searchresult'),\n path('login/', ClientLoginView.as_view(), name='clientlogin'), path(\n 'logout/', ClientLogoutView.as_view(), name='clientlogout'), path(\n 'register/', ClientRegistrationView.as_view(), name='clientcreate'),\n path('cart_update', cart_update, name='cart_update'), path(\n 'carts/<int:pk>/items/total/', ClientCartTotalView.as_view(), name=\n 'clientcarttotal')]\n",
"step-4": "from django.urls import path\nfrom .views import *\nfrom .utils import *\n\n\napp_name = 'gymapp'\n\nurlpatterns = [\n\n\n # CLIENT PATHS ##\n # CLIENT PATHS ##\n # CLIENT PATHS ##\n # CLIENT PATHS ##\n\n # general pages\n\n path('', ClientHomeView.as_view(), name='clienthome'),\n path('about/', ClientAboutView.as_view(), name='clientabout'),\n path('contact/', ClientContactCreateView.as_view(), name='clientcontact'),\n # path('makeanappointment/', ClientAppointmentCreateView.as_view(),\n # name='clientappointmentcreate'),\n path('products/', ClientProductListView.as_view(), name='clientproductlist'),\n path('product/<int:pk>/detail/',ClientProductDetailView.as_view(), \n name='clientproductdetail'),\n path('trainers/', ClientTrainerListView.as_view(), name='clienttrainerlist'),\n path('trainer/<slug:slug>/detail/', ClientTrainerDetailView.as_view(),\n name='clienttrainerdetail'),\n path('services/', ClientServiceListView.as_view(),\n name='clientservicelist'),\n path('services/<slug:slug>/detail/',\n ClientServiceDetailView.as_view(), name='clientservicedetail'),\n path('schedule/<slug:slug>/detail/',\n ClientScheduleDetailView.as_view(), name='clientscheduledetail'),\n path('testimonial/',\n TestimonialListView.as_view(), name='testimoniallist'),\n # path('slider/',\n # SliderListView.as_view(), name='sliderlist'),\n path('facilities/', ClientFacilityListView.as_view(),\n name='clientfacilitylist'),\n path('facilities/<slug:slug>/details',\n ClientFacilityDetailView.as_view(), name='clientfacilitydetail'),\n path('events/', ClientEventListView.as_view(),\n name='clienteventlist'),\n path('events/<slug:slug>/details',\n ClientEventDetailView.as_view(), name='clienteventdetail'),\n path('notices/', ClientNoticeListView.as_view(), name='clientnoticelist'),\n path('notices/<slug:slug>/details',\n ClientNoticeDetailView.as_view(), name='clientnoticedetail'),\n path('pages/<slug:slug>/details',\n ClientPageDetailView.as_view(), name='clientpagedetail'),\n 
path('images/', ClientImageListView.as_view(), name='clientimagelist'),\n path('videos/', ClientVideoListView.as_view(), name='clientvideolist'),\n path('blogs/', ClientBlogListView.as_view(), name='clientbloglist'),\n path('blogs/<slug:slug>/details',\n ClientBlogDetailView.as_view(), name='clientblogdetail'),\n path('schedules/', ClientScheduleListView.as_view(), name='clientschedulelist'),\n path('404/', ClientPageNotFoundView.as_view(), name='clientpagenotfound'),\n path('subscribe/', ClientSubscriberCreateView.as_view(),\n name='clientsubscribercreate'),\n path('search/result/', SearchResultView.as_view(), name=\"searchresult\"),\n path('login/', ClientLoginView.as_view(), name='clientlogin'),\n path('logout/', ClientLogoutView.as_view(), name='clientlogout'),\n path('register/', ClientRegistrationView.as_view(), name='clientcreate'),\n path('cart_update',cart_update,name = 'cart_update'),\n path('carts/<int:pk>/items/total/',ClientCartTotalView.as_view(), name='clientcarttotal'),\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import six
from oslo_log import log
from watcher._i18n import _
from watcher.decision_engine.strategy.strategies import base
LOG = log.getLogger(__name__)
@six.add_metaclass(abc.ABCMeta)
class ParallelMigrationStrategy(base.BaseStrategy):
    """Build a solution of migration actions for VMs and Cinder volumes.

    The strategy input parameters map each resource id to its current
    status plus an optional destination; every entry is translated into
    the matching Watcher action: live/cold migration for VMs, volume
    update/retype for volumes.
    """

    # Resource kinds accepted in the input parameters.
    VM = "vm"
    VOLUME = "volume"
    # Resource statuses that select the migration method.
    ACTIVE = "active"
    SHUTOFF = "shutoff"
    AVAILABLE = "available"
    IN_USE = "in-use"
    # Action types emitted into the solution.
    LIVE_MIGRATION = "live_migration"
    COLD_MIGRATION = "cold_migration"
    VOLUME_MIGRATION = "volume_migration"
    VOLUME_RETYPE = "volume_retype"
    VOLUME_UPDATE = "volume_update"
    # Keys read from each per-resource parameter dict.
    STATUS = "status"
    DST_HOSTNAME = "dst_hostname"
    DST_TYPE = "dst_type"

    def __init__(self, config, osc=None):
        """:param config: strategy configuration
        :param osc: optional OpenStack clients helper
        """
        super(ParallelMigrationStrategy, self).__init__(config, osc)

    def pre_execute(self):
        # No pre-processing is needed for this strategy.
        pass

    def do_execute(self):
        """Translate the input parameters into migration actions.

        :raises Exception: if a resource kind or status is unsupported
        """
        params = self.input_parameters.params
        # BUG FIX: dict.iteritems() exists only on Python 2 and raised
        # AttributeError on Python 3; six.iteritems() works on both
        # (the module already depends on six for 2/3 compatibility).
        for key, value in six.iteritems(params):
            # `attrs` renamed from `dict`, which shadowed the builtin.
            for resource_id, attrs in value.items():
                resource_status = attrs.get(self.STATUS)
                dst_hostname = attrs.get(self.DST_HOSTNAME)
                dst_type = attrs.get(self.DST_TYPE)
                if key == self.VM:
                    if resource_status == self.ACTIVE:
                        # do live migration
                        self._live_migration(resource_id, dst_hostname)
                    elif resource_status == self.SHUTOFF:
                        # do cold migration
                        # cold migration can not specify dest_hostname
                        self._cold_migration(resource_id)
                    else:
                        raise Exception("Wrong status: %s." % resource_status)
                elif key == self.VOLUME:
                    if resource_status == self.IN_USE:
                        # attached volume: do nova volume update
                        self._volume_update(resource_id, dst_type)
                    elif resource_status == self.AVAILABLE:
                        # detached volume with no snapshots
                        # do cinder retype
                        self._volume_retype(resource_id, dst_type)
                    else:
                        raise Exception("Wrong status: %s." % resource_status)
                else:
                    raise Exception("Wrong key: %s." % key)

    def _live_migration(self, resource_id, dst_hostname):
        """Queue a live-migration action for an ACTIVE instance."""
        parameters = {self.DST_HOSTNAME: dst_hostname}
        self.solution.add_action(
            action_type=self.LIVE_MIGRATION,
            resource_id=resource_id,
            input_parameters=parameters)

    def _cold_migration(self, resource_id):
        """Queue a cold-migration action for a SHUTOFF instance.

        Cold migration cannot specify a destination host, so the action
        takes no input parameters.
        """
        self.solution.add_action(
            action_type=self.COLD_MIGRATION,
            resource_id=resource_id,
            input_parameters={})

    def _volume_update(self, resource_id, dst_type):
        """Queue a volume-update action for an attached (in-use) volume."""
        parameters = {self.DST_TYPE: dst_type}
        self.solution.add_action(
            action_type=self.VOLUME_UPDATE,
            resource_id=resource_id,
            input_parameters=parameters)

    def _volume_migrate(self, resource_id, dst_hostname):
        """Queue a volume-migration action to a specific host.

        NOTE(review): currently unreferenced by do_execute(); kept for
        interface compatibility.
        """
        parameters = {self.DST_HOSTNAME: dst_hostname}
        self.solution.add_action(
            action_type=self.VOLUME_MIGRATION,
            resource_id=resource_id,
            input_parameters=parameters)

    def _volume_retype(self, resource_id, dst_type):
        """Queue a volume-retype action for a detached (available) volume."""
        parameters = {self.DST_TYPE: dst_type}
        self.solution.add_action(
            action_type=self.VOLUME_RETYPE,
            resource_id=resource_id,
            input_parameters=parameters)

    def post_execute(self):
        # No post-processing is needed for this strategy.
        pass

    @classmethod
    def get_goal_name(cls):
        return "zone_migration"

    @classmethod
    def get_name(cls):
        return "parallel_migration"

    @classmethod
    def get_display_name(cls):
        return _("Parallel migration strategy")

    @classmethod
    def get_translatable_display_name(cls):
        return "Parallel migration strategy"

    @classmethod
    def get_schema(cls):
        """Return the JSON schema describing the expected input params."""
        return {
            "properties": {
                "params": {
                    "description": "",
                    "type": "object",
                    "default":
                        {"vm":
                            {"instance_id1":
                                {"status": "active",
                                 "dst_hostname": "vm_dest_hostname1"},
                             "instance_id2":
                                {"status": "shutoff"}},
                         "volume":
                            {"cinder_id1":
                                {"status": "available",
                                 "dst_type": "volume_dst_type"},
                             "cinder_id2":
                                {"status": "in-use",
                                 "dst_type": "volume_dst_type"}}}
                }
            }
        }
|
normal
|
{
"blob_id": "43e721ac45570e4f9ab9c1970abee3da6db40afa",
"index": 156,
"step-1": "<mask token>\n\n\n@six.add_metaclass(abc.ABCMeta)\nclass ParallelMigrationStrategy(base.BaseStrategy):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, config, osc=None):\n super(ParallelMigrationStrategy, self).__init__(config, osc)\n\n def pre_execute(self):\n pass\n <mask token>\n\n def _live_migration(self, resource_id, dst_hostname):\n parameters = {self.DST_HOSTNAME: dst_hostname}\n self.solution.add_action(action_type=self.LIVE_MIGRATION,\n resource_id=resource_id, input_parameters=parameters)\n\n def _cold_migration(self, resource_id):\n self.solution.add_action(action_type=self.COLD_MIGRATION,\n resource_id=resource_id, input_parameters={})\n <mask token>\n\n def _volume_migrate(self, resource_id, dst_hostname):\n parameters = {self.DST_HOSTNAME: dst_hostname}\n self.solution.add_action(action_type=self.VOLUME_MIGRATION,\n resource_id=resource_id, input_parameters=parameters)\n\n def _volume_retype(self, resource_id, dst_type):\n parameters = {self.DST_TYPE: dst_type}\n self.solution.add_action(action_type=self.VOLUME_RETYPE,\n resource_id=resource_id, input_parameters=parameters)\n <mask token>\n\n @classmethod\n def get_goal_name(cls):\n return 'zone_migration'\n\n @classmethod\n def get_name(cls):\n return 'parallel_migration'\n <mask token>\n\n @classmethod\n def get_translatable_display_name(cls):\n return 'Parallel migration strategy'\n\n @classmethod\n def get_schema(cls):\n return {'properties': {'params': {'description': '', 'type':\n 'object', 'default': {'vm': {'instance_id1': {'status':\n 'active', 'dst_hostname': 'vm_dest_hostname1'}, 'instance_id2':\n {'status': 'shutoff'}}, 'volume': {'cinder_id1': {'status':\n 'available', 'dst_type': 'volume_dst_type'}, 'cinder_id2': {\n 'status': 'in-use', 'dst_type': 'volume_dst_type'}}}}}}\n",
"step-2": "<mask token>\n\n\n@six.add_metaclass(abc.ABCMeta)\nclass ParallelMigrationStrategy(base.BaseStrategy):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, config, osc=None):\n super(ParallelMigrationStrategy, self).__init__(config, osc)\n\n def pre_execute(self):\n pass\n <mask token>\n\n def _live_migration(self, resource_id, dst_hostname):\n parameters = {self.DST_HOSTNAME: dst_hostname}\n self.solution.add_action(action_type=self.LIVE_MIGRATION,\n resource_id=resource_id, input_parameters=parameters)\n\n def _cold_migration(self, resource_id):\n self.solution.add_action(action_type=self.COLD_MIGRATION,\n resource_id=resource_id, input_parameters={})\n <mask token>\n\n def _volume_migrate(self, resource_id, dst_hostname):\n parameters = {self.DST_HOSTNAME: dst_hostname}\n self.solution.add_action(action_type=self.VOLUME_MIGRATION,\n resource_id=resource_id, input_parameters=parameters)\n\n def _volume_retype(self, resource_id, dst_type):\n parameters = {self.DST_TYPE: dst_type}\n self.solution.add_action(action_type=self.VOLUME_RETYPE,\n resource_id=resource_id, input_parameters=parameters)\n\n def post_execute(self):\n pass\n\n @classmethod\n def get_goal_name(cls):\n return 'zone_migration'\n\n @classmethod\n def get_name(cls):\n return 'parallel_migration'\n\n @classmethod\n def get_display_name(cls):\n return _('Parallel migration strategy')\n\n @classmethod\n def get_translatable_display_name(cls):\n return 'Parallel migration strategy'\n\n @classmethod\n def get_schema(cls):\n return {'properties': {'params': {'description': '', 'type':\n 'object', 'default': {'vm': {'instance_id1': {'status':\n 'active', 'dst_hostname': 'vm_dest_hostname1'}, 'instance_id2':\n {'status': 'shutoff'}}, 'volume': {'cinder_id1': {'status':\n 'available', 'dst_type': 'volume_dst_type'}, 
'cinder_id2': {\n 'status': 'in-use', 'dst_type': 'volume_dst_type'}}}}}}\n",
"step-3": "<mask token>\n\n\n@six.add_metaclass(abc.ABCMeta)\nclass ParallelMigrationStrategy(base.BaseStrategy):\n VM = 'vm'\n VOLUME = 'volume'\n ACTIVE = 'active'\n SHUTOFF = 'shutoff'\n AVAILABLE = 'available'\n IN_USE = 'in-use'\n LIVE_MIGRATION = 'live_migration'\n COLD_MIGRATION = 'cold_migration'\n VOLUME_MIGRATION = 'volume_migration'\n VOLUME_RETYPE = 'volume_retype'\n VOLUME_UPDATE = 'volume_update'\n STATUS = 'status'\n DST_HOSTNAME = 'dst_hostname'\n DST_TYPE = 'dst_type'\n\n def __init__(self, config, osc=None):\n super(ParallelMigrationStrategy, self).__init__(config, osc)\n\n def pre_execute(self):\n pass\n\n def do_execute(self):\n params = self.input_parameters.params\n for key, value in params.iteritems():\n for resource_id, dict in value.items():\n resource_status = dict.get(self.STATUS)\n dst_hostname = dict.get(self.DST_HOSTNAME)\n dst_type = dict.get(self.DST_TYPE)\n if key == self.VM:\n if resource_status == self.ACTIVE:\n self._live_migration(resource_id, dst_hostname)\n elif resource_status == self.SHUTOFF:\n self._cold_migration(resource_id)\n else:\n raise Exception('Wrong status: %s.' % resource_status)\n elif key == self.VOLUME:\n if resource_status == self.IN_USE:\n self._volume_update(resource_id, dst_type)\n elif resource_status == self.AVAILABLE:\n self._volume_retype(resource_id, dst_type)\n else:\n raise Exception('Wrong status: %s.' % resource_status)\n else:\n raise Exception('Wrong key: %s.' 
% key)\n\n def _live_migration(self, resource_id, dst_hostname):\n parameters = {self.DST_HOSTNAME: dst_hostname}\n self.solution.add_action(action_type=self.LIVE_MIGRATION,\n resource_id=resource_id, input_parameters=parameters)\n\n def _cold_migration(self, resource_id):\n self.solution.add_action(action_type=self.COLD_MIGRATION,\n resource_id=resource_id, input_parameters={})\n\n def _volume_update(self, resource_id, dst_type):\n parameters = {self.DST_TYPE: dst_type}\n self.solution.add_action(action_type=self.VOLUME_UPDATE,\n resource_id=resource_id, input_parameters=parameters)\n\n def _volume_migrate(self, resource_id, dst_hostname):\n parameters = {self.DST_HOSTNAME: dst_hostname}\n self.solution.add_action(action_type=self.VOLUME_MIGRATION,\n resource_id=resource_id, input_parameters=parameters)\n\n def _volume_retype(self, resource_id, dst_type):\n parameters = {self.DST_TYPE: dst_type}\n self.solution.add_action(action_type=self.VOLUME_RETYPE,\n resource_id=resource_id, input_parameters=parameters)\n\n def post_execute(self):\n pass\n\n @classmethod\n def get_goal_name(cls):\n return 'zone_migration'\n\n @classmethod\n def get_name(cls):\n return 'parallel_migration'\n\n @classmethod\n def get_display_name(cls):\n return _('Parallel migration strategy')\n\n @classmethod\n def get_translatable_display_name(cls):\n return 'Parallel migration strategy'\n\n @classmethod\n def get_schema(cls):\n return {'properties': {'params': {'description': '', 'type':\n 'object', 'default': {'vm': {'instance_id1': {'status':\n 'active', 'dst_hostname': 'vm_dest_hostname1'}, 'instance_id2':\n {'status': 'shutoff'}}, 'volume': {'cinder_id1': {'status':\n 'available', 'dst_type': 'volume_dst_type'}, 'cinder_id2': {\n 'status': 'in-use', 'dst_type': 'volume_dst_type'}}}}}}\n",
"step-4": "import abc\nimport six\nfrom oslo_log import log\nfrom watcher._i18n import _\nfrom watcher.decision_engine.strategy.strategies import base\nLOG = log.getLogger(__name__)\n\n\n@six.add_metaclass(abc.ABCMeta)\nclass ParallelMigrationStrategy(base.BaseStrategy):\n VM = 'vm'\n VOLUME = 'volume'\n ACTIVE = 'active'\n SHUTOFF = 'shutoff'\n AVAILABLE = 'available'\n IN_USE = 'in-use'\n LIVE_MIGRATION = 'live_migration'\n COLD_MIGRATION = 'cold_migration'\n VOLUME_MIGRATION = 'volume_migration'\n VOLUME_RETYPE = 'volume_retype'\n VOLUME_UPDATE = 'volume_update'\n STATUS = 'status'\n DST_HOSTNAME = 'dst_hostname'\n DST_TYPE = 'dst_type'\n\n def __init__(self, config, osc=None):\n super(ParallelMigrationStrategy, self).__init__(config, osc)\n\n def pre_execute(self):\n pass\n\n def do_execute(self):\n params = self.input_parameters.params\n for key, value in params.iteritems():\n for resource_id, dict in value.items():\n resource_status = dict.get(self.STATUS)\n dst_hostname = dict.get(self.DST_HOSTNAME)\n dst_type = dict.get(self.DST_TYPE)\n if key == self.VM:\n if resource_status == self.ACTIVE:\n self._live_migration(resource_id, dst_hostname)\n elif resource_status == self.SHUTOFF:\n self._cold_migration(resource_id)\n else:\n raise Exception('Wrong status: %s.' % resource_status)\n elif key == self.VOLUME:\n if resource_status == self.IN_USE:\n self._volume_update(resource_id, dst_type)\n elif resource_status == self.AVAILABLE:\n self._volume_retype(resource_id, dst_type)\n else:\n raise Exception('Wrong status: %s.' % resource_status)\n else:\n raise Exception('Wrong key: %s.' 
% key)\n\n def _live_migration(self, resource_id, dst_hostname):\n parameters = {self.DST_HOSTNAME: dst_hostname}\n self.solution.add_action(action_type=self.LIVE_MIGRATION,\n resource_id=resource_id, input_parameters=parameters)\n\n def _cold_migration(self, resource_id):\n self.solution.add_action(action_type=self.COLD_MIGRATION,\n resource_id=resource_id, input_parameters={})\n\n def _volume_update(self, resource_id, dst_type):\n parameters = {self.DST_TYPE: dst_type}\n self.solution.add_action(action_type=self.VOLUME_UPDATE,\n resource_id=resource_id, input_parameters=parameters)\n\n def _volume_migrate(self, resource_id, dst_hostname):\n parameters = {self.DST_HOSTNAME: dst_hostname}\n self.solution.add_action(action_type=self.VOLUME_MIGRATION,\n resource_id=resource_id, input_parameters=parameters)\n\n def _volume_retype(self, resource_id, dst_type):\n parameters = {self.DST_TYPE: dst_type}\n self.solution.add_action(action_type=self.VOLUME_RETYPE,\n resource_id=resource_id, input_parameters=parameters)\n\n def post_execute(self):\n pass\n\n @classmethod\n def get_goal_name(cls):\n return 'zone_migration'\n\n @classmethod\n def get_name(cls):\n return 'parallel_migration'\n\n @classmethod\n def get_display_name(cls):\n return _('Parallel migration strategy')\n\n @classmethod\n def get_translatable_display_name(cls):\n return 'Parallel migration strategy'\n\n @classmethod\n def get_schema(cls):\n return {'properties': {'params': {'description': '', 'type':\n 'object', 'default': {'vm': {'instance_id1': {'status':\n 'active', 'dst_hostname': 'vm_dest_hostname1'}, 'instance_id2':\n {'status': 'shutoff'}}, 'volume': {'cinder_id1': {'status':\n 'available', 'dst_type': 'volume_dst_type'}, 'cinder_id2': {\n 'status': 'in-use', 'dst_type': 'volume_dst_type'}}}}}}\n",
"step-5": "#\n# Licensed under the Apache License, Version 2.0 (the \"License\"); you may\n# not use this file except in compliance with the License. You may obtain\n# a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS, WITHOUT\n# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the\n# License for the specific language governing permissions and limitations\n# under the License.\n\nimport abc\n\nimport six\n\nfrom oslo_log import log\n\nfrom watcher._i18n import _\nfrom watcher.decision_engine.strategy.strategies import base\n\nLOG = log.getLogger(__name__)\n\n\n@six.add_metaclass(abc.ABCMeta)\nclass ParallelMigrationStrategy(base.BaseStrategy):\n\n VM = \"vm\"\n VOLUME = \"volume\"\n ACTIVE = \"active\"\n SHUTOFF = \"shutoff\"\n AVAILABLE = \"available\"\n IN_USE = \"in-use\"\n LIVE_MIGRATION = \"live_migration\"\n COLD_MIGRATION = \"cold_migration\"\n VOLUME_MIGRATION = \"volume_migration\"\n VOLUME_RETYPE = \"volume_retype\"\n VOLUME_UPDATE = \"volume_update\"\n STATUS = \"status\"\n DST_HOSTNAME = \"dst_hostname\"\n DST_TYPE = \"dst_type\"\n\n def __init__(self, config, osc=None):\n super(ParallelMigrationStrategy, self).__init__(config, osc)\n\n def pre_execute(self):\n pass\n\n def do_execute(self):\n params = self.input_parameters.params\n for key, value in params.iteritems():\n for resource_id, dict in value.items():\n resource_status = dict.get(self.STATUS)\n dst_hostname = dict.get(self.DST_HOSTNAME)\n dst_type = dict.get(self.DST_TYPE)\n if key == self.VM:\n if resource_status == self.ACTIVE:\n # do live migration\n self._live_migration(resource_id, dst_hostname)\n elif resource_status == self.SHUTOFF:\n # do cold migration\n # cold migration can not specify dest_hostname\n self._cold_migration(resource_id)\n else:\n raise Exception(\"Wrong status: %s.\" % 
resource_status)\n elif key == self.VOLUME:\n if resource_status == self.IN_USE:\n # do novavolume update\n self._volume_update(resource_id, dst_type)\n elif resource_status == self.AVAILABLE:\n # detached volume with no snapshots\n # do cinder migrate\n self._volume_retype(resource_id, dst_type)\n else:\n raise Exception(\"Wrong status: %s.\" % resource_status)\n else:\n raise Exception(\"Wrong key: %s.\" % key)\n\n def _live_migration(self, resource_id, dst_hostname):\n parameters = {self.DST_HOSTNAME: dst_hostname}\n self.solution.add_action(\n action_type=self.LIVE_MIGRATION,\n resource_id=resource_id,\n input_parameters=parameters)\n\n def _cold_migration(self, resource_id):\n self.solution.add_action(\n action_type=self.COLD_MIGRATION,\n resource_id=resource_id,\n input_parameters={})\n\n def _volume_update(self, resource_id, dst_type):\n parameters = {self.DST_TYPE: dst_type}\n self.solution.add_action(\n action_type=self.VOLUME_UPDATE,\n resource_id=resource_id,\n input_parameters=parameters)\n\n def _volume_migrate(self, resource_id, dst_hostname):\n parameters = {self.DST_HOSTNAME: dst_hostname}\n self.solution.add_action(\n action_type=self.VOLUME_MIGRATION,\n resource_id=resource_id,\n input_parameters=parameters)\n\n def _volume_retype(self, resource_id, dst_type):\n parameters = {self.DST_TYPE: dst_type}\n self.solution.add_action(\n action_type=self.VOLUME_RETYPE,\n resource_id=resource_id,\n input_parameters=parameters)\n\n def post_execute(self):\n pass\n\n @classmethod\n def get_goal_name(cls):\n return \"zone_migration\"\n\n @classmethod\n def get_name(cls):\n return \"parallel_migration\"\n\n @classmethod\n def get_display_name(cls):\n return _(\"Parallel migration strategy\")\n\n @classmethod\n def get_translatable_display_name(cls):\n return \"Parallel migration strategy\"\n\n @classmethod\n def get_schema(cls):\n return {\n \"properties\": {\n \"params\": {\n \"description\": \"\",\n \"type\": \"object\",\n \"default\":\n {\"vm\":\n 
{\"instance_id1\":\n {\"status\": \"active\",\n \"dst_hostname\": \"vm_dest_hostname1\"},\n \"instance_id2\":\n {\"status\": \"shutoff\"}},\n \"volume\":\n {\"cinder_id1\":\n {\"status\": \"available\",\n \"dst_type\": \"volume_dst_type\"},\n \"cinder_id2\":\n {\"status\": \"in-use\",\n \"dst_type\": \"volume_dst_type\"}}}\n }\n }\n }\n",
"step-ids": [
11,
13,
16,
18,
19
]
}
|
[
11,
13,
16,
18,
19
] |
<|reserved_special_token_0|>
class Hexapod:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_delta_L(self):
"""
Расчет геометрии положения точек A_i в каждый момент времени.
Отрисовка графиков изменения длин, скорости и ускорения для каждого привода по времени.
Задается: self.A, self.all_full_lengths, self.set_r
:return: None
"""
print('####################################################')
print('[INFO] solve delta L, Velocity, Acceleration ...')
print('####################################################')
R_matrix = None
if self.axis == 'x':
R_matrix = self.R_matrix_x
elif self.axis == 'y':
R_matrix = self.R_matrix_y
elif self.axis == 'z':
R_matrix = self.R_matrix_z
dL_all = []
L_all = []
coordinates_A = []
colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):
'b+--', (5): 'bx-'}
for i, j in self.indexes:
print('[INFO] Поршень №{}'.format(j + 1))
dl = []
l = []
coord = []
for t in self.time:
try:
A = np.dot(R_matrix(t), self.A_0[i])
except Exception:
print('Type error axis')
L = np.linalg.norm(self.B[j] - A)
print(self.B[j] - A)
print(self.L, L)
print('dL[мм] = {:.5f}'.format((L - self.L) * 1000.0))
l.append(L)
dl.append(round((L - self.L) * 1000.0, 5))
coord.append(A)
dL_all.append(dl)
L_all.append(l)
coordinates_A.append(coord)
v = [0.0]
for k in range(self.steps - 1):
v.append((dl[k + 1] - dl[k]) / (self.time[k + 1] - self.
time[k]))
pylab.figure(1)
pylab.plot(self.time[5:], v[5:], colors[j])
print('[INFO] v_max =', np.max(np.abs(v[5:])))
a = [0.0]
for k in range(self.steps - 1):
a.append((v[k + 1] - v[k]) / (self.time[k + 1] - self.time[k]))
pylab.figure(2)
pylab.plot(self.time[5:], a[5:], colors[j])
print('[INFO] a_max =', np.max(np.abs(a[5:])))
print('****************************************************')
pylab.figure(1)
pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
'6 line'], loc=0)
pylab.title('Velocity')
pylab.xlabel('Time [s]')
pylab.ylabel('Velocity [mm/s]')
pylab.grid()
pylab.figure(2)
pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
'6 line'], loc=0)
pylab.title('Acceleration')
pylab.xlabel('Time [s]')
pylab.ylabel('Acceleration [mm/s^2]')
pylab.grid()
pylab.figure(3)
for i in range(6):
pylab.plot(self.time, dL_all[i], colors[i])
pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
'6 line'], loc=0)
pylab.title('Delta length')
pylab.xlabel('Time [s]')
pylab.ylabel('dL [mm]')
pylab.grid()
plt.show()
self.A = np.array(coordinates_A[0::2])
self.all_full_lengths = np.array(L_all)
self.set_r()
self.plot_3d_lines()
<|reserved_special_token_0|>
def calculate_angles(self, l1, l2):
"""
Решение теоремы косинусов для поиска угла по трем сторонам
:param l1: прилежащая сторона к вычисляемому углу
:param l2: противолежащая сторона к углу
:return: (alpha, teta, gamma) - углы в треугольнике сил
"""
cos_teta = (l1 ** 2 + (2 * self.a) ** 2 - l2 ** 2
) / 2 * l1 * 2 * self.a
teta = np.arccos(cos_teta) * 180.0 / np.pi
b = l1 ** 2 + self.a ** 2 - 2 * l1 * self.a * cos_teta
cos_alpha = (l1 ** 2 + b ** 2 - l2 ** 2) / 2 * l1 * self.a
alpha = np.arccos(cos_alpha) * 180.0 / np.pi
gamma = 180.0 - teta - alpha
return alpha, teta, gamma
<|reserved_special_token_0|>
def solve_dynamic_forces(self):
"""
решение обратной задачи стенда
Первое приближение - решение двумерной зазадчи для пооврота оси вокруг оси х
:return: минимальная и максимальная нагрузка на каждый цилиндр
"""
print('####################################################')
print('[INFO] solve DYNAMIC forces ...')
print('####################################################')
A = []
for i in range(self.steps):
a = []
for j in range(3):
a_ = self.A[j, i, :]
a.append(a_)
A.append(a)
R = []
for i in range(self.steps):
r = []
for j in range(6):
r_ = self.r[j, i, :]
r.append(r_)
R.append(r)
A = np.array(A)
R = np.array(R)
forces = []
for a, r, t in zip(A, R, self.time):
L = []
direct = []
shoulder = []
for i, j in self.indexes:
len = np.array(self.B[j] - a[i])
dir = len / np.linalg.norm(len)
L.append(len)
direct_force_try = self.B[j] - r[j]
direct.append(direct_force_try)
shoulder.append(np.cross(r[j], direct_force_try))
L = np.array(L)
T_static = np.array(direct).T
T_dynamics = np.array(shoulder).T
b_static = np.array([-self.m * 9.8, 0, 0]).reshape((3, 1))
dynamic_comp = None
if self.axis == 'x':
comp = self.J[2, 2] * self.prime2_fi_x(t)
dynamic_comp = np.array([comp, 0, 0]).reshape((3, 1))
elif self.axis == 'y':
comp = self.J[1, 1] * self.prime2_fi_y(t)
dynamic_comp = np.array([0, comp, 0]).reshape((3, 1))
elif self.axis == 'z':
comp = self.J[0, 0] * self.prime2_fi_y(t)
dynamic_comp = np.array([0, 0, comp]).reshape((3, 1))
b_dynamic = dynamic_comp
T = T_dynamics[:, :3]
b = b_dynamic[:, :3]
dynamic_f = np.linalg.solve(T, b).reshape((3,))
forces.append(dynamic_f)
print('[INFO] time:', t)
print('[INFO] length:', [round(np.linalg.norm(l), 4) for l in L])
print('[INFO] shoulders:', [round(np.linalg.norm(l), 4) for l in
np.array(shoulder)])
print('[INFO] forces:', [round(f, 4) for f in dynamic_f])
print('[INFO] dynamic component:', b_dynamic.T)
print('****************************************************')
forces = np.array(forces).T
colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):
'b+--', (5): 'bx-'}
for i, j in self.indexes:
pylab.plot(self.time, forces[i], colors[j])
pylab.legend(['$F_1$', '$F_2$', '$F_3$', '$F_4$', '$F_5$', '$F_6$'],
loc=0)
pylab.title('Dynamic forces')
pylab.xlabel('Time [s]')
pylab.ylabel('Force [kg*m/s^2]')
pylab.grid()
plt.show()
def solve_static_forces(self):
"""
Решение обратной задачи стедна для статических нагрузок
:return: компоненты силы для каждой опоры
"""
print('####################################################')
print('[INFO] solve STATIC forces ...')
print('####################################################')
x_symmetry_ind = [[0, 0], [0, 1], [1, 2]]
forces = []
for a, t in zip(self.A.reshape((self.steps, 3, 3)), self.time):
L1 = np.array(a[0] - self.B[0])
L2 = np.array(a[0] - self.B[1])
L3 = np.array(a[1] - self.B[2])
direct_L1 = L1 / np.linalg.norm(L1)
direct_L2 = L2 / np.linalg.norm(L2)
direct_L3 = L3 / np.linalg.norm(L3)
T = np.stack((direct_L1, direct_L2, direct_L3))
b = np.array([-self.m * 9.8 / 2, 0, 0]).reshape((3, 1))
static_f = np.linalg.solve(T, b).reshape((3,)) / 2
forces.append(static_f)
print('[INFO] time:', t)
print('[INFO] length:', round(np.linalg.norm(L1), 4), round(np.
linalg.norm(L2), 4), round(np.linalg.norm(L3), 4))
print('[INFO] forces:', round(static_f[0] / 2, 4), round(
static_f[1] / 2, 4), round(static_f[2] / 2, 4))
print('****************************************************')
forces = np.array(forces).T
colors = {(0): 'r+--', (1): 'g+--', (2): 'b+--'}
for i, j in x_symmetry_ind:
pylab.plot(self.time, forces[j], colors[j])
pylab.legend(['$F_1$', '$F_2$', '$F_3$'], loc=0)
pylab.title('Static forces')
pylab.xlabel('Time [s]')
pylab.ylabel('Force [kg*m/s^2]')
pylab.grid()
plt.show()
    def plot_animate(self, A):
        """
        Experimental: animate the stand mechanism in 3D.

        :param A: sequence of per-step upper-point coordinates; only the
            first entry ``A[0]`` is used here
        :return: None
        """
        fig = plt.figure()
        fig.set_tight_layout(False)
        ax = plt.axes(projection='3d')
        # NOTE(review): module-level globals share state with the nested
        # callbacks - consider closures instead.
        global cnt
        cnt = ax
        global cur_A
        global cur_B
        cur_A = A[0]
        cur_B = self.B[0]

        def steps(count=1):
            # Draw one rod segment (cur_A[i] -> cur_B) per iteration.
            for i in range(count):
                df_A = pd.Series(data=cur_A[i], index=['x', 'y', 'z'])
                df_B = pd.Series(data=cur_B, index=['x', 'y', 'z'])
                x = [df_A.x, df_B.x]
                y = [df_A.y, df_B.y]
                z = [df_A.z, df_B.z]
                cnt.plot(x, y, z)

        def animate(frame):
            # Per-frame callback for FuncAnimation.
            steps(1)
            return cnt
        # Keep a reference so the animation is not garbage-collected.
        anim = animation.FuncAnimation(fig, animate, frames=100)
        plt.show()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Hexapod:
    def __init__(self, axis):
        """
        Initialize the stand parameters: geometry, masses, inertia and the
        prescribed laws of motion about the chosen axis.

        :param axis: rotation axis of the platform ('x', 'y' or 'z')
        """
        self.axis = axis
        # Support geometry: apex angle alpha and tilt beta [deg],
        # rod length L and platform height h_c - units presumably
        # meters; TODO confirm.
        self.alpha = 30.0
        self.beta = 30.0
        self.L = 1.5
        self.h_c = 2.0
        # NOTE(review): self.r is rebound three times in this method
        # (platform radius here, result array below, base radius at the
        # end) - confirm the reuse is intentional.
        self.r = 1.0
        # Masses - m_p presumably the platform, m the payload; confirm
        # against design data.
        self.m_p = 1000.0
        self.m = 4000.0
        # Oscillation frequency of the prescribed motion [Hz].
        self.nu = 0.5
        # Diagonal inertia tensor of the moving assembly.
        self.J = np.array([[5000, 0, 0], [0, 5000, 0], [0, 0, 3500]], np.
            float32)
        # Rest positions of the three upper joints A_i, 120 deg apart at
        # the current self.r, a height h_c below the origin.
        self.A_0 = np.round([[self.r * np.sin(2 * np.pi / 3 * i + np.pi),
            self.r * np.cos(2 * np.pi / 3 * i + np.pi), -self.h_c] for i in
            range(-1, 2)], 5)
        # Result holders, filled later by set_B / get_delta_L / set_r.
        self.B = np.array([])
        self.A = np.array([])
        self.all_full_lengths = np.array([])
        self.r = np.array([])
        # Harmonic rotation law about x: amplitude [deg], the angle and
        # its first and second time derivatives.
        self.fi_x_0 = 4.0
        self.fi_x = lambda t: self.fi_x_0 * np.sin(2 * np.pi * self.nu * t)
        self.prime_fi_x = lambda t: self.fi_x_0 * 2 * np.pi * self.nu * np.cos(
            2 * np.pi * self.nu * t)
        self.prime2_fi_x = lambda t: -self.fi_x_0 * (2 * np.pi * self.nu
            ) ** 2 * np.sin(2 * np.pi * self.nu * t)
        # Same harmonic law about y (also reused for the z axis below).
        self.fi_y_0 = 4.0
        self.fi_y = lambda t: self.fi_y_0 * np.sin(2 * np.pi * self.nu * t)
        self.prime_fi_y = lambda t: self.fi_y_0 * 2 * np.pi * self.nu * np.cos(
            2 * np.pi * self.nu * t)
        self.prime2_fi_y = lambda t: -self.fi_y_0 * (2 * np.pi * self.nu
            ) ** 2 * np.sin(2 * np.pi * self.nu * t)
        # Rotation matrices (angle converted deg -> rad inside).
        # NOTE(review): R_matrix_z draws its angle from fi_y - confirm.
        self.R_matrix_x = lambda t: np.round([[np.cos(self.fi_x(t) * np.pi /
            180.0), -np.sin(self.fi_x(t) * np.pi / 180.0), 0], [np.sin(self
            .fi_x(t) * np.pi / 180.0), np.cos(self.fi_x(t) * np.pi / 180.0),
            0], [0, 0, 1]], 5)
        self.R_matrix_y = lambda t: np.round([[1, 0, 0], [0, np.cos(self.
            fi_y(t) * np.pi / 180.0), -np.sin(self.fi_y(t) * np.pi / 180.0)
            ], [0, np.sin(self.fi_y(t) * np.pi / 180.0), np.cos(self.fi_y(t
            ) * np.pi / 180.0)]], 5)
        self.R_matrix_z = lambda t: np.round([[np.cos(self.fi_y(t) * np.pi /
            180.0), 0, np.sin(self.fi_y(t) * np.pi / 180.0)], [0, 1, 0], [-
            np.sin(self.fi_y(t) * np.pi / 180.0), 0, np.cos(self.fi_y(t) *
            np.pi / 180.0)]], 5)
        # Derived support geometry: height H, horizontal offset h,
        # half-base a and base radius r.
        self.H = np.cos(np.pi / 180.0 * self.beta) * np.cos(np.pi / 180.0 *
            self.alpha / 2) * self.L
        self.h = self.L * np.cos(np.pi / 180.0 * self.alpha / 2) * np.sin(
            np.pi / 180.0 * self.beta)
        self.a = self.L * np.sin(np.pi / 180.0 * self.alpha / 2)
        self.r = (self.h ** 2 + self.a ** 2) ** 0.5
        # Simulation time grid.
        self.end_time = 2.0
        self.start_time = 0.0
        self.steps = 100
        self.time = np.linspace(self.start_time, self.end_time, self.steps)
        # Pairs (upper point index, cylinder index): each A_i carries two
        # cylinders.
        self.indexes = [[0, 0], [0, 1], [1, 2], [1, 3], [2, 4], [2, 5]]
def set_B(self):
"""
Расчет геометрии стенда - положение точек B_i.
Задается: self.B
:return: None
"""
for i, A in enumerate(self.A_0):
a = A[:2]
b1 = np.array([self.h, self.a])
b2 = np.array([self.h, -self.a])
kappa = np.array([[np.cos(np.pi / 180 * (30 - 120 * i)), -np.
sin(np.pi / 180 * (30 - 120 * i))], [np.sin(np.pi / 180 * (
30 - 120 * i)), np.cos(np.pi / 180 * (30 - 120 * i))]])
p1 = np.dot(kappa, b1) + a
p2 = np.dot(kappa, b2) + a
p1 = np.append(p1, -self.H - self.h_c)
p2 = np.append(p2, -self.H - self.h_c)
self.B = np.hstack((self.B, p1))
self.B = np.hstack((self.B, p2))
self.B = self.B.reshape(6, 3)
i = 0
for A in self.A_0:
assert np.linalg.norm(np.subtract(A, self.B[i])) - self.L <= 0.0001
assert np.linalg.norm(np.subtract(A, self.B[i + 1])
) - self.L <= 0.0001
i += 2
def get_delta_L(self):
"""
Расчет геометрии положения точек A_i в каждый момент времени.
Отрисовка графиков изменения длин, скорости и ускорения для каждого привода по времени.
Задается: self.A, self.all_full_lengths, self.set_r
:return: None
"""
print('####################################################')
print('[INFO] solve delta L, Velocity, Acceleration ...')
print('####################################################')
R_matrix = None
if self.axis == 'x':
R_matrix = self.R_matrix_x
elif self.axis == 'y':
R_matrix = self.R_matrix_y
elif self.axis == 'z':
R_matrix = self.R_matrix_z
dL_all = []
L_all = []
coordinates_A = []
colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):
'b+--', (5): 'bx-'}
for i, j in self.indexes:
print('[INFO] Поршень №{}'.format(j + 1))
dl = []
l = []
coord = []
for t in self.time:
try:
A = np.dot(R_matrix(t), self.A_0[i])
except Exception:
print('Type error axis')
L = np.linalg.norm(self.B[j] - A)
print(self.B[j] - A)
print(self.L, L)
print('dL[мм] = {:.5f}'.format((L - self.L) * 1000.0))
l.append(L)
dl.append(round((L - self.L) * 1000.0, 5))
coord.append(A)
dL_all.append(dl)
L_all.append(l)
coordinates_A.append(coord)
v = [0.0]
for k in range(self.steps - 1):
v.append((dl[k + 1] - dl[k]) / (self.time[k + 1] - self.
time[k]))
pylab.figure(1)
pylab.plot(self.time[5:], v[5:], colors[j])
print('[INFO] v_max =', np.max(np.abs(v[5:])))
a = [0.0]
for k in range(self.steps - 1):
a.append((v[k + 1] - v[k]) / (self.time[k + 1] - self.time[k]))
pylab.figure(2)
pylab.plot(self.time[5:], a[5:], colors[j])
print('[INFO] a_max =', np.max(np.abs(a[5:])))
print('****************************************************')
pylab.figure(1)
pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
'6 line'], loc=0)
pylab.title('Velocity')
pylab.xlabel('Time [s]')
pylab.ylabel('Velocity [mm/s]')
pylab.grid()
pylab.figure(2)
pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
'6 line'], loc=0)
pylab.title('Acceleration')
pylab.xlabel('Time [s]')
pylab.ylabel('Acceleration [mm/s^2]')
pylab.grid()
pylab.figure(3)
for i in range(6):
pylab.plot(self.time, dL_all[i], colors[i])
pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
'6 line'], loc=0)
pylab.title('Delta length')
pylab.xlabel('Time [s]')
pylab.ylabel('dL [mm]')
pylab.grid()
plt.show()
self.A = np.array(coordinates_A[0::2])
self.all_full_lengths = np.array(L_all)
self.set_r()
self.plot_3d_lines()
<|reserved_special_token_0|>
def calculate_angles(self, l1, l2):
"""
Решение теоремы косинусов для поиска угла по трем сторонам
:param l1: прилежащая сторона к вычисляемому углу
:param l2: противолежащая сторона к углу
:return: (alpha, teta, gamma) - углы в треугольнике сил
"""
cos_teta = (l1 ** 2 + (2 * self.a) ** 2 - l2 ** 2
) / 2 * l1 * 2 * self.a
teta = np.arccos(cos_teta) * 180.0 / np.pi
b = l1 ** 2 + self.a ** 2 - 2 * l1 * self.a * cos_teta
cos_alpha = (l1 ** 2 + b ** 2 - l2 ** 2) / 2 * l1 * self.a
alpha = np.arccos(cos_alpha) * 180.0 / np.pi
gamma = 180.0 - teta - alpha
return alpha, teta, gamma
def set_r(self):
"""
Вычисление радиус-векторов плеч сил для каждого цилиндра
:return: None
"""
r_all = []
for i, j in self.indexes:
r = []
for a in self.A[i]:
L = np.array(a - self.B[j])
direct_L = L / np.linalg.norm(L)
t1 = np.array([0, -direct_L[2], direct_L[1]])
b1 = np.array([a[2] * direct_L[1] - a[1] * direct_L[2]])
t2 = direct_L
b2 = np.array([0])
t3 = np.array([a[1] * direct_L[2] - a[2] * direct_L[1], -a[
0] * direct_L[2] + a[2] * direct_L[0], a[0] * direct_L[
1] - a[1] * direct_L[0]])
b3 = np.array([0])
T = np.stack((t1, t2, t3))
b = np.stack((b1, b2, b3))
r.append(np.linalg.solve(T, b).reshape((3,)))
r_all.append(r)
self.r = np.array(r_all)
def solve_dynamic_forces(self):
"""
решение обратной задачи стенда
Первое приближение - решение двумерной зазадчи для пооврота оси вокруг оси х
:return: минимальная и максимальная нагрузка на каждый цилиндр
"""
print('####################################################')
print('[INFO] solve DYNAMIC forces ...')
print('####################################################')
A = []
for i in range(self.steps):
a = []
for j in range(3):
a_ = self.A[j, i, :]
a.append(a_)
A.append(a)
R = []
for i in range(self.steps):
r = []
for j in range(6):
r_ = self.r[j, i, :]
r.append(r_)
R.append(r)
A = np.array(A)
R = np.array(R)
forces = []
for a, r, t in zip(A, R, self.time):
L = []
direct = []
shoulder = []
for i, j in self.indexes:
len = np.array(self.B[j] - a[i])
dir = len / np.linalg.norm(len)
L.append(len)
direct_force_try = self.B[j] - r[j]
direct.append(direct_force_try)
shoulder.append(np.cross(r[j], direct_force_try))
L = np.array(L)
T_static = np.array(direct).T
T_dynamics = np.array(shoulder).T
b_static = np.array([-self.m * 9.8, 0, 0]).reshape((3, 1))
dynamic_comp = None
if self.axis == 'x':
comp = self.J[2, 2] * self.prime2_fi_x(t)
dynamic_comp = np.array([comp, 0, 0]).reshape((3, 1))
elif self.axis == 'y':
comp = self.J[1, 1] * self.prime2_fi_y(t)
dynamic_comp = np.array([0, comp, 0]).reshape((3, 1))
elif self.axis == 'z':
comp = self.J[0, 0] * self.prime2_fi_y(t)
dynamic_comp = np.array([0, 0, comp]).reshape((3, 1))
b_dynamic = dynamic_comp
T = T_dynamics[:, :3]
b = b_dynamic[:, :3]
dynamic_f = np.linalg.solve(T, b).reshape((3,))
forces.append(dynamic_f)
print('[INFO] time:', t)
print('[INFO] length:', [round(np.linalg.norm(l), 4) for l in L])
print('[INFO] shoulders:', [round(np.linalg.norm(l), 4) for l in
np.array(shoulder)])
print('[INFO] forces:', [round(f, 4) for f in dynamic_f])
print('[INFO] dynamic component:', b_dynamic.T)
print('****************************************************')
forces = np.array(forces).T
colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):
'b+--', (5): 'bx-'}
for i, j in self.indexes:
pylab.plot(self.time, forces[i], colors[j])
pylab.legend(['$F_1$', '$F_2$', '$F_3$', '$F_4$', '$F_5$', '$F_6$'],
loc=0)
pylab.title('Dynamic forces')
pylab.xlabel('Time [s]')
pylab.ylabel('Force [kg*m/s^2]')
pylab.grid()
plt.show()
def solve_static_forces(self):
"""
Решение обратной задачи стедна для статических нагрузок
:return: компоненты силы для каждой опоры
"""
print('####################################################')
print('[INFO] solve STATIC forces ...')
print('####################################################')
x_symmetry_ind = [[0, 0], [0, 1], [1, 2]]
forces = []
for a, t in zip(self.A.reshape((self.steps, 3, 3)), self.time):
L1 = np.array(a[0] - self.B[0])
L2 = np.array(a[0] - self.B[1])
L3 = np.array(a[1] - self.B[2])
direct_L1 = L1 / np.linalg.norm(L1)
direct_L2 = L2 / np.linalg.norm(L2)
direct_L3 = L3 / np.linalg.norm(L3)
T = np.stack((direct_L1, direct_L2, direct_L3))
b = np.array([-self.m * 9.8 / 2, 0, 0]).reshape((3, 1))
static_f = np.linalg.solve(T, b).reshape((3,)) / 2
forces.append(static_f)
print('[INFO] time:', t)
print('[INFO] length:', round(np.linalg.norm(L1), 4), round(np.
linalg.norm(L2), 4), round(np.linalg.norm(L3), 4))
print('[INFO] forces:', round(static_f[0] / 2, 4), round(
static_f[1] / 2, 4), round(static_f[2] / 2, 4))
print('****************************************************')
forces = np.array(forces).T
colors = {(0): 'r+--', (1): 'g+--', (2): 'b+--'}
for i, j in x_symmetry_ind:
pylab.plot(self.time, forces[j], colors[j])
pylab.legend(['$F_1$', '$F_2$', '$F_3$'], loc=0)
pylab.title('Static forces')
pylab.xlabel('Time [s]')
pylab.ylabel('Force [kg*m/s^2]')
pylab.grid()
plt.show()
    def plot_animate(self, A):
        """
        Experimental: animate the stand mechanism in 3D.

        :param A: sequence of per-step upper-point coordinates; only the
            first entry ``A[0]`` is used here
        :return: None
        """
        fig = plt.figure()
        fig.set_tight_layout(False)
        ax = plt.axes(projection='3d')
        # NOTE(review): module-level globals share state with the nested
        # callbacks - consider closures instead.
        global cnt
        cnt = ax
        global cur_A
        global cur_B
        cur_A = A[0]
        cur_B = self.B[0]

        def steps(count=1):
            # Draw one rod segment (cur_A[i] -> cur_B) per iteration.
            for i in range(count):
                df_A = pd.Series(data=cur_A[i], index=['x', 'y', 'z'])
                df_B = pd.Series(data=cur_B, index=['x', 'y', 'z'])
                x = [df_A.x, df_B.x]
                y = [df_A.y, df_B.y]
                z = [df_A.z, df_B.z]
                cnt.plot(x, y, z)

        def animate(frame):
            # Per-frame callback for FuncAnimation.
            steps(1)
            return cnt
        # Keep a reference so the animation is not garbage-collected.
        anim = animation.FuncAnimation(fig, animate, frames=100)
        plt.show()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Hexapod:
    def __init__(self, axis):
        """
        Initialize the stand parameters: geometry, masses, inertia and the
        prescribed laws of motion about the chosen axis.

        :param axis: rotation axis of the platform ('x', 'y' or 'z')
        """
        self.axis = axis
        # Support geometry: apex angle alpha and tilt beta [deg],
        # rod length L and platform height h_c - units presumably
        # meters; TODO confirm.
        self.alpha = 30.0
        self.beta = 30.0
        self.L = 1.5
        self.h_c = 2.0
        # NOTE(review): self.r is rebound three times in this method
        # (platform radius here, result array below, base radius at the
        # end) - confirm the reuse is intentional.
        self.r = 1.0
        # Masses - m_p presumably the platform, m the payload; confirm
        # against design data.
        self.m_p = 1000.0
        self.m = 4000.0
        # Oscillation frequency of the prescribed motion [Hz].
        self.nu = 0.5
        # Diagonal inertia tensor of the moving assembly.
        self.J = np.array([[5000, 0, 0], [0, 5000, 0], [0, 0, 3500]], np.
            float32)
        # Rest positions of the three upper joints A_i, 120 deg apart at
        # the current self.r, a height h_c below the origin.
        self.A_0 = np.round([[self.r * np.sin(2 * np.pi / 3 * i + np.pi),
            self.r * np.cos(2 * np.pi / 3 * i + np.pi), -self.h_c] for i in
            range(-1, 2)], 5)
        # Result holders, filled later by set_B / get_delta_L / set_r.
        self.B = np.array([])
        self.A = np.array([])
        self.all_full_lengths = np.array([])
        self.r = np.array([])
        # Harmonic rotation law about x: amplitude [deg], the angle and
        # its first and second time derivatives.
        self.fi_x_0 = 4.0
        self.fi_x = lambda t: self.fi_x_0 * np.sin(2 * np.pi * self.nu * t)
        self.prime_fi_x = lambda t: self.fi_x_0 * 2 * np.pi * self.nu * np.cos(
            2 * np.pi * self.nu * t)
        self.prime2_fi_x = lambda t: -self.fi_x_0 * (2 * np.pi * self.nu
            ) ** 2 * np.sin(2 * np.pi * self.nu * t)
        # Same harmonic law about y (also reused for the z axis below).
        self.fi_y_0 = 4.0
        self.fi_y = lambda t: self.fi_y_0 * np.sin(2 * np.pi * self.nu * t)
        self.prime_fi_y = lambda t: self.fi_y_0 * 2 * np.pi * self.nu * np.cos(
            2 * np.pi * self.nu * t)
        self.prime2_fi_y = lambda t: -self.fi_y_0 * (2 * np.pi * self.nu
            ) ** 2 * np.sin(2 * np.pi * self.nu * t)
        # Rotation matrices (angle converted deg -> rad inside).
        # NOTE(review): R_matrix_z draws its angle from fi_y - confirm.
        self.R_matrix_x = lambda t: np.round([[np.cos(self.fi_x(t) * np.pi /
            180.0), -np.sin(self.fi_x(t) * np.pi / 180.0), 0], [np.sin(self
            .fi_x(t) * np.pi / 180.0), np.cos(self.fi_x(t) * np.pi / 180.0),
            0], [0, 0, 1]], 5)
        self.R_matrix_y = lambda t: np.round([[1, 0, 0], [0, np.cos(self.
            fi_y(t) * np.pi / 180.0), -np.sin(self.fi_y(t) * np.pi / 180.0)
            ], [0, np.sin(self.fi_y(t) * np.pi / 180.0), np.cos(self.fi_y(t
            ) * np.pi / 180.0)]], 5)
        self.R_matrix_z = lambda t: np.round([[np.cos(self.fi_y(t) * np.pi /
            180.0), 0, np.sin(self.fi_y(t) * np.pi / 180.0)], [0, 1, 0], [-
            np.sin(self.fi_y(t) * np.pi / 180.0), 0, np.cos(self.fi_y(t) *
            np.pi / 180.0)]], 5)
        # Derived support geometry: height H, horizontal offset h,
        # half-base a and base radius r.
        self.H = np.cos(np.pi / 180.0 * self.beta) * np.cos(np.pi / 180.0 *
            self.alpha / 2) * self.L
        self.h = self.L * np.cos(np.pi / 180.0 * self.alpha / 2) * np.sin(
            np.pi / 180.0 * self.beta)
        self.a = self.L * np.sin(np.pi / 180.0 * self.alpha / 2)
        self.r = (self.h ** 2 + self.a ** 2) ** 0.5
        # Simulation time grid.
        self.end_time = 2.0
        self.start_time = 0.0
        self.steps = 100
        self.time = np.linspace(self.start_time, self.end_time, self.steps)
        # Pairs (upper point index, cylinder index): each A_i carries two
        # cylinders.
        self.indexes = [[0, 0], [0, 1], [1, 2], [1, 3], [2, 4], [2, 5]]
def set_B(self):
"""
Расчет геометрии стенда - положение точек B_i.
Задается: self.B
:return: None
"""
for i, A in enumerate(self.A_0):
a = A[:2]
b1 = np.array([self.h, self.a])
b2 = np.array([self.h, -self.a])
kappa = np.array([[np.cos(np.pi / 180 * (30 - 120 * i)), -np.
sin(np.pi / 180 * (30 - 120 * i))], [np.sin(np.pi / 180 * (
30 - 120 * i)), np.cos(np.pi / 180 * (30 - 120 * i))]])
p1 = np.dot(kappa, b1) + a
p2 = np.dot(kappa, b2) + a
p1 = np.append(p1, -self.H - self.h_c)
p2 = np.append(p2, -self.H - self.h_c)
self.B = np.hstack((self.B, p1))
self.B = np.hstack((self.B, p2))
self.B = self.B.reshape(6, 3)
i = 0
for A in self.A_0:
assert np.linalg.norm(np.subtract(A, self.B[i])) - self.L <= 0.0001
assert np.linalg.norm(np.subtract(A, self.B[i + 1])
) - self.L <= 0.0001
i += 2
def get_delta_L(self):
"""
Расчет геометрии положения точек A_i в каждый момент времени.
Отрисовка графиков изменения длин, скорости и ускорения для каждого привода по времени.
Задается: self.A, self.all_full_lengths, self.set_r
:return: None
"""
print('####################################################')
print('[INFO] solve delta L, Velocity, Acceleration ...')
print('####################################################')
R_matrix = None
if self.axis == 'x':
R_matrix = self.R_matrix_x
elif self.axis == 'y':
R_matrix = self.R_matrix_y
elif self.axis == 'z':
R_matrix = self.R_matrix_z
dL_all = []
L_all = []
coordinates_A = []
colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):
'b+--', (5): 'bx-'}
for i, j in self.indexes:
print('[INFO] Поршень №{}'.format(j + 1))
dl = []
l = []
coord = []
for t in self.time:
try:
A = np.dot(R_matrix(t), self.A_0[i])
except Exception:
print('Type error axis')
L = np.linalg.norm(self.B[j] - A)
print(self.B[j] - A)
print(self.L, L)
print('dL[мм] = {:.5f}'.format((L - self.L) * 1000.0))
l.append(L)
dl.append(round((L - self.L) * 1000.0, 5))
coord.append(A)
dL_all.append(dl)
L_all.append(l)
coordinates_A.append(coord)
v = [0.0]
for k in range(self.steps - 1):
v.append((dl[k + 1] - dl[k]) / (self.time[k + 1] - self.
time[k]))
pylab.figure(1)
pylab.plot(self.time[5:], v[5:], colors[j])
print('[INFO] v_max =', np.max(np.abs(v[5:])))
a = [0.0]
for k in range(self.steps - 1):
a.append((v[k + 1] - v[k]) / (self.time[k + 1] - self.time[k]))
pylab.figure(2)
pylab.plot(self.time[5:], a[5:], colors[j])
print('[INFO] a_max =', np.max(np.abs(a[5:])))
print('****************************************************')
pylab.figure(1)
pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
'6 line'], loc=0)
pylab.title('Velocity')
pylab.xlabel('Time [s]')
pylab.ylabel('Velocity [mm/s]')
pylab.grid()
pylab.figure(2)
pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
'6 line'], loc=0)
pylab.title('Acceleration')
pylab.xlabel('Time [s]')
pylab.ylabel('Acceleration [mm/s^2]')
pylab.grid()
pylab.figure(3)
for i in range(6):
pylab.plot(self.time, dL_all[i], colors[i])
pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
'6 line'], loc=0)
pylab.title('Delta length')
pylab.xlabel('Time [s]')
pylab.ylabel('dL [mm]')
pylab.grid()
plt.show()
self.A = np.array(coordinates_A[0::2])
self.all_full_lengths = np.array(L_all)
self.set_r()
self.plot_3d_lines()
    def plot_3d_lines(self):
        """
        Frame-by-frame rendering of the stand geometry in 3D.

        Scatters the rest positions of A_i/B_j, draws the rods at the first
        and last time steps, intermediate platform outlines, and the closed
        contours of the base and the platform.

        :return: None
        """
        pylab.figure(figsize=(12, 10))
        ax = pylab.axes(projection='3d')
        colors = {(0): 'r', (1): 'orange', (2): 'g', (3): 'olive', (4): 'b',
            (5): 'navy'}
        markers = {(0): '^', (1): '^', (2): 'o', (3): 'o', (4): '*', (5): '*'}
        # Scatter the rest geometry: upper joints A_0 and base points B.
        for i, j in self.indexes:
            df_A = pd.Series(data=self.A_0[i], index=['x', 'y', 'z'])
            df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])
            x = [df_A.x, df_B.x]
            y = [df_A.y, df_B.y]
            z = [df_A.z, df_B.z]
            ax.scatter(x, y, z, c=colors[j], marker=markers[j], s=20.0)
        ax.legend(['1', '2', '3', '4', '5', '6'], loc=0)
        # Draw each rod only at the first and last time step
        # (k % (steps - 1) == 0).
        for i, j in self.indexes:
            k = 0
            for a, r in zip(self.A[i], self.r[j]):
                df_A = pd.Series(data=a, index=['x', 'y', 'z'])
                df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])
                df_r = pd.Series(data=r, index=['x', 'y', 'z'])
                x = [df_A.x, df_B.x]
                y = [df_A.y, df_B.y]
                z = [df_A.z, df_B.z]
                # NOTE(review): x1/y1/z1 and x2/y2/z2 (arm segments) are
                # prepared but never plotted - kept as in the original.
                x1 = [df_r.x, 0]
                y1 = [df_r.y, 0]
                z1 = [df_r.z, 0]
                x2 = [df_r.x, df_B.x]
                y2 = [df_r.y, df_B.y]
                z2 = [df_r.z, df_B.z]
                if k % int(self.steps - 1) == 0:
                    ax.plot(x, y, z, c=colors[j], marker=markers[j])
                k += 1
        # Outline of the moving platform every 9th time step.
        for i in range(0, self.steps, 9):
            a = np.array([self.A[0, i], self.A[1, i], self.A[2, i]])
            df_A = pd.DataFrame(data=a, columns=['x', 'y', 'z'])
            df_A = pd.concat((df_A, df_A.take([0])), axis=0)
            ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='gray')
        # Closed contours of the base (B) and the platform rest position.
        df_B = pd.DataFrame(data=self.B, columns=['x', 'y', 'z'])
        df_B = pd.concat((df_B, df_B.take([0])))
        df_A = pd.DataFrame(data=self.A_0, columns=['x', 'y', 'z'])
        df_A = pd.concat((df_A, df_A.take([0])), axis=0)
        ax.plot(df_B.x.values, df_B.y.values, df_B.z.values, c='black',
            linewidth=4.0)
        ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='black',
            linewidth=4.0)
        ax.view_init(30, -39)
        plt.show()
def calculate_angles(self, l1, l2):
"""
Решение теоремы косинусов для поиска угла по трем сторонам
:param l1: прилежащая сторона к вычисляемому углу
:param l2: противолежащая сторона к углу
:return: (alpha, teta, gamma) - углы в треугольнике сил
"""
cos_teta = (l1 ** 2 + (2 * self.a) ** 2 - l2 ** 2
) / 2 * l1 * 2 * self.a
teta = np.arccos(cos_teta) * 180.0 / np.pi
b = l1 ** 2 + self.a ** 2 - 2 * l1 * self.a * cos_teta
cos_alpha = (l1 ** 2 + b ** 2 - l2 ** 2) / 2 * l1 * self.a
alpha = np.arccos(cos_alpha) * 180.0 / np.pi
gamma = 180.0 - teta - alpha
return alpha, teta, gamma
    def set_r(self):
        """
        Compute the lever-arm radius vectors of the actuator forces for
        every cylinder at every time step.

        For each (upper point A_i, lower point B_j) pair a 3x3 linear
        system is solved; results are stored in ``self.r`` with shape
        (6, steps, 3).
        :return: None
        """
        r_all = []
        for i, j in self.indexes:
            r = []
            for a in self.A[i]:
                # Actuator axis and its unit direction.
                L = np.array(a - self.B[j])
                direct_L = L / np.linalg.norm(L)
                # Rows/rhs of the 3x3 system solved for the lever-arm point.
                t1 = np.array([0, -direct_L[2], direct_L[1]])
                b1 = np.array([a[2] * direct_L[1] - a[1] * direct_L[2]])
                t2 = direct_L
                b2 = np.array([0])
                t3 = np.array([a[1] * direct_L[2] - a[2] * direct_L[1], -a[
                    0] * direct_L[2] + a[2] * direct_L[0], a[0] * direct_L[
                    1] - a[1] * direct_L[0]])
                b3 = np.array([0])
                T = np.stack((t1, t2, t3))
                b = np.stack((b1, b2, b3))
                r.append(np.linalg.solve(T, b).reshape((3,)))
            r_all.append(r)
        self.r = np.array(r_all)
    def solve_dynamic_forces(self):
        """
        Inverse dynamics of the rig (first approximation: planar problem
        for rotation about the chosen axis).

        Per time step, solves a 3x3 linear system relating actuator
        moments (lever arms crossed with force directions) to the
        inertial moment J * phi''(t), prints diagnostics and plots the
        force histories.
        :return: None (plots the forces)
        """
        print('####################################################')
        print('[INFO] solve DYNAMIC forces ...')
        print('####################################################')
        # Regroup self.A (3 x steps x 3) into per-step rows of upper points.
        A = []
        for i in range(self.steps):
            a = []
            for j in range(3):
                a_ = self.A[j, i, :]
                a.append(a_)
            A.append(a)
        # Regroup self.r (6 x steps x 3) into per-step rows of lever arms.
        R = []
        for i in range(self.steps):
            r = []
            for j in range(6):
                r_ = self.r[j, i, :]
                r.append(r_)
            R.append(r)
        A = np.array(A)
        R = np.array(R)
        forces = []
        for a, r, t in zip(A, R, self.time):
            L = []
            direct = []
            shoulder = []
            for i, j in self.indexes:
                # NOTE(review): `len` and `dir` shadow Python builtins here;
                # `dir`, `T_static` and `b_static` are computed but unused.
                len = np.array(self.B[j] - a[i])
                dir = len / np.linalg.norm(len)
                L.append(len)
                direct_force_try = self.B[j] - r[j]
                direct.append(direct_force_try)
                shoulder.append(np.cross(r[j], direct_force_try))
            L = np.array(L)
            T_static = np.array(direct).T
            T_dynamics = np.array(shoulder).T
            b_static = np.array([-self.m * 9.8, 0, 0]).reshape((3, 1))
            # Required inertial moment about the driven axis.
            dynamic_comp = None
            if self.axis == 'x':
                comp = self.J[2, 2] * self.prime2_fi_x(t)
                dynamic_comp = np.array([comp, 0, 0]).reshape((3, 1))
            elif self.axis == 'y':
                comp = self.J[1, 1] * self.prime2_fi_y(t)
                dynamic_comp = np.array([0, comp, 0]).reshape((3, 1))
            elif self.axis == 'z':
                # NOTE(review): reuses prime2_fi_y for the z axis -- there is
                # no separate fi_z law; confirm this is intentional.
                comp = self.J[0, 0] * self.prime2_fi_y(t)
                dynamic_comp = np.array([0, 0, comp]).reshape((3, 1))
            b_dynamic = dynamic_comp
            T = T_dynamics[:, :3]
            b = b_dynamic[:, :3]
            dynamic_f = np.linalg.solve(T, b).reshape((3,))
            forces.append(dynamic_f)
            print('[INFO] time:', t)
            print('[INFO] length:', [round(np.linalg.norm(l), 4) for l in L])
            print('[INFO] shoulders:', [round(np.linalg.norm(l), 4) for l in
                np.array(shoulder)])
            print('[INFO] forces:', [round(f, 4) for f in dynamic_f])
            print('[INFO] dynamic component:', b_dynamic.T)
            print('****************************************************')
        forces = np.array(forces).T
        # Force-vs-time plot for the six actuators.
        colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):
            'b+--', (5): 'bx-'}
        for i, j in self.indexes:
            pylab.plot(self.time, forces[i], colors[j])
        pylab.legend(['$F_1$', '$F_2$', '$F_3$', '$F_4$', '$F_5$', '$F_6$'],
            loc=0)
        pylab.title('Dynamic forces')
        pylab.xlabel('Time [s]')
        pylab.ylabel('Force [kg*m/s^2]')
        pylab.grid()
        plt.show()
    def solve_static_forces(self):
        """
        Inverse statics of the rig: per time step, solve a 3x3 system for
        the support forces under gravity using the symmetric half-model
        (three distinct supports).
        :return: None (prints and plots the force components)
        """
        print('####################################################')
        print('[INFO] solve STATIC forces ...')
        print('####################################################')
        # Only three distinct supports thanks to the x symmetry.
        x_symmetry_ind = [[0, 0], [0, 1], [1, 2]]
        forces = []
        for a, t in zip(self.A.reshape((self.steps, 3, 3)), self.time):
            L1 = np.array(a[0] - self.B[0])
            L2 = np.array(a[0] - self.B[1])
            L3 = np.array(a[1] - self.B[2])
            direct_L1 = L1 / np.linalg.norm(L1)
            direct_L2 = L2 / np.linalg.norm(L2)
            direct_L3 = L3 / np.linalg.norm(L3)
            T = np.stack((direct_L1, direct_L2, direct_L3))
            # Half the weight is carried by the modelled half.
            b = np.array([-self.m * 9.8 / 2, 0, 0]).reshape((3, 1))
            static_f = np.linalg.solve(T, b).reshape((3,)) / 2
            forces.append(static_f)
            print('[INFO] time:', t)
            print('[INFO] length:', round(np.linalg.norm(L1), 4), round(np.
                linalg.norm(L2), 4), round(np.linalg.norm(L3), 4))
            print('[INFO] forces:', round(static_f[0] / 2, 4), round(
                static_f[1] / 2, 4), round(static_f[2] / 2, 4))
            print('****************************************************')
        forces = np.array(forces).T
        # Force-vs-time plot for the three distinct supports.
        colors = {(0): 'r+--', (1): 'g+--', (2): 'b+--'}
        for i, j in x_symmetry_ind:
            pylab.plot(self.time, forces[j], colors[j])
        pylab.legend(['$F_1$', '$F_2$', '$F_3$'], loc=0)
        pylab.title('Static forces')
        pylab.xlabel('Time [s]')
        pylab.ylabel('Force [kg*m/s^2]')
        pylab.grid()
        plt.show()
    def plot_animate(self, A):
        """
        Experimental: animate the mechanism geometry frame by frame.
        :param A: per-frame coordinates of the upper attachment points
        :return: None
        """
        fig = plt.figure()
        fig.set_tight_layout(False)
        ax = plt.axes(projection='3d')
        # NOTE(review): module-level globals share state with the nested
        # callbacks; a closure would avoid polluting the module namespace.
        global cnt
        cnt = ax
        global cur_A
        global cur_B
        cur_A = A[0]
        cur_B = self.B[0]
        def steps(count=1):
            # Draw `count` actuator segments of the current frame.
            for i in range(count):
                df_A = pd.Series(data=cur_A[i], index=['x', 'y', 'z'])
                df_B = pd.Series(data=cur_B, index=['x', 'y', 'z'])
                x = [df_A.x, df_B.x]
                y = [df_A.y, df_B.y]
                z = [df_A.z, df_B.z]
                cnt.plot(x, y, z)
        def animate(frame):
            steps(1)
            return cnt
        anim = animation.FuncAnimation(fig, animate, frames=100)
        plt.show()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Hexapod:
    def __init__(self, axis):
        """
        Initialize the initial parameters of the system.
        :param axis: rotation axis of the system ('x', 'y' or 'z')
        """
        self.axis = axis
        # Geometry: angle between actuators at the attachment point,
        # dihedral angle to the platform, actuator length, CoM height,
        # body radius [deg / m].
        self.alpha = 30.0
        self.beta = 30.0
        self.L = 1.5
        self.h_c = 2.0
        self.r = 1.0
        # Masses [kg] and driving frequency [Hz].
        self.m_p = 1000.0
        self.m = 4000.0
        self.nu = 0.5
        # Inertia tensor of the body for the inverse problem.
        self.J = np.array([[5000, 0, 0], [0, 5000, 0], [0, 0, 3500]], np.
            float32)
        # Initial upper-platform attachment points (uses body radius self.r).
        self.A_0 = np.round([[self.r * np.sin(2 * np.pi / 3 * i + np.pi),
            self.r * np.cos(2 * np.pi / 3 * i + np.pi), -self.h_c] for i in
            range(-1, 2)], 5)
        # Lower attachment points (filled by set_B), per-step upper points,
        # actuator lengths and force lever arms.
        self.B = np.array([])
        self.A = np.array([])
        self.all_full_lengths = np.array([])
        # NOTE(review): self.r is reassigned here (lever arms) and again
        # below (a geometry scalar) after being the body radius above.
        self.r = np.array([])
        # Rotation amplitude [deg], angle law and its derivatives about OX.
        self.fi_x_0 = 4.0
        self.fi_x = lambda t: self.fi_x_0 * np.sin(2 * np.pi * self.nu * t)
        self.prime_fi_x = lambda t: self.fi_x_0 * 2 * np.pi * self.nu * np.cos(
            2 * np.pi * self.nu * t)
        self.prime2_fi_x = lambda t: -self.fi_x_0 * (2 * np.pi * self.nu
            ) ** 2 * np.sin(2 * np.pi * self.nu * t)
        # Same laws about OY (also reused for OZ -- no separate fi_z).
        self.fi_y_0 = 4.0
        self.fi_y = lambda t: self.fi_y_0 * np.sin(2 * np.pi * self.nu * t)
        self.prime_fi_y = lambda t: self.fi_y_0 * 2 * np.pi * self.nu * np.cos(
            2 * np.pi * self.nu * t)
        self.prime2_fi_y = lambda t: -self.fi_y_0 * (2 * np.pi * self.nu
            ) ** 2 * np.sin(2 * np.pi * self.nu * t)
        # Rotation matrices about OX, OY, OZ (angles in degrees).
        self.R_matrix_x = lambda t: np.round([[np.cos(self.fi_x(t) * np.pi /
            180.0), -np.sin(self.fi_x(t) * np.pi / 180.0), 0], [np.sin(self
            .fi_x(t) * np.pi / 180.0), np.cos(self.fi_x(t) * np.pi / 180.0),
            0], [0, 0, 1]], 5)
        self.R_matrix_y = lambda t: np.round([[1, 0, 0], [0, np.cos(self.
            fi_y(t) * np.pi / 180.0), -np.sin(self.fi_y(t) * np.pi / 180.0)
            ], [0, np.sin(self.fi_y(t) * np.pi / 180.0), np.cos(self.fi_y(t
            ) * np.pi / 180.0)]], 5)
        self.R_matrix_z = lambda t: np.round([[np.cos(self.fi_y(t) * np.pi /
            180.0), 0, np.sin(self.fi_y(t) * np.pi / 180.0)], [0, 1, 0], [-
            np.sin(self.fi_y(t) * np.pi / 180.0), 0, np.cos(self.fi_y(t) *
            np.pi / 180.0)]], 5)
        # Derived quantities for constructing the lower points B.
        self.H = np.cos(np.pi / 180.0 * self.beta) * np.cos(np.pi / 180.0 *
            self.alpha / 2) * self.L
        self.h = self.L * np.cos(np.pi / 180.0 * self.alpha / 2) * np.sin(
            np.pi / 180.0 * self.beta)
        self.a = self.L * np.sin(np.pi / 180.0 * self.alpha / 2)
        self.r = (self.h ** 2 + self.a ** 2) ** 0.5
        # Time samples over which the motion laws are evaluated.
        self.end_time = 2.0
        self.start_time = 0.0
        self.steps = 100
        self.time = np.linspace(self.start_time, self.end_time, self.steps)
        # Mapping upper-platform point i -> lower-platform point j.
        self.indexes = [[0, 0], [0, 1], [1, 2], [1, 3], [2, 4], [2, 5]]
    def set_B(self):
        """
        Rig geometry: compute the lower attachment points B_i.
        Sets: self.B (shape (6, 3))
        :return: None
        """
        for i, A in enumerate(self.A_0):
            a = A[:2]
            b1 = np.array([self.h, self.a])
            b2 = np.array([self.h, -self.a])
            # In-plane rotation by (30 - 120*i) degrees.
            kappa = np.array([[np.cos(np.pi / 180 * (30 - 120 * i)), -np.
                sin(np.pi / 180 * (30 - 120 * i))], [np.sin(np.pi / 180 * (
                30 - 120 * i)), np.cos(np.pi / 180 * (30 - 120 * i))]])
            p1 = np.dot(kappa, b1) + a
            p2 = np.dot(kappa, b2) + a
            p1 = np.append(p1, -self.H - self.h_c)
            p2 = np.append(p2, -self.H - self.h_c)
            self.B = np.hstack((self.B, p1))
            self.B = np.hstack((self.B, p2))
        self.B = self.B.reshape(6, 3)
        # Sanity check: every |A_i - B_j| must equal the actuator length L.
        i = 0
        for A in self.A_0:
            assert np.linalg.norm(np.subtract(A, self.B[i])) - self.L <= 0.0001
            assert np.linalg.norm(np.subtract(A, self.B[i + 1])
                ) - self.L <= 0.0001
            i += 2
    def get_delta_L(self):
        """
        Compute the positions of points A_i at every time step and plot
        length change, velocity and acceleration for each actuator.
        Sets: self.A, self.all_full_lengths; calls self.set_r and
        self.plot_3d_lines.
        :return: None
        """
        print('####################################################')
        print('[INFO] solve delta L, Velocity, Acceleration ...')
        print('####################################################')
        # Rotation matrix about the requested axis.
        R_matrix = None
        if self.axis == 'x':
            R_matrix = self.R_matrix_x
        elif self.axis == 'y':
            R_matrix = self.R_matrix_y
        elif self.axis == 'z':
            R_matrix = self.R_matrix_z
        # Elongations, full lengths and upper-point coordinates over time.
        dL_all = []
        L_all = []
        coordinates_A = []
        colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):
            'b+--', (5): 'bx-'}
        for i, j in self.indexes:
            print('[INFO] Поршень №{}'.format(j + 1))  # Russian: "Piston #"
            dl = []
            l = []
            coord = []
            for t in self.time:
                try:
                    A = np.dot(R_matrix(t), self.A_0[i])
                except Exception:
                    # R_matrix is None for an unknown axis value.
                    print('Type error axis')
                # Current actuator length and its change vs rest length.
                L = np.linalg.norm(self.B[j] - A)
                print(self.B[j] - A)
                print(self.L, L)
                print('dL[мм] = {:.5f}'.format((L - self.L) * 1000.0))
                l.append(L)
                dl.append(round((L - self.L) * 1000.0, 5))
                coord.append(A)
            dL_all.append(dl)
            L_all.append(l)
            coordinates_A.append(coord)
            # Velocity of length change by finite differences.
            v = [0.0]
            for k in range(self.steps - 1):
                v.append((dl[k + 1] - dl[k]) / (self.time[k + 1] - self.
                    time[k]))
            pylab.figure(1)
            pylab.plot(self.time[5:], v[5:], colors[j])
            print('[INFO] v_max =', np.max(np.abs(v[5:])))
            # Acceleration of length change by finite differences.
            a = [0.0]
            for k in range(self.steps - 1):
                a.append((v[k + 1] - v[k]) / (self.time[k + 1] - self.time[k]))
            pylab.figure(2)
            pylab.plot(self.time[5:], a[5:], colors[j])
            print('[INFO] a_max =', np.max(np.abs(a[5:])))
            print('****************************************************')
        pylab.figure(1)
        pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
            '6 line'], loc=0)
        pylab.title('Velocity')
        pylab.xlabel('Time [s]')
        pylab.ylabel('Velocity [mm/s]')
        pylab.grid()
        pylab.figure(2)
        pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
            '6 line'], loc=0)
        pylab.title('Acceleration')
        pylab.xlabel('Time [s]')
        pylab.ylabel('Acceleration [mm/s^2]')
        pylab.grid()
        pylab.figure(3)
        for i in range(6):
            pylab.plot(self.time, dL_all[i], colors[i])
        pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',
            '6 line'], loc=0)
        pylab.title('Delta length')
        pylab.xlabel('Time [s]')
        pylab.ylabel('dL [mm]')
        pylab.grid()
        plt.show()
        # Keep one entry per upper point (every pair shares coordinates).
        self.A = np.array(coordinates_A[0::2])
        self.all_full_lengths = np.array(L_all)
        self.set_r()
        self.plot_3d_lines()
    def plot_3d_lines(self):
        """
        Frame-by-frame rendering of the rig geometry in 3D.

        Scatters initial attachment points, draws each actuator on
        selected frames, the upper-platform outline every 9th step, and
        the initial outlines of both platforms.
        :return: None
        """
        pylab.figure(figsize=(12, 10))
        ax = pylab.axes(projection='3d')
        # Per-actuator colors/markers keyed by the lower-platform index j.
        colors = {(0): 'r', (1): 'orange', (2): 'g', (3): 'olive', (4): 'b',
            (5): 'navy'}
        markers = {(0): '^', (1): '^', (2): 'o', (3): 'o', (4): '*', (5): '*'}
        # Scatter the initial attachment points (also seeds the legend).
        for i, j in self.indexes:
            df_A = pd.Series(data=self.A_0[i], index=['x', 'y', 'z'])
            df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])
            x = [df_A.x, df_B.x]
            y = [df_A.y, df_B.y]
            z = [df_A.z, df_B.z]
            ax.scatter(x, y, z, c=colors[j], marker=markers[j], s=20.0)
        ax.legend(['1', '2', '3', '4', '5', '6'], loc=0)
        # Draw each actuator segment only on frames where k % (steps-1) == 0.
        for i, j in self.indexes:
            k = 0
            for a, r in zip(self.A[i], self.r[j]):
                df_A = pd.Series(data=a, index=['x', 'y', 'z'])
                df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])
                df_r = pd.Series(data=r, index=['x', 'y', 'z'])
                x = [df_A.x, df_B.x]
                y = [df_A.y, df_B.y]
                z = [df_A.z, df_B.z]
                # Lever-arm segment endpoints (computed but not plotted).
                x1 = [df_r.x, 0]
                y1 = [df_r.y, 0]
                z1 = [df_r.z, 0]
                x2 = [df_r.x, df_B.x]
                y2 = [df_r.y, df_B.y]
                z2 = [df_r.z, df_B.z]
                if k % int(self.steps - 1) == 0:
                    ax.plot(x, y, z, c=colors[j], marker=markers[j])
                k += 1
        # Upper-platform outline every 9th time step.
        for i in range(0, self.steps, 9):
            a = np.array([self.A[0, i], self.A[1, i], self.A[2, i]])
            df_A = pd.DataFrame(data=a, columns=['x', 'y', 'z'])
            df_A = pd.concat((df_A, df_A.take([0])), axis=0)
            ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='gray')
        # Initial outlines of the lower (B) and upper (A_0) platforms.
        df_B = pd.DataFrame(data=self.B, columns=['x', 'y', 'z'])
        df_B = pd.concat((df_B, df_B.take([0])))
        df_A = pd.DataFrame(data=self.A_0, columns=['x', 'y', 'z'])
        df_A = pd.concat((df_A, df_A.take([0])), axis=0)
        ax.plot(df_B.x.values, df_B.y.values, df_B.z.values, c='black',
            linewidth=4.0)
        ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='black',
            linewidth=4.0)
        ax.view_init(30, -39)
        plt.show()
def calculate_angles(self, l1, l2):
"""
Решение теоремы косинусов для поиска угла по трем сторонам
:param l1: прилежащая сторона к вычисляемому углу
:param l2: противолежащая сторона к углу
:return: (alpha, teta, gamma) - углы в треугольнике сил
"""
cos_teta = (l1 ** 2 + (2 * self.a) ** 2 - l2 ** 2
) / 2 * l1 * 2 * self.a
teta = np.arccos(cos_teta) * 180.0 / np.pi
b = l1 ** 2 + self.a ** 2 - 2 * l1 * self.a * cos_teta
cos_alpha = (l1 ** 2 + b ** 2 - l2 ** 2) / 2 * l1 * self.a
alpha = np.arccos(cos_alpha) * 180.0 / np.pi
gamma = 180.0 - teta - alpha
return alpha, teta, gamma
def set_r(self):
"""
Вычисление радиус-векторов плеч сил для каждого цилиндра
:return: None
"""
r_all = []
for i, j in self.indexes:
r = []
for a in self.A[i]:
L = np.array(a - self.B[j])
direct_L = L / np.linalg.norm(L)
t1 = np.array([0, -direct_L[2], direct_L[1]])
b1 = np.array([a[2] * direct_L[1] - a[1] * direct_L[2]])
t2 = direct_L
b2 = np.array([0])
t3 = np.array([a[1] * direct_L[2] - a[2] * direct_L[1], -a[
0] * direct_L[2] + a[2] * direct_L[0], a[0] * direct_L[
1] - a[1] * direct_L[0]])
b3 = np.array([0])
T = np.stack((t1, t2, t3))
b = np.stack((b1, b2, b3))
r.append(np.linalg.solve(T, b).reshape((3,)))
r_all.append(r)
self.r = np.array(r_all)
def solve_dynamic_forces(self):
"""
решение обратной задачи стенда
Первое приближение - решение двумерной зазадчи для пооврота оси вокруг оси х
:return: минимальная и максимальная нагрузка на каждый цилиндр
"""
print('####################################################')
print('[INFO] solve DYNAMIC forces ...')
print('####################################################')
A = []
for i in range(self.steps):
a = []
for j in range(3):
a_ = self.A[j, i, :]
a.append(a_)
A.append(a)
R = []
for i in range(self.steps):
r = []
for j in range(6):
r_ = self.r[j, i, :]
r.append(r_)
R.append(r)
A = np.array(A)
R = np.array(R)
forces = []
for a, r, t in zip(A, R, self.time):
L = []
direct = []
shoulder = []
for i, j in self.indexes:
len = np.array(self.B[j] - a[i])
dir = len / np.linalg.norm(len)
L.append(len)
direct_force_try = self.B[j] - r[j]
direct.append(direct_force_try)
shoulder.append(np.cross(r[j], direct_force_try))
L = np.array(L)
T_static = np.array(direct).T
T_dynamics = np.array(shoulder).T
b_static = np.array([-self.m * 9.8, 0, 0]).reshape((3, 1))
dynamic_comp = None
if self.axis == 'x':
comp = self.J[2, 2] * self.prime2_fi_x(t)
dynamic_comp = np.array([comp, 0, 0]).reshape((3, 1))
elif self.axis == 'y':
comp = self.J[1, 1] * self.prime2_fi_y(t)
dynamic_comp = np.array([0, comp, 0]).reshape((3, 1))
elif self.axis == 'z':
comp = self.J[0, 0] * self.prime2_fi_y(t)
dynamic_comp = np.array([0, 0, comp]).reshape((3, 1))
b_dynamic = dynamic_comp
T = T_dynamics[:, :3]
b = b_dynamic[:, :3]
dynamic_f = np.linalg.solve(T, b).reshape((3,))
forces.append(dynamic_f)
print('[INFO] time:', t)
print('[INFO] length:', [round(np.linalg.norm(l), 4) for l in L])
print('[INFO] shoulders:', [round(np.linalg.norm(l), 4) for l in
np.array(shoulder)])
print('[INFO] forces:', [round(f, 4) for f in dynamic_f])
print('[INFO] dynamic component:', b_dynamic.T)
print('****************************************************')
forces = np.array(forces).T
colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):
'b+--', (5): 'bx-'}
for i, j in self.indexes:
pylab.plot(self.time, forces[i], colors[j])
pylab.legend(['$F_1$', '$F_2$', '$F_3$', '$F_4$', '$F_5$', '$F_6$'],
loc=0)
pylab.title('Dynamic forces')
pylab.xlabel('Time [s]')
pylab.ylabel('Force [kg*m/s^2]')
pylab.grid()
plt.show()
    def solve_static_forces(self):
        """
        Inverse statics of the rig: per time step, solve a 3x3 system for
        the support forces under gravity using the symmetric half-model
        (three distinct supports).
        :return: None (prints and plots the force components)
        """
        print('####################################################')
        print('[INFO] solve STATIC forces ...')
        print('####################################################')
        # Only three distinct supports thanks to the x symmetry.
        x_symmetry_ind = [[0, 0], [0, 1], [1, 2]]
        forces = []
        for a, t in zip(self.A.reshape((self.steps, 3, 3)), self.time):
            L1 = np.array(a[0] - self.B[0])
            L2 = np.array(a[0] - self.B[1])
            L3 = np.array(a[1] - self.B[2])
            direct_L1 = L1 / np.linalg.norm(L1)
            direct_L2 = L2 / np.linalg.norm(L2)
            direct_L3 = L3 / np.linalg.norm(L3)
            T = np.stack((direct_L1, direct_L2, direct_L3))
            # Half the weight is carried by the modelled half.
            b = np.array([-self.m * 9.8 / 2, 0, 0]).reshape((3, 1))
            static_f = np.linalg.solve(T, b).reshape((3,)) / 2
            forces.append(static_f)
            print('[INFO] time:', t)
            print('[INFO] length:', round(np.linalg.norm(L1), 4), round(np.
                linalg.norm(L2), 4), round(np.linalg.norm(L3), 4))
            print('[INFO] forces:', round(static_f[0] / 2, 4), round(
                static_f[1] / 2, 4), round(static_f[2] / 2, 4))
            print('****************************************************')
        forces = np.array(forces).T
        # Force-vs-time plot for the three distinct supports.
        colors = {(0): 'r+--', (1): 'g+--', (2): 'b+--'}
        for i, j in x_symmetry_ind:
            pylab.plot(self.time, forces[j], colors[j])
        pylab.legend(['$F_1$', '$F_2$', '$F_3$'], loc=0)
        pylab.title('Static forces')
        pylab.xlabel('Time [s]')
        pylab.ylabel('Force [kg*m/s^2]')
        pylab.grid()
        plt.show()
    def plot_animate(self, A):
        """
        Experimental: animate the mechanism geometry frame by frame.
        :param A: per-frame coordinates of the upper attachment points
        :return: None
        """
        fig = plt.figure()
        fig.set_tight_layout(False)
        ax = plt.axes(projection='3d')
        # NOTE(review): module-level globals share state with the nested
        # callbacks; a closure would avoid polluting the module namespace.
        global cnt
        cnt = ax
        global cur_A
        global cur_B
        cur_A = A[0]
        cur_B = self.B[0]
        def steps(count=1):
            # Draw `count` actuator segments of the current frame.
            for i in range(count):
                df_A = pd.Series(data=cur_A[i], index=['x', 'y', 'z'])
                df_B = pd.Series(data=cur_B, index=['x', 'y', 'z'])
                x = [df_A.x, df_B.x]
                y = [df_A.y, df_B.y]
                z = [df_A.z, df_B.z]
                cnt.plot(x, y, z)
        def animate(frame):
            steps(1)
            return cnt
        anim = animation.FuncAnimation(fig, animate, frames=100)
        plt.show()
if __name__ == '__main__':
    # Build the rig rotating about OY, derive the geometry, then solve
    # the static and dynamic inverse problems.
    # NOTE(review): `hex` shadows the builtin hex(); harmless in a script.
    hex = Hexapod(axis='y')
    hex.set_B()
    hex.get_delta_L()
    hex.solve_static_forces()
    hex.solve_dynamic_forces()
<|reserved_special_token_1|>
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
import matplotlib.animation as animation
import pylab
from mpl_toolkits import mplot3d
from mpl_toolkits.mplot3d import Axes3D
class Hexapod:
    def __init__(self, axis):
        """
        Initialize the initial parameters of the system.
        :param axis: rotation axis of the system ('x', 'y' or 'z')
        """
        self.axis = axis  # rotation axis of the body
        self.alpha = 30.  # angle between actuators at the attachment point
        self.beta = 30.  # dihedral angle between actuators and the platform
        self.L = 1.5  # actuator length
        self.h_c = 2.  # height of the body's center of mass
        self.r = 1.  # body radius
        self.m_p = 1000.  # platform mass
        self.m = 4000.  # body mass
        self.nu = 0.5  # frequency
        # inertia tensor of the body for the inverse problem
        self.J = np.array([[5000, 0, 0],
                           [0, 5000, 0],
                           [0, 0, 3500]], np.float32)
        # initial attachment points of the actuators on the UPPER platform
        self.A_0 = np.round([[self.r*np.sin(2*np.pi/3*i + np.pi),
                              self.r*np.cos(2*np.pi/3*i + np.pi),
                              -self.h_c] for i in range(-1, 2)], 5)
        # actuator attachment points on the LOWER platform (const)
        self.B = np.array([])
        # positions of the upper attachment points over all time
        self.A = np.array([])
        # length of every actuator over all time
        self.all_full_lengths = np.array([])
        # lever arms of the actuator forces over all time
        # NOTE(review): self.r is reused for the body radius above and for
        # a geometry scalar below
        self.r = np.array([])
        # rotation amplitude, angle law and its derivatives about OX
        self.fi_x_0 = 4.  # degrees
        self.fi_x = lambda t: self.fi_x_0 * np.sin(2*np.pi*self.nu*t)
        self.prime_fi_x = lambda t: self.fi_x_0 * 2*np.pi*self.nu * np.cos(2*np.pi*self.nu*t)
        self.prime2_fi_x = lambda t: -self.fi_x_0 * (2*np.pi*self.nu)**2 * np.sin(2*np.pi*self.nu*t)
        # rotation amplitude, angle law and its derivatives about OY and OZ
        self.fi_y_0 = 4.  # degrees
        self.fi_y = lambda t: self.fi_y_0 * np.sin(2*np.pi*self.nu*t)
        self.prime_fi_y = lambda t: self.fi_y_0 * 2*np.pi*self.nu * np.cos(2*np.pi*self.nu*t)
        self.prime2_fi_y = lambda t: -self.fi_y_0 * (2*np.pi*self.nu)**2 * np.sin(2*np.pi*self.nu*t)
        # rotation matrix about the OX axis
        self.R_matrix_x = lambda t: np.round([[np.cos(self.fi_x(t)*np.pi/180.), -np.sin(self.fi_x(t)*np.pi/180.), 0],
                                              [np.sin(self.fi_x(t)*np.pi/180.), np.cos(self.fi_x(t)*np.pi/180.), 0],
                                              [0, 0, 1]], 5)
        # rotation matrix about the OY axis
        self.R_matrix_y = lambda t: np.round([[1, 0, 0],
                                              [0, np.cos(self.fi_y(t)*np.pi/180.), -np.sin(self.fi_y(t)*np.pi/180.)],
                                              [0, np.sin(self.fi_y(t)*np.pi/180.), np.cos(self.fi_y(t)*np.pi/180.)]], 5)
        # rotation matrix about the OZ axis (reuses the OY angle law)
        self.R_matrix_z = lambda t: np.round([[np.cos(self.fi_y(t)*np.pi/180.), 0, np.sin(self.fi_y(t)*np.pi/180.)],
                                              [0, 1, 0],
                                              [-np.sin(self.fi_y(t)*np.pi/180.), 0, np.cos(self.fi_y(t)*np.pi/180.)]], 5)
        # derived values for constructing the geometry of points B
        self.H = np.cos(np.pi/180. * self.beta) * np.cos(np.pi/180. * self.alpha/2) * self.L
        self.h = self.L * np.cos(np.pi/180.*self.alpha/2) * np.sin(np.pi/180.*self.beta)
        self.a = self.L * np.sin(np.pi/180.*self.alpha/2)  # triangle base
        self.r = (self.h**2 + self.a**2)**0.5
        # time samples over which the motion laws are evaluated
        self.end_time = 2.0
        self.start_time = 0.
        self.steps = 100
        self.time = np.linspace(self.start_time, self.end_time, self.steps)
        # mapping between lower- and upper-platform indexes
        self.indexes = [[0, 0], [0, 1], [1, 2], [1, 3], [2, 4], [2, 5]]
    def set_B(self):
        """
        Rig geometry: compute the lower attachment points B_i.
        Sets: self.B (shape (6, 3))
        :return: None
        """
        for i, A in enumerate(self.A_0):
            a = A[:2]
            b1 = np.array([self.h, self.a])
            b2 = np.array([self.h, - self.a])
            # in-plane rotation by (30 - 120*i) degrees
            kappa = np.array([[np.cos(np.pi / 180 * (30-120*i)), -np.sin(np.pi / 180 * (30-120*i))],
                              [np.sin(np.pi / 180 * (30-120*i)), np.cos(np.pi / 180 * (30-120*i))]])
            p1 = np.dot(kappa, b1) + a
            p2 = np.dot(kappa, b2) + a
            p1 = np.append(p1, - self.H - self.h_c)
            p2 = np.append(p2, - self.H - self.h_c)
            self.B = np.hstack((self.B, p1))
            self.B = np.hstack((self.B, p2))
        self.B = self.B.reshape(6, 3)
        # sanity check: every |A_i - B_j| must equal the actuator length
        i = 0
        for A in self.A_0:
            assert np.linalg.norm(np.subtract(A, self.B[i])) - self.L <= 1e-4
            assert np.linalg.norm(np.subtract(A, self.B[i + 1])) - self.L <= 1e-4
            # print(np.linalg.norm(np.subtract(A, self.B[i])))
            # print(np.linalg.norm(np.subtract(A, self.B[i + 1])))
            i += 2
    def get_delta_L(self):
        """
        Compute the positions of points A_i at every time step and plot
        length change, velocity and acceleration for each actuator.
        Sets: self.A, self.all_full_lengths; calls self.set_r and
        self.plot_3d_lines.
        :return: None
        """
        print('####################################################')
        print('[INFO] solve delta L, Velocity, Acceleration ...')
        print('####################################################')
        # rotation matrix about the requested axis
        R_matrix = None
        if self.axis == 'x':
            R_matrix = self.R_matrix_x
        elif self.axis == 'y':
            R_matrix = self.R_matrix_y
        elif self.axis == 'z':
            R_matrix = self.R_matrix_z
        # elongations of every cylinder over the given time
        dL_all = []
        # lengths of all cylinders over all time
        L_all = []
        # coordinates of the attachment points on the UPPER platform
        coordinates_A = []
        # legend styles for the plots
        colors = {0: 'r+--', 1: 'rx-',
                  2: 'g+--', 3: 'gx-',
                  4: 'b+--', 5: 'bx-'}
        for i, j in self.indexes:
            print('[INFO] Поршень №{}'.format(j+1))  # Russian: "Piston #"
            dl = []  # piston length change at time t
            l = []  # piston length at time t
            coord = []  # coordinate of point A_i at time t
            for t in self.time:
                try:
                    A = np.dot(R_matrix(t), self.A_0[i])
                except Exception:
                    # R_matrix is None for an unknown axis value
                    print('Type error axis')
                # current actuator length
                L = np.linalg.norm(self.B[j] - A)
                print(self.B[j] - A)
                print(self.L, L)
                # L = np.sum((A - self.B[j])**2)**0.5
                print('dL[мм] = {:.5f}'.format((L - self.L) * 1e3))
                l.append(L)
                dl.append(round(((L - self.L) * 1e3), 5))
                coord.append(A)
            dL_all.append(dl)
            L_all.append(l)
            coordinates_A.append(coord)
            # numerically estimate the VELOCITY of actuator length change
            v = [0.0]
            for k in range(self.steps - 1):
                v.append((dl[k+1] - dl[k]) / (self.time[k+1] - self.time[k]))
            pylab.figure(1)
            pylab.plot(self.time[5:], v[5:], colors[j])
            print('[INFO] v_max =', np.max(np.abs(v[5:])))
            # numerically estimate the ACCELERATION of length change
            a = [0.0]
            for k in range(self.steps - 1):
                a.append((v[k + 1] - v[k]) / (self.time[k + 1] - self.time[k]))
            pylab.figure(2)
            pylab.plot(self.time[5:], a[5:], colors[j])
            print('[INFO] a_max =', np.max(np.abs(a[5:])))
            print('****************************************************')
        # legend for the velocity plot
        pylab.figure(1)
        pylab.legend([r'1 line', '2 line', '3 line', '4 line', '5 line', '6 line'], loc=0)
        pylab.title('Velocity')
        pylab.xlabel('Time [s]')
        pylab.ylabel('Velocity [mm/s]')
        pylab.grid()
        # plt.savefig("output/velocity_{}.png".format(self.axis))
        # legend for the acceleration plot
        pylab.figure(2)
        pylab.legend([r'1 line', '2 line', '3 line', '4 line', '5 line', '6 line'], loc=0)
        pylab.title('Acceleration')
        pylab.xlabel('Time [s]')
        pylab.ylabel('Acceleration [mm/s^2]')
        pylab.grid()
        # pylab.savefig("output/acceleration_{}.png".format(self.axis))
        # elongation plot for every piston
        pylab.figure(3)
        for i in range(6):
            pylab.plot(self.time, dL_all[i], colors[i])
        pylab.legend([r'1 line', '2 line', '3 line', '4 line', '5 line', '6 line'], loc=0)
        pylab.title('Delta length')
        pylab.xlabel('Time [s]')
        pylab.ylabel('dL [mm]')
        pylab.grid()
        # pylab.savefig("output/length_{}.png".format(self.axis))
        plt.show()
        # drop duplicated vertices (each pair of actuators shares a point)
        self.A = np.array(coordinates_A[0::2])
        self.all_full_lengths = np.array(L_all)
        self.set_r()
        # frame-by-frame rendering of the rig geometry
        self.plot_3d_lines()
        # self.plot_animate(coordinates_A)
    def plot_3d_lines(self):
        """
        Frame-by-frame rendering of the rig geometry in 3D.
        :return: None
        """
        pylab.figure(figsize=(12, 10))
        ax = pylab.axes(projection='3d')
        colors = {0: 'r', 1: 'orange',
                  2: 'g', 3: 'olive',
                  4: 'b', 5: 'navy'}
        markers = {0: '^', 1: '^',
                   2: 'o', 3: 'o',
                   4: '*', 5: '*'}
        # seed the legend with the initial attachment points
        for i, j in self.indexes:
            df_A = pd.Series(data=self.A_0[i], index=['x', 'y', 'z'])
            df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])
            x = [df_A.x, df_B.x]
            y = [df_A.y, df_B.y]
            z = [df_A.z, df_B.z]
            ax.scatter(x, y, z, c=colors[j], marker=markers[j], s=20.)
        ax.legend([r'1', '2', '3', '4', '5', '6'], loc=0)
        # indexes = [[0, 0], [1, 2], [2, 4]]
        # draw the displacement of every piston
        for i, j in self.indexes:
            k = 0
            for (a, r) in zip(self.A[i], self.r[j]):
                df_A = pd.Series(data=a, index=['x', 'y', 'z'])
                df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])
                df_r = pd.Series(data=r, index=['x', 'y', 'z'])
                # cylinder length geometry
                x = [df_A.x, df_B.x]
                y = [df_A.y, df_B.y]
                z = [df_A.z, df_B.z]
                # force lever-arm geometry
                x1 = [df_r.x, 0]
                y1 = [df_r.y, 0]
                z1 = [df_r.z, 0]
                # extension of the cylinder axis
                x2 = [df_r.x, df_B.x]
                y2 = [df_r.y, df_B.y]
                z2 = [df_r.z, df_B.z]
                # partial frame selection
                if k % int(self.steps-1) == 0:
                    # if k:
                    # ax.plot(x1, y1, z1, c=colors[j], marker=markers[j])
                    # ax.plot(x2, y2, z2, c='gray', marker='+')
                    ax.plot(x, y, z, c=colors[j], marker=markers[j])
                    # print('H_A =', z[0])
                k += 1
        # draw the displacement of the upper platform
        for i in range(0, self.steps, 9):
            a = np.array([self.A[0, i], self.A[1, i], self.A[2, i]])
            df_A = pd.DataFrame(data=a, columns=['x', 'y', 'z'])
            df_A = pd.concat((df_A, df_A.take([0])), axis=0)
            ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='gray')
        # draw the initial outlines of the upper and lower platforms
        df_B = pd.DataFrame(data=self.B, columns=['x', 'y', 'z'])
        df_B = pd.concat((df_B, df_B.take([0])))
        df_A = pd.DataFrame(data=self.A_0, columns=['x', 'y', 'z'])
        df_A = pd.concat((df_A, df_A.take([0])), axis=0)
        ax.plot(df_B.x.values, df_B.y.values, df_B.z.values, c='black', linewidth=4.)
        ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='black', linewidth=4.)
        ax.view_init(30, -39)
        # pylab.savefig("output/plot_3d_{}.png".format(self.axis))
        plt.show()
def calculate_angles(self, l1, l2):
"""
Решение теоремы косинусов для поиска угла по трем сторонам
:param l1: прилежащая сторона к вычисляемому углу
:param l2: противолежащая сторона к углу
:return: (alpha, teta, gamma) - углы в треугольнике сил
"""
cos_teta = (l1**2 + (2*self.a)**2 - l2**2) / 2*l1*2*self.a
teta = np.arccos(cos_teta) * 180. / np.pi
b = l1**2 + self.a**2 - 2*l1*self.a*cos_teta
cos_alpha = (l1**2 + b**2 - l2**2) / 2*l1*self.a
alpha = np.arccos(cos_alpha) * 180. / np.pi
gamma = 180. - teta - alpha
return alpha, teta, gamma
    def set_r(self):
        """
        Compute the lever-arm radius vectors of the actuator forces for
        every cylinder at every time step; stored in ``self.r``.
        :return: None
        """
        r_all = []
        for i, j in self.indexes:
            r = []
            for a in self.A[i]:
                # actuator axis and its unit direction
                L = np.array(a - self.B[j])
                direct_L = L / np.linalg.norm(L)
                # rows/rhs of the 3x3 system solved for the lever-arm point
                t1 = np.array([0, -direct_L[2], direct_L[1]])
                b1 = np.array([a[2]*direct_L[1] - a[1]*direct_L[2]])
                t2 = direct_L
                b2 = np.array([0])
                t3 = np.array([a[1]*direct_L[2] - a[2]*direct_L[1],
                               -a[0]*direct_L[2] + a[2]*direct_L[0],
                               a[0]*direct_L[1] - a[1]*direct_L[0]])
                b3 = np.array([0])
                T = np.stack((t1, t2, t3))
                b = np.stack((b1, b2, b3))
                r.append(np.linalg.solve(T, b).reshape((3,)))
            r_all.append(r)
        self.r = np.array(r_all)
    def solve_dynamic_forces(self):
        """
        Inverse dynamics of the rig (first approximation: planar problem
        for rotation about the chosen axis).
        Per time step, solves a 3x3 system relating actuator moments to
        the inertial moment J * phi''(t); prints diagnostics and plots
        the force histories.
        :return: None (plots the forces)
        """
        print('####################################################')
        print('[INFO] solve DYNAMIC forces ...')
        print('####################################################')
        # regroup self.A (3 x steps x 3) into per-step rows of upper points
        A = []
        for i in range(self.steps):
            a = []
            for j in range(3):
                a_ = self.A[j, i, :]
                a.append(a_)
            A.append(a)
        # regroup self.r (6 x steps x 3) into per-step rows of lever arms
        R = []
        for i in range(self.steps):
            r = []
            for j in range(6):
                r_ = self.r[j, i, :]
                r.append(r_)
            R.append(r)
        A = np.array(A)
        R = np.array(R)
        forces = []
        for a, r, t in zip(A, R, self.time):
            L = []
            direct = []  # force directions
            shoulder = []  # force lever arms
            for i, j in self.indexes:
                # NOTE(review): `len`/`dir` shadow builtins; `dir` is unused
                len = np.array(self.B[j] - a[i])
                dir = len / np.linalg.norm(len)
                L.append(len)
                direct_force_try = self.B[j] - r[j]
                # direct.append(dir)
                direct.append(direct_force_try)
                # direct.append(direct_force_try)
                shoulder.append(np.cross(r[j], direct_force_try))
            L = np.array(L)
            T_static = np.array(direct).T
            T_dynamics = np.array(shoulder).T
            b_static = np.array([-self.m*9.8, 0, 0]).reshape((3, 1))
            # required inertial moment about the driven axis
            dynamic_comp = None
            if self.axis == 'x':
                comp = self.J[2, 2] * self.prime2_fi_x(t)
                dynamic_comp = np.array([comp, 0, 0]).reshape((3, 1))
            elif self.axis == 'y':
                comp = self.J[1, 1] * self.prime2_fi_y(t)
                dynamic_comp = np.array([0, comp, 0]).reshape((3, 1))
            elif self.axis == 'z':
                # NOTE(review): reuses prime2_fi_y -- no separate fi_z law
                comp = self.J[0, 0] * self.prime2_fi_y(t)
                dynamic_comp = np.array([0, 0, comp]).reshape((3, 1))
            b_dynamic = dynamic_comp
            # T = np.vstack((T_static, T_dynamics))
            # b = np.vstack((b_static, b_dynamic))
            T = T_dynamics[:, :3]
            b = b_dynamic[:, :3]
            # print(T)
            # print(b)
            dynamic_f = np.linalg.solve(T, b).reshape((3,))
            forces.append(dynamic_f)
            print('[INFO] time:', t)
            print('[INFO] length:', [round(np.linalg.norm(l), 4) for l in L])
            print('[INFO] shoulders:', [round(np.linalg.norm(l), 4) for l in np.array(shoulder)])
            print('[INFO] forces:', [round(f, 4) for f in dynamic_f])
            print('[INFO] dynamic component:', b_dynamic.T)
            print('****************************************************')
        forces = np.array(forces).T
        # force-vs-time plot for the six cylinders
        colors = {0: 'r+--', 1: 'rx-',
                  2: 'g+--', 3: 'gx-',
                  4: 'b+--', 5: 'bx-'}
        for i, j in self.indexes:
            pylab.plot(self.time, forces[i], colors[j])
        # colors = {0: 'r+--', 1: 'g+--', 2: 'b+--'}
        # for i in range(3):
        #     pylab.plot(self.time, forces[i], colors[i], label='$F_{}$'.format(i))
        pylab.legend([r'$F_1$', '$F_2$', '$F_3$', '$F_4$', '$F_5$', '$F_6$'], loc=0)
        # plt.legend(loc="lower right")
        pylab.title('Dynamic forces')
        pylab.xlabel('Time [s]')
        pylab.ylabel('Force [kg*m/s^2]')
        pylab.grid()
        plt.show()
def solve_static_forces(self):
"""
Решение обратной задачи стедна для статических нагрузок
:return: компоненты силы для каждой опоры
"""
print('####################################################')
print('[INFO] solve STATIC forces ...')
print('####################################################')
x_symmetry_ind = [[0, 0], [0, 1], [1, 2]]
forces = []
for a, t in zip(self.A.reshape((self.steps, 3, 3)), self.time):
L1 = np.array(a[0] - self.B[0])
L2 = np.array(a[0] - self.B[1])
L3 = np.array(a[1] - self.B[2])
direct_L1 = L1 / np.linalg.norm(L1)
direct_L2 = L2 / np.linalg.norm(L2)
direct_L3 = L3 / np.linalg.norm(L3)
T = np.stack((direct_L1, direct_L2, direct_L3))
b = np.array([-self.m*9.8/2, 0, 0]).reshape((3, 1))
static_f = np.linalg.solve(T, b).reshape((3,)) / 2
forces.append(static_f)
print('[INFO] time:', t)
print('[INFO] length:',
round(np.linalg.norm(L1), 4),
round(np.linalg.norm(L2), 4),
round(np.linalg.norm(L3), 4))
print('[INFO] forces:',
round(static_f[0]/2, 4),
round(static_f[1]/2, 4),
round(static_f[2]/2, 4))
print('****************************************************')
forces = np.array(forces).T
# график приложенной силы к цилиндрам от времени
colors = {0: 'r+--', 1: 'g+--', 2: 'b+--'}
for i, j in x_symmetry_ind:
pylab.plot(self.time, forces[j], colors[j])
pylab.legend([r'$F_1$', '$F_2$', '$F_3$'], loc=0)
pylab.title('Static forces')
pylab.xlabel('Time [s]')
pylab.ylabel('Force [kg*m/s^2]')
pylab.grid()
plt.show()
    def plot_animate(self, A):
        """
        Experimental: animate the stand geometry in 3D with matplotlib.

        Draws line segments between platform points (taken from ``A``) and the
        first base point ``self.B[0]``.

        NOTE(review): this prototype communicates with its inner callbacks via
        module-level globals (``cnt``, ``cur_A``, ``cur_B``) instead of
        closures — calling it from two places would make them clobber each
        other's state. Kept as-is because the exact intended animation
        behavior is unclear.

        :param A: sequence of pose point sets; only ``A[0]`` is used here —
            presumably the first pose. TODO confirm against the caller.
        :return: None (opens an interactive pyplot window)
        """
        fig = plt.figure()
        fig.set_tight_layout(False)
        ax = plt.axes(projection='3d')
        # Globals shared with steps()/animate() below — see NOTE in docstring.
        global cnt
        cnt = ax
        global cur_A
        global cur_B
        cur_A = A[0]
        cur_B = self.B[0]

        def steps(count=1):
            # Draw `count` segments from cur_A[i] to the base point cur_B.
            # NOTE(review): cur_A[i] assumes cur_A is indexable per segment
            # with ['x','y','z'] components — confirm the shape of A.
            for i in range(count):
                df_A = pd.Series(data=cur_A[i], index=['x', 'y', 'z'])
                df_B = pd.Series(data=cur_B, index=['x', 'y', 'z'])
                x = [df_A.x, df_B.x]
                y = [df_A.y, df_B.y]
                z = [df_A.z, df_B.z]
                cnt.plot(x, y, z)

        def animate(frame):
            # `frame` is ignored: every frame re-draws the same first segment,
            # so the figure does not actually change over the 100 frames.
            steps(1)
            return cnt

        # Keep a reference to the animation so it is not garbage-collected
        # before plt.show() returns.
        anim = animation.FuncAnimation(fig, animate, frames=100)
        plt.show()
if __name__ == "__main__":
    # Build the stand model rotating about the y axis and run the full
    # analysis pipeline: geometry, kinematics, then static/dynamic forces.
    # Renamed from `hex` to avoid shadowing the builtin hex().
    hexapod = Hexapod(axis='y')
    hexapod.set_B()
    hexapod.get_delta_L()
    hexapod.solve_static_forces()
    hexapod.solve_dynamic_forces()
|
flexible
|
{
"blob_id": "9a672c17ee22a05e77491bc1449c1c1678414a8c",
"index": 3094,
"step-1": "<mask token>\n\n\nclass Hexapod:\n <mask token>\n <mask token>\n\n def get_delta_L(self):\n \"\"\"\n Расчет геометрии положения точек A_i в каждый момент времени.\n Отрисовка графиков изменения длин, скорости и ускорения для каждого привода по времени.\n\n Задается: self.A, self.all_full_lengths, self.set_r\n :return: None\n \"\"\"\n print('####################################################')\n print('[INFO] solve delta L, Velocity, Acceleration ...')\n print('####################################################')\n R_matrix = None\n if self.axis == 'x':\n R_matrix = self.R_matrix_x\n elif self.axis == 'y':\n R_matrix = self.R_matrix_y\n elif self.axis == 'z':\n R_matrix = self.R_matrix_z\n dL_all = []\n L_all = []\n coordinates_A = []\n colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):\n 'b+--', (5): 'bx-'}\n for i, j in self.indexes:\n print('[INFO] Поршень №{}'.format(j + 1))\n dl = []\n l = []\n coord = []\n for t in self.time:\n try:\n A = np.dot(R_matrix(t), self.A_0[i])\n except Exception:\n print('Type error axis')\n L = np.linalg.norm(self.B[j] - A)\n print(self.B[j] - A)\n print(self.L, L)\n print('dL[мм] = {:.5f}'.format((L - self.L) * 1000.0))\n l.append(L)\n dl.append(round((L - self.L) * 1000.0, 5))\n coord.append(A)\n dL_all.append(dl)\n L_all.append(l)\n coordinates_A.append(coord)\n v = [0.0]\n for k in range(self.steps - 1):\n v.append((dl[k + 1] - dl[k]) / (self.time[k + 1] - self.\n time[k]))\n pylab.figure(1)\n pylab.plot(self.time[5:], v[5:], colors[j])\n print('[INFO] v_max =', np.max(np.abs(v[5:])))\n a = [0.0]\n for k in range(self.steps - 1):\n a.append((v[k + 1] - v[k]) / (self.time[k + 1] - self.time[k]))\n pylab.figure(2)\n pylab.plot(self.time[5:], a[5:], colors[j])\n print('[INFO] a_max =', np.max(np.abs(a[5:])))\n print('****************************************************')\n pylab.figure(1)\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Velocity')\n 
pylab.xlabel('Time [s]')\n pylab.ylabel('Velocity [mm/s]')\n pylab.grid()\n pylab.figure(2)\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Acceleration')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Acceleration [mm/s^2]')\n pylab.grid()\n pylab.figure(3)\n for i in range(6):\n pylab.plot(self.time, dL_all[i], colors[i])\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Delta length')\n pylab.xlabel('Time [s]')\n pylab.ylabel('dL [mm]')\n pylab.grid()\n plt.show()\n self.A = np.array(coordinates_A[0::2])\n self.all_full_lengths = np.array(L_all)\n self.set_r()\n self.plot_3d_lines()\n <mask token>\n\n def calculate_angles(self, l1, l2):\n \"\"\"\n Решение теоремы косинусов для поиска угла по трем сторонам\n :param l1: прилежащая сторона к вычисляемому углу\n :param l2: противолежащая сторона к углу\n :return: (alpha, teta, gamma) - углы в треугольнике сил\n \"\"\"\n cos_teta = (l1 ** 2 + (2 * self.a) ** 2 - l2 ** 2\n ) / 2 * l1 * 2 * self.a\n teta = np.arccos(cos_teta) * 180.0 / np.pi\n b = l1 ** 2 + self.a ** 2 - 2 * l1 * self.a * cos_teta\n cos_alpha = (l1 ** 2 + b ** 2 - l2 ** 2) / 2 * l1 * self.a\n alpha = np.arccos(cos_alpha) * 180.0 / np.pi\n gamma = 180.0 - teta - alpha\n return alpha, teta, gamma\n <mask token>\n\n def solve_dynamic_forces(self):\n \"\"\"\n решение обратной задачи стенда\n Первое приближение - решение двумерной зазадчи для пооврота оси вокруг оси х\n :return: минимальная и максимальная нагрузка на каждый цилиндр\n \"\"\"\n print('####################################################')\n print('[INFO] solve DYNAMIC forces ...')\n print('####################################################')\n A = []\n for i in range(self.steps):\n a = []\n for j in range(3):\n a_ = self.A[j, i, :]\n a.append(a_)\n A.append(a)\n R = []\n for i in range(self.steps):\n r = []\n for j in range(6):\n r_ = self.r[j, i, :]\n r.append(r_)\n R.append(r)\n A = 
np.array(A)\n R = np.array(R)\n forces = []\n for a, r, t in zip(A, R, self.time):\n L = []\n direct = []\n shoulder = []\n for i, j in self.indexes:\n len = np.array(self.B[j] - a[i])\n dir = len / np.linalg.norm(len)\n L.append(len)\n direct_force_try = self.B[j] - r[j]\n direct.append(direct_force_try)\n shoulder.append(np.cross(r[j], direct_force_try))\n L = np.array(L)\n T_static = np.array(direct).T\n T_dynamics = np.array(shoulder).T\n b_static = np.array([-self.m * 9.8, 0, 0]).reshape((3, 1))\n dynamic_comp = None\n if self.axis == 'x':\n comp = self.J[2, 2] * self.prime2_fi_x(t)\n dynamic_comp = np.array([comp, 0, 0]).reshape((3, 1))\n elif self.axis == 'y':\n comp = self.J[1, 1] * self.prime2_fi_y(t)\n dynamic_comp = np.array([0, comp, 0]).reshape((3, 1))\n elif self.axis == 'z':\n comp = self.J[0, 0] * self.prime2_fi_y(t)\n dynamic_comp = np.array([0, 0, comp]).reshape((3, 1))\n b_dynamic = dynamic_comp\n T = T_dynamics[:, :3]\n b = b_dynamic[:, :3]\n dynamic_f = np.linalg.solve(T, b).reshape((3,))\n forces.append(dynamic_f)\n print('[INFO] time:', t)\n print('[INFO] length:', [round(np.linalg.norm(l), 4) for l in L])\n print('[INFO] shoulders:', [round(np.linalg.norm(l), 4) for l in\n np.array(shoulder)])\n print('[INFO] forces:', [round(f, 4) for f in dynamic_f])\n print('[INFO] dynamic component:', b_dynamic.T)\n print('****************************************************')\n forces = np.array(forces).T\n colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):\n 'b+--', (5): 'bx-'}\n for i, j in self.indexes:\n pylab.plot(self.time, forces[i], colors[j])\n pylab.legend(['$F_1$', '$F_2$', '$F_3$', '$F_4$', '$F_5$', '$F_6$'],\n loc=0)\n pylab.title('Dynamic forces')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Force [kg*m/s^2]')\n pylab.grid()\n plt.show()\n\n def solve_static_forces(self):\n \"\"\"\n Решение обратной задачи стедна для статических нагрузок\n :return: компоненты силы для каждой опоры\n \"\"\"\n 
print('####################################################')\n print('[INFO] solve STATIC forces ...')\n print('####################################################')\n x_symmetry_ind = [[0, 0], [0, 1], [1, 2]]\n forces = []\n for a, t in zip(self.A.reshape((self.steps, 3, 3)), self.time):\n L1 = np.array(a[0] - self.B[0])\n L2 = np.array(a[0] - self.B[1])\n L3 = np.array(a[1] - self.B[2])\n direct_L1 = L1 / np.linalg.norm(L1)\n direct_L2 = L2 / np.linalg.norm(L2)\n direct_L3 = L3 / np.linalg.norm(L3)\n T = np.stack((direct_L1, direct_L2, direct_L3))\n b = np.array([-self.m * 9.8 / 2, 0, 0]).reshape((3, 1))\n static_f = np.linalg.solve(T, b).reshape((3,)) / 2\n forces.append(static_f)\n print('[INFO] time:', t)\n print('[INFO] length:', round(np.linalg.norm(L1), 4), round(np.\n linalg.norm(L2), 4), round(np.linalg.norm(L3), 4))\n print('[INFO] forces:', round(static_f[0] / 2, 4), round(\n static_f[1] / 2, 4), round(static_f[2] / 2, 4))\n print('****************************************************')\n forces = np.array(forces).T\n colors = {(0): 'r+--', (1): 'g+--', (2): 'b+--'}\n for i, j in x_symmetry_ind:\n pylab.plot(self.time, forces[j], colors[j])\n pylab.legend(['$F_1$', '$F_2$', '$F_3$'], loc=0)\n pylab.title('Static forces')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Force [kg*m/s^2]')\n pylab.grid()\n plt.show()\n\n def plot_animate(self, A):\n \"\"\"\"\n try to create animate function to plot mechanisms\n \"\"\"\n fig = plt.figure()\n fig.set_tight_layout(False)\n ax = plt.axes(projection='3d')\n global cnt\n cnt = ax\n global cur_A\n global cur_B\n cur_A = A[0]\n cur_B = self.B[0]\n\n def steps(count=1):\n for i in range(count):\n df_A = pd.Series(data=cur_A[i], index=['x', 'y', 'z'])\n df_B = pd.Series(data=cur_B, index=['x', 'y', 'z'])\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n cnt.plot(x, y, z)\n\n def animate(frame):\n steps(1)\n return cnt\n anim = animation.FuncAnimation(fig, animate, frames=100)\n plt.show()\n\n\n<mask 
token>\n",
"step-2": "<mask token>\n\n\nclass Hexapod:\n\n def __init__(self, axis):\n \"\"\"\n Инициализация начальных параметров системы\n :param axis: ось вращения системы (x, y, z)\n \"\"\"\n self.axis = axis\n self.alpha = 30.0\n self.beta = 30.0\n self.L = 1.5\n self.h_c = 2.0\n self.r = 1.0\n self.m_p = 1000.0\n self.m = 4000.0\n self.nu = 0.5\n self.J = np.array([[5000, 0, 0], [0, 5000, 0], [0, 0, 3500]], np.\n float32)\n self.A_0 = np.round([[self.r * np.sin(2 * np.pi / 3 * i + np.pi), \n self.r * np.cos(2 * np.pi / 3 * i + np.pi), -self.h_c] for i in\n range(-1, 2)], 5)\n self.B = np.array([])\n self.A = np.array([])\n self.all_full_lengths = np.array([])\n self.r = np.array([])\n self.fi_x_0 = 4.0\n self.fi_x = lambda t: self.fi_x_0 * np.sin(2 * np.pi * self.nu * t)\n self.prime_fi_x = lambda t: self.fi_x_0 * 2 * np.pi * self.nu * np.cos(\n 2 * np.pi * self.nu * t)\n self.prime2_fi_x = lambda t: -self.fi_x_0 * (2 * np.pi * self.nu\n ) ** 2 * np.sin(2 * np.pi * self.nu * t)\n self.fi_y_0 = 4.0\n self.fi_y = lambda t: self.fi_y_0 * np.sin(2 * np.pi * self.nu * t)\n self.prime_fi_y = lambda t: self.fi_y_0 * 2 * np.pi * self.nu * np.cos(\n 2 * np.pi * self.nu * t)\n self.prime2_fi_y = lambda t: -self.fi_y_0 * (2 * np.pi * self.nu\n ) ** 2 * np.sin(2 * np.pi * self.nu * t)\n self.R_matrix_x = lambda t: np.round([[np.cos(self.fi_x(t) * np.pi /\n 180.0), -np.sin(self.fi_x(t) * np.pi / 180.0), 0], [np.sin(self\n .fi_x(t) * np.pi / 180.0), np.cos(self.fi_x(t) * np.pi / 180.0),\n 0], [0, 0, 1]], 5)\n self.R_matrix_y = lambda t: np.round([[1, 0, 0], [0, np.cos(self.\n fi_y(t) * np.pi / 180.0), -np.sin(self.fi_y(t) * np.pi / 180.0)\n ], [0, np.sin(self.fi_y(t) * np.pi / 180.0), np.cos(self.fi_y(t\n ) * np.pi / 180.0)]], 5)\n self.R_matrix_z = lambda t: np.round([[np.cos(self.fi_y(t) * np.pi /\n 180.0), 0, np.sin(self.fi_y(t) * np.pi / 180.0)], [0, 1, 0], [-\n np.sin(self.fi_y(t) * np.pi / 180.0), 0, np.cos(self.fi_y(t) *\n np.pi / 180.0)]], 5)\n self.H = np.cos(np.pi / 180.0 * 
self.beta) * np.cos(np.pi / 180.0 *\n self.alpha / 2) * self.L\n self.h = self.L * np.cos(np.pi / 180.0 * self.alpha / 2) * np.sin(\n np.pi / 180.0 * self.beta)\n self.a = self.L * np.sin(np.pi / 180.0 * self.alpha / 2)\n self.r = (self.h ** 2 + self.a ** 2) ** 0.5\n self.end_time = 2.0\n self.start_time = 0.0\n self.steps = 100\n self.time = np.linspace(self.start_time, self.end_time, self.steps)\n self.indexes = [[0, 0], [0, 1], [1, 2], [1, 3], [2, 4], [2, 5]]\n\n def set_B(self):\n \"\"\"\n Расчет геометрии стенда - положение точек B_i.\n\n Задается: self.B\n :return: None\n \"\"\"\n for i, A in enumerate(self.A_0):\n a = A[:2]\n b1 = np.array([self.h, self.a])\n b2 = np.array([self.h, -self.a])\n kappa = np.array([[np.cos(np.pi / 180 * (30 - 120 * i)), -np.\n sin(np.pi / 180 * (30 - 120 * i))], [np.sin(np.pi / 180 * (\n 30 - 120 * i)), np.cos(np.pi / 180 * (30 - 120 * i))]])\n p1 = np.dot(kappa, b1) + a\n p2 = np.dot(kappa, b2) + a\n p1 = np.append(p1, -self.H - self.h_c)\n p2 = np.append(p2, -self.H - self.h_c)\n self.B = np.hstack((self.B, p1))\n self.B = np.hstack((self.B, p2))\n self.B = self.B.reshape(6, 3)\n i = 0\n for A in self.A_0:\n assert np.linalg.norm(np.subtract(A, self.B[i])) - self.L <= 0.0001\n assert np.linalg.norm(np.subtract(A, self.B[i + 1])\n ) - self.L <= 0.0001\n i += 2\n\n def get_delta_L(self):\n \"\"\"\n Расчет геометрии положения точек A_i в каждый момент времени.\n Отрисовка графиков изменения длин, скорости и ускорения для каждого привода по времени.\n\n Задается: self.A, self.all_full_lengths, self.set_r\n :return: None\n \"\"\"\n print('####################################################')\n print('[INFO] solve delta L, Velocity, Acceleration ...')\n print('####################################################')\n R_matrix = None\n if self.axis == 'x':\n R_matrix = self.R_matrix_x\n elif self.axis == 'y':\n R_matrix = self.R_matrix_y\n elif self.axis == 'z':\n R_matrix = self.R_matrix_z\n dL_all = []\n L_all = []\n coordinates_A 
= []\n colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):\n 'b+--', (5): 'bx-'}\n for i, j in self.indexes:\n print('[INFO] Поршень №{}'.format(j + 1))\n dl = []\n l = []\n coord = []\n for t in self.time:\n try:\n A = np.dot(R_matrix(t), self.A_0[i])\n except Exception:\n print('Type error axis')\n L = np.linalg.norm(self.B[j] - A)\n print(self.B[j] - A)\n print(self.L, L)\n print('dL[мм] = {:.5f}'.format((L - self.L) * 1000.0))\n l.append(L)\n dl.append(round((L - self.L) * 1000.0, 5))\n coord.append(A)\n dL_all.append(dl)\n L_all.append(l)\n coordinates_A.append(coord)\n v = [0.0]\n for k in range(self.steps - 1):\n v.append((dl[k + 1] - dl[k]) / (self.time[k + 1] - self.\n time[k]))\n pylab.figure(1)\n pylab.plot(self.time[5:], v[5:], colors[j])\n print('[INFO] v_max =', np.max(np.abs(v[5:])))\n a = [0.0]\n for k in range(self.steps - 1):\n a.append((v[k + 1] - v[k]) / (self.time[k + 1] - self.time[k]))\n pylab.figure(2)\n pylab.plot(self.time[5:], a[5:], colors[j])\n print('[INFO] a_max =', np.max(np.abs(a[5:])))\n print('****************************************************')\n pylab.figure(1)\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Velocity')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Velocity [mm/s]')\n pylab.grid()\n pylab.figure(2)\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Acceleration')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Acceleration [mm/s^2]')\n pylab.grid()\n pylab.figure(3)\n for i in range(6):\n pylab.plot(self.time, dL_all[i], colors[i])\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Delta length')\n pylab.xlabel('Time [s]')\n pylab.ylabel('dL [mm]')\n pylab.grid()\n plt.show()\n self.A = np.array(coordinates_A[0::2])\n self.all_full_lengths = np.array(L_all)\n self.set_r()\n self.plot_3d_lines()\n <mask token>\n\n def calculate_angles(self, l1, l2):\n 
\"\"\"\n Решение теоремы косинусов для поиска угла по трем сторонам\n :param l1: прилежащая сторона к вычисляемому углу\n :param l2: противолежащая сторона к углу\n :return: (alpha, teta, gamma) - углы в треугольнике сил\n \"\"\"\n cos_teta = (l1 ** 2 + (2 * self.a) ** 2 - l2 ** 2\n ) / 2 * l1 * 2 * self.a\n teta = np.arccos(cos_teta) * 180.0 / np.pi\n b = l1 ** 2 + self.a ** 2 - 2 * l1 * self.a * cos_teta\n cos_alpha = (l1 ** 2 + b ** 2 - l2 ** 2) / 2 * l1 * self.a\n alpha = np.arccos(cos_alpha) * 180.0 / np.pi\n gamma = 180.0 - teta - alpha\n return alpha, teta, gamma\n\n def set_r(self):\n \"\"\"\n Вычисление радиус-векторов плеч сил для каждого цилиндра\n :return: None\n \"\"\"\n r_all = []\n for i, j in self.indexes:\n r = []\n for a in self.A[i]:\n L = np.array(a - self.B[j])\n direct_L = L / np.linalg.norm(L)\n t1 = np.array([0, -direct_L[2], direct_L[1]])\n b1 = np.array([a[2] * direct_L[1] - a[1] * direct_L[2]])\n t2 = direct_L\n b2 = np.array([0])\n t3 = np.array([a[1] * direct_L[2] - a[2] * direct_L[1], -a[\n 0] * direct_L[2] + a[2] * direct_L[0], a[0] * direct_L[\n 1] - a[1] * direct_L[0]])\n b3 = np.array([0])\n T = np.stack((t1, t2, t3))\n b = np.stack((b1, b2, b3))\n r.append(np.linalg.solve(T, b).reshape((3,)))\n r_all.append(r)\n self.r = np.array(r_all)\n\n def solve_dynamic_forces(self):\n \"\"\"\n решение обратной задачи стенда\n Первое приближение - решение двумерной зазадчи для пооврота оси вокруг оси х\n :return: минимальная и максимальная нагрузка на каждый цилиндр\n \"\"\"\n print('####################################################')\n print('[INFO] solve DYNAMIC forces ...')\n print('####################################################')\n A = []\n for i in range(self.steps):\n a = []\n for j in range(3):\n a_ = self.A[j, i, :]\n a.append(a_)\n A.append(a)\n R = []\n for i in range(self.steps):\n r = []\n for j in range(6):\n r_ = self.r[j, i, :]\n r.append(r_)\n R.append(r)\n A = np.array(A)\n R = np.array(R)\n forces = []\n for a, r, t 
in zip(A, R, self.time):\n L = []\n direct = []\n shoulder = []\n for i, j in self.indexes:\n len = np.array(self.B[j] - a[i])\n dir = len / np.linalg.norm(len)\n L.append(len)\n direct_force_try = self.B[j] - r[j]\n direct.append(direct_force_try)\n shoulder.append(np.cross(r[j], direct_force_try))\n L = np.array(L)\n T_static = np.array(direct).T\n T_dynamics = np.array(shoulder).T\n b_static = np.array([-self.m * 9.8, 0, 0]).reshape((3, 1))\n dynamic_comp = None\n if self.axis == 'x':\n comp = self.J[2, 2] * self.prime2_fi_x(t)\n dynamic_comp = np.array([comp, 0, 0]).reshape((3, 1))\n elif self.axis == 'y':\n comp = self.J[1, 1] * self.prime2_fi_y(t)\n dynamic_comp = np.array([0, comp, 0]).reshape((3, 1))\n elif self.axis == 'z':\n comp = self.J[0, 0] * self.prime2_fi_y(t)\n dynamic_comp = np.array([0, 0, comp]).reshape((3, 1))\n b_dynamic = dynamic_comp\n T = T_dynamics[:, :3]\n b = b_dynamic[:, :3]\n dynamic_f = np.linalg.solve(T, b).reshape((3,))\n forces.append(dynamic_f)\n print('[INFO] time:', t)\n print('[INFO] length:', [round(np.linalg.norm(l), 4) for l in L])\n print('[INFO] shoulders:', [round(np.linalg.norm(l), 4) for l in\n np.array(shoulder)])\n print('[INFO] forces:', [round(f, 4) for f in dynamic_f])\n print('[INFO] dynamic component:', b_dynamic.T)\n print('****************************************************')\n forces = np.array(forces).T\n colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):\n 'b+--', (5): 'bx-'}\n for i, j in self.indexes:\n pylab.plot(self.time, forces[i], colors[j])\n pylab.legend(['$F_1$', '$F_2$', '$F_3$', '$F_4$', '$F_5$', '$F_6$'],\n loc=0)\n pylab.title('Dynamic forces')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Force [kg*m/s^2]')\n pylab.grid()\n plt.show()\n\n def solve_static_forces(self):\n \"\"\"\n Решение обратной задачи стедна для статических нагрузок\n :return: компоненты силы для каждой опоры\n \"\"\"\n print('####################################################')\n print('[INFO] solve STATIC 
forces ...')\n print('####################################################')\n x_symmetry_ind = [[0, 0], [0, 1], [1, 2]]\n forces = []\n for a, t in zip(self.A.reshape((self.steps, 3, 3)), self.time):\n L1 = np.array(a[0] - self.B[0])\n L2 = np.array(a[0] - self.B[1])\n L3 = np.array(a[1] - self.B[2])\n direct_L1 = L1 / np.linalg.norm(L1)\n direct_L2 = L2 / np.linalg.norm(L2)\n direct_L3 = L3 / np.linalg.norm(L3)\n T = np.stack((direct_L1, direct_L2, direct_L3))\n b = np.array([-self.m * 9.8 / 2, 0, 0]).reshape((3, 1))\n static_f = np.linalg.solve(T, b).reshape((3,)) / 2\n forces.append(static_f)\n print('[INFO] time:', t)\n print('[INFO] length:', round(np.linalg.norm(L1), 4), round(np.\n linalg.norm(L2), 4), round(np.linalg.norm(L3), 4))\n print('[INFO] forces:', round(static_f[0] / 2, 4), round(\n static_f[1] / 2, 4), round(static_f[2] / 2, 4))\n print('****************************************************')\n forces = np.array(forces).T\n colors = {(0): 'r+--', (1): 'g+--', (2): 'b+--'}\n for i, j in x_symmetry_ind:\n pylab.plot(self.time, forces[j], colors[j])\n pylab.legend(['$F_1$', '$F_2$', '$F_3$'], loc=0)\n pylab.title('Static forces')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Force [kg*m/s^2]')\n pylab.grid()\n plt.show()\n\n def plot_animate(self, A):\n \"\"\"\"\n try to create animate function to plot mechanisms\n \"\"\"\n fig = plt.figure()\n fig.set_tight_layout(False)\n ax = plt.axes(projection='3d')\n global cnt\n cnt = ax\n global cur_A\n global cur_B\n cur_A = A[0]\n cur_B = self.B[0]\n\n def steps(count=1):\n for i in range(count):\n df_A = pd.Series(data=cur_A[i], index=['x', 'y', 'z'])\n df_B = pd.Series(data=cur_B, index=['x', 'y', 'z'])\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n cnt.plot(x, y, z)\n\n def animate(frame):\n steps(1)\n return cnt\n anim = animation.FuncAnimation(fig, animate, frames=100)\n plt.show()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass Hexapod:\n\n def __init__(self, axis):\n \"\"\"\n Инициализация начальных параметров системы\n :param axis: ось вращения системы (x, y, z)\n \"\"\"\n self.axis = axis\n self.alpha = 30.0\n self.beta = 30.0\n self.L = 1.5\n self.h_c = 2.0\n self.r = 1.0\n self.m_p = 1000.0\n self.m = 4000.0\n self.nu = 0.5\n self.J = np.array([[5000, 0, 0], [0, 5000, 0], [0, 0, 3500]], np.\n float32)\n self.A_0 = np.round([[self.r * np.sin(2 * np.pi / 3 * i + np.pi), \n self.r * np.cos(2 * np.pi / 3 * i + np.pi), -self.h_c] for i in\n range(-1, 2)], 5)\n self.B = np.array([])\n self.A = np.array([])\n self.all_full_lengths = np.array([])\n self.r = np.array([])\n self.fi_x_0 = 4.0\n self.fi_x = lambda t: self.fi_x_0 * np.sin(2 * np.pi * self.nu * t)\n self.prime_fi_x = lambda t: self.fi_x_0 * 2 * np.pi * self.nu * np.cos(\n 2 * np.pi * self.nu * t)\n self.prime2_fi_x = lambda t: -self.fi_x_0 * (2 * np.pi * self.nu\n ) ** 2 * np.sin(2 * np.pi * self.nu * t)\n self.fi_y_0 = 4.0\n self.fi_y = lambda t: self.fi_y_0 * np.sin(2 * np.pi * self.nu * t)\n self.prime_fi_y = lambda t: self.fi_y_0 * 2 * np.pi * self.nu * np.cos(\n 2 * np.pi * self.nu * t)\n self.prime2_fi_y = lambda t: -self.fi_y_0 * (2 * np.pi * self.nu\n ) ** 2 * np.sin(2 * np.pi * self.nu * t)\n self.R_matrix_x = lambda t: np.round([[np.cos(self.fi_x(t) * np.pi /\n 180.0), -np.sin(self.fi_x(t) * np.pi / 180.0), 0], [np.sin(self\n .fi_x(t) * np.pi / 180.0), np.cos(self.fi_x(t) * np.pi / 180.0),\n 0], [0, 0, 1]], 5)\n self.R_matrix_y = lambda t: np.round([[1, 0, 0], [0, np.cos(self.\n fi_y(t) * np.pi / 180.0), -np.sin(self.fi_y(t) * np.pi / 180.0)\n ], [0, np.sin(self.fi_y(t) * np.pi / 180.0), np.cos(self.fi_y(t\n ) * np.pi / 180.0)]], 5)\n self.R_matrix_z = lambda t: np.round([[np.cos(self.fi_y(t) * np.pi /\n 180.0), 0, np.sin(self.fi_y(t) * np.pi / 180.0)], [0, 1, 0], [-\n np.sin(self.fi_y(t) * np.pi / 180.0), 0, np.cos(self.fi_y(t) *\n np.pi / 180.0)]], 5)\n self.H = np.cos(np.pi / 180.0 * 
self.beta) * np.cos(np.pi / 180.0 *\n self.alpha / 2) * self.L\n self.h = self.L * np.cos(np.pi / 180.0 * self.alpha / 2) * np.sin(\n np.pi / 180.0 * self.beta)\n self.a = self.L * np.sin(np.pi / 180.0 * self.alpha / 2)\n self.r = (self.h ** 2 + self.a ** 2) ** 0.5\n self.end_time = 2.0\n self.start_time = 0.0\n self.steps = 100\n self.time = np.linspace(self.start_time, self.end_time, self.steps)\n self.indexes = [[0, 0], [0, 1], [1, 2], [1, 3], [2, 4], [2, 5]]\n\n def set_B(self):\n \"\"\"\n Расчет геометрии стенда - положение точек B_i.\n\n Задается: self.B\n :return: None\n \"\"\"\n for i, A in enumerate(self.A_0):\n a = A[:2]\n b1 = np.array([self.h, self.a])\n b2 = np.array([self.h, -self.a])\n kappa = np.array([[np.cos(np.pi / 180 * (30 - 120 * i)), -np.\n sin(np.pi / 180 * (30 - 120 * i))], [np.sin(np.pi / 180 * (\n 30 - 120 * i)), np.cos(np.pi / 180 * (30 - 120 * i))]])\n p1 = np.dot(kappa, b1) + a\n p2 = np.dot(kappa, b2) + a\n p1 = np.append(p1, -self.H - self.h_c)\n p2 = np.append(p2, -self.H - self.h_c)\n self.B = np.hstack((self.B, p1))\n self.B = np.hstack((self.B, p2))\n self.B = self.B.reshape(6, 3)\n i = 0\n for A in self.A_0:\n assert np.linalg.norm(np.subtract(A, self.B[i])) - self.L <= 0.0001\n assert np.linalg.norm(np.subtract(A, self.B[i + 1])\n ) - self.L <= 0.0001\n i += 2\n\n def get_delta_L(self):\n \"\"\"\n Расчет геометрии положения точек A_i в каждый момент времени.\n Отрисовка графиков изменения длин, скорости и ускорения для каждого привода по времени.\n\n Задается: self.A, self.all_full_lengths, self.set_r\n :return: None\n \"\"\"\n print('####################################################')\n print('[INFO] solve delta L, Velocity, Acceleration ...')\n print('####################################################')\n R_matrix = None\n if self.axis == 'x':\n R_matrix = self.R_matrix_x\n elif self.axis == 'y':\n R_matrix = self.R_matrix_y\n elif self.axis == 'z':\n R_matrix = self.R_matrix_z\n dL_all = []\n L_all = []\n coordinates_A 
= []\n colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):\n 'b+--', (5): 'bx-'}\n for i, j in self.indexes:\n print('[INFO] Поршень №{}'.format(j + 1))\n dl = []\n l = []\n coord = []\n for t in self.time:\n try:\n A = np.dot(R_matrix(t), self.A_0[i])\n except Exception:\n print('Type error axis')\n L = np.linalg.norm(self.B[j] - A)\n print(self.B[j] - A)\n print(self.L, L)\n print('dL[мм] = {:.5f}'.format((L - self.L) * 1000.0))\n l.append(L)\n dl.append(round((L - self.L) * 1000.0, 5))\n coord.append(A)\n dL_all.append(dl)\n L_all.append(l)\n coordinates_A.append(coord)\n v = [0.0]\n for k in range(self.steps - 1):\n v.append((dl[k + 1] - dl[k]) / (self.time[k + 1] - self.\n time[k]))\n pylab.figure(1)\n pylab.plot(self.time[5:], v[5:], colors[j])\n print('[INFO] v_max =', np.max(np.abs(v[5:])))\n a = [0.0]\n for k in range(self.steps - 1):\n a.append((v[k + 1] - v[k]) / (self.time[k + 1] - self.time[k]))\n pylab.figure(2)\n pylab.plot(self.time[5:], a[5:], colors[j])\n print('[INFO] a_max =', np.max(np.abs(a[5:])))\n print('****************************************************')\n pylab.figure(1)\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Velocity')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Velocity [mm/s]')\n pylab.grid()\n pylab.figure(2)\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Acceleration')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Acceleration [mm/s^2]')\n pylab.grid()\n pylab.figure(3)\n for i in range(6):\n pylab.plot(self.time, dL_all[i], colors[i])\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Delta length')\n pylab.xlabel('Time [s]')\n pylab.ylabel('dL [mm]')\n pylab.grid()\n plt.show()\n self.A = np.array(coordinates_A[0::2])\n self.all_full_lengths = np.array(L_all)\n self.set_r()\n self.plot_3d_lines()\n\n def plot_3d_lines(self):\n \"\"\"\n Покадровая отрисовка 
геометрии стенда в 3D.\n :return: None\n \"\"\"\n pylab.figure(figsize=(12, 10))\n ax = pylab.axes(projection='3d')\n colors = {(0): 'r', (1): 'orange', (2): 'g', (3): 'olive', (4): 'b',\n (5): 'navy'}\n markers = {(0): '^', (1): '^', (2): 'o', (3): 'o', (4): '*', (5): '*'}\n for i, j in self.indexes:\n df_A = pd.Series(data=self.A_0[i], index=['x', 'y', 'z'])\n df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n ax.scatter(x, y, z, c=colors[j], marker=markers[j], s=20.0)\n ax.legend(['1', '2', '3', '4', '5', '6'], loc=0)\n for i, j in self.indexes:\n k = 0\n for a, r in zip(self.A[i], self.r[j]):\n df_A = pd.Series(data=a, index=['x', 'y', 'z'])\n df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])\n df_r = pd.Series(data=r, index=['x', 'y', 'z'])\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n x1 = [df_r.x, 0]\n y1 = [df_r.y, 0]\n z1 = [df_r.z, 0]\n x2 = [df_r.x, df_B.x]\n y2 = [df_r.y, df_B.y]\n z2 = [df_r.z, df_B.z]\n if k % int(self.steps - 1) == 0:\n ax.plot(x, y, z, c=colors[j], marker=markers[j])\n k += 1\n for i in range(0, self.steps, 9):\n a = np.array([self.A[0, i], self.A[1, i], self.A[2, i]])\n df_A = pd.DataFrame(data=a, columns=['x', 'y', 'z'])\n df_A = pd.concat((df_A, df_A.take([0])), axis=0)\n ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='gray')\n df_B = pd.DataFrame(data=self.B, columns=['x', 'y', 'z'])\n df_B = pd.concat((df_B, df_B.take([0])))\n df_A = pd.DataFrame(data=self.A_0, columns=['x', 'y', 'z'])\n df_A = pd.concat((df_A, df_A.take([0])), axis=0)\n ax.plot(df_B.x.values, df_B.y.values, df_B.z.values, c='black',\n linewidth=4.0)\n ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='black',\n linewidth=4.0)\n ax.view_init(30, -39)\n plt.show()\n\n def calculate_angles(self, l1, l2):\n \"\"\"\n Решение теоремы косинусов для поиска угла по трем сторонам\n :param l1: прилежащая сторона к вычисляемому углу\n :param l2: 
противолежащая сторона к углу\n :return: (alpha, teta, gamma) - углы в треугольнике сил\n \"\"\"\n cos_teta = (l1 ** 2 + (2 * self.a) ** 2 - l2 ** 2\n ) / 2 * l1 * 2 * self.a\n teta = np.arccos(cos_teta) * 180.0 / np.pi\n b = l1 ** 2 + self.a ** 2 - 2 * l1 * self.a * cos_teta\n cos_alpha = (l1 ** 2 + b ** 2 - l2 ** 2) / 2 * l1 * self.a\n alpha = np.arccos(cos_alpha) * 180.0 / np.pi\n gamma = 180.0 - teta - alpha\n return alpha, teta, gamma\n\n def set_r(self):\n \"\"\"\n Вычисление радиус-векторов плеч сил для каждого цилиндра\n :return: None\n \"\"\"\n r_all = []\n for i, j in self.indexes:\n r = []\n for a in self.A[i]:\n L = np.array(a - self.B[j])\n direct_L = L / np.linalg.norm(L)\n t1 = np.array([0, -direct_L[2], direct_L[1]])\n b1 = np.array([a[2] * direct_L[1] - a[1] * direct_L[2]])\n t2 = direct_L\n b2 = np.array([0])\n t3 = np.array([a[1] * direct_L[2] - a[2] * direct_L[1], -a[\n 0] * direct_L[2] + a[2] * direct_L[0], a[0] * direct_L[\n 1] - a[1] * direct_L[0]])\n b3 = np.array([0])\n T = np.stack((t1, t2, t3))\n b = np.stack((b1, b2, b3))\n r.append(np.linalg.solve(T, b).reshape((3,)))\n r_all.append(r)\n self.r = np.array(r_all)\n\n def solve_dynamic_forces(self):\n \"\"\"\n решение обратной задачи стенда\n Первое приближение - решение двумерной зазадчи для пооврота оси вокруг оси х\n :return: минимальная и максимальная нагрузка на каждый цилиндр\n \"\"\"\n print('####################################################')\n print('[INFO] solve DYNAMIC forces ...')\n print('####################################################')\n A = []\n for i in range(self.steps):\n a = []\n for j in range(3):\n a_ = self.A[j, i, :]\n a.append(a_)\n A.append(a)\n R = []\n for i in range(self.steps):\n r = []\n for j in range(6):\n r_ = self.r[j, i, :]\n r.append(r_)\n R.append(r)\n A = np.array(A)\n R = np.array(R)\n forces = []\n for a, r, t in zip(A, R, self.time):\n L = []\n direct = []\n shoulder = []\n for i, j in self.indexes:\n len = np.array(self.B[j] - a[i])\n dir 
= len / np.linalg.norm(len)\n L.append(len)\n direct_force_try = self.B[j] - r[j]\n direct.append(direct_force_try)\n shoulder.append(np.cross(r[j], direct_force_try))\n L = np.array(L)\n T_static = np.array(direct).T\n T_dynamics = np.array(shoulder).T\n b_static = np.array([-self.m * 9.8, 0, 0]).reshape((3, 1))\n dynamic_comp = None\n if self.axis == 'x':\n comp = self.J[2, 2] * self.prime2_fi_x(t)\n dynamic_comp = np.array([comp, 0, 0]).reshape((3, 1))\n elif self.axis == 'y':\n comp = self.J[1, 1] * self.prime2_fi_y(t)\n dynamic_comp = np.array([0, comp, 0]).reshape((3, 1))\n elif self.axis == 'z':\n comp = self.J[0, 0] * self.prime2_fi_y(t)\n dynamic_comp = np.array([0, 0, comp]).reshape((3, 1))\n b_dynamic = dynamic_comp\n T = T_dynamics[:, :3]\n b = b_dynamic[:, :3]\n dynamic_f = np.linalg.solve(T, b).reshape((3,))\n forces.append(dynamic_f)\n print('[INFO] time:', t)\n print('[INFO] length:', [round(np.linalg.norm(l), 4) for l in L])\n print('[INFO] shoulders:', [round(np.linalg.norm(l), 4) for l in\n np.array(shoulder)])\n print('[INFO] forces:', [round(f, 4) for f in dynamic_f])\n print('[INFO] dynamic component:', b_dynamic.T)\n print('****************************************************')\n forces = np.array(forces).T\n colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):\n 'b+--', (5): 'bx-'}\n for i, j in self.indexes:\n pylab.plot(self.time, forces[i], colors[j])\n pylab.legend(['$F_1$', '$F_2$', '$F_3$', '$F_4$', '$F_5$', '$F_6$'],\n loc=0)\n pylab.title('Dynamic forces')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Force [kg*m/s^2]')\n pylab.grid()\n plt.show()\n\n def solve_static_forces(self):\n \"\"\"\n Решение обратной задачи стедна для статических нагрузок\n :return: компоненты силы для каждой опоры\n \"\"\"\n print('####################################################')\n print('[INFO] solve STATIC forces ...')\n print('####################################################')\n x_symmetry_ind = [[0, 0], [0, 1], [1, 2]]\n forces = []\n 
for a, t in zip(self.A.reshape((self.steps, 3, 3)), self.time):\n L1 = np.array(a[0] - self.B[0])\n L2 = np.array(a[0] - self.B[1])\n L3 = np.array(a[1] - self.B[2])\n direct_L1 = L1 / np.linalg.norm(L1)\n direct_L2 = L2 / np.linalg.norm(L2)\n direct_L3 = L3 / np.linalg.norm(L3)\n T = np.stack((direct_L1, direct_L2, direct_L3))\n b = np.array([-self.m * 9.8 / 2, 0, 0]).reshape((3, 1))\n static_f = np.linalg.solve(T, b).reshape((3,)) / 2\n forces.append(static_f)\n print('[INFO] time:', t)\n print('[INFO] length:', round(np.linalg.norm(L1), 4), round(np.\n linalg.norm(L2), 4), round(np.linalg.norm(L3), 4))\n print('[INFO] forces:', round(static_f[0] / 2, 4), round(\n static_f[1] / 2, 4), round(static_f[2] / 2, 4))\n print('****************************************************')\n forces = np.array(forces).T\n colors = {(0): 'r+--', (1): 'g+--', (2): 'b+--'}\n for i, j in x_symmetry_ind:\n pylab.plot(self.time, forces[j], colors[j])\n pylab.legend(['$F_1$', '$F_2$', '$F_3$'], loc=0)\n pylab.title('Static forces')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Force [kg*m/s^2]')\n pylab.grid()\n plt.show()\n\n def plot_animate(self, A):\n \"\"\"\"\n try to create animate function to plot mechanisms\n \"\"\"\n fig = plt.figure()\n fig.set_tight_layout(False)\n ax = plt.axes(projection='3d')\n global cnt\n cnt = ax\n global cur_A\n global cur_B\n cur_A = A[0]\n cur_B = self.B[0]\n\n def steps(count=1):\n for i in range(count):\n df_A = pd.Series(data=cur_A[i], index=['x', 'y', 'z'])\n df_B = pd.Series(data=cur_B, index=['x', 'y', 'z'])\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n cnt.plot(x, y, z)\n\n def animate(frame):\n steps(1)\n return cnt\n anim = animation.FuncAnimation(fig, animate, frames=100)\n plt.show()\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass Hexapod:\n\n def __init__(self, axis):\n \"\"\"\n Инициализация начальных параметров системы\n :param axis: ось вращения системы (x, y, z)\n \"\"\"\n self.axis = axis\n self.alpha = 30.0\n self.beta = 30.0\n self.L = 1.5\n self.h_c = 2.0\n self.r = 1.0\n self.m_p = 1000.0\n self.m = 4000.0\n self.nu = 0.5\n self.J = np.array([[5000, 0, 0], [0, 5000, 0], [0, 0, 3500]], np.\n float32)\n self.A_0 = np.round([[self.r * np.sin(2 * np.pi / 3 * i + np.pi), \n self.r * np.cos(2 * np.pi / 3 * i + np.pi), -self.h_c] for i in\n range(-1, 2)], 5)\n self.B = np.array([])\n self.A = np.array([])\n self.all_full_lengths = np.array([])\n self.r = np.array([])\n self.fi_x_0 = 4.0\n self.fi_x = lambda t: self.fi_x_0 * np.sin(2 * np.pi * self.nu * t)\n self.prime_fi_x = lambda t: self.fi_x_0 * 2 * np.pi * self.nu * np.cos(\n 2 * np.pi * self.nu * t)\n self.prime2_fi_x = lambda t: -self.fi_x_0 * (2 * np.pi * self.nu\n ) ** 2 * np.sin(2 * np.pi * self.nu * t)\n self.fi_y_0 = 4.0\n self.fi_y = lambda t: self.fi_y_0 * np.sin(2 * np.pi * self.nu * t)\n self.prime_fi_y = lambda t: self.fi_y_0 * 2 * np.pi * self.nu * np.cos(\n 2 * np.pi * self.nu * t)\n self.prime2_fi_y = lambda t: -self.fi_y_0 * (2 * np.pi * self.nu\n ) ** 2 * np.sin(2 * np.pi * self.nu * t)\n self.R_matrix_x = lambda t: np.round([[np.cos(self.fi_x(t) * np.pi /\n 180.0), -np.sin(self.fi_x(t) * np.pi / 180.0), 0], [np.sin(self\n .fi_x(t) * np.pi / 180.0), np.cos(self.fi_x(t) * np.pi / 180.0),\n 0], [0, 0, 1]], 5)\n self.R_matrix_y = lambda t: np.round([[1, 0, 0], [0, np.cos(self.\n fi_y(t) * np.pi / 180.0), -np.sin(self.fi_y(t) * np.pi / 180.0)\n ], [0, np.sin(self.fi_y(t) * np.pi / 180.0), np.cos(self.fi_y(t\n ) * np.pi / 180.0)]], 5)\n self.R_matrix_z = lambda t: np.round([[np.cos(self.fi_y(t) * np.pi /\n 180.0), 0, np.sin(self.fi_y(t) * np.pi / 180.0)], [0, 1, 0], [-\n np.sin(self.fi_y(t) * np.pi / 180.0), 0, np.cos(self.fi_y(t) *\n np.pi / 180.0)]], 5)\n self.H = np.cos(np.pi / 180.0 * 
self.beta) * np.cos(np.pi / 180.0 *\n self.alpha / 2) * self.L\n self.h = self.L * np.cos(np.pi / 180.0 * self.alpha / 2) * np.sin(\n np.pi / 180.0 * self.beta)\n self.a = self.L * np.sin(np.pi / 180.0 * self.alpha / 2)\n self.r = (self.h ** 2 + self.a ** 2) ** 0.5\n self.end_time = 2.0\n self.start_time = 0.0\n self.steps = 100\n self.time = np.linspace(self.start_time, self.end_time, self.steps)\n self.indexes = [[0, 0], [0, 1], [1, 2], [1, 3], [2, 4], [2, 5]]\n\n def set_B(self):\n \"\"\"\n Расчет геометрии стенда - положение точек B_i.\n\n Задается: self.B\n :return: None\n \"\"\"\n for i, A in enumerate(self.A_0):\n a = A[:2]\n b1 = np.array([self.h, self.a])\n b2 = np.array([self.h, -self.a])\n kappa = np.array([[np.cos(np.pi / 180 * (30 - 120 * i)), -np.\n sin(np.pi / 180 * (30 - 120 * i))], [np.sin(np.pi / 180 * (\n 30 - 120 * i)), np.cos(np.pi / 180 * (30 - 120 * i))]])\n p1 = np.dot(kappa, b1) + a\n p2 = np.dot(kappa, b2) + a\n p1 = np.append(p1, -self.H - self.h_c)\n p2 = np.append(p2, -self.H - self.h_c)\n self.B = np.hstack((self.B, p1))\n self.B = np.hstack((self.B, p2))\n self.B = self.B.reshape(6, 3)\n i = 0\n for A in self.A_0:\n assert np.linalg.norm(np.subtract(A, self.B[i])) - self.L <= 0.0001\n assert np.linalg.norm(np.subtract(A, self.B[i + 1])\n ) - self.L <= 0.0001\n i += 2\n\n def get_delta_L(self):\n \"\"\"\n Расчет геометрии положения точек A_i в каждый момент времени.\n Отрисовка графиков изменения длин, скорости и ускорения для каждого привода по времени.\n\n Задается: self.A, self.all_full_lengths, self.set_r\n :return: None\n \"\"\"\n print('####################################################')\n print('[INFO] solve delta L, Velocity, Acceleration ...')\n print('####################################################')\n R_matrix = None\n if self.axis == 'x':\n R_matrix = self.R_matrix_x\n elif self.axis == 'y':\n R_matrix = self.R_matrix_y\n elif self.axis == 'z':\n R_matrix = self.R_matrix_z\n dL_all = []\n L_all = []\n coordinates_A 
= []\n colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):\n 'b+--', (5): 'bx-'}\n for i, j in self.indexes:\n print('[INFO] Поршень №{}'.format(j + 1))\n dl = []\n l = []\n coord = []\n for t in self.time:\n try:\n A = np.dot(R_matrix(t), self.A_0[i])\n except Exception:\n print('Type error axis')\n L = np.linalg.norm(self.B[j] - A)\n print(self.B[j] - A)\n print(self.L, L)\n print('dL[мм] = {:.5f}'.format((L - self.L) * 1000.0))\n l.append(L)\n dl.append(round((L - self.L) * 1000.0, 5))\n coord.append(A)\n dL_all.append(dl)\n L_all.append(l)\n coordinates_A.append(coord)\n v = [0.0]\n for k in range(self.steps - 1):\n v.append((dl[k + 1] - dl[k]) / (self.time[k + 1] - self.\n time[k]))\n pylab.figure(1)\n pylab.plot(self.time[5:], v[5:], colors[j])\n print('[INFO] v_max =', np.max(np.abs(v[5:])))\n a = [0.0]\n for k in range(self.steps - 1):\n a.append((v[k + 1] - v[k]) / (self.time[k + 1] - self.time[k]))\n pylab.figure(2)\n pylab.plot(self.time[5:], a[5:], colors[j])\n print('[INFO] a_max =', np.max(np.abs(a[5:])))\n print('****************************************************')\n pylab.figure(1)\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Velocity')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Velocity [mm/s]')\n pylab.grid()\n pylab.figure(2)\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Acceleration')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Acceleration [mm/s^2]')\n pylab.grid()\n pylab.figure(3)\n for i in range(6):\n pylab.plot(self.time, dL_all[i], colors[i])\n pylab.legend(['1 line', '2 line', '3 line', '4 line', '5 line',\n '6 line'], loc=0)\n pylab.title('Delta length')\n pylab.xlabel('Time [s]')\n pylab.ylabel('dL [mm]')\n pylab.grid()\n plt.show()\n self.A = np.array(coordinates_A[0::2])\n self.all_full_lengths = np.array(L_all)\n self.set_r()\n self.plot_3d_lines()\n\n def plot_3d_lines(self):\n \"\"\"\n Покадровая отрисовка 
геометрии стенда в 3D.\n :return: None\n \"\"\"\n pylab.figure(figsize=(12, 10))\n ax = pylab.axes(projection='3d')\n colors = {(0): 'r', (1): 'orange', (2): 'g', (3): 'olive', (4): 'b',\n (5): 'navy'}\n markers = {(0): '^', (1): '^', (2): 'o', (3): 'o', (4): '*', (5): '*'}\n for i, j in self.indexes:\n df_A = pd.Series(data=self.A_0[i], index=['x', 'y', 'z'])\n df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n ax.scatter(x, y, z, c=colors[j], marker=markers[j], s=20.0)\n ax.legend(['1', '2', '3', '4', '5', '6'], loc=0)\n for i, j in self.indexes:\n k = 0\n for a, r in zip(self.A[i], self.r[j]):\n df_A = pd.Series(data=a, index=['x', 'y', 'z'])\n df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])\n df_r = pd.Series(data=r, index=['x', 'y', 'z'])\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n x1 = [df_r.x, 0]\n y1 = [df_r.y, 0]\n z1 = [df_r.z, 0]\n x2 = [df_r.x, df_B.x]\n y2 = [df_r.y, df_B.y]\n z2 = [df_r.z, df_B.z]\n if k % int(self.steps - 1) == 0:\n ax.plot(x, y, z, c=colors[j], marker=markers[j])\n k += 1\n for i in range(0, self.steps, 9):\n a = np.array([self.A[0, i], self.A[1, i], self.A[2, i]])\n df_A = pd.DataFrame(data=a, columns=['x', 'y', 'z'])\n df_A = pd.concat((df_A, df_A.take([0])), axis=0)\n ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='gray')\n df_B = pd.DataFrame(data=self.B, columns=['x', 'y', 'z'])\n df_B = pd.concat((df_B, df_B.take([0])))\n df_A = pd.DataFrame(data=self.A_0, columns=['x', 'y', 'z'])\n df_A = pd.concat((df_A, df_A.take([0])), axis=0)\n ax.plot(df_B.x.values, df_B.y.values, df_B.z.values, c='black',\n linewidth=4.0)\n ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='black',\n linewidth=4.0)\n ax.view_init(30, -39)\n plt.show()\n\n def calculate_angles(self, l1, l2):\n \"\"\"\n Решение теоремы косинусов для поиска угла по трем сторонам\n :param l1: прилежащая сторона к вычисляемому углу\n :param l2: 
противолежащая сторона к углу\n :return: (alpha, teta, gamma) - углы в треугольнике сил\n \"\"\"\n cos_teta = (l1 ** 2 + (2 * self.a) ** 2 - l2 ** 2\n ) / 2 * l1 * 2 * self.a\n teta = np.arccos(cos_teta) * 180.0 / np.pi\n b = l1 ** 2 + self.a ** 2 - 2 * l1 * self.a * cos_teta\n cos_alpha = (l1 ** 2 + b ** 2 - l2 ** 2) / 2 * l1 * self.a\n alpha = np.arccos(cos_alpha) * 180.0 / np.pi\n gamma = 180.0 - teta - alpha\n return alpha, teta, gamma\n\n def set_r(self):\n \"\"\"\n Вычисление радиус-векторов плеч сил для каждого цилиндра\n :return: None\n \"\"\"\n r_all = []\n for i, j in self.indexes:\n r = []\n for a in self.A[i]:\n L = np.array(a - self.B[j])\n direct_L = L / np.linalg.norm(L)\n t1 = np.array([0, -direct_L[2], direct_L[1]])\n b1 = np.array([a[2] * direct_L[1] - a[1] * direct_L[2]])\n t2 = direct_L\n b2 = np.array([0])\n t3 = np.array([a[1] * direct_L[2] - a[2] * direct_L[1], -a[\n 0] * direct_L[2] + a[2] * direct_L[0], a[0] * direct_L[\n 1] - a[1] * direct_L[0]])\n b3 = np.array([0])\n T = np.stack((t1, t2, t3))\n b = np.stack((b1, b2, b3))\n r.append(np.linalg.solve(T, b).reshape((3,)))\n r_all.append(r)\n self.r = np.array(r_all)\n\n def solve_dynamic_forces(self):\n \"\"\"\n решение обратной задачи стенда\n Первое приближение - решение двумерной зазадчи для пооврота оси вокруг оси х\n :return: минимальная и максимальная нагрузка на каждый цилиндр\n \"\"\"\n print('####################################################')\n print('[INFO] solve DYNAMIC forces ...')\n print('####################################################')\n A = []\n for i in range(self.steps):\n a = []\n for j in range(3):\n a_ = self.A[j, i, :]\n a.append(a_)\n A.append(a)\n R = []\n for i in range(self.steps):\n r = []\n for j in range(6):\n r_ = self.r[j, i, :]\n r.append(r_)\n R.append(r)\n A = np.array(A)\n R = np.array(R)\n forces = []\n for a, r, t in zip(A, R, self.time):\n L = []\n direct = []\n shoulder = []\n for i, j in self.indexes:\n len = np.array(self.B[j] - a[i])\n dir 
= len / np.linalg.norm(len)\n L.append(len)\n direct_force_try = self.B[j] - r[j]\n direct.append(direct_force_try)\n shoulder.append(np.cross(r[j], direct_force_try))\n L = np.array(L)\n T_static = np.array(direct).T\n T_dynamics = np.array(shoulder).T\n b_static = np.array([-self.m * 9.8, 0, 0]).reshape((3, 1))\n dynamic_comp = None\n if self.axis == 'x':\n comp = self.J[2, 2] * self.prime2_fi_x(t)\n dynamic_comp = np.array([comp, 0, 0]).reshape((3, 1))\n elif self.axis == 'y':\n comp = self.J[1, 1] * self.prime2_fi_y(t)\n dynamic_comp = np.array([0, comp, 0]).reshape((3, 1))\n elif self.axis == 'z':\n comp = self.J[0, 0] * self.prime2_fi_y(t)\n dynamic_comp = np.array([0, 0, comp]).reshape((3, 1))\n b_dynamic = dynamic_comp\n T = T_dynamics[:, :3]\n b = b_dynamic[:, :3]\n dynamic_f = np.linalg.solve(T, b).reshape((3,))\n forces.append(dynamic_f)\n print('[INFO] time:', t)\n print('[INFO] length:', [round(np.linalg.norm(l), 4) for l in L])\n print('[INFO] shoulders:', [round(np.linalg.norm(l), 4) for l in\n np.array(shoulder)])\n print('[INFO] forces:', [round(f, 4) for f in dynamic_f])\n print('[INFO] dynamic component:', b_dynamic.T)\n print('****************************************************')\n forces = np.array(forces).T\n colors = {(0): 'r+--', (1): 'rx-', (2): 'g+--', (3): 'gx-', (4):\n 'b+--', (5): 'bx-'}\n for i, j in self.indexes:\n pylab.plot(self.time, forces[i], colors[j])\n pylab.legend(['$F_1$', '$F_2$', '$F_3$', '$F_4$', '$F_5$', '$F_6$'],\n loc=0)\n pylab.title('Dynamic forces')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Force [kg*m/s^2]')\n pylab.grid()\n plt.show()\n\n def solve_static_forces(self):\n \"\"\"\n Решение обратной задачи стедна для статических нагрузок\n :return: компоненты силы для каждой опоры\n \"\"\"\n print('####################################################')\n print('[INFO] solve STATIC forces ...')\n print('####################################################')\n x_symmetry_ind = [[0, 0], [0, 1], [1, 2]]\n forces = []\n 
for a, t in zip(self.A.reshape((self.steps, 3, 3)), self.time):\n L1 = np.array(a[0] - self.B[0])\n L2 = np.array(a[0] - self.B[1])\n L3 = np.array(a[1] - self.B[2])\n direct_L1 = L1 / np.linalg.norm(L1)\n direct_L2 = L2 / np.linalg.norm(L2)\n direct_L3 = L3 / np.linalg.norm(L3)\n T = np.stack((direct_L1, direct_L2, direct_L3))\n b = np.array([-self.m * 9.8 / 2, 0, 0]).reshape((3, 1))\n static_f = np.linalg.solve(T, b).reshape((3,)) / 2\n forces.append(static_f)\n print('[INFO] time:', t)\n print('[INFO] length:', round(np.linalg.norm(L1), 4), round(np.\n linalg.norm(L2), 4), round(np.linalg.norm(L3), 4))\n print('[INFO] forces:', round(static_f[0] / 2, 4), round(\n static_f[1] / 2, 4), round(static_f[2] / 2, 4))\n print('****************************************************')\n forces = np.array(forces).T\n colors = {(0): 'r+--', (1): 'g+--', (2): 'b+--'}\n for i, j in x_symmetry_ind:\n pylab.plot(self.time, forces[j], colors[j])\n pylab.legend(['$F_1$', '$F_2$', '$F_3$'], loc=0)\n pylab.title('Static forces')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Force [kg*m/s^2]')\n pylab.grid()\n plt.show()\n\n def plot_animate(self, A):\n \"\"\"\"\n try to create animate function to plot mechanisms\n \"\"\"\n fig = plt.figure()\n fig.set_tight_layout(False)\n ax = plt.axes(projection='3d')\n global cnt\n cnt = ax\n global cur_A\n global cur_B\n cur_A = A[0]\n cur_B = self.B[0]\n\n def steps(count=1):\n for i in range(count):\n df_A = pd.Series(data=cur_A[i], index=['x', 'y', 'z'])\n df_B = pd.Series(data=cur_B, index=['x', 'y', 'z'])\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n cnt.plot(x, y, z)\n\n def animate(frame):\n steps(1)\n return cnt\n anim = animation.FuncAnimation(fig, animate, frames=100)\n plt.show()\n\n\nif __name__ == '__main__':\n hex = Hexapod(axis='y')\n hex.set_B()\n hex.get_delta_L()\n hex.solve_static_forces()\n hex.solve_dynamic_forces()\n",
"step-5": "import numpy as np\nimport matplotlib.pyplot as plt\nimport pandas as pd\nimport matplotlib.animation as animation\nimport pylab\nfrom mpl_toolkits import mplot3d\nfrom mpl_toolkits.mplot3d import Axes3D\n\n\nclass Hexapod:\n def __init__(self, axis):\n \"\"\"\n Инициализация начальных параметров системы\n :param axis: ось вращения системы (x, y, z)\n \"\"\"\n self.axis = axis # ось вращения тела\n self.alpha = 30. # угол между приводами в точке крпления\n self.beta = 30. # двугранный угол между приводами и платформой\n self.L = 1.5 # длина привода\n self.h_c = 2. # высота центра масс тела\n self.r = 1. # радиус тела\n self.m_p = 1000. # масса платформы\n self.m = 4000. # масса тела\n self.nu = 0.5 # частота\n\n # тензор инерции тела для решения обратной задачи\n self.J = np.array([[5000, 0, 0],\n [0, 5000, 0],\n [0, 0, 3500]], np.float32)\n\n # начальное положение точек крепления приводов на ВЕРХНЕЙ платформе\n self.A_0 = np.round([[self.r*np.sin(2*np.pi/3*i + np.pi),\n self.r*np.cos(2*np.pi/3*i + np.pi),\n -self.h_c] for i in range(-1, 2)], 5)\n\n # точки крепления приводов на НИЖНЕЙ платформе (const)\n self.B = np.array([])\n # положение точек крепления приводов на ВЕРХНЕЙ платформе за все время\n self.A = np.array([])\n # длина каждого привода за все время\n self.all_full_lengths = np.array([])\n # плечи сил приводов за все время\n self.r = np.array([])\n\n # ампилитуда вращения, закон изменения угла и его производные по OX\n self.fi_x_0 = 4. # градусы\n self.fi_x = lambda t: self.fi_x_0 * np.sin(2*np.pi*self.nu*t)\n self.prime_fi_x = lambda t: self.fi_x_0 * 2*np.pi*self.nu * np.cos(2*np.pi*self.nu*t)\n self.prime2_fi_x = lambda t: -self.fi_x_0 * (2*np.pi*self.nu)**2 * np.sin(2*np.pi*self.nu*t)\n\n # ампилитуда вращения, закон изменения угла и его производные по OY и OZ\n self.fi_y_0 = 4. 
# градусы\n self.fi_y = lambda t: self.fi_y_0 * np.sin(2*np.pi*self.nu*t)\n self.prime_fi_y = lambda t: self.fi_y_0 * 2*np.pi*self.nu * np.cos(2*np.pi*self.nu*t)\n self.prime2_fi_y = lambda t: -self.fi_y_0 * (2*np.pi*self.nu)**2 * np.sin(2*np.pi*self.nu*t)\n\n # матрица поворота вокруг оси OX\n self.R_matrix_x = lambda t: np.round([[np.cos(self.fi_x(t)*np.pi/180.), -np.sin(self.fi_x(t)*np.pi/180.), 0],\n [np.sin(self.fi_x(t)*np.pi/180.), np.cos(self.fi_x(t)*np.pi/180.), 0],\n [0, 0, 1]], 5)\n\n # матрица поворота вокруг оси OY\n self.R_matrix_y = lambda t: np.round([[1, 0, 0],\n [0, np.cos(self.fi_y(t)*np.pi/180.), -np.sin(self.fi_y(t)*np.pi/180.)],\n [0, np.sin(self.fi_y(t)*np.pi/180.), np.cos(self.fi_y(t)*np.pi/180.)]], 5)\n\n # матрица поворота вокруг оси OZ\n self.R_matrix_z = lambda t: np.round([[np.cos(self.fi_y(t)*np.pi/180.), 0, np.sin(self.fi_y(t)*np.pi/180.)],\n [0, 1, 0],\n [-np.sin(self.fi_y(t)*np.pi/180.), 0, np.cos(self.fi_y(t)*np.pi/180.)]], 5)\n\n # для построения геометрии точек B\n self.H = np.cos(np.pi/180. * self.beta) * np.cos(np.pi/180. 
* self.alpha/2) * self.L\n self.h = self.L * np.cos(np.pi/180.*self.alpha/2) * np.sin(np.pi/180.*self.beta)\n self.a = self.L * np.sin(np.pi/180.*self.alpha/2) # основание треугольника\n self.r = (self.h**2 + self.a**2)**0.5\n\n # отсчет времени для расчета законов\n self.end_time = 2.0\n self.start_time = 0.\n self.steps = 100\n self.time = np.linspace(self.start_time, self.end_time, self.steps)\n\n # связь индексов нижней и верхней платформы\n self.indexes = [[0, 0], [0, 1], [1, 2], [1, 3], [2, 4], [2, 5]]\n\n def set_B(self):\n \"\"\"\n Расчет геометрии стенда - положение точек B_i.\n\n Задается: self.B\n :return: None\n \"\"\"\n for i, A in enumerate(self.A_0):\n a = A[:2]\n b1 = np.array([self.h, self.a])\n b2 = np.array([self.h, - self.a])\n\n kappa = np.array([[np.cos(np.pi / 180 * (30-120*i)), -np.sin(np.pi / 180 * (30-120*i))],\n [np.sin(np.pi / 180 * (30-120*i)), np.cos(np.pi / 180 * (30-120*i))]])\n\n p1 = np.dot(kappa, b1) + a\n p2 = np.dot(kappa, b2) + a\n p1 = np.append(p1, - self.H - self.h_c)\n p2 = np.append(p2, - self.H - self.h_c)\n self.B = np.hstack((self.B, p1))\n self.B = np.hstack((self.B, p2))\n\n self.B = self.B.reshape(6, 3)\n\n # проверка длин приводов\n i = 0\n for A in self.A_0:\n assert np.linalg.norm(np.subtract(A, self.B[i])) - self.L <= 1e-4\n assert np.linalg.norm(np.subtract(A, self.B[i + 1])) - self.L <= 1e-4\n # print(np.linalg.norm(np.subtract(A, self.B[i])))\n # print(np.linalg.norm(np.subtract(A, self.B[i + 1])))\n i += 2\n\n def get_delta_L(self):\n \"\"\"\n Расчет геометрии положения точек A_i в каждый момент времени.\n Отрисовка графиков изменения длин, скорости и ускорения для каждого привода по времени.\n\n Задается: self.A, self.all_full_lengths, self.set_r\n :return: None\n \"\"\"\n print('####################################################')\n print('[INFO] solve delta L, Velocity, Acceleration ...')\n print('####################################################')\n # матрица поворота вокруг зазадной оси\n R_matrix = 
None\n if self.axis == 'x':\n R_matrix = self.R_matrix_x\n elif self.axis == 'y':\n R_matrix = self.R_matrix_y\n elif self.axis == 'z':\n R_matrix = self.R_matrix_z\n\n # удлинения каждого цилиндра за заданное время\n dL_all = []\n # длины всех цилиндров за все время\n L_all = []\n # координаты точек крепления на ВЕРХНЕЙ платформе\n coordinates_A = []\n # легенда для графиков\n colors = {0: 'r+--', 1: 'rx-',\n 2: 'g+--', 3: 'gx-',\n 4: 'b+--', 5: 'bx-'}\n\n for i, j in self.indexes:\n print('[INFO] Поршень №{}'.format(j+1))\n dl = [] # изменение длины поршня в момент времени t\n l = [] # длины поршня в момент времени t\n coord = [] # координата точки A_i в момент времени t\n for t in self.time:\n try:\n A = np.dot(R_matrix(t), self.A_0[i])\n except Exception:\n print('Type error axis')\n\n # текущая длина привода\n L = np.linalg.norm(self.B[j] - A)\n print(self.B[j] - A)\n print(self.L, L)\n # L = np.sum((A - self.B[j])**2)**0.5\n print('dL[мм] = {:.5f}'.format((L - self.L) * 1e3))\n l.append(L)\n dl.append(round(((L - self.L) * 1e3), 5))\n coord.append(A)\n\n dL_all.append(dl)\n L_all.append(l)\n coordinates_A.append(coord)\n\n # численно находим СКОРОСТЬ изменения длины приводов\n v = [0.0]\n for k in range(self.steps - 1):\n v.append((dl[k+1] - dl[k]) / (self.time[k+1] - self.time[k]))\n pylab.figure(1)\n pylab.plot(self.time[5:], v[5:], colors[j])\n print('[INFO] v_max =', np.max(np.abs(v[5:])))\n\n # численно находим УСКОРЕНИЕ изменения длины приводов\n a = [0.0]\n for k in range(self.steps - 1):\n a.append((v[k + 1] - v[k]) / (self.time[k + 1] - self.time[k]))\n pylab.figure(2)\n pylab.plot(self.time[5:], a[5:], colors[j])\n print('[INFO] a_max =', np.max(np.abs(a[5:])))\n print('****************************************************')\n\n # легенда для графика со скоростями\n pylab.figure(1)\n pylab.legend([r'1 line', '2 line', '3 line', '4 line', '5 line', '6 line'], loc=0)\n pylab.title('Velocity')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Velocity 
[mm/s]')\n pylab.grid()\n # plt.savefig(\"output/velocity_{}.png\".format(self.axis))\n\n # легенда для графика с ускорениями\n pylab.figure(2)\n pylab.legend([r'1 line', '2 line', '3 line', '4 line', '5 line', '6 line'], loc=0)\n pylab.title('Acceleration')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Acceleration [mm/s^2]')\n pylab.grid()\n # pylab.savefig(\"output/acceleration_{}.png\".format(self.axis))\n\n # график удлинения каждого поршня\n pylab.figure(3)\n for i in range(6):\n pylab.plot(self.time, dL_all[i], colors[i])\n pylab.legend([r'1 line', '2 line', '3 line', '4 line', '5 line', '6 line'], loc=0)\n pylab.title('Delta length')\n pylab.xlabel('Time [s]')\n pylab.ylabel('dL [mm]')\n pylab.grid()\n # pylab.savefig(\"output/length_{}.png\".format(self.axis))\n plt.show()\n\n # исключим повторение вершин\n self.A = np.array(coordinates_A[0::2])\n self.all_full_lengths = np.array(L_all)\n self.set_r()\n # покадровая отрисовка геометрии стенда\n self.plot_3d_lines()\n\n # self.plot_animate(coordinates_A)\n\n def plot_3d_lines(self):\n \"\"\"\n Покадровая отрисовка геометрии стенда в 3D.\n :return: None\n \"\"\"\n pylab.figure(figsize=(12, 10))\n ax = pylab.axes(projection='3d')\n\n colors = {0: 'r', 1: 'orange',\n 2: 'g', 3: 'olive',\n 4: 'b', 5: 'navy'}\n markers = {0: '^', 1: '^',\n 2: 'o', 3: 'o',\n 4: '*', 5: '*'}\n\n # задать легенду\n for i, j in self.indexes:\n df_A = pd.Series(data=self.A_0[i], index=['x', 'y', 'z'])\n df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])\n\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n ax.scatter(x, y, z, c=colors[j], marker=markers[j], s=20.)\n ax.legend([r'1', '2', '3', '4', '5', '6'], loc=0)\n\n # indexes = [[0, 0], [1, 2], [2, 4]]\n # построить смещения каждого поршня\n for i, j in self.indexes:\n k = 0\n for (a, r) in zip(self.A[i], self.r[j]):\n df_A = pd.Series(data=a, index=['x', 'y', 'z'])\n df_B = pd.Series(data=self.B[j], index=['x', 'y', 'z'])\n df_r = pd.Series(data=r, 
index=['x', 'y', 'z'])\n\n # геометрия длины цилиндров\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n\n # геометрия плеч сил\n x1 = [df_r.x, 0]\n y1 = [df_r.y, 0]\n z1 = [df_r.z, 0]\n\n # продолжение оси цилиндров\n x2 = [df_r.x, df_B.x]\n y2 = [df_r.y, df_B.y]\n z2 = [df_r.z, df_B.z]\n\n # частичная раскадровка\n if k % int(self.steps-1) == 0:\n # if k:\n # ax.plot(x1, y1, z1, c=colors[j], marker=markers[j])\n # ax.plot(x2, y2, z2, c='gray', marker='+')\n ax.plot(x, y, z, c=colors[j], marker=markers[j])\n # print('H_A =', z[0])\n k += 1\n\n # посторить смещение верхней плтаформы\n for i in range(0, self.steps, 9):\n a = np.array([self.A[0, i], self.A[1, i], self.A[2, i]])\n df_A = pd.DataFrame(data=a, columns=['x', 'y', 'z'])\n df_A = pd.concat((df_A, df_A.take([0])), axis=0)\n\n ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='gray')\n\n # отрисовать начальные положения верхней и нижней платформы\n df_B = pd.DataFrame(data=self.B, columns=['x', 'y', 'z'])\n df_B = pd.concat((df_B, df_B.take([0])))\n df_A = pd.DataFrame(data=self.A_0, columns=['x', 'y', 'z'])\n df_A = pd.concat((df_A, df_A.take([0])), axis=0)\n\n ax.plot(df_B.x.values, df_B.y.values, df_B.z.values, c='black', linewidth=4.)\n ax.plot(df_A.x.values, df_A.y.values, df_A.z.values, c='black', linewidth=4.)\n\n ax.view_init(30, -39)\n # pylab.savefig(\"output/plot_3d_{}.png\".format(self.axis))\n plt.show()\n\n def calculate_angles(self, l1, l2):\n \"\"\"\n Решение теоремы косинусов для поиска угла по трем сторонам\n :param l1: прилежащая сторона к вычисляемому углу\n :param l2: противолежащая сторона к углу\n :return: (alpha, teta, gamma) - углы в треугольнике сил\n \"\"\"\n cos_teta = (l1**2 + (2*self.a)**2 - l2**2) / 2*l1*2*self.a\n teta = np.arccos(cos_teta) * 180. / np.pi\n b = l1**2 + self.a**2 - 2*l1*self.a*cos_teta\n cos_alpha = (l1**2 + b**2 - l2**2) / 2*l1*self.a\n alpha = np.arccos(cos_alpha) * 180. / np.pi\n gamma = 180. 
- teta - alpha\n return alpha, teta, gamma\n\n def set_r(self):\n \"\"\"\n Вычисление радиус-векторов плеч сил для каждого цилиндра\n :return: None\n \"\"\"\n r_all = []\n for i, j in self.indexes:\n r = []\n for a in self.A[i]:\n L = np.array(a - self.B[j])\n direct_L = L / np.linalg.norm(L)\n t1 = np.array([0, -direct_L[2], direct_L[1]])\n b1 = np.array([a[2]*direct_L[1] - a[1]*direct_L[2]])\n t2 = direct_L\n b2 = np.array([0])\n t3 = np.array([a[1]*direct_L[2] - a[2]*direct_L[1],\n -a[0]*direct_L[2] + a[2]*direct_L[0],\n a[0]*direct_L[1] - a[1]*direct_L[0]])\n b3 = np.array([0])\n T = np.stack((t1, t2, t3))\n b = np.stack((b1, b2, b3))\n\n r.append(np.linalg.solve(T, b).reshape((3,)))\n r_all.append(r)\n\n self.r = np.array(r_all)\n\n def solve_dynamic_forces(self):\n \"\"\"\n решение обратной задачи стенда\n Первое приближение - решение двумерной зазадчи для пооврота оси вокруг оси х\n :return: минимальная и максимальная нагрузка на каждый цилиндр\n \"\"\"\n print('####################################################')\n print('[INFO] solve DYNAMIC forces ...')\n print('####################################################')\n A = []\n for i in range(self.steps):\n a = []\n for j in range(3):\n a_ = self.A[j, i, :]\n a.append(a_)\n A.append(a)\n\n R = []\n for i in range(self.steps):\n r = []\n for j in range(6):\n r_ = self.r[j, i, :]\n r.append(r_)\n R.append(r)\n\n A = np.array(A)\n R = np.array(R)\n forces = []\n for a, r, t in zip(A, R, self.time):\n L = []\n direct = [] # направления сил\n shoulder = [] # плечи сил\n for i, j in self.indexes:\n len = np.array(self.B[j] - a[i])\n dir = len / np.linalg.norm(len)\n L.append(len)\n direct_force_try = self.B[j] - r[j]\n # direct.append(dir)\n direct.append(direct_force_try)\n # direct.append(direct_force_try)\n shoulder.append(np.cross(r[j], direct_force_try))\n L = np.array(L)\n\n T_static = np.array(direct).T\n T_dynamics = np.array(shoulder).T\n\n b_static = np.array([-self.m*9.8, 0, 0]).reshape((3, 1))\n\n 
# определение направления действующих сил\n dynamic_comp = None\n if self.axis == 'x':\n comp = self.J[2, 2] * self.prime2_fi_x(t)\n dynamic_comp = np.array([comp, 0, 0]).reshape((3, 1))\n elif self.axis == 'y':\n comp = self.J[1, 1] * self.prime2_fi_y(t)\n dynamic_comp = np.array([0, comp, 0]).reshape((3, 1))\n elif self.axis == 'z':\n comp = self.J[0, 0] * self.prime2_fi_y(t)\n dynamic_comp = np.array([0, 0, comp]).reshape((3, 1))\n b_dynamic = dynamic_comp\n\n # T = np.vstack((T_static, T_dynamics))\n # b = np.vstack((b_static, b_dynamic))\n\n T = T_dynamics[:, :3]\n b = b_dynamic[:, :3]\n # print(T)\n # print(b)\n\n dynamic_f = np.linalg.solve(T, b).reshape((3,))\n forces.append(dynamic_f)\n\n print('[INFO] time:', t)\n print('[INFO] length:', [round(np.linalg.norm(l), 4) for l in L])\n print('[INFO] shoulders:', [round(np.linalg.norm(l), 4) for l in np.array(shoulder)])\n print('[INFO] forces:', [round(f, 4) for f in dynamic_f])\n print('[INFO] dynamic component:', b_dynamic.T)\n print('****************************************************')\n forces = np.array(forces).T\n\n # график приложенной силы к цилиндрам от времени\n colors = {0: 'r+--', 1: 'rx-',\n 2: 'g+--', 3: 'gx-',\n 4: 'b+--', 5: 'bx-'}\n for i, j in self.indexes:\n pylab.plot(self.time, forces[i], colors[j])\n # colors = {0: 'r+--', 1: 'g+--', 2: 'b+--'}\n # for i in range(3):\n # pylab.plot(self.time, forces[i], colors[i], label='$F_{}$'.format(i))\n pylab.legend([r'$F_1$', '$F_2$', '$F_3$', '$F_4$', '$F_5$', '$F_6$'], loc=0)\n # plt.legend(loc=\"lower right\")\n pylab.title('Dynamic forces')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Force [kg*m/s^2]')\n pylab.grid()\n\n plt.show()\n\n def solve_static_forces(self):\n \"\"\"\n Решение обратной задачи стедна для статических нагрузок\n :return: компоненты силы для каждой опоры\n \"\"\"\n print('####################################################')\n print('[INFO] solve STATIC forces ...')\n 
print('####################################################')\n x_symmetry_ind = [[0, 0], [0, 1], [1, 2]]\n forces = []\n for a, t in zip(self.A.reshape((self.steps, 3, 3)), self.time):\n L1 = np.array(a[0] - self.B[0])\n L2 = np.array(a[0] - self.B[1])\n L3 = np.array(a[1] - self.B[2])\n\n direct_L1 = L1 / np.linalg.norm(L1)\n direct_L2 = L2 / np.linalg.norm(L2)\n direct_L3 = L3 / np.linalg.norm(L3)\n\n T = np.stack((direct_L1, direct_L2, direct_L3))\n b = np.array([-self.m*9.8/2, 0, 0]).reshape((3, 1))\n\n static_f = np.linalg.solve(T, b).reshape((3,)) / 2\n forces.append(static_f)\n print('[INFO] time:', t)\n print('[INFO] length:',\n round(np.linalg.norm(L1), 4),\n round(np.linalg.norm(L2), 4),\n round(np.linalg.norm(L3), 4))\n print('[INFO] forces:',\n round(static_f[0]/2, 4),\n round(static_f[1]/2, 4),\n round(static_f[2]/2, 4))\n print('****************************************************')\n forces = np.array(forces).T\n\n # график приложенной силы к цилиндрам от времени\n colors = {0: 'r+--', 1: 'g+--', 2: 'b+--'}\n for i, j in x_symmetry_ind:\n pylab.plot(self.time, forces[j], colors[j])\n pylab.legend([r'$F_1$', '$F_2$', '$F_3$'], loc=0)\n pylab.title('Static forces')\n pylab.xlabel('Time [s]')\n pylab.ylabel('Force [kg*m/s^2]')\n pylab.grid()\n\n plt.show()\n\n def plot_animate(self, A):\n \"\"\"\"\n try to create animate function to plot mechanisms\n \"\"\"\n fig = plt.figure()\n fig.set_tight_layout(False)\n ax = plt.axes(projection='3d')\n global cnt\n cnt = ax\n\n global cur_A\n global cur_B\n cur_A = A[0]\n cur_B = self.B[0]\n\n def steps(count=1):\n for i in range(count):\n df_A = pd.Series(data=cur_A[i], index=['x', 'y', 'z'])\n df_B = pd.Series(data=cur_B, index=['x', 'y', 'z'])\n x = [df_A.x, df_B.x]\n y = [df_A.y, df_B.y]\n z = [df_A.z, df_B.z]\n cnt.plot(x, y, z)\n\n def animate(frame):\n steps(1)\n return cnt\n anim = animation.FuncAnimation(fig, animate, frames=100)\n plt.show()\n\n\nif __name__ == \"__main__\":\n hex = 
Hexapod(axis='y')\n\n hex.set_B()\n hex.get_delta_L()\n hex.solve_static_forces()\n hex.solve_dynamic_forces()",
"step-ids": [
6,
9,
10,
11,
13
]
}
|
[
6,
9,
10,
11,
13
] |
<|reserved_special_token_0|>
def setup():
    """Prepare motor B's GPIO pins and create its 1 kHz PWM driver."""
    global pwm_A, pwm_B
    GPIO.setwarnings(False)
    GPIO.setmode(GPIO.BOARD)
    for pin in (Motor_B_EN, Motor_B_Pin1, Motor_B_Pin2):
        GPIO.setup(pin, GPIO.OUT)
    pwm_B = GPIO.PWM(Motor_B_EN, 1000)
def motorStop():
    """Cut power to motor B: both direction pins and the enable pin go low."""
    for pin in (Motor_B_Pin1, Motor_B_Pin2, Motor_B_EN):
        GPIO.output(pin, GPIO.LOW)
def motorStart(status, direction, speed):
    """Drive motor B.

    status: 0 stops the motor, any other value runs it.
    direction: Dir_forward or Dir_backward (other values are ignored).
    speed: PWM duty cycle in percent (0-100).
    """
    global pwm_B
    if status == 0:
        motorStop()
        return
    if direction == Dir_forward:
        GPIO.output(Motor_B_Pin1, GPIO.HIGH)
        GPIO.output(Motor_B_Pin2, GPIO.LOW)
        pwm_B.start(100)
        pwm_B.ChangeDutyCycle(speed)
    elif direction == Dir_backward:
        GPIO.output(Motor_B_Pin1, GPIO.LOW)
        GPIO.output(Motor_B_Pin2, GPIO.HIGH)
        pwm_B.start(0)
        pwm_B.ChangeDutyCycle(speed)
def destroy():
    """Stop the motor and release all GPIO resources."""
    motorStop()
    GPIO.cleanup()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def setup():
    """Configure motor B's GPIO pins and create its 1 kHz PWM driver."""
    global pwm_A, pwm_B  # NOTE(review): pwm_A is declared global but never assigned here
    GPIO.setwarnings(False)
    GPIO.setmode(GPIO.BOARD)  # physical (board) pin numbering
    GPIO.setup(Motor_B_EN, GPIO.OUT)
    GPIO.setup(Motor_B_Pin1, GPIO.OUT)
    GPIO.setup(Motor_B_Pin2, GPIO.OUT)
    pwm_B = GPIO.PWM(Motor_B_EN, 1000)  # 1 kHz PWM on the enable pin
def motorStop():
    """Stop motor B: drive both direction pins and the enable pin low."""
    GPIO.output(Motor_B_Pin1, GPIO.LOW)
    GPIO.output(Motor_B_Pin2, GPIO.LOW)
    GPIO.output(Motor_B_EN, GPIO.LOW)
def motorStart(status, direction, speed):
    """Run motor B.

    status: 0 stops the motor, any other value runs it.
    direction: Dir_forward or Dir_backward (other values are silently ignored).
    speed: PWM duty cycle in percent (0-100).
    """
    global pwm_B
    if status == 0:
        motorStop()
    elif direction == Dir_forward:
        GPIO.output(Motor_B_Pin1, GPIO.HIGH)
        GPIO.output(Motor_B_Pin2, GPIO.LOW)
        pwm_B.start(100)  # NOTE(review): briefly runs at 100% before the duty cycle is set
        pwm_B.ChangeDutyCycle(speed)
    elif direction == Dir_backward:
        GPIO.output(Motor_B_Pin1, GPIO.LOW)
        GPIO.output(Motor_B_Pin2, GPIO.HIGH)
        pwm_B.start(0)  # NOTE(review): starts at 0% - inconsistent with the forward branch
        pwm_B.ChangeDutyCycle(speed)
def destroy():
    """Stop the motor and release all GPIO resources."""
    motorStop()
    GPIO.cleanup()
try:
    pass  # NOTE(review): placeholder body - KeyboardInterrupt can never be raised from `pass`
except KeyboardInterrupt:
    destroy()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
Motor_B_EN = 11  # enable (PWM) pin for motor B, board numbering
Motor_B_Pin1 = 13  # direction pin 1
Motor_B_Pin2 = 12  # direction pin 2
Dir_forward = 0
Dir_backward = 1
pwm_B = 0  # placeholder; replaced by a GPIO.PWM instance in setup()
def setup():
    """Configure motor B's GPIO pins and create its 1 kHz PWM driver."""
    global pwm_A, pwm_B  # NOTE(review): pwm_A is declared global but never assigned here
    GPIO.setwarnings(False)
    GPIO.setmode(GPIO.BOARD)  # physical (board) pin numbering
    GPIO.setup(Motor_B_EN, GPIO.OUT)
    GPIO.setup(Motor_B_Pin1, GPIO.OUT)
    GPIO.setup(Motor_B_Pin2, GPIO.OUT)
    pwm_B = GPIO.PWM(Motor_B_EN, 1000)  # 1 kHz PWM on the enable pin
def motorStop():
    """Stop motor B: drive both direction pins and the enable pin low."""
    GPIO.output(Motor_B_Pin1, GPIO.LOW)
    GPIO.output(Motor_B_Pin2, GPIO.LOW)
    GPIO.output(Motor_B_EN, GPIO.LOW)
def motorStart(status, direction, speed):
    """Run motor B.

    status: 0 stops the motor, any other value runs it.
    direction: Dir_forward or Dir_backward (other values are silently ignored).
    speed: PWM duty cycle in percent (0-100).
    """
    global pwm_B
    if status == 0:
        motorStop()
    elif direction == Dir_forward:
        GPIO.output(Motor_B_Pin1, GPIO.HIGH)
        GPIO.output(Motor_B_Pin2, GPIO.LOW)
        pwm_B.start(100)  # NOTE(review): briefly runs at 100% before the duty cycle is set
        pwm_B.ChangeDutyCycle(speed)
    elif direction == Dir_backward:
        GPIO.output(Motor_B_Pin1, GPIO.LOW)
        GPIO.output(Motor_B_Pin2, GPIO.HIGH)
        pwm_B.start(0)  # NOTE(review): starts at 0% - inconsistent with the forward branch
        pwm_B.ChangeDutyCycle(speed)
def destroy():
    """Stop the motor and release all GPIO resources."""
    motorStop()
    GPIO.cleanup()
try:
    pass  # NOTE(review): placeholder body - KeyboardInterrupt can never be raised from `pass`
except KeyboardInterrupt:
    destroy()
<|reserved_special_token_1|>
import RPi.GPIO as GPIO
import time
Motor_B_EN = 11  # enable (PWM) pin for motor B, board numbering
Motor_B_Pin1 = 13  # direction pin 1
Motor_B_Pin2 = 12  # direction pin 2
Dir_forward = 0
Dir_backward = 1
pwm_B = 0  # placeholder; replaced by a GPIO.PWM instance in setup()
def setup():
    """Configure motor B's GPIO pins and create its 1 kHz PWM driver."""
    global pwm_A, pwm_B  # NOTE(review): pwm_A is declared global but never assigned here
    GPIO.setwarnings(False)
    GPIO.setmode(GPIO.BOARD)  # physical (board) pin numbering
    GPIO.setup(Motor_B_EN, GPIO.OUT)
    GPIO.setup(Motor_B_Pin1, GPIO.OUT)
    GPIO.setup(Motor_B_Pin2, GPIO.OUT)
    pwm_B = GPIO.PWM(Motor_B_EN, 1000)  # 1 kHz PWM on the enable pin
def motorStop():
    """Stop motor B: drive both direction pins and the enable pin low."""
    GPIO.output(Motor_B_Pin1, GPIO.LOW)
    GPIO.output(Motor_B_Pin2, GPIO.LOW)
    GPIO.output(Motor_B_EN, GPIO.LOW)
def motorStart(status, direction, speed):
    """Run motor B.

    status: 0 stops the motor, any other value runs it.
    direction: Dir_forward or Dir_backward (other values are silently ignored).
    speed: PWM duty cycle in percent (0-100).
    """
    global pwm_B
    if status == 0:
        motorStop()
    elif direction == Dir_forward:
        GPIO.output(Motor_B_Pin1, GPIO.HIGH)
        GPIO.output(Motor_B_Pin2, GPIO.LOW)
        pwm_B.start(100)  # NOTE(review): briefly runs at 100% before the duty cycle is set
        pwm_B.ChangeDutyCycle(speed)
    elif direction == Dir_backward:
        GPIO.output(Motor_B_Pin1, GPIO.LOW)
        GPIO.output(Motor_B_Pin2, GPIO.HIGH)
        pwm_B.start(0)  # NOTE(review): starts at 0% - inconsistent with the forward branch
        pwm_B.ChangeDutyCycle(speed)
def destroy():
    """Stop the motor and release all GPIO resources."""
    motorStop()
    GPIO.cleanup()
try:
    pass  # NOTE(review): placeholder body - KeyboardInterrupt can never be raised from `pass`
except KeyboardInterrupt:
    destroy()
<|reserved_special_token_1|>
#!/usr/bin/python3
import RPi.GPIO as GPIO
import time
# motor_EN_A: Pin7 | motor_EN_B: Pin11
# motor_A: Pin8,Pin10 | motor_B: Pin13,Pin12
#Motor_A_EN = 7
Motor_B_EN = 11  # enable (PWM) pin for motor B (board numbering)
#Motor_A_Pin1 = 8
#Motor_A_Pin2 = 10
Motor_B_Pin1 = 13  # direction pin 1
Motor_B_Pin2 = 12  # direction pin 2
Dir_forward = 0
Dir_backward = 1
#pwm_A = 0
pwm_B = 0  # placeholder; replaced by a GPIO.PWM instance in setup()
def setup():#Motor initialization
	"""Configure motor B's GPIO pins and create its 1 kHz PWM driver."""
	global pwm_A, pwm_B
	GPIO.setwarnings(False)
	GPIO.setmode(GPIO.BOARD)
	#GPIO.setup(Motor_A_EN, GPIO.OUT)
	GPIO.setup(Motor_B_EN, GPIO.OUT)
	#GPIO.setup(Motor_A_Pin1, GPIO.OUT)
	#GPIO.setup(Motor_A_Pin2, GPIO.OUT)
	GPIO.setup(Motor_B_Pin1, GPIO.OUT)
	GPIO.setup(Motor_B_Pin2, GPIO.OUT)
	#pwm_A = GPIO.PWM(Motor_A_EN, 1000)
	pwm_B = GPIO.PWM(Motor_B_EN, 1000)
def motorStop():#Motor stops
	"""Stop motor B by driving both direction pins and the enable pin low."""
	#GPIO.output(Motor_A_Pin1, GPIO.LOW)
	#GPIO.output(Motor_A_Pin2, GPIO.LOW)
	GPIO.output(Motor_B_Pin1, GPIO.LOW)
	GPIO.output(Motor_B_Pin2, GPIO.LOW)
	#GPIO.output(Motor_A_EN, GPIO.LOW)
	GPIO.output(Motor_B_EN, GPIO.LOW)
def motorStart(status, direction, speed):#Motor 2 positive and negative rotation
	"""Drive motor B.

	status: 0 stops the motor; any other value runs it.
	direction: Dir_forward or Dir_backward (other values are ignored).
	speed: PWM duty cycle in percent (0-100).
	"""
	global pwm_B
	if status == 0: # stop
		motorStop()
	else:
		if direction == Dir_forward:
			GPIO.output(Motor_B_Pin1, GPIO.HIGH)
			GPIO.output(Motor_B_Pin2, GPIO.LOW)
			# BUGFIX: the old code called start(100) and then
			# ChangeDutyCycle(speed), briefly driving the motor at full
			# power before the requested duty cycle took effect.
			pwm_B.start(speed)
		elif direction == Dir_backward:
			GPIO.output(Motor_B_Pin1, GPIO.LOW)
			GPIO.output(Motor_B_Pin2, GPIO.HIGH)
			# Likewise, start(0) briefly stalled the motor; start directly
			# at the requested duty cycle in both directions.
			pwm_B.start(speed)
def destroy():
	"""Stop the motor and release all GPIO resources."""
	motorStop()
	GPIO.cleanup() # Release resource
try:
	pass  # NOTE(review): placeholder body - KeyboardInterrupt can never be raised from `pass`
except KeyboardInterrupt:
	destroy()
|
flexible
|
{
"blob_id": "7369d5a463b0f41c17d5648739d4730256e611f9",
"index": 9612,
"step-1": "<mask token>\n\n\ndef setup():\n global pwm_A, pwm_B\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BOARD)\n GPIO.setup(Motor_B_EN, GPIO.OUT)\n GPIO.setup(Motor_B_Pin1, GPIO.OUT)\n GPIO.setup(Motor_B_Pin2, GPIO.OUT)\n pwm_B = GPIO.PWM(Motor_B_EN, 1000)\n\n\ndef motorStop():\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n GPIO.output(Motor_B_EN, GPIO.LOW)\n\n\ndef motorStart(status, direction, speed):\n global pwm_B\n if status == 0:\n motorStop()\n elif direction == Dir_forward:\n GPIO.output(Motor_B_Pin1, GPIO.HIGH)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n pwm_B.start(100)\n pwm_B.ChangeDutyCycle(speed)\n elif direction == Dir_backward:\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.HIGH)\n pwm_B.start(0)\n pwm_B.ChangeDutyCycle(speed)\n\n\ndef destroy():\n motorStop()\n GPIO.cleanup()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef setup():\n global pwm_A, pwm_B\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BOARD)\n GPIO.setup(Motor_B_EN, GPIO.OUT)\n GPIO.setup(Motor_B_Pin1, GPIO.OUT)\n GPIO.setup(Motor_B_Pin2, GPIO.OUT)\n pwm_B = GPIO.PWM(Motor_B_EN, 1000)\n\n\ndef motorStop():\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n GPIO.output(Motor_B_EN, GPIO.LOW)\n\n\ndef motorStart(status, direction, speed):\n global pwm_B\n if status == 0:\n motorStop()\n elif direction == Dir_forward:\n GPIO.output(Motor_B_Pin1, GPIO.HIGH)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n pwm_B.start(100)\n pwm_B.ChangeDutyCycle(speed)\n elif direction == Dir_backward:\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.HIGH)\n pwm_B.start(0)\n pwm_B.ChangeDutyCycle(speed)\n\n\ndef destroy():\n motorStop()\n GPIO.cleanup()\n\n\ntry:\n pass\nexcept KeyboardInterrupt:\n destroy()\n",
"step-3": "<mask token>\nMotor_B_EN = 11\nMotor_B_Pin1 = 13\nMotor_B_Pin2 = 12\nDir_forward = 0\nDir_backward = 1\npwm_B = 0\n\n\ndef setup():\n global pwm_A, pwm_B\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BOARD)\n GPIO.setup(Motor_B_EN, GPIO.OUT)\n GPIO.setup(Motor_B_Pin1, GPIO.OUT)\n GPIO.setup(Motor_B_Pin2, GPIO.OUT)\n pwm_B = GPIO.PWM(Motor_B_EN, 1000)\n\n\ndef motorStop():\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n GPIO.output(Motor_B_EN, GPIO.LOW)\n\n\ndef motorStart(status, direction, speed):\n global pwm_B\n if status == 0:\n motorStop()\n elif direction == Dir_forward:\n GPIO.output(Motor_B_Pin1, GPIO.HIGH)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n pwm_B.start(100)\n pwm_B.ChangeDutyCycle(speed)\n elif direction == Dir_backward:\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.HIGH)\n pwm_B.start(0)\n pwm_B.ChangeDutyCycle(speed)\n\n\ndef destroy():\n motorStop()\n GPIO.cleanup()\n\n\ntry:\n pass\nexcept KeyboardInterrupt:\n destroy()\n",
"step-4": "import RPi.GPIO as GPIO\nimport time\nMotor_B_EN = 11\nMotor_B_Pin1 = 13\nMotor_B_Pin2 = 12\nDir_forward = 0\nDir_backward = 1\npwm_B = 0\n\n\ndef setup():\n global pwm_A, pwm_B\n GPIO.setwarnings(False)\n GPIO.setmode(GPIO.BOARD)\n GPIO.setup(Motor_B_EN, GPIO.OUT)\n GPIO.setup(Motor_B_Pin1, GPIO.OUT)\n GPIO.setup(Motor_B_Pin2, GPIO.OUT)\n pwm_B = GPIO.PWM(Motor_B_EN, 1000)\n\n\ndef motorStop():\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n GPIO.output(Motor_B_EN, GPIO.LOW)\n\n\ndef motorStart(status, direction, speed):\n global pwm_B\n if status == 0:\n motorStop()\n elif direction == Dir_forward:\n GPIO.output(Motor_B_Pin1, GPIO.HIGH)\n GPIO.output(Motor_B_Pin2, GPIO.LOW)\n pwm_B.start(100)\n pwm_B.ChangeDutyCycle(speed)\n elif direction == Dir_backward:\n GPIO.output(Motor_B_Pin1, GPIO.LOW)\n GPIO.output(Motor_B_Pin2, GPIO.HIGH)\n pwm_B.start(0)\n pwm_B.ChangeDutyCycle(speed)\n\n\ndef destroy():\n motorStop()\n GPIO.cleanup()\n\n\ntry:\n pass\nexcept KeyboardInterrupt:\n destroy()\n",
"step-5": "#!/usr/bin/python3\n\nimport RPi.GPIO as GPIO\nimport time\n# motor_EN_A: Pin7 | motor_EN_B: Pin11\n# motor_A: Pin8,Pin10 | motor_B: Pin13,Pin12\n\n#Motor_A_EN = 7\nMotor_B_EN = 11\n\n#Motor_A_Pin1 = 8\n#Motor_A_Pin2 = 10\nMotor_B_Pin1 = 13\nMotor_B_Pin2 = 12\n\nDir_forward = 0\nDir_backward = 1\n\n#pwm_A = 0\npwm_B = 0\n\ndef setup():#Motor initialization\n\tglobal pwm_A, pwm_B\n\tGPIO.setwarnings(False)\n\tGPIO.setmode(GPIO.BOARD)\n\t#GPIO.setup(Motor_A_EN, GPIO.OUT)\n\tGPIO.setup(Motor_B_EN, GPIO.OUT)\n\t#GPIO.setup(Motor_A_Pin1, GPIO.OUT)\n\t#GPIO.setup(Motor_A_Pin2, GPIO.OUT)\n\tGPIO.setup(Motor_B_Pin1, GPIO.OUT)\n\tGPIO.setup(Motor_B_Pin2, GPIO.OUT)\n\t#pwm_A = GPIO.PWM(Motor_A_EN, 1000)\n\tpwm_B = GPIO.PWM(Motor_B_EN, 1000)\n\ndef motorStop():#Motor stops\n\t#GPIO.output(Motor_A_Pin1, GPIO.LOW)\n\t#GPIO.output(Motor_A_Pin2, GPIO.LOW)\n\tGPIO.output(Motor_B_Pin1, GPIO.LOW)\n\tGPIO.output(Motor_B_Pin2, GPIO.LOW)\n\t#GPIO.output(Motor_A_EN, GPIO.LOW)\n\tGPIO.output(Motor_B_EN, GPIO.LOW)\n\ndef motorStart(status, direction, speed):#Motor 2 positive and negative rotation\n\tglobal pwm_B\n\tif status == 0: # stop\n\t\tmotorStop()\n\telse:\n\t\tif direction == Dir_forward:\n\t\t\tGPIO.output(Motor_B_Pin1, GPIO.HIGH)\n\t\t\tGPIO.output(Motor_B_Pin2, GPIO.LOW)\n\t\t\tpwm_B.start(100)\n\t\t\tpwm_B.ChangeDutyCycle(speed)\n\t\telif direction == Dir_backward:\n\t\t\tGPIO.output(Motor_B_Pin1, GPIO.LOW)\n\t\t\tGPIO.output(Motor_B_Pin2, GPIO.HIGH)\n\t\t\tpwm_B.start(0)\n\t\t\tpwm_B.ChangeDutyCycle(speed)\n\ndef destroy():\n\tmotorStop()\n\tGPIO.cleanup() # Release resource\n\ntry:\n\tpass\nexcept KeyboardInterrupt:\n\tdestroy()\n\n\n\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from django.apps import AppConfig
class Iapp1Config(AppConfig):
    """Django AppConfig for the ``iapp1`` application."""
    name = 'iapp1'
|
normal
|
{
"blob_id": "c27ca6a8c38f2b96011e3a09da073ccc0e5a1467",
"index": 3386,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Iapp1Config(AppConfig):\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Iapp1Config(AppConfig):\n name = 'iapp1'\n",
"step-4": "from django.apps import AppConfig\n\n\nclass Iapp1Config(AppConfig):\n name = 'iapp1'\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(rsp.text)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Fetch a WeChat (official account) API access token.
rsp = requests.get(
    'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=%s&secret=%s'
    % ('wx27c0e6ef6a7f0716', '6e29e232daf462652f66ee8acc11838b'))
# SECURITY NOTE(review): the WeChat appid and secret are hard-coded above;
# move them to configuration / environment variables and rotate the secret.
print(rsp.text)
<|reserved_special_token_1|>
import requests
# Fetch a WeChat (official account) API access token.
rsp = requests.get(
    'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=%s&secret=%s'
    % ('wx27c0e6ef6a7f0716', '6e29e232daf462652f66ee8acc11838b'))
# SECURITY NOTE(review): the WeChat appid and secret are hard-coded above;
# move them to configuration / environment variables and rotate the secret.
print(rsp.text)
|
flexible
|
{
"blob_id": "d86fe165e378e56650e3b76bf3d0f72e2a50a023",
"index": 5082,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(rsp.text)\n",
"step-3": "<mask token>\nrsp = requests.get(\n 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=%s&secret=%s'\n % ('wx27c0e6ef6a7f0716', '6e29e232daf462652f66ee8acc11838b'))\nprint(rsp.text)\n",
"step-4": "import requests\nrsp = requests.get(\n 'https://api.weixin.qq.com/cgi-bin/token?grant_type=client_credential&appid=%s&secret=%s'\n % ('wx27c0e6ef6a7f0716', '6e29e232daf462652f66ee8acc11838b'))\nprint(rsp.text)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
class DAE(object):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def fast_training(self, sound):
self.core_size = 100
self.batch_size = 1000
self.Epoches = 50
self._main(sound, 100, 1000, 50)
def medium_training(self, sound):
self.core_size = 5
self.batch_size = 500
self.Epoches = 100
self._main(sound, 5, 500, 100)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def test(self, sound):
audio_matrix = sound.get_reshaped_samples()
mean_value = np.mean(audio_matrix)
std_value = np.std(audio_matrix)
audio_matrix = (audio_matrix - mean_value) / std_value
channels = len(audio_matrix)
batches, remainder = self._get_batches(batch_size=self.batch_size,
core_size=self.core_size, data=audio_matrix)
losses = list()
for i in range(len(batches)):
dropout_indicator = np.random.rand()
if dropout_indicator <= 0.2:
losses.append(np.sum(abs(batches[i])))
batches[i] *= 0.0
losses.append(0)
sum_losses = np.sum(np.array(losses).reshape(-1))
test_batches = np.array(batches, np.float64).reshape(channels, -1)
test_batches = np.concatenate((test_batches, remainder), axis=1)
count = audio_matrix.shape
count = count[0] * count[1]
self.origin_loss = sum_losses / float(count)
test_batches = test_batches * std_value + mean_value
test_sound = sound.spawn(test_batches)
self._main(test_sound, self.core_size, self.batch_size, 1, 1.0)
return test_sound, self.new_sound
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DAE(object):
def __init__(self, model_name):
self.model_name = model_name
self.process = 0
self.loss = 0
self.origin_loss = 0
self.core_size = 3
self.batch_size = 600
self.Epoches = 100
<|reserved_special_token_0|>
def fast_training(self, sound):
self.core_size = 100
self.batch_size = 1000
self.Epoches = 50
self._main(sound, 100, 1000, 50)
def medium_training(self, sound):
self.core_size = 5
self.batch_size = 500
self.Epoches = 100
self._main(sound, 5, 500, 100)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def get_current_training_process(self):
return self.process
def test(self, sound):
audio_matrix = sound.get_reshaped_samples()
mean_value = np.mean(audio_matrix)
std_value = np.std(audio_matrix)
audio_matrix = (audio_matrix - mean_value) / std_value
channels = len(audio_matrix)
batches, remainder = self._get_batches(batch_size=self.batch_size,
core_size=self.core_size, data=audio_matrix)
losses = list()
for i in range(len(batches)):
dropout_indicator = np.random.rand()
if dropout_indicator <= 0.2:
losses.append(np.sum(abs(batches[i])))
batches[i] *= 0.0
losses.append(0)
sum_losses = np.sum(np.array(losses).reshape(-1))
test_batches = np.array(batches, np.float64).reshape(channels, -1)
test_batches = np.concatenate((test_batches, remainder), axis=1)
count = audio_matrix.shape
count = count[0] * count[1]
self.origin_loss = sum_losses / float(count)
test_batches = test_batches * std_value + mean_value
test_sound = sound.spawn(test_batches)
self._main(test_sound, self.core_size, self.batch_size, 1, 1.0)
return test_sound, self.new_sound
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DAE(object):
def __init__(self, model_name):
self.model_name = model_name
self.process = 0
self.loss = 0
self.origin_loss = 0
self.core_size = 3
self.batch_size = 600
self.Epoches = 100
def _get_batches(self, batch_size, data, core_size):
assert batch_size % core_size == 0
dim_0 = len(data)
length = len(data[0])
num_batches = length // batch_size
remainder_length = length % batch_size
res = list()
for i in range(num_batches):
res.append(data[:, i * batch_size:(i + 1) * batch_size])
res = [np.array(x, np.float64).reshape(dim_0, batch_size //
core_size, core_size) for x in res]
remainder = data[:, -remainder_length:]
return res, remainder
def fast_training(self, sound):
self.core_size = 100
self.batch_size = 1000
self.Epoches = 50
self._main(sound, 100, 1000, 50)
def medium_training(self, sound):
self.core_size = 5
self.batch_size = 500
self.Epoches = 100
self._main(sound, 5, 500, 100)
def slow_training(self, sound):
self.core_size = 3
self.batch_size = 300
self.Epoches = 100
self._main(sound, 3, 300, 150)
<|reserved_special_token_0|>
def get_current_training_process(self):
return self.process
def test(self, sound):
audio_matrix = sound.get_reshaped_samples()
mean_value = np.mean(audio_matrix)
std_value = np.std(audio_matrix)
audio_matrix = (audio_matrix - mean_value) / std_value
channels = len(audio_matrix)
batches, remainder = self._get_batches(batch_size=self.batch_size,
core_size=self.core_size, data=audio_matrix)
losses = list()
for i in range(len(batches)):
dropout_indicator = np.random.rand()
if dropout_indicator <= 0.2:
losses.append(np.sum(abs(batches[i])))
batches[i] *= 0.0
losses.append(0)
sum_losses = np.sum(np.array(losses).reshape(-1))
test_batches = np.array(batches, np.float64).reshape(channels, -1)
test_batches = np.concatenate((test_batches, remainder), axis=1)
count = audio_matrix.shape
count = count[0] * count[1]
self.origin_loss = sum_losses / float(count)
test_batches = test_batches * std_value + mean_value
test_sound = sound.spawn(test_batches)
self._main(test_sound, self.core_size, self.batch_size, 1, 1.0)
return test_sound, self.new_sound
def _main(self, sound, core_size, batch_size, Epoches, drop_out_rate=0.9):
self.new_sound = None
self.process = 0
self.loss = 0
audio_matrix = sound.get_reshaped_samples()
mean_value = np.mean(audio_matrix)
std_value = np.std(audio_matrix)
audio_matrix = (audio_matrix - mean_value) / std_value
batches, remainder = self._get_batches(batch_size=batch_size,
core_size=core_size, data=audio_matrix)
steps = batch_size // core_size
channels = len(audio_matrix)
best_output = ''
with tf.Session() as sess:
fw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.
BasicLSTMCell(core_size), drop_out_rate)
fw_rnn_cell = tf.contrib.rnn.MultiRNNCell([fw_cell] * 2)
bw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.
BasicLSTMCell(core_size), drop_out_rate)
bw_rnn_cell = tf.contrib.rnn.MultiRNNCell([bw_cell] * 2)
input_data = tf.placeholder(shape=[channels, steps, core_size],
dtype=tf.float64)
in_weights = tf.get_variable(name='in_weight', shape=[steps *
core_size, steps * core_size], dtype=tf.float64)
in_bias = tf.get_variable(name='in_bias', shape=[core_size *
steps], dtype=tf.float64)
hidden_data = tf.tanh(tf.nn.xw_plus_b(tf.reshape(input_data, (
channels, -1)), in_weights, in_bias))
hidden_data_out = tf.reshape(hidden_data, [channels, steps,
core_size])
bi_outputs, last_state = tf.nn.bidirectional_dynamic_rnn(
fw_rnn_cell, bw_rnn_cell, hidden_data_out, dtype=tf.float64)
out_weights = tf.get_variable(name='out_weight', shape=[steps *
core_size * 2, steps * core_size], dtype=tf.float64)
out_bias = tf.get_variable(name='out_bias', shape=[core_size *
steps], dtype=tf.float64)
outputs = tf.nn.xw_plus_b(tf.reshape(tf.concat(bi_outputs, 2),
(channels, -1)), out_weights, out_bias)
loss = tf.reduce_mean(tf.sqrt(tf.squared_difference(tf.reshape(
input_data, (channels, -1)), outputs)))
train = tf.train.AdamOptimizer(0.001).minimize(loss)
saver = tf.train.Saver()
train_loss = 999999999
try:
saver.restore(sess, self.model_name)
print('model restored')
except:
sess.run(tf.global_variables_initializer())
print('restore failed, randomly initialize')
for i in range(Epoches):
loss_temp = 0
outputs_temp = list()
for item in batches:
if drop_out_rate < 1:
epoch_outputs, epoch_loss, _ = sess.run([outputs,
loss, train], feed_dict={input_data: item})
else:
epoch_outputs, epoch_loss = sess.run([outputs, loss
], feed_dict={input_data: item})
loss_temp += epoch_loss
outputs_temp.append(epoch_outputs)
loss_temp /= len(batches)
if i == 0 and drop_out_rate < 1:
self.origin_loss = loss_temp
self.process = i / Epoches
self.loss = loss_temp
if loss_temp < train_loss:
train_loss = loss_temp
if drop_out_rate < 1:
saver.save(sess, self.model_name)
best_output = outputs_temp
best_output = np.array(best_output, np.float64).reshape(channels,
-1)
best_output = np.concatenate((best_output, remainder), axis=1)
best_output = best_output * std_value + mean_value
self.new_sound = sound.spawn(best_output)
audio_matrix = audio_matrix * std_value + mean_value
new_sound = sound.spawn(audio_matrix)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class DAE(object):
    """Denoising autoencoder for audio built on a 2-layer bidirectional
    LSTM (TensorFlow 1.x API).

    The audio sample matrix (channels x length) is standardised, split
    into fixed-size batches, passed through an input projection, the
    bidirectional LSTM stack and an output projection, and trained to
    minimise the RMS reconstruction error.  Weights are saved to /
    restored from ``model_name``.
    """

    def __init__(self, model_name):
        self.model_name = model_name  # checkpoint path for tf.train.Saver
        self.process = 0  # training progress in [0, 1)
        self.loss = 0  # loss of the most recent epoch
        self.origin_loss = 0  # baseline loss (pre-training / corruption level)
        self.core_size = 3  # LSTM cell size (samples per time step)
        self.batch_size = 600  # samples per batch; must be a multiple of core_size
        self.Epoches = 100  # default number of training epochs

    def _get_batches(self, batch_size, data, core_size):
        """Split ``data`` (channels x length) into equal-sized batches.

        Returns ``(batches, remainder)``: each batch has shape
        ``(channels, batch_size // core_size, core_size)`` and
        ``remainder`` holds the trailing columns that do not fill a
        whole batch (possibly zero columns wide).
        """
        assert batch_size % core_size == 0
        dim_0 = len(data)
        length = len(data[0])
        num_batches = length // batch_size
        res = list()
        for i in range(num_batches):
            res.append(data[:, i * batch_size:(i + 1) * batch_size])
        res = [np.array(x, np.float64).reshape(dim_0, batch_size //
            core_size, core_size) for x in res]
        # BUGFIX: the original computed data[:, -remainder_length:]; when
        # length is an exact multiple of batch_size, remainder_length is 0
        # and data[:, -0:] slices the WHOLE matrix, duplicating every
        # sample on reassembly.  Slicing from num_batches * batch_size
        # yields a correctly empty remainder in that case.
        remainder = data[:, num_batches * batch_size:]
        return res, remainder

    def fast_training(self, sound):
        """Quick, coarse training preset (large cells, few epochs)."""
        self.core_size = 100
        self.batch_size = 1000
        self.Epoches = 50
        self._main(sound, 100, 1000, 50)

    def medium_training(self, sound):
        """Balanced training preset."""
        self.core_size = 5
        self.batch_size = 500
        self.Epoches = 100
        self._main(sound, 5, 500, 100)

    def slow_training(self, sound):
        """Slow, fine-grained training preset (small cells, more epochs)."""
        self.core_size = 3
        self.batch_size = 300
        self.Epoches = 100
        # NOTE(review): _main is invoked with 150 epochs although
        # self.Epoches is set to 100 - kept as-is for compatibility.
        self._main(sound, 3, 300, 150)

    def get_train_result_music_file(self):
        """Return the reconstructed sound from the last training run.

        Raises:
            Exception: if no training has been run yet.
        """
        if self.new_sound:
            return self.new_sound
        else:
            raise Exception('You should run training firstly !')

    def get_current_training_process(self):
        """Return training progress as a fraction in [0, 1)."""
        return self.process

    def test(self, sound):
        """Corrupt roughly 20% of the batches of ``sound`` and run one
        reconstruction pass through the trained network.

        Returns ``(corrupted_sound, reconstructed_sound)``.
        """
        audio_matrix = sound.get_reshaped_samples()
        mean_value = np.mean(audio_matrix)
        std_value = np.std(audio_matrix)
        audio_matrix = (audio_matrix - mean_value) / std_value  # standardise
        channels = len(audio_matrix)
        batches, remainder = self._get_batches(batch_size=self.batch_size,
            core_size=self.core_size, data=audio_matrix)
        losses = list()
        for i in range(len(batches)):
            dropout_indicator = np.random.rand()
            if dropout_indicator <= 0.2:
                # Zero out this batch and record how much signal was lost.
                losses.append(np.sum(abs(batches[i])))
                batches[i] *= 0.0
            losses.append(0)
        sum_losses = np.sum(np.array(losses).reshape(-1))
        test_batches = np.array(batches, np.float64).reshape(channels, -1)
        test_batches = np.concatenate((test_batches, remainder), axis=1)
        count = audio_matrix.shape
        count = count[0] * count[1]
        self.origin_loss = sum_losses / float(count)  # mean corruption per sample
        test_batches = test_batches * std_value + mean_value  # de-standardise
        test_sound = sound.spawn(test_batches)
        # One epoch, keep probability 1.0: inference only, no checkpointing.
        self._main(test_sound, self.core_size, self.batch_size, 1, 1.0)
        return test_sound, self.new_sound

    def _main(self, sound, core_size, batch_size, Epoches, drop_out_rate=0.9):
        """Build the network and train (or, with ``drop_out_rate == 1.0``,
        just evaluate) it on ``sound``.

        The best reconstruction across epochs is de-standardised and
        stored in ``self.new_sound``.  Checkpoints are written only while
        training (``drop_out_rate < 1``).
        """
        self.new_sound = None
        self.process = 0
        self.loss = 0
        audio_matrix = sound.get_reshaped_samples()
        mean_value = np.mean(audio_matrix)
        std_value = np.std(audio_matrix)
        audio_matrix = (audio_matrix - mean_value) / std_value
        batches, remainder = self._get_batches(batch_size=batch_size,
            core_size=core_size, data=audio_matrix)
        steps = batch_size // core_size
        channels = len(audio_matrix)
        best_output = ''
        with tf.Session() as sess:
            # 2-layer LSTM stacks for the forward and backward directions,
            # each wrapped with output dropout (keep prob = drop_out_rate).
            fw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.
                BasicLSTMCell(core_size), drop_out_rate)
            fw_rnn_cell = tf.contrib.rnn.MultiRNNCell([fw_cell] * 2)
            bw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.
                BasicLSTMCell(core_size), drop_out_rate)
            bw_rnn_cell = tf.contrib.rnn.MultiRNNCell([bw_cell] * 2)
            input_data = tf.placeholder(shape=[channels, steps, core_size],
                dtype=tf.float64)
            # Dense input projection with tanh activation.
            in_weights = tf.get_variable(name='in_weight', shape=[steps *
                core_size, steps * core_size], dtype=tf.float64)
            in_bias = tf.get_variable(name='in_bias', shape=[core_size *
                steps], dtype=tf.float64)
            hidden_data = tf.tanh(tf.nn.xw_plus_b(tf.reshape(input_data, (
                channels, -1)), in_weights, in_bias))
            hidden_data_out = tf.reshape(hidden_data, [channels, steps,
                core_size])
            bi_outputs, last_state = tf.nn.bidirectional_dynamic_rnn(
                fw_rnn_cell, bw_rnn_cell, hidden_data_out, dtype=tf.float64)
            # Output projection: concatenated fw+bw states back to input size.
            out_weights = tf.get_variable(name='out_weight', shape=[steps *
                core_size * 2, steps * core_size], dtype=tf.float64)
            out_bias = tf.get_variable(name='out_bias', shape=[core_size *
                steps], dtype=tf.float64)
            outputs = tf.nn.xw_plus_b(tf.reshape(tf.concat(bi_outputs, 2),
                (channels, -1)), out_weights, out_bias)
            # RMS reconstruction error against the (clean) input.
            loss = tf.reduce_mean(tf.sqrt(tf.squared_difference(tf.reshape(
                input_data, (channels, -1)), outputs)))
            train = tf.train.AdamOptimizer(0.001).minimize(loss)
            saver = tf.train.Saver()
            train_loss = 999999999
            try:
                saver.restore(sess, self.model_name)
                print('model restored')
            except Exception:  # narrowed from a bare except
                sess.run(tf.global_variables_initializer())
                print('restore failed, randomly initialize')
            for i in range(Epoches):
                loss_temp = 0
                outputs_temp = list()
                for item in batches:
                    if drop_out_rate < 1:
                        # Training mode: also run the optimiser step.
                        epoch_outputs, epoch_loss, _ = sess.run([outputs,
                            loss, train], feed_dict={input_data: item})
                    else:
                        # Evaluation mode: forward pass only.
                        epoch_outputs, epoch_loss = sess.run([outputs, loss
                            ], feed_dict={input_data: item})
                    loss_temp += epoch_loss
                    outputs_temp.append(epoch_outputs)
                loss_temp /= len(batches)
                if i == 0 and drop_out_rate < 1:
                    self.origin_loss = loss_temp
                self.process = i / Epoches
                self.loss = loss_temp
                if loss_temp < train_loss:
                    train_loss = loss_temp
                    if drop_out_rate < 1:
                        saver.save(sess, self.model_name)
                    best_output = outputs_temp
            # Reassemble the best reconstruction and undo standardisation.
            best_output = np.array(best_output, np.float64).reshape(channels,
                -1)
            best_output = np.concatenate((best_output, remainder), axis=1)
            best_output = best_output * std_value + mean_value
            self.new_sound = sound.spawn(best_output)
            audio_matrix = audio_matrix * std_value + mean_value
            new_sound = sound.spawn(audio_matrix)  # NOTE(review): unused local
<|reserved_special_token_1|>
from pydub import AudioSegment
import sys
import tensorflow as tf
import numpy as np
from adwtmk.audio import Audio
from adwtmk.encoder import *
from adwtmk.decoder import *
class DAE(object):
    """Denoising autoencoder for audio built on a 2-layer bidirectional
    LSTM (TensorFlow 1.x API).  Weights are checkpointed to model_name."""
    def __init__(self,model_name):
        """Store the checkpoint path and default training hyperparameters."""
        self.model_name = model_name
        self.process = 0
        self.loss = 0
        self.origin_loss = 0
        self.core_size = 3
        self.batch_size = 600
        self.Epoches = 100
    def _get_batches(self,batch_size,data,core_size):
        """Split data (channels x length) into (batches, remainder)."""
        assert batch_size % core_size == 0
        dim_0 = len(data)
        #print("dim_0:",dim_0)
        length = len(data[0])
        num_batches = length // batch_size
        remainder_length = length % batch_size
        res = list()
        for i in range(num_batches):
            res.append(data[:,i*batch_size:(i+1)*batch_size])
        res = [np.array(x,np.float64).reshape(dim_0,batch_size//core_size,core_size) for x in res]
        # NOTE(review): when length is an exact multiple of batch_size,
        # remainder_length is 0 and data[:, -0:] slices the WHOLE matrix
        # instead of an empty remainder - likely a bug.
        remainder = data[:,-remainder_length:]
        return res,remainder
    #np.set_printoptions(threshold=1e6)
    #def _my_config():
        #core_size = 5
        #batch_size = 500
        #Epoches = 200
    def fast_training(self,sound):
        """Quick, coarse training preset (large cells, few epochs)."""
        self.core_size = 100
        self.batch_size = 1000
        self.Epoches = 50
        self._main(sound,100,1000,50)
    def medium_training(self,sound):
        """Balanced training preset."""
        self.core_size = 5
        self.batch_size = 500
        self.Epoches = 100
        self._main(sound,5,500,100)
    def slow_training(self,sound):
        """Slow, fine-grained preset; note _main runs 150 epochs here."""
        self.core_size = 3
        self.batch_size = 300
        self.Epoches = 100
        self._main(sound,3,300,150)
    def get_train_result_music_file(self):
        """Return the reconstructed sound; raises if training never ran."""
        if (self.new_sound):
            return self.new_sound
        else:
            raise Exception("You should run training firstly !")
    def get_current_training_process(self):
        """Return training progress as a fraction in [0, 1)."""
        return self.process
    def test(self,sound):
        """Corrupt ~20% of the batches and run one reconstruction pass.

        Returns (corrupted_sound, reconstructed_sound)."""
        audio_matrix = sound.get_reshaped_samples()
        #max_value = np.max(audio_matrix)
        #min_value = np.min(audio_matrix)
        #audio_matrix = (audio_matrix-min_value) / (max_value-min_value)
        mean_value = np.mean(audio_matrix)
        std_value = np.std(audio_matrix)
        audio_matrix = (audio_matrix-mean_value) / std_value
        channels = len(audio_matrix)
        batches,remainder = self._get_batches(batch_size=self.batch_size,core_size=self.core_size,data=audio_matrix)
        losses = list()
        for i in range(len(batches)):
            dropout_indicator = np.random.rand()
            if (dropout_indicator <= 0.2):
                # Zero this batch and remember how much signal was dropped.
                losses.append(np.sum(abs(batches[i])))
                batches[i] *= 0.00
            losses.append(0)
        sum_losses = np.sum(np.array(losses).reshape(-1))
        #print("losses:")
        #print(np.array(losses).reshape(-1))
        #print(sum_losses)
        test_batches = np.array(batches,np.float64).reshape(channels,-1)
        test_batches = np.concatenate((test_batches,remainder),axis=1)
        count = audio_matrix.shape
        count = count[0]*count[1]
        self.origin_loss = sum_losses/(float)(count)
        test_batches = test_batches * std_value + mean_value
        test_sound = sound.spawn(test_batches)
        # One epoch, keep probability 1.0: inference only.
        self._main(test_sound,self.core_size,self.batch_size,1,1.0)
        return test_sound,self.new_sound
    def _main(self,sound,core_size,batch_size,Epoches,drop_out_rate=0.9):
        """Build the network and train (or evaluate when drop_out_rate==1.0);
        stores the best reconstruction in self.new_sound."""
        self.new_sound = None
        self.process = 0
        self.loss = 0
        #print(sound.frame_rate,sound.duration_seconds, len(sound.get_array_of_samples()))
        audio_matrix = sound.get_reshaped_samples()
        #max_value = np.max(audio_matrix)
        #min_value = np.min(audio_matrix)
        #audio_matrix = (audio_matrix-min_value) / (max_value-min_value)
        mean_value = np.mean(audio_matrix)
        std_value = np.std(audio_matrix)
        audio_matrix = (audio_matrix-mean_value) / std_value
        batches,remainder = self._get_batches(batch_size=batch_size,core_size=core_size,data=audio_matrix)
        steps = batch_size // core_size
        channels = len(audio_matrix)
        best_output = ""
        with tf.Session() as sess:
            # Forward / backward 2-layer LSTM stacks with output dropout.
            fw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.BasicLSTMCell(core_size),drop_out_rate)
            fw_rnn_cell = tf.contrib.rnn.MultiRNNCell([fw_cell]*2)
            bw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.BasicLSTMCell(core_size),drop_out_rate)
            bw_rnn_cell = tf.contrib.rnn.MultiRNNCell([bw_cell]*2)
            input_data = tf.placeholder(shape=[channels,steps,core_size],dtype=tf.float64)
            # Dense input projection with tanh activation.
            in_weights = tf.get_variable(name="in_weight",shape=[steps*core_size,steps*core_size],dtype=tf.float64)
            in_bias = tf.get_variable(name="in_bias",shape=[core_size*steps],dtype=tf.float64)
            hidden_data = tf.tanh(tf.nn.xw_plus_b(tf.reshape(input_data,(channels,-1)),in_weights,in_bias))
            hidden_data_out = tf.reshape(hidden_data,[channels,steps,core_size])
            bi_outputs,last_state = tf.nn.bidirectional_dynamic_rnn(fw_rnn_cell,bw_rnn_cell,hidden_data_out,dtype=tf.float64)
            # Output projection: concatenated fw+bw states back to input size.
            out_weights = tf.get_variable(name="out_weight",shape=[steps*core_size*2,steps*core_size],dtype=tf.float64)
            out_bias = tf.get_variable(name="out_bias",shape=[core_size*steps],dtype=tf.float64)
            outputs = tf.nn.xw_plus_b(tf.reshape(tf.concat(bi_outputs,2),(channels,-1)),out_weights,out_bias)
            #outputs,last_state = tf.nn.dynamic_rnn(fw_rnn_cell,input_data,dtype=tf.float64)
            # RMS reconstruction error against the (clean) input.
            loss = tf.reduce_mean(tf.sqrt(tf.squared_difference(tf.reshape(input_data,(channels,-1)),outputs)))
            train = tf.train.AdamOptimizer(0.001).minimize(loss)
            saver = tf.train.Saver()
            train_loss = 999999999
            try:
                saver.restore(sess,self.model_name)
                print("model restored")
            # NOTE(review): bare except also swallows KeyboardInterrupt etc.
            except:
                sess.run(tf.global_variables_initializer())
                print("restore failed, randomly initialize")
            for i in range(Epoches):
                loss_temp = 0
                outputs_temp = list()
                for item in batches:
                    if (drop_out_rate < 1):
                        # Training mode: also run the optimiser step.
                        epoch_outputs,epoch_loss,_ = sess.run([outputs,loss,train],feed_dict={
                            input_data:item
                        })
                    else:
                        # Evaluation mode: forward pass only.
                        epoch_outputs,epoch_loss = sess.run([outputs,loss],feed_dict={
                            input_data:item
                        })
                    loss_temp += epoch_loss
                    outputs_temp.append(epoch_outputs)
                loss_temp /= len(batches)
                if (i == 0 and drop_out_rate<1):
                    self.origin_loss = loss_temp
                self.process = i/Epoches
                self.loss = loss_temp
                #print("process:%f,loss:%f" % (i/Epoches,loss_temp))
                if (loss_temp < train_loss):
                    train_loss = loss_temp
                    if (drop_out_rate < 1):
                        saver.save(sess,self.model_name)
                    best_output = outputs_temp
        # Reassemble the best reconstruction and undo standardisation.
        #best_output = best_output.append(remainder)
        best_output = np.array(best_output,np.float64).reshape(channels,-1)
        best_output = np.concatenate((best_output,remainder),axis=1)
        #best_output = best_output.T
        #best_output = best_output.reshape(-1)
        best_output = best_output*std_value+mean_value
        #best_output *= max_value-min_value
        #best_output += min_value
        self.new_sound = sound.spawn(best_output)
        #new_sound.export("test.flac","flac")
        #ex.add_artifact(filename="./test.flac")
        #ex.add_artifact(filename="./rnn_model_key_multirnn_bi_input.ckpt*")
        #audio_matrix = np.array(audio_matrix,np.float64).reshape(channels,-1)
        #audio_matrix = audio_matrix.T
        #audio_matrix = audio_matrix.reshape(-1)
        #audio_matrix = audio_matrix * (max_value-min_value)+min_value
        audio_matrix = audio_matrix * std_value + mean_value
        new_sound = sound.spawn(audio_matrix)  # NOTE(review): unused local
        #new_sound.export("test2.flac","flac")
#sound = Audio.from_file("./mark.flac", format="flac")
#fast_training(sound)
|
flexible
|
{
"blob_id": "6f53702d9265a7fc57d2ec2e47dc35a0bc7a9f87",
"index": 9012,
"step-1": "<mask token>\n\n\nclass DAE(object):\n <mask token>\n <mask token>\n\n def fast_training(self, sound):\n self.core_size = 100\n self.batch_size = 1000\n self.Epoches = 50\n self._main(sound, 100, 1000, 50)\n\n def medium_training(self, sound):\n self.core_size = 5\n self.batch_size = 500\n self.Epoches = 100\n self._main(sound, 5, 500, 100)\n <mask token>\n <mask token>\n <mask token>\n\n def test(self, sound):\n audio_matrix = sound.get_reshaped_samples()\n mean_value = np.mean(audio_matrix)\n std_value = np.std(audio_matrix)\n audio_matrix = (audio_matrix - mean_value) / std_value\n channels = len(audio_matrix)\n batches, remainder = self._get_batches(batch_size=self.batch_size,\n core_size=self.core_size, data=audio_matrix)\n losses = list()\n for i in range(len(batches)):\n dropout_indicator = np.random.rand()\n if dropout_indicator <= 0.2:\n losses.append(np.sum(abs(batches[i])))\n batches[i] *= 0.0\n losses.append(0)\n sum_losses = np.sum(np.array(losses).reshape(-1))\n test_batches = np.array(batches, np.float64).reshape(channels, -1)\n test_batches = np.concatenate((test_batches, remainder), axis=1)\n count = audio_matrix.shape\n count = count[0] * count[1]\n self.origin_loss = sum_losses / float(count)\n test_batches = test_batches * std_value + mean_value\n test_sound = sound.spawn(test_batches)\n self._main(test_sound, self.core_size, self.batch_size, 1, 1.0)\n return test_sound, self.new_sound\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass DAE(object):\n\n def __init__(self, model_name):\n self.model_name = model_name\n self.process = 0\n self.loss = 0\n self.origin_loss = 0\n self.core_size = 3\n self.batch_size = 600\n self.Epoches = 100\n <mask token>\n\n def fast_training(self, sound):\n self.core_size = 100\n self.batch_size = 1000\n self.Epoches = 50\n self._main(sound, 100, 1000, 50)\n\n def medium_training(self, sound):\n self.core_size = 5\n self.batch_size = 500\n self.Epoches = 100\n self._main(sound, 5, 500, 100)\n <mask token>\n <mask token>\n\n def get_current_training_process(self):\n return self.process\n\n def test(self, sound):\n audio_matrix = sound.get_reshaped_samples()\n mean_value = np.mean(audio_matrix)\n std_value = np.std(audio_matrix)\n audio_matrix = (audio_matrix - mean_value) / std_value\n channels = len(audio_matrix)\n batches, remainder = self._get_batches(batch_size=self.batch_size,\n core_size=self.core_size, data=audio_matrix)\n losses = list()\n for i in range(len(batches)):\n dropout_indicator = np.random.rand()\n if dropout_indicator <= 0.2:\n losses.append(np.sum(abs(batches[i])))\n batches[i] *= 0.0\n losses.append(0)\n sum_losses = np.sum(np.array(losses).reshape(-1))\n test_batches = np.array(batches, np.float64).reshape(channels, -1)\n test_batches = np.concatenate((test_batches, remainder), axis=1)\n count = audio_matrix.shape\n count = count[0] * count[1]\n self.origin_loss = sum_losses / float(count)\n test_batches = test_batches * std_value + mean_value\n test_sound = sound.spawn(test_batches)\n self._main(test_sound, self.core_size, self.batch_size, 1, 1.0)\n return test_sound, self.new_sound\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass DAE(object):\n\n def __init__(self, model_name):\n self.model_name = model_name\n self.process = 0\n self.loss = 0\n self.origin_loss = 0\n self.core_size = 3\n self.batch_size = 600\n self.Epoches = 100\n\n def _get_batches(self, batch_size, data, core_size):\n assert batch_size % core_size == 0\n dim_0 = len(data)\n length = len(data[0])\n num_batches = length // batch_size\n remainder_length = length % batch_size\n res = list()\n for i in range(num_batches):\n res.append(data[:, i * batch_size:(i + 1) * batch_size])\n res = [np.array(x, np.float64).reshape(dim_0, batch_size //\n core_size, core_size) for x in res]\n remainder = data[:, -remainder_length:]\n return res, remainder\n\n def fast_training(self, sound):\n self.core_size = 100\n self.batch_size = 1000\n self.Epoches = 50\n self._main(sound, 100, 1000, 50)\n\n def medium_training(self, sound):\n self.core_size = 5\n self.batch_size = 500\n self.Epoches = 100\n self._main(sound, 5, 500, 100)\n\n def slow_training(self, sound):\n self.core_size = 3\n self.batch_size = 300\n self.Epoches = 100\n self._main(sound, 3, 300, 150)\n <mask token>\n\n def get_current_training_process(self):\n return self.process\n\n def test(self, sound):\n audio_matrix = sound.get_reshaped_samples()\n mean_value = np.mean(audio_matrix)\n std_value = np.std(audio_matrix)\n audio_matrix = (audio_matrix - mean_value) / std_value\n channels = len(audio_matrix)\n batches, remainder = self._get_batches(batch_size=self.batch_size,\n core_size=self.core_size, data=audio_matrix)\n losses = list()\n for i in range(len(batches)):\n dropout_indicator = np.random.rand()\n if dropout_indicator <= 0.2:\n losses.append(np.sum(abs(batches[i])))\n batches[i] *= 0.0\n losses.append(0)\n sum_losses = np.sum(np.array(losses).reshape(-1))\n test_batches = np.array(batches, np.float64).reshape(channels, -1)\n test_batches = np.concatenate((test_batches, remainder), axis=1)\n count = audio_matrix.shape\n count = 
count[0] * count[1]\n self.origin_loss = sum_losses / float(count)\n test_batches = test_batches * std_value + mean_value\n test_sound = sound.spawn(test_batches)\n self._main(test_sound, self.core_size, self.batch_size, 1, 1.0)\n return test_sound, self.new_sound\n\n def _main(self, sound, core_size, batch_size, Epoches, drop_out_rate=0.9):\n self.new_sound = None\n self.process = 0\n self.loss = 0\n audio_matrix = sound.get_reshaped_samples()\n mean_value = np.mean(audio_matrix)\n std_value = np.std(audio_matrix)\n audio_matrix = (audio_matrix - mean_value) / std_value\n batches, remainder = self._get_batches(batch_size=batch_size,\n core_size=core_size, data=audio_matrix)\n steps = batch_size // core_size\n channels = len(audio_matrix)\n best_output = ''\n with tf.Session() as sess:\n fw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.\n BasicLSTMCell(core_size), drop_out_rate)\n fw_rnn_cell = tf.contrib.rnn.MultiRNNCell([fw_cell] * 2)\n bw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.\n BasicLSTMCell(core_size), drop_out_rate)\n bw_rnn_cell = tf.contrib.rnn.MultiRNNCell([bw_cell] * 2)\n input_data = tf.placeholder(shape=[channels, steps, core_size],\n dtype=tf.float64)\n in_weights = tf.get_variable(name='in_weight', shape=[steps *\n core_size, steps * core_size], dtype=tf.float64)\n in_bias = tf.get_variable(name='in_bias', shape=[core_size *\n steps], dtype=tf.float64)\n hidden_data = tf.tanh(tf.nn.xw_plus_b(tf.reshape(input_data, (\n channels, -1)), in_weights, in_bias))\n hidden_data_out = tf.reshape(hidden_data, [channels, steps,\n core_size])\n bi_outputs, last_state = tf.nn.bidirectional_dynamic_rnn(\n fw_rnn_cell, bw_rnn_cell, hidden_data_out, dtype=tf.float64)\n out_weights = tf.get_variable(name='out_weight', shape=[steps *\n core_size * 2, steps * core_size], dtype=tf.float64)\n out_bias = tf.get_variable(name='out_bias', shape=[core_size *\n steps], dtype=tf.float64)\n outputs = tf.nn.xw_plus_b(tf.reshape(tf.concat(bi_outputs, 2),\n 
(channels, -1)), out_weights, out_bias)\n loss = tf.reduce_mean(tf.sqrt(tf.squared_difference(tf.reshape(\n input_data, (channels, -1)), outputs)))\n train = tf.train.AdamOptimizer(0.001).minimize(loss)\n saver = tf.train.Saver()\n train_loss = 999999999\n try:\n saver.restore(sess, self.model_name)\n print('model restored')\n except:\n sess.run(tf.global_variables_initializer())\n print('restore failed, randomly initialize')\n for i in range(Epoches):\n loss_temp = 0\n outputs_temp = list()\n for item in batches:\n if drop_out_rate < 1:\n epoch_outputs, epoch_loss, _ = sess.run([outputs,\n loss, train], feed_dict={input_data: item})\n else:\n epoch_outputs, epoch_loss = sess.run([outputs, loss\n ], feed_dict={input_data: item})\n loss_temp += epoch_loss\n outputs_temp.append(epoch_outputs)\n loss_temp /= len(batches)\n if i == 0 and drop_out_rate < 1:\n self.origin_loss = loss_temp\n self.process = i / Epoches\n self.loss = loss_temp\n if loss_temp < train_loss:\n train_loss = loss_temp\n if drop_out_rate < 1:\n saver.save(sess, self.model_name)\n best_output = outputs_temp\n best_output = np.array(best_output, np.float64).reshape(channels,\n -1)\n best_output = np.concatenate((best_output, remainder), axis=1)\n best_output = best_output * std_value + mean_value\n self.new_sound = sound.spawn(best_output)\n audio_matrix = audio_matrix * std_value + mean_value\n new_sound = sound.spawn(audio_matrix)\n",
"step-4": "<mask token>\n\n\nclass DAE(object):\n\n def __init__(self, model_name):\n self.model_name = model_name\n self.process = 0\n self.loss = 0\n self.origin_loss = 0\n self.core_size = 3\n self.batch_size = 600\n self.Epoches = 100\n\n def _get_batches(self, batch_size, data, core_size):\n assert batch_size % core_size == 0\n dim_0 = len(data)\n length = len(data[0])\n num_batches = length // batch_size\n remainder_length = length % batch_size\n res = list()\n for i in range(num_batches):\n res.append(data[:, i * batch_size:(i + 1) * batch_size])\n res = [np.array(x, np.float64).reshape(dim_0, batch_size //\n core_size, core_size) for x in res]\n remainder = data[:, -remainder_length:]\n return res, remainder\n\n def fast_training(self, sound):\n self.core_size = 100\n self.batch_size = 1000\n self.Epoches = 50\n self._main(sound, 100, 1000, 50)\n\n def medium_training(self, sound):\n self.core_size = 5\n self.batch_size = 500\n self.Epoches = 100\n self._main(sound, 5, 500, 100)\n\n def slow_training(self, sound):\n self.core_size = 3\n self.batch_size = 300\n self.Epoches = 100\n self._main(sound, 3, 300, 150)\n\n def get_train_result_music_file(self):\n if self.new_sound:\n return self.new_sound\n else:\n raise Exception('You should run training firstly !')\n\n def get_current_training_process(self):\n return self.process\n\n def test(self, sound):\n audio_matrix = sound.get_reshaped_samples()\n mean_value = np.mean(audio_matrix)\n std_value = np.std(audio_matrix)\n audio_matrix = (audio_matrix - mean_value) / std_value\n channels = len(audio_matrix)\n batches, remainder = self._get_batches(batch_size=self.batch_size,\n core_size=self.core_size, data=audio_matrix)\n losses = list()\n for i in range(len(batches)):\n dropout_indicator = np.random.rand()\n if dropout_indicator <= 0.2:\n losses.append(np.sum(abs(batches[i])))\n batches[i] *= 0.0\n losses.append(0)\n sum_losses = np.sum(np.array(losses).reshape(-1))\n test_batches = np.array(batches, 
np.float64).reshape(channels, -1)\n test_batches = np.concatenate((test_batches, remainder), axis=1)\n count = audio_matrix.shape\n count = count[0] * count[1]\n self.origin_loss = sum_losses / float(count)\n test_batches = test_batches * std_value + mean_value\n test_sound = sound.spawn(test_batches)\n self._main(test_sound, self.core_size, self.batch_size, 1, 1.0)\n return test_sound, self.new_sound\n\n def _main(self, sound, core_size, batch_size, Epoches, drop_out_rate=0.9):\n self.new_sound = None\n self.process = 0\n self.loss = 0\n audio_matrix = sound.get_reshaped_samples()\n mean_value = np.mean(audio_matrix)\n std_value = np.std(audio_matrix)\n audio_matrix = (audio_matrix - mean_value) / std_value\n batches, remainder = self._get_batches(batch_size=batch_size,\n core_size=core_size, data=audio_matrix)\n steps = batch_size // core_size\n channels = len(audio_matrix)\n best_output = ''\n with tf.Session() as sess:\n fw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.\n BasicLSTMCell(core_size), drop_out_rate)\n fw_rnn_cell = tf.contrib.rnn.MultiRNNCell([fw_cell] * 2)\n bw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.\n BasicLSTMCell(core_size), drop_out_rate)\n bw_rnn_cell = tf.contrib.rnn.MultiRNNCell([bw_cell] * 2)\n input_data = tf.placeholder(shape=[channels, steps, core_size],\n dtype=tf.float64)\n in_weights = tf.get_variable(name='in_weight', shape=[steps *\n core_size, steps * core_size], dtype=tf.float64)\n in_bias = tf.get_variable(name='in_bias', shape=[core_size *\n steps], dtype=tf.float64)\n hidden_data = tf.tanh(tf.nn.xw_plus_b(tf.reshape(input_data, (\n channels, -1)), in_weights, in_bias))\n hidden_data_out = tf.reshape(hidden_data, [channels, steps,\n core_size])\n bi_outputs, last_state = tf.nn.bidirectional_dynamic_rnn(\n fw_rnn_cell, bw_rnn_cell, hidden_data_out, dtype=tf.float64)\n out_weights = tf.get_variable(name='out_weight', shape=[steps *\n core_size * 2, steps * core_size], dtype=tf.float64)\n out_bias = 
tf.get_variable(name='out_bias', shape=[core_size *\n steps], dtype=tf.float64)\n outputs = tf.nn.xw_plus_b(tf.reshape(tf.concat(bi_outputs, 2),\n (channels, -1)), out_weights, out_bias)\n loss = tf.reduce_mean(tf.sqrt(tf.squared_difference(tf.reshape(\n input_data, (channels, -1)), outputs)))\n train = tf.train.AdamOptimizer(0.001).minimize(loss)\n saver = tf.train.Saver()\n train_loss = 999999999\n try:\n saver.restore(sess, self.model_name)\n print('model restored')\n except:\n sess.run(tf.global_variables_initializer())\n print('restore failed, randomly initialize')\n for i in range(Epoches):\n loss_temp = 0\n outputs_temp = list()\n for item in batches:\n if drop_out_rate < 1:\n epoch_outputs, epoch_loss, _ = sess.run([outputs,\n loss, train], feed_dict={input_data: item})\n else:\n epoch_outputs, epoch_loss = sess.run([outputs, loss\n ], feed_dict={input_data: item})\n loss_temp += epoch_loss\n outputs_temp.append(epoch_outputs)\n loss_temp /= len(batches)\n if i == 0 and drop_out_rate < 1:\n self.origin_loss = loss_temp\n self.process = i / Epoches\n self.loss = loss_temp\n if loss_temp < train_loss:\n train_loss = loss_temp\n if drop_out_rate < 1:\n saver.save(sess, self.model_name)\n best_output = outputs_temp\n best_output = np.array(best_output, np.float64).reshape(channels,\n -1)\n best_output = np.concatenate((best_output, remainder), axis=1)\n best_output = best_output * std_value + mean_value\n self.new_sound = sound.spawn(best_output)\n audio_matrix = audio_matrix * std_value + mean_value\n new_sound = sound.spawn(audio_matrix)\n",
"step-5": "from pydub import AudioSegment\nimport sys\nimport tensorflow as tf\nimport numpy as np\nfrom adwtmk.audio import Audio\nfrom adwtmk.encoder import *\nfrom adwtmk.decoder import *\nclass DAE(object):\n def __init__(self,model_name):\n self.model_name = model_name\n self.process = 0\n self.loss = 0\n self.origin_loss = 0\n self.core_size = 3\n self.batch_size = 600\n self.Epoches = 100\n\n def _get_batches(self,batch_size,data,core_size):\n assert batch_size % core_size == 0\n dim_0 = len(data)\n #print(\"dim_0:\",dim_0)\n length = len(data[0])\n num_batches = length // batch_size\n remainder_length = length % batch_size\n res = list()\n for i in range(num_batches):\n res.append(data[:,i*batch_size:(i+1)*batch_size])\n res = [np.array(x,np.float64).reshape(dim_0,batch_size//core_size,core_size) for x in res]\n remainder = data[:,-remainder_length:]\n return res,remainder \n\n \n\n\n #np.set_printoptions(threshold=1e6)\n #def _my_config():\n #core_size = 5\n #batch_size = 500\n #Epoches = 200\n\n def fast_training(self,sound):\n self.core_size = 100\n self.batch_size = 1000\n self.Epoches = 50\n self._main(sound,100,1000,50)\n\n def medium_training(self,sound):\n self.core_size = 5\n self.batch_size = 500\n self.Epoches = 100\n self._main(sound,5,500,100)\n\n def slow_training(self,sound):\n self.core_size = 3\n self.batch_size = 300\n self.Epoches = 100\n self._main(sound,3,300,150)\n\n def get_train_result_music_file(self):\n if (self.new_sound):\n return self.new_sound\n else:\n raise Exception(\"You should run training firstly !\")\n\n def get_current_training_process(self):\n return self.process\n\n def test(self,sound):\n audio_matrix = sound.get_reshaped_samples()\n #max_value = np.max(audio_matrix)\n #min_value = np.min(audio_matrix)\n #audio_matrix = (audio_matrix-min_value) / (max_value-min_value)\n mean_value = np.mean(audio_matrix)\n std_value = np.std(audio_matrix)\n audio_matrix = (audio_matrix-mean_value) / std_value\n channels = 
len(audio_matrix)\n batches,remainder = self._get_batches(batch_size=self.batch_size,core_size=self.core_size,data=audio_matrix)\n losses = list()\n for i in range(len(batches)):\n dropout_indicator = np.random.rand()\n if (dropout_indicator <= 0.2):\n losses.append(np.sum(abs(batches[i])))\n batches[i] *= 0.00\n losses.append(0)\n sum_losses = np.sum(np.array(losses).reshape(-1))\n #print(\"losses:\")\n #print(np.array(losses).reshape(-1))\n #print(sum_losses)\n test_batches = np.array(batches,np.float64).reshape(channels,-1)\n test_batches = np.concatenate((test_batches,remainder),axis=1)\n count = audio_matrix.shape\n count = count[0]*count[1]\n self.origin_loss = sum_losses/(float)(count)\n test_batches = test_batches * std_value + mean_value\n test_sound = sound.spawn(test_batches)\n self._main(test_sound,self.core_size,self.batch_size,1,1.0)\n return test_sound,self.new_sound\n\n\n\n def _main(self,sound,core_size,batch_size,Epoches,drop_out_rate=0.9):\n self.new_sound = None\n self.process = 0\n self.loss = 0\n\n #print(sound.frame_rate,sound.duration_seconds, len(sound.get_array_of_samples()))\n\n audio_matrix = sound.get_reshaped_samples()\n #max_value = np.max(audio_matrix)\n #min_value = np.min(audio_matrix)\n #audio_matrix = (audio_matrix-min_value) / (max_value-min_value)\n mean_value = np.mean(audio_matrix)\n std_value = np.std(audio_matrix)\n audio_matrix = (audio_matrix-mean_value) / std_value\n\n\n batches,remainder = self._get_batches(batch_size=batch_size,core_size=core_size,data=audio_matrix)\n\n steps = batch_size // core_size\n channels = len(audio_matrix)\n\n\n best_output = \"\"\n\n with tf.Session() as sess:\n fw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.BasicLSTMCell(core_size),drop_out_rate)\n fw_rnn_cell = tf.contrib.rnn.MultiRNNCell([fw_cell]*2) \n bw_cell = tf.contrib.rnn.DropoutWrapper(tf.contrib.rnn.BasicLSTMCell(core_size),drop_out_rate)\n bw_rnn_cell = tf.contrib.rnn.MultiRNNCell([bw_cell]*2) \n input_data = 
tf.placeholder(shape=[channels,steps,core_size],dtype=tf.float64)\n in_weights = tf.get_variable(name=\"in_weight\",shape=[steps*core_size,steps*core_size],dtype=tf.float64)\n in_bias = tf.get_variable(name=\"in_bias\",shape=[core_size*steps],dtype=tf.float64)\n hidden_data = tf.tanh(tf.nn.xw_plus_b(tf.reshape(input_data,(channels,-1)),in_weights,in_bias))\n hidden_data_out = tf.reshape(hidden_data,[channels,steps,core_size])\n bi_outputs,last_state = tf.nn.bidirectional_dynamic_rnn(fw_rnn_cell,bw_rnn_cell,hidden_data_out,dtype=tf.float64)\n out_weights = tf.get_variable(name=\"out_weight\",shape=[steps*core_size*2,steps*core_size],dtype=tf.float64)\n out_bias = tf.get_variable(name=\"out_bias\",shape=[core_size*steps],dtype=tf.float64)\n outputs = tf.nn.xw_plus_b(tf.reshape(tf.concat(bi_outputs,2),(channels,-1)),out_weights,out_bias)\n #outputs,last_state = tf.nn.dynamic_rnn(fw_rnn_cell,input_data,dtype=tf.float64)\n loss = tf.reduce_mean(tf.sqrt(tf.squared_difference(tf.reshape(input_data,(channels,-1)),outputs)))\n train = tf.train.AdamOptimizer(0.001).minimize(loss)\n saver = tf.train.Saver()\n train_loss = 999999999\n try:\n saver.restore(sess,self.model_name)\n print(\"model restored\")\n except:\n sess.run(tf.global_variables_initializer())\n print(\"restore failed, randomly initialize\")\n for i in range(Epoches):\n loss_temp = 0\n outputs_temp = list()\n for item in batches:\n if (drop_out_rate < 1):\n epoch_outputs,epoch_loss,_ = sess.run([outputs,loss,train],feed_dict={\n input_data:item\n }) \n else:\n epoch_outputs,epoch_loss = sess.run([outputs,loss],feed_dict={\n input_data:item\n }) \n loss_temp += epoch_loss\n outputs_temp.append(epoch_outputs)\n loss_temp /= len(batches)\n if (i == 0 and drop_out_rate<1):\n self.origin_loss = loss_temp\n self.process = i/Epoches\n self.loss = loss_temp\n #print(\"process:%f,loss:%f\" % (i/Epoches,loss_temp))\n if (loss_temp < train_loss):\n train_loss = loss_temp\n if (drop_out_rate < 1):\n 
saver.save(sess,self.model_name)\n best_output = outputs_temp\n #best_output = best_output.append(remainder)\n best_output = np.array(best_output,np.float64).reshape(channels,-1)\n best_output = np.concatenate((best_output,remainder),axis=1)\n #best_output = best_output.T\n #best_output = best_output.reshape(-1)\n best_output = best_output*std_value+mean_value\n #best_output *= max_value-min_value\n #best_output += min_value\n\n self.new_sound = sound.spawn(best_output)\n #new_sound.export(\"test.flac\",\"flac\")\n #ex.add_artifact(filename=\"./test.flac\")\n #ex.add_artifact(filename=\"./rnn_model_key_multirnn_bi_input.ckpt*\")\n #audio_matrix = np.array(audio_matrix,np.float64).reshape(channels,-1)\n #audio_matrix = audio_matrix.T\n #audio_matrix = audio_matrix.reshape(-1)\n #audio_matrix = audio_matrix * (max_value-min_value)+min_value\n audio_matrix = audio_matrix * std_value + mean_value\n new_sound = sound.spawn(audio_matrix)\n #new_sound.export(\"test2.flac\",\"flac\")\n\n #sound = Audio.from_file(\"./mark.flac\", format=\"flac\")\n #fast_training(sound)\n",
"step-ids": [
4,
6,
9,
10,
12
]
}
|
[
4,
6,
9,
10,
12
] |
class ChartType:
    """Namespace of supported chart flavours, exposed as string constants."""

    Vanilla = 'Vanilla'
    Neopolitan = 'Neopolitan'
|
normal
|
{
"blob_id": "451a36eb205a269a05e3b3d89541278633d12aaa",
"index": 9781,
"step-1": "<mask token>\n",
"step-2": "class ChartType:\n <mask token>\n <mask token>\n",
"step-3": "class ChartType:\n Vanilla = 'Vanilla'\n Neopolitan = 'Neopolitan'\n",
"step-4": "\n\nclass ChartType:\n Vanilla = \"Vanilla\"\n Neopolitan = \"Neopolitan\"\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main(config, resume, infile, outfile, sigma, dur, half):
    """Run WaveGlow vocoder inference end to end.

    Loads a trained checkpoint, derives a mel-spectrogram from *infile*,
    synthesizes audio from it, and writes the result to *outfile*.

    Args:
        config: training configuration dict (keys: 'arch', 'n_gpu', ...).
        resume: path to the checkpoint file holding the trained weights.
        infile: wave file used to compute the conditioning mel-spectrogram.
        outfile: path for the synthesized output wave file.
        sigma: sampling temperature forwarded to the flow model.
        dur: max seconds of audio to load, or None for the whole file.
        half: if True, run inference in fp16.
    """
    # Rebuild the network from the architecture description in the config.
    model = get_instance(module_arch, 'arch', config)
    model.summary()

    # Multi-GPU checkpoints were saved under DataParallel, so wrap before
    # loading the weights and unwrap right afterwards.
    ckpt = torch.load(resume)
    weights = ckpt['state_dict']
    multi_gpu = config['n_gpu'] > 1
    if multi_gpu:
        model = torch.nn.DataParallel(model)
    model.load_state_dict(weights)
    if multi_gpu:
        model = model.module
    model.apply(remove_weight_norms)

    # Move to the best available device and switch to inference mode.
    dev = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = model.to(dev)
    model.eval()

    # Compute the conditioning mel-spectrogram from the input audio.
    sample_rate = config['arch']['args']['sr']
    audio, _ = load(infile, sr=sample_rate, duration=dur)
    audio = torch.Tensor(audio).to(dev)
    mel = model.get_mel(audio[None, :])
    # Cast to fp16 only after get_mel — an original-author note elsewhere in
    # this file says the sparse ops it uses have no half implementation.
    if half:
        model = model.half()
        mel = mel.half()

    # Synthesize and report throughput in kHz of generated samples.
    t0 = time()
    waveform = model.infer(mel, sigma)
    elapsed = time() - t0
    print('Time cost: {:.4f}, Speed: {:.4f} kHz'.format(
        elapsed, waveform.numel() / elapsed / 1000))
    write_wav(outfile, waveform.cpu().float().numpy(), sample_rate, False)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def main(config, resume, infile, outfile, sigma, dur, half):
    """Run WaveGlow vocoder inference end to end.

    Loads a trained checkpoint, derives a mel-spectrogram from *infile*,
    synthesizes audio from it, and writes the result to *outfile*.

    Args:
        config: training configuration dict (keys: 'arch', 'n_gpu', ...).
        resume: path to the checkpoint file holding the trained weights.
        infile: wave file used to compute the conditioning mel-spectrogram.
        outfile: path for the synthesized output wave file.
        sigma: sampling temperature forwarded to the flow model.
        dur: max seconds of audio to load, or None for the whole file.
        half: if True, run inference in fp16.
    """
    # Rebuild the network from the architecture description in the config.
    model = get_instance(module_arch, 'arch', config)
    model.summary()

    # Multi-GPU checkpoints were saved under DataParallel, so wrap before
    # loading the weights and unwrap right afterwards.
    ckpt = torch.load(resume)
    weights = ckpt['state_dict']
    multi_gpu = config['n_gpu'] > 1
    if multi_gpu:
        model = torch.nn.DataParallel(model)
    model.load_state_dict(weights)
    if multi_gpu:
        model = model.module
    model.apply(remove_weight_norms)

    # Move to the best available device and switch to inference mode.
    dev = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = model.to(dev)
    model.eval()

    # Compute the conditioning mel-spectrogram from the input audio.
    sample_rate = config['arch']['args']['sr']
    audio, _ = load(infile, sr=sample_rate, duration=dur)
    audio = torch.Tensor(audio).to(dev)
    mel = model.get_mel(audio[None, :])
    # Cast to fp16 only after get_mel — an original-author note elsewhere in
    # this file says the sparse ops it uses have no half implementation.
    if half:
        model = model.half()
        mel = mel.half()

    # Synthesize and report throughput in kHz of generated samples.
    t0 = time()
    waveform = model.infer(mel, sigma)
    elapsed = time() - t0
    print('Time cost: {:.4f}, Speed: {:.4f} kHz'.format(
        elapsed, waveform.numel() / elapsed / 1000))
    write_wav(outfile, waveform.cpu().float().numpy(), sample_rate, False)
if __name__ == '__main__':
    # Command-line entry point: parse arguments, load the training config
    # out of the checkpoint, select GPUs, then run inference.
    parser = argparse.ArgumentParser(description='WaveGlow inference')
    parser.add_argument('infile', type=str, help=
        'wave file to generate mel-spectrogram')
    parser.add_argument('outfile', type=str, help='output file name')
    parser.add_argument('--duration', type=float, help=
        'duration of audio, in seconds')
    parser.add_argument('--half', action='store_true')
    parser.add_argument('-s', '--sigma', type=float, default=1.0)
    parser.add_argument('-r', '--resume', default=None, type=str, help=
        'path to latest checkpoint (default: None)')
    parser.add_argument('-d', '--device', default=None, type=str, help=
        'indices of GPUs to enable (default: all)')
    args = parser.parse_args()
    # The model config only exists inside the checkpoint, so --resume is
    # effectively mandatory; fail with a clear usage error instead of the
    # NameError on `config` the original code hit when it was omitted.
    if not args.resume:
        parser.error('a checkpoint is required: pass -r/--resume PATH')
    config = torch.load(args.resume)['config']
    if args.device:
        os.environ['CUDA_VISIBLE_DEVICES'] = args.device
    main(config, args.resume, args.infile, args.outfile, args.sigma, args.
        duration, args.half)
<|reserved_special_token_1|>
import os
import argparse
import torch
import model.model as module_arch
from utils.util import remove_weight_norms
from train import get_instance
from librosa import load
from librosa.output import write_wav
from time import time
def main(config, resume, infile, outfile, sigma, dur, half):
    """Run WaveGlow vocoder inference end to end.

    Loads a trained checkpoint, derives a mel-spectrogram from *infile*,
    synthesizes audio from it, and writes the result to *outfile*.

    Args:
        config: training configuration dict (keys: 'arch', 'n_gpu', ...).
        resume: path to the checkpoint file holding the trained weights.
        infile: wave file used to compute the conditioning mel-spectrogram.
        outfile: path for the synthesized output wave file.
        sigma: sampling temperature forwarded to the flow model.
        dur: max seconds of audio to load, or None for the whole file.
        half: if True, run inference in fp16.
    """
    # Rebuild the network from the architecture description in the config.
    model = get_instance(module_arch, 'arch', config)
    model.summary()

    # Multi-GPU checkpoints were saved under DataParallel, so wrap before
    # loading the weights and unwrap right afterwards.
    ckpt = torch.load(resume)
    weights = ckpt['state_dict']
    multi_gpu = config['n_gpu'] > 1
    if multi_gpu:
        model = torch.nn.DataParallel(model)
    model.load_state_dict(weights)
    if multi_gpu:
        model = model.module
    model.apply(remove_weight_norms)

    # Move to the best available device and switch to inference mode.
    dev = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = model.to(dev)
    model.eval()

    # Compute the conditioning mel-spectrogram from the input audio.
    sample_rate = config['arch']['args']['sr']
    audio, _ = load(infile, sr=sample_rate, duration=dur)
    audio = torch.Tensor(audio).to(dev)
    mel = model.get_mel(audio[None, :])
    # Cast to fp16 only after get_mel — an original-author note elsewhere in
    # this file says the sparse ops it uses have no half implementation.
    if half:
        model = model.half()
        mel = mel.half()

    # Synthesize and report throughput in kHz of generated samples.
    t0 = time()
    waveform = model.infer(mel, sigma)
    elapsed = time() - t0
    print('Time cost: {:.4f}, Speed: {:.4f} kHz'.format(
        elapsed, waveform.numel() / elapsed / 1000))
    write_wav(outfile, waveform.cpu().float().numpy(), sample_rate, False)
if __name__ == '__main__':
    # Command-line entry point: parse arguments, load the training config
    # out of the checkpoint, select GPUs, then run inference.
    parser = argparse.ArgumentParser(description='WaveGlow inference')
    parser.add_argument('infile', type=str, help=
        'wave file to generate mel-spectrogram')
    parser.add_argument('outfile', type=str, help='output file name')
    parser.add_argument('--duration', type=float, help=
        'duration of audio, in seconds')
    parser.add_argument('--half', action='store_true')
    parser.add_argument('-s', '--sigma', type=float, default=1.0)
    parser.add_argument('-r', '--resume', default=None, type=str, help=
        'path to latest checkpoint (default: None)')
    parser.add_argument('-d', '--device', default=None, type=str, help=
        'indices of GPUs to enable (default: all)')
    args = parser.parse_args()
    # The model config only exists inside the checkpoint, so --resume is
    # effectively mandatory; fail with a clear usage error instead of the
    # NameError on `config` the original code hit when it was omitted.
    if not args.resume:
        parser.error('a checkpoint is required: pass -r/--resume PATH')
    config = torch.load(args.resume)['config']
    if args.device:
        os.environ['CUDA_VISIBLE_DEVICES'] = args.device
    main(config, args.resume, args.infile, args.outfile, args.sigma, args.
        duration, args.half)
<|reserved_special_token_1|>
import os
import argparse
import torch
import model.model as module_arch
from utils.util import remove_weight_norms
from train import get_instance
from librosa import load
from librosa.output import write_wav
from time import time
def main(config, resume, infile, outfile, sigma, dur, half):
    """Run WaveGlow vocoder inference end to end.

    Loads a trained checkpoint, derives a mel-spectrogram from *infile*,
    synthesizes audio from it, and writes the result to *outfile*.

    Args:
        config: training configuration dict (keys: 'arch', 'n_gpu', ...).
        resume: path to the checkpoint file holding the trained weights.
        infile: wave file used to compute the conditioning mel-spectrogram.
        outfile: path for the synthesized output wave file.
        sigma: sampling temperature forwarded to the flow model.
        dur: max seconds of audio to load, or None for the whole file.
        half: if True, run inference in fp16.
    """
    # Rebuild the network from the architecture description in the config.
    model = get_instance(module_arch, 'arch', config)
    model.summary()

    # Multi-GPU checkpoints were saved under DataParallel, so wrap before
    # loading the weights and unwrap right afterwards.
    ckpt = torch.load(resume)
    weights = ckpt['state_dict']
    multi_gpu = config['n_gpu'] > 1
    if multi_gpu:
        model = torch.nn.DataParallel(model)
    model.load_state_dict(weights)
    if multi_gpu:
        model = model.module
    model.apply(remove_weight_norms)

    # Move to the best available device and switch to inference mode.
    dev = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
    model = model.to(dev)
    model.eval()

    # Compute the conditioning mel-spectrogram from the input audio.
    sample_rate = config['arch']['args']['sr']
    audio, _ = load(infile, sr=sample_rate, duration=dur)
    audio = torch.Tensor(audio).to(dev)
    # Per the original author's note: get mel before the fp16 cast, because
    # sparse.half is not implemented yet.
    mel = model.get_mel(audio[None, :])
    if half:
        model = model.half()
        mel = mel.half()

    # Synthesize and report throughput in kHz of generated samples.
    t0 = time()
    waveform = model.infer(mel, sigma)
    elapsed = time() - t0
    print('Time cost: {:.4f}, Speed: {:.4f} kHz'.format(
        elapsed, waveform.numel() / elapsed / 1000))
    write_wav(outfile, waveform.cpu().float().numpy(), sample_rate, False)
if __name__ == '__main__':
    # Command-line entry point: parse arguments, load the training config
    # out of the checkpoint, select GPUs, then run inference.
    parser = argparse.ArgumentParser(description='WaveGlow inference')
    parser.add_argument('infile', type=str, help='wave file to generate mel-spectrogram')
    parser.add_argument('outfile', type=str, help='output file name')
    parser.add_argument('--duration', type=float, help='duration of audio, in seconds')
    parser.add_argument('--half', action='store_true')
    parser.add_argument('-s', '--sigma', type=float, default=1.0)
    parser.add_argument('-r', '--resume', default=None, type=str,
                        help='path to latest checkpoint (default: None)')
    parser.add_argument('-d', '--device', default=None, type=str,
                        help='indices of GPUs to enable (default: all)')
    args = parser.parse_args()

    # The model config only exists inside the checkpoint, so --resume is
    # effectively mandatory; fail with a clear usage error instead of the
    # NameError on `config` the original code hit when it was omitted.
    if not args.resume:
        parser.error('a checkpoint is required: pass -r/--resume PATH')
    config = torch.load(args.resume)['config']
    if args.device:
        os.environ["CUDA_VISIBLE_DEVICES"] = args.device

    main(config, args.resume, args.infile, args.outfile, args.sigma, args.duration, args.half)
|
flexible
|
{
"blob_id": "a2421a8673a524c32539555596711a71a8e00dbf",
"index": 439,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef main(config, resume, infile, outfile, sigma, dur, half):\n model = get_instance(module_arch, 'arch', config)\n model.summary()\n checkpoint = torch.load(resume)\n state_dict = checkpoint['state_dict']\n if config['n_gpu'] > 1:\n model = torch.nn.DataParallel(model)\n model.load_state_dict(state_dict)\n if config['n_gpu'] > 1:\n model = model.module\n model.apply(remove_weight_norms)\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n model = model.to(device)\n model.eval()\n sr = config['arch']['args']['sr']\n y, _ = load(infile, sr=sr, duration=dur)\n y = torch.Tensor(y).to(device)\n mel = model.get_mel(y[None, :])\n if half:\n model = model.half()\n mel = mel.half()\n start = time()\n x = model.infer(mel, sigma)\n cost = time() - start\n print('Time cost: {:.4f}, Speed: {:.4f} kHz'.format(cost, x.numel() /\n cost / 1000))\n write_wav(outfile, x.cpu().float().numpy(), sr, False)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef main(config, resume, infile, outfile, sigma, dur, half):\n model = get_instance(module_arch, 'arch', config)\n model.summary()\n checkpoint = torch.load(resume)\n state_dict = checkpoint['state_dict']\n if config['n_gpu'] > 1:\n model = torch.nn.DataParallel(model)\n model.load_state_dict(state_dict)\n if config['n_gpu'] > 1:\n model = model.module\n model.apply(remove_weight_norms)\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n model = model.to(device)\n model.eval()\n sr = config['arch']['args']['sr']\n y, _ = load(infile, sr=sr, duration=dur)\n y = torch.Tensor(y).to(device)\n mel = model.get_mel(y[None, :])\n if half:\n model = model.half()\n mel = mel.half()\n start = time()\n x = model.infer(mel, sigma)\n cost = time() - start\n print('Time cost: {:.4f}, Speed: {:.4f} kHz'.format(cost, x.numel() /\n cost / 1000))\n write_wav(outfile, x.cpu().float().numpy(), sr, False)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='WaveGlow inference')\n parser.add_argument('infile', type=str, help=\n 'wave file to generate mel-spectrogram')\n parser.add_argument('outfile', type=str, help='output file name')\n parser.add_argument('--duration', type=float, help=\n 'duration of audio, in seconds')\n parser.add_argument('--half', action='store_true')\n parser.add_argument('-s', '--sigma', type=float, default=1.0)\n parser.add_argument('-r', '--resume', default=None, type=str, help=\n 'path to latest checkpoint (default: None)')\n parser.add_argument('-d', '--device', default=None, type=str, help=\n 'indices of GPUs to enable (default: all)')\n args = parser.parse_args()\n if args.resume:\n config = torch.load(args.resume)['config']\n if args.device:\n os.environ['CUDA_VISIBLE_DEVICES'] = args.device\n main(config, args.resume, args.infile, args.outfile, args.sigma, args.\n duration, args.half)\n",
"step-4": "import os\nimport argparse\nimport torch\nimport model.model as module_arch\nfrom utils.util import remove_weight_norms\nfrom train import get_instance\nfrom librosa import load\nfrom librosa.output import write_wav\nfrom time import time\n\n\ndef main(config, resume, infile, outfile, sigma, dur, half):\n model = get_instance(module_arch, 'arch', config)\n model.summary()\n checkpoint = torch.load(resume)\n state_dict = checkpoint['state_dict']\n if config['n_gpu'] > 1:\n model = torch.nn.DataParallel(model)\n model.load_state_dict(state_dict)\n if config['n_gpu'] > 1:\n model = model.module\n model.apply(remove_weight_norms)\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n model = model.to(device)\n model.eval()\n sr = config['arch']['args']['sr']\n y, _ = load(infile, sr=sr, duration=dur)\n y = torch.Tensor(y).to(device)\n mel = model.get_mel(y[None, :])\n if half:\n model = model.half()\n mel = mel.half()\n start = time()\n x = model.infer(mel, sigma)\n cost = time() - start\n print('Time cost: {:.4f}, Speed: {:.4f} kHz'.format(cost, x.numel() /\n cost / 1000))\n write_wav(outfile, x.cpu().float().numpy(), sr, False)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='WaveGlow inference')\n parser.add_argument('infile', type=str, help=\n 'wave file to generate mel-spectrogram')\n parser.add_argument('outfile', type=str, help='output file name')\n parser.add_argument('--duration', type=float, help=\n 'duration of audio, in seconds')\n parser.add_argument('--half', action='store_true')\n parser.add_argument('-s', '--sigma', type=float, default=1.0)\n parser.add_argument('-r', '--resume', default=None, type=str, help=\n 'path to latest checkpoint (default: None)')\n parser.add_argument('-d', '--device', default=None, type=str, help=\n 'indices of GPUs to enable (default: all)')\n args = parser.parse_args()\n if args.resume:\n config = torch.load(args.resume)['config']\n if args.device:\n 
os.environ['CUDA_VISIBLE_DEVICES'] = args.device\n main(config, args.resume, args.infile, args.outfile, args.sigma, args.\n duration, args.half)\n",
"step-5": "import os\nimport argparse\nimport torch\nimport model.model as module_arch\nfrom utils.util import remove_weight_norms\nfrom train import get_instance\nfrom librosa import load\nfrom librosa.output import write_wav\nfrom time import time\n\n\ndef main(config, resume, infile, outfile, sigma, dur, half):\n # build model architecture\n model = get_instance(module_arch, 'arch', config)\n model.summary()\n\n # load state dict\n checkpoint = torch.load(resume)\n state_dict = checkpoint['state_dict']\n if config['n_gpu'] > 1:\n model = torch.nn.DataParallel(model)\n model.load_state_dict(state_dict)\n\n if config['n_gpu'] > 1:\n model = model.module\n model.apply(remove_weight_norms)\n\n # prepare model for testing\n device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')\n model = model.to(device)\n model.eval()\n\n sr = config['arch']['args']['sr']\n y, _ = load(infile, sr=sr, duration=dur)\n y = torch.Tensor(y).to(device)\n\n # get mel before turn to half, because sparse.half is not implement yet\n mel = model.get_mel(y[None, :])\n\n if half:\n model = model.half()\n mel = mel.half()\n start = time()\n x = model.infer(mel, sigma)\n cost = time() - start\n print(\"Time cost: {:.4f}, Speed: {:.4f} kHz\".format(cost, x.numel() / cost / 1000))\n # print(x.max(), x.min())\n write_wav(outfile, x.cpu().float().numpy(), sr, False)\n\n\nif __name__ == '__main__':\n parser = argparse.ArgumentParser(description='WaveGlow inference')\n parser.add_argument('infile', type=str, help='wave file to generate mel-spectrogram')\n parser.add_argument('outfile', type=str, help='output file name')\n parser.add_argument('--duration', type=float, help='duration of audio, in seconds')\n parser.add_argument('--half', action='store_true')\n parser.add_argument('-s', '--sigma', type=float, default=1.0)\n parser.add_argument('-r', '--resume', default=None, type=str,\n help='path to latest checkpoint (default: None)')\n parser.add_argument('-d', '--device', default=None, 
type=str,\n help='indices of GPUs to enable (default: all)')\n\n args = parser.parse_args()\n\n if args.resume:\n config = torch.load(args.resume)['config']\n if args.device:\n os.environ[\"CUDA_VISIBLE_DEVICES\"] = args.device\n\n main(config, args.resume, args.infile, args.outfile, args.sigma, args.duration, args.half)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import numpy as np
import os
import sys

# Directory containing the MSD Challenge visible-evaluation triplets file.
file_path = sys.argv[1]

# Each row is (user_id, song_id, play_count), tab-separated; read everything
# as strings so the ids are preserved verbatim.
triplets = np.loadtxt(os.path.join(file_path, "kaggle_visible_evaluation_triplets.txt"),
                      delimiter="\t", dtype="str")

# Replace every textual user id with a 1-based integer index.
# NOTE(review): the original referenced undefined names `user_id`/`user_nr`
# (NameError at runtime); the loop below binds them per unique user, which
# appears to be the intended remapping — confirm against downstream usage.
for user_nr, user_id in np.ndenumerate(np.unique(triplets[:, 0])):
    triplets[triplets[:, 0] == user_id, 0] = user_nr[0] + 1

print(triplets)
|
normal
|
{
"blob_id": "f3d9e783491916e684cda659afa73ce5a6a5894a",
"index": 4063,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(enum_users)\n<mask token>\nprint(triplets)\n",
"step-3": "<mask token>\nfile_path = sys.argv[1]\ntriplets = np.loadtxt(os.path.join(file_path,\n 'kaggle_visible_evaluation_triplets.txt'), delimiter='\\t', dtype='str')\nenum_users = np.ndenumerate(np.unique(triplets[:, 0]))\nprint(enum_users)\ntriplets[triplets[:, 0] == user_id[user_nr[0]], 0] = user_nr + 1\nprint(triplets)\n",
"step-4": "import numpy as np\nimport os\nimport sys\nfile_path = sys.argv[1]\ntriplets = np.loadtxt(os.path.join(file_path,\n 'kaggle_visible_evaluation_triplets.txt'), delimiter='\\t', dtype='str')\nenum_users = np.ndenumerate(np.unique(triplets[:, 0]))\nprint(enum_users)\ntriplets[triplets[:, 0] == user_id[user_nr[0]], 0] = user_nr + 1\nprint(triplets)\n",
"step-5": "import numpy as np\n\nimport os\nimport sys\n\nfile_path = sys.argv[1]\n\ntriplets = np.loadtxt(os.path.join(file_path, \"kaggle_visible_evaluation_triplets.txt\"),\n delimiter=\"\\t\", dtype=\"str\")\n\nenum_users = np.ndenumerate(np.unique(triplets[:, 0]))\n\nprint(enum_users)\n\ntriplets[triplets[:, 0] == user_id[user_nr[0]], 0] = user_nr + 1\n\nprint(triplets)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class ConcurrentExecutor:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def run_io_func_sync(self, func, args=(), kwargs=None):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:return whatever the func returns
"""
return self._io_executor.apply(func, args, kwargs)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def run_compute_func_async(self, func, args=(), kwargs={}, callback=None):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:calllback: when func is done and without exception, call the callback
:return whatever the func returns
"""
assert self._compute_executor is not None
return self._compute_executor.apply_async(func, args, kwargs, callback)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ConcurrentExecutor:
<|reserved_special_token_0|>
def start(self):
self._io_executor.start()
<|reserved_special_token_0|>
def run_io_func_sync(self, func, args=(), kwargs=None):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:return whatever the func returns
"""
return self._io_executor.apply(func, args, kwargs)
<|reserved_special_token_0|>
def enqueue_io_funcs(self, funcs, block=True):
"""
run jobs in a fire and forget way, no result will be handled
over to clients
:param funcs: tuple/list-like or generator like object, func shall be
callable
"""
return self._io_executor.enqueue_funcs(funcs, block)
def run_compute_func_sync(self, func, args=(), kwargs={}):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:return whatever the func returns
"""
assert self._compute_executor is not None
return self._compute_executor.apply(func, args, kwargs)
def run_compute_func_async(self, func, args=(), kwargs={}, callback=None):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:calllback: when func is done and without exception, call the callback
:return whatever the func returns
"""
assert self._compute_executor is not None
return self._compute_executor.apply_async(func, args, kwargs, callback)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class ConcurrentExecutor:
def __init__(self, config):
"""
:param config: dict like object, contains thread_min_size (int),
thread_max_size (int), daemonize_thread (bool),
process_size (int)
"""
self._io_executor = tp.ThreadPool(config.get('thread_min_size', 0),
config.get('thread_max_size', 0), config.get('task_queue_size',
1024), config.get('daemonize_thread', True))
self._compute_executor = None
if config.get('process_size', 0):
self._compute_executor = pp.ProcessPool(config.get(
'process_size', 0))
def start(self):
self._io_executor.start()
def tear_down(self):
self._io_executor.tear_down()
if self._compute_executor is not None:
self._compute_executor.tear_down()
def run_io_func_sync(self, func, args=(), kwargs=None):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:return whatever the func returns
"""
return self._io_executor.apply(func, args, kwargs)
<|reserved_special_token_0|>
def enqueue_io_funcs(self, funcs, block=True):
"""
run jobs in a fire and forget way, no result will be handled
over to clients
:param funcs: tuple/list-like or generator like object, func shall be
callable
"""
return self._io_executor.enqueue_funcs(funcs, block)
def run_compute_func_sync(self, func, args=(), kwargs={}):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:return whatever the func returns
"""
assert self._compute_executor is not None
return self._compute_executor.apply(func, args, kwargs)
def run_compute_func_async(self, func, args=(), kwargs={}, callback=None):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:calllback: when func is done and without exception, call the callback
:return whatever the func returns
"""
assert self._compute_executor is not None
return self._compute_executor.apply_async(func, args, kwargs, callback)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import splunktalib.concurrent.process_pool as pp
import splunktalib.concurrent.thread_pool as tp
class ConcurrentExecutor:
def __init__(self, config):
"""
:param config: dict like object, contains thread_min_size (int),
thread_max_size (int), daemonize_thread (bool),
process_size (int)
"""
self._io_executor = tp.ThreadPool(config.get('thread_min_size', 0),
config.get('thread_max_size', 0), config.get('task_queue_size',
1024), config.get('daemonize_thread', True))
self._compute_executor = None
if config.get('process_size', 0):
self._compute_executor = pp.ProcessPool(config.get(
'process_size', 0))
def start(self):
self._io_executor.start()
def tear_down(self):
self._io_executor.tear_down()
if self._compute_executor is not None:
self._compute_executor.tear_down()
def run_io_func_sync(self, func, args=(), kwargs=None):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:return whatever the func returns
"""
return self._io_executor.apply(func, args, kwargs)
def run_io_func_async(self, func, args=(), kwargs=None, callback=None):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:calllback: when func is done and without exception, call the callback
:return whatever the func returns
"""
return self._io_executor.apply_async(func, args, kwargs, callback)
def enqueue_io_funcs(self, funcs, block=True):
"""
run jobs in a fire and forget way, no result will be handled
over to clients
:param funcs: tuple/list-like or generator like object, func shall be
callable
"""
return self._io_executor.enqueue_funcs(funcs, block)
def run_compute_func_sync(self, func, args=(), kwargs={}):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:return whatever the func returns
"""
assert self._compute_executor is not None
return self._compute_executor.apply(func, args, kwargs)
def run_compute_func_async(self, func, args=(), kwargs={}, callback=None):
"""
:param func: callable
:param args: free params
:param kwargs: named params
:calllback: when func is done and without exception, call the callback
:return whatever the func returns
"""
assert self._compute_executor is not None
return self._compute_executor.apply_async(func, args, kwargs, callback)
<|reserved_special_token_1|>
#
# Copyright 2021 Splunk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Concurrent executor provides concurrent executing function either in
a thread pool or a process pool
"""
import splunktalib.concurrent.process_pool as pp
import splunktalib.concurrent.thread_pool as tp
class ConcurrentExecutor:
    """Facade over a thread pool (for I/O bound work) and an optional
    process pool (for CPU bound work)."""

    def __init__(self, config):
        """
        :param config: dict like object, contains thread_min_size (int),
                       thread_max_size (int), daemonize_thread (bool),
                       process_size (int)
        """

        self._io_executor = tp.ThreadPool(
            config.get("thread_min_size", 0),
            config.get("thread_max_size", 0),
            config.get("task_queue_size", 1024),
            config.get("daemonize_thread", True),
        )
        # The process pool is optional: only created when a positive
        # process_size is configured.
        self._compute_executor = None
        if config.get("process_size", 0):
            self._compute_executor = pp.ProcessPool(config.get("process_size", 0))

    def start(self):
        """Start the I/O thread pool."""
        self._io_executor.start()

    def tear_down(self):
        """Tear down the thread pool and, when present, the process pool."""
        self._io_executor.tear_down()
        if self._compute_executor is not None:
            self._compute_executor.tear_down()

    def run_io_func_sync(self, func, args=(), kwargs=None):
        """
        :param func: callable
        :param args: free params
        :param kwargs: named params
        :return whatever the func returns
        """

        return self._io_executor.apply(func, args, kwargs)

    def run_io_func_async(self, func, args=(), kwargs=None, callback=None):
        """
        :param func: callable
        :param args: free params
        :param kwargs: named params
        :param callback: when func is done and without exception, call the callback
        :return whatever the func returns
        """

        return self._io_executor.apply_async(func, args, kwargs, callback)

    def enqueue_io_funcs(self, funcs, block=True):
        """
        run jobs in a fire and forget way, no result will be handled
        over to clients
        :param funcs: tuple/list-like or generator like object, func shall be
        callable
        """

        return self._io_executor.enqueue_funcs(funcs, block)

    def run_compute_func_sync(self, func, args=(), kwargs=None):
        """
        :param func: callable
        :param args: free params
        :param kwargs: named params (None is treated as an empty dict)
        :return whatever the func returns
        """

        assert self._compute_executor is not None
        # Avoid the shared mutable-default-argument pitfall: normalize here
        # instead of using ``kwargs={}`` in the signature.
        if kwargs is None:
            kwargs = {}
        return self._compute_executor.apply(func, args, kwargs)

    def run_compute_func_async(self, func, args=(), kwargs=None, callback=None):
        """
        :param func: callable
        :param args: free params
        :param kwargs: named params (None is treated as an empty dict)
        :param callback: when func is done and without exception, call the callback
        :return whatever the func returns
        """

        assert self._compute_executor is not None
        if kwargs is None:
            kwargs = {}
        return self._compute_executor.apply_async(func, args, kwargs, callback)
|
flexible
|
{
"blob_id": "24b1afb18e1cfdc8d5a62f5ee0147b2d73bc10d8",
"index": 7492,
"step-1": "<mask token>\n\n\nclass ConcurrentExecutor:\n <mask token>\n <mask token>\n <mask token>\n\n def run_io_func_sync(self, func, args=(), kwargs=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :return whatever the func returns\n \"\"\"\n return self._io_executor.apply(func, args, kwargs)\n <mask token>\n <mask token>\n <mask token>\n\n def run_compute_func_async(self, func, args=(), kwargs={}, callback=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :calllback: when func is done and without exception, call the callback\n :return whatever the func returns\n \"\"\"\n assert self._compute_executor is not None\n return self._compute_executor.apply_async(func, args, kwargs, callback)\n",
"step-2": "<mask token>\n\n\nclass ConcurrentExecutor:\n <mask token>\n\n def start(self):\n self._io_executor.start()\n <mask token>\n\n def run_io_func_sync(self, func, args=(), kwargs=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :return whatever the func returns\n \"\"\"\n return self._io_executor.apply(func, args, kwargs)\n <mask token>\n\n def enqueue_io_funcs(self, funcs, block=True):\n \"\"\"\n run jobs in a fire and forget way, no result will be handled\n over to clients\n :param funcs: tuple/list-like or generator like object, func shall be\n callable\n \"\"\"\n return self._io_executor.enqueue_funcs(funcs, block)\n\n def run_compute_func_sync(self, func, args=(), kwargs={}):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :return whatever the func returns\n \"\"\"\n assert self._compute_executor is not None\n return self._compute_executor.apply(func, args, kwargs)\n\n def run_compute_func_async(self, func, args=(), kwargs={}, callback=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :calllback: when func is done and without exception, call the callback\n :return whatever the func returns\n \"\"\"\n assert self._compute_executor is not None\n return self._compute_executor.apply_async(func, args, kwargs, callback)\n",
"step-3": "<mask token>\n\n\nclass ConcurrentExecutor:\n\n def __init__(self, config):\n \"\"\"\n :param config: dict like object, contains thread_min_size (int),\n thread_max_size (int), daemonize_thread (bool),\n process_size (int)\n \"\"\"\n self._io_executor = tp.ThreadPool(config.get('thread_min_size', 0),\n config.get('thread_max_size', 0), config.get('task_queue_size',\n 1024), config.get('daemonize_thread', True))\n self._compute_executor = None\n if config.get('process_size', 0):\n self._compute_executor = pp.ProcessPool(config.get(\n 'process_size', 0))\n\n def start(self):\n self._io_executor.start()\n\n def tear_down(self):\n self._io_executor.tear_down()\n if self._compute_executor is not None:\n self._compute_executor.tear_down()\n\n def run_io_func_sync(self, func, args=(), kwargs=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :return whatever the func returns\n \"\"\"\n return self._io_executor.apply(func, args, kwargs)\n <mask token>\n\n def enqueue_io_funcs(self, funcs, block=True):\n \"\"\"\n run jobs in a fire and forget way, no result will be handled\n over to clients\n :param funcs: tuple/list-like or generator like object, func shall be\n callable\n \"\"\"\n return self._io_executor.enqueue_funcs(funcs, block)\n\n def run_compute_func_sync(self, func, args=(), kwargs={}):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :return whatever the func returns\n \"\"\"\n assert self._compute_executor is not None\n return self._compute_executor.apply(func, args, kwargs)\n\n def run_compute_func_async(self, func, args=(), kwargs={}, callback=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :calllback: when func is done and without exception, call the callback\n :return whatever the func returns\n \"\"\"\n assert self._compute_executor is not None\n return self._compute_executor.apply_async(func, args, kwargs, 
callback)\n",
"step-4": "<mask token>\nimport splunktalib.concurrent.process_pool as pp\nimport splunktalib.concurrent.thread_pool as tp\n\n\nclass ConcurrentExecutor:\n\n def __init__(self, config):\n \"\"\"\n :param config: dict like object, contains thread_min_size (int),\n thread_max_size (int), daemonize_thread (bool),\n process_size (int)\n \"\"\"\n self._io_executor = tp.ThreadPool(config.get('thread_min_size', 0),\n config.get('thread_max_size', 0), config.get('task_queue_size',\n 1024), config.get('daemonize_thread', True))\n self._compute_executor = None\n if config.get('process_size', 0):\n self._compute_executor = pp.ProcessPool(config.get(\n 'process_size', 0))\n\n def start(self):\n self._io_executor.start()\n\n def tear_down(self):\n self._io_executor.tear_down()\n if self._compute_executor is not None:\n self._compute_executor.tear_down()\n\n def run_io_func_sync(self, func, args=(), kwargs=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :return whatever the func returns\n \"\"\"\n return self._io_executor.apply(func, args, kwargs)\n\n def run_io_func_async(self, func, args=(), kwargs=None, callback=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :calllback: when func is done and without exception, call the callback\n :return whatever the func returns\n \"\"\"\n return self._io_executor.apply_async(func, args, kwargs, callback)\n\n def enqueue_io_funcs(self, funcs, block=True):\n \"\"\"\n run jobs in a fire and forget way, no result will be handled\n over to clients\n :param funcs: tuple/list-like or generator like object, func shall be\n callable\n \"\"\"\n return self._io_executor.enqueue_funcs(funcs, block)\n\n def run_compute_func_sync(self, func, args=(), kwargs={}):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :return whatever the func returns\n \"\"\"\n assert self._compute_executor is not None\n return 
self._compute_executor.apply(func, args, kwargs)\n\n def run_compute_func_async(self, func, args=(), kwargs={}, callback=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :calllback: when func is done and without exception, call the callback\n :return whatever the func returns\n \"\"\"\n assert self._compute_executor is not None\n return self._compute_executor.apply_async(func, args, kwargs, callback)\n",
"step-5": "#\n# Copyright 2021 Splunk Inc.\n#\n# Licensed under the Apache License, Version 2.0 (the \"License\");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an \"AS IS\" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n#\n\n\"\"\"\nConcurrent executor provides concurrent executing function either in\na thread pool or a process pool\n\"\"\"\n\nimport splunktalib.concurrent.process_pool as pp\nimport splunktalib.concurrent.thread_pool as tp\n\n\nclass ConcurrentExecutor:\n def __init__(self, config):\n \"\"\"\n :param config: dict like object, contains thread_min_size (int),\n thread_max_size (int), daemonize_thread (bool),\n process_size (int)\n \"\"\"\n\n self._io_executor = tp.ThreadPool(\n config.get(\"thread_min_size\", 0),\n config.get(\"thread_max_size\", 0),\n config.get(\"task_queue_size\", 1024),\n config.get(\"daemonize_thread\", True),\n )\n self._compute_executor = None\n if config.get(\"process_size\", 0):\n self._compute_executor = pp.ProcessPool(config.get(\"process_size\", 0))\n\n def start(self):\n self._io_executor.start()\n\n def tear_down(self):\n self._io_executor.tear_down()\n if self._compute_executor is not None:\n self._compute_executor.tear_down()\n\n def run_io_func_sync(self, func, args=(), kwargs=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :return whatever the func returns\n \"\"\"\n\n return self._io_executor.apply(func, args, kwargs)\n\n def run_io_func_async(self, func, args=(), kwargs=None, callback=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n 
:calllback: when func is done and without exception, call the callback\n :return whatever the func returns\n \"\"\"\n\n return self._io_executor.apply_async(func, args, kwargs, callback)\n\n def enqueue_io_funcs(self, funcs, block=True):\n \"\"\"\n run jobs in a fire and forget way, no result will be handled\n over to clients\n :param funcs: tuple/list-like or generator like object, func shall be\n callable\n \"\"\"\n\n return self._io_executor.enqueue_funcs(funcs, block)\n\n def run_compute_func_sync(self, func, args=(), kwargs={}):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :return whatever the func returns\n \"\"\"\n\n assert self._compute_executor is not None\n return self._compute_executor.apply(func, args, kwargs)\n\n def run_compute_func_async(self, func, args=(), kwargs={}, callback=None):\n \"\"\"\n :param func: callable\n :param args: free params\n :param kwargs: named params\n :calllback: when func is done and without exception, call the callback\n :return whatever the func returns\n \"\"\"\n\n assert self._compute_executor is not None\n return self._compute_executor.apply_async(func, args, kwargs, callback)\n",
"step-ids": [
3,
6,
8,
10,
11
]
}
|
[
3,
6,
8,
10,
11
] |
#! /usr/local/bin/python3
# -*- coding: utf-8 -*-
from requests_oauthlib import OAuth1Session
BASEURL = 'https://api.twitter.com/1.1/'
CK = '3rJOl1ODzm9yZy63FACdg'
CS = '5jPoQ5kQvMJFDYRNE8bQ4rHuds4xJqhvgNJM4awaE8'
AT = '333312023-6dTniMxvwlQG8bATKNYWBXaQkftz9t4ZjRBt7BWk'
AS = 'LQ8xXBTTN8F8CHQv9oDAqsGJFeexdnFf2DFzn3EzGH2L8'
def get_instance(rest_url, params):
    """Issue a GET request against the Twitter 1.1 REST API.

    :param rest_url: endpoint path appended to ``BASEURL``
    :param params: dict of query parameters forwarded to the request
    :return: the response object returned by the OAuth1 session
    """
    endpoint = BASEURL + rest_url
    print(endpoint)  # debug: show the fully-qualified endpoint being hit
    session = OAuth1Session(CK, CS, AT, AS)
    return session.get(endpoint, params=params)
|
normal
|
{
"blob_id": "63bfaa6e191e6090060877e737f4b003bed559cf",
"index": 9140,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_instance(rest_url, params):\n url = BASEURL + rest_url\n print(url)\n twitter = OAuth1Session(CK, CS, AT, AS)\n return twitter.get(url, params=params)\n",
"step-3": "<mask token>\nBASEURL = 'https://api.twitter.com/1.1/'\nCK = '3rJOl1ODzm9yZy63FACdg'\nCS = '5jPoQ5kQvMJFDYRNE8bQ4rHuds4xJqhvgNJM4awaE8'\nAT = '333312023-6dTniMxvwlQG8bATKNYWBXaQkftz9t4ZjRBt7BWk'\nAS = 'LQ8xXBTTN8F8CHQv9oDAqsGJFeexdnFf2DFzn3EzGH2L8'\n\n\ndef get_instance(rest_url, params):\n url = BASEURL + rest_url\n print(url)\n twitter = OAuth1Session(CK, CS, AT, AS)\n return twitter.get(url, params=params)\n",
"step-4": "from requests_oauthlib import OAuth1Session\nBASEURL = 'https://api.twitter.com/1.1/'\nCK = '3rJOl1ODzm9yZy63FACdg'\nCS = '5jPoQ5kQvMJFDYRNE8bQ4rHuds4xJqhvgNJM4awaE8'\nAT = '333312023-6dTniMxvwlQG8bATKNYWBXaQkftz9t4ZjRBt7BWk'\nAS = 'LQ8xXBTTN8F8CHQv9oDAqsGJFeexdnFf2DFzn3EzGH2L8'\n\n\ndef get_instance(rest_url, params):\n url = BASEURL + rest_url\n print(url)\n twitter = OAuth1Session(CK, CS, AT, AS)\n return twitter.get(url, params=params)\n",
"step-5": "#! /usr/local/bin/python3\n# -*- coding: utf-8 -*-\n\nfrom requests_oauthlib import OAuth1Session\n\nBASEURL = 'https://api.twitter.com/1.1/'\n\nCK = '3rJOl1ODzm9yZy63FACdg'\nCS = '5jPoQ5kQvMJFDYRNE8bQ4rHuds4xJqhvgNJM4awaE8'\nAT = '333312023-6dTniMxvwlQG8bATKNYWBXaQkftz9t4ZjRBt7BWk'\nAS = 'LQ8xXBTTN8F8CHQv9oDAqsGJFeexdnFf2DFzn3EzGH2L8'\n\n\ndef get_instance(rest_url, params):\n url = BASEURL + rest_url\n print(url)\n twitter = OAuth1Session(CK, CS, AT, AS)\n return twitter.get(url, params=params)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
def error(msg):
__log_internal(ERROR_FILE, msg)
def info(msg):
__log_internal(LOG_FILE, msg)
def __log_internal(filename, msg):
now = datetime.datetime.now()
f = open(filename, 'a+')
f.write('{} : {}\n'.format(now.strftime('%Y-%m-%d %H:%M:%S'), msg))
f.close()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def error(msg):
__log_internal(ERROR_FILE, msg)
def info(msg):
__log_internal(LOG_FILE, msg)
def __log_internal(filename, msg):
now = datetime.datetime.now()
f = open(filename, 'a+')
f.write('{} : {}\n'.format(now.strftime('%Y-%m-%d %H:%M:%S'), msg))
f.close()
if __name__ == '__main__':
print('Erstelle Testfiles')
info('Test')
error('Test')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
ERROR_FILE = 'error.log'
LOG_FILE = 'application.log'
def error(msg):
__log_internal(ERROR_FILE, msg)
def info(msg):
__log_internal(LOG_FILE, msg)
def __log_internal(filename, msg):
now = datetime.datetime.now()
f = open(filename, 'a+')
f.write('{} : {}\n'.format(now.strftime('%Y-%m-%d %H:%M:%S'), msg))
f.close()
if __name__ == '__main__':
print('Erstelle Testfiles')
info('Test')
error('Test')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import datetime
ERROR_FILE = 'error.log'
LOG_FILE = 'application.log'
def error(msg):
__log_internal(ERROR_FILE, msg)
def info(msg):
__log_internal(LOG_FILE, msg)
def __log_internal(filename, msg):
now = datetime.datetime.now()
f = open(filename, 'a+')
f.write('{} : {}\n'.format(now.strftime('%Y-%m-%d %H:%M:%S'), msg))
f.close()
if __name__ == '__main__':
print('Erstelle Testfiles')
info('Test')
error('Test')
<|reserved_special_token_1|>
'''
    log.py

    version 1.0 - 18.03.2020

    Logging helpers for several scenarios.
'''

# Imports
import datetime

# Log file targets; lines are appended in the current working directory.
ERROR_FILE = "error.log"
LOG_FILE = "application.log"


def error(msg):
    """Append *msg* as a timestamped line to the error log."""
    __log_internal(ERROR_FILE, msg)


def info(msg):
    """Append *msg* as a timestamped line to the application log."""
    __log_internal(LOG_FILE, msg)


def __log_internal(filename, msg):
    """Append '<YYYY-mm-dd HH:MM:SS> : <msg>' to *filename*."""
    now = datetime.datetime.now()
    # `with` guarantees the handle is closed even if the write raises.
    with open(filename, "a+") as f:
        f.write("{} : {}\n".format(now.strftime("%Y-%m-%d %H:%M:%S"), msg))


if __name__ == '__main__':
    print("Erstelle Testfiles")
    info("Test")
    error("Test")
|
flexible
|
{
"blob_id": "0475c6cab353f0d23a4c4b7f78c1b47ecc5f8d3b",
"index": 4819,
"step-1": "<mask token>\n\n\ndef error(msg):\n __log_internal(ERROR_FILE, msg)\n\n\ndef info(msg):\n __log_internal(LOG_FILE, msg)\n\n\ndef __log_internal(filename, msg):\n now = datetime.datetime.now()\n f = open(filename, 'a+')\n f.write('{} : {}\\n'.format(now.strftime('%Y-%m-%d %H:%M:%S'), msg))\n f.close()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef error(msg):\n __log_internal(ERROR_FILE, msg)\n\n\ndef info(msg):\n __log_internal(LOG_FILE, msg)\n\n\ndef __log_internal(filename, msg):\n now = datetime.datetime.now()\n f = open(filename, 'a+')\n f.write('{} : {}\\n'.format(now.strftime('%Y-%m-%d %H:%M:%S'), msg))\n f.close()\n\n\nif __name__ == '__main__':\n print('Erstelle Testfiles')\n info('Test')\n error('Test')\n",
"step-3": "<mask token>\nERROR_FILE = 'error.log'\nLOG_FILE = 'application.log'\n\n\ndef error(msg):\n __log_internal(ERROR_FILE, msg)\n\n\ndef info(msg):\n __log_internal(LOG_FILE, msg)\n\n\ndef __log_internal(filename, msg):\n now = datetime.datetime.now()\n f = open(filename, 'a+')\n f.write('{} : {}\\n'.format(now.strftime('%Y-%m-%d %H:%M:%S'), msg))\n f.close()\n\n\nif __name__ == '__main__':\n print('Erstelle Testfiles')\n info('Test')\n error('Test')\n",
"step-4": "<mask token>\nimport datetime\nERROR_FILE = 'error.log'\nLOG_FILE = 'application.log'\n\n\ndef error(msg):\n __log_internal(ERROR_FILE, msg)\n\n\ndef info(msg):\n __log_internal(LOG_FILE, msg)\n\n\ndef __log_internal(filename, msg):\n now = datetime.datetime.now()\n f = open(filename, 'a+')\n f.write('{} : {}\\n'.format(now.strftime('%Y-%m-%d %H:%M:%S'), msg))\n f.close()\n\n\nif __name__ == '__main__':\n print('Erstelle Testfiles')\n info('Test')\n error('Test')\n",
"step-5": "'''\n log.py\n\n version 1.0 - 18.03.2020\n\n Logging fuer mehrere Szenarien\n'''\n\n# Imports\nimport datetime\n\n# Globale Variablen\nERROR_FILE = \"error.log\"\nLOG_FILE = \"application.log\"\n\n\ndef error(msg):\n __log_internal(ERROR_FILE, msg)\n\n\ndef info(msg):\n __log_internal(LOG_FILE, msg)\n\n\ndef __log_internal(filename, msg):\n now = datetime.datetime.now()\n f = open(filename, \"a+\")\n f.write(\"{} : {}\\n\".format(now.strftime(\"%Y-%m-%d %H:%M:%S\"), msg))\n f.close()\n\n\nif __name__ == '__main__':\n print(\"Erstelle Testfiles\")\n info(\"Test\")\n error(\"Test\")\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
@api_view(['GET'])
def artifact_save_recommend(request, pageNo):
artifact_url = (
f'http://www.emuseum.go.kr/openapi/relic/list?serviceKey={service_key}&numOfRows=100&pageNo={pageNo}'
)
response = requests.get(artifact_url)
response_dict = bs4.BeautifulSoup(response.content, 'html.parser')
search_list = []
for data in response_dict.findAll('data'):
for item in data.findAll('item'):
if item['key'] == 'id':
id_num = item['value']
search_list.append(id_num)
detail_list = []
dataDict = {'id_num': '', 'name': '', 'desc': '', 'museum_name': '',
'nationality_name': '', 'image_uri': ''}
for i in range(len(search_list)):
artifact_num = search_list[i]
artifact_url = (
f'http://www.emuseum.go.kr/openapi/relic/detail?serviceKey={service_key}&id={artifact_num}'
)
response = requests.get(artifact_url)
response_dict = bs4.BeautifulSoup(response.content, 'html.parser')
for data in response_dict.findAll('list'):
for item in data.findAll('item'):
if item['key'] == 'id':
dataDict['id_num'] = item['value']
elif item['key'] == 'desc':
dataDict['desc'] = item['value']
elif item['key'] == 'nameKr':
dataDict['name'] = item['value']
elif item['key'] == 'nationalityName2':
dataDict['nationality_name'] = item['value']
elif item['key'] == 'museumName2':
dataDict['museum_name'] = item['value']
elif item['key'] == 'imgThumUriM':
dataDict['image_uri'] = item['value']
if dataDict['desc'] != '':
serializer = RecommendedArtifactSerialize(data=dataDict)
if serializer.is_valid(raise_exception=True):
serializer.save()
dataDict = {'id_num': '', 'name': '', 'desc': '',
'museum_name': '', 'nationality_name': '', 'image_uri': ''}
return Response(serializer.data)
@api_view(['GET'])
def artifact_recommend(request):
now = datetime.now()
nowYear = now.year
nowMonth = now.month
nowDay = now.day
daySum = 0
if nowYear % 4 == 0 and nowYear % 100 != 0 or nowYear % 400 == 0:
month = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
else:
month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
for i in range(nowMonth - 1):
daySum += month[i]
daySum += nowDay
Recommended_list = RecommendedArtifact.objects.all()
Recommended_artifact = Recommended_list[daySum]
dataDict = {'id_num': Recommended_artifact.id_num, 'name':
Recommended_artifact.name, 'desc': Recommended_artifact.desc,
'museum_name': Recommended_artifact.museum_name, 'nationality_name':
Recommended_artifact.nationality_name, 'image_uri':
Recommended_artifact.image_uri}
return Response(dataDict)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
service_key = "{jo's museum key}"
@api_view(['GET'])
def artifact_save_recommend(request, pageNo):
artifact_url = (
f'http://www.emuseum.go.kr/openapi/relic/list?serviceKey={service_key}&numOfRows=100&pageNo={pageNo}'
)
response = requests.get(artifact_url)
response_dict = bs4.BeautifulSoup(response.content, 'html.parser')
search_list = []
for data in response_dict.findAll('data'):
for item in data.findAll('item'):
if item['key'] == 'id':
id_num = item['value']
search_list.append(id_num)
detail_list = []
dataDict = {'id_num': '', 'name': '', 'desc': '', 'museum_name': '',
'nationality_name': '', 'image_uri': ''}
for i in range(len(search_list)):
artifact_num = search_list[i]
artifact_url = (
f'http://www.emuseum.go.kr/openapi/relic/detail?serviceKey={service_key}&id={artifact_num}'
)
response = requests.get(artifact_url)
response_dict = bs4.BeautifulSoup(response.content, 'html.parser')
for data in response_dict.findAll('list'):
for item in data.findAll('item'):
if item['key'] == 'id':
dataDict['id_num'] = item['value']
elif item['key'] == 'desc':
dataDict['desc'] = item['value']
elif item['key'] == 'nameKr':
dataDict['name'] = item['value']
elif item['key'] == 'nationalityName2':
dataDict['nationality_name'] = item['value']
elif item['key'] == 'museumName2':
dataDict['museum_name'] = item['value']
elif item['key'] == 'imgThumUriM':
dataDict['image_uri'] = item['value']
if dataDict['desc'] != '':
serializer = RecommendedArtifactSerialize(data=dataDict)
if serializer.is_valid(raise_exception=True):
serializer.save()
dataDict = {'id_num': '', 'name': '', 'desc': '',
'museum_name': '', 'nationality_name': '', 'image_uri': ''}
return Response(serializer.data)
@api_view(['GET'])
def artifact_recommend(request):
now = datetime.now()
nowYear = now.year
nowMonth = now.month
nowDay = now.day
daySum = 0
if nowYear % 4 == 0 and nowYear % 100 != 0 or nowYear % 400 == 0:
month = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
else:
month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
for i in range(nowMonth - 1):
daySum += month[i]
daySum += nowDay
Recommended_list = RecommendedArtifact.objects.all()
Recommended_artifact = Recommended_list[daySum]
dataDict = {'id_num': Recommended_artifact.id_num, 'name':
Recommended_artifact.name, 'desc': Recommended_artifact.desc,
'museum_name': Recommended_artifact.museum_name, 'nationality_name':
Recommended_artifact.nationality_name, 'image_uri':
Recommended_artifact.image_uri}
return Response(dataDict)
<|reserved_special_token_1|>
from .models import RecommendedArtifact
from .serializers import RecommendedArtifactSerialize
from rest_framework.decorators import api_view
from rest_framework.response import Response
from datetime import datetime
import requests, bs4
service_key = "{jo's museum key}"
@api_view(['GET'])
def artifact_save_recommend(request, pageNo):
artifact_url = (
f'http://www.emuseum.go.kr/openapi/relic/list?serviceKey={service_key}&numOfRows=100&pageNo={pageNo}'
)
response = requests.get(artifact_url)
response_dict = bs4.BeautifulSoup(response.content, 'html.parser')
search_list = []
for data in response_dict.findAll('data'):
for item in data.findAll('item'):
if item['key'] == 'id':
id_num = item['value']
search_list.append(id_num)
detail_list = []
dataDict = {'id_num': '', 'name': '', 'desc': '', 'museum_name': '',
'nationality_name': '', 'image_uri': ''}
for i in range(len(search_list)):
artifact_num = search_list[i]
artifact_url = (
f'http://www.emuseum.go.kr/openapi/relic/detail?serviceKey={service_key}&id={artifact_num}'
)
response = requests.get(artifact_url)
response_dict = bs4.BeautifulSoup(response.content, 'html.parser')
for data in response_dict.findAll('list'):
for item in data.findAll('item'):
if item['key'] == 'id':
dataDict['id_num'] = item['value']
elif item['key'] == 'desc':
dataDict['desc'] = item['value']
elif item['key'] == 'nameKr':
dataDict['name'] = item['value']
elif item['key'] == 'nationalityName2':
dataDict['nationality_name'] = item['value']
elif item['key'] == 'museumName2':
dataDict['museum_name'] = item['value']
elif item['key'] == 'imgThumUriM':
dataDict['image_uri'] = item['value']
if dataDict['desc'] != '':
serializer = RecommendedArtifactSerialize(data=dataDict)
if serializer.is_valid(raise_exception=True):
serializer.save()
dataDict = {'id_num': '', 'name': '', 'desc': '',
'museum_name': '', 'nationality_name': '', 'image_uri': ''}
return Response(serializer.data)
@api_view(['GET'])
def artifact_recommend(request):
now = datetime.now()
nowYear = now.year
nowMonth = now.month
nowDay = now.day
daySum = 0
if nowYear % 4 == 0 and nowYear % 100 != 0 or nowYear % 400 == 0:
month = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
else:
month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
for i in range(nowMonth - 1):
daySum += month[i]
daySum += nowDay
Recommended_list = RecommendedArtifact.objects.all()
Recommended_artifact = Recommended_list[daySum]
dataDict = {'id_num': Recommended_artifact.id_num, 'name':
Recommended_artifact.name, 'desc': Recommended_artifact.desc,
'museum_name': Recommended_artifact.museum_name, 'nationality_name':
Recommended_artifact.nationality_name, 'image_uri':
Recommended_artifact.image_uri}
return Response(dataDict)
<|reserved_special_token_1|>
from .models import RecommendedArtifact
from .serializers import RecommendedArtifactSerialize
from rest_framework.decorators import api_view
from rest_framework.response import Response
from datetime import datetime
import requests, bs4
# constant value
service_key = "{jo's museum key}"
@api_view(['GET'])
def artifact_save_recommend(request,pageNo):
# 1. 페이지 선정 및 페이지 내 모든 유물 정보 가져오기
artifact_url = f"http://www.emuseum.go.kr/openapi/relic/list?serviceKey={service_key}&numOfRows=100&pageNo={pageNo}"
#http://www.emuseum.go.kr/openapi/relic/list?serviceKey=DLuSbLjmCJIDKmhoSB7ELx3eVXXxg9ZBqh9oC8/eFWTcq2gDMqfQA7jrooSkvzWgYv/pd9a6fUJKG40K3VQXHg==&numOfRows=100&pageNo=1
response = requests.get(artifact_url)
response_dict = bs4.BeautifulSoup(response.content, 'html.parser')
search_list = []
for data in response_dict.findAll('data'):
for item in data.findAll('item'):
if item['key'] == 'id':
id_num = item['value']
search_list.append(id_num)
# 2-1. 변수설정
detail_list = []
dataDict = {
'id_num': '',
'name': '',
'desc': '',
'museum_name': '',
'nationality_name': '',
'image_uri': '',
}
# 2-2. 모든 유물에서 desc있나 파악하기
for i in range(len(search_list)):
artifact_num = search_list[i]
artifact_url = f"http://www.emuseum.go.kr/openapi/relic/detail?serviceKey={service_key}&id={artifact_num}"
# http://www.emuseum.go.kr/openapi/relic/detail?serviceKey=DLuSbLjmCJIDKmhoSB7ELx3eVXXxg9ZBqh9oC8/eFWTcq2gDMqfQA7jrooSkvzWgYv/pd9a6fUJKG40K3VQXHg==&id=PS0100100100100021500000
response = requests.get(artifact_url)
response_dict = bs4.BeautifulSoup(response.content, 'html.parser')
for data in response_dict.findAll('list'):
for item in data.findAll('item'):
if item['key'] == 'id':
dataDict['id_num'] = item['value']
elif item['key'] == 'desc':
dataDict['desc'] = item['value']
elif item['key'] == 'nameKr':
dataDict['name'] = item['value']
elif item['key'] == 'nationalityName2':
dataDict['nationality_name'] = item['value']
elif item['key'] == 'museumName2':
dataDict['museum_name'] = item['value']
elif item['key'] == 'imgThumUriM':
dataDict['image_uri'] = item['value']
# 2-3 db에 저장하기
if dataDict['desc'] != '':
serializer = RecommendedArtifactSerialize(data=dataDict)
if serializer.is_valid(raise_exception=True):
serializer.save()
dataDict = {
'id_num': '',
'name': '',
'desc': '',
'museum_name': '',
'nationality_name': '',
'image_uri': '',
}
return Response(serializer.data)
@api_view(['GET'])
def artifact_recommend(request):
## 오늘은 며칠째인가요??
now = datetime.now()
nowYear = now.year
nowMonth = now.month
nowDay = now.day
daySum = 0
if nowYear%4==0 and nowYear%100!=0 or nowYear%400==0:
month = [31,29,31,30,31,30,31,31,30,31,30,31]
else:
month = [31,28,31,30,31,30,31,31,30,31,30,31]
for i in range(nowMonth-1):
daySum += month[i]
daySum += nowDay
Recommended_list = RecommendedArtifact.objects.all()
Recommended_artifact = Recommended_list[daySum]
dataDict = {
'id_num': Recommended_artifact.id_num,
'name': Recommended_artifact.name,
'desc': Recommended_artifact.desc,
'museum_name': Recommended_artifact.museum_name,
'nationality_name': Recommended_artifact.nationality_name,
'image_uri': Recommended_artifact.image_uri,
}
# print(Recommended_artifact.name)
return Response(dataDict)
|
flexible
|
{
"blob_id": "707e3e60d6d9a3db5b9bc733e912b34e2cec5974",
"index": 8585,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\n@api_view(['GET'])\ndef artifact_save_recommend(request, pageNo):\n artifact_url = (\n f'http://www.emuseum.go.kr/openapi/relic/list?serviceKey={service_key}&numOfRows=100&pageNo={pageNo}'\n )\n response = requests.get(artifact_url)\n response_dict = bs4.BeautifulSoup(response.content, 'html.parser')\n search_list = []\n for data in response_dict.findAll('data'):\n for item in data.findAll('item'):\n if item['key'] == 'id':\n id_num = item['value']\n search_list.append(id_num)\n detail_list = []\n dataDict = {'id_num': '', 'name': '', 'desc': '', 'museum_name': '',\n 'nationality_name': '', 'image_uri': ''}\n for i in range(len(search_list)):\n artifact_num = search_list[i]\n artifact_url = (\n f'http://www.emuseum.go.kr/openapi/relic/detail?serviceKey={service_key}&id={artifact_num}'\n )\n response = requests.get(artifact_url)\n response_dict = bs4.BeautifulSoup(response.content, 'html.parser')\n for data in response_dict.findAll('list'):\n for item in data.findAll('item'):\n if item['key'] == 'id':\n dataDict['id_num'] = item['value']\n elif item['key'] == 'desc':\n dataDict['desc'] = item['value']\n elif item['key'] == 'nameKr':\n dataDict['name'] = item['value']\n elif item['key'] == 'nationalityName2':\n dataDict['nationality_name'] = item['value']\n elif item['key'] == 'museumName2':\n dataDict['museum_name'] = item['value']\n elif item['key'] == 'imgThumUriM':\n dataDict['image_uri'] = item['value']\n if dataDict['desc'] != '':\n serializer = RecommendedArtifactSerialize(data=dataDict)\n if serializer.is_valid(raise_exception=True):\n serializer.save()\n dataDict = {'id_num': '', 'name': '', 'desc': '',\n 'museum_name': '', 'nationality_name': '', 'image_uri': ''}\n return Response(serializer.data)\n\n\n@api_view(['GET'])\ndef artifact_recommend(request):\n now = datetime.now()\n nowYear = now.year\n nowMonth = now.month\n nowDay = now.day\n daySum = 0\n if nowYear % 4 == 0 and nowYear % 100 != 0 or nowYear % 400 == 0:\n month = 
[31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\n else:\n month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\n for i in range(nowMonth - 1):\n daySum += month[i]\n daySum += nowDay\n Recommended_list = RecommendedArtifact.objects.all()\n Recommended_artifact = Recommended_list[daySum]\n dataDict = {'id_num': Recommended_artifact.id_num, 'name':\n Recommended_artifact.name, 'desc': Recommended_artifact.desc,\n 'museum_name': Recommended_artifact.museum_name, 'nationality_name':\n Recommended_artifact.nationality_name, 'image_uri':\n Recommended_artifact.image_uri}\n return Response(dataDict)\n",
"step-3": "<mask token>\nservice_key = \"{jo's museum key}\"\n\n\n@api_view(['GET'])\ndef artifact_save_recommend(request, pageNo):\n artifact_url = (\n f'http://www.emuseum.go.kr/openapi/relic/list?serviceKey={service_key}&numOfRows=100&pageNo={pageNo}'\n )\n response = requests.get(artifact_url)\n response_dict = bs4.BeautifulSoup(response.content, 'html.parser')\n search_list = []\n for data in response_dict.findAll('data'):\n for item in data.findAll('item'):\n if item['key'] == 'id':\n id_num = item['value']\n search_list.append(id_num)\n detail_list = []\n dataDict = {'id_num': '', 'name': '', 'desc': '', 'museum_name': '',\n 'nationality_name': '', 'image_uri': ''}\n for i in range(len(search_list)):\n artifact_num = search_list[i]\n artifact_url = (\n f'http://www.emuseum.go.kr/openapi/relic/detail?serviceKey={service_key}&id={artifact_num}'\n )\n response = requests.get(artifact_url)\n response_dict = bs4.BeautifulSoup(response.content, 'html.parser')\n for data in response_dict.findAll('list'):\n for item in data.findAll('item'):\n if item['key'] == 'id':\n dataDict['id_num'] = item['value']\n elif item['key'] == 'desc':\n dataDict['desc'] = item['value']\n elif item['key'] == 'nameKr':\n dataDict['name'] = item['value']\n elif item['key'] == 'nationalityName2':\n dataDict['nationality_name'] = item['value']\n elif item['key'] == 'museumName2':\n dataDict['museum_name'] = item['value']\n elif item['key'] == 'imgThumUriM':\n dataDict['image_uri'] = item['value']\n if dataDict['desc'] != '':\n serializer = RecommendedArtifactSerialize(data=dataDict)\n if serializer.is_valid(raise_exception=True):\n serializer.save()\n dataDict = {'id_num': '', 'name': '', 'desc': '',\n 'museum_name': '', 'nationality_name': '', 'image_uri': ''}\n return Response(serializer.data)\n\n\n@api_view(['GET'])\ndef artifact_recommend(request):\n now = datetime.now()\n nowYear = now.year\n nowMonth = now.month\n nowDay = now.day\n daySum = 0\n if nowYear % 4 == 0 and nowYear % 100 
!= 0 or nowYear % 400 == 0:\n month = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\n else:\n month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\n for i in range(nowMonth - 1):\n daySum += month[i]\n daySum += nowDay\n Recommended_list = RecommendedArtifact.objects.all()\n Recommended_artifact = Recommended_list[daySum]\n dataDict = {'id_num': Recommended_artifact.id_num, 'name':\n Recommended_artifact.name, 'desc': Recommended_artifact.desc,\n 'museum_name': Recommended_artifact.museum_name, 'nationality_name':\n Recommended_artifact.nationality_name, 'image_uri':\n Recommended_artifact.image_uri}\n return Response(dataDict)\n",
"step-4": "from .models import RecommendedArtifact\nfrom .serializers import RecommendedArtifactSerialize\nfrom rest_framework.decorators import api_view\nfrom rest_framework.response import Response\nfrom datetime import datetime\nimport requests, bs4\nservice_key = \"{jo's museum key}\"\n\n\n@api_view(['GET'])\ndef artifact_save_recommend(request, pageNo):\n artifact_url = (\n f'http://www.emuseum.go.kr/openapi/relic/list?serviceKey={service_key}&numOfRows=100&pageNo={pageNo}'\n )\n response = requests.get(artifact_url)\n response_dict = bs4.BeautifulSoup(response.content, 'html.parser')\n search_list = []\n for data in response_dict.findAll('data'):\n for item in data.findAll('item'):\n if item['key'] == 'id':\n id_num = item['value']\n search_list.append(id_num)\n detail_list = []\n dataDict = {'id_num': '', 'name': '', 'desc': '', 'museum_name': '',\n 'nationality_name': '', 'image_uri': ''}\n for i in range(len(search_list)):\n artifact_num = search_list[i]\n artifact_url = (\n f'http://www.emuseum.go.kr/openapi/relic/detail?serviceKey={service_key}&id={artifact_num}'\n )\n response = requests.get(artifact_url)\n response_dict = bs4.BeautifulSoup(response.content, 'html.parser')\n for data in response_dict.findAll('list'):\n for item in data.findAll('item'):\n if item['key'] == 'id':\n dataDict['id_num'] = item['value']\n elif item['key'] == 'desc':\n dataDict['desc'] = item['value']\n elif item['key'] == 'nameKr':\n dataDict['name'] = item['value']\n elif item['key'] == 'nationalityName2':\n dataDict['nationality_name'] = item['value']\n elif item['key'] == 'museumName2':\n dataDict['museum_name'] = item['value']\n elif item['key'] == 'imgThumUriM':\n dataDict['image_uri'] = item['value']\n if dataDict['desc'] != '':\n serializer = RecommendedArtifactSerialize(data=dataDict)\n if serializer.is_valid(raise_exception=True):\n serializer.save()\n dataDict = {'id_num': '', 'name': '', 'desc': '',\n 'museum_name': '', 'nationality_name': '', 'image_uri': ''}\n 
return Response(serializer.data)\n\n\n@api_view(['GET'])\ndef artifact_recommend(request):\n now = datetime.now()\n nowYear = now.year\n nowMonth = now.month\n nowDay = now.day\n daySum = 0\n if nowYear % 4 == 0 and nowYear % 100 != 0 or nowYear % 400 == 0:\n month = [31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\n else:\n month = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]\n for i in range(nowMonth - 1):\n daySum += month[i]\n daySum += nowDay\n Recommended_list = RecommendedArtifact.objects.all()\n Recommended_artifact = Recommended_list[daySum]\n dataDict = {'id_num': Recommended_artifact.id_num, 'name':\n Recommended_artifact.name, 'desc': Recommended_artifact.desc,\n 'museum_name': Recommended_artifact.museum_name, 'nationality_name':\n Recommended_artifact.nationality_name, 'image_uri':\n Recommended_artifact.image_uri}\n return Response(dataDict)\n",
"step-5": "from .models import RecommendedArtifact\nfrom .serializers import RecommendedArtifactSerialize\nfrom rest_framework.decorators import api_view \nfrom rest_framework.response import Response\nfrom datetime import datetime\nimport requests, bs4\n\n# constant value\nservice_key = \"{jo's museum key}\"\n\n@api_view(['GET'])\ndef artifact_save_recommend(request,pageNo):\n \n # 1. 페이지 선정 및 페이지 내 모든 유물 정보 가져오기\n artifact_url = f\"http://www.emuseum.go.kr/openapi/relic/list?serviceKey={service_key}&numOfRows=100&pageNo={pageNo}\"\n #http://www.emuseum.go.kr/openapi/relic/list?serviceKey=DLuSbLjmCJIDKmhoSB7ELx3eVXXxg9ZBqh9oC8/eFWTcq2gDMqfQA7jrooSkvzWgYv/pd9a6fUJKG40K3VQXHg==&numOfRows=100&pageNo=1\n\n response = requests.get(artifact_url)\n response_dict = bs4.BeautifulSoup(response.content, 'html.parser')\n search_list = []\n\n for data in response_dict.findAll('data'):\n for item in data.findAll('item'):\n if item['key'] == 'id':\n id_num = item['value']\n search_list.append(id_num)\n\n # 2-1. 변수설정\n detail_list = []\n dataDict = {\n 'id_num': '',\n 'name': '',\n 'desc': '',\n 'museum_name': '',\n 'nationality_name': '',\n 'image_uri': '',\n }\n\n # 2-2. 
모든 유물에서 desc있나 파악하기\n for i in range(len(search_list)):\n artifact_num = search_list[i]\n artifact_url = f\"http://www.emuseum.go.kr/openapi/relic/detail?serviceKey={service_key}&id={artifact_num}\"\n # http://www.emuseum.go.kr/openapi/relic/detail?serviceKey=DLuSbLjmCJIDKmhoSB7ELx3eVXXxg9ZBqh9oC8/eFWTcq2gDMqfQA7jrooSkvzWgYv/pd9a6fUJKG40K3VQXHg==&id=PS0100100100100021500000\n \n response = requests.get(artifact_url)\n response_dict = bs4.BeautifulSoup(response.content, 'html.parser')\n\n for data in response_dict.findAll('list'):\n for item in data.findAll('item'):\n if item['key'] == 'id':\n dataDict['id_num'] = item['value']\n\n elif item['key'] == 'desc':\n dataDict['desc'] = item['value']\n\n elif item['key'] == 'nameKr':\n dataDict['name'] = item['value']\n\n elif item['key'] == 'nationalityName2':\n dataDict['nationality_name'] = item['value']\n\n elif item['key'] == 'museumName2':\n dataDict['museum_name'] = item['value']\n\n elif item['key'] == 'imgThumUriM':\n dataDict['image_uri'] = item['value']\n\n # 2-3 db에 저장하기\n if dataDict['desc'] != '':\n serializer = RecommendedArtifactSerialize(data=dataDict)\n if serializer.is_valid(raise_exception=True):\n serializer.save()\n dataDict = {\n 'id_num': '',\n 'name': '',\n 'desc': '',\n 'museum_name': '',\n 'nationality_name': '',\n 'image_uri': '',\n } \n return Response(serializer.data)\n\n\n@api_view(['GET'])\ndef artifact_recommend(request):\n ## 오늘은 며칠째인가요??\n now = datetime.now()\n nowYear = now.year\n nowMonth = now.month\n nowDay = now.day\n daySum = 0\n\n if nowYear%4==0 and nowYear%100!=0 or nowYear%400==0:\n month = [31,29,31,30,31,30,31,31,30,31,30,31]\n else:\n month = [31,28,31,30,31,30,31,31,30,31,30,31]\n \n for i in range(nowMonth-1):\n daySum += month[i]\n\n daySum += nowDay\n\n Recommended_list = RecommendedArtifact.objects.all()\n Recommended_artifact = Recommended_list[daySum]\n dataDict = {\n 'id_num': Recommended_artifact.id_num,\n 'name': Recommended_artifact.name,\n 'desc': 
Recommended_artifact.desc,\n 'museum_name': Recommended_artifact.museum_name,\n 'nationality_name': Recommended_artifact.nationality_name,\n 'image_uri': Recommended_artifact.image_uri,\n } \n # print(Recommended_artifact.name)\n\n return Response(dataDict)\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
@app.route('/')
def showMachineList():
return render_template('list.html')
@app.route('/insert_records', methods=['POST'])
def insert_records():
json_data = request.json['info']
nome = json_data['nome']
email = json_data['email']
telefone = json_data['telefone']
db.catalogo.insert_one({'nome': nome, 'email': email, 'telefone': telefone}
)
return jsonify(status='OK', message='inserted successfully')
@app.route('/get_records', methods=['POST'])
def get_records():
contatos = db.catalogo.find()
return render_template('list.html', contatos=contatos)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
catalogo.insert_one(contato1)
catalogo.insert_one(contato2)
@app.route('/')
def showMachineList():
return render_template('list.html')
@app.route('/insert_records', methods=['POST'])
def insert_records():
json_data = request.json['info']
nome = json_data['nome']
email = json_data['email']
telefone = json_data['telefone']
db.catalogo.insert_one({'nome': nome, 'email': email, 'telefone': telefone}
)
return jsonify(status='OK', message='inserted successfully')
@app.route('/get_records', methods=['POST'])
def get_records():
contatos = db.catalogo.find()
return render_template('list.html', contatos=contatos)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
app = Flask(__name__)
conexao = MongoClient('localhost', 27017)
db = conexao['teste_db']
contato1 = {'nome': 'Lucas', 'email': 'lucas@gmail.com', 'telefone':
'11 99389-3244'}
contato2 = {'nome': 'Lara', 'email': 'lara@gmail.com', 'telefone':
'11 99333-3556'}
catalogo = db.catalogo
catalogo.insert_one(contato1)
catalogo.insert_one(contato2)
@app.route('/')
def showMachineList():
return render_template('list.html')
@app.route('/insert_records', methods=['POST'])
def insert_records():
json_data = request.json['info']
nome = json_data['nome']
email = json_data['email']
telefone = json_data['telefone']
db.catalogo.insert_one({'nome': nome, 'email': email, 'telefone': telefone}
)
return jsonify(status='OK', message='inserted successfully')
@app.route('/get_records', methods=['POST'])
def get_records():
contatos = db.catalogo.find()
return render_template('list.html', contatos=contatos)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
from flask import Flask, render_template, request, url_for, redirect, jsonify, json, request
from pymongo import MongoClient
app = Flask(__name__)
conexao = MongoClient('localhost', 27017)
db = conexao['teste_db']
contato1 = {'nome': 'Lucas', 'email': 'lucas@gmail.com', 'telefone':
'11 99389-3244'}
contato2 = {'nome': 'Lara', 'email': 'lara@gmail.com', 'telefone':
'11 99333-3556'}
catalogo = db.catalogo
catalogo.insert_one(contato1)
catalogo.insert_one(contato2)
@app.route('/')
def showMachineList():
return render_template('list.html')
@app.route('/insert_records', methods=['POST'])
def insert_records():
json_data = request.json['info']
nome = json_data['nome']
email = json_data['email']
telefone = json_data['telefone']
db.catalogo.insert_one({'nome': nome, 'email': email, 'telefone': telefone}
)
return jsonify(status='OK', message='inserted successfully')
@app.route('/get_records', methods=['POST'])
def get_records():
contatos = db.catalogo.find()
return render_template('list.html', contatos=contatos)
if __name__ == '__main__':
app.run(debug=True)
<|reserved_special_token_1|>
from flask import Flask, render_template, request, url_for, redirect,jsonify,json,request
from pymongo import MongoClient
#conexão bd
app = Flask(__name__)
conexao = MongoClient('localhost',27017)
db = conexao['teste_db']
#inserindo contatos iniciais
contato1 = {'nome': 'Lucas', 'email': 'lucas@gmail.com', 'telefone': '11 99389-3244'}
contato2 = {'nome': 'Lara', 'email': 'lara@gmail.com', 'telefone': '11 99333-3556'}
catalogo = db.catalogo
catalogo.insert_one(contato1)
catalogo.insert_one(contato2)
#página inicial
@app.route('/')
def showMachineList():
return render_template('list.html')
@app.route("/insert_records", methods=['POST'])
def insert_records():
json_data = request.json['info']
nome = json_data['nome']
email = json_data['email']
telefone = json_data['telefone']
db.catalogo.insert_one({
'nome':nome,'email':email,'telefone':telefone
})
return jsonify(status='OK',message='inserted successfully')
# NOTE(review): registered under POST even though it only reads data.
@app.route('/get_records',methods=['POST'])
def get_records():
    """Fetch every contact from MongoDB and render them into list.html."""
    contatos = db.catalogo.find()
    return render_template('list.html',contatos=contatos)
if __name__ == "__main__":
app.run(debug=True)
|
flexible
|
{
"blob_id": "05ca16303d0eb962249793164ac91795c45cc3c2",
"index": 9974,
"step-1": "<mask token>\n\n\n@app.route('/')\ndef showMachineList():\n return render_template('list.html')\n\n\n@app.route('/insert_records', methods=['POST'])\ndef insert_records():\n json_data = request.json['info']\n nome = json_data['nome']\n email = json_data['email']\n telefone = json_data['telefone']\n db.catalogo.insert_one({'nome': nome, 'email': email, 'telefone': telefone}\n )\n return jsonify(status='OK', message='inserted successfully')\n\n\n@app.route('/get_records', methods=['POST'])\ndef get_records():\n contatos = db.catalogo.find()\n return render_template('list.html', contatos=contatos)\n\n\n<mask token>\n",
"step-2": "<mask token>\ncatalogo.insert_one(contato1)\ncatalogo.insert_one(contato2)\n\n\n@app.route('/')\ndef showMachineList():\n return render_template('list.html')\n\n\n@app.route('/insert_records', methods=['POST'])\ndef insert_records():\n json_data = request.json['info']\n nome = json_data['nome']\n email = json_data['email']\n telefone = json_data['telefone']\n db.catalogo.insert_one({'nome': nome, 'email': email, 'telefone': telefone}\n )\n return jsonify(status='OK', message='inserted successfully')\n\n\n@app.route('/get_records', methods=['POST'])\ndef get_records():\n contatos = db.catalogo.find()\n return render_template('list.html', contatos=contatos)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-3": "<mask token>\napp = Flask(__name__)\nconexao = MongoClient('localhost', 27017)\ndb = conexao['teste_db']\ncontato1 = {'nome': 'Lucas', 'email': 'lucas@gmail.com', 'telefone':\n '11 99389-3244'}\ncontato2 = {'nome': 'Lara', 'email': 'lara@gmail.com', 'telefone':\n '11 99333-3556'}\ncatalogo = db.catalogo\ncatalogo.insert_one(contato1)\ncatalogo.insert_one(contato2)\n\n\n@app.route('/')\ndef showMachineList():\n return render_template('list.html')\n\n\n@app.route('/insert_records', methods=['POST'])\ndef insert_records():\n json_data = request.json['info']\n nome = json_data['nome']\n email = json_data['email']\n telefone = json_data['telefone']\n db.catalogo.insert_one({'nome': nome, 'email': email, 'telefone': telefone}\n )\n return jsonify(status='OK', message='inserted successfully')\n\n\n@app.route('/get_records', methods=['POST'])\ndef get_records():\n contatos = db.catalogo.find()\n return render_template('list.html', contatos=contatos)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-4": "from flask import Flask, render_template, request, url_for, redirect, jsonify, json, request\nfrom pymongo import MongoClient\napp = Flask(__name__)\nconexao = MongoClient('localhost', 27017)\ndb = conexao['teste_db']\ncontato1 = {'nome': 'Lucas', 'email': 'lucas@gmail.com', 'telefone':\n '11 99389-3244'}\ncontato2 = {'nome': 'Lara', 'email': 'lara@gmail.com', 'telefone':\n '11 99333-3556'}\ncatalogo = db.catalogo\ncatalogo.insert_one(contato1)\ncatalogo.insert_one(contato2)\n\n\n@app.route('/')\ndef showMachineList():\n return render_template('list.html')\n\n\n@app.route('/insert_records', methods=['POST'])\ndef insert_records():\n json_data = request.json['info']\n nome = json_data['nome']\n email = json_data['email']\n telefone = json_data['telefone']\n db.catalogo.insert_one({'nome': nome, 'email': email, 'telefone': telefone}\n )\n return jsonify(status='OK', message='inserted successfully')\n\n\n@app.route('/get_records', methods=['POST'])\ndef get_records():\n contatos = db.catalogo.find()\n return render_template('list.html', contatos=contatos)\n\n\nif __name__ == '__main__':\n app.run(debug=True)\n",
"step-5": "from flask import Flask, render_template, request, url_for, redirect,jsonify,json,request\n\nfrom pymongo import MongoClient\n\n#conexão bd\napp = Flask(__name__)\nconexao = MongoClient('localhost',27017)\ndb = conexao['teste_db']\n\n#inserindo contatos iniciais\ncontato1 = {'nome': 'Lucas', 'email': 'lucas@gmail.com', 'telefone': '11 99389-3244'}\ncontato2 = {'nome': 'Lara', 'email': 'lara@gmail.com', 'telefone': '11 99333-3556'}\ncatalogo = db.catalogo\ncatalogo.insert_one(contato1)\ncatalogo.insert_one(contato2)\n\n\n#página inicial\n@app.route('/')\ndef showMachineList():\n return render_template('list.html')\n\n@app.route(\"/insert_records\", methods=['POST'])\ndef insert_records():\n \n json_data = request.json['info']\n nome = json_data['nome']\n email = json_data['email']\n telefone = json_data['telefone']\n\n db.catalogo.insert_one({\n 'nome':nome,'email':email,'telefone':telefone\n })\n \n return jsonify(status='OK',message='inserted successfully')\n\n@app.route('/get_records',methods=['POST'])\ndef get_records():\n \n contatos = db.catalogo.find() \n\n return render_template('list.html',contatos=contatos)\n\n\nif __name__ == \"__main__\":\n app.run(debug=True)",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def makeMnistModel():
mnist = tf.keras.datasets.mnist
(X_train, y_train), (_, _) = mnist.load_data()
X_train = X_train / 255.0
model = tf.keras.models.Sequential([tf.keras.layers.Flatten(input_shape
=(28, 28)), tf.keras.layers.Dense(128, activation='relu'), tf.keras
.layers.Dropout(0.2), tf.keras.layers.Dense(10, activation='softmax')])
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.fit(X_train, y_train, epochs=5)
model.save('./mnist_model.h5')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def makeMnistModel():
mnist = tf.keras.datasets.mnist
(X_train, y_train), (_, _) = mnist.load_data()
X_train = X_train / 255.0
model = tf.keras.models.Sequential([tf.keras.layers.Flatten(input_shape
=(28, 28)), tf.keras.layers.Dense(128, activation='relu'), tf.keras
.layers.Dropout(0.2), tf.keras.layers.Dense(10, activation='softmax')])
model.compile(optimizer='adam', loss='sparse_categorical_crossentropy',
metrics=['accuracy'])
model.fit(X_train, y_train, epochs=5)
model.save('./mnist_model.h5')
makeMnistModel()
<|reserved_special_token_1|>
import tensorflow as tf
def makeMnistModel():
    """Train a small dense MNIST classifier and save it to disk.

    Loads MNIST through Keras, scales pixel values into [0, 1], trains a
    Flatten -> Dense(128, relu) -> Dropout(0.2) -> Dense(10, softmax)
    network for 5 epochs, and writes the model to ./mnist_model.h5.
    The test split returned by load_data() is discarded.
    """
    dataset = tf.keras.datasets.mnist
    (train_images, train_labels), (_, _) = dataset.load_data()
    # Scale uint8 pixels 0..255 down to floats in [0, 1].
    train_images = train_images / 255.0

    layers = [
        tf.keras.layers.Flatten(input_shape=(28, 28)),
        tf.keras.layers.Dense(128, activation='relu'),
        tf.keras.layers.Dropout(0.2),
        tf.keras.layers.Dense(10, activation='softmax'),
    ]
    model = tf.keras.models.Sequential(layers)
    model.compile(
        optimizer='adam',
        loss='sparse_categorical_crossentropy',
        metrics=['accuracy'],
    )
    model.fit(train_images, train_labels, epochs=5)
    model.save('./mnist_model.h5')
# Train and save the model when this module is executed.
makeMnistModel()
|
flexible
|
{
"blob_id": "1555583cd3d8938cbaeeac2d1f74bb9c3858f26d",
"index": 4207,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef makeMnistModel():\n mnist = tf.keras.datasets.mnist\n (X_train, y_train), (_, _) = mnist.load_data()\n X_train = X_train / 255.0\n model = tf.keras.models.Sequential([tf.keras.layers.Flatten(input_shape\n =(28, 28)), tf.keras.layers.Dense(128, activation='relu'), tf.keras\n .layers.Dropout(0.2), tf.keras.layers.Dense(10, activation='softmax')])\n model.compile(optimizer='adam', loss='sparse_categorical_crossentropy',\n metrics=['accuracy'])\n model.fit(X_train, y_train, epochs=5)\n model.save('./mnist_model.h5')\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef makeMnistModel():\n mnist = tf.keras.datasets.mnist\n (X_train, y_train), (_, _) = mnist.load_data()\n X_train = X_train / 255.0\n model = tf.keras.models.Sequential([tf.keras.layers.Flatten(input_shape\n =(28, 28)), tf.keras.layers.Dense(128, activation='relu'), tf.keras\n .layers.Dropout(0.2), tf.keras.layers.Dense(10, activation='softmax')])\n model.compile(optimizer='adam', loss='sparse_categorical_crossentropy',\n metrics=['accuracy'])\n model.fit(X_train, y_train, epochs=5)\n model.save('./mnist_model.h5')\n\n\nmakeMnistModel()\n",
"step-4": "import tensorflow as tf\n\n\ndef makeMnistModel():\n mnist = tf.keras.datasets.mnist\n (X_train, y_train), (_, _) = mnist.load_data()\n X_train = X_train / 255.0\n model = tf.keras.models.Sequential([tf.keras.layers.Flatten(input_shape\n =(28, 28)), tf.keras.layers.Dense(128, activation='relu'), tf.keras\n .layers.Dropout(0.2), tf.keras.layers.Dense(10, activation='softmax')])\n model.compile(optimizer='adam', loss='sparse_categorical_crossentropy',\n metrics=['accuracy'])\n model.fit(X_train, y_train, epochs=5)\n model.save('./mnist_model.h5')\n\n\nmakeMnistModel()\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
connection.open()
print('list of hbase tables {}'.format(connection.tables()))
<|reserved_special_token_0|>
for key, data in customers.scan():
keys.append(key)
data_list.append(data)
<|reserved_special_token_0|>
print('len of hbase keys {}'.format(len(keys)))
print('hbase columns {}'.format(hbase_columns))
print('hbase columns len {}'.format(len(hbase_columns)))
<|reserved_special_token_0|>
print('csv file shape {}'.format(df.shape))
print('csv columns {}'.format(df_columns))
print('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(
df_columns)))
print('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))
<|reserved_special_token_0|>
cursor.execute(query1)
<|reserved_special_token_0|>
cursor.execute(query2)
<|reserved_special_token_0|>
cursor.execute(query3)
<|reserved_special_token_0|>
print(data[:2])
<|reserved_special_token_0|>
cursor.execute('SELECT * FROM customers_hive LIMIT 10')
<|reserved_special_token_0|>
print(len(result))
print(result)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
connection = happybase.Connection()
connection.open()
print('list of hbase tables {}'.format(connection.tables()))
customers = connection.table('CUSTOMERS')
keys = []
data_list = []
for key, data in customers.scan():
keys.append(key)
data_list.append(data)
hbase_columns = [x.decode('utf-8')[3:] for x in data_list[0].keys()]
print('len of hbase keys {}'.format(len(keys)))
print('hbase columns {}'.format(hbase_columns))
print('hbase columns len {}'.format(len(hbase_columns)))
df = pd.read_csv('customers-with-header-500.csv', delimiter='|', index_col=
'index')
df_columns = list(df.columns)
print('csv file shape {}'.format(df.shape))
print('csv columns {}'.format(df_columns))
print('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(
df_columns)))
print('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))
url = 'http://localhost:8765/'
conn = phoenixdb.connect(url, autocommit=True)
cursor = conn.cursor()
query1 = 'DROP VIEW "CUSTOMERS"'
cursor.execute(query1)
query2 = (
'CREATE VIEW "CUSTOMERS" (pk VARCHAR PRIMARY KEY, "cf"."first_name" VARCHAR, "cf"."last_name" VARCHAR, "cf"."company_name" VARCHAR, "cf"."address" VARCHAR, "cf"."city" VARCHAR, "cf"."county" VARCHAR, "cf"."state" VARCHAR, "cf"."zip" VARCHAR, "cf"."phone1" VARCHAR, "cf"."phone2" VARCHAR, "cf"."email" VARCHAR, "cf"."web" VARCHAR)'
)
cursor.execute(query2)
query3 = 'SELECT * FROM CUSTOMERS'
cursor.execute(query3)
data = cursor.fetchall()
print(data[:2])
<|reserved_special_token_0|>
cursor = hive.connect('localhost').cursor()
cursor.execute('SELECT * FROM customers_hive LIMIT 10')
result = cursor.fetchall()
print(len(result))
print(result)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
import pandas as pd
import happybase
import phoenixdb
from pyhive import hive
connection = happybase.Connection()
connection.open()
print('list of hbase tables {}'.format(connection.tables()))
customers = connection.table('CUSTOMERS')
keys = []
data_list = []
for key, data in customers.scan():
keys.append(key)
data_list.append(data)
hbase_columns = [x.decode('utf-8')[3:] for x in data_list[0].keys()]
print('len of hbase keys {}'.format(len(keys)))
print('hbase columns {}'.format(hbase_columns))
print('hbase columns len {}'.format(len(hbase_columns)))
df = pd.read_csv('customers-with-header-500.csv', delimiter='|', index_col=
'index')
df_columns = list(df.columns)
print('csv file shape {}'.format(df.shape))
print('csv columns {}'.format(df_columns))
print('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(
df_columns)))
print('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))
url = 'http://localhost:8765/'
conn = phoenixdb.connect(url, autocommit=True)
cursor = conn.cursor()
query1 = 'DROP VIEW "CUSTOMERS"'
cursor.execute(query1)
query2 = (
'CREATE VIEW "CUSTOMERS" (pk VARCHAR PRIMARY KEY, "cf"."first_name" VARCHAR, "cf"."last_name" VARCHAR, "cf"."company_name" VARCHAR, "cf"."address" VARCHAR, "cf"."city" VARCHAR, "cf"."county" VARCHAR, "cf"."state" VARCHAR, "cf"."zip" VARCHAR, "cf"."phone1" VARCHAR, "cf"."phone2" VARCHAR, "cf"."email" VARCHAR, "cf"."web" VARCHAR)'
)
cursor.execute(query2)
query3 = 'SELECT * FROM CUSTOMERS'
cursor.execute(query3)
data = cursor.fetchall()
print(data[:2])
from pyhive import hive
cursor = hive.connect('localhost').cursor()
cursor.execute('SELECT * FROM customers_hive LIMIT 10')
result = cursor.fetchall()
print(len(result))
print(result)
<|reserved_special_token_1|>
'''
we have source files with a certain format and each file has 200 columns and there is a process that takes the source
files and loads into hbase and moves it into sql data warehouse. We have to create automated test scripts that compares
with with is with hbase and sql data warehouse. load into hbase and query the flat file, query the hbase, and compare.
compare each row. load into hbase and query.
https://community.hortonworks.com/articles/4942/import-csv-data-into-hbase-using-importtsv.html
https://www.briandunning.com/sample-data/
http://python-phoenixdb.readthedocs.io/en/latest/
https://phoenix.apache.org/faq.html
https://phoenix.apache.org/bulk_dataload.html
hbase shell
create 'CUSTOMERS', 'cf'
count 'CUSTOMERS'
scan 'CUSTOMERS'
exit
hdfs dfs -put customers-with-out-header-500.csv
hbase org.apache.hadoop.hbase.mapreduce.ImportTsv '-Dimporttsv.separator=|' -Dimporttsv.columns="HBASE_ROW_KEY,cf:first_name,cf:last_name,cf:company_name,cf:address,cf:city,cf:county,cf:state,cf:zip,cf:phone1,cf:phone2,cf:email,cf:web" CUSTOMERS customers-with-out-header-500.csv
sudo python3 -m pip install happybase
sudo python3 -m pip install pandas
sudo python3 -m pip install numpy
sudo python3 -m pip install ipython
list of hbase tables [b'customers']
len of hbase keys 501
hbase columns [b'cf:state', b'cf:phone2', b'cf:email', b'cf:zip', b'cf:last_name', b'cf:address', b'cf:city', b'cf:company_name', b'cf:phone1', b'cf:county', b'cf:first_name', b'cf:web']
hbase columns len 12
csv file shape (500, 13)
csv columns ['index', 'first_name', 'last_name', 'company_name', 'address', 'city', 'county', 'state', 'zip', 'phone1', 'phone2', 'email', 'web']
phoenix steps
python /usr/lib/phoenix/bin/sqlline.py
CREATE TABLE "CUSTOMERSPHOENIX" (pk VARCHAR PRIMARY KEY, first_name VARCHAR, last_name VARCHAR, company_name VARCHAR, address VARCHAR, city VARCHAR, county VARCHAR, state VARCHAR, zip VARCHAR, phone1 VARCHAR, phone2 VARCHAR, email VARCHAR, web VARCHAR)
python /usr/lib/phoenix/bin/psql.py -t CUSTOMERSPHOENIX -d "|" localhost customers-with-out-header-500.csv
SELECT A.*, B.* FROM CUSTOMERS AS A FULL JOIN CUSTOMERSPHOENIX AS B ON (A.PK = B.PK) WHERE A.PK IS NULL OR B.PK IS NULL
hive steps
CREATE EXTERNAL TABLE customers_hive(key string, first_name string, last_name string, company_name string, address string, city string, county string, state string, zip string, phone1 string, phone2 string, email string, web string)
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'
WITH SERDEPROPERTIES ("hbase.columns.mapping" = ":key, cf:first_name, cf:last_name, cf:company_name, cf:address, cf:city, cf:county, cf:state, cf:zip, cf:phone1, cf:phone2, cf:email, cf:web")
TBLPROPERTIES ("hbase.table.name"="CUSTOMERS");
SELECT yourcolumns
FROM tablenames
JOIN tablenames
WHERE condition
GROUP BY yourcolumns
HAVING aggregatecolumn condition
ORDER BY yourcolumns
'''
import pandas as pd
import happybase
import phoenixdb
from pyhive import hive
# --- HBase: scan the CUSTOMERS table via Thrift (happybase) ---
connection = happybase.Connection()
connection.open()
print('list of hbase tables {}'.format(connection.tables()))
customers = connection.table('CUSTOMERS')
keys = []
data_list = []
# Full table scan: collect every row key and its {column: value} dict.
for key, data in customers.scan():
    keys.append(key)
    data_list.append(data)
# Column names come back as b'cf:<name>'; [3:] strips the 3-character
# column-family prefix "cf:".
# NOTE(review): data_list[0] raises IndexError if the table is empty.
hbase_columns = [x.decode('utf-8')[3:] for x in data_list[0].keys()]
print('len of hbase keys {}'.format(len(keys)))
print('hbase columns {}'.format(hbase_columns))
print('hbase columns len {}'.format(len(hbase_columns)))
# --- CSV: load the source file and compare schema/row count with HBase ---
df = pd.read_csv('customers-with-header-500.csv', delimiter='|', index_col='index')
df_columns = list(df.columns)
print('csv file shape {}'.format(df.shape))
print('csv columns {}'.format(df_columns))
print('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(df_columns)))
print('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))
# --- Phoenix: recreate a SQL view over the HBase table and query it ---
url = 'http://localhost:8765/'
conn = phoenixdb.connect(url, autocommit=True)
cursor = conn.cursor()
# NOTE(review): DROP VIEW fails if the view does not exist yet -- confirm
# the intended bootstrap order on a fresh cluster.
query1 = 'DROP VIEW "CUSTOMERS"'
cursor.execute(query1)
query2 = 'CREATE VIEW "CUSTOMERS" (pk VARCHAR PRIMARY KEY, "cf"."first_name" VARCHAR, "cf"."last_name" VARCHAR, "cf"."company_name" VARCHAR, "cf"."address" VARCHAR, "cf"."city" VARCHAR, "cf"."county" VARCHAR, "cf"."state" VARCHAR, "cf"."zip" VARCHAR, "cf"."phone1" VARCHAR, "cf"."phone2" VARCHAR, "cf"."email" VARCHAR, "cf"."web" VARCHAR)'
cursor.execute(query2)
query3 = 'SELECT * FROM CUSTOMERS'
cursor.execute(query3)
data = cursor.fetchall()
print(data[:2])
# --- Hive: query the HBase-backed external table ---
# NOTE(review): duplicate import -- `hive` is already imported at the top
# of the file; `cursor` is also rebound here, shadowing the Phoenix cursor.
from pyhive import hive  # or import hive
cursor = hive.connect('localhost').cursor()
cursor.execute('SELECT * FROM customers_hive LIMIT 10')
result = cursor.fetchall()
print(len(result))
print(result)
|
flexible
|
{
"blob_id": "7b38c64174656d1c4ec2b0541e6ed8d6680af7d7",
"index": 9565,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nconnection.open()\nprint('list of hbase tables {}'.format(connection.tables()))\n<mask token>\nfor key, data in customers.scan():\n keys.append(key)\n data_list.append(data)\n<mask token>\nprint('len of hbase keys {}'.format(len(keys)))\nprint('hbase columns {}'.format(hbase_columns))\nprint('hbase columns len {}'.format(len(hbase_columns)))\n<mask token>\nprint('csv file shape {}'.format(df.shape))\nprint('csv columns {}'.format(df_columns))\nprint('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(\n df_columns)))\nprint('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))\n<mask token>\ncursor.execute(query1)\n<mask token>\ncursor.execute(query2)\n<mask token>\ncursor.execute(query3)\n<mask token>\nprint(data[:2])\n<mask token>\ncursor.execute('SELECT * FROM customers_hive LIMIT 10')\n<mask token>\nprint(len(result))\nprint(result)\n",
"step-3": "<mask token>\nconnection = happybase.Connection()\nconnection.open()\nprint('list of hbase tables {}'.format(connection.tables()))\ncustomers = connection.table('CUSTOMERS')\nkeys = []\ndata_list = []\nfor key, data in customers.scan():\n keys.append(key)\n data_list.append(data)\nhbase_columns = [x.decode('utf-8')[3:] for x in data_list[0].keys()]\nprint('len of hbase keys {}'.format(len(keys)))\nprint('hbase columns {}'.format(hbase_columns))\nprint('hbase columns len {}'.format(len(hbase_columns)))\ndf = pd.read_csv('customers-with-header-500.csv', delimiter='|', index_col=\n 'index')\ndf_columns = list(df.columns)\nprint('csv file shape {}'.format(df.shape))\nprint('csv columns {}'.format(df_columns))\nprint('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(\n df_columns)))\nprint('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))\nurl = 'http://localhost:8765/'\nconn = phoenixdb.connect(url, autocommit=True)\ncursor = conn.cursor()\nquery1 = 'DROP VIEW \"CUSTOMERS\"'\ncursor.execute(query1)\nquery2 = (\n 'CREATE VIEW \"CUSTOMERS\" (pk VARCHAR PRIMARY KEY, \"cf\".\"first_name\" VARCHAR, \"cf\".\"last_name\" VARCHAR, \"cf\".\"company_name\" VARCHAR, \"cf\".\"address\" VARCHAR, \"cf\".\"city\" VARCHAR, \"cf\".\"county\" VARCHAR, \"cf\".\"state\" VARCHAR, \"cf\".\"zip\" VARCHAR, \"cf\".\"phone1\" VARCHAR, \"cf\".\"phone2\" VARCHAR, \"cf\".\"email\" VARCHAR, \"cf\".\"web\" VARCHAR)'\n )\ncursor.execute(query2)\nquery3 = 'SELECT * FROM CUSTOMERS'\ncursor.execute(query3)\ndata = cursor.fetchall()\nprint(data[:2])\n<mask token>\ncursor = hive.connect('localhost').cursor()\ncursor.execute('SELECT * FROM customers_hive LIMIT 10')\nresult = cursor.fetchall()\nprint(len(result))\nprint(result)\n",
"step-4": "<mask token>\nimport pandas as pd\nimport happybase\nimport phoenixdb\nfrom pyhive import hive\nconnection = happybase.Connection()\nconnection.open()\nprint('list of hbase tables {}'.format(connection.tables()))\ncustomers = connection.table('CUSTOMERS')\nkeys = []\ndata_list = []\nfor key, data in customers.scan():\n keys.append(key)\n data_list.append(data)\nhbase_columns = [x.decode('utf-8')[3:] for x in data_list[0].keys()]\nprint('len of hbase keys {}'.format(len(keys)))\nprint('hbase columns {}'.format(hbase_columns))\nprint('hbase columns len {}'.format(len(hbase_columns)))\ndf = pd.read_csv('customers-with-header-500.csv', delimiter='|', index_col=\n 'index')\ndf_columns = list(df.columns)\nprint('csv file shape {}'.format(df.shape))\nprint('csv columns {}'.format(df_columns))\nprint('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(\n df_columns)))\nprint('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))\nurl = 'http://localhost:8765/'\nconn = phoenixdb.connect(url, autocommit=True)\ncursor = conn.cursor()\nquery1 = 'DROP VIEW \"CUSTOMERS\"'\ncursor.execute(query1)\nquery2 = (\n 'CREATE VIEW \"CUSTOMERS\" (pk VARCHAR PRIMARY KEY, \"cf\".\"first_name\" VARCHAR, \"cf\".\"last_name\" VARCHAR, \"cf\".\"company_name\" VARCHAR, \"cf\".\"address\" VARCHAR, \"cf\".\"city\" VARCHAR, \"cf\".\"county\" VARCHAR, \"cf\".\"state\" VARCHAR, \"cf\".\"zip\" VARCHAR, \"cf\".\"phone1\" VARCHAR, \"cf\".\"phone2\" VARCHAR, \"cf\".\"email\" VARCHAR, \"cf\".\"web\" VARCHAR)'\n )\ncursor.execute(query2)\nquery3 = 'SELECT * FROM CUSTOMERS'\ncursor.execute(query3)\ndata = cursor.fetchall()\nprint(data[:2])\nfrom pyhive import hive\ncursor = hive.connect('localhost').cursor()\ncursor.execute('SELECT * FROM customers_hive LIMIT 10')\nresult = cursor.fetchall()\nprint(len(result))\nprint(result)\n",
"step-5": "'''\nwe have source files with a certain format and each file has 200 columns and there is a process that takes the source\nfiles and loads into hbase and moves it into sql data warehouse. We have to create automated test scripts that compares\nwith with is with hbase and sql data warehouse. load into hbase and query the flat file, query the hbase, and compare.\ncompare each row. load into hbase and query.\n\nhttps://community.hortonworks.com/articles/4942/import-csv-data-into-hbase-using-importtsv.html\nhttps://www.briandunning.com/sample-data/\nhttp://python-phoenixdb.readthedocs.io/en/latest/\nhttps://phoenix.apache.org/faq.html\nhttps://phoenix.apache.org/bulk_dataload.html\n\nhbase shell\ncreate 'CUSTOMERS', 'cf'\ncount 'CUSTOMERS'\nscan 'CUSTOMERS'\nexit\n\nhdfs dfs -put customers-with-out-header-500.csv\nhbase org.apache.hadoop.hbase.mapreduce.ImportTsv '-Dimporttsv.separator=|' -Dimporttsv.columns=\"HBASE_ROW_KEY,cf:first_name,cf:last_name,cf:company_name,cf:address,cf:city,cf:county,cf:state,cf:zip,cf:phone1,cf:phone2,cf:email,cf:web\" CUSTOMERS customers-with-out-header-500.csv\n\nsudo python3 -m pip install happybase\nsudo python3 -m pip install pandas\nsudo python3 -m pip install numpy\nsudo python3 -m pip install ipython\n\nlist of hbase tables [b'customers']\nlen of hbase keys 501\nhbase columns [b'cf:state', b'cf:phone2', b'cf:email', b'cf:zip', b'cf:last_name', b'cf:address', b'cf:city', b'cf:company_name', b'cf:phone1', b'cf:county', b'cf:first_name', b'cf:web']\nhbase columns len 12\ncsv file shape (500, 13)\ncsv columns ['index', 'first_name', 'last_name', 'company_name', 'address', 'city', 'county', 'state', 'zip', 'phone1', 'phone2', 'email', 'web']\n\nphoenix steps\npython /usr/lib/phoenix/bin/sqlline.py\nCREATE TABLE \"CUSTOMERSPHOENIX\" (pk VARCHAR PRIMARY KEY, first_name VARCHAR, last_name VARCHAR, company_name VARCHAR, address VARCHAR, city VARCHAR, county VARCHAR, state VARCHAR, zip VARCHAR, phone1 VARCHAR, phone2 VARCHAR, 
email VARCHAR, web VARCHAR)\npython /usr/lib/phoenix/bin/psql.py -t CUSTOMERSPHOENIX -d \"|\" localhost customers-with-out-header-500.csv\nSELECT A.*, B.* FROM CUSTOMERS AS A FULL JOIN CUSTOMERSPHOENIX AS B ON (A.PK = B.PK) WHERE A.PK IS NULL OR B.PK IS NULL\n\nhive steps\n\nCREATE EXTERNAL TABLE customers_hive(key string, first_name string, last_name string, company_name string, address string, city string, county string, state string, zip string, phone1 string, phone2 string, email string, web string)\nSTORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler'\nWITH SERDEPROPERTIES (\"hbase.columns.mapping\" = \":key, cf:first_name, cf:last_name, cf:company_name, cf:address, cf:city, cf:county, cf:state, cf:zip, cf:phone1, cf:phone2, cf:email, cf:web\")\nTBLPROPERTIES (\"hbase.table.name\"=\"CUSTOMERS\");\n\nSELECT yourcolumns\nFROM tablenames\nJOIN tablenames\nWHERE condition\nGROUP BY yourcolumns\nHAVING aggregatecolumn condition\nORDER BY yourcolumns\n'''\n\nimport pandas as pd\nimport happybase\nimport phoenixdb\nfrom pyhive import hive\n\n\nconnection = happybase.Connection()\nconnection.open()\n\nprint('list of hbase tables {}'.format(connection.tables()))\n\ncustomers = connection.table('CUSTOMERS')\n\nkeys = []\ndata_list = []\n\nfor key, data in customers.scan():\n keys.append(key)\n data_list.append(data)\n\nhbase_columns = [x.decode('utf-8')[3:] for x in data_list[0].keys()]\n\nprint('len of hbase keys {}'.format(len(keys)))\nprint('hbase columns {}'.format(hbase_columns))\nprint('hbase columns len {}'.format(len(hbase_columns)))\n\ndf = pd.read_csv('customers-with-header-500.csv', delimiter='|', index_col='index')\n\ndf_columns = list(df.columns)\nprint('csv file shape {}'.format(df.shape))\nprint('csv columns {}'.format(df_columns))\n\nprint('hbase columns == csv columns: {}'.format(set(hbase_columns) == set(df_columns)))\nprint('hbase row count == csv row count: {}'.format(len(keys) == df.shape[0]))\n\n\nurl = 'http://localhost:8765/'\nconn = 
phoenixdb.connect(url, autocommit=True)\n\ncursor = conn.cursor()\nquery1 = 'DROP VIEW \"CUSTOMERS\"'\ncursor.execute(query1)\nquery2 = 'CREATE VIEW \"CUSTOMERS\" (pk VARCHAR PRIMARY KEY, \"cf\".\"first_name\" VARCHAR, \"cf\".\"last_name\" VARCHAR, \"cf\".\"company_name\" VARCHAR, \"cf\".\"address\" VARCHAR, \"cf\".\"city\" VARCHAR, \"cf\".\"county\" VARCHAR, \"cf\".\"state\" VARCHAR, \"cf\".\"zip\" VARCHAR, \"cf\".\"phone1\" VARCHAR, \"cf\".\"phone2\" VARCHAR, \"cf\".\"email\" VARCHAR, \"cf\".\"web\" VARCHAR)'\ncursor.execute(query2)\nquery3 = 'SELECT * FROM CUSTOMERS'\ncursor.execute(query3)\ndata = cursor.fetchall()\nprint(data[:2])\n\n\nfrom pyhive import hive # or import hive\ncursor = hive.connect('localhost').cursor()\ncursor.execute('SELECT * FROM customers_hive LIMIT 10')\nresult = cursor.fetchall()\nprint(len(result))\nprint(result)\n\n\n\n\n\n\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
# Generated by Django 3.0.10 on 2020-12-19 15:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: registers the unmanaged ``Admin`` model that
    carries the Wagtail admin-access permission."""

    initial = True

    dependencies = [
        ('wagtailadmin', '0001_create_admin_access_permissions'),
    ]

    operations = [
        migrations.CreateModel(
            name='Admin',
            fields=[
                ('id', models.AutoField(
                    auto_created=True,
                    primary_key=True,
                    serialize=False,
                    verbose_name='ID',
                )),
            ],
            options={
                'permissions': [('access_admin', 'Can access Wagtail admin')],
                # managed=False: no database table is created for this model.
                'managed': False,
                'default_permissions': [],
            },
        ),
    ]
|
normal
|
{
"blob_id": "52a4213a1729e25f96faebc5fd4f299017446c5a",
"index": 6370,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('wagtailadmin', '0001_create_admin_access_permissions')]\n operations = [migrations.CreateModel(name='Admin', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID'))], options={'permissions': [(\n 'access_admin', 'Can access Wagtail admin')], 'managed': False,\n 'default_permissions': []})]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n initial = True\n dependencies = [('wagtailadmin', '0001_create_admin_access_permissions')]\n operations = [migrations.CreateModel(name='Admin', fields=[('id',\n models.AutoField(auto_created=True, primary_key=True, serialize=\n False, verbose_name='ID'))], options={'permissions': [(\n 'access_admin', 'Can access Wagtail admin')], 'managed': False,\n 'default_permissions': []})]\n",
"step-5": "# Generated by Django 3.0.10 on 2020-12-19 15:07\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n initial = True\n\n dependencies = [\n (\"wagtailadmin\", \"0001_create_admin_access_permissions\"),\n ]\n\n operations = [\n migrations.CreateModel(\n name=\"Admin\",\n fields=[\n (\n \"id\",\n models.AutoField(\n auto_created=True,\n primary_key=True,\n serialize=False,\n verbose_name=\"ID\",\n ),\n ),\n ],\n options={\n \"permissions\": [(\"access_admin\", \"Can access Wagtail admin\")],\n \"managed\": False,\n \"default_permissions\": [],\n },\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class ModelInterface(ProtoSerializable):
def reset(self):
raise NotImplementedError()
pass
def run(self):
raise NotImplementedError()
def stop(self):
raise NotImplementedError()
@property
def abstract_timestamp(self):
raise NotImplementedError()
def state_stream(self):
raise NotImplementedError()
def from_state_stream(self, stream):
raise NotImplementedError()
pass
class ModellUtil(object):
def __init__(self, **kwargs):
super(ModellUtil, self).__init__()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NoReplaceMethod(Exception):
pass
class ModelInterface(ProtoSerializable):
def reset(self):
raise NotImplementedError()
pass
def run(self):
raise NotImplementedError()
def stop(self):
raise NotImplementedError()
@property
def abstract_timestamp(self):
raise NotImplementedError()
def state_stream(self):
raise NotImplementedError()
def from_state_stream(self, stream):
raise NotImplementedError()
pass
class ModellUtil(object):
def __init__(self, **kwargs):
super(ModellUtil, self).__init__()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NoSelectionMethod(Exception):
pass
class NoMetric(Exception):
pass
class NoReproductionMethod(Exception):
pass
class NoReplaceMethod(Exception):
pass
class ModelInterface(ProtoSerializable):
def reset(self):
raise NotImplementedError()
pass
def run(self):
raise NotImplementedError()
def stop(self):
raise NotImplementedError()
@property
def abstract_timestamp(self):
raise NotImplementedError()
def state_stream(self):
raise NotImplementedError()
def from_state_stream(self, stream):
raise NotImplementedError()
pass
class ModellUtil(object):
def __init__(self, **kwargs):
super(ModellUtil, self).__init__()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class NEADone(Exception):
pass
class NoSelectionMethod(Exception):
pass
class NoMetric(Exception):
pass
class NoReproductionMethod(Exception):
pass
class NoReplaceMethod(Exception):
pass
class ModelInterface(ProtoSerializable):
def reset(self):
raise NotImplementedError()
pass
def run(self):
raise NotImplementedError()
def stop(self):
raise NotImplementedError()
@property
def abstract_timestamp(self):
raise NotImplementedError()
def state_stream(self):
raise NotImplementedError()
def from_state_stream(self, stream):
raise NotImplementedError()
pass
class ModellUtil(object):
def __init__(self, **kwargs):
super(ModellUtil, self).__init__()
<|reserved_special_token_1|>
from LAMARCK_ML.data_util import ProtoSerializable
class NEADone(Exception):
pass
class NoSelectionMethod(Exception):
pass
class NoMetric(Exception):
pass
class NoReproductionMethod(Exception):
pass
class NoReplaceMethod(Exception):
pass
class ModelInterface(ProtoSerializable):
def reset(self):
raise NotImplementedError()
pass
def run(self):
raise NotImplementedError()
def stop(self):
raise NotImplementedError()
@property
def abstract_timestamp(self):
raise NotImplementedError()
def state_stream(self):
raise NotImplementedError()
def from_state_stream(self, stream):
raise NotImplementedError()
pass
class ModellUtil(object):
def __init__(self, **kwargs):
super(ModellUtil, self).__init__()
|
flexible
|
{
"blob_id": "501b8a9307a1fd65a5f36029f4df59bbe11d881a",
"index": 6591,
"step-1": "<mask token>\n\n\nclass ModelInterface(ProtoSerializable):\n\n def reset(self):\n raise NotImplementedError()\n pass\n\n def run(self):\n raise NotImplementedError()\n\n def stop(self):\n raise NotImplementedError()\n\n @property\n def abstract_timestamp(self):\n raise NotImplementedError()\n\n def state_stream(self):\n raise NotImplementedError()\n\n def from_state_stream(self, stream):\n raise NotImplementedError()\n pass\n\n\nclass ModellUtil(object):\n\n def __init__(self, **kwargs):\n super(ModellUtil, self).__init__()\n",
"step-2": "<mask token>\n\n\nclass NoReplaceMethod(Exception):\n pass\n\n\nclass ModelInterface(ProtoSerializable):\n\n def reset(self):\n raise NotImplementedError()\n pass\n\n def run(self):\n raise NotImplementedError()\n\n def stop(self):\n raise NotImplementedError()\n\n @property\n def abstract_timestamp(self):\n raise NotImplementedError()\n\n def state_stream(self):\n raise NotImplementedError()\n\n def from_state_stream(self, stream):\n raise NotImplementedError()\n pass\n\n\nclass ModellUtil(object):\n\n def __init__(self, **kwargs):\n super(ModellUtil, self).__init__()\n",
"step-3": "<mask token>\n\n\nclass NoSelectionMethod(Exception):\n pass\n\n\nclass NoMetric(Exception):\n pass\n\n\nclass NoReproductionMethod(Exception):\n pass\n\n\nclass NoReplaceMethod(Exception):\n pass\n\n\nclass ModelInterface(ProtoSerializable):\n\n def reset(self):\n raise NotImplementedError()\n pass\n\n def run(self):\n raise NotImplementedError()\n\n def stop(self):\n raise NotImplementedError()\n\n @property\n def abstract_timestamp(self):\n raise NotImplementedError()\n\n def state_stream(self):\n raise NotImplementedError()\n\n def from_state_stream(self, stream):\n raise NotImplementedError()\n pass\n\n\nclass ModellUtil(object):\n\n def __init__(self, **kwargs):\n super(ModellUtil, self).__init__()\n",
"step-4": "<mask token>\n\n\nclass NEADone(Exception):\n pass\n\n\nclass NoSelectionMethod(Exception):\n pass\n\n\nclass NoMetric(Exception):\n pass\n\n\nclass NoReproductionMethod(Exception):\n pass\n\n\nclass NoReplaceMethod(Exception):\n pass\n\n\nclass ModelInterface(ProtoSerializable):\n\n def reset(self):\n raise NotImplementedError()\n pass\n\n def run(self):\n raise NotImplementedError()\n\n def stop(self):\n raise NotImplementedError()\n\n @property\n def abstract_timestamp(self):\n raise NotImplementedError()\n\n def state_stream(self):\n raise NotImplementedError()\n\n def from_state_stream(self, stream):\n raise NotImplementedError()\n pass\n\n\nclass ModellUtil(object):\n\n def __init__(self, **kwargs):\n super(ModellUtil, self).__init__()\n",
"step-5": "from LAMARCK_ML.data_util import ProtoSerializable\n\n\nclass NEADone(Exception):\n pass\n\n\nclass NoSelectionMethod(Exception):\n pass\n\n\nclass NoMetric(Exception):\n pass\n\n\nclass NoReproductionMethod(Exception):\n pass\n\n\nclass NoReplaceMethod(Exception):\n pass\n\n\nclass ModelInterface(ProtoSerializable):\n def reset(self):\n raise NotImplementedError()\n pass\n\n def run(self):\n raise NotImplementedError()\n\n def stop(self):\n raise NotImplementedError()\n\n @property\n def abstract_timestamp(self):\n raise NotImplementedError()\n\n def state_stream(self):\n raise NotImplementedError()\n\n def from_state_stream(self, stream):\n raise NotImplementedError()\n\n pass\n\n\nclass ModellUtil(object):\n def __init__(self, **kwargs):\n super(ModellUtil, self).__init__()\n",
"step-ids": [
9,
10,
13,
14,
16
]
}
|
[
9,
10,
13,
14,
16
] |
# -*- coding: utf-8 -*-
import time
import datetime
def get_second_long(time_str=None):
if time_str is None:
return long(time.time())
time_array = time.strptime(time_str, "%Y-%m-%d %H:%M:%S")
return long(time.mktime(time_array))
def get_curtime_str():
return datetime.datetime.now()
def get_curtimestamp():
return int(time.time() * 1000)
def get_curdatetime_format():
return get_curtime_str().strftime("%Y-%m-%d %H:%M:%S")
def get_curdate_format():
return get_curtime_str().strftime("%Y-%m-%d")
def get_curmonth_format():
return get_curtime_str().strftime("%Y-%m")
def get_curhour_str():
return get_curtime_str().hour
def get_curminuter_str():
return get_curtime_str().minute
def get_curday_str():
return get_curtime_str().day
def get_curdate_str():
return get_curtime_str().strftime("%Y%m%d")
def get_curdatetime_str():
return get_curtime_str().strftime("%Y%m%d%H%M%S")
def get_curminuter_str():
return get_curtime_str().strftime("%Y%m%d%H%M")
|
normal
|
{
"blob_id": "e735529eddd3a46ea335e593e5937558b50b142d",
"index": 2276,
"step-1": "<mask token>\n\n\ndef get_second_long(time_str=None):\n if time_str is None:\n return long(time.time())\n time_array = time.strptime(time_str, '%Y-%m-%d %H:%M:%S')\n return long(time.mktime(time_array))\n\n\n<mask token>\n\n\ndef get_curtimestamp():\n return int(time.time() * 1000)\n\n\n<mask token>\n\n\ndef get_curdate_format():\n return get_curtime_str().strftime('%Y-%m-%d')\n\n\ndef get_curmonth_format():\n return get_curtime_str().strftime('%Y-%m')\n\n\n<mask token>\n\n\ndef get_curday_str():\n return get_curtime_str().day\n\n\ndef get_curdate_str():\n return get_curtime_str().strftime('%Y%m%d')\n\n\ndef get_curdatetime_str():\n return get_curtime_str().strftime('%Y%m%d%H%M%S')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef get_second_long(time_str=None):\n if time_str is None:\n return long(time.time())\n time_array = time.strptime(time_str, '%Y-%m-%d %H:%M:%S')\n return long(time.mktime(time_array))\n\n\n<mask token>\n\n\ndef get_curtimestamp():\n return int(time.time() * 1000)\n\n\ndef get_curdatetime_format():\n return get_curtime_str().strftime('%Y-%m-%d %H:%M:%S')\n\n\ndef get_curdate_format():\n return get_curtime_str().strftime('%Y-%m-%d')\n\n\ndef get_curmonth_format():\n return get_curtime_str().strftime('%Y-%m')\n\n\n<mask token>\n\n\ndef get_curday_str():\n return get_curtime_str().day\n\n\ndef get_curdate_str():\n return get_curtime_str().strftime('%Y%m%d')\n\n\ndef get_curdatetime_str():\n return get_curtime_str().strftime('%Y%m%d%H%M%S')\n\n\ndef get_curminuter_str():\n return get_curtime_str().strftime('%Y%m%d%H%M')\n",
"step-3": "<mask token>\n\n\ndef get_second_long(time_str=None):\n if time_str is None:\n return long(time.time())\n time_array = time.strptime(time_str, '%Y-%m-%d %H:%M:%S')\n return long(time.mktime(time_array))\n\n\n<mask token>\n\n\ndef get_curtimestamp():\n return int(time.time() * 1000)\n\n\ndef get_curdatetime_format():\n return get_curtime_str().strftime('%Y-%m-%d %H:%M:%S')\n\n\ndef get_curdate_format():\n return get_curtime_str().strftime('%Y-%m-%d')\n\n\ndef get_curmonth_format():\n return get_curtime_str().strftime('%Y-%m')\n\n\n<mask token>\n\n\ndef get_curminuter_str():\n return get_curtime_str().minute\n\n\ndef get_curday_str():\n return get_curtime_str().day\n\n\ndef get_curdate_str():\n return get_curtime_str().strftime('%Y%m%d')\n\n\ndef get_curdatetime_str():\n return get_curtime_str().strftime('%Y%m%d%H%M%S')\n\n\ndef get_curminuter_str():\n return get_curtime_str().strftime('%Y%m%d%H%M')\n",
"step-4": "<mask token>\n\n\ndef get_second_long(time_str=None):\n if time_str is None:\n return long(time.time())\n time_array = time.strptime(time_str, '%Y-%m-%d %H:%M:%S')\n return long(time.mktime(time_array))\n\n\n<mask token>\n\n\ndef get_curtimestamp():\n return int(time.time() * 1000)\n\n\ndef get_curdatetime_format():\n return get_curtime_str().strftime('%Y-%m-%d %H:%M:%S')\n\n\ndef get_curdate_format():\n return get_curtime_str().strftime('%Y-%m-%d')\n\n\ndef get_curmonth_format():\n return get_curtime_str().strftime('%Y-%m')\n\n\ndef get_curhour_str():\n return get_curtime_str().hour\n\n\ndef get_curminuter_str():\n return get_curtime_str().minute\n\n\ndef get_curday_str():\n return get_curtime_str().day\n\n\ndef get_curdate_str():\n return get_curtime_str().strftime('%Y%m%d')\n\n\ndef get_curdatetime_str():\n return get_curtime_str().strftime('%Y%m%d%H%M%S')\n\n\ndef get_curminuter_str():\n return get_curtime_str().strftime('%Y%m%d%H%M')\n",
"step-5": "# -*- coding: utf-8 -*-\n\nimport time\nimport datetime\n\n\ndef get_second_long(time_str=None):\n if time_str is None:\n return long(time.time())\n time_array = time.strptime(time_str, \"%Y-%m-%d %H:%M:%S\")\n return long(time.mktime(time_array))\n\n\ndef get_curtime_str():\n return datetime.datetime.now()\n\n\ndef get_curtimestamp():\n return int(time.time() * 1000)\n\n\ndef get_curdatetime_format():\n return get_curtime_str().strftime(\"%Y-%m-%d %H:%M:%S\")\n\n\ndef get_curdate_format():\n return get_curtime_str().strftime(\"%Y-%m-%d\")\n\n\ndef get_curmonth_format():\n return get_curtime_str().strftime(\"%Y-%m\")\n\n\ndef get_curhour_str():\n return get_curtime_str().hour\n\n\ndef get_curminuter_str():\n return get_curtime_str().minute\n\n\ndef get_curday_str():\n return get_curtime_str().day\n\n\ndef get_curdate_str():\n return get_curtime_str().strftime(\"%Y%m%d\")\n\n\ndef get_curdatetime_str():\n return get_curtime_str().strftime(\"%Y%m%d%H%M%S\")\n\n\ndef get_curminuter_str():\n return get_curtime_str().strftime(\"%Y%m%d%H%M\")\n\n\n\n\n\n",
"step-ids": [
7,
9,
10,
11,
14
]
}
|
[
7,
9,
10,
11,
14
] |
containerized: "docker://quay.io/snakemake/containerize-testimage:1.0"
rule a:
output:
"test.out"
conda:
"env.yaml"
shell:
"bcftools 2> {output} || true"
|
normal
|
{
"blob_id": "6e0d09bd0c9d1d272f727817cec65b81f83d02f5",
"index": 6742,
"step-1": "containerized: \"docker://quay.io/snakemake/containerize-testimage:1.0\"\n\nrule a:\n output:\n \"test.out\"\n conda:\n \"env.yaml\"\n shell:\n \"bcftools 2> {output} || true\"\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
import random
import Manhattan_segmental_dist
# Greedy
# s: dictionary of points
# k: number of medoids
# returns
# k medoids from sample set s
def greedy(s, k):
# print("Hello Word!")
m_1 = random.choice(list(s.keys()))
medoids = {m_1: s[m_1]}
dimensions = list(range(len(s[m_1])))
s.pop(m_1)
dist = {}
# compute distance between each point and medoid m1
for x in s:
dist[x] = Manhattan_segmental_dist.manhattan_segmental_dist(medoids[m_1], s[x], dimensions)
for i in range(1, k):
m_i = max(dist, key=lambda x: dist.get(x))
medoids[m_i] = s[m_i]
dist.pop(m_i)
s.pop(m_i)
for x in s:
dist[x] = min(dist[x], Manhattan_segmental_dist.manhattan_segmental_dist(medoids[m_i], s[x], dimensions))
return medoids
|
normal
|
{
"blob_id": "9a02bd0bc14494db033c032003aa5baea111ea8c",
"index": 7185,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef greedy(s, k):\n m_1 = random.choice(list(s.keys()))\n medoids = {m_1: s[m_1]}\n dimensions = list(range(len(s[m_1])))\n s.pop(m_1)\n dist = {}\n for x in s:\n dist[x] = Manhattan_segmental_dist.manhattan_segmental_dist(medoids\n [m_1], s[x], dimensions)\n for i in range(1, k):\n m_i = max(dist, key=lambda x: dist.get(x))\n medoids[m_i] = s[m_i]\n dist.pop(m_i)\n s.pop(m_i)\n for x in s:\n dist[x] = min(dist[x], Manhattan_segmental_dist.\n manhattan_segmental_dist(medoids[m_i], s[x], dimensions))\n return medoids\n",
"step-3": "import random\nimport Manhattan_segmental_dist\n\n\ndef greedy(s, k):\n m_1 = random.choice(list(s.keys()))\n medoids = {m_1: s[m_1]}\n dimensions = list(range(len(s[m_1])))\n s.pop(m_1)\n dist = {}\n for x in s:\n dist[x] = Manhattan_segmental_dist.manhattan_segmental_dist(medoids\n [m_1], s[x], dimensions)\n for i in range(1, k):\n m_i = max(dist, key=lambda x: dist.get(x))\n medoids[m_i] = s[m_i]\n dist.pop(m_i)\n s.pop(m_i)\n for x in s:\n dist[x] = min(dist[x], Manhattan_segmental_dist.\n manhattan_segmental_dist(medoids[m_i], s[x], dimensions))\n return medoids\n",
"step-4": "import random\nimport Manhattan_segmental_dist\n\n\n# Greedy\n# s: dictionary of points\n# k: number of medoids\n# returns\n# k medoids from sample set s\ndef greedy(s, k):\n # print(\"Hello Word!\")\n m_1 = random.choice(list(s.keys()))\n medoids = {m_1: s[m_1]}\n dimensions = list(range(len(s[m_1])))\n s.pop(m_1)\n dist = {}\n # compute distance between each point and medoid m1\n for x in s:\n dist[x] = Manhattan_segmental_dist.manhattan_segmental_dist(medoids[m_1], s[x], dimensions)\n for i in range(1, k):\n m_i = max(dist, key=lambda x: dist.get(x))\n medoids[m_i] = s[m_i]\n dist.pop(m_i)\n s.pop(m_i)\n for x in s:\n dist[x] = min(dist[x], Manhattan_segmental_dist.manhattan_segmental_dist(medoids[m_i], s[x], dimensions))\n return medoids\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import datetime
import json
import logging
import requests
from lib.crits.exceptions import CRITsOperationalError
from lib.crits.vocabulary.indicators import IndicatorThreatTypes as itt
from lib.crits.vocabulary.indicators import IndicatorAttackTypes as iat
log = logging.getLogger()
class CRITsAPI():
def __init__(self, api_url='', api_key='', username='', verify=True,
proxies={}):
self.url = api_url
if self.url[-1] == '/':
self.url = self.url[:-1]
self.api_key = api_key
self.username = username
self.verify = verify
self.proxies = proxies
def get_object(self, obj_id, obj_type):
type_trans = self._type_translation(obj_type)
get_url = '{}/{}/{}/'.format(self.url, type_trans, obj_id)
params = {
'username' : self.username,
'api_key' : self.api_key,
}
r = requests.get(get_url, params=params, proxies=self.proxies, verify=self.verify)
if r.status_code == 200:
return json.loads(r.text)
else:
print('Status code returned for query {}, '
'was: {}'.format(get_url, r.status_code))
return None
def add_indicator(self, source = '', reference = '', method = '',
campaign = None, confidence = None, bucket_list = [], ticket = '',
add_domain = True, add_relationship = True,
indicator_confidence = 'unknown', indicator_impact = 'unknown',
type = None, threat_type = itt.UNKNOWN, attack_type = iat.UNKNOWN,
value = None, description = ''):
# Time to upload these indicators
data = {
'api_key' : self.api_key,
'username' : self.username,
'source' : source,
'reference' : reference,
'method' : '',
'campaign' : campaign,
'confidence' : confidence,
'bucket_list' : bucket_list,
'ticket' : ticket,
'add_domain' : True,
'add_relationship' : True,
'indicator_confidence' : indicator_confidence,
'indicator_impact' : indicator_impact,
'type' : type,
'threat_type' : threat_type,
'attack_type' : attack_type,
'value' : value,
'description' : description,
}
r = requests.post("{0}/indicators/".format(self.url), data=data,
verify=self.verify, proxies=self.proxies)
if r.status_code == 200:
log.debug("Indicator uploaded successfully - {}".format(value))
ind = json.loads(r.text)
return ind
return None
def has_relationship(self, left_id, left_type, right_id, right_type,
rel_type='Related To'):
data = self.get_object(left_id, left_type)
if not data:
raise CRITsOperationalError('Crits Object not found with id {} and '
'type {}'.format(left_id, left_type))
if not 'relationships' in data:
return False
for relationship in data['relationships']:
if relationship['relationship'] != rel_type:
continue
if relationship['value'] != right_id:
continue
if relationship['type'] != right_type:
continue
return True
return False
def forge_relationship(self, left_id, left_type, right_id, right_type,
rel_type, rel_date='', rel_confidence='high',
rel_reason=''):
if not rel_date:
rel_date = datetime.datetime.now()
type_trans = self._type_translation(left_type)
submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id)
headers = {
'Content-Type' : 'application/json',
}
params = {
'api_key' : self.api_key,
'username' : self.username,
}
data = {
'action' : 'forge_relationship',
'right_type' : right_type,
'right_id' : right_id,
'rel_type' : rel_type,
'rel_date' : rel_date,
'rel_confidence' : rel_confidence,
'rel_reason' : rel_reason
}
r = requests.patch(submit_url, params=params, data=data,
proxies=self.proxies, verify=self.verify)
if r.status_code == 200:
log.debug('Relationship built successfully: {0} <-> '
'{1}'.format(left_id, right_id))
return True
else:
log.error('Error with status code {0} and message {1} between '
'these indicators: {2} <-> '
'{3}'.format(r.status_code, r.text, left_id, right_id))
return False
def add_campaign_to_object(self, id, type, campaign, confidence, analyst,
date, description):
# TODO: Make sure the object does not already have the campaign
# Return if it does. Add it if it doesn't
obj = getattr(self.db, type)
result = obj.find( { '_id' : id, 'campaign.name' : campaign } )
if result:
import pdb
pdb.set_trace()
def _type_translation(self, str_type):
if str_type == 'Indicator':
return 'indicators'
if str_type == 'Domain':
return 'domains'
if str_type == 'IP':
return 'ips'
if str_type == 'Sample':
return 'samples'
if str_type == 'Event':
return 'events'
if str_type == 'Actor':
return 'actors'
if str_type == 'Email':
return 'emails'
if str_type == 'Backdoor':
return 'backdoors'
raise CRITsOperationalError('Invalid object type specified: '
'{}'.format(str_type))
|
normal
|
{
"blob_id": "a505cc0e382554d65447a3fe3a56fac43c1964f2",
"index": 8133,
"step-1": "<mask token>\n\n\nclass CRITsAPI:\n <mask token>\n\n def get_object(self, obj_id, obj_type):\n type_trans = self._type_translation(obj_type)\n get_url = '{}/{}/{}/'.format(self.url, type_trans, obj_id)\n params = {'username': self.username, 'api_key': self.api_key}\n r = requests.get(get_url, params=params, proxies=self.proxies,\n verify=self.verify)\n if r.status_code == 200:\n return json.loads(r.text)\n else:\n print('Status code returned for query {}, was: {}'.format(\n get_url, r.status_code))\n return None\n\n def add_indicator(self, source='', reference='', method='', campaign=\n None, confidence=None, bucket_list=[], ticket='', add_domain=True,\n add_relationship=True, indicator_confidence='unknown',\n indicator_impact='unknown', type=None, threat_type=itt.UNKNOWN,\n attack_type=iat.UNKNOWN, value=None, description=''):\n data = {'api_key': self.api_key, 'username': self.username,\n 'source': source, 'reference': reference, 'method': '',\n 'campaign': campaign, 'confidence': confidence, 'bucket_list':\n bucket_list, 'ticket': ticket, 'add_domain': True,\n 'add_relationship': True, 'indicator_confidence':\n indicator_confidence, 'indicator_impact': indicator_impact,\n 'type': type, 'threat_type': threat_type, 'attack_type':\n attack_type, 'value': value, 'description': description}\n r = requests.post('{0}/indicators/'.format(self.url), data=data,\n verify=self.verify, proxies=self.proxies)\n if r.status_code == 200:\n log.debug('Indicator uploaded successfully - {}'.format(value))\n ind = json.loads(r.text)\n return ind\n return None\n <mask token>\n\n def forge_relationship(self, left_id, left_type, right_id, right_type,\n rel_type, rel_date='', rel_confidence='high', rel_reason=''):\n if not rel_date:\n rel_date = datetime.datetime.now()\n type_trans = self._type_translation(left_type)\n submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id)\n headers = {'Content-Type': 'application/json'}\n params = {'api_key': self.api_key, 
'username': self.username}\n data = {'action': 'forge_relationship', 'right_type': right_type,\n 'right_id': right_id, 'rel_type': rel_type, 'rel_date':\n rel_date, 'rel_confidence': rel_confidence, 'rel_reason':\n rel_reason}\n r = requests.patch(submit_url, params=params, data=data, proxies=\n self.proxies, verify=self.verify)\n if r.status_code == 200:\n log.debug('Relationship built successfully: {0} <-> {1}'.format\n (left_id, right_id))\n return True\n else:\n log.error(\n 'Error with status code {0} and message {1} between these indicators: {2} <-> {3}'\n .format(r.status_code, r.text, left_id, right_id))\n return False\n\n def add_campaign_to_object(self, id, type, campaign, confidence,\n analyst, date, description):\n obj = getattr(self.db, type)\n result = obj.find({'_id': id, 'campaign.name': campaign})\n if result:\n import pdb\n pdb.set_trace()\n\n def _type_translation(self, str_type):\n if str_type == 'Indicator':\n return 'indicators'\n if str_type == 'Domain':\n return 'domains'\n if str_type == 'IP':\n return 'ips'\n if str_type == 'Sample':\n return 'samples'\n if str_type == 'Event':\n return 'events'\n if str_type == 'Actor':\n return 'actors'\n if str_type == 'Email':\n return 'emails'\n if str_type == 'Backdoor':\n return 'backdoors'\n raise CRITsOperationalError('Invalid object type specified: {}'.\n format(str_type))\n",
"step-2": "<mask token>\n\n\nclass CRITsAPI:\n <mask token>\n\n def get_object(self, obj_id, obj_type):\n type_trans = self._type_translation(obj_type)\n get_url = '{}/{}/{}/'.format(self.url, type_trans, obj_id)\n params = {'username': self.username, 'api_key': self.api_key}\n r = requests.get(get_url, params=params, proxies=self.proxies,\n verify=self.verify)\n if r.status_code == 200:\n return json.loads(r.text)\n else:\n print('Status code returned for query {}, was: {}'.format(\n get_url, r.status_code))\n return None\n\n def add_indicator(self, source='', reference='', method='', campaign=\n None, confidence=None, bucket_list=[], ticket='', add_domain=True,\n add_relationship=True, indicator_confidence='unknown',\n indicator_impact='unknown', type=None, threat_type=itt.UNKNOWN,\n attack_type=iat.UNKNOWN, value=None, description=''):\n data = {'api_key': self.api_key, 'username': self.username,\n 'source': source, 'reference': reference, 'method': '',\n 'campaign': campaign, 'confidence': confidence, 'bucket_list':\n bucket_list, 'ticket': ticket, 'add_domain': True,\n 'add_relationship': True, 'indicator_confidence':\n indicator_confidence, 'indicator_impact': indicator_impact,\n 'type': type, 'threat_type': threat_type, 'attack_type':\n attack_type, 'value': value, 'description': description}\n r = requests.post('{0}/indicators/'.format(self.url), data=data,\n verify=self.verify, proxies=self.proxies)\n if r.status_code == 200:\n log.debug('Indicator uploaded successfully - {}'.format(value))\n ind = json.loads(r.text)\n return ind\n return None\n\n def has_relationship(self, left_id, left_type, right_id, right_type,\n rel_type='Related To'):\n data = self.get_object(left_id, left_type)\n if not data:\n raise CRITsOperationalError(\n 'Crits Object not found with id {} and type {}'.format(\n left_id, left_type))\n if not 'relationships' in data:\n return False\n for relationship in data['relationships']:\n if relationship['relationship'] != rel_type:\n 
continue\n if relationship['value'] != right_id:\n continue\n if relationship['type'] != right_type:\n continue\n return True\n return False\n\n def forge_relationship(self, left_id, left_type, right_id, right_type,\n rel_type, rel_date='', rel_confidence='high', rel_reason=''):\n if not rel_date:\n rel_date = datetime.datetime.now()\n type_trans = self._type_translation(left_type)\n submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id)\n headers = {'Content-Type': 'application/json'}\n params = {'api_key': self.api_key, 'username': self.username}\n data = {'action': 'forge_relationship', 'right_type': right_type,\n 'right_id': right_id, 'rel_type': rel_type, 'rel_date':\n rel_date, 'rel_confidence': rel_confidence, 'rel_reason':\n rel_reason}\n r = requests.patch(submit_url, params=params, data=data, proxies=\n self.proxies, verify=self.verify)\n if r.status_code == 200:\n log.debug('Relationship built successfully: {0} <-> {1}'.format\n (left_id, right_id))\n return True\n else:\n log.error(\n 'Error with status code {0} and message {1} between these indicators: {2} <-> {3}'\n .format(r.status_code, r.text, left_id, right_id))\n return False\n\n def add_campaign_to_object(self, id, type, campaign, confidence,\n analyst, date, description):\n obj = getattr(self.db, type)\n result = obj.find({'_id': id, 'campaign.name': campaign})\n if result:\n import pdb\n pdb.set_trace()\n\n def _type_translation(self, str_type):\n if str_type == 'Indicator':\n return 'indicators'\n if str_type == 'Domain':\n return 'domains'\n if str_type == 'IP':\n return 'ips'\n if str_type == 'Sample':\n return 'samples'\n if str_type == 'Event':\n return 'events'\n if str_type == 'Actor':\n return 'actors'\n if str_type == 'Email':\n return 'emails'\n if str_type == 'Backdoor':\n return 'backdoors'\n raise CRITsOperationalError('Invalid object type specified: {}'.\n format(str_type))\n",
"step-3": "<mask token>\n\n\nclass CRITsAPI:\n\n def __init__(self, api_url='', api_key='', username='', verify=True,\n proxies={}):\n self.url = api_url\n if self.url[-1] == '/':\n self.url = self.url[:-1]\n self.api_key = api_key\n self.username = username\n self.verify = verify\n self.proxies = proxies\n\n def get_object(self, obj_id, obj_type):\n type_trans = self._type_translation(obj_type)\n get_url = '{}/{}/{}/'.format(self.url, type_trans, obj_id)\n params = {'username': self.username, 'api_key': self.api_key}\n r = requests.get(get_url, params=params, proxies=self.proxies,\n verify=self.verify)\n if r.status_code == 200:\n return json.loads(r.text)\n else:\n print('Status code returned for query {}, was: {}'.format(\n get_url, r.status_code))\n return None\n\n def add_indicator(self, source='', reference='', method='', campaign=\n None, confidence=None, bucket_list=[], ticket='', add_domain=True,\n add_relationship=True, indicator_confidence='unknown',\n indicator_impact='unknown', type=None, threat_type=itt.UNKNOWN,\n attack_type=iat.UNKNOWN, value=None, description=''):\n data = {'api_key': self.api_key, 'username': self.username,\n 'source': source, 'reference': reference, 'method': '',\n 'campaign': campaign, 'confidence': confidence, 'bucket_list':\n bucket_list, 'ticket': ticket, 'add_domain': True,\n 'add_relationship': True, 'indicator_confidence':\n indicator_confidence, 'indicator_impact': indicator_impact,\n 'type': type, 'threat_type': threat_type, 'attack_type':\n attack_type, 'value': value, 'description': description}\n r = requests.post('{0}/indicators/'.format(self.url), data=data,\n verify=self.verify, proxies=self.proxies)\n if r.status_code == 200:\n log.debug('Indicator uploaded successfully - {}'.format(value))\n ind = json.loads(r.text)\n return ind\n return None\n\n def has_relationship(self, left_id, left_type, right_id, right_type,\n rel_type='Related To'):\n data = self.get_object(left_id, left_type)\n if not data:\n raise 
CRITsOperationalError(\n 'Crits Object not found with id {} and type {}'.format(\n left_id, left_type))\n if not 'relationships' in data:\n return False\n for relationship in data['relationships']:\n if relationship['relationship'] != rel_type:\n continue\n if relationship['value'] != right_id:\n continue\n if relationship['type'] != right_type:\n continue\n return True\n return False\n\n def forge_relationship(self, left_id, left_type, right_id, right_type,\n rel_type, rel_date='', rel_confidence='high', rel_reason=''):\n if not rel_date:\n rel_date = datetime.datetime.now()\n type_trans = self._type_translation(left_type)\n submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id)\n headers = {'Content-Type': 'application/json'}\n params = {'api_key': self.api_key, 'username': self.username}\n data = {'action': 'forge_relationship', 'right_type': right_type,\n 'right_id': right_id, 'rel_type': rel_type, 'rel_date':\n rel_date, 'rel_confidence': rel_confidence, 'rel_reason':\n rel_reason}\n r = requests.patch(submit_url, params=params, data=data, proxies=\n self.proxies, verify=self.verify)\n if r.status_code == 200:\n log.debug('Relationship built successfully: {0} <-> {1}'.format\n (left_id, right_id))\n return True\n else:\n log.error(\n 'Error with status code {0} and message {1} between these indicators: {2} <-> {3}'\n .format(r.status_code, r.text, left_id, right_id))\n return False\n\n def add_campaign_to_object(self, id, type, campaign, confidence,\n analyst, date, description):\n obj = getattr(self.db, type)\n result = obj.find({'_id': id, 'campaign.name': campaign})\n if result:\n import pdb\n pdb.set_trace()\n\n def _type_translation(self, str_type):\n if str_type == 'Indicator':\n return 'indicators'\n if str_type == 'Domain':\n return 'domains'\n if str_type == 'IP':\n return 'ips'\n if str_type == 'Sample':\n return 'samples'\n if str_type == 'Event':\n return 'events'\n if str_type == 'Actor':\n return 'actors'\n if str_type == 'Email':\n 
return 'emails'\n if str_type == 'Backdoor':\n return 'backdoors'\n raise CRITsOperationalError('Invalid object type specified: {}'.\n format(str_type))\n",
"step-4": "<mask token>\nlog = logging.getLogger()\n\n\nclass CRITsAPI:\n\n def __init__(self, api_url='', api_key='', username='', verify=True,\n proxies={}):\n self.url = api_url\n if self.url[-1] == '/':\n self.url = self.url[:-1]\n self.api_key = api_key\n self.username = username\n self.verify = verify\n self.proxies = proxies\n\n def get_object(self, obj_id, obj_type):\n type_trans = self._type_translation(obj_type)\n get_url = '{}/{}/{}/'.format(self.url, type_trans, obj_id)\n params = {'username': self.username, 'api_key': self.api_key}\n r = requests.get(get_url, params=params, proxies=self.proxies,\n verify=self.verify)\n if r.status_code == 200:\n return json.loads(r.text)\n else:\n print('Status code returned for query {}, was: {}'.format(\n get_url, r.status_code))\n return None\n\n def add_indicator(self, source='', reference='', method='', campaign=\n None, confidence=None, bucket_list=[], ticket='', add_domain=True,\n add_relationship=True, indicator_confidence='unknown',\n indicator_impact='unknown', type=None, threat_type=itt.UNKNOWN,\n attack_type=iat.UNKNOWN, value=None, description=''):\n data = {'api_key': self.api_key, 'username': self.username,\n 'source': source, 'reference': reference, 'method': '',\n 'campaign': campaign, 'confidence': confidence, 'bucket_list':\n bucket_list, 'ticket': ticket, 'add_domain': True,\n 'add_relationship': True, 'indicator_confidence':\n indicator_confidence, 'indicator_impact': indicator_impact,\n 'type': type, 'threat_type': threat_type, 'attack_type':\n attack_type, 'value': value, 'description': description}\n r = requests.post('{0}/indicators/'.format(self.url), data=data,\n verify=self.verify, proxies=self.proxies)\n if r.status_code == 200:\n log.debug('Indicator uploaded successfully - {}'.format(value))\n ind = json.loads(r.text)\n return ind\n return None\n\n def has_relationship(self, left_id, left_type, right_id, right_type,\n rel_type='Related To'):\n data = self.get_object(left_id, left_type)\n 
if not data:\n raise CRITsOperationalError(\n 'Crits Object not found with id {} and type {}'.format(\n left_id, left_type))\n if not 'relationships' in data:\n return False\n for relationship in data['relationships']:\n if relationship['relationship'] != rel_type:\n continue\n if relationship['value'] != right_id:\n continue\n if relationship['type'] != right_type:\n continue\n return True\n return False\n\n def forge_relationship(self, left_id, left_type, right_id, right_type,\n rel_type, rel_date='', rel_confidence='high', rel_reason=''):\n if not rel_date:\n rel_date = datetime.datetime.now()\n type_trans = self._type_translation(left_type)\n submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id)\n headers = {'Content-Type': 'application/json'}\n params = {'api_key': self.api_key, 'username': self.username}\n data = {'action': 'forge_relationship', 'right_type': right_type,\n 'right_id': right_id, 'rel_type': rel_type, 'rel_date':\n rel_date, 'rel_confidence': rel_confidence, 'rel_reason':\n rel_reason}\n r = requests.patch(submit_url, params=params, data=data, proxies=\n self.proxies, verify=self.verify)\n if r.status_code == 200:\n log.debug('Relationship built successfully: {0} <-> {1}'.format\n (left_id, right_id))\n return True\n else:\n log.error(\n 'Error with status code {0} and message {1} between these indicators: {2} <-> {3}'\n .format(r.status_code, r.text, left_id, right_id))\n return False\n\n def add_campaign_to_object(self, id, type, campaign, confidence,\n analyst, date, description):\n obj = getattr(self.db, type)\n result = obj.find({'_id': id, 'campaign.name': campaign})\n if result:\n import pdb\n pdb.set_trace()\n\n def _type_translation(self, str_type):\n if str_type == 'Indicator':\n return 'indicators'\n if str_type == 'Domain':\n return 'domains'\n if str_type == 'IP':\n return 'ips'\n if str_type == 'Sample':\n return 'samples'\n if str_type == 'Event':\n return 'events'\n if str_type == 'Actor':\n return 'actors'\n if 
str_type == 'Email':\n return 'emails'\n if str_type == 'Backdoor':\n return 'backdoors'\n raise CRITsOperationalError('Invalid object type specified: {}'.\n format(str_type))\n",
"step-5": "import datetime\nimport json\nimport logging\nimport requests\n\nfrom lib.crits.exceptions import CRITsOperationalError\nfrom lib.crits.vocabulary.indicators import IndicatorThreatTypes as itt\nfrom lib.crits.vocabulary.indicators import IndicatorAttackTypes as iat\n\nlog = logging.getLogger()\n\nclass CRITsAPI():\n\n def __init__(self, api_url='', api_key='', username='', verify=True,\n proxies={}):\n self.url = api_url\n if self.url[-1] == '/':\n self.url = self.url[:-1]\n self.api_key = api_key\n self.username = username\n self.verify = verify\n self.proxies = proxies\n\n def get_object(self, obj_id, obj_type):\n type_trans = self._type_translation(obj_type)\n get_url = '{}/{}/{}/'.format(self.url, type_trans, obj_id)\n params = {\n 'username' : self.username,\n 'api_key' : self.api_key,\n }\n r = requests.get(get_url, params=params, proxies=self.proxies, verify=self.verify)\n if r.status_code == 200:\n return json.loads(r.text)\n else:\n print('Status code returned for query {}, '\n 'was: {}'.format(get_url, r.status_code))\n return None\n\n def add_indicator(self, source = '', reference = '', method = '',\n campaign = None, confidence = None, bucket_list = [], ticket = '',\n add_domain = True, add_relationship = True,\n indicator_confidence = 'unknown', indicator_impact = 'unknown',\n type = None, threat_type = itt.UNKNOWN, attack_type = iat.UNKNOWN,\n value = None, description = ''):\n # Time to upload these indicators\n data = {\n 'api_key' : self.api_key,\n 'username' : self.username,\n 'source' : source,\n 'reference' : reference,\n 'method' : '',\n 'campaign' : campaign,\n 'confidence' : confidence,\n 'bucket_list' : bucket_list,\n 'ticket' : ticket,\n 'add_domain' : True,\n 'add_relationship' : True,\n 'indicator_confidence' : indicator_confidence,\n 'indicator_impact' : indicator_impact,\n 'type' : type,\n 'threat_type' : threat_type,\n 'attack_type' : attack_type,\n 'value' : value,\n 'description' : description,\n }\n\n r = 
requests.post(\"{0}/indicators/\".format(self.url), data=data,\n verify=self.verify, proxies=self.proxies)\n if r.status_code == 200:\n log.debug(\"Indicator uploaded successfully - {}\".format(value))\n ind = json.loads(r.text)\n return ind\n\n return None\n\n def has_relationship(self, left_id, left_type, right_id, right_type,\n rel_type='Related To'):\n data = self.get_object(left_id, left_type)\n if not data:\n raise CRITsOperationalError('Crits Object not found with id {} and '\n 'type {}'.format(left_id, left_type))\n if not 'relationships' in data:\n return False\n for relationship in data['relationships']:\n if relationship['relationship'] != rel_type:\n continue\n if relationship['value'] != right_id:\n continue\n if relationship['type'] != right_type:\n continue\n return True\n return False\n\n def forge_relationship(self, left_id, left_type, right_id, right_type,\n rel_type, rel_date='', rel_confidence='high',\n rel_reason=''):\n if not rel_date:\n rel_date = datetime.datetime.now()\n type_trans = self._type_translation(left_type)\n submit_url = '{}/{}/{}/'.format(self.url, type_trans, left_id)\n headers = {\n 'Content-Type' : 'application/json',\n }\n\n params = {\n 'api_key' : self.api_key,\n 'username' : self.username,\n }\n\n data = {\n 'action' : 'forge_relationship',\n 'right_type' : right_type,\n 'right_id' : right_id,\n 'rel_type' : rel_type,\n 'rel_date' : rel_date,\n 'rel_confidence' : rel_confidence,\n 'rel_reason' : rel_reason\n }\n\n r = requests.patch(submit_url, params=params, data=data,\n proxies=self.proxies, verify=self.verify)\n if r.status_code == 200:\n log.debug('Relationship built successfully: {0} <-> '\n '{1}'.format(left_id, right_id))\n return True\n else:\n log.error('Error with status code {0} and message {1} between '\n 'these indicators: {2} <-> '\n '{3}'.format(r.status_code, r.text, left_id, right_id))\n return False\n\n def add_campaign_to_object(self, id, type, campaign, confidence, analyst,\n date, description):\n # 
TODO: Make sure the object does not already have the campaign\n # Return if it does. Add it if it doesn't\n obj = getattr(self.db, type)\n result = obj.find( { '_id' : id, 'campaign.name' : campaign } )\n if result:\n import pdb\n pdb.set_trace()\n\n def _type_translation(self, str_type):\n if str_type == 'Indicator':\n return 'indicators'\n if str_type == 'Domain':\n return 'domains'\n if str_type == 'IP':\n return 'ips'\n if str_type == 'Sample':\n return 'samples'\n if str_type == 'Event':\n return 'events'\n if str_type == 'Actor':\n return 'actors'\n if str_type == 'Email':\n return 'emails'\n if str_type == 'Backdoor':\n return 'backdoors'\n\n raise CRITsOperationalError('Invalid object type specified: '\n '{}'.format(str_type))\n",
"step-ids": [
6,
7,
8,
9,
11
]
}
|
[
6,
7,
8,
9,
11
] |
#!/usr/bin/env python
#pylint: skip-file
"""
HostApi.py
Copyright 2016 Cisco Systems
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import os
import urllib.request, urllib.parse, urllib.error
from .models import *
class HostApi(object):
    """REST client wrapper for the ``/host`` resource family.

    All HTTP mechanics (transport, (de)serialization) are delegated to the
    injected ``apiClient``; this class only assembles resource paths, query
    parameters and headers.  The original generated code repeated the same
    validation / dispatch boilerplate in every method; it is factored into
    the private helpers below with identical behaviour.
    """

    def __init__(self, apiClient):
        # apiClient must provide toPathValue(), callAPI() and deserialize().
        self.apiClient = apiClient

    def _validate_params(self, kwargs, allParams, methodName):
        """Raise TypeError if *kwargs* contains a name not in *allParams*."""
        for key in kwargs:
            if key not in allParams:
                raise TypeError(
                    "Got an unexpected keyword argument '%s' to method %s"
                    % (key, methodName))

    def _get(self, resourcePath, queryParams, headerParams, responseType):
        """Issue a GET request and deserialize the response.

        Returns None when the transport produced no (or an empty/falsy)
        response, mirroring the behaviour of the generated client code.
        """
        headerParams['Accept'] = 'application/json'
        headerParams['Content-Type'] = 'application/json'
        # GET requests carry no body: in the generated code formParams was
        # never populated, so postData always ended up being None.
        postData = None
        response = self.apiClient.callAPI(resourcePath, 'GET', queryParams,
                                          postData, headerParams, files={})
        if not response:
            return None
        return self.apiClient.deserialize(response, responseType)

    def getHosts(self, **kwargs):
        """Retrieve hosts.

        Accepted keyword arguments: limit, offset, sortBy, order, hostName,
        hostMac, hostType, connectedInterfaceName, hostIp, connectedDeviceIp
        (forwarded as query parameters) and scope (RBAC authorization
        header).

        Returns: HostListResult, or None if the API returned no response.
        Raises: TypeError for any unexpected keyword argument.
        """
        queryNames = ['limit', 'offset', 'sortBy', 'order', 'hostName',
                      'hostMac', 'hostType', 'connectedInterfaceName',
                      'hostIp', 'connectedDeviceIp']
        self._validate_params(kwargs, queryNames + ['scope'], 'getHosts')
        queryParams = {}
        for name in queryNames:
            if name in kwargs:
                queryParams[name] = self.apiClient.toPathValue(kwargs[name])
        headerParams = {}
        if 'scope' in kwargs:
            headerParams['scope'] = kwargs['scope']
        return self._get('/host', queryParams, headerParams, 'HostListResult')

    def getHostCount(self, **kwargs):
        """Give the total number of hosts.

        Accepted keyword arguments: scope (RBAC authorization header).

        Returns: CountResult, or None if the API returned no response.
        Raises: TypeError for any unexpected keyword argument.
        """
        self._validate_params(kwargs, ['scope'], 'getHostCount')
        headerParams = {}
        if 'scope' in kwargs:
            headerParams['scope'] = kwargs['scope']
        return self._get('/host/count', {}, headerParams, 'CountResult')

    def getHostById(self, **kwargs):
        """Retrieve a single host by its id.

        Accepted keyword arguments: id (host identifier, substituted into
        the resource path) and scope (RBAC authorization header).

        Returns: HostResult, or None if the API returned no response.
        Raises: TypeError for any unexpected keyword argument.
        """
        self._validate_params(kwargs, ['id', 'scope'], 'getHostById')
        headerParams = {}
        if 'scope' in kwargs:
            headerParams['scope'] = kwargs['scope']
        resourcePath = '/host/{id}'
        if 'id' in kwargs:
            # Percent-encode the id so it is safe inside the URL path.
            replacement = urllib.parse.quote(
                str(self.apiClient.toPathValue(kwargs['id'])))
            resourcePath = resourcePath.replace('{id}', replacement)
        return self._get(resourcePath, {}, headerParams, 'HostResult')
|
normal
|
{
"blob_id": "4243c863827f1378c364171ca7d8fdabd42be22f",
"index": 3625,
"step-1": "<mask token>\n\n\nclass HostApi(object):\n <mask token>\n <mask token>\n <mask token>\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-2": "<mask token>\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',\n 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',\n 'connectedDeviceIp', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHosts\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'limit' in params:\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n if 'offset' in params:\n queryParams['offset'] = self.apiClient.toPathValue(params['offset']\n )\n if 'sortBy' in params:\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']\n )\n if 'order' in params:\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n if 'hostName' in params:\n queryParams['hostName'] = self.apiClient.toPathValue(params[\n 'hostName'])\n if 'hostMac' in params:\n queryParams['hostMac'] = 
self.apiClient.toPathValue(params[\n 'hostMac'])\n if 'hostType' in params:\n queryParams['hostType'] = self.apiClient.toPathValue(params[\n 'hostType'])\n if 'connectedInterfaceName' in params:\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(\n params['connectedInterfaceName'])\n if 'hostIp' in params:\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']\n )\n if 'connectedDeviceIp' in params:\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(\n params['connectedDeviceIp'])\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n <mask token>\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n 
response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-3": "<mask token>\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',\n 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',\n 'connectedDeviceIp', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHosts\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'limit' in params:\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n if 'offset' in params:\n queryParams['offset'] = self.apiClient.toPathValue(params['offset']\n )\n if 'sortBy' in params:\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']\n )\n if 'order' in params:\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n if 'hostName' in params:\n queryParams['hostName'] = self.apiClient.toPathValue(params[\n 'hostName'])\n if 'hostMac' in params:\n queryParams['hostMac'] = 
self.apiClient.toPathValue(params[\n 'hostMac'])\n if 'hostType' in params:\n queryParams['hostType'] = self.apiClient.toPathValue(params[\n 'hostType'])\n if 'connectedInterfaceName' in params:\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(\n params['connectedInterfaceName'])\n if 'hostIp' in params:\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']\n )\n if 'connectedDeviceIp' in params:\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(\n params['connectedDeviceIp'])\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n\n def getHostCount(self, **kwargs):\n \"\"\"Gives total number of hosts\n\n Args:\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: CountResult\n \"\"\"\n allParams = ['scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostCount\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/count'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'CountResult')\n return responseObject\n\n def 
getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-4": "<mask token>\nimport sys\nimport os\nimport urllib.request, urllib.parse, urllib.error\nfrom .models import *\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName',\n 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp',\n 'connectedDeviceIp', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHosts\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'limit' in params:\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n if 'offset' in params:\n queryParams['offset'] = self.apiClient.toPathValue(params['offset']\n )\n if 'sortBy' in params:\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy']\n )\n if 'order' in params:\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n if 'hostName' in params:\n queryParams['hostName'] = self.apiClient.toPathValue(params[\n 
'hostName'])\n if 'hostMac' in params:\n queryParams['hostMac'] = self.apiClient.toPathValue(params[\n 'hostMac'])\n if 'hostType' in params:\n queryParams['hostType'] = self.apiClient.toPathValue(params[\n 'hostType'])\n if 'connectedInterfaceName' in params:\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(\n params['connectedInterfaceName'])\n if 'hostIp' in params:\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp']\n )\n if 'connectedDeviceIp' in params:\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(\n params['connectedDeviceIp'])\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n\n def getHostCount(self, **kwargs):\n \"\"\"Gives total number of hosts\n\n Args:\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: CountResult\n \"\"\"\n allParams = ['scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostCount\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/count'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = 
self.apiClient.deserialize(response, 'CountResult')\n return responseObject\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n allParams = ['id', 'scope']\n params = locals()\n for key, val in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\n \"Got an unexpected keyword argument '%s' to method getHostById\"\n % key)\n params[key] = val\n del params['kwargs']\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n if 'scope' in params:\n headerParams['scope'] = params['scope']\n if 'id' in params:\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}', replacement)\n postData = formParams if formParams else bodyParam\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n if not response:\n return None\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n",
"step-5": "#!/usr/bin/env python\n#pylint: skip-file\n\"\"\"\nHostApi.py\n Copyright 2016 Cisco Systems\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n you may not use this file except in compliance with the License.\n You may obtain a copy of the License at\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n Unless required by applicable law or agreed to in writing, software\n distributed under the License is distributed on an \"AS IS\" BASIS,\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n See the License for the specific language governing permissions and\n limitations under the License.\n\n\"\"\"\nimport sys\nimport os\nimport urllib.request, urllib.parse, urllib.error\n\nfrom .models import *\n\n\nclass HostApi(object):\n\n def __init__(self, apiClient):\n self.apiClient = apiClient\n\n\n\n def getHosts(self, **kwargs):\n \"\"\"Retrieve hosts\n\n Args:\n\n limit, str: limit (required)\n\n\n offset, str: offset (required)\n\n\n sortBy, str: sortBy (required)\n\n\n order, str: order (required)\n\n\n hostName, list[str]: hostName (required)\n\n\n hostMac, list[str]: hostMac (required)\n\n\n hostType, list[str]: hostType (required)\n\n\n connectedInterfaceName, list[str]: connectedInterfaceName (required)\n\n\n hostIp, list[str]: hostIp (required)\n\n\n connectedDeviceIp, list[str]: connectedDeviceIp (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostListResult\n \"\"\"\n\n allParams = ['limit', 'offset', 'sortBy', 'order', 'hostName', 'hostMac', 'hostType', 'connectedInterfaceName', 'hostIp', 'connectedDeviceIp', 'scope']\n\n params = locals()\n for (key, val) in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\"Got an unexpected keyword argument '%s' to method getHosts\" % key)\n params[key] = val\n del params['kwargs']\n\n resourcePath = '/host'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n\n queryParams = {}\n 
headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n\n\n if ('limit' in params):\n queryParams['limit'] = self.apiClient.toPathValue(params['limit'])\n\n if ('offset' in params):\n queryParams['offset'] = self.apiClient.toPathValue(params['offset'])\n\n if ('sortBy' in params):\n queryParams['sortBy'] = self.apiClient.toPathValue(params['sortBy'])\n\n if ('order' in params):\n queryParams['order'] = self.apiClient.toPathValue(params['order'])\n\n if ('hostName' in params):\n queryParams['hostName'] = self.apiClient.toPathValue(params['hostName'])\n\n if ('hostMac' in params):\n queryParams['hostMac'] = self.apiClient.toPathValue(params['hostMac'])\n\n if ('hostType' in params):\n queryParams['hostType'] = self.apiClient.toPathValue(params['hostType'])\n\n if ('connectedInterfaceName' in params):\n queryParams['connectedInterfaceName'] = self.apiClient.toPathValue(params['connectedInterfaceName'])\n\n if ('hostIp' in params):\n queryParams['hostIp'] = self.apiClient.toPathValue(params['hostIp'])\n\n if ('connectedDeviceIp' in params):\n queryParams['connectedDeviceIp'] = self.apiClient.toPathValue(params['connectedDeviceIp'])\n\n\n\n if ('scope' in params):\n headerParams['scope'] = params['scope']\n\n\n\n\n\n\n\n\n postData = (formParams if formParams else bodyParam)\n\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n\n\n if not response:\n return None\n\n responseObject = self.apiClient.deserialize(response, 'HostListResult')\n return responseObject\n\n\n\n\n def getHostCount(self, **kwargs):\n \"\"\"Gives total number of hosts\n\n Args:\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: CountResult\n \"\"\"\n\n allParams = ['scope']\n\n params = locals()\n for (key, val) in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\"Got an 
unexpected keyword argument '%s' to method getHostCount\" % key)\n params[key] = val\n del params['kwargs']\n\n resourcePath = '/host/count'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n\n\n\n\n if ('scope' in params):\n headerParams['scope'] = params['scope']\n\n\n\n\n\n\n\n\n postData = (formParams if formParams else bodyParam)\n\n response = self.apiClient.callAPI(resourcePath, method, queryParams,\n postData, headerParams, files=files)\n\n\n if not response:\n return None\n\n responseObject = self.apiClient.deserialize(response, 'CountResult')\n return responseObject\n\n\n\n\n def getHostById(self, **kwargs):\n \"\"\"Retrieves host based on id\n\n Args:\n\n id, str: Host Id (required)\n\n\n scope, str: Authorization Scope for RBAC (required)\n\n\n\n Returns: HostResult\n \"\"\"\n\n allParams = ['id', 'scope']\n\n params = locals()\n for (key, val) in list(params['kwargs'].items()):\n if key not in allParams:\n raise TypeError(\"Got an unexpected keyword argument '%s' to method getHostById\" % key)\n params[key] = val\n del params['kwargs']\n\n resourcePath = '/host/{id}'\n resourcePath = resourcePath.replace('{format}', 'json')\n method = 'GET'\n\n queryParams = {}\n headerParams = {}\n formParams = {}\n files = {}\n bodyParam = None\n\n headerParams['Accept'] = 'application/json'\n headerParams['Content-Type'] = 'application/json'\n\n\n\n\n if ('scope' in params):\n headerParams['scope'] = params['scope']\n\n\n\n if ('id' in params):\n replacement = str(self.apiClient.toPathValue(params['id']))\n replacement = urllib.parse.quote(replacement)\n resourcePath = resourcePath.replace('{' + 'id' + '}',\n replacement)\n\n\n\n\n\n\n postData = (formParams if formParams else bodyParam)\n\n response = self.apiClient.callAPI(resourcePath, method, 
queryParams,\n postData, headerParams, files=files)\n\n\n if not response:\n return None\n\n responseObject = self.apiClient.deserialize(response, 'HostResult')\n return responseObject\n\n\n\n\n\n\n",
"step-ids": [
2,
4,
5,
6,
7
]
}
|
[
2,
4,
5,
6,
7
] |
# For each of T test cases, read a string and report whether it can be read
# as a palindrome.  Comparison walks the string from both ends; hitting a
# '*' wildcard on either side stops the scan with the check still counted
# as a success, while any plain-character mismatch fails it.
for case in range(int(input())):
    chars = list(input())
    matched = True
    for idx in range(len(chars) // 2):
        left, right = chars[idx], chars[-idx - 1]
        if left == '*' or right == '*':
            break
        if left != right:
            matched = False
            break
    if matched:
        print('#{} Exist'.format(case + 1))
    else:
        print('#{} Not exist'.format(case + 1))
|
normal
|
{
"blob_id": "21d499555b4bc4944996a57ae544a56aa317b00b",
"index": 4386,
"step-1": "<mask token>\n",
"step-2": "for t in range(int(input())):\n st = list(input())\n N, j = len(st), 1\n for i in range(N // 2):\n if st[i] == '*' or st[-i - 1] == '*':\n break\n elif st[i] != st[-i - 1]:\n j = 0\n break\n print('#{} Exist'.format(t + 1)) if j else print('#{} Not exist'.format\n (t + 1))\n",
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0,
1
]
}
|
[
0,
1
] |
<|reserved_special_token_0|>
def extract_title(page):
return page.find('header').find('h1').contents[0]
def extract_colours(page):
color_list = page.find('ul')
return list(dict.fromkeys(re.findall('#\\w+', str(color_list.contents))))
def get_colours_from_page(browser, baseurl, target_page):
response = browser.open(baseurl + target_page)
soup = BeautifulSoup(response.text, 'lxml')
extract = soup.find('section', {'id': 'item'})
entity = {'title': extract_title(extract), 'colours': extract_colours(
extract)}
return entity
def get_links_from_article(articles):
links = []
for article in articles:
links.append(article.find('a').attrs['href'])
return links
def scrape_flag_pagination_page(browser, baseurl, pageCount):
response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))
soup = BeautifulSoup(response.text, 'lxml')
flag_articles = soup.findAll('article')
return get_links_from_article(flag_articles)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def extract_title(page):
return page.find('header').find('h1').contents[0]
def extract_colours(page):
color_list = page.find('ul')
return list(dict.fromkeys(re.findall('#\\w+', str(color_list.contents))))
def get_colours_from_page(browser, baseurl, target_page):
response = browser.open(baseurl + target_page)
soup = BeautifulSoup(response.text, 'lxml')
extract = soup.find('section', {'id': 'item'})
entity = {'title': extract_title(extract), 'colours': extract_colours(
extract)}
return entity
def get_links_from_article(articles):
links = []
for article in articles:
links.append(article.find('a').attrs['href'])
return links
def scrape_flag_pagination_page(browser, baseurl, pageCount):
response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))
soup = BeautifulSoup(response.text, 'lxml')
flag_articles = soup.findAll('article')
return get_links_from_article(flag_articles)
<|reserved_special_token_0|>
while True:
try:
list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount
)
except mechanicalsoup.utils.LinkNotFoundError:
break
pageCount += 1
<|reserved_special_token_0|>
for url in list_of_urls:
package.append(get_colours_from_page(browser, baseurl, url))
with open('flag_colours.json', 'w', encoding='utf-8') as f:
json.dump(package, f, ensure_ascii=False, indent=4)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def extract_title(page):
return page.find('header').find('h1').contents[0]
def extract_colours(page):
color_list = page.find('ul')
return list(dict.fromkeys(re.findall('#\\w+', str(color_list.contents))))
def get_colours_from_page(browser, baseurl, target_page):
response = browser.open(baseurl + target_page)
soup = BeautifulSoup(response.text, 'lxml')
extract = soup.find('section', {'id': 'item'})
entity = {'title': extract_title(extract), 'colours': extract_colours(
extract)}
return entity
def get_links_from_article(articles):
links = []
for article in articles:
links.append(article.find('a').attrs['href'])
return links
def scrape_flag_pagination_page(browser, baseurl, pageCount):
response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))
soup = BeautifulSoup(response.text, 'lxml')
flag_articles = soup.findAll('article')
return get_links_from_article(flag_articles)
baseurl = 'https://encycolorpedia.com'
browser = mechanicalsoup.StatefulBrowser(raise_on_404=True)
list_of_urls = []
flag_count = 0
pageCount = 1
while True:
try:
list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount
)
except mechanicalsoup.utils.LinkNotFoundError:
break
pageCount += 1
package = []
for url in list_of_urls:
package.append(get_colours_from_page(browser, baseurl, url))
with open('flag_colours.json', 'w', encoding='utf-8') as f:
json.dump(package, f, ensure_ascii=False, indent=4)
<|reserved_special_token_1|>
import mechanicalsoup
from bs4 import BeautifulSoup
import re
import json
def extract_title(page):
return page.find('header').find('h1').contents[0]
def extract_colours(page):
color_list = page.find('ul')
return list(dict.fromkeys(re.findall('#\\w+', str(color_list.contents))))
def get_colours_from_page(browser, baseurl, target_page):
response = browser.open(baseurl + target_page)
soup = BeautifulSoup(response.text, 'lxml')
extract = soup.find('section', {'id': 'item'})
entity = {'title': extract_title(extract), 'colours': extract_colours(
extract)}
return entity
def get_links_from_article(articles):
links = []
for article in articles:
links.append(article.find('a').attrs['href'])
return links
def scrape_flag_pagination_page(browser, baseurl, pageCount):
response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))
soup = BeautifulSoup(response.text, 'lxml')
flag_articles = soup.findAll('article')
return get_links_from_article(flag_articles)
baseurl = 'https://encycolorpedia.com'
browser = mechanicalsoup.StatefulBrowser(raise_on_404=True)
list_of_urls = []
flag_count = 0
pageCount = 1
while True:
try:
list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount
)
except mechanicalsoup.utils.LinkNotFoundError:
break
pageCount += 1
package = []
for url in list_of_urls:
package.append(get_colours_from_page(browser, baseurl, url))
with open('flag_colours.json', 'w', encoding='utf-8') as f:
json.dump(package, f, ensure_ascii=False, indent=4)
<|reserved_special_token_1|>
import mechanicalsoup
from bs4 import BeautifulSoup
import re
import json
def extract_title(page):
    """Return the page title: the text of the first <h1> inside the header."""
    header = page.find("header")
    return header.find("h1").contents[0]
def extract_colours(page):
    """Return the unique hex colour codes (e.g. '#aabbcc') found in the page's
    first <ul>, preserving first-seen order.
    """
    color_list = page.find("ul")
    # Raw string avoids the invalid-escape-sequence warning for "\w" in a
    # non-raw literal; dict.fromkeys de-duplicates while keeping order.
    return list(dict.fromkeys(re.findall(r"#\w+", str(color_list.contents))))
def get_colours_from_page(browser, baseurl, target_page):
    """Fetch one item page and return {"title": ..., "colours": [...]}."""
    response = browser.open(baseurl + target_page)
    soup = BeautifulSoup(response.text, 'lxml')
    section = soup.find("section", {"id": "item"})
    return {
        "title": extract_title(section),
        "colours": extract_colours(section),
    }
def get_links_from_article(articles):
    """Return the href of the first <a> in each article, in order."""
    return [article.find("a").attrs['href'] for article in articles]
def scrape_flag_pagination_page(browser, baseurl, pageCount):
    """Fetch one page of the /flags listing and return its article links.

    Raises mechanicalsoup.utils.LinkNotFoundError (via the browser's
    raise_on_404) once pageCount runs past the last page.
    """
    listing_url = baseurl + "/flags?page={0}".format(pageCount)
    response = browser.open(listing_url)
    soup = BeautifulSoup(response.text, 'lxml')
    return get_links_from_article(soup.findAll("article"))
baseurl = "https://encycolorpedia.com"
browser = mechanicalsoup.StatefulBrowser(raise_on_404=True)

# Walk the /flags pagination until a 404 (LinkNotFoundError) signals the end.
list_of_urls = []
pageCount = 1
while True:
    try:
        list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount)
    except mechanicalsoup.utils.LinkNotFoundError:
        break
    pageCount += 1

# Scrape every collected flag page, then dump the colour data to JSON.
package = [get_colours_from_page(browser, baseurl, url) for url in list_of_urls]

with open('flag_colours.json', 'w', encoding='utf-8') as f:
    json.dump(package, f, ensure_ascii=False, indent=4)
|
flexible
|
{
"blob_id": "9fd33089a9dc919ef2fb2698059e60a24a0e05e6",
"index": 6118,
"step-1": "<mask token>\n\n\ndef extract_title(page):\n return page.find('header').find('h1').contents[0]\n\n\ndef extract_colours(page):\n color_list = page.find('ul')\n return list(dict.fromkeys(re.findall('#\\\\w+', str(color_list.contents))))\n\n\ndef get_colours_from_page(browser, baseurl, target_page):\n response = browser.open(baseurl + target_page)\n soup = BeautifulSoup(response.text, 'lxml')\n extract = soup.find('section', {'id': 'item'})\n entity = {'title': extract_title(extract), 'colours': extract_colours(\n extract)}\n return entity\n\n\ndef get_links_from_article(articles):\n links = []\n for article in articles:\n links.append(article.find('a').attrs['href'])\n return links\n\n\ndef scrape_flag_pagination_page(browser, baseurl, pageCount):\n response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))\n soup = BeautifulSoup(response.text, 'lxml')\n flag_articles = soup.findAll('article')\n return get_links_from_article(flag_articles)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef extract_title(page):\n return page.find('header').find('h1').contents[0]\n\n\ndef extract_colours(page):\n color_list = page.find('ul')\n return list(dict.fromkeys(re.findall('#\\\\w+', str(color_list.contents))))\n\n\ndef get_colours_from_page(browser, baseurl, target_page):\n response = browser.open(baseurl + target_page)\n soup = BeautifulSoup(response.text, 'lxml')\n extract = soup.find('section', {'id': 'item'})\n entity = {'title': extract_title(extract), 'colours': extract_colours(\n extract)}\n return entity\n\n\ndef get_links_from_article(articles):\n links = []\n for article in articles:\n links.append(article.find('a').attrs['href'])\n return links\n\n\ndef scrape_flag_pagination_page(browser, baseurl, pageCount):\n response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))\n soup = BeautifulSoup(response.text, 'lxml')\n flag_articles = soup.findAll('article')\n return get_links_from_article(flag_articles)\n\n\n<mask token>\nwhile True:\n try:\n list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount\n )\n except mechanicalsoup.utils.LinkNotFoundError:\n break\n pageCount += 1\n<mask token>\nfor url in list_of_urls:\n package.append(get_colours_from_page(browser, baseurl, url))\nwith open('flag_colours.json', 'w', encoding='utf-8') as f:\n json.dump(package, f, ensure_ascii=False, indent=4)\n",
"step-3": "<mask token>\n\n\ndef extract_title(page):\n return page.find('header').find('h1').contents[0]\n\n\ndef extract_colours(page):\n color_list = page.find('ul')\n return list(dict.fromkeys(re.findall('#\\\\w+', str(color_list.contents))))\n\n\ndef get_colours_from_page(browser, baseurl, target_page):\n response = browser.open(baseurl + target_page)\n soup = BeautifulSoup(response.text, 'lxml')\n extract = soup.find('section', {'id': 'item'})\n entity = {'title': extract_title(extract), 'colours': extract_colours(\n extract)}\n return entity\n\n\ndef get_links_from_article(articles):\n links = []\n for article in articles:\n links.append(article.find('a').attrs['href'])\n return links\n\n\ndef scrape_flag_pagination_page(browser, baseurl, pageCount):\n response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))\n soup = BeautifulSoup(response.text, 'lxml')\n flag_articles = soup.findAll('article')\n return get_links_from_article(flag_articles)\n\n\nbaseurl = 'https://encycolorpedia.com'\nbrowser = mechanicalsoup.StatefulBrowser(raise_on_404=True)\nlist_of_urls = []\nflag_count = 0\npageCount = 1\nwhile True:\n try:\n list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount\n )\n except mechanicalsoup.utils.LinkNotFoundError:\n break\n pageCount += 1\npackage = []\nfor url in list_of_urls:\n package.append(get_colours_from_page(browser, baseurl, url))\nwith open('flag_colours.json', 'w', encoding='utf-8') as f:\n json.dump(package, f, ensure_ascii=False, indent=4)\n",
"step-4": "import mechanicalsoup\nfrom bs4 import BeautifulSoup\nimport re\nimport json\n\n\ndef extract_title(page):\n return page.find('header').find('h1').contents[0]\n\n\ndef extract_colours(page):\n color_list = page.find('ul')\n return list(dict.fromkeys(re.findall('#\\\\w+', str(color_list.contents))))\n\n\ndef get_colours_from_page(browser, baseurl, target_page):\n response = browser.open(baseurl + target_page)\n soup = BeautifulSoup(response.text, 'lxml')\n extract = soup.find('section', {'id': 'item'})\n entity = {'title': extract_title(extract), 'colours': extract_colours(\n extract)}\n return entity\n\n\ndef get_links_from_article(articles):\n links = []\n for article in articles:\n links.append(article.find('a').attrs['href'])\n return links\n\n\ndef scrape_flag_pagination_page(browser, baseurl, pageCount):\n response = browser.open(baseurl + '/flags?page={0}'.format(pageCount))\n soup = BeautifulSoup(response.text, 'lxml')\n flag_articles = soup.findAll('article')\n return get_links_from_article(flag_articles)\n\n\nbaseurl = 'https://encycolorpedia.com'\nbrowser = mechanicalsoup.StatefulBrowser(raise_on_404=True)\nlist_of_urls = []\nflag_count = 0\npageCount = 1\nwhile True:\n try:\n list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount\n )\n except mechanicalsoup.utils.LinkNotFoundError:\n break\n pageCount += 1\npackage = []\nfor url in list_of_urls:\n package.append(get_colours_from_page(browser, baseurl, url))\nwith open('flag_colours.json', 'w', encoding='utf-8') as f:\n json.dump(package, f, ensure_ascii=False, indent=4)\n",
"step-5": "import mechanicalsoup\nfrom bs4 import BeautifulSoup\nimport re\nimport json\n\n\ndef extract_title(page):\n return page.find(\"header\").find(\"h1\").contents[0]\n\n\n\ndef extract_colours(page):\n color_list = page.find(\"ul\")\n return list(dict.fromkeys(re.findall(\"#\\w+\", str(color_list.contents))))\n\n\ndef get_colours_from_page(browser, baseurl, target_page):\n response = browser.open(baseurl + target_page)\n soup = BeautifulSoup(response.text, 'lxml')\n extract = soup.find(\"section\", {\"id\": \"item\"})\n entity = {\"title\": extract_title(extract), \"colours\": extract_colours(extract)}\n return entity\n\ndef get_links_from_article(articles):\n links = []\n for article in articles:\n links.append(article.find(\"a\").attrs['href'])\n return links\n\n\ndef scrape_flag_pagination_page(browser, baseurl, pageCount):\n response = browser.open(baseurl + \"/flags?page={0}\".format(pageCount))\n soup = BeautifulSoup(response.text, 'lxml')\n flag_articles = soup.findAll(\"article\")\n return get_links_from_article(flag_articles)\n\n\n\nbaseurl = \"https://encycolorpedia.com\"\nbrowser = mechanicalsoup.StatefulBrowser(raise_on_404=True)\nlist_of_urls = []\nflag_count = 0\npageCount = 1\nwhile(True):\n try:\n list_of_urls += scrape_flag_pagination_page(browser, baseurl, pageCount)\n except mechanicalsoup.utils.LinkNotFoundError:\n break\n pageCount += 1\npackage = []\nfor url in list_of_urls:\n package.append(get_colours_from_page(browser, baseurl, url))\n\nwith open('flag_colours.json', 'w', encoding='utf-8') as f:\n json.dump(package, f, ensure_ascii=False, indent=4)",
"step-ids": [
5,
6,
7,
8,
9
]
}
|
[
5,
6,
7,
8,
9
] |
<|reserved_special_token_0|>
class PddMallGoodsSpider(scrapy.Spider):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def start_requests(self):
mall_nums = self.limit * int(self.process_nums)
is_end = False
start_mall_id = ''
while not is_end:
mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,
start_mall_id, '', mall_nums)
if not mall_ids:
is_end = True
continue
for mall_id in mall_ids:
mall_id = int(mall_id.decode('utf-8'))
start_mall_id = mall_id
if mall_id % self.process_nums != self.hash_num:
continue
goods_list = []
page = 1
headers = self.make_headers()
url = (
'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='
+ str(mall_id) + '&page_no=' + str(page) +
'&page_size=500')
meta = {'page': page, 'mall_id': mall_id, 'goods_list':
goods_list}
yield scrapy.Request(url, meta=meta, callback=self.parse,
headers=headers)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def make_headers(self):
chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.
randint(1000, 3200)) + '.94'
headers = {'Host': 'yangkeduo.com', 'Accept':
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'
, 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',
'Referer':
'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'
, 'Connection': 'keep-alive', 'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'
+ chrome_version + ' Safari/537.36'}
ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)
) + '.' + str(random.randint(1, 255)) + '.' + str(random.
randint(1, 255))
headers['CLIENT-IP'] = ip
headers['X-FORWARDED-FOR'] = ip
return headers
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PddMallGoodsSpider(scrapy.Spider):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, hash_num=0, process_nums=1):
self.ssdb_client = pyssdb.Client(get_project_settings().get(
'SSDB_HOST'), 8888)
self.hash_num = int(hash_num)
self.process_nums = int(process_nums)
self.pageSize = 500
def start_requests(self):
mall_nums = self.limit * int(self.process_nums)
is_end = False
start_mall_id = ''
while not is_end:
mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,
start_mall_id, '', mall_nums)
if not mall_ids:
is_end = True
continue
for mall_id in mall_ids:
mall_id = int(mall_id.decode('utf-8'))
start_mall_id = mall_id
if mall_id % self.process_nums != self.hash_num:
continue
goods_list = []
page = 1
headers = self.make_headers()
url = (
'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='
+ str(mall_id) + '&page_no=' + str(page) +
'&page_size=500')
meta = {'page': page, 'mall_id': mall_id, 'goods_list':
goods_list}
yield scrapy.Request(url, meta=meta, callback=self.parse,
headers=headers)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def make_headers(self):
chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.
randint(1000, 3200)) + '.94'
headers = {'Host': 'yangkeduo.com', 'Accept':
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'
, 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',
'Referer':
'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'
, 'Connection': 'keep-alive', 'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'
+ chrome_version + ' Safari/537.36'}
ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)
) + '.' + str(random.randint(1, 255)) + '.' + str(random.
randint(1, 255))
headers['CLIENT-IP'] = ip
headers['X-FORWARDED-FOR'] = ip
return headers
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class PddMallGoodsSpider(scrapy.Spider):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def __init__(self, hash_num=0, process_nums=1):
self.ssdb_client = pyssdb.Client(get_project_settings().get(
'SSDB_HOST'), 8888)
self.hash_num = int(hash_num)
self.process_nums = int(process_nums)
self.pageSize = 500
def start_requests(self):
mall_nums = self.limit * int(self.process_nums)
is_end = False
start_mall_id = ''
while not is_end:
mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,
start_mall_id, '', mall_nums)
if not mall_ids:
is_end = True
continue
for mall_id in mall_ids:
mall_id = int(mall_id.decode('utf-8'))
start_mall_id = mall_id
if mall_id % self.process_nums != self.hash_num:
continue
goods_list = []
page = 1
headers = self.make_headers()
url = (
'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='
+ str(mall_id) + '&page_no=' + str(page) +
'&page_size=500')
meta = {'page': page, 'mall_id': mall_id, 'goods_list':
goods_list}
yield scrapy.Request(url, meta=meta, callback=self.parse,
headers=headers)
def parse(self, response):
pass
goods_list = response.meta['goods_list']
mall_id = response.meta['mall_id']
page = response.meta['page']
mall_goods = response.body.decode('utf-8')
mall_goods = json.loads(mall_goods)
goods_len = len(mall_goods['goods_list'])
if goods_len > 0:
goods_list = goods_list + mall_goods['goods_list']
if goods_len > self.pageSize - 100:
page += 1
url = (
'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='
+ str(mall_id) + '&page_no=' + str(page) + '&page_size=500')
meta = {'page': page, 'mall_id': mall_id, 'goods_list': goods_list}
headers = self.make_headers()
yield scrapy.Request(url, meta=meta, callback=self.parse,
headers=headers)
elif goods_list:
item = GoodsSalesItem()
item['goods_list'] = goods_list
item['mall_id'] = mall_id
yield item
<|reserved_special_token_0|>
def make_headers(self):
chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.
randint(1000, 3200)) + '.94'
headers = {'Host': 'yangkeduo.com', 'Accept':
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'
, 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',
'Referer':
'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'
, 'Connection': 'keep-alive', 'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'
+ chrome_version + ' Safari/537.36'}
ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)
) + '.' + str(random.randint(1, 255)) + '.' + str(random.
randint(1, 255))
headers['CLIENT-IP'] = ip
headers['X-FORWARDED-FOR'] = ip
return headers
<|reserved_special_token_1|>
import scrapy
import json, time, sys, random, re, pyssdb
from scrapy.utils.project import get_project_settings
from spider.items import GoodsSalesItem
goods_list = []
<|reserved_special_token_0|>
class PddMallGoodsSpider(scrapy.Spider):
name = 'pdd_mall_goods'
mall_id_hash = 'pdd_mall_id_hash'
hash_num = 0
ssdb_client = ''
process_nums = 1
limit = 100
def __init__(self, hash_num=0, process_nums=1):
self.ssdb_client = pyssdb.Client(get_project_settings().get(
'SSDB_HOST'), 8888)
self.hash_num = int(hash_num)
self.process_nums = int(process_nums)
self.pageSize = 500
def start_requests(self):
mall_nums = self.limit * int(self.process_nums)
is_end = False
start_mall_id = ''
while not is_end:
mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,
start_mall_id, '', mall_nums)
if not mall_ids:
is_end = True
continue
for mall_id in mall_ids:
mall_id = int(mall_id.decode('utf-8'))
start_mall_id = mall_id
if mall_id % self.process_nums != self.hash_num:
continue
goods_list = []
page = 1
headers = self.make_headers()
url = (
'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='
+ str(mall_id) + '&page_no=' + str(page) +
'&page_size=500')
meta = {'page': page, 'mall_id': mall_id, 'goods_list':
goods_list}
yield scrapy.Request(url, meta=meta, callback=self.parse,
headers=headers)
def parse(self, response):
pass
goods_list = response.meta['goods_list']
mall_id = response.meta['mall_id']
page = response.meta['page']
mall_goods = response.body.decode('utf-8')
mall_goods = json.loads(mall_goods)
goods_len = len(mall_goods['goods_list'])
if goods_len > 0:
goods_list = goods_list + mall_goods['goods_list']
if goods_len > self.pageSize - 100:
page += 1
url = (
'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='
+ str(mall_id) + '&page_no=' + str(page) + '&page_size=500')
meta = {'page': page, 'mall_id': mall_id, 'goods_list': goods_list}
headers = self.make_headers()
yield scrapy.Request(url, meta=meta, callback=self.parse,
headers=headers)
elif goods_list:
item = GoodsSalesItem()
item['goods_list'] = goods_list
item['mall_id'] = mall_id
yield item
"""生成headers头信息"""
def make_headers(self):
chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.
randint(1000, 3200)) + '.94'
headers = {'Host': 'yangkeduo.com', 'Accept':
'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'
, 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',
'Referer':
'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'
, 'Connection': 'keep-alive', 'User-Agent':
'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'
+ chrome_version + ' Safari/537.36'}
ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)
) + '.' + str(random.randint(1, 255)) + '.' + str(random.
randint(1, 255))
headers['CLIENT-IP'] = ip
headers['X-FORWARDED-FOR'] = ip
return headers
<|reserved_special_token_1|>
# -*- coding: utf-8 -*-
import scrapy
import json, time, sys, random, re, pyssdb
from scrapy.utils.project import get_project_settings
from spider.items import GoodsSalesItem
goods_list = []
'''Fetch the product information listed inside each mall (shop).'''
class PddMallGoodsSpider(scrapy.Spider):
    """Crawl the goods listed in each Pinduoduo mall (shop).

    Mall ids are read from an SSDB hash. The work is sharded across
    ``process_nums`` processes; this process handles the ids where
    ``mall_id % process_nums == hash_num``.
    """

    name = 'pdd_mall_goods'
    mall_id_hash = 'pdd_mall_id_hash'  # SSDB hash that holds the mall ids
    hash_num = 0                       # shard index of this process
    ssdb_client = ''
    process_nums = 1                   # total number of shards
    limit = 100                        # malls fetched per SSDB scan, per shard

    def __init__(self, hash_num=0, process_nums=1):
        self.ssdb_client = pyssdb.Client(get_project_settings().get('SSDB_HOST'), 8888)
        self.hash_num = int(hash_num)          # index of this script/shard
        self.process_nums = int(process_nums)  # total number of scripts/shards
        self.pageSize = 500                    # goods per request; the API returns at most 500

    def start_requests(self):
        """Scan mall ids from SSDB and request the first goods page of each."""
        mall_nums = self.limit * int(self.process_nums)  # ids fetched per scan
        is_end = False
        start_mall_id = ''  # key to resume the SSDB hkeys scan from
        while not is_end:
            mall_ids = self.ssdb_client.hkeys(self.mall_id_hash, start_mall_id, '', mall_nums)
            if not mall_ids:
                # SSDB returned nothing: the scan is exhausted.
                is_end = True
                continue
            for mall_id in mall_ids:
                mall_id = int(mall_id.decode('utf-8'))
                start_mall_id = mall_id
                if mall_id % self.process_nums != self.hash_num:
                    # This mall belongs to another shard.
                    continue
                page = 1
                headers = self.make_headers()
                url = 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='+str(mall_id)+'&page_no='+str(page)+'&page_size=500'
                meta = {'page': page, 'mall_id': mall_id, 'goods_list': []}
                yield scrapy.Request(url, meta=meta, callback=self.parse, headers=headers)

    def parse(self, response):
        """Accumulate one page of goods; follow pagination or emit the item."""
        goods_list = response.meta['goods_list']  # goods collected so far
        mall_id = response.meta['mall_id']        # shop id being crawled
        page = response.meta['page']              # page number just fetched
        mall_goods = json.loads(response.body.decode('utf-8'))
        goods_len = len(mall_goods['goods_list'])
        if goods_len > 0:
            goods_list = goods_list + mall_goods['goods_list']  # merge this page
        if goods_len > self.pageSize - 100:
            # A near-full page suggests more goods remain: request the next page.
            page += 1
            url = 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='+str(mall_id)+'&page_no='+str(page)+'&page_size=500'
            meta = {'page': page, 'mall_id': mall_id, 'goods_list': goods_list}
            headers = self.make_headers()
            yield scrapy.Request(url, meta=meta, callback=self.parse, headers=headers)
        elif goods_list:
            item = GoodsSalesItem()
            item['goods_list'] = goods_list
            item['mall_id'] = mall_id
            yield item

    def make_headers(self):
        """Build request headers with a randomized Chrome UA and spoofed client IP."""
        chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.randint(1000, 3200)) + '.94'
        headers = {
            # NOTE(review): the original dict listed 'Host' twice; only the
            # last occurrence survives in a dict literal, so one is kept here.
            "Host": "yangkeduo.com",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Accept-Encoding": "gzip, deflate",
            "Referer": "http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026",
            "Connection": "keep-alive",
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'+chrome_version+' Safari/537.36',
        }
        # Spoof a random source IP in the forwarding headers.
        ip = str(random.randint(100, 200))+'.'+str(random.randint(1, 255))+'.'+str(random.randint(1, 255))+'.'+str(random.randint(1, 255))
        headers['CLIENT-IP'] = ip
        headers['X-FORWARDED-FOR'] = ip
        return headers
|
flexible
|
{
"blob_id": "f33190df35a6b0b91c4dd2d6a58291451d06e29a",
"index": 3529,
"step-1": "<mask token>\n\n\nclass PddMallGoodsSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def start_requests(self):\n mall_nums = self.limit * int(self.process_nums)\n is_end = False\n start_mall_id = ''\n while not is_end:\n mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,\n start_mall_id, '', mall_nums)\n if not mall_ids:\n is_end = True\n continue\n for mall_id in mall_ids:\n mall_id = int(mall_id.decode('utf-8'))\n start_mall_id = mall_id\n if mall_id % self.process_nums != self.hash_num:\n continue\n goods_list = []\n page = 1\n headers = self.make_headers()\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) +\n '&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list':\n goods_list}\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n <mask token>\n <mask token>\n\n def make_headers(self):\n chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.\n randint(1000, 3200)) + '.94'\n headers = {'Host': 'yangkeduo.com', 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'\n , 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n 'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',\n 'Referer':\n 'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'\n , 'Connection': 'keep-alive', 'User-Agent': \n 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'\n + chrome_version + ' Safari/537.36'}\n ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)\n ) + '.' + str(random.randint(1, 255)) + '.' 
+ str(random.\n randint(1, 255))\n headers['CLIENT-IP'] = ip\n headers['X-FORWARDED-FOR'] = ip\n return headers\n",
"step-2": "<mask token>\n\n\nclass PddMallGoodsSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, hash_num=0, process_nums=1):\n self.ssdb_client = pyssdb.Client(get_project_settings().get(\n 'SSDB_HOST'), 8888)\n self.hash_num = int(hash_num)\n self.process_nums = int(process_nums)\n self.pageSize = 500\n\n def start_requests(self):\n mall_nums = self.limit * int(self.process_nums)\n is_end = False\n start_mall_id = ''\n while not is_end:\n mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,\n start_mall_id, '', mall_nums)\n if not mall_ids:\n is_end = True\n continue\n for mall_id in mall_ids:\n mall_id = int(mall_id.decode('utf-8'))\n start_mall_id = mall_id\n if mall_id % self.process_nums != self.hash_num:\n continue\n goods_list = []\n page = 1\n headers = self.make_headers()\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) +\n '&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list':\n goods_list}\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n <mask token>\n <mask token>\n\n def make_headers(self):\n chrome_version = str(random.randint(59, 63)) + '.0.' 
+ str(random.\n randint(1000, 3200)) + '.94'\n headers = {'Host': 'yangkeduo.com', 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'\n , 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n 'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',\n 'Referer':\n 'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'\n , 'Connection': 'keep-alive', 'User-Agent': \n 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'\n + chrome_version + ' Safari/537.36'}\n ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)\n ) + '.' + str(random.randint(1, 255)) + '.' + str(random.\n randint(1, 255))\n headers['CLIENT-IP'] = ip\n headers['X-FORWARDED-FOR'] = ip\n return headers\n",
"step-3": "<mask token>\n\n\nclass PddMallGoodsSpider(scrapy.Spider):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def __init__(self, hash_num=0, process_nums=1):\n self.ssdb_client = pyssdb.Client(get_project_settings().get(\n 'SSDB_HOST'), 8888)\n self.hash_num = int(hash_num)\n self.process_nums = int(process_nums)\n self.pageSize = 500\n\n def start_requests(self):\n mall_nums = self.limit * int(self.process_nums)\n is_end = False\n start_mall_id = ''\n while not is_end:\n mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,\n start_mall_id, '', mall_nums)\n if not mall_ids:\n is_end = True\n continue\n for mall_id in mall_ids:\n mall_id = int(mall_id.decode('utf-8'))\n start_mall_id = mall_id\n if mall_id % self.process_nums != self.hash_num:\n continue\n goods_list = []\n page = 1\n headers = self.make_headers()\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) +\n '&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list':\n goods_list}\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n\n def parse(self, response):\n pass\n goods_list = response.meta['goods_list']\n mall_id = response.meta['mall_id']\n page = response.meta['page']\n mall_goods = response.body.decode('utf-8')\n mall_goods = json.loads(mall_goods)\n goods_len = len(mall_goods['goods_list'])\n if goods_len > 0:\n goods_list = goods_list + mall_goods['goods_list']\n if goods_len > self.pageSize - 100:\n page += 1\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) + '&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list': goods_list}\n headers = self.make_headers()\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n elif goods_list:\n 
item = GoodsSalesItem()\n item['goods_list'] = goods_list\n item['mall_id'] = mall_id\n yield item\n <mask token>\n\n def make_headers(self):\n chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.\n randint(1000, 3200)) + '.94'\n headers = {'Host': 'yangkeduo.com', 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'\n , 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n 'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',\n 'Referer':\n 'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'\n , 'Connection': 'keep-alive', 'User-Agent': \n 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'\n + chrome_version + ' Safari/537.36'}\n ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)\n ) + '.' + str(random.randint(1, 255)) + '.' + str(random.\n randint(1, 255))\n headers['CLIENT-IP'] = ip\n headers['X-FORWARDED-FOR'] = ip\n return headers\n",
"step-4": "import scrapy\nimport json, time, sys, random, re, pyssdb\nfrom scrapy.utils.project import get_project_settings\nfrom spider.items import GoodsSalesItem\ngoods_list = []\n<mask token>\n\n\nclass PddMallGoodsSpider(scrapy.Spider):\n name = 'pdd_mall_goods'\n mall_id_hash = 'pdd_mall_id_hash'\n hash_num = 0\n ssdb_client = ''\n process_nums = 1\n limit = 100\n\n def __init__(self, hash_num=0, process_nums=1):\n self.ssdb_client = pyssdb.Client(get_project_settings().get(\n 'SSDB_HOST'), 8888)\n self.hash_num = int(hash_num)\n self.process_nums = int(process_nums)\n self.pageSize = 500\n\n def start_requests(self):\n mall_nums = self.limit * int(self.process_nums)\n is_end = False\n start_mall_id = ''\n while not is_end:\n mall_ids = self.ssdb_client.hkeys(self.mall_id_hash,\n start_mall_id, '', mall_nums)\n if not mall_ids:\n is_end = True\n continue\n for mall_id in mall_ids:\n mall_id = int(mall_id.decode('utf-8'))\n start_mall_id = mall_id\n if mall_id % self.process_nums != self.hash_num:\n continue\n goods_list = []\n page = 1\n headers = self.make_headers()\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) +\n '&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list':\n goods_list}\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n\n def parse(self, response):\n pass\n goods_list = response.meta['goods_list']\n mall_id = response.meta['mall_id']\n page = response.meta['page']\n mall_goods = response.body.decode('utf-8')\n mall_goods = json.loads(mall_goods)\n goods_len = len(mall_goods['goods_list'])\n if goods_len > 0:\n goods_list = goods_list + mall_goods['goods_list']\n if goods_len > self.pageSize - 100:\n page += 1\n url = (\n 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='\n + str(mall_id) + '&page_no=' + str(page) + 
'&page_size=500')\n meta = {'page': page, 'mall_id': mall_id, 'goods_list': goods_list}\n headers = self.make_headers()\n yield scrapy.Request(url, meta=meta, callback=self.parse,\n headers=headers)\n elif goods_list:\n item = GoodsSalesItem()\n item['goods_list'] = goods_list\n item['mall_id'] = mall_id\n yield item\n \"\"\"生成headers头信息\"\"\"\n\n def make_headers(self):\n chrome_version = str(random.randint(59, 63)) + '.0.' + str(random.\n randint(1000, 3200)) + '.94'\n headers = {'Host': 'yangkeduo.com', 'Accept':\n 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8'\n , 'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',\n 'Accept-Encoding': 'gzip, deflate', 'Host': 'yangkeduo.com',\n 'Referer':\n 'http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026'\n , 'Connection': 'keep-alive', 'User-Agent': \n 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'\n + chrome_version + ' Safari/537.36'}\n ip = str(random.randint(100, 200)) + '.' + str(random.randint(1, 255)\n ) + '.' + str(random.randint(1, 255)) + '.' + str(random.\n randint(1, 255))\n headers['CLIENT-IP'] = ip\n headers['X-FORWARDED-FOR'] = ip\n return headers\n",
"step-5": "# -*- coding: utf-8 -*-\r\nimport scrapy\r\nimport json, time, sys, random, re, pyssdb\r\n\r\nfrom scrapy.utils.project import get_project_settings\r\n\r\nfrom spider.items import GoodsSalesItem\r\n\r\ngoods_list = []\r\n'''获取店铺内产品信息'''\r\nclass PddMallGoodsSpider(scrapy.Spider):\r\n\tname = 'pdd_mall_goods'\r\n\tmall_id_hash \t= 'pdd_mall_id_hash'\r\n\thash_num \t\t= 0\r\n\tssdb_client = ''\r\n\tprocess_nums \t= 1\r\n\tlimit\t\t\t= 100\r\n\r\n\tdef __init__(self, hash_num = 0, process_nums = 1):\r\n\t\tself.ssdb_client = pyssdb.Client(get_project_settings().get('SSDB_HOST'), 8888)\r\n\t\tself.hash_num = int(hash_num) ##当前脚本号\r\n\t\tself.process_nums = int(process_nums) ##脚本总数\r\n\t\tself.pageSize = 500 ##每次抓取的产品数 最大只返回500\r\n\r\n\tdef start_requests(self):\r\n\t\tmall_nums \t\t= \tself.limit * int(self.process_nums) ##一次查询的数量\r\n\r\n\t\tis_end \t\t\t=\tFalse\r\n\t\tstart_mall_id \t=\t'' ##起始查询的店铺key\r\n\t\twhile not is_end:\r\n\t\t\tmall_ids \t=\tself.ssdb_client.hkeys(self.mall_id_hash, start_mall_id, '', mall_nums)\r\n\t\t\t\r\n\t\t\tif not mall_ids: ##没有数据返回\r\n\t\t\t\tis_end \t=\tTrue\r\n\t\t\t\tcontinue\r\n\r\n\t\t\tfor mall_id in mall_ids:\r\n\t\t\t\tmall_id = int( mall_id.decode('utf-8') )\r\n\t\t\t\tstart_mall_id = mall_id\r\n\r\n\t\t\t\tif mall_id % self.process_nums != self.hash_num:\r\n\t\t\t\t\tcontinue\r\n\t\t\t\t\t\r\n\t\t\t\tgoods_list=[]\r\n\t\t\t\tpage = 1\r\n\r\n\t\t\t\theaders = self.make_headers()\r\n\t\t\t\turl = 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='+str(mall_id)+'&page_no='+str(page)+'&page_size=500'\r\n\t\t\t\tmeta = {'page':page, 'mall_id':mall_id, 'goods_list':goods_list}\r\n\t\t\t\tyield scrapy.Request(url, meta=meta, callback=self.parse, headers=headers)\r\n\t\t\t\r\n\tdef parse(self, response):\r\n\t\tpass\r\n\t\tgoods_list=response.meta['goods_list'] ##产品集合\r\n\t\tmall_id = response.meta['mall_id'] ##店铺ID\r\n\t\tpage \t = response.meta['page'] ##每返回一次页面数据 
记录页数\r\n\r\n\t\tmall_goods = response.body.decode('utf-8') ##bytes转换为str\r\n\t\tmall_goods = json.loads(mall_goods)\r\n\r\n\t\tgoods_len = len(mall_goods['goods_list'])\r\n\r\n\t\tif goods_len > 0:\r\n\t\t\tgoods_list = goods_list + mall_goods['goods_list'] ##合并产品列表\r\n\r\n\t\tif goods_len > self.pageSize - 100:\r\n\t\t\tpage += 1\r\n\t\t\t##继续采集下一页面\r\n\t\t\turl = 'http://apiv4.yangkeduo.com/api/turing/mall/query_cat_goods?category_id=0&type=0&sort_type=_sales&mall_id='+str(mall_id)+'&page_no='+str(page)+'&page_size=500'\r\n\t\t\tmeta = {'page':page, 'mall_id':mall_id, 'goods_list':goods_list}\r\n\t\t\theaders = self.make_headers()\r\n\t\t\tyield scrapy.Request(url, meta=meta, callback=self.parse, headers=headers)\r\n\t\telse:\r\n\t\t\tif goods_list:\r\n\t\t\t\titem = GoodsSalesItem()\r\n\t\t\t\titem['goods_list'] = goods_list\r\n\t\t\t\titem['mall_id'] = mall_id\r\n\t\t\t\tyield item\r\n\r\n\t'''生成headers头信息'''\r\n\tdef make_headers(self):\r\n\t\tchrome_version = str(random.randint(59,63))+'.0.'+str(random.randint(1000,3200))+'.94'\r\n\t\theaders = {\r\n\t\t\t\"Host\":\"yangkeduo.com\",\r\n\t\t\t\"Accept\":\"text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8\",\r\n\t\t\t\"Accept-Language\":\"zh-CN,zh;q=0.9,en;q=0.8\",\r\n\t\t\t\"Accept-Encoding\":\"gzip, deflate\",\r\n\t\t\t\"Host\":\"yangkeduo.com\",\r\n\t\t\t\"Referer\":\"http://yangkeduo.com/goods.html?goods_id=442573047&from_subject_id=935&is_spike=0&refer_page_name=subject&refer_page_id=subject_1515726808272_1M143fWqjQ&refer_page_sn=10026\",\r\n\t\t\t\"Connection\":\"keep-alive\",\r\n\t\t\t'User-Agent':'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/'+chrome_version+' Safari/537.36',\r\n\t\t}\r\n\t\t\r\n\t\tip = str(random.randint(100, 200))+'.'+str(random.randint(1, 255))+'.'+str(random.randint(1, 255))+'.'+str(random.randint(1, 255))\r\n\t\theaders['CLIENT-IP'] \t=\tip\r\n\t\theaders['X-FORWARDED-FOR']=\tip\r\n\t\treturn headers",
"step-ids": [
3,
4,
5,
9,
10
]
}
|
[
3,
4,
5,
9,
10
] |
import pandas as pd
dict_data = {'c0': [1, 2, 3], 'c1': [4, 5, 6], 'c2': [
7, 8, 9], 'c3': [10, 11, 12], 'c4': [13, 14, 15]}
df = pd.DataFrame(dict_data)
print(type(df))
print('\n')
print(df)
# <class 'pandas.core.frame.DataFrame'>
# c0 c1 c2 c3 c4
# 0 1 4 7 10 13
# 1 2 5 8 11 14
# 2 3 6 9 12 15
|
normal
|
{
"blob_id": "22f4ae755e7ea43604db39452ca80f44f540708a",
"index": 9503,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(type(df))\nprint('\\n')\nprint(df)\n",
"step-3": "<mask token>\ndict_data = {'c0': [1, 2, 3], 'c1': [4, 5, 6], 'c2': [7, 8, 9], 'c3': [10, \n 11, 12], 'c4': [13, 14, 15]}\ndf = pd.DataFrame(dict_data)\nprint(type(df))\nprint('\\n')\nprint(df)\n",
"step-4": "import pandas as pd\ndict_data = {'c0': [1, 2, 3], 'c1': [4, 5, 6], 'c2': [7, 8, 9], 'c3': [10, \n 11, 12], 'c4': [13, 14, 15]}\ndf = pd.DataFrame(dict_data)\nprint(type(df))\nprint('\\n')\nprint(df)\n",
"step-5": "import pandas as pd\n\ndict_data = {'c0': [1, 2, 3], 'c1': [4, 5, 6], 'c2': [\n 7, 8, 9], 'c3': [10, 11, 12], 'c4': [13, 14, 15]}\n\ndf = pd.DataFrame(dict_data)\n\nprint(type(df))\nprint('\\n')\nprint(df)\n\n# <class 'pandas.core.frame.DataFrame'>\n\n\n# c0 c1 c2 c3 c4\n# 0 1 4 7 10 13\n# 1 2 5 8 11 14\n# 2 3 6 9 12 15\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""Encoder module of Monodepth2
Code partially borrowed from
https://github.com/nianticlabs/monodepth2/blob/master/networks/resnet_encoder.py
"""
from __future__ import absolute_import, division, print_function
import os
import numpy as np
import mxnet as mx
from mxnet.gluon import nn
from mxnet.context import cpu
from ...model_zoo.resnetv1b import \
resnet18_v1b, resnet34_v1b, resnet50_v1s, resnet101_v1s, resnet152_v1s
class ResnetEncoder(nn.HybridBlock):
r"""Encoder of Monodepth2
Parameters
----------
backbone : string
Pre-trained dilated backbone network type ('resnet18', 'resnet34', 'resnet50',
'resnet101' or 'resnet152').
pretrained : bool or str
Refers to if the backbone is pretrained or not. If `True`,
model weights of a model that was trained on ImageNet is loaded.
num_input_images : int
The number of input sequences. 1 for depth encoder, larger than 1 for pose encoder.
(Default: 1)
root : str, default '~/.mxnet/models'
Location for keeping the model parameters.
"""
def __init__(self, backbone, pretrained, num_input_images=1,
root=os.path.join(os.path.expanduser('~'), '.mxnet/models'),
ctx=cpu(), **kwargs):
super(ResnetEncoder, self).__init__()
self.num_ch_enc = np.array([64, 64, 128, 256, 512])
resnets = {'resnet18': resnet18_v1b,
'resnet34': resnet34_v1b,
'resnet50': resnet50_v1s,
'resnet101': resnet101_v1s,
'resnet152': resnet152_v1s}
num_layers = {'resnet18': 18,
'resnet34': 34,
'resnet50': 50,
'resnet101': 101,
'resnet152': 152}
if backbone not in resnets:
raise ValueError("{} is not a valid resnet".format(backbone))
if num_input_images > 1:
self.encoder = resnets[backbone](pretrained=False, ctx=ctx, **kwargs)
if pretrained:
filename = os.path.join(
root, 'resnet%d_v%db_multiple_inputs.params' % (num_layers[backbone], 1))
if not os.path.isfile(filename):
from ..model_store import get_model_file
loaded = mx.nd.load(get_model_file('resnet%d_v%db' % (num_layers[backbone], 1),
tag=pretrained, root=root))
loaded['conv1.weight'] = mx.nd.concat(
*([loaded['conv1.weight']] * num_input_images), dim=1) / num_input_images
mx.nd.save(filename, loaded)
self.encoder.load_parameters(filename, ctx=ctx)
from ...data import ImageNet1kAttr
attrib = ImageNet1kAttr()
self.encoder.synset = attrib.synset
self.encoder.classes = attrib.classes
self.encoder.classes_long = attrib.classes_long
else:
self.encoder = resnets[backbone](pretrained=pretrained, ctx=ctx, **kwargs)
if backbone not in ('resnet18', 'resnet34'):
self.num_ch_enc[1:] *= 4
def hybrid_forward(self, F, input_image):
# pylint: disable=unused-argument, missing-function-docstring
self.features = []
x = (input_image - 0.45) / 0.225
x = self.encoder.conv1(x)
x = self.encoder.bn1(x)
self.features.append(self.encoder.relu(x))
self.features.append(self.encoder.layer1(self.encoder.maxpool(self.features[-1])))
self.features.append(self.encoder.layer2(self.features[-1]))
self.features.append(self.encoder.layer3(self.features[-1]))
self.features.append(self.encoder.layer4(self.features[-1]))
return self.features
def predict(self, input_image):
# pylint: disable=unused-argument, missing-function-docstring
self.features = []
x = (input_image - 0.45) / 0.225
x = self.encoder.conv1(x)
x = self.encoder.bn1(x)
self.features.append(self.encoder.relu(x))
self.features.append(self.encoder.layer1(self.encoder.maxpool(self.features[-1])))
self.features.append(self.encoder.layer2(self.features[-1]))
self.features.append(self.encoder.layer3(self.features[-1]))
self.features.append(self.encoder.layer4(self.features[-1]))
return self.features
|
normal
|
{
"blob_id": "62601eca767800f00b461ef46d72bddc5cf75de0",
"index": 1400,
"step-1": "<mask token>\n\n\nclass ResnetEncoder(nn.HybridBlock):\n <mask token>\n\n def __init__(self, backbone, pretrained, num_input_images=1, root=os.\n path.join(os.path.expanduser('~'), '.mxnet/models'), ctx=cpu(), **\n kwargs):\n super(ResnetEncoder, self).__init__()\n self.num_ch_enc = np.array([64, 64, 128, 256, 512])\n resnets = {'resnet18': resnet18_v1b, 'resnet34': resnet34_v1b,\n 'resnet50': resnet50_v1s, 'resnet101': resnet101_v1s,\n 'resnet152': resnet152_v1s}\n num_layers = {'resnet18': 18, 'resnet34': 34, 'resnet50': 50,\n 'resnet101': 101, 'resnet152': 152}\n if backbone not in resnets:\n raise ValueError('{} is not a valid resnet'.format(backbone))\n if num_input_images > 1:\n self.encoder = resnets[backbone](pretrained=False, ctx=ctx, **\n kwargs)\n if pretrained:\n filename = os.path.join(root, \n 'resnet%d_v%db_multiple_inputs.params' % (num_layers[\n backbone], 1))\n if not os.path.isfile(filename):\n from ..model_store import get_model_file\n loaded = mx.nd.load(get_model_file('resnet%d_v%db' % (\n num_layers[backbone], 1), tag=pretrained, root=root))\n loaded['conv1.weight'] = mx.nd.concat(*([loaded[\n 'conv1.weight']] * num_input_images), dim=1\n ) / num_input_images\n mx.nd.save(filename, loaded)\n self.encoder.load_parameters(filename, ctx=ctx)\n from ...data import ImageNet1kAttr\n attrib = ImageNet1kAttr()\n self.encoder.synset = attrib.synset\n self.encoder.classes = attrib.classes\n self.encoder.classes_long = attrib.classes_long\n else:\n self.encoder = resnets[backbone](pretrained=pretrained, ctx=ctx,\n **kwargs)\n if backbone not in ('resnet18', 'resnet34'):\n self.num_ch_enc[1:] *= 4\n\n def hybrid_forward(self, F, input_image):\n self.features = []\n x = (input_image - 0.45) / 0.225\n x = self.encoder.conv1(x)\n x = self.encoder.bn1(x)\n self.features.append(self.encoder.relu(x))\n self.features.append(self.encoder.layer1(self.encoder.maxpool(self.\n features[-1])))\n 
self.features.append(self.encoder.layer2(self.features[-1]))\n self.features.append(self.encoder.layer3(self.features[-1]))\n self.features.append(self.encoder.layer4(self.features[-1]))\n return self.features\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass ResnetEncoder(nn.HybridBlock):\n <mask token>\n\n def __init__(self, backbone, pretrained, num_input_images=1, root=os.\n path.join(os.path.expanduser('~'), '.mxnet/models'), ctx=cpu(), **\n kwargs):\n super(ResnetEncoder, self).__init__()\n self.num_ch_enc = np.array([64, 64, 128, 256, 512])\n resnets = {'resnet18': resnet18_v1b, 'resnet34': resnet34_v1b,\n 'resnet50': resnet50_v1s, 'resnet101': resnet101_v1s,\n 'resnet152': resnet152_v1s}\n num_layers = {'resnet18': 18, 'resnet34': 34, 'resnet50': 50,\n 'resnet101': 101, 'resnet152': 152}\n if backbone not in resnets:\n raise ValueError('{} is not a valid resnet'.format(backbone))\n if num_input_images > 1:\n self.encoder = resnets[backbone](pretrained=False, ctx=ctx, **\n kwargs)\n if pretrained:\n filename = os.path.join(root, \n 'resnet%d_v%db_multiple_inputs.params' % (num_layers[\n backbone], 1))\n if not os.path.isfile(filename):\n from ..model_store import get_model_file\n loaded = mx.nd.load(get_model_file('resnet%d_v%db' % (\n num_layers[backbone], 1), tag=pretrained, root=root))\n loaded['conv1.weight'] = mx.nd.concat(*([loaded[\n 'conv1.weight']] * num_input_images), dim=1\n ) / num_input_images\n mx.nd.save(filename, loaded)\n self.encoder.load_parameters(filename, ctx=ctx)\n from ...data import ImageNet1kAttr\n attrib = ImageNet1kAttr()\n self.encoder.synset = attrib.synset\n self.encoder.classes = attrib.classes\n self.encoder.classes_long = attrib.classes_long\n else:\n self.encoder = resnets[backbone](pretrained=pretrained, ctx=ctx,\n **kwargs)\n if backbone not in ('resnet18', 'resnet34'):\n self.num_ch_enc[1:] *= 4\n\n def hybrid_forward(self, F, input_image):\n self.features = []\n x = (input_image - 0.45) / 0.225\n x = self.encoder.conv1(x)\n x = self.encoder.bn1(x)\n self.features.append(self.encoder.relu(x))\n self.features.append(self.encoder.layer1(self.encoder.maxpool(self.\n features[-1])))\n 
self.features.append(self.encoder.layer2(self.features[-1]))\n self.features.append(self.encoder.layer3(self.features[-1]))\n self.features.append(self.encoder.layer4(self.features[-1]))\n return self.features\n\n def predict(self, input_image):\n self.features = []\n x = (input_image - 0.45) / 0.225\n x = self.encoder.conv1(x)\n x = self.encoder.bn1(x)\n self.features.append(self.encoder.relu(x))\n self.features.append(self.encoder.layer1(self.encoder.maxpool(self.\n features[-1])))\n self.features.append(self.encoder.layer2(self.features[-1]))\n self.features.append(self.encoder.layer3(self.features[-1]))\n self.features.append(self.encoder.layer4(self.features[-1]))\n return self.features\n",
"step-3": "<mask token>\n\n\nclass ResnetEncoder(nn.HybridBlock):\n \"\"\"Encoder of Monodepth2\n\n Parameters\n ----------\n backbone : string\n Pre-trained dilated backbone network type ('resnet18', 'resnet34', 'resnet50',\n 'resnet101' or 'resnet152').\n pretrained : bool or str\n Refers to if the backbone is pretrained or not. If `True`,\n model weights of a model that was trained on ImageNet is loaded.\n num_input_images : int\n The number of input sequences. 1 for depth encoder, larger than 1 for pose encoder.\n (Default: 1)\n root : str, default '~/.mxnet/models'\n Location for keeping the model parameters.\n \"\"\"\n\n def __init__(self, backbone, pretrained, num_input_images=1, root=os.\n path.join(os.path.expanduser('~'), '.mxnet/models'), ctx=cpu(), **\n kwargs):\n super(ResnetEncoder, self).__init__()\n self.num_ch_enc = np.array([64, 64, 128, 256, 512])\n resnets = {'resnet18': resnet18_v1b, 'resnet34': resnet34_v1b,\n 'resnet50': resnet50_v1s, 'resnet101': resnet101_v1s,\n 'resnet152': resnet152_v1s}\n num_layers = {'resnet18': 18, 'resnet34': 34, 'resnet50': 50,\n 'resnet101': 101, 'resnet152': 152}\n if backbone not in resnets:\n raise ValueError('{} is not a valid resnet'.format(backbone))\n if num_input_images > 1:\n self.encoder = resnets[backbone](pretrained=False, ctx=ctx, **\n kwargs)\n if pretrained:\n filename = os.path.join(root, \n 'resnet%d_v%db_multiple_inputs.params' % (num_layers[\n backbone], 1))\n if not os.path.isfile(filename):\n from ..model_store import get_model_file\n loaded = mx.nd.load(get_model_file('resnet%d_v%db' % (\n num_layers[backbone], 1), tag=pretrained, root=root))\n loaded['conv1.weight'] = mx.nd.concat(*([loaded[\n 'conv1.weight']] * num_input_images), dim=1\n ) / num_input_images\n mx.nd.save(filename, loaded)\n self.encoder.load_parameters(filename, ctx=ctx)\n from ...data import ImageNet1kAttr\n attrib = ImageNet1kAttr()\n self.encoder.synset = attrib.synset\n self.encoder.classes = attrib.classes\n 
self.encoder.classes_long = attrib.classes_long\n else:\n self.encoder = resnets[backbone](pretrained=pretrained, ctx=ctx,\n **kwargs)\n if backbone not in ('resnet18', 'resnet34'):\n self.num_ch_enc[1:] *= 4\n\n def hybrid_forward(self, F, input_image):\n self.features = []\n x = (input_image - 0.45) / 0.225\n x = self.encoder.conv1(x)\n x = self.encoder.bn1(x)\n self.features.append(self.encoder.relu(x))\n self.features.append(self.encoder.layer1(self.encoder.maxpool(self.\n features[-1])))\n self.features.append(self.encoder.layer2(self.features[-1]))\n self.features.append(self.encoder.layer3(self.features[-1]))\n self.features.append(self.encoder.layer4(self.features[-1]))\n return self.features\n\n def predict(self, input_image):\n self.features = []\n x = (input_image - 0.45) / 0.225\n x = self.encoder.conv1(x)\n x = self.encoder.bn1(x)\n self.features.append(self.encoder.relu(x))\n self.features.append(self.encoder.layer1(self.encoder.maxpool(self.\n features[-1])))\n self.features.append(self.encoder.layer2(self.features[-1]))\n self.features.append(self.encoder.layer3(self.features[-1]))\n self.features.append(self.encoder.layer4(self.features[-1]))\n return self.features\n",
"step-4": "<mask token>\nfrom __future__ import absolute_import, division, print_function\nimport os\nimport numpy as np\nimport mxnet as mx\nfrom mxnet.gluon import nn\nfrom mxnet.context import cpu\nfrom ...model_zoo.resnetv1b import resnet18_v1b, resnet34_v1b, resnet50_v1s, resnet101_v1s, resnet152_v1s\n\n\nclass ResnetEncoder(nn.HybridBlock):\n \"\"\"Encoder of Monodepth2\n\n Parameters\n ----------\n backbone : string\n Pre-trained dilated backbone network type ('resnet18', 'resnet34', 'resnet50',\n 'resnet101' or 'resnet152').\n pretrained : bool or str\n Refers to if the backbone is pretrained or not. If `True`,\n model weights of a model that was trained on ImageNet is loaded.\n num_input_images : int\n The number of input sequences. 1 for depth encoder, larger than 1 for pose encoder.\n (Default: 1)\n root : str, default '~/.mxnet/models'\n Location for keeping the model parameters.\n \"\"\"\n\n def __init__(self, backbone, pretrained, num_input_images=1, root=os.\n path.join(os.path.expanduser('~'), '.mxnet/models'), ctx=cpu(), **\n kwargs):\n super(ResnetEncoder, self).__init__()\n self.num_ch_enc = np.array([64, 64, 128, 256, 512])\n resnets = {'resnet18': resnet18_v1b, 'resnet34': resnet34_v1b,\n 'resnet50': resnet50_v1s, 'resnet101': resnet101_v1s,\n 'resnet152': resnet152_v1s}\n num_layers = {'resnet18': 18, 'resnet34': 34, 'resnet50': 50,\n 'resnet101': 101, 'resnet152': 152}\n if backbone not in resnets:\n raise ValueError('{} is not a valid resnet'.format(backbone))\n if num_input_images > 1:\n self.encoder = resnets[backbone](pretrained=False, ctx=ctx, **\n kwargs)\n if pretrained:\n filename = os.path.join(root, \n 'resnet%d_v%db_multiple_inputs.params' % (num_layers[\n backbone], 1))\n if not os.path.isfile(filename):\n from ..model_store import get_model_file\n loaded = mx.nd.load(get_model_file('resnet%d_v%db' % (\n num_layers[backbone], 1), tag=pretrained, root=root))\n loaded['conv1.weight'] = mx.nd.concat(*([loaded[\n 'conv1.weight']] * 
num_input_images), dim=1\n ) / num_input_images\n mx.nd.save(filename, loaded)\n self.encoder.load_parameters(filename, ctx=ctx)\n from ...data import ImageNet1kAttr\n attrib = ImageNet1kAttr()\n self.encoder.synset = attrib.synset\n self.encoder.classes = attrib.classes\n self.encoder.classes_long = attrib.classes_long\n else:\n self.encoder = resnets[backbone](pretrained=pretrained, ctx=ctx,\n **kwargs)\n if backbone not in ('resnet18', 'resnet34'):\n self.num_ch_enc[1:] *= 4\n\n def hybrid_forward(self, F, input_image):\n self.features = []\n x = (input_image - 0.45) / 0.225\n x = self.encoder.conv1(x)\n x = self.encoder.bn1(x)\n self.features.append(self.encoder.relu(x))\n self.features.append(self.encoder.layer1(self.encoder.maxpool(self.\n features[-1])))\n self.features.append(self.encoder.layer2(self.features[-1]))\n self.features.append(self.encoder.layer3(self.features[-1]))\n self.features.append(self.encoder.layer4(self.features[-1]))\n return self.features\n\n def predict(self, input_image):\n self.features = []\n x = (input_image - 0.45) / 0.225\n x = self.encoder.conv1(x)\n x = self.encoder.bn1(x)\n self.features.append(self.encoder.relu(x))\n self.features.append(self.encoder.layer1(self.encoder.maxpool(self.\n features[-1])))\n self.features.append(self.encoder.layer2(self.features[-1]))\n self.features.append(self.encoder.layer3(self.features[-1]))\n self.features.append(self.encoder.layer4(self.features[-1]))\n return self.features\n",
"step-5": "\"\"\"Encoder module of Monodepth2\nCode partially borrowed from\nhttps://github.com/nianticlabs/monodepth2/blob/master/networks/resnet_encoder.py\n\"\"\"\nfrom __future__ import absolute_import, division, print_function\n\nimport os\nimport numpy as np\nimport mxnet as mx\n\nfrom mxnet.gluon import nn\nfrom mxnet.context import cpu\nfrom ...model_zoo.resnetv1b import \\\n resnet18_v1b, resnet34_v1b, resnet50_v1s, resnet101_v1s, resnet152_v1s\n\n\nclass ResnetEncoder(nn.HybridBlock):\n r\"\"\"Encoder of Monodepth2\n\n Parameters\n ----------\n backbone : string\n Pre-trained dilated backbone network type ('resnet18', 'resnet34', 'resnet50',\n 'resnet101' or 'resnet152').\n pretrained : bool or str\n Refers to if the backbone is pretrained or not. If `True`,\n model weights of a model that was trained on ImageNet is loaded.\n num_input_images : int\n The number of input sequences. 1 for depth encoder, larger than 1 for pose encoder.\n (Default: 1)\n root : str, default '~/.mxnet/models'\n Location for keeping the model parameters.\n \"\"\"\n def __init__(self, backbone, pretrained, num_input_images=1,\n root=os.path.join(os.path.expanduser('~'), '.mxnet/models'),\n ctx=cpu(), **kwargs):\n super(ResnetEncoder, self).__init__()\n\n self.num_ch_enc = np.array([64, 64, 128, 256, 512])\n\n resnets = {'resnet18': resnet18_v1b,\n 'resnet34': resnet34_v1b,\n 'resnet50': resnet50_v1s,\n 'resnet101': resnet101_v1s,\n 'resnet152': resnet152_v1s}\n\n num_layers = {'resnet18': 18,\n 'resnet34': 34,\n 'resnet50': 50,\n 'resnet101': 101,\n 'resnet152': 152}\n\n if backbone not in resnets:\n raise ValueError(\"{} is not a valid resnet\".format(backbone))\n\n if num_input_images > 1:\n self.encoder = resnets[backbone](pretrained=False, ctx=ctx, **kwargs)\n if pretrained:\n filename = os.path.join(\n root, 'resnet%d_v%db_multiple_inputs.params' % (num_layers[backbone], 1))\n if not os.path.isfile(filename):\n from ..model_store import get_model_file\n loaded = 
mx.nd.load(get_model_file('resnet%d_v%db' % (num_layers[backbone], 1),\n tag=pretrained, root=root))\n loaded['conv1.weight'] = mx.nd.concat(\n *([loaded['conv1.weight']] * num_input_images), dim=1) / num_input_images\n mx.nd.save(filename, loaded)\n self.encoder.load_parameters(filename, ctx=ctx)\n from ...data import ImageNet1kAttr\n attrib = ImageNet1kAttr()\n self.encoder.synset = attrib.synset\n self.encoder.classes = attrib.classes\n self.encoder.classes_long = attrib.classes_long\n else:\n self.encoder = resnets[backbone](pretrained=pretrained, ctx=ctx, **kwargs)\n\n if backbone not in ('resnet18', 'resnet34'):\n self.num_ch_enc[1:] *= 4\n\n def hybrid_forward(self, F, input_image):\n # pylint: disable=unused-argument, missing-function-docstring\n self.features = []\n x = (input_image - 0.45) / 0.225\n x = self.encoder.conv1(x)\n x = self.encoder.bn1(x)\n self.features.append(self.encoder.relu(x))\n self.features.append(self.encoder.layer1(self.encoder.maxpool(self.features[-1])))\n self.features.append(self.encoder.layer2(self.features[-1]))\n self.features.append(self.encoder.layer3(self.features[-1]))\n self.features.append(self.encoder.layer4(self.features[-1]))\n\n return self.features\n\n def predict(self, input_image):\n # pylint: disable=unused-argument, missing-function-docstring\n self.features = []\n x = (input_image - 0.45) / 0.225\n x = self.encoder.conv1(x)\n x = self.encoder.bn1(x)\n self.features.append(self.encoder.relu(x))\n self.features.append(self.encoder.layer1(self.encoder.maxpool(self.features[-1])))\n self.features.append(self.encoder.layer2(self.features[-1]))\n self.features.append(self.encoder.layer3(self.features[-1]))\n self.features.append(self.encoder.layer4(self.features[-1]))\n\n return self.features\n",
"step-ids": [
3,
4,
5,
6,
7
]
}
|
[
3,
4,
5,
6,
7
] |
#!/usr/bin/env python
from math import factorial
F = [factorial(i) for i in range(10)]
#F[9] * 8 = 2903040 > this means no 8 digit numbers
#F[9] * 7 = 2540160 < this is the maximum that I could think of
total = 0
for i in xrange(10, 2540160):
if sum([F[int(d)] for d in str(i)]) == i:
total = total + i
print total
|
normal
|
{
"blob_id": "d2e8c95dc144aa83128cc815ad145982f64b1819",
"index": 3206,
"step-1": "#!/usr/bin/env python\n\nfrom math import factorial\n\nF = [factorial(i) for i in range(10)]\n#F[9] * 8 = 2903040 > this means no 8 digit numbers\n#F[9] * 7 = 2540160 < this is the maximum that I could think of\n\ntotal = 0\nfor i in xrange(10, 2540160):\n if sum([F[int(d)] for d in str(i)]) == i:\n total = total + i\n\nprint total\n",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
#coding=utf-8
'''
find words and count
By @liuxingpuu
'''
import re
fin= open("example","r")
fout = open("reuslt.txt","w")
str=fin.read()
reObj = re.compile("\b?([a-zA-Z]+)\b?")
words = reObj.findall(str)
word_dict={}
for word in words:
if(word_dict.has_key(word)):
word_dict[word.lower()]=max(word_dict[word.lower()],words.count(word.lower())+words.count(word.upper())+words.count(word))
else:
word_dict[word.lower()]=max(0,words.count(word.lower())+words.count(word.upper())+words.count(word))
for(word,number) in word_dict.items():
fout.write(word+":%d\n"%number)
|
normal
|
{
"blob_id": "addab37cb23abead2d9f77a65336cd6026c52c68",
"index": 8559,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor word in words:\n if word_dict.has_key(word):\n word_dict[word.lower()] = max(word_dict[word.lower()], words.count(\n word.lower()) + words.count(word.upper()) + words.count(word))\n else:\n word_dict[word.lower()] = max(0, words.count(word.lower()) + words.\n count(word.upper()) + words.count(word))\nfor word, number in word_dict.items():\n fout.write(word + ':%d\\n' % number)\n",
"step-3": "<mask token>\nfin = open('example', 'r')\nfout = open('reuslt.txt', 'w')\nstr = fin.read()\nreObj = re.compile('\\x08?([a-zA-Z]+)\\x08?')\nwords = reObj.findall(str)\nword_dict = {}\nfor word in words:\n if word_dict.has_key(word):\n word_dict[word.lower()] = max(word_dict[word.lower()], words.count(\n word.lower()) + words.count(word.upper()) + words.count(word))\n else:\n word_dict[word.lower()] = max(0, words.count(word.lower()) + words.\n count(word.upper()) + words.count(word))\nfor word, number in word_dict.items():\n fout.write(word + ':%d\\n' % number)\n",
"step-4": "<mask token>\nimport re\nfin = open('example', 'r')\nfout = open('reuslt.txt', 'w')\nstr = fin.read()\nreObj = re.compile('\\x08?([a-zA-Z]+)\\x08?')\nwords = reObj.findall(str)\nword_dict = {}\nfor word in words:\n if word_dict.has_key(word):\n word_dict[word.lower()] = max(word_dict[word.lower()], words.count(\n word.lower()) + words.count(word.upper()) + words.count(word))\n else:\n word_dict[word.lower()] = max(0, words.count(word.lower()) + words.\n count(word.upper()) + words.count(word))\nfor word, number in word_dict.items():\n fout.write(word + ':%d\\n' % number)\n",
"step-5": "#coding=utf-8\n'''\nfind words and count\nBy @liuxingpuu\n'''\nimport re\n\nfin= open(\"example\",\"r\")\nfout = open(\"reuslt.txt\",\"w\")\nstr=fin.read()\nreObj = re.compile(\"\\b?([a-zA-Z]+)\\b?\")\nwords = reObj.findall(str)\nword_dict={}\nfor word in words:\n if(word_dict.has_key(word)):\n word_dict[word.lower()]=max(word_dict[word.lower()],words.count(word.lower())+words.count(word.upper())+words.count(word))\n else:\n word_dict[word.lower()]=max(0,words.count(word.lower())+words.count(word.upper())+words.count(word))\nfor(word,number) in word_dict.items():\n fout.write(word+\":%d\\n\"%number)",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
import httplib
import sys
http_server = "localhost:8000"
connection = httplib.HTTPConnection(http_server)
# Open test input.
test_file_path = "test_input"
test_f = open(test_file_path)
inputs = test_f.readlines()
inputs = [x.strip() for x in inputs]
test_f.close()
# Open expected input.
expected_file_path = "expected"
expected_f = open(expected_file_path)
expecteds = expected_f.readlines()
expecteds = [x.strip() for x in expecteds]
expected_f.close()
assert(len(inputs) == len(expecteds))
for i in range(len(inputs)):
connection.request("GET", ("<start>%s<end>" % inputs[i]))
response = connection.getresponse()
if response.status != 200:
print("Request failed for input: %s. Reason: %s" % (inputs[i], response.reason))
output = response.read()
print("Output:", output)
print("Expected:", expecteds[i])
if expecteds[i] == output:
print("SUCCESS")
else:
print("FAILURE")
|
normal
|
{
"blob_id": "cd9b04a93d85ba0ee2a38b534386f9aec0ef6895",
"index": 5165,
"step-1": "<mask token>\n",
"step-2": "<mask token>\ntest_f.close()\n<mask token>\nexpected_f.close()\nassert len(inputs) == len(expecteds)\nfor i in range(len(inputs)):\n connection.request('GET', '<start>%s<end>' % inputs[i])\n response = connection.getresponse()\n if response.status != 200:\n print('Request failed for input: %s. Reason: %s' % (inputs[i],\n response.reason))\n output = response.read()\n print('Output:', output)\n print('Expected:', expecteds[i])\n if expecteds[i] == output:\n print('SUCCESS')\n else:\n print('FAILURE')\n",
"step-3": "<mask token>\nhttp_server = 'localhost:8000'\nconnection = httplib.HTTPConnection(http_server)\ntest_file_path = 'test_input'\ntest_f = open(test_file_path)\ninputs = test_f.readlines()\ninputs = [x.strip() for x in inputs]\ntest_f.close()\nexpected_file_path = 'expected'\nexpected_f = open(expected_file_path)\nexpecteds = expected_f.readlines()\nexpecteds = [x.strip() for x in expecteds]\nexpected_f.close()\nassert len(inputs) == len(expecteds)\nfor i in range(len(inputs)):\n connection.request('GET', '<start>%s<end>' % inputs[i])\n response = connection.getresponse()\n if response.status != 200:\n print('Request failed for input: %s. Reason: %s' % (inputs[i],\n response.reason))\n output = response.read()\n print('Output:', output)\n print('Expected:', expecteds[i])\n if expecteds[i] == output:\n print('SUCCESS')\n else:\n print('FAILURE')\n",
"step-4": "import httplib\nimport sys\nhttp_server = 'localhost:8000'\nconnection = httplib.HTTPConnection(http_server)\ntest_file_path = 'test_input'\ntest_f = open(test_file_path)\ninputs = test_f.readlines()\ninputs = [x.strip() for x in inputs]\ntest_f.close()\nexpected_file_path = 'expected'\nexpected_f = open(expected_file_path)\nexpecteds = expected_f.readlines()\nexpecteds = [x.strip() for x in expecteds]\nexpected_f.close()\nassert len(inputs) == len(expecteds)\nfor i in range(len(inputs)):\n connection.request('GET', '<start>%s<end>' % inputs[i])\n response = connection.getresponse()\n if response.status != 200:\n print('Request failed for input: %s. Reason: %s' % (inputs[i],\n response.reason))\n output = response.read()\n print('Output:', output)\n print('Expected:', expecteds[i])\n if expecteds[i] == output:\n print('SUCCESS')\n else:\n print('FAILURE')\n",
"step-5": "import httplib\nimport sys\n\nhttp_server = \"localhost:8000\"\nconnection = httplib.HTTPConnection(http_server)\n\n# Open test input. \ntest_file_path = \"test_input\"\ntest_f = open(test_file_path)\ninputs = test_f.readlines()\ninputs = [x.strip() for x in inputs]\ntest_f.close()\n\n# Open expected input.\nexpected_file_path = \"expected\"\nexpected_f = open(expected_file_path)\nexpecteds = expected_f.readlines()\nexpecteds = [x.strip() for x in expecteds]\nexpected_f.close()\nassert(len(inputs) == len(expecteds))\t\n\nfor i in range(len(inputs)):\n connection.request(\"GET\", (\"<start>%s<end>\" % inputs[i]))\n response = connection.getresponse()\n if response.status != 200:\n print(\"Request failed for input: %s. Reason: %s\" % (inputs[i], response.reason))\n output = response.read()\n print(\"Output:\", output)\n print(\"Expected:\", expecteds[i])\n if expecteds[i] == output:\n print(\"SUCCESS\")\n else:\n print(\"FAILURE\")\n\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from __future__ import absolute_import
from builtins import str
from builtins import object
import unittest
import sys, os, re
import forcebalance
import abc
import numpy
from __init__ import ForceBalanceTestCase
class TestImplemented(ForceBalanceTestCase):
def test_implemented_targets_derived_from_target(self):
"""Check classes listed in Implemented_Targets are derived from Target"""
for key in forcebalance.objective.Implemented_Targets.keys():
self.logger.debug("Assert %s is subclass of target\n" % str(forcebalance.objective.Implemented_Targets[key]))
self.assertTrue(issubclass(forcebalance.objective.Implemented_Targets[key],forcebalance.target.Target))
def test_no_unlisted_classes_derived_from_Target(self):
"""Check for unknown omissions from Implemented_Targets
Check to make sure any classes derived from Target are either
listed in Implemented_Targets or in the exclusion list in this
test case
"""
self.skipTest("Not sure if test is working properly.")
forcebalance_modules=[module[:-3] for module in os.listdir(forcebalance.__path__[0])
if re.compile(".*\.py$").match(module)
and module not in ["__init__.py"]]
for module in forcebalance_modules:
# LPW: I don't think dcdlib should be imported this way.
print(module)
if module == "_dcdlib": continue
m = __import__('forcebalance.' + module)
objs = dir(eval('m.' + module))
print(objs)
for obj in objs:
obj = eval('m.'+module+'.'+obj)
if type(obj) == abc.ABCMeta:
implemented = [i for i in forcebalance.objective.Implemented_Targets.values()]
# list of documented exceptions
# Basically, platform-independent targets are excluded.
exclude = ['Target',
'AbInitio',
'Interaction',
'Interaction_GMX',
'Liquid',
'Lipid',
'BindingEnergy',
'LeastSquares',
'Vibration',
'Thermo',
'Hydration',
'Moments']
print(obj)
if obj not in implemented and obj.__name__ not in exclude:
self.fail("Unknown class '%s' not listed in Implemented_Targets" % obj.__name__)
class TestPenalty(ForceBalanceTestCase):
def setUp(self):
self.options=forcebalance.parser.gen_opts_defaults.copy()
self.options.update({
'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01,
'jobtype': 'NEWTON',
'forcefield': ['cc-pvdz-overlap-original.gbs']})
os.chdir(self.options['root'])
self.ff = forcebalance.forcefield.FF(self.options)
self.np=self.ff.np
self.penalties = []
for ptype in forcebalance.objective.Penalty.Pen_Names.keys():
penalty = forcebalance.objective.Penalty(ptype,
self.ff,
self.options['penalty_additive'],
self.options['penalty_multiplicative'],
self.options['penalty_hyperbolic_b'],
self.options['penalty_alpha'])
self.penalties.append(penalty)
def test_penalty_compute(self):
"""Check penalty computation functions"""
objective = {'G': numpy.zeros((9)),
'H': numpy.diag((1,)*9),
'X': 1}
for penalty in self.penalties:
result=penalty.compute([1]*self.np, objective)
self.assertEqual(tuple, type(result))
# more tests go here
class ObjectiveTests(object):
def test_target_zero_order_terms(self):
"""Check zero order target terms"""
obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=0)
self.assertEqual(type(obj),dict)
self.assertTrue("X" in obj)
self.assertNotEqual(int(obj["X"]), 0)
self.assertTrue("G" in obj)
self.assertFalse(obj["G"].any())
self.assertTrue("H" in obj)
self.assertEqual(obj["H"], numpy.diag([1]*self.ff.np))
def test_target_first_order_terms(self):
"""Check first order target terms"""
obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=1)
self.assertEqual(type(obj),dict)
self.assertTrue("X" in obj)
self.assertTrue("G" in obj)
self.assertTrue("H" in obj)
def test_target_second_order_terms(self):
"""Check second order target terms"""
obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=2)
self.assertEqual(type(obj),dict)
self.assertTrue("X" in obj)
self.assertTrue("G" in obj)
self.assertTrue("H" in obj)
def test_indicate(self):
"""Check objective.indicate() runs without errors"""
self.objective.Indicate()
class TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):
def setUp(self):
self.options=forcebalance.parser.gen_opts_defaults.copy()
self.options.update({
'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01,
'jobtype': 'NEWTON',
'forcefield': ['water.itp']})
os.chdir(self.options['root'])
self.logger.debug("\nUsing the following options:\n%s\n" % str(self.options))
self.tgt_opts = [ forcebalance.parser.tgt_opts_defaults.copy() ]
self.tgt_opts[0].update({"type" : "ABINITIO_GMX", "name" : "cluster-06"})
self.ff = forcebalance.forcefield.FF(self.options)
self.objective = forcebalance.objective.Objective(self.options, self.tgt_opts,self.ff)
def shortDescription(self):
return super(TestWaterObjective, self).shortDescription() + " (AbInitio_GMX target)"
class TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):
def setUp(self):
self.options=forcebalance.parser.gen_opts_defaults.copy()
self.options.update({
'root': os.getcwd() + '/test/files',
'penalty_additive': 0.01,
'jobtype': 'NEWTON',
'forcefield': ['bro.itp']})
os.chdir(self.options['root'])
self.logger.debug("\nUsing the following options:\n%s\n" % str(self.options))
self.tgt_opts = [ forcebalance.parser.tgt_opts_defaults.copy() ]
self.tgt_opts[0].update({"type" : "LIQUID_GMX", "name" : "LiquidBromine"})
self.ff = forcebalance.forcefield.FF(self.options)
self.objective = forcebalance.objective.Objective(self.options, self.tgt_opts,self.ff)
def shortDescription(self):
return super(TestBromineObjective, self).shortDescription() + " (Liquid_GMX target)"
if __name__ == '__main__':
unittest.main()
|
normal
|
{
"blob_id": "f91e1fdc31b2fe1aef15757576d847c617a86201",
"index": 1121,
"step-1": "<mask token>\n\n\nclass TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'bro.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}\n )\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n <mask token>\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\nclass ObjectiveTests(object):\n\n def test_target_zero_order_terms(self):\n \"\"\"Check zero order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=0)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertNotEqual(int(obj['X']), 0)\n self.assertTrue('G' in obj)\n self.assertFalse(obj['G'].any())\n self.assertTrue('H' in obj)\n self.assertEqual(obj['H'], numpy.diag([1] * self.ff.np))\n\n def test_target_first_order_terms(self):\n \"\"\"Check first order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=1)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_target_second_order_terms(self):\n \"\"\"Check second order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=2)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_indicate(self):\n \"\"\"Check objective.indicate() runs without errors\"\"\"\n self.objective.Indicate()\n\n\nclass TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'water.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'ABINITIO_GMX', 'name': 'cluster-06'})\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def shortDescription(self):\n return super(TestWaterObjective, self).shortDescription(\n ) + ' 
(AbInitio_GMX target)'\n\n\nclass TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'bro.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}\n )\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def shortDescription(self):\n return super(TestBromineObjective, self).shortDescription(\n ) + ' (Liquid_GMX target)'\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\nclass TestPenalty(ForceBalanceTestCase):\n <mask token>\n <mask token>\n\n\nclass ObjectiveTests(object):\n\n def test_target_zero_order_terms(self):\n \"\"\"Check zero order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=0)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertNotEqual(int(obj['X']), 0)\n self.assertTrue('G' in obj)\n self.assertFalse(obj['G'].any())\n self.assertTrue('H' in obj)\n self.assertEqual(obj['H'], numpy.diag([1] * self.ff.np))\n\n def test_target_first_order_terms(self):\n \"\"\"Check first order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=1)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_target_second_order_terms(self):\n \"\"\"Check second order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=2)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_indicate(self):\n \"\"\"Check objective.indicate() runs without errors\"\"\"\n self.objective.Indicate()\n\n\nclass TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'water.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'ABINITIO_GMX', 'name': 'cluster-06'})\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def 
shortDescription(self):\n return super(TestWaterObjective, self).shortDescription(\n ) + ' (AbInitio_GMX target)'\n\n\nclass TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'bro.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}\n )\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def shortDescription(self):\n return super(TestBromineObjective, self).shortDescription(\n ) + ' (Liquid_GMX target)'\n\n\n<mask token>\n",
"step-4": "<mask token>\n\n\nclass TestImplemented(ForceBalanceTestCase):\n <mask token>\n <mask token>\n\n\nclass TestPenalty(ForceBalanceTestCase):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'cc-pvdz-overlap-original.gbs']})\n os.chdir(self.options['root'])\n self.ff = forcebalance.forcefield.FF(self.options)\n self.np = self.ff.np\n self.penalties = []\n for ptype in forcebalance.objective.Penalty.Pen_Names.keys():\n penalty = forcebalance.objective.Penalty(ptype, self.ff, self.\n options['penalty_additive'], self.options[\n 'penalty_multiplicative'], self.options[\n 'penalty_hyperbolic_b'], self.options['penalty_alpha'])\n self.penalties.append(penalty)\n\n def test_penalty_compute(self):\n \"\"\"Check penalty computation functions\"\"\"\n objective = {'G': numpy.zeros(9), 'H': numpy.diag((1,) * 9), 'X': 1}\n for penalty in self.penalties:\n result = penalty.compute([1] * self.np, objective)\n self.assertEqual(tuple, type(result))\n\n\nclass ObjectiveTests(object):\n\n def test_target_zero_order_terms(self):\n \"\"\"Check zero order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=0)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertNotEqual(int(obj['X']), 0)\n self.assertTrue('G' in obj)\n self.assertFalse(obj['G'].any())\n self.assertTrue('H' in obj)\n self.assertEqual(obj['H'], numpy.diag([1] * self.ff.np))\n\n def test_target_first_order_terms(self):\n \"\"\"Check first order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=1)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_target_second_order_terms(self):\n \"\"\"Check second order target terms\"\"\"\n obj = 
self.objective.Target_Terms(numpy.array([0.5] * self.ff.np),\n Order=2)\n self.assertEqual(type(obj), dict)\n self.assertTrue('X' in obj)\n self.assertTrue('G' in obj)\n self.assertTrue('H' in obj)\n\n def test_indicate(self):\n \"\"\"Check objective.indicate() runs without errors\"\"\"\n self.objective.Indicate()\n\n\nclass TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'water.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'ABINITIO_GMX', 'name': 'cluster-06'})\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def shortDescription(self):\n return super(TestWaterObjective, self).shortDescription(\n ) + ' (AbInitio_GMX target)'\n\n\nclass TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):\n\n def setUp(self):\n self.options = forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01, 'jobtype': 'NEWTON', 'forcefield': [\n 'bro.itp']})\n os.chdir(self.options['root'])\n self.logger.debug('\\nUsing the following options:\\n%s\\n' % str(self\n .options))\n self.tgt_opts = [forcebalance.parser.tgt_opts_defaults.copy()]\n self.tgt_opts[0].update({'type': 'LIQUID_GMX', 'name': 'LiquidBromine'}\n )\n self.ff = forcebalance.forcefield.FF(self.options)\n self.objective = forcebalance.objective.Objective(self.options,\n self.tgt_opts, self.ff)\n\n def shortDescription(self):\n return super(TestBromineObjective, self).shortDescription(\n ) + ' (Liquid_GMX target)'\n\n\n<mask token>\n",
"step-5": "from __future__ import absolute_import\nfrom builtins import str\nfrom builtins import object\nimport unittest\nimport sys, os, re\nimport forcebalance\nimport abc\nimport numpy\nfrom __init__ import ForceBalanceTestCase\n\nclass TestImplemented(ForceBalanceTestCase):\n def test_implemented_targets_derived_from_target(self):\n \"\"\"Check classes listed in Implemented_Targets are derived from Target\"\"\"\n for key in forcebalance.objective.Implemented_Targets.keys():\n self.logger.debug(\"Assert %s is subclass of target\\n\" % str(forcebalance.objective.Implemented_Targets[key]))\n self.assertTrue(issubclass(forcebalance.objective.Implemented_Targets[key],forcebalance.target.Target))\n \n def test_no_unlisted_classes_derived_from_Target(self):\n \"\"\"Check for unknown omissions from Implemented_Targets\n \n Check to make sure any classes derived from Target are either\n listed in Implemented_Targets or in the exclusion list in this\n test case\n \"\"\"\n self.skipTest(\"Not sure if test is working properly.\")\n forcebalance_modules=[module[:-3] for module in os.listdir(forcebalance.__path__[0])\n if re.compile(\".*\\.py$\").match(module)\n and module not in [\"__init__.py\"]]\n for module in forcebalance_modules:\n # LPW: I don't think dcdlib should be imported this way.\n print(module)\n if module == \"_dcdlib\": continue\n m = __import__('forcebalance.' + module)\n objs = dir(eval('m.' 
+ module))\n print(objs)\n for obj in objs:\n obj = eval('m.'+module+'.'+obj)\n if type(obj) == abc.ABCMeta:\n implemented = [i for i in forcebalance.objective.Implemented_Targets.values()]\n # list of documented exceptions\n # Basically, platform-independent targets are excluded.\n exclude = ['Target',\n 'AbInitio',\n 'Interaction',\n 'Interaction_GMX',\n 'Liquid',\n 'Lipid',\n 'BindingEnergy',\n 'LeastSquares',\n 'Vibration',\n 'Thermo',\n 'Hydration',\n 'Moments']\n print(obj)\n if obj not in implemented and obj.__name__ not in exclude:\n self.fail(\"Unknown class '%s' not listed in Implemented_Targets\" % obj.__name__)\n\nclass TestPenalty(ForceBalanceTestCase):\n def setUp(self):\n self.options=forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({\n 'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01,\n 'jobtype': 'NEWTON',\n 'forcefield': ['cc-pvdz-overlap-original.gbs']})\n os.chdir(self.options['root'])\n\n self.ff = forcebalance.forcefield.FF(self.options)\n self.np=self.ff.np\n\n self.penalties = []\n for ptype in forcebalance.objective.Penalty.Pen_Names.keys():\n penalty = forcebalance.objective.Penalty(ptype,\n self.ff,\n self.options['penalty_additive'],\n self.options['penalty_multiplicative'],\n self.options['penalty_hyperbolic_b'],\n self.options['penalty_alpha'])\n self.penalties.append(penalty)\n\n def test_penalty_compute(self):\n \"\"\"Check penalty computation functions\"\"\"\n objective = {'G': numpy.zeros((9)),\n 'H': numpy.diag((1,)*9),\n 'X': 1}\n for penalty in self.penalties:\n result=penalty.compute([1]*self.np, objective)\n self.assertEqual(tuple, type(result))\n # more tests go here\n \nclass ObjectiveTests(object): \n def test_target_zero_order_terms(self):\n \"\"\"Check zero order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=0)\n self.assertEqual(type(obj),dict)\n self.assertTrue(\"X\" in obj)\n self.assertNotEqual(int(obj[\"X\"]), 0)\n \n self.assertTrue(\"G\" 
in obj)\n self.assertFalse(obj[\"G\"].any())\n \n self.assertTrue(\"H\" in obj)\n self.assertEqual(obj[\"H\"], numpy.diag([1]*self.ff.np))\n \n def test_target_first_order_terms(self):\n \"\"\"Check first order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=1)\n self.assertEqual(type(obj),dict)\n self.assertTrue(\"X\" in obj)\n self.assertTrue(\"G\" in obj)\n self.assertTrue(\"H\" in obj)\n \n def test_target_second_order_terms(self):\n \"\"\"Check second order target terms\"\"\"\n obj = self.objective.Target_Terms(numpy.array([.5]*self.ff.np), Order=2)\n self.assertEqual(type(obj),dict)\n self.assertTrue(\"X\" in obj)\n self.assertTrue(\"G\" in obj)\n self.assertTrue(\"H\" in obj)\n \n def test_indicate(self):\n \"\"\"Check objective.indicate() runs without errors\"\"\"\n self.objective.Indicate()\n\nclass TestWaterObjective(ForceBalanceTestCase, ObjectiveTests):\n def setUp(self):\n self.options=forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({\n 'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01,\n 'jobtype': 'NEWTON',\n 'forcefield': ['water.itp']})\n os.chdir(self.options['root'])\n \n self.logger.debug(\"\\nUsing the following options:\\n%s\\n\" % str(self.options))\n\n self.tgt_opts = [ forcebalance.parser.tgt_opts_defaults.copy() ]\n self.tgt_opts[0].update({\"type\" : \"ABINITIO_GMX\", \"name\" : \"cluster-06\"})\n self.ff = forcebalance.forcefield.FF(self.options)\n \n self.objective = forcebalance.objective.Objective(self.options, self.tgt_opts,self.ff)\n \n def shortDescription(self):\n return super(TestWaterObjective, self).shortDescription() + \" (AbInitio_GMX target)\"\n \nclass TestBromineObjective(ForceBalanceTestCase, ObjectiveTests):\n def setUp(self):\n self.options=forcebalance.parser.gen_opts_defaults.copy()\n self.options.update({\n 'root': os.getcwd() + '/test/files',\n 'penalty_additive': 0.01,\n 'jobtype': 'NEWTON',\n 'forcefield': ['bro.itp']})\n 
os.chdir(self.options['root'])\n \n self.logger.debug(\"\\nUsing the following options:\\n%s\\n\" % str(self.options))\n\n self.tgt_opts = [ forcebalance.parser.tgt_opts_defaults.copy() ]\n self.tgt_opts[0].update({\"type\" : \"LIQUID_GMX\", \"name\" : \"LiquidBromine\"})\n self.ff = forcebalance.forcefield.FF(self.options)\n \n self.objective = forcebalance.objective.Objective(self.options, self.tgt_opts,self.ff)\n \n def shortDescription(self):\n return super(TestBromineObjective, self).shortDescription() + \" (Liquid_GMX target)\"\n\nif __name__ == '__main__': \n unittest.main()\n",
"step-ids": [
2,
11,
12,
15,
20
]
}
|
[
2,
11,
12,
15,
20
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
version = 2, 5, 8
version_string = '.'.join(str(v) for v in version)
release_date = '2015.12.27'
<|reserved_special_token_1|>
version = (2, 5, 8)
version_string = ".".join(str(v) for v in version)
release_date = "2015.12.27"
|
flexible
|
{
"blob_id": "28077af0759e062078f7b9d1f7bbbb93c62835cb",
"index": 5063,
"step-1": "<mask token>\n",
"step-2": "version = 2, 5, 8\nversion_string = '.'.join(str(v) for v in version)\nrelease_date = '2015.12.27'\n",
"step-3": "version = (2, 5, 8)\nversion_string = \".\".join(str(v) for v in version)\n\nrelease_date = \"2015.12.27\"\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def Sort(a):
i = 1
n = len(a)
while i < len(a):
j = i
print(i - 1, '\t', i)
while a[j - 1] > a[j] and j >= 0:
j -= 1
print('Key : ', a[i], ' inserting at: ', j, '\t in ', a)
if n > 2:
j1 = n - 2
temp = arr[n - 1]
while arr[j1] > temp and j1 >= 0:
arr[j1 + 1] = arr[j1]
j1 -= 1
print(' '.join(list(map(str, arr))))
arr[j1 + 1] = temp
print(' '.join(list(map(str, arr))))
elif n == 1:
return arr
else:
temp = arr[1]
arr[1] = arr[0]
print(' '.join(list(map(str, arr))))
arr[0] = temp
print(' '.join(list(map(str, arr))))
i += 1
return a
<|reserved_special_token_1|>
'''
def Sort(a):
i=1
while i<len(a):
j=i
while j>0 and a[j-1] > a[j]:
temp = a[j-1]
a[j-1] = a[j]
a[j] = temp
j-=1
i+=1
return a
'''
def Sort(a):
i=1
n=len(a)
while i<len(a):
j=i
print(i-1,'\t',i)
while a[j-1]>a[j] and j>=0:
j-=1
print('Key : ',a[i],' inserting at: ',j, '\t in ',a)
if n>2:
j1=n-2
temp = arr[n-1]
while arr[j1] > temp and j1>=0:
arr[j1+1] = arr[j1]
j1-=1
print(' '.join(list(map(str, arr))))
arr[j1+1] = temp
print(' '.join(list(map(str, arr))))
elif n==1:
return arr
else: # len(arr) =2
temp = arr[1]
arr[1]=arr[0]
print(' '.join(list(map(str, arr))))
arr[0] = temp
print(' '.join(list(map(str, arr))))
i+=1
return a
|
flexible
|
{
"blob_id": "3f8b8b8cfbe712f09734d0fb7302073187d65a73",
"index": 982,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef Sort(a):\n i = 1\n n = len(a)\n while i < len(a):\n j = i\n print(i - 1, '\\t', i)\n while a[j - 1] > a[j] and j >= 0:\n j -= 1\n print('Key : ', a[i], ' inserting at: ', j, '\\t in ', a)\n if n > 2:\n j1 = n - 2\n temp = arr[n - 1]\n while arr[j1] > temp and j1 >= 0:\n arr[j1 + 1] = arr[j1]\n j1 -= 1\n print(' '.join(list(map(str, arr))))\n arr[j1 + 1] = temp\n print(' '.join(list(map(str, arr))))\n elif n == 1:\n return arr\n else:\n temp = arr[1]\n arr[1] = arr[0]\n print(' '.join(list(map(str, arr))))\n arr[0] = temp\n print(' '.join(list(map(str, arr))))\n i += 1\n return a\n",
"step-3": "'''\ndef Sort(a):\n i=1\n while i<len(a):\n j=i\n while j>0 and a[j-1] > a[j]:\n temp = a[j-1]\n a[j-1] = a[j]\n a[j] = temp\n j-=1\n i+=1\n return a\n'''\ndef Sort(a):\n i=1\n n=len(a)\n while i<len(a):\n j=i\n print(i-1,'\\t',i)\n while a[j-1]>a[j] and j>=0:\n j-=1\n print('Key : ',a[i],' inserting at: ',j, '\\t in ',a)\n if n>2:\n j1=n-2\n temp = arr[n-1]\n while arr[j1] > temp and j1>=0:\n arr[j1+1] = arr[j1]\n j1-=1\n print(' '.join(list(map(str, arr))))\n arr[j1+1] = temp\n print(' '.join(list(map(str, arr))))\n elif n==1: \n return arr\n else: # len(arr) =2\n temp = arr[1]\n arr[1]=arr[0]\n print(' '.join(list(map(str, arr))))\n arr[0] = temp \n print(' '.join(list(map(str, arr))))\n i+=1\n return a\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def findFirst(arr, l, h, x):
    """Binary search for x in the sorted slice arr[l..h] (inclusive).

    Returns the index of an occurrence of x (not necessarily the
    leftmost one when duplicates exist), or -1 if x is absent.
    """
    lo, hi = l, h
    while lo <= hi:
        mid = (lo + hi) // 2
        if arr[mid] == x:
            return mid
        if arr[mid] > x:
            hi = mid - 1
        else:
            lo = mid + 1
    return -1
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def findFirst(arr, l, h, x):
    """Recursive binary search in sorted arr between indices l and h.

    Returns an index where arr[index] == x, or -1 when x is not
    present in the range.
    """
    if h < l:
        return -1
    mid = l + (h - l) // 2
    if x == arr[mid]:
        return mid
    if x < arr[mid]:
        return findFirst(arr, l, mid - 1, x)
    return findFirst(arr, mid + 1, h, x)
def indexes(arr, x):
    """Return [first, last] positions of x in sorted arr, or [-1, -1].

    A binary search locates one occurrence; the span is then widened
    linearly in both directions to cover all duplicates.
    """
    size = len(arr)
    hit = findFirst(arr, 0, size - 1, x)
    if hit == -1:
        return [-1, -1]
    lower = upper = hit
    while upper + 1 < size and arr[upper + 1] == x:
        upper += 1
    while lower > 0 and arr[lower - 1] == x:
        lower -= 1
    return [lower, upper]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
def findFirst(arr, l, h, x):
    """Binary search over arr[l..h]; arr must be sorted ascending.

    Returns an index holding x (not guaranteed to be the leftmost
    occurrence when duplicates exist), or -1 when x is missing.
    """
    low, high = l, h
    while low <= high:
        pivot = (low + high) // 2
        value = arr[pivot]
        if value == x:
            return pivot
        if value > x:
            high = pivot - 1
        else:
            low = pivot + 1
    return -1
def indexes(arr, x):
    """Locate the inclusive [low, high] index range of x in sorted arr.

    Returns [-1, -1] when x does not occur at all.
    """
    count = len(arr)
    anchor = findFirst(arr, 0, count - 1, x)
    if anchor < 0:
        return [-1, -1]
    high = anchor
    for j in range(anchor + 1, count):
        if arr[j] != x:
            break
        high = j
    low = anchor
    for j in range(anchor - 1, -1, -1):
        if arr[j] != x:
            break
        low = j
    return [low, high]
# Demo: print the first/last positions of 5 in a sorted sample list.
span = indexes([1, 2, 5, 5, 5, 5, 5, 12, 45, 67], 5)
print(span)
|
flexible
|
{
"blob_id": "b4783540224902b10088edbd038d6d664934a237",
"index": 4893,
"step-1": "<mask token>\n",
"step-2": "def findFirst(arr, l, h, x):\n if l > h:\n return -1\n mid = (l + h) // 2\n if arr[mid] == x:\n return mid\n elif arr[mid] > x:\n return findFirst(arr, l, mid - 1, x)\n return findFirst(arr, mid + 1, h, x)\n\n\n<mask token>\n",
"step-3": "def findFirst(arr, l, h, x):\n if l > h:\n return -1\n mid = (l + h) // 2\n if arr[mid] == x:\n return mid\n elif arr[mid] > x:\n return findFirst(arr, l, mid - 1, x)\n return findFirst(arr, mid + 1, h, x)\n\n\ndef indexes(arr, x):\n n = len(arr)\n ind = findFirst(arr, 0, n - 1, x)\n if ind == -1:\n return [-1, -1]\n l = u = ind\n for i in range(ind + 1, n):\n if arr[i] == x:\n u = i\n else:\n break\n for i in range(ind - 1, -1, -1):\n if arr[i] == x:\n l = i\n else:\n break\n return [l, u]\n\n\n<mask token>\n",
"step-4": "def findFirst(arr, l, h, x):\n if l > h:\n return -1\n mid = (l + h) // 2\n if arr[mid] == x:\n return mid\n elif arr[mid] > x:\n return findFirst(arr, l, mid - 1, x)\n return findFirst(arr, mid + 1, h, x)\n\n\ndef indexes(arr, x):\n n = len(arr)\n ind = findFirst(arr, 0, n - 1, x)\n if ind == -1:\n return [-1, -1]\n l = u = ind\n for i in range(ind + 1, n):\n if arr[i] == x:\n u = i\n else:\n break\n for i in range(ind - 1, -1, -1):\n if arr[i] == x:\n l = i\n else:\n break\n return [l, u]\n\n\nprint(indexes([1, 2, 5, 5, 5, 5, 5, 12, 45, 67], 5))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import os
import pytest
from selenium.webdriver.remote.webdriver import WebDriver
from selenium.webdriver import Firefox
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
def create_gecko_driver():
    """Build a Firefox WebDriver using the geckodriver binary in ~/bin.

    Assumes a POSIX environment where $HOME is set — TODO confirm.
    """
    driver_path = os.path.join(os.getenv('HOME'), 'bin', 'geckodriver')
    return Firefox(executable_path=driver_path)
@pytest.fixture
def driver(request):
    """Pytest fixture yielding a Firefox driver, quit at teardown."""
    browser = create_gecko_driver()
    # Register quit as a finalizer so the browser closes even on failure.
    request.addfinalizer(browser.quit)
    return browser
def test_successful_login(driver: WebDriver):  # type hint for IDE
    """Log in to the litecart admin panel and wait for the sidebar."""
    login_url = "http://localhost:8080/litecart/admin/login.php"
    driver.get(login_url)
    driver.find_element_by_name("username").send_keys('admin', Keys.TAB)
    driver.find_element_by_name("password").send_keys('admin', Keys.ENTER)
    # Successful login is signalled by the admin sidebar appearing.
    waiter = WebDriverWait(driver, 10)
    waiter.until(EC.presence_of_element_located((By.ID, 'sidebar')))
|
normal
|
{
"blob_id": "b6e28f29edd0c4659ab992b45861c4c31a57e7fd",
"index": 8920,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef create_gecko_driver():\n home_dir = os.getenv('HOME')\n return Firefox(executable_path=os.path.join(home_dir, 'bin', 'geckodriver')\n )\n\n\n@pytest.fixture\ndef driver(request):\n firefox = create_gecko_driver()\n request.addfinalizer(firefox.quit)\n return firefox\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef create_gecko_driver():\n home_dir = os.getenv('HOME')\n return Firefox(executable_path=os.path.join(home_dir, 'bin', 'geckodriver')\n )\n\n\n@pytest.fixture\ndef driver(request):\n firefox = create_gecko_driver()\n request.addfinalizer(firefox.quit)\n return firefox\n\n\ndef test_successful_login(driver: WebDriver):\n driver.get('http://localhost:8080/litecart/admin/login.php')\n driver.find_element_by_name('username').send_keys('admin', Keys.TAB)\n driver.find_element_by_name('password').send_keys('admin', Keys.ENTER)\n WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID,\n 'sidebar')))\n",
"step-4": "import os\nimport pytest\nfrom selenium.webdriver.remote.webdriver import WebDriver\nfrom selenium.webdriver import Firefox\nfrom selenium.webdriver.common.keys import Keys\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.common.by import By\n\n\ndef create_gecko_driver():\n home_dir = os.getenv('HOME')\n return Firefox(executable_path=os.path.join(home_dir, 'bin', 'geckodriver')\n )\n\n\n@pytest.fixture\ndef driver(request):\n firefox = create_gecko_driver()\n request.addfinalizer(firefox.quit)\n return firefox\n\n\ndef test_successful_login(driver: WebDriver):\n driver.get('http://localhost:8080/litecart/admin/login.php')\n driver.find_element_by_name('username').send_keys('admin', Keys.TAB)\n driver.find_element_by_name('password').send_keys('admin', Keys.ENTER)\n WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID,\n 'sidebar')))\n",
"step-5": "import os\nimport pytest\nfrom selenium.webdriver.remote.webdriver import WebDriver\nfrom selenium.webdriver import Firefox\nfrom selenium.webdriver.common.keys import Keys\nfrom selenium.webdriver.support.wait import WebDriverWait\nfrom selenium.webdriver.support import expected_conditions as EC\nfrom selenium.webdriver.common.by import By\n\n\ndef create_gecko_driver():\n home_dir = os.getenv('HOME')\n return Firefox(executable_path=os.path.join(home_dir, 'bin', 'geckodriver'))\n\n\n@pytest.fixture\ndef driver(request):\n firefox = create_gecko_driver()\n request.addfinalizer(firefox.quit)\n return firefox\n\n\ndef test_successful_login(driver: WebDriver): # type hint for IDE\n driver.get(\"http://localhost:8080/litecart/admin/login.php\")\n driver.find_element_by_name(\"username\").send_keys('admin', Keys.TAB)\n driver.find_element_by_name(\"password\").send_keys('admin', Keys.ENTER)\n WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.ID, 'sidebar')))\n",
"step-ids": [
0,
2,
3,
4,
5
]
}
|
[
0,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
class UserManager:
<|reserved_special_token_0|>
def validate_user(self, user_name, password):
user = self.find_user(user_name)
if not user:
raise ClientError()
self.validate_password(user, password)
return user
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def delete_user(self, user):
for player_id in user.player_ids:
self._player_delete(player_id)
delete_object(user)
dispatch('publish_edit', 'delete', user)
def delete_player(self, user, player_id):
if user:
self._player_delete(player_id)
user.player_ids.remove(player_id)
save_object(user)
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def create_user(self, user_name, password, email=''):
user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,
'email': email, 'password': make_hash(password), 'notifies': [
'friendSound', 'friendDesktop']}
user = create_object(User, user_raw)
dispatch('publish_edit', 'create', user)
return user
<|reserved_special_token_0|>
def player_exists(self, player_id):
return object_exists(Player.dbo_key_type, player_id)
def _user_connect(self, user, client_data):
client_data.update({'user_id': user.dbo_id, 'player_ids': user.
player_ids, 'displays': user.displays, 'password_reset': user.
password_reset, 'notifies': user.notifies})
<|reserved_special_token_0|>
def login_player(self, player):
dispatch('player_baptise', player)
player.last_login = int(time.time())
if not player.created:
player.created = player.last_login
player.start()
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def name_to_id(self, player_name):
return player_name.lower()
def player_cleanup(self, player_id):
delete_index('ix:player:user', player_id)
for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):
dbo = load_object(dbo_id)
if dbo and dbo.owner_id == player_id:
dbo.change_owner()
save_object(dbo)
dispatch('publish_update', 'update', dbo)
dispatch('player_deleted', player_id)
def _player_delete(self, player_id):
player = load_object(player_id, Player)
if player:
dispatch('publish_edit', 'delete', player)
delete_object(player)
else:
warn('Attempting to delete player {} who does not exist.'.
format(player_id))
self.player_cleanup(player_id)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserManager:
<|reserved_special_token_0|>
def validate_user(self, user_name, password):
user = self.find_user(user_name)
if not user:
raise ClientError()
self.validate_password(user, password)
return user
<|reserved_special_token_0|>
def find_user(self, user_name):
user_name = user_name.lower()
user_id = get_index('ix:user:user_name', user_name)
if user_id:
return load_object(user_id, User)
player = load_object(user_name, Player)
if player:
return load_object(player.user_id, User)
return None
def delete_user(self, user):
for player_id in user.player_ids:
self._player_delete(player_id)
delete_object(user)
dispatch('publish_edit', 'delete', user)
def delete_player(self, user, player_id):
if user:
self._player_delete(player_id)
user.player_ids.remove(player_id)
save_object(user)
def attach_player(self, user, player):
user.player_ids.append(player.dbo_id)
set_index('ix:player:user', player.dbo_id, user.dbo_id)
dispatch('player_create', player, user)
player.user_id = user.dbo_id
save_object(player)
save_object(user)
return player
def find_player(self, player_id):
return load_object(player_id, Player)
def create_user(self, user_name, password, email=''):
user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,
'email': email, 'password': make_hash(password), 'notifies': [
'friendSound', 'friendDesktop']}
user = create_object(User, user_raw)
dispatch('publish_edit', 'create', user)
return user
def check_name(self, account_name, user):
account_name = account_name.lower()
if user:
if account_name == user.user_name.lower():
return
for player_id in user.player_ids:
if account_name == player_id.lower():
return
if self.player_exists(account_name) or get_index('ix:user:user_name',
account_name):
raise DataError('InUse: {}'.format(account_name))
def player_exists(self, player_id):
return object_exists(Player.dbo_key_type, player_id)
def _user_connect(self, user, client_data):
client_data.update({'user_id': user.dbo_id, 'player_ids': user.
player_ids, 'displays': user.displays, 'password_reset': user.
password_reset, 'notifies': user.notifies})
<|reserved_special_token_0|>
def login_player(self, player):
dispatch('player_baptise', player)
player.last_login = int(time.time())
if not player.created:
player.created = player.last_login
player.start()
def logout_player(self, player):
player.age += player.last_logout - player.last_login
player.detach()
save_object(player)
evict_object(player)
def id_to_name(self, player_id):
try:
return player_id.capitalize()
except AttributeError:
pass
def name_to_id(self, player_name):
return player_name.lower()
def player_cleanup(self, player_id):
delete_index('ix:player:user', player_id)
for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):
dbo = load_object(dbo_id)
if dbo and dbo.owner_id == player_id:
dbo.change_owner()
save_object(dbo)
dispatch('publish_update', 'update', dbo)
dispatch('player_deleted', player_id)
def _player_delete(self, player_id):
player = load_object(player_id, Player)
if player:
dispatch('publish_edit', 'delete', player)
delete_object(player)
else:
warn('Attempting to delete player {} who does not exist.'.
format(player_id))
self.player_cleanup(player_id)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class UserManager:
def _post_init(self):
register('user_connect', self._user_connect)
register('player_connect', self._player_connect)
def validate_user(self, user_name, password):
user = self.find_user(user_name)
if not user:
raise ClientError()
self.validate_password(user, password)
return user
def validate_password(self, user, password):
if check_password(user.password, password):
return
salt, old_password = user.password.split('$')
if check_password(b64decode(bytes(old_password, 'utf-8')), password,
bytes(salt, 'utf-8')):
warn('Using old password for account {}', user.user_name)
user.password_reset = True
save_object(user)
else:
raise ClientError('invalid_password')
def find_user(self, user_name):
user_name = user_name.lower()
user_id = get_index('ix:user:user_name', user_name)
if user_id:
return load_object(user_id, User)
player = load_object(user_name, Player)
if player:
return load_object(player.user_id, User)
return None
def delete_user(self, user):
for player_id in user.player_ids:
self._player_delete(player_id)
delete_object(user)
dispatch('publish_edit', 'delete', user)
def delete_player(self, user, player_id):
if user:
self._player_delete(player_id)
user.player_ids.remove(player_id)
save_object(user)
def attach_player(self, user, player):
user.player_ids.append(player.dbo_id)
set_index('ix:player:user', player.dbo_id, user.dbo_id)
dispatch('player_create', player, user)
player.user_id = user.dbo_id
save_object(player)
save_object(user)
return player
def find_player(self, player_id):
return load_object(player_id, Player)
def create_user(self, user_name, password, email=''):
user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,
'email': email, 'password': make_hash(password), 'notifies': [
'friendSound', 'friendDesktop']}
user = create_object(User, user_raw)
dispatch('publish_edit', 'create', user)
return user
def check_name(self, account_name, user):
account_name = account_name.lower()
if user:
if account_name == user.user_name.lower():
return
for player_id in user.player_ids:
if account_name == player_id.lower():
return
if self.player_exists(account_name) or get_index('ix:user:user_name',
account_name):
raise DataError('InUse: {}'.format(account_name))
def player_exists(self, player_id):
return object_exists(Player.dbo_key_type, player_id)
def _user_connect(self, user, client_data):
client_data.update({'user_id': user.dbo_id, 'player_ids': user.
player_ids, 'displays': user.displays, 'password_reset': user.
password_reset, 'notifies': user.notifies})
<|reserved_special_token_0|>
def login_player(self, player):
dispatch('player_baptise', player)
player.last_login = int(time.time())
if not player.created:
player.created = player.last_login
player.start()
def logout_player(self, player):
player.age += player.last_logout - player.last_login
player.detach()
save_object(player)
evict_object(player)
def id_to_name(self, player_id):
try:
return player_id.capitalize()
except AttributeError:
pass
def name_to_id(self, player_name):
return player_name.lower()
def player_cleanup(self, player_id):
delete_index('ix:player:user', player_id)
for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):
dbo = load_object(dbo_id)
if dbo and dbo.owner_id == player_id:
dbo.change_owner()
save_object(dbo)
dispatch('publish_update', 'update', dbo)
dispatch('player_deleted', player_id)
def _player_delete(self, player_id):
player = load_object(player_id, Player)
if player:
dispatch('publish_edit', 'delete', player)
delete_object(player)
else:
warn('Attempting to delete player {} who does not exist.'.
format(player_id))
self.player_cleanup(player_id)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class User(KeyDBO):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
class UserManager:
    """Account/player lifecycle service.

    Validates logins, creates and deletes accounts and players, and
    fills session connect payloads. Datastore and event helpers
    (save_object, dispatch, register, ...) are injected at module
    level — presumably by an m_requires-style mechanism; confirm
    against the module header.
    """
    def _post_init(self):
        # Subscribe to session connect events dispatched elsewhere.
        register('user_connect', self._user_connect)
        register('player_connect', self._player_connect)
    def validate_user(self, user_name, password):
        """Return the User for user_name/password or raise ClientError."""
        user = self.find_user(user_name)
        if not user:
            raise ClientError()
        self.validate_password(user, password)
        return user
    def validate_password(self, user, password):
        """Check password against the stored hash.

        Falls back to the legacy 'salt$base64' format; a legacy match
        flags the account for a password reset and persists it.
        Raises ClientError('invalid_password') on mismatch.
        """
        if check_password(user.password, password):
            return
        salt, old_password = user.password.split('$')
        if check_password(b64decode(bytes(old_password, 'utf-8')), password,
            bytes(salt, 'utf-8')):
            warn('Using old password for account {}', user.user_name)
            user.password_reset = True
            save_object(user)
        else:
            raise ClientError('invalid_password')
    def find_user(self, user_name):
        """Find a user by account name, or by one of its player names."""
        user_name = user_name.lower()
        user_id = get_index('ix:user:user_name', user_name)
        if user_id:
            return load_object(user_id, User)
        player = load_object(user_name, Player)
        if player:
            return load_object(player.user_id, User)
        return None
    def delete_user(self, user):
        """Delete an account together with every attached player."""
        for player_id in user.player_ids:
            self._player_delete(player_id)
        delete_object(user)
        dispatch('publish_edit', 'delete', user)
    def delete_player(self, user, player_id):
        """Delete one player and detach it from its owning account."""
        if user:
            self._player_delete(player_id)
            user.player_ids.remove(player_id)
            save_object(user)
    def attach_player(self, user, player):
        """Link a new player to an account and persist both objects."""
        user.player_ids.append(player.dbo_id)
        set_index('ix:player:user', player.dbo_id, user.dbo_id)
        dispatch('player_create', player, user)
        player.user_id = user.dbo_id
        save_object(player)
        save_object(user)
        return player
    def find_player(self, player_id):
        """Load a Player by id, or None if it does not exist."""
        return load_object(player_id, Player)
    def create_user(self, user_name, password, email=''):
        """Create and persist a new account with default notifies."""
        user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,
            'email': email, 'password': make_hash(password), 'notifies': [
            'friendSound', 'friendDesktop']}
        user = create_object(User, user_raw)
        dispatch('publish_edit', 'create', user)
        return user
    def check_name(self, account_name, user):
        """Raise DataError if account_name collides with an existing
        account or player name (names already owned by user pass)."""
        account_name = account_name.lower()
        if user:
            if account_name == user.user_name.lower():
                return
            for player_id in user.player_ids:
                if account_name == player_id.lower():
                    return
        if self.player_exists(account_name) or get_index('ix:user:user_name',
            account_name):
            raise DataError('InUse: {}'.format(account_name))
    def player_exists(self, player_id):
        """True when a player object with this id is stored."""
        return object_exists(Player.dbo_key_type, player_id)
    def _user_connect(self, user, client_data):
        # Populate the client handshake payload with account fields.
        client_data.update({'user_id': user.dbo_id, 'player_ids': user.
            player_ids, 'displays': user.displays, 'password_reset': user.
            password_reset, 'notifies': user.notifies})
    def _player_connect(self, player, client_data):
        # Player-specific payload; imm_level only sent when non-zero.
        client_data['name'] = player.name
        if player.imm_level:
            client_data['imm_level'] = player.imm_level
    def login_player(self, player):
        """Baptise the session, stamp login time, and start the player."""
        dispatch('player_baptise', player)
        player.last_login = int(time.time())
        if not player.created:
            # First login doubles as the creation timestamp.
            player.created = player.last_login
        player.start()
    def logout_player(self, player):
        """Accumulate session play time, persist, and evict from cache."""
        player.age += player.last_logout - player.last_login
        player.detach()
        save_object(player)
        evict_object(player)
    def id_to_name(self, player_id):
        """Display form of a player id; None when it lacks .capitalize()."""
        try:
            return player_id.capitalize()
        except AttributeError:
            pass
    def name_to_id(self, player_name):
        """Canonical (lower-case) id for a player name."""
        return player_name.lower()
    def player_cleanup(self, player_id):
        """Drop indexes and reassign objects owned by a deleted player."""
        delete_index('ix:player:user', player_id)
        for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):
            dbo = load_object(dbo_id)
            if dbo and dbo.owner_id == player_id:
                dbo.change_owner()
                save_object(dbo)
                dispatch('publish_update', 'update', dbo)
        dispatch('player_deleted', player_id)
    def _player_delete(self, player_id):
        # Delete the stored player (if any); cleanup always runs.
        player = load_object(player_id, Player)
        if player:
            dispatch('publish_edit', 'delete', player)
            delete_object(player)
        else:
            warn('Attempting to delete player {} who does not exist.'.
                format(player_id))
        self.player_cleanup(player_id)
<|reserved_special_token_1|>
from base64 import b64decode
import time
from lampost.context.resource import m_requires
from lampost.datastore.dbo import KeyDBO
from lampost.datastore.dbofield import DBOField
from lampost.datastore.exceptions import DataError
from lampost.model.player import Player
from lampost.util.encrypt import make_hash, check_password
from lampost.util.lputil import ClientError
m_requires(__name__, 'log', 'perm', 'datastore', 'dispatcher')
class User(KeyDBO):
    """Account record persisted in the datastore.

    An account owns zero or more players; ``dbo_indexes`` allows
    lookup by user_name or email.
    """
    dbo_key_type = "user"
    dbo_set_key = "users"
    dbo_indexes = "user_name", "email"
    user_name = DBOField('')
    password = DBOField()  # hashed via make_hash(); never stored in plain text
    password_reset = DBOField(False)  # set when a legacy-format hash was accepted
    email = DBOField('')
    notes = DBOField('')
    player_ids = DBOField([])  # dbo_ids of the players attached to this account
    displays = DBOField({})
    notifies = DBOField([])  # notification flags, e.g. 'friendSound'
    @property
    def edit_dto(self):
        """Edit DTO with the password hash blanked out."""
        dto = super().edit_dto
        dto['password'] = ''
        return dto
    @property
    def imm_level(self):
        """Highest immortal level among this account's players (0 if none)."""
        if self.player_ids:
            return max([perm.immortals.get(player_id, 0) for player_id in self.player_ids])
        return 0
class UserManager():
    """Account/player lifecycle service.

    Validates logins, creates and deletes accounts and players, and
    fills session connect payloads. Datastore/event helpers
    (save_object, dispatch, register, ...) are injected at module
    level by m_requires.
    """
    def _post_init(self):
        # Subscribe to session connect events dispatched elsewhere.
        register("user_connect", self._user_connect)
        register("player_connect", self._player_connect)
    def validate_user(self, user_name, password):
        """Return the User for user_name/password or raise ClientError."""
        user = self.find_user(user_name)
        if not user:
            raise ClientError()
        self.validate_password(user, password)
        return user
    def validate_password(self, user, password):
        """Check password against the stored hash.

        Falls back to the legacy 'salt$base64' format; a legacy match
        flags the account for a password reset and persists it.
        Raises ClientError("invalid_password") on mismatch.
        """
        if check_password(user.password, password):
            return
        salt, old_password = user.password.split('$')
        if check_password(b64decode(bytes(old_password, 'utf-8')), password, bytes(salt, 'utf-8')):
            warn("Using old password for account {}", user.user_name)
            user.password_reset = True
            save_object(user)
        else:
            raise ClientError("invalid_password")
    def find_user(self, user_name):
        """Find a user by account name, or by one of its player names."""
        user_name = user_name.lower()
        user_id = get_index("ix:user:user_name", user_name)
        if user_id:
            return load_object(user_id, User)
        player = load_object(user_name, Player)
        if player:
            return load_object(player.user_id, User)
        return None
    def delete_user(self, user):
        """Delete an account together with every attached player."""
        for player_id in user.player_ids:
            self._player_delete(player_id)
        delete_object(user)
        dispatch('publish_edit', 'delete', user)
    def delete_player(self, user, player_id):
        """Delete one player and detach it from its owning account."""
        if user:
            self._player_delete(player_id)
            user.player_ids.remove(player_id)
            save_object(user)
    def attach_player(self, user, player):
        """Link a new player to an account and persist both objects."""
        user.player_ids.append(player.dbo_id)
        set_index('ix:player:user', player.dbo_id, user.dbo_id)
        dispatch('player_create', player, user)
        player.user_id = user.dbo_id
        save_object(player)
        save_object(user)
        return player
    def find_player(self, player_id):
        """Load a Player by id, or None if it does not exist."""
        return load_object(player_id, Player)
    def create_user(self, user_name, password, email=""):
        """Create and persist a new account with default notifies."""
        user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,
                    'email': email, 'password': make_hash(password),
                    'notifies': ['friendSound', 'friendDesktop']}
        user = create_object(User, user_raw)
        dispatch('publish_edit', 'create', user)
        return user
    def check_name(self, account_name, user):
        """Raise DataError if account_name collides with an existing
        account or player name (names already owned by user pass)."""
        account_name = account_name.lower()
        if user:
            if account_name == user.user_name.lower():
                return
            for player_id in user.player_ids:
                if account_name == player_id.lower():
                    return
        if self.player_exists(account_name) or get_index("ix:user:user_name", account_name):
            raise DataError("InUse: {}".format(account_name))
    def player_exists(self, player_id):
        """True when a player object with this id is stored."""
        return object_exists(Player.dbo_key_type, player_id)
    def _user_connect(self, user, client_data):
        # Populate the client handshake payload with account fields.
        client_data.update({'user_id': user.dbo_id, 'player_ids': user.player_ids, 'displays': user.displays,
                            'password_reset': user.password_reset, 'notifies': user.notifies})
    def _player_connect(self, player, client_data):
        # Player-specific payload; imm_level only sent when non-zero.
        client_data['name'] = player.name
        if player.imm_level:
            client_data['imm_level'] = player.imm_level
    def login_player(self, player):
        """Baptise the session, stamp login time, and start the player."""
        dispatch('player_baptise', player)
        player.last_login = int(time.time())
        if not player.created:
            # First login doubles as the creation timestamp.
            player.created = player.last_login
        player.start()
    def logout_player(self, player):
        """Accumulate session play time, persist, and evict from cache."""
        player.age += player.last_logout - player.last_login
        player.detach()
        save_object(player)
        evict_object(player)
    def id_to_name(self, player_id):
        """Display form of a player id; None when it lacks .capitalize()."""
        try:
            return player_id.capitalize()
        except AttributeError:
            pass
    def name_to_id(self, player_name):
        """Canonical (lower-case) id for a player name."""
        return player_name.lower()
    def player_cleanup(self, player_id):
        """Drop indexes and reassign objects owned by a deleted player."""
        delete_index('ix:player:user', player_id)
        for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):
            dbo = load_object(dbo_id)
            if dbo and dbo.owner_id == player_id:
                dbo.change_owner()
                save_object(dbo)
                dispatch('publish_update', 'update', dbo)
        dispatch('player_deleted', player_id)
    def _player_delete(self, player_id):
        # Delete the stored player (if any); cleanup always runs.
        player = load_object(player_id, Player)
        if player:
            dispatch('publish_edit', 'delete', player)
            delete_object(player)
        else:
            warn("Attempting to delete player {} who does not exist.".format(player_id))
        self.player_cleanup(player_id)
|
flexible
|
{
"blob_id": "210199ed217db0d7a05e280f20e33496c0795f06",
"index": 9472,
"step-1": "<mask token>\n\n\nclass UserManager:\n <mask token>\n\n def validate_user(self, user_name, password):\n user = self.find_user(user_name)\n if not user:\n raise ClientError()\n self.validate_password(user, password)\n return user\n <mask token>\n <mask token>\n\n def delete_user(self, user):\n for player_id in user.player_ids:\n self._player_delete(player_id)\n delete_object(user)\n dispatch('publish_edit', 'delete', user)\n\n def delete_player(self, user, player_id):\n if user:\n self._player_delete(player_id)\n user.player_ids.remove(player_id)\n save_object(user)\n <mask token>\n <mask token>\n\n def create_user(self, user_name, password, email=''):\n user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,\n 'email': email, 'password': make_hash(password), 'notifies': [\n 'friendSound', 'friendDesktop']}\n user = create_object(User, user_raw)\n dispatch('publish_edit', 'create', user)\n return user\n <mask token>\n\n def player_exists(self, player_id):\n return object_exists(Player.dbo_key_type, player_id)\n\n def _user_connect(self, user, client_data):\n client_data.update({'user_id': user.dbo_id, 'player_ids': user.\n player_ids, 'displays': user.displays, 'password_reset': user.\n password_reset, 'notifies': user.notifies})\n <mask token>\n\n def login_player(self, player):\n dispatch('player_baptise', player)\n player.last_login = int(time.time())\n if not player.created:\n player.created = player.last_login\n player.start()\n <mask token>\n <mask token>\n\n def name_to_id(self, player_name):\n return player_name.lower()\n\n def player_cleanup(self, player_id):\n delete_index('ix:player:user', player_id)\n for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):\n dbo = load_object(dbo_id)\n if dbo and dbo.owner_id == player_id:\n dbo.change_owner()\n save_object(dbo)\n dispatch('publish_update', 'update', dbo)\n dispatch('player_deleted', player_id)\n\n def _player_delete(self, player_id):\n player = load_object(player_id, 
Player)\n if player:\n dispatch('publish_edit', 'delete', player)\n delete_object(player)\n else:\n warn('Attempting to delete player {} who does not exist.'.\n format(player_id))\n self.player_cleanup(player_id)\n",
"step-2": "<mask token>\n\n\nclass UserManager:\n <mask token>\n\n def validate_user(self, user_name, password):\n user = self.find_user(user_name)\n if not user:\n raise ClientError()\n self.validate_password(user, password)\n return user\n <mask token>\n\n def find_user(self, user_name):\n user_name = user_name.lower()\n user_id = get_index('ix:user:user_name', user_name)\n if user_id:\n return load_object(user_id, User)\n player = load_object(user_name, Player)\n if player:\n return load_object(player.user_id, User)\n return None\n\n def delete_user(self, user):\n for player_id in user.player_ids:\n self._player_delete(player_id)\n delete_object(user)\n dispatch('publish_edit', 'delete', user)\n\n def delete_player(self, user, player_id):\n if user:\n self._player_delete(player_id)\n user.player_ids.remove(player_id)\n save_object(user)\n\n def attach_player(self, user, player):\n user.player_ids.append(player.dbo_id)\n set_index('ix:player:user', player.dbo_id, user.dbo_id)\n dispatch('player_create', player, user)\n player.user_id = user.dbo_id\n save_object(player)\n save_object(user)\n return player\n\n def find_player(self, player_id):\n return load_object(player_id, Player)\n\n def create_user(self, user_name, password, email=''):\n user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,\n 'email': email, 'password': make_hash(password), 'notifies': [\n 'friendSound', 'friendDesktop']}\n user = create_object(User, user_raw)\n dispatch('publish_edit', 'create', user)\n return user\n\n def check_name(self, account_name, user):\n account_name = account_name.lower()\n if user:\n if account_name == user.user_name.lower():\n return\n for player_id in user.player_ids:\n if account_name == player_id.lower():\n return\n if self.player_exists(account_name) or get_index('ix:user:user_name',\n account_name):\n raise DataError('InUse: {}'.format(account_name))\n\n def player_exists(self, player_id):\n return object_exists(Player.dbo_key_type, 
player_id)\n\n def _user_connect(self, user, client_data):\n client_data.update({'user_id': user.dbo_id, 'player_ids': user.\n player_ids, 'displays': user.displays, 'password_reset': user.\n password_reset, 'notifies': user.notifies})\n <mask token>\n\n def login_player(self, player):\n dispatch('player_baptise', player)\n player.last_login = int(time.time())\n if not player.created:\n player.created = player.last_login\n player.start()\n\n def logout_player(self, player):\n player.age += player.last_logout - player.last_login\n player.detach()\n save_object(player)\n evict_object(player)\n\n def id_to_name(self, player_id):\n try:\n return player_id.capitalize()\n except AttributeError:\n pass\n\n def name_to_id(self, player_name):\n return player_name.lower()\n\n def player_cleanup(self, player_id):\n delete_index('ix:player:user', player_id)\n for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):\n dbo = load_object(dbo_id)\n if dbo and dbo.owner_id == player_id:\n dbo.change_owner()\n save_object(dbo)\n dispatch('publish_update', 'update', dbo)\n dispatch('player_deleted', player_id)\n\n def _player_delete(self, player_id):\n player = load_object(player_id, Player)\n if player:\n dispatch('publish_edit', 'delete', player)\n delete_object(player)\n else:\n warn('Attempting to delete player {} who does not exist.'.\n format(player_id))\n self.player_cleanup(player_id)\n",
"step-3": "<mask token>\n\n\nclass UserManager:\n\n def _post_init(self):\n register('user_connect', self._user_connect)\n register('player_connect', self._player_connect)\n\n def validate_user(self, user_name, password):\n user = self.find_user(user_name)\n if not user:\n raise ClientError()\n self.validate_password(user, password)\n return user\n\n def validate_password(self, user, password):\n if check_password(user.password, password):\n return\n salt, old_password = user.password.split('$')\n if check_password(b64decode(bytes(old_password, 'utf-8')), password,\n bytes(salt, 'utf-8')):\n warn('Using old password for account {}', user.user_name)\n user.password_reset = True\n save_object(user)\n else:\n raise ClientError('invalid_password')\n\n def find_user(self, user_name):\n user_name = user_name.lower()\n user_id = get_index('ix:user:user_name', user_name)\n if user_id:\n return load_object(user_id, User)\n player = load_object(user_name, Player)\n if player:\n return load_object(player.user_id, User)\n return None\n\n def delete_user(self, user):\n for player_id in user.player_ids:\n self._player_delete(player_id)\n delete_object(user)\n dispatch('publish_edit', 'delete', user)\n\n def delete_player(self, user, player_id):\n if user:\n self._player_delete(player_id)\n user.player_ids.remove(player_id)\n save_object(user)\n\n def attach_player(self, user, player):\n user.player_ids.append(player.dbo_id)\n set_index('ix:player:user', player.dbo_id, user.dbo_id)\n dispatch('player_create', player, user)\n player.user_id = user.dbo_id\n save_object(player)\n save_object(user)\n return player\n\n def find_player(self, player_id):\n return load_object(player_id, Player)\n\n def create_user(self, user_name, password, email=''):\n user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,\n 'email': email, 'password': make_hash(password), 'notifies': [\n 'friendSound', 'friendDesktop']}\n user = create_object(User, user_raw)\n dispatch('publish_edit', 
'create', user)\n return user\n\n def check_name(self, account_name, user):\n account_name = account_name.lower()\n if user:\n if account_name == user.user_name.lower():\n return\n for player_id in user.player_ids:\n if account_name == player_id.lower():\n return\n if self.player_exists(account_name) or get_index('ix:user:user_name',\n account_name):\n raise DataError('InUse: {}'.format(account_name))\n\n def player_exists(self, player_id):\n return object_exists(Player.dbo_key_type, player_id)\n\n def _user_connect(self, user, client_data):\n client_data.update({'user_id': user.dbo_id, 'player_ids': user.\n player_ids, 'displays': user.displays, 'password_reset': user.\n password_reset, 'notifies': user.notifies})\n <mask token>\n\n def login_player(self, player):\n dispatch('player_baptise', player)\n player.last_login = int(time.time())\n if not player.created:\n player.created = player.last_login\n player.start()\n\n def logout_player(self, player):\n player.age += player.last_logout - player.last_login\n player.detach()\n save_object(player)\n evict_object(player)\n\n def id_to_name(self, player_id):\n try:\n return player_id.capitalize()\n except AttributeError:\n pass\n\n def name_to_id(self, player_name):\n return player_name.lower()\n\n def player_cleanup(self, player_id):\n delete_index('ix:player:user', player_id)\n for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):\n dbo = load_object(dbo_id)\n if dbo and dbo.owner_id == player_id:\n dbo.change_owner()\n save_object(dbo)\n dispatch('publish_update', 'update', dbo)\n dispatch('player_deleted', player_id)\n\n def _player_delete(self, player_id):\n player = load_object(player_id, Player)\n if player:\n dispatch('publish_edit', 'delete', player)\n delete_object(player)\n else:\n warn('Attempting to delete player {} who does not exist.'.\n format(player_id))\n self.player_cleanup(player_id)\n",
"step-4": "<mask token>\n\n\nclass User(KeyDBO):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n\nclass UserManager:\n\n def _post_init(self):\n register('user_connect', self._user_connect)\n register('player_connect', self._player_connect)\n\n def validate_user(self, user_name, password):\n user = self.find_user(user_name)\n if not user:\n raise ClientError()\n self.validate_password(user, password)\n return user\n\n def validate_password(self, user, password):\n if check_password(user.password, password):\n return\n salt, old_password = user.password.split('$')\n if check_password(b64decode(bytes(old_password, 'utf-8')), password,\n bytes(salt, 'utf-8')):\n warn('Using old password for account {}', user.user_name)\n user.password_reset = True\n save_object(user)\n else:\n raise ClientError('invalid_password')\n\n def find_user(self, user_name):\n user_name = user_name.lower()\n user_id = get_index('ix:user:user_name', user_name)\n if user_id:\n return load_object(user_id, User)\n player = load_object(user_name, Player)\n if player:\n return load_object(player.user_id, User)\n return None\n\n def delete_user(self, user):\n for player_id in user.player_ids:\n self._player_delete(player_id)\n delete_object(user)\n dispatch('publish_edit', 'delete', user)\n\n def delete_player(self, user, player_id):\n if user:\n self._player_delete(player_id)\n user.player_ids.remove(player_id)\n save_object(user)\n\n def attach_player(self, user, player):\n user.player_ids.append(player.dbo_id)\n set_index('ix:player:user', player.dbo_id, user.dbo_id)\n dispatch('player_create', player, user)\n player.user_id = user.dbo_id\n save_object(player)\n save_object(user)\n return player\n\n def find_player(self, player_id):\n return load_object(player_id, Player)\n\n def create_user(self, user_name, password, email=''):\n user_raw = 
{'dbo_id': db_counter('user_id'), 'user_name': user_name,\n 'email': email, 'password': make_hash(password), 'notifies': [\n 'friendSound', 'friendDesktop']}\n user = create_object(User, user_raw)\n dispatch('publish_edit', 'create', user)\n return user\n\n def check_name(self, account_name, user):\n account_name = account_name.lower()\n if user:\n if account_name == user.user_name.lower():\n return\n for player_id in user.player_ids:\n if account_name == player_id.lower():\n return\n if self.player_exists(account_name) or get_index('ix:user:user_name',\n account_name):\n raise DataError('InUse: {}'.format(account_name))\n\n def player_exists(self, player_id):\n return object_exists(Player.dbo_key_type, player_id)\n\n def _user_connect(self, user, client_data):\n client_data.update({'user_id': user.dbo_id, 'player_ids': user.\n player_ids, 'displays': user.displays, 'password_reset': user.\n password_reset, 'notifies': user.notifies})\n\n def _player_connect(self, player, client_data):\n client_data['name'] = player.name\n if player.imm_level:\n client_data['imm_level'] = player.imm_level\n\n def login_player(self, player):\n dispatch('player_baptise', player)\n player.last_login = int(time.time())\n if not player.created:\n player.created = player.last_login\n player.start()\n\n def logout_player(self, player):\n player.age += player.last_logout - player.last_login\n player.detach()\n save_object(player)\n evict_object(player)\n\n def id_to_name(self, player_id):\n try:\n return player_id.capitalize()\n except AttributeError:\n pass\n\n def name_to_id(self, player_name):\n return player_name.lower()\n\n def player_cleanup(self, player_id):\n delete_index('ix:player:user', player_id)\n for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):\n dbo = load_object(dbo_id)\n if dbo and dbo.owner_id == player_id:\n dbo.change_owner()\n save_object(dbo)\n dispatch('publish_update', 'update', dbo)\n dispatch('player_deleted', player_id)\n\n def _player_delete(self, 
player_id):\n player = load_object(player_id, Player)\n if player:\n dispatch('publish_edit', 'delete', player)\n delete_object(player)\n else:\n warn('Attempting to delete player {} who does not exist.'.\n format(player_id))\n self.player_cleanup(player_id)\n",
"step-5": "from base64 import b64decode\nimport time\n\nfrom lampost.context.resource import m_requires\nfrom lampost.datastore.dbo import KeyDBO\nfrom lampost.datastore.dbofield import DBOField\nfrom lampost.datastore.exceptions import DataError\nfrom lampost.model.player import Player\nfrom lampost.util.encrypt import make_hash, check_password\nfrom lampost.util.lputil import ClientError\n\n\nm_requires(__name__, 'log', 'perm', 'datastore', 'dispatcher')\n\n\nclass User(KeyDBO):\n dbo_key_type = \"user\"\n dbo_set_key = \"users\"\n dbo_indexes = \"user_name\", \"email\"\n\n user_name = DBOField('')\n password = DBOField()\n password_reset = DBOField(False)\n email = DBOField('')\n notes = DBOField('')\n\n player_ids = DBOField([])\n displays = DBOField({})\n notifies = DBOField([])\n\n @property\n def edit_dto(self):\n dto = super().edit_dto\n dto['password'] = ''\n return dto\n\n @property\n def imm_level(self):\n if self.player_ids:\n return max([perm.immortals.get(player_id, 0) for player_id in self.player_ids])\n return 0\n\n\nclass UserManager():\n def _post_init(self):\n register(\"user_connect\", self._user_connect)\n register(\"player_connect\", self._player_connect)\n\n def validate_user(self, user_name, password):\n user = self.find_user(user_name)\n if not user:\n raise ClientError()\n self.validate_password(user, password)\n return user\n\n def validate_password(self, user, password):\n if check_password(user.password, password):\n return\n salt, old_password = user.password.split('$')\n if check_password(b64decode(bytes(old_password, 'utf-8')), password, bytes(salt, 'utf-8')):\n warn(\"Using old password for account {}\", user.user_name)\n user.password_reset = True\n save_object(user)\n else:\n raise ClientError(\"invalid_password\")\n\n def find_user(self, user_name):\n user_name = user_name.lower()\n user_id = get_index(\"ix:user:user_name\", user_name)\n if user_id:\n return load_object(user_id, User)\n player = load_object(user_name, Player)\n 
if player:\n return load_object(player.user_id, User)\n return None\n\n def delete_user(self, user):\n for player_id in user.player_ids:\n self._player_delete(player_id)\n delete_object(user)\n dispatch('publish_edit', 'delete', user)\n\n def delete_player(self, user, player_id):\n if user:\n self._player_delete(player_id)\n user.player_ids.remove(player_id)\n save_object(user)\n\n def attach_player(self, user, player):\n\n user.player_ids.append(player.dbo_id)\n set_index('ix:player:user', player.dbo_id, user.dbo_id)\n dispatch('player_create', player, user)\n player.user_id = user.dbo_id\n save_object(player)\n save_object(user)\n return player\n\n def find_player(self, player_id):\n return load_object(player_id, Player)\n\n def create_user(self, user_name, password, email=\"\"):\n user_raw = {'dbo_id': db_counter('user_id'), 'user_name': user_name,\n 'email': email, 'password': make_hash(password),\n 'notifies': ['friendSound', 'friendDesktop']}\n user = create_object(User, user_raw)\n dispatch('publish_edit', 'create', user)\n return user\n\n def check_name(self, account_name, user):\n account_name = account_name.lower()\n if user:\n if account_name == user.user_name.lower():\n return\n for player_id in user.player_ids:\n if account_name == player_id.lower():\n return\n if self.player_exists(account_name) or get_index(\"ix:user:user_name\", account_name):\n raise DataError(\"InUse: {}\".format(account_name))\n\n def player_exists(self, player_id):\n return object_exists(Player.dbo_key_type, player_id)\n\n def _user_connect(self, user, client_data):\n client_data.update({'user_id': user.dbo_id, 'player_ids': user.player_ids, 'displays': user.displays,\n 'password_reset': user.password_reset, 'notifies': user.notifies})\n\n def _player_connect(self, player, client_data):\n client_data['name'] = player.name\n if player.imm_level:\n client_data['imm_level'] = player.imm_level\n\n def login_player(self, player):\n dispatch('player_baptise', player)\n 
player.last_login = int(time.time())\n if not player.created:\n player.created = player.last_login\n player.start()\n\n def logout_player(self, player):\n player.age += player.last_logout - player.last_login\n player.detach()\n save_object(player)\n evict_object(player)\n\n def id_to_name(self, player_id):\n try:\n return player_id.capitalize()\n except AttributeError:\n pass\n\n def name_to_id(self, player_name):\n return player_name.lower()\n\n def player_cleanup(self, player_id):\n delete_index('ix:player:user', player_id)\n for dbo_id in fetch_set_keys('owned:{}'.format(player_id)):\n dbo = load_object(dbo_id)\n if dbo and dbo.owner_id == player_id:\n dbo.change_owner()\n save_object(dbo)\n dispatch('publish_update', 'update', dbo)\n dispatch('player_deleted', player_id)\n\n def _player_delete(self, player_id):\n player = load_object(player_id, Player)\n if player:\n dispatch('publish_edit', 'delete', player)\n delete_object(player)\n else:\n warn(\"Attempting to delete player {} who does not exist.\".format(player_id))\n self.player_cleanup(player_id)\n\n",
"step-ids": [
11,
17,
19,
21,
27
]
}
|
[
11,
17,
19,
21,
27
] |
from os.path import basename
from .FileInfo import FileInfo
class mrk_file(FileInfo):
"""
.mrk specific file container.
"""
def __init__(self, id_=None, file=None, parent=None):
super(mrk_file, self).__init__(id_, file, parent)
self._type = '.mrk'
#region class methods
def __getstate__(self):
data = super(mrk_file, self).__getstate__()
return data
def __setstate__(self, state):
super(mrk_file, self).__setstate__(state)
def __repr__(self):
# Have a separate representation for .mrk files as this is shown in the
# info for each con file under the list of associated mrk's.
return str(basename(self.file))
|
normal
|
{
"blob_id": "8e9aec7d3653137a05f94e4041d28f3423122751",
"index": 3990,
"step-1": "<mask token>\n\n\nclass mrk_file(FileInfo):\n <mask token>\n\n def __init__(self, id_=None, file=None, parent=None):\n super(mrk_file, self).__init__(id_, file, parent)\n self._type = '.mrk'\n <mask token>\n\n def __setstate__(self, state):\n super(mrk_file, self).__setstate__(state)\n\n def __repr__(self):\n return str(basename(self.file))\n",
"step-2": "<mask token>\n\n\nclass mrk_file(FileInfo):\n <mask token>\n\n def __init__(self, id_=None, file=None, parent=None):\n super(mrk_file, self).__init__(id_, file, parent)\n self._type = '.mrk'\n\n def __getstate__(self):\n data = super(mrk_file, self).__getstate__()\n return data\n\n def __setstate__(self, state):\n super(mrk_file, self).__setstate__(state)\n\n def __repr__(self):\n return str(basename(self.file))\n",
"step-3": "<mask token>\n\n\nclass mrk_file(FileInfo):\n \"\"\"\n .mrk specific file container.\n \"\"\"\n\n def __init__(self, id_=None, file=None, parent=None):\n super(mrk_file, self).__init__(id_, file, parent)\n self._type = '.mrk'\n\n def __getstate__(self):\n data = super(mrk_file, self).__getstate__()\n return data\n\n def __setstate__(self, state):\n super(mrk_file, self).__setstate__(state)\n\n def __repr__(self):\n return str(basename(self.file))\n",
"step-4": "from os.path import basename\nfrom .FileInfo import FileInfo\n\n\nclass mrk_file(FileInfo):\n \"\"\"\n .mrk specific file container.\n \"\"\"\n\n def __init__(self, id_=None, file=None, parent=None):\n super(mrk_file, self).__init__(id_, file, parent)\n self._type = '.mrk'\n\n def __getstate__(self):\n data = super(mrk_file, self).__getstate__()\n return data\n\n def __setstate__(self, state):\n super(mrk_file, self).__setstate__(state)\n\n def __repr__(self):\n return str(basename(self.file))\n",
"step-5": "from os.path import basename\n\nfrom .FileInfo import FileInfo\n\n\nclass mrk_file(FileInfo):\n \"\"\"\n .mrk specific file container.\n \"\"\"\n def __init__(self, id_=None, file=None, parent=None):\n super(mrk_file, self).__init__(id_, file, parent)\n self._type = '.mrk'\n\n#region class methods\n\n def __getstate__(self):\n data = super(mrk_file, self).__getstate__()\n\n return data\n\n def __setstate__(self, state):\n super(mrk_file, self).__setstate__(state)\n\n def __repr__(self):\n # Have a separate representation for .mrk files as this is shown in the\n # info for each con file under the list of associated mrk's.\n return str(basename(self.file))\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if __name__ == '__main__':
exit(cli.main(prog_name='htmap'))
<|reserved_special_token_1|>
from .cli import cli
if __name__ == '__main__':
exit(cli.main(prog_name='htmap'))
<|reserved_special_token_1|>
from .cli import cli
if __name__ == "__main__":
exit(cli.main(prog_name="htmap"))
|
flexible
|
{
"blob_id": "069338b188f3cf16357b2502cbb3130b69918bd9",
"index": 286,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nif __name__ == '__main__':\n exit(cli.main(prog_name='htmap'))\n",
"step-3": "from .cli import cli\nif __name__ == '__main__':\n exit(cli.main(prog_name='htmap'))\n",
"step-4": "from .cli import cli\n\nif __name__ == \"__main__\":\n exit(cli.main(prog_name=\"htmap\"))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def dfs(i):
if temp[i]:
return
temp[i] = True
if i in odd:
for j in graph[i]:
even.add(j)
dfs(j)
else:
for j in graph[i]:
odd.add(j)
dfs(j)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.setrecursionlimit(1000000)
<|reserved_special_token_0|>
for _ in range(n - 1):
a, b = map(int, input().split())
graph[a - 1].add(b - 1)
graph[b - 1].add(a - 1)
def dfs(i):
if temp[i]:
return
temp[i] = True
if i in odd:
for j in graph[i]:
even.add(j)
dfs(j)
else:
for j in graph[i]:
odd.add(j)
dfs(j)
<|reserved_special_token_0|>
odd.add(0)
dfs(0)
<|reserved_special_token_0|>
for i in range(q):
c, d = map(int, input().split())
if c - 1 in odd and d - 1 in odd or c - 1 in even and d - 1 in even:
ans.append('Town')
else:
ans.append('Road')
for i in ans:
print(i)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
sys.setrecursionlimit(1000000)
n, q = map(int, input().split())
graph = [set([]) for _ in range(n)]
for _ in range(n - 1):
a, b = map(int, input().split())
graph[a - 1].add(b - 1)
graph[b - 1].add(a - 1)
def dfs(i):
if temp[i]:
return
temp[i] = True
if i in odd:
for j in graph[i]:
even.add(j)
dfs(j)
else:
for j in graph[i]:
odd.add(j)
dfs(j)
temp = [False] * n
odd = set([])
even = set([])
odd.add(0)
dfs(0)
ans = []
for i in range(q):
c, d = map(int, input().split())
if c - 1 in odd and d - 1 in odd or c - 1 in even and d - 1 in even:
ans.append('Town')
else:
ans.append('Road')
for i in ans:
print(i)
<|reserved_special_token_1|>
import sys
sys.setrecursionlimit(1000000)
n, q = map(int, input().split())
graph = [set([]) for _ in range(n)]
for _ in range(n - 1):
a, b = map(int, input().split())
graph[a - 1].add(b - 1)
graph[b - 1].add(a - 1)
def dfs(i):
if temp[i]:
return
temp[i] = True
if i in odd:
for j in graph[i]:
even.add(j)
dfs(j)
else:
for j in graph[i]:
odd.add(j)
dfs(j)
temp = [False] * n
odd = set([])
even = set([])
odd.add(0)
dfs(0)
ans = []
for i in range(q):
c, d = map(int, input().split())
if c - 1 in odd and d - 1 in odd or c - 1 in even and d - 1 in even:
ans.append('Town')
else:
ans.append('Road')
for i in ans:
print(i)
<|reserved_special_token_1|>
import sys
sys.setrecursionlimit(1000000)
n, q = map(int, input().split())
graph = [set([]) for _ in range(n)]
for _ in range(n - 1):
a, b = map(int, input().split())
graph[a - 1].add(b - 1)
graph[b - 1].add(a - 1)
def dfs(i):
if temp[i]:
return
temp[i] = True
if i in odd:
for j in graph[i]:
even.add(j)
dfs(j)
else:
for j in graph[i]:
odd.add(j)
dfs(j)
temp = [False] * n
odd = set([])
even = set([])
odd.add(0)
dfs(0)
ans = []
for i in range(q):
c, d = map(int, input().split())
if (c - 1 in odd and d - 1 in odd) or (c - 1 in even and d - 1 in even):
ans.append("Town")
else:
ans.append("Road")
for i in ans:
print(i)
|
flexible
|
{
"blob_id": "bab6b9a0178da119f753deb6c626dd5c41db2bdd",
"index": 2004,
"step-1": "<mask token>\n\n\ndef dfs(i):\n if temp[i]:\n return\n temp[i] = True\n if i in odd:\n for j in graph[i]:\n even.add(j)\n dfs(j)\n else:\n for j in graph[i]:\n odd.add(j)\n dfs(j)\n\n\n<mask token>\n",
"step-2": "<mask token>\nsys.setrecursionlimit(1000000)\n<mask token>\nfor _ in range(n - 1):\n a, b = map(int, input().split())\n graph[a - 1].add(b - 1)\n graph[b - 1].add(a - 1)\n\n\ndef dfs(i):\n if temp[i]:\n return\n temp[i] = True\n if i in odd:\n for j in graph[i]:\n even.add(j)\n dfs(j)\n else:\n for j in graph[i]:\n odd.add(j)\n dfs(j)\n\n\n<mask token>\nodd.add(0)\ndfs(0)\n<mask token>\nfor i in range(q):\n c, d = map(int, input().split())\n if c - 1 in odd and d - 1 in odd or c - 1 in even and d - 1 in even:\n ans.append('Town')\n else:\n ans.append('Road')\nfor i in ans:\n print(i)\n",
"step-3": "<mask token>\nsys.setrecursionlimit(1000000)\nn, q = map(int, input().split())\ngraph = [set([]) for _ in range(n)]\nfor _ in range(n - 1):\n a, b = map(int, input().split())\n graph[a - 1].add(b - 1)\n graph[b - 1].add(a - 1)\n\n\ndef dfs(i):\n if temp[i]:\n return\n temp[i] = True\n if i in odd:\n for j in graph[i]:\n even.add(j)\n dfs(j)\n else:\n for j in graph[i]:\n odd.add(j)\n dfs(j)\n\n\ntemp = [False] * n\nodd = set([])\neven = set([])\nodd.add(0)\ndfs(0)\nans = []\nfor i in range(q):\n c, d = map(int, input().split())\n if c - 1 in odd and d - 1 in odd or c - 1 in even and d - 1 in even:\n ans.append('Town')\n else:\n ans.append('Road')\nfor i in ans:\n print(i)\n",
"step-4": "import sys\nsys.setrecursionlimit(1000000)\nn, q = map(int, input().split())\ngraph = [set([]) for _ in range(n)]\nfor _ in range(n - 1):\n a, b = map(int, input().split())\n graph[a - 1].add(b - 1)\n graph[b - 1].add(a - 1)\n\n\ndef dfs(i):\n if temp[i]:\n return\n temp[i] = True\n if i in odd:\n for j in graph[i]:\n even.add(j)\n dfs(j)\n else:\n for j in graph[i]:\n odd.add(j)\n dfs(j)\n\n\ntemp = [False] * n\nodd = set([])\neven = set([])\nodd.add(0)\ndfs(0)\nans = []\nfor i in range(q):\n c, d = map(int, input().split())\n if c - 1 in odd and d - 1 in odd or c - 1 in even and d - 1 in even:\n ans.append('Town')\n else:\n ans.append('Road')\nfor i in ans:\n print(i)\n",
"step-5": "import sys\n\nsys.setrecursionlimit(1000000)\nn, q = map(int, input().split())\ngraph = [set([]) for _ in range(n)]\nfor _ in range(n - 1):\n a, b = map(int, input().split())\n graph[a - 1].add(b - 1)\n graph[b - 1].add(a - 1)\n\n\ndef dfs(i):\n if temp[i]:\n return\n temp[i] = True\n if i in odd:\n for j in graph[i]:\n even.add(j)\n dfs(j)\n else:\n for j in graph[i]:\n odd.add(j)\n dfs(j)\n\n\ntemp = [False] * n\nodd = set([])\neven = set([])\nodd.add(0)\ndfs(0)\nans = []\nfor i in range(q):\n c, d = map(int, input().split())\n if (c - 1 in odd and d - 1 in odd) or (c - 1 in even and d - 1 in even):\n ans.append(\"Town\")\n else:\n ans.append(\"Road\")\nfor i in ans:\n print(i)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
# vim: tabstop=4 expandtab autoindent shiftwidth=4 fileencoding=utf-8
from django.contrib.auth.decorators import login_required
from django.contrib.auth import models as auth_models
from django.contrib.auth import forms as auth_forms
from django.contrib.auth import authenticate, login
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_noop as _
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from django_mises.blog import models as blog_models
from django_mises.users import forms as users_forms
from django_mises import email_helpers
def user_view(request, username):
"""View the user
"""
import datetime
user = get_object_or_404(auth_models.User, username=username, is_active=True)
now = datetime.datetime.now()
post_count = blog_models.Post.objects.filter(author=user, publish_at__lte=now).count()
# Needs verification?
email_verification_form = None
if request.user.id == user.id and not user.get_profile().is_verified:
data = request.POST.copy() or None
email_verification_form = users_forms.EmailVerificationForm(data=data)
if email_verification_form.is_bound:
email_verification_form.data['user'] = request.user
if email_verification_form.is_valid():
email_verification_form.save()
messages.info(request, 'Tunnuksesi on aktivoitu!')
return HttpResponseRedirect(reverse('user', args=(request.user.username,)))
# Avoid template namespace clash
context = {
'viewed_user': user,
'post_count': post_count,
'email_verification_form': email_verification_form,
}
req_ctx = RequestContext(request, context)
return render_to_response('user.html', req_ctx)
def register(request):
"""Registration view, Django offers none
"""
data = request.POST.copy() or None
user_creation_form = auth_forms.UserCreationForm(data)
if user_creation_form.is_bound:
if user_creation_form.is_valid():
user = user_creation_form.save()
user = authenticate(username=user.username, password=user_creation_form.cleaned_data['password1'])
login(request, user)
return HttpResponseRedirect(reverse('user', args=(user.username,)))
context = {
'user_creation_form': user_creation_form,
}
req_ctx = RequestContext(request, context)
return render_to_response('register.html', req_ctx)
@login_required
def get_verification_code(request):
    """Email a fresh verification code to the logged-in user.

    Already-verified users just get an informational message.
    Maybe ajaxify this in the future.
    """
    if request.user.get_profile().is_verified:
        messages.info(request, 'Olet jo vahvistanut osoitteesi')
    else:
        code = request.user.get_profile().gen_verification_code()
        subject = _('Verification code')
        email_helpers.send_user_email(
            request.user, subject, 'send_verification_code.txt', {'code': code})
        messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')
    return HttpResponseRedirect(reverse('user', args=(request.user.username,)))
# EOF
|
normal
|
{
"blob_id": "22da05d9bf6139a0306bfb2d1df96e9e2cf6a0c6",
"index": 475,
"step-1": "<mask token>\n\n\n@login_required\ndef get_verification_code(request):\n \"\"\"Maybe ajaxify this in the future\n \"\"\"\n if request.user.get_profile().is_verified:\n messages.info(request, 'Olet jo vahvistanut osoitteesi')\n else:\n verification_code = request.user.get_profile().gen_verification_code()\n extractx = {'code': verification_code}\n subject = _('Verification code')\n email_helpers.send_user_email(request.user, subject,\n 'send_verification_code.txt', extractx)\n messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n",
"step-2": "<mask token>\n\n\ndef register(request):\n \"\"\"Registration view, Django offers none\n \"\"\"\n data = request.POST.copy() or None\n user_creation_form = auth_forms.UserCreationForm(data)\n if user_creation_form.is_bound:\n if user_creation_form.is_valid():\n user = user_creation_form.save()\n user = authenticate(username=user.username, password=\n user_creation_form.cleaned_data['password1'])\n login(request, user)\n return HttpResponseRedirect(reverse('user', args=(user.username,)))\n context = {'user_creation_form': user_creation_form}\n req_ctx = RequestContext(request, context)\n return render_to_response('register.html', req_ctx)\n\n\n@login_required\ndef get_verification_code(request):\n \"\"\"Maybe ajaxify this in the future\n \"\"\"\n if request.user.get_profile().is_verified:\n messages.info(request, 'Olet jo vahvistanut osoitteesi')\n else:\n verification_code = request.user.get_profile().gen_verification_code()\n extractx = {'code': verification_code}\n subject = _('Verification code')\n email_helpers.send_user_email(request.user, subject,\n 'send_verification_code.txt', extractx)\n messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n",
"step-3": "<mask token>\n\n\ndef user_view(request, username):\n \"\"\"View the user\n \"\"\"\n import datetime\n user = get_object_or_404(auth_models.User, username=username, is_active\n =True)\n now = datetime.datetime.now()\n post_count = blog_models.Post.objects.filter(author=user,\n publish_at__lte=now).count()\n email_verification_form = None\n if request.user.id == user.id and not user.get_profile().is_verified:\n data = request.POST.copy() or None\n email_verification_form = users_forms.EmailVerificationForm(data=data)\n if email_verification_form.is_bound:\n email_verification_form.data['user'] = request.user\n if email_verification_form.is_valid():\n email_verification_form.save()\n messages.info(request, 'Tunnuksesi on aktivoitu!')\n return HttpResponseRedirect(reverse('user', args=(request.\n user.username,)))\n context = {'viewed_user': user, 'post_count': post_count,\n 'email_verification_form': email_verification_form}\n req_ctx = RequestContext(request, context)\n return render_to_response('user.html', req_ctx)\n\n\ndef register(request):\n \"\"\"Registration view, Django offers none\n \"\"\"\n data = request.POST.copy() or None\n user_creation_form = auth_forms.UserCreationForm(data)\n if user_creation_form.is_bound:\n if user_creation_form.is_valid():\n user = user_creation_form.save()\n user = authenticate(username=user.username, password=\n user_creation_form.cleaned_data['password1'])\n login(request, user)\n return HttpResponseRedirect(reverse('user', args=(user.username,)))\n context = {'user_creation_form': user_creation_form}\n req_ctx = RequestContext(request, context)\n return render_to_response('register.html', req_ctx)\n\n\n@login_required\ndef get_verification_code(request):\n \"\"\"Maybe ajaxify this in the future\n \"\"\"\n if request.user.get_profile().is_verified:\n messages.info(request, 'Olet jo vahvistanut osoitteesi')\n else:\n verification_code = request.user.get_profile().gen_verification_code()\n extractx = {'code': 
verification_code}\n subject = _('Verification code')\n email_helpers.send_user_email(request.user, subject,\n 'send_verification_code.txt', extractx)\n messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n",
"step-4": "from django.contrib.auth.decorators import login_required\nfrom django.contrib.auth import models as auth_models\nfrom django.contrib.auth import forms as auth_forms\nfrom django.contrib.auth import authenticate, login\nfrom django.core.urlresolvers import reverse\nfrom django.utils.translation import ugettext_noop as _\nfrom django.contrib import messages\nfrom django.http import HttpResponseRedirect\nfrom django.shortcuts import get_object_or_404, render_to_response\nfrom django.template import RequestContext\nfrom django_mises.blog import models as blog_models\nfrom django_mises.users import forms as users_forms\nfrom django_mises import email_helpers\n\n\ndef user_view(request, username):\n \"\"\"View the user\n \"\"\"\n import datetime\n user = get_object_or_404(auth_models.User, username=username, is_active\n =True)\n now = datetime.datetime.now()\n post_count = blog_models.Post.objects.filter(author=user,\n publish_at__lte=now).count()\n email_verification_form = None\n if request.user.id == user.id and not user.get_profile().is_verified:\n data = request.POST.copy() or None\n email_verification_form = users_forms.EmailVerificationForm(data=data)\n if email_verification_form.is_bound:\n email_verification_form.data['user'] = request.user\n if email_verification_form.is_valid():\n email_verification_form.save()\n messages.info(request, 'Tunnuksesi on aktivoitu!')\n return HttpResponseRedirect(reverse('user', args=(request.\n user.username,)))\n context = {'viewed_user': user, 'post_count': post_count,\n 'email_verification_form': email_verification_form}\n req_ctx = RequestContext(request, context)\n return render_to_response('user.html', req_ctx)\n\n\ndef register(request):\n \"\"\"Registration view, Django offers none\n \"\"\"\n data = request.POST.copy() or None\n user_creation_form = auth_forms.UserCreationForm(data)\n if user_creation_form.is_bound:\n if user_creation_form.is_valid():\n user = user_creation_form.save()\n user = 
authenticate(username=user.username, password=\n user_creation_form.cleaned_data['password1'])\n login(request, user)\n return HttpResponseRedirect(reverse('user', args=(user.username,)))\n context = {'user_creation_form': user_creation_form}\n req_ctx = RequestContext(request, context)\n return render_to_response('register.html', req_ctx)\n\n\n@login_required\ndef get_verification_code(request):\n \"\"\"Maybe ajaxify this in the future\n \"\"\"\n if request.user.get_profile().is_verified:\n messages.info(request, 'Olet jo vahvistanut osoitteesi')\n else:\n verification_code = request.user.get_profile().gen_verification_code()\n extractx = {'code': verification_code}\n subject = _('Verification code')\n email_helpers.send_user_email(request.user, subject,\n 'send_verification_code.txt', extractx)\n messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n",
"step-5": "# vim: tabstop=4 expandtab autoindent shiftwidth=4 fileencoding=utf-8\n\nfrom django.contrib.auth.decorators import login_required\n\nfrom django.contrib.auth import models as auth_models\nfrom django.contrib.auth import forms as auth_forms\nfrom django.contrib.auth import authenticate, login\n\nfrom django.core.urlresolvers import reverse\n\nfrom django.utils.translation import ugettext_noop as _\n\nfrom django.contrib import messages\n\nfrom django.http import HttpResponseRedirect\n\nfrom django.shortcuts import get_object_or_404, render_to_response\n\nfrom django.template import RequestContext\n\nfrom django_mises.blog import models as blog_models\n\nfrom django_mises.users import forms as users_forms\n\nfrom django_mises import email_helpers\n\ndef user_view(request, username):\n \"\"\"View the user\n \"\"\"\n\n import datetime\n\n user = get_object_or_404(auth_models.User, username=username, is_active=True)\n\n now = datetime.datetime.now()\n\n post_count = blog_models.Post.objects.filter(author=user, publish_at__lte=now).count()\n\n # Needs verification?\n email_verification_form = None\n if request.user.id == user.id and not user.get_profile().is_verified:\n data = request.POST.copy() or None\n\n email_verification_form = users_forms.EmailVerificationForm(data=data)\n if email_verification_form.is_bound:\n email_verification_form.data['user'] = request.user\n if email_verification_form.is_valid():\n email_verification_form.save()\n\n messages.info(request, 'Tunnuksesi on aktivoitu!')\n\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n\n # Avoid template namespace clash\n context = {\n 'viewed_user': user,\n 'post_count': post_count,\n 'email_verification_form': email_verification_form,\n }\n req_ctx = RequestContext(request, context)\n\n return render_to_response('user.html', req_ctx)\n\ndef register(request):\n \"\"\"Registration view, Django offers none\n \"\"\"\n\n data = request.POST.copy() or None\n\n 
user_creation_form = auth_forms.UserCreationForm(data)\n if user_creation_form.is_bound:\n if user_creation_form.is_valid():\n user = user_creation_form.save()\n\n user = authenticate(username=user.username, password=user_creation_form.cleaned_data['password1'])\n login(request, user)\n\n return HttpResponseRedirect(reverse('user', args=(user.username,)))\n\n context = {\n 'user_creation_form': user_creation_form,\n }\n req_ctx = RequestContext(request, context)\n\n return render_to_response('register.html', req_ctx)\n\n@login_required\ndef get_verification_code(request):\n \"\"\"Maybe ajaxify this in the future\n \"\"\"\n\n if request.user.get_profile().is_verified:\n messages.info(request, 'Olet jo vahvistanut osoitteesi')\n else:\n verification_code = request.user.get_profile().gen_verification_code()\n extractx = {\n 'code': verification_code,\n }\n subject = _('Verification code')\n email_helpers.send_user_email(request.user, subject, 'send_verification_code.txt', extractx)\n\n messages.info(request, 'Vahvistuskoodi on lähetetty sähköpostiisi')\n\n return HttpResponseRedirect(reverse('user', args=(request.user.username,)))\n\n# EOF\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
import tkinter as tk # Import tkinker for GUI creation
from PIL import Image, ImageTk # Allow images to be used as backgrounds
import socket # Importing sockets for low level implementation of networks
import select # Importing select to poll between the user input and received message
import sys # Getting input from terminal and writing output to terminal
# Fixed pixel size of the GUI window.
HEIGHT = 714
WIDTH = 1000
root = tk.Tk()  # Root Tk window; every widget below is attached to it.
def sigint_handler(signum, frame):
    """Handle Ctrl+C: print a farewell message and terminate the client."""
    print('\n Disconnecting from server')
    sys.exit()
# TCP client socket: IPv4 (AF_INET), stream-oriented (SOCK_STREAM).
client_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# Address of the testing server.
IP = "127.0.0.1"
PORT = 42069
# Connect at import time, before the GUI is built.
# NOTE(review): if the server is unreachable this raises before the window
# ever appears -- presumably acceptable for this demo; confirm.
client_socket.connect((IP, PORT))
# Install the Ctrl+C handler so the terminal user gets a clean exit.
import signal
signal.signal(signal.SIGINT, sigint_handler)
# Username handshake wire format:
#   Header: fixed-width field (HEADER_LENGTH chars) holding the username length
#   Body:   the UTF-8 encoded username
# The server uses the same header width to frame the message.
HEADER_LENGTH = 10
def sendUsernameToServer(username_entry):
    """Announce this client to the server.

    Sends a fixed-width header containing the username length followed by
    the UTF-8 encoded username, then enters the poll loop for a reply.
    """
    encoded = username_entry.encode('utf-8')
    header = f"{len(encoded):<{HEADER_LENGTH}}".encode('utf-8')
    client_socket.send(header + encoded)
    checkIO()
def checkIO():
    """Poll between terminal input and the server socket.

    Blocks in select() until one of the watched streams is readable.
    A message from the server is shown in the GUI text label and echoed
    to the terminal; an empty recv() means the server closed the
    connection, in which case the client exits.
    """
    sockets_list = [sys.stdin, client_socket]

    # Wait until at least one stream is readable.
    read_sockets, write_socket, error_socket = select.select(
        sockets_list, [], [])

    # BUGFIX: the loop variable was named 'socket', shadowing the imported
    # socket module for the rest of the function; renamed to 'sock'.
    for sock in read_sockets:
        # If sock is the client socket, the server sent us a message.
        if sock == client_socket:
            message = sock.recv(2048)
            if not len(message):
                text_label['text'] = "Connection closed by server"
                print("Connection closed by server")
                sys.exit()
            text_label['text'] = message.decode('utf-8')
            print(message.decode('utf-8'))
def sendY():
    """Send an affirmative ('y') answer to the server, then await its next prompt."""
    answer = 'y'.encode('utf-8')
    client_socket.send(answer)
    sys.stdout.flush()
    checkIO()
def sendN():
    """Send a negative ('n') answer to the server, then await its next prompt."""
    answer = 'n'.encode('utf-8')
    client_socket.send(answer)
    sys.stdout.flush()
    checkIO()
#-----------------------------------------------------
#-------------GUI-LAYOUT------------------------------
#-----------------------------------------------------
# Base canvas sized to the window, with a full-window background image.
canvas = tk.Canvas(root, height=HEIGHT, width=WIDTH)
canvas.pack()
background_image = tk.PhotoImage(file='background.gif')
background_label = tk.Label(root, image=background_image)
background_label.place(relwidth=1, relheight=1)
# Title banner across the top.
covid_label = tk.Label(root, text="COVID-19 Helper", bg="sky blue")
covid_label.config(font=("Arial", 40))
covid_label.place(relx=0.12, rely=0.1, relwidth=0.76, relheight=0.1)
# Main container holding the left (chat) and right (links) panels.
main_frame = tk.Frame(root, bg="light blue")
main_frame.place(relx=0.12, rely=0.2, relwidth=0.76, relheight=0.7)
#----------------------------------------------------------
# Right panel: informational buttons.
# NOTE(review): none of these four buttons has a command bound yet.
right_frame = tk.Frame(main_frame, bg="sky blue")
right_frame.place(relx=0.74, rely=0.05, relwidth=0.23, relheight=0.9)
heat_button = tk.Button(right_frame, text="View HeatMap", bg="deep sky blue", activebackground="steel blue")
heat_button.place(relx=0.05, rely=0.04, relwidth=0.9, relheight=0.2)
info_button = tk.Button(right_frame, text="Covid-19 HSE Info", bg="deep sky blue", activebackground="steel blue")
info_button.place(relx=0.05, rely=0.28, relwidth=0.9, relheight=0.2)
contact_button = tk.Button(right_frame, text="Heathcare Contacts", bg="deep sky blue", activebackground="steel blue")
contact_button.place(relx=0.05, rely=0.52, relwidth=0.9, relheight=0.2)
doctor_button = tk.Button(right_frame, text="Speak with a doctor", bg="orange2", activebackground="DarkOrange1")
doctor_button.place(relx=0.05, rely=0.76, relwidth=0.9, relheight=0.2)
#----------------------------------------------------------
# Left panel: server interaction (message display, username entry, yes/no).
left_frame = tk.Frame(main_frame, bg="sky blue")
left_frame.place(relx=0.03, rely=0.05, relwidth=0.69, relheight=0.9)
# text_label is updated by checkIO() with messages from the server.
text_frame = tk.Frame(left_frame, bg="ghost white")
text_frame.place(relx=0.05, rely=0.05, relwidth= 0.9, relheight=0.6)
text_label = tk.Label(text_frame, bg="ghost white", font=('Courier', 10))
text_label['text'] = "Please enter your username and click\n'Connect to testing server'"
text_label.place(relwidth=1, relheight=1)
# Connect button performs the username handshake with the server.
server_button = tk.Button(left_frame, text="Connect to testing server", bg="deep sky blue", activebackground="steel blue", command=lambda: sendUsernameToServer(username_entry.get()))
server_button.place(relx=0.05, rely=0.7, relwidth=0.9, relheight=0.05)
username_label = tk.Label(left_frame, text="Username:", bg="DarkSeaGreen1")
username_label.place(relx=0.05, rely=0.77, relwidth=0.2, relheight=0.05)
username_entry = tk.Entry(left_frame, bg="PaleGreen1")
username_entry.place(relx=0.3, rely=0.77, relwidth=0.65, relheight=0.05)
# Yes/No buttons answer the server's questionnaire prompts.
yes_button = tk.Button(left_frame, text="Yes", bg="deep sky blue", activebackground="steel blue", command=lambda: sendY())
yes_button.place(relx=0.05, rely=0.84, relwidth=0.44, relheight=0.12)
no_button = tk.Button(left_frame, text="No", bg="deep sky blue", activebackground="steel blue", command=lambda: sendN())
no_button.place(relx=0.51, rely=0.84, relwidth=0.44, relheight=0.12)
#----------------------------------------------------------
# Hand control to Tk's event loop (blocks until the window is closed).
root.mainloop()
|
normal
|
{
"blob_id": "5e17299e6a409e433e384935a815bab6ce178ff5",
"index": 3031,
"step-1": "<mask token>\n\n\ndef sigint_handler(signum, frame):\n print('\\n Disconnecting from server')\n sys.exit()\n\n\n<mask token>\n\n\ndef sendUsernameToServer(username_entry):\n username = username_entry.encode('utf-8')\n username_header = f'{len(username):<{HEADER_LENGTH}}'.encode('utf-8')\n client_socket.send(username_header + username)\n checkIO()\n\n\ndef checkIO():\n sockets_list = [sys.stdin, client_socket]\n read_sockets, write_socket, error_socket = select.select(sockets_list,\n [], [])\n for socket in read_sockets:\n if socket == client_socket:\n message = socket.recv(2048)\n if not len(message):\n text_label['text'] = 'Connection closed by server'\n print('Connection closed by server')\n sys.exit()\n text_label['text'] = message.decode('utf-8')\n print(message.decode('utf-8'))\n\n\ndef sendY():\n message = 'y'\n message = message.encode('utf-8')\n client_socket.send(message)\n sys.stdout.flush()\n checkIO()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef sigint_handler(signum, frame):\n print('\\n Disconnecting from server')\n sys.exit()\n\n\n<mask token>\n\n\ndef sendUsernameToServer(username_entry):\n username = username_entry.encode('utf-8')\n username_header = f'{len(username):<{HEADER_LENGTH}}'.encode('utf-8')\n client_socket.send(username_header + username)\n checkIO()\n\n\ndef checkIO():\n sockets_list = [sys.stdin, client_socket]\n read_sockets, write_socket, error_socket = select.select(sockets_list,\n [], [])\n for socket in read_sockets:\n if socket == client_socket:\n message = socket.recv(2048)\n if not len(message):\n text_label['text'] = 'Connection closed by server'\n print('Connection closed by server')\n sys.exit()\n text_label['text'] = message.decode('utf-8')\n print(message.decode('utf-8'))\n\n\ndef sendY():\n message = 'y'\n message = message.encode('utf-8')\n client_socket.send(message)\n sys.stdout.flush()\n checkIO()\n\n\ndef sendN():\n message = 'n'\n message = message.encode('utf-8')\n client_socket.send(message)\n sys.stdout.flush()\n checkIO()\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef sigint_handler(signum, frame):\n print('\\n Disconnecting from server')\n sys.exit()\n\n\n<mask token>\nclient_socket.connect((IP, PORT))\n<mask token>\nsignal.signal(signal.SIGINT, sigint_handler)\n<mask token>\n\n\ndef sendUsernameToServer(username_entry):\n username = username_entry.encode('utf-8')\n username_header = f'{len(username):<{HEADER_LENGTH}}'.encode('utf-8')\n client_socket.send(username_header + username)\n checkIO()\n\n\ndef checkIO():\n sockets_list = [sys.stdin, client_socket]\n read_sockets, write_socket, error_socket = select.select(sockets_list,\n [], [])\n for socket in read_sockets:\n if socket == client_socket:\n message = socket.recv(2048)\n if not len(message):\n text_label['text'] = 'Connection closed by server'\n print('Connection closed by server')\n sys.exit()\n text_label['text'] = message.decode('utf-8')\n print(message.decode('utf-8'))\n\n\ndef sendY():\n message = 'y'\n message = message.encode('utf-8')\n client_socket.send(message)\n sys.stdout.flush()\n checkIO()\n\n\ndef sendN():\n message = 'n'\n message = message.encode('utf-8')\n client_socket.send(message)\n sys.stdout.flush()\n checkIO()\n\n\n<mask token>\ncanvas.pack()\n<mask token>\nbackground_label.place(relwidth=1, relheight=1)\n<mask token>\ncovid_label.config(font=('Arial', 40))\ncovid_label.place(relx=0.12, rely=0.1, relwidth=0.76, relheight=0.1)\n<mask token>\nmain_frame.place(relx=0.12, rely=0.2, relwidth=0.76, relheight=0.7)\n<mask token>\nright_frame.place(relx=0.74, rely=0.05, relwidth=0.23, relheight=0.9)\n<mask token>\nheat_button.place(relx=0.05, rely=0.04, relwidth=0.9, relheight=0.2)\n<mask token>\ninfo_button.place(relx=0.05, rely=0.28, relwidth=0.9, relheight=0.2)\n<mask token>\ncontact_button.place(relx=0.05, rely=0.52, relwidth=0.9, relheight=0.2)\n<mask token>\ndoctor_button.place(relx=0.05, rely=0.76, relwidth=0.9, relheight=0.2)\n<mask token>\nleft_frame.place(relx=0.03, rely=0.05, relwidth=0.69, relheight=0.9)\n<mask 
token>\ntext_frame.place(relx=0.05, rely=0.05, relwidth=0.9, relheight=0.6)\n<mask token>\ntext_label.place(relwidth=1, relheight=1)\n<mask token>\nserver_button.place(relx=0.05, rely=0.7, relwidth=0.9, relheight=0.05)\n<mask token>\nusername_label.place(relx=0.05, rely=0.77, relwidth=0.2, relheight=0.05)\n<mask token>\nusername_entry.place(relx=0.3, rely=0.77, relwidth=0.65, relheight=0.05)\n<mask token>\nyes_button.place(relx=0.05, rely=0.84, relwidth=0.44, relheight=0.12)\n<mask token>\nno_button.place(relx=0.51, rely=0.84, relwidth=0.44, relheight=0.12)\nroot.mainloop()\n",
"step-4": "<mask token>\nHEIGHT = 714\nWIDTH = 1000\nroot = tk.Tk()\n\n\ndef sigint_handler(signum, frame):\n print('\\n Disconnecting from server')\n sys.exit()\n\n\nclient_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\nIP = '127.0.0.1'\nPORT = 42069\nclient_socket.connect((IP, PORT))\n<mask token>\nsignal.signal(signal.SIGINT, sigint_handler)\nHEADER_LENGTH = 10\n\n\ndef sendUsernameToServer(username_entry):\n username = username_entry.encode('utf-8')\n username_header = f'{len(username):<{HEADER_LENGTH}}'.encode('utf-8')\n client_socket.send(username_header + username)\n checkIO()\n\n\ndef checkIO():\n sockets_list = [sys.stdin, client_socket]\n read_sockets, write_socket, error_socket = select.select(sockets_list,\n [], [])\n for socket in read_sockets:\n if socket == client_socket:\n message = socket.recv(2048)\n if not len(message):\n text_label['text'] = 'Connection closed by server'\n print('Connection closed by server')\n sys.exit()\n text_label['text'] = message.decode('utf-8')\n print(message.decode('utf-8'))\n\n\ndef sendY():\n message = 'y'\n message = message.encode('utf-8')\n client_socket.send(message)\n sys.stdout.flush()\n checkIO()\n\n\ndef sendN():\n message = 'n'\n message = message.encode('utf-8')\n client_socket.send(message)\n sys.stdout.flush()\n checkIO()\n\n\ncanvas = tk.Canvas(root, height=HEIGHT, width=WIDTH)\ncanvas.pack()\nbackground_image = tk.PhotoImage(file='background.gif')\nbackground_label = tk.Label(root, image=background_image)\nbackground_label.place(relwidth=1, relheight=1)\ncovid_label = tk.Label(root, text='COVID-19 Helper', bg='sky blue')\ncovid_label.config(font=('Arial', 40))\ncovid_label.place(relx=0.12, rely=0.1, relwidth=0.76, relheight=0.1)\nmain_frame = tk.Frame(root, bg='light blue')\nmain_frame.place(relx=0.12, rely=0.2, relwidth=0.76, relheight=0.7)\nright_frame = tk.Frame(main_frame, bg='sky blue')\nright_frame.place(relx=0.74, rely=0.05, relwidth=0.23, relheight=0.9)\nheat_button = 
tk.Button(right_frame, text='View HeatMap', bg=\n 'deep sky blue', activebackground='steel blue')\nheat_button.place(relx=0.05, rely=0.04, relwidth=0.9, relheight=0.2)\ninfo_button = tk.Button(right_frame, text='Covid-19 HSE Info', bg=\n 'deep sky blue', activebackground='steel blue')\ninfo_button.place(relx=0.05, rely=0.28, relwidth=0.9, relheight=0.2)\ncontact_button = tk.Button(right_frame, text='Heathcare Contacts', bg=\n 'deep sky blue', activebackground='steel blue')\ncontact_button.place(relx=0.05, rely=0.52, relwidth=0.9, relheight=0.2)\ndoctor_button = tk.Button(right_frame, text='Speak with a doctor', bg=\n 'orange2', activebackground='DarkOrange1')\ndoctor_button.place(relx=0.05, rely=0.76, relwidth=0.9, relheight=0.2)\nleft_frame = tk.Frame(main_frame, bg='sky blue')\nleft_frame.place(relx=0.03, rely=0.05, relwidth=0.69, relheight=0.9)\ntext_frame = tk.Frame(left_frame, bg='ghost white')\ntext_frame.place(relx=0.05, rely=0.05, relwidth=0.9, relheight=0.6)\ntext_label = tk.Label(text_frame, bg='ghost white', font=('Courier', 10))\ntext_label['text'] = \"\"\"Please enter your username and click\n'Connect to testing server'\"\"\"\ntext_label.place(relwidth=1, relheight=1)\nserver_button = tk.Button(left_frame, text='Connect to testing server', bg=\n 'deep sky blue', activebackground='steel blue', command=lambda :\n sendUsernameToServer(username_entry.get()))\nserver_button.place(relx=0.05, rely=0.7, relwidth=0.9, relheight=0.05)\nusername_label = tk.Label(left_frame, text='Username:', bg='DarkSeaGreen1')\nusername_label.place(relx=0.05, rely=0.77, relwidth=0.2, relheight=0.05)\nusername_entry = tk.Entry(left_frame, bg='PaleGreen1')\nusername_entry.place(relx=0.3, rely=0.77, relwidth=0.65, relheight=0.05)\nyes_button = tk.Button(left_frame, text='Yes', bg='deep sky blue',\n activebackground='steel blue', command=lambda : sendY())\nyes_button.place(relx=0.05, rely=0.84, relwidth=0.44, relheight=0.12)\nno_button = tk.Button(left_frame, text='No', bg='deep sky 
blue',\n activebackground='steel blue', command=lambda : sendN())\nno_button.place(relx=0.51, rely=0.84, relwidth=0.44, relheight=0.12)\nroot.mainloop()\n",
"step-5": "import tkinter as tk # Import tkinker for GUI creation\nfrom PIL import Image, ImageTk # Allow images to be used as backgrounds\nimport socket # Importing sockets for low level implementation of networks\nimport select # Importing select to poll between the user input and received message\nimport sys # Getting input from terminal and writing output to terminal\n\n# Size of GUI\nHEIGHT = 714\nWIDTH = 1000\n\nroot = tk.Tk() #Define root to begin window\n\ndef sigint_handler(signum, frame):\n print('\\n Disconnecting from server')\n sys.exit()\n\n# creating the client_socket object and adding the TCP/IP and IPv4 protocol\nclient_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)\n\n# IP and PORT of the socket\nIP = \"127.0.0.1\"\nPORT = 42069\n\n# Let's connect to the server!\nclient_socket.connect((IP, PORT))\n\n\n# Handling Ctrl+C in a very cool way\nimport signal\n\n\nsignal.signal(signal.SIGINT, sigint_handler)\n\n# Clever function to send username to the server\n# Format\n# Header: length_of_username\n# Body: Username\n\n# Header length used to receive the username\nHEADER_LENGTH = 10\n\ndef sendUsernameToServer(username_entry):\n username = username_entry.encode('utf-8')\n username_header = f\"{len(username):<{HEADER_LENGTH}}\".encode('utf-8')\n client_socket.send(username_header + username)\n checkIO()\n\ndef checkIO():\n # polling between user input and message received from the server\n sockets_list = [sys.stdin, client_socket]\n\n # checking for I/O in read_sockets\n read_sockets, write_socket, error_socket = select.select(\n sockets_list, [], [])\n\n for socket in read_sockets:\n # If socket == client_socket, we got a message\n if socket == client_socket:\n message = socket.recv(2048)\n if not len(message):\n text_label['text'] = \"Connection closed by server\"\n print(\"Connection closed by server\")\n sys.exit()\n \n text_label['text'] = message.decode('utf-8')\n print(message.decode('utf-8'))\n\ndef sendY():\n # Else, we can send a 
message\n message = 'y'\n message = message.encode('utf-8')\n client_socket.send(message)\n #sys.stdout.write(str(my_username) + \" > \")\n # sys.stdout.write(message.decode('utf-8'))\n sys.stdout.flush()\n checkIO()\n \ndef sendN():\n # Else, we can send a message\n message = 'n'\n message = message.encode('utf-8')\n client_socket.send(message)\n #sys.stdout.write(str(my_username) + \" > \")\n # sys.stdout.write(message.decode('utf-8'))\n sys.stdout.flush()\n checkIO()\n\n #client_socket.close()\n\n#-----------------------------------------------------\n#-------------GUI-LAYOUT------------------------------\n#-----------------------------------------------------\n\ncanvas = tk.Canvas(root, height=HEIGHT, width=WIDTH)\ncanvas.pack()\n\nbackground_image = tk.PhotoImage(file='background.gif')\nbackground_label = tk.Label(root, image=background_image)\nbackground_label.place(relwidth=1, relheight=1)\n\ncovid_label = tk.Label(root, text=\"COVID-19 Helper\", bg=\"sky blue\")\ncovid_label.config(font=(\"Arial\", 40))\ncovid_label.place(relx=0.12, rely=0.1, relwidth=0.76, relheight=0.1)\n\nmain_frame = tk.Frame(root, bg=\"light blue\")\nmain_frame.place(relx=0.12, rely=0.2, relwidth=0.76, relheight=0.7)\n\n#----------------------------------------------------------\n\nright_frame = tk.Frame(main_frame, bg=\"sky blue\")\nright_frame.place(relx=0.74, rely=0.05, relwidth=0.23, relheight=0.9)\n\nheat_button = tk.Button(right_frame, text=\"View HeatMap\", bg=\"deep sky blue\", activebackground=\"steel blue\")\nheat_button.place(relx=0.05, rely=0.04, relwidth=0.9, relheight=0.2)\n\ninfo_button = tk.Button(right_frame, text=\"Covid-19 HSE Info\", bg=\"deep sky blue\", activebackground=\"steel blue\")\ninfo_button.place(relx=0.05, rely=0.28, relwidth=0.9, relheight=0.2)\n\ncontact_button = tk.Button(right_frame, text=\"Heathcare Contacts\", bg=\"deep sky blue\", activebackground=\"steel blue\")\ncontact_button.place(relx=0.05, rely=0.52, relwidth=0.9, 
relheight=0.2)\n\ndoctor_button = tk.Button(right_frame, text=\"Speak with a doctor\", bg=\"orange2\", activebackground=\"DarkOrange1\")\ndoctor_button.place(relx=0.05, rely=0.76, relwidth=0.9, relheight=0.2)\n\n#----------------------------------------------------------\n\nleft_frame = tk.Frame(main_frame, bg=\"sky blue\")\nleft_frame.place(relx=0.03, rely=0.05, relwidth=0.69, relheight=0.9)\n\ntext_frame = tk.Frame(left_frame, bg=\"ghost white\")\ntext_frame.place(relx=0.05, rely=0.05, relwidth= 0.9, relheight=0.6)\n\ntext_label = tk.Label(text_frame, bg=\"ghost white\", font=('Courier', 10))\ntext_label['text'] = \"Please enter your username and click\\n'Connect to testing server'\"\ntext_label.place(relwidth=1, relheight=1)\n\nserver_button = tk.Button(left_frame, text=\"Connect to testing server\", bg=\"deep sky blue\", activebackground=\"steel blue\", command=lambda: sendUsernameToServer(username_entry.get()))\nserver_button.place(relx=0.05, rely=0.7, relwidth=0.9, relheight=0.05)\n\nusername_label = tk.Label(left_frame, text=\"Username:\", bg=\"DarkSeaGreen1\")\nusername_label.place(relx=0.05, rely=0.77, relwidth=0.2, relheight=0.05)\n\nusername_entry = tk.Entry(left_frame, bg=\"PaleGreen1\")\nusername_entry.place(relx=0.3, rely=0.77, relwidth=0.65, relheight=0.05)\n\nyes_button = tk.Button(left_frame, text=\"Yes\", bg=\"deep sky blue\", activebackground=\"steel blue\", command=lambda: sendY())\nyes_button.place(relx=0.05, rely=0.84, relwidth=0.44, relheight=0.12)\n\nno_button = tk.Button(left_frame, text=\"No\", bg=\"deep sky blue\", activebackground=\"steel blue\", command=lambda: sendN())\nno_button.place(relx=0.51, rely=0.84, relwidth=0.44, relheight=0.12)\n\n#----------------------------------------------------------\n\nroot.mainloop()",
"step-ids": [
4,
5,
6,
7,
9
]
}
|
[
4,
5,
6,
7,
9
] |
# Copyright (C) 2011 Ruckus Wireless, Inc. All rights reserved.
# Please make sure the following module docstring is accurate since it will be used in report generation.
"""
Description:
@author: Chris Wang
@contact: cwang@ruckuswireless.com
@since: Aug-09, 2010
Prerequisite (Assumptions about the state of the test bed/DUT):
1. Build under test is loaded on the Station
Required components: 'Station'
Test parameters:
- zd_tag: zd tag. Will get zd components via zd tag in self.testbed.components.
Test procedure:
1. Config:
- initialize test parameters
2. Test:
- Get limited ZD discovery settings.
3. Cleanup:
- N/A
Result type: PASS/FAIL
Results: PASS: Get limited ZD discovery settings correctly.
Messages: If FAIL the test script returns a message related to the criterion that is not satisfied
"""
import logging
from RuckusAutoTest.models import Test
from RuckusAutoTest.components.lib.zd import access_points_zd as lib
class CB_ZD_Get_Primary_Secondary_ZD(Test):
    """Combo-test step: read the limited ZD discovery (primary/secondary
    ZoneDirector) settings from the ZD web UI and publish them to the
    carrier bag for later verification steps.

    NOTE: this module uses Python 2 syntax (``except Exception, e``).
    """
    required_components = ['ZoneDirector']
    parameters_description = {'zd_tag': "zd tag. Will get zd components via zd tag in self.testbed.components",
                              }

    '''
    Test case for automation.
    '''
    def config(self, conf):
        # Framework hook: parse step parameters, then pull any shared state
        # from the carrier bag (currently nothing is retrieved).
        self._init_test_params(conf)
        self._retrive_carrier_bag()

    def test(self):
        # Framework hook: fetch the limited ZD discovery settings and report
        # PASS/FAIL depending on whether the read succeeded.
        try:
            logging.info("Get limited ZD discovery settings via ZD")
            self.zd_discovery_cfg = lib.get_limited_zd_discovery_cfg(self.zd)
            logging.info("Limited ZD discovery cfg: %s" % self.zd_discovery_cfg)
        except Exception, e:
            self.errmsg = "Fail to get limited ZD discovery: %s" % e.message

        if self.errmsg:
            logging.debug(self.errmsg)
            return self.returnResult("FAIL", self.errmsg)
        else:
            # Success: share the retrieved settings before reporting PASS.
            self._update_carrier_bag()
            self.passmsg = "Get limited ZD discovery correctly: %s" % (self.zd_discovery_cfg)
            return self.returnResult("PASS", self.passmsg)

    def cleanup(self):
        # Nothing to restore on the ZD for a read-only step.
        pass

    def _retrive_carrier_bag(self):
        # This step reads nothing from the carrier bag.
        pass

    def _update_carrier_bag(self):
        # Expose the settings read from the GUI to subsequent test steps.
        self.carrierbag['gui_zd_discovery_cfg'] = self.zd_discovery_cfg

    def _init_test_params(self, conf):
        # Merge caller-supplied parameters over the defaults.
        self.conf = dict(zd_tag = '')
        self.conf.update(conf)

        # Resolve which ZD component to use: a tagged component from the
        # carrier bag when 'zd_tag' is set, else the default ZoneDirector.
        zd_tag = self.conf.pop('zd_tag')
        if zd_tag:
            self.zd = self.carrierbag[zd_tag]
        else:
            self.zd = self.testbed.components['ZoneDirector']

        self.errmsg = ''
        self.passmsg = ''
|
normal
|
{
"blob_id": "25288a6dd0552d59f8c305bb8edbbbed5d464d5b",
"index": 9997,
"step-1": "# Copyright (C) 2011 Ruckus Wireless, Inc. All rights reserved.\n# Please make sure the following module docstring is accurate since it will be used in report generation.\n\n\"\"\"\n Description: \n @author: Chris Wang\n @contact: cwang@ruckuswireless.com\n @since: Aug-09, 2010\n\n Prerequisite (Assumptions about the state of the test bed/DUT):\n 1. Build under test is loaded on the Station\n\n Required components: 'Station'\n Test parameters:\n - zd_tag: zd tag. Will get zd components via zd tag in self.testbed.components.\n \n Test procedure:\n 1. Config:\n - initialize test parameters \n 2. Test:\n - Get limited ZD discovery settings.\n 3. Cleanup:\n - N/A\n \n Result type: PASS/FAIL\n Results: PASS: Get limited ZD discovery settings correctly.\n\n Messages: If FAIL the test script returns a message related to the criterion that is not satisfied\n\"\"\"\nimport logging\n\nfrom RuckusAutoTest.models import Test\nfrom RuckusAutoTest.components.lib.zd import access_points_zd as lib \n\nclass CB_ZD_Get_Primary_Secondary_ZD(Test):\n required_components = ['ZoneDirector']\n parameters_description = {'zd_tag': \"zd tag. 
Will get zd components via zd tag in self.testbed.components\",\n }\n \n '''\n Test case for automation.\n '''\n def config(self, conf):\n self._init_test_params(conf)\n self._retrive_carrier_bag()\n \n def test(self):\n try:\n logging.info(\"Get limited ZD discovery settings via ZD\")\n self.zd_discovery_cfg = lib.get_limited_zd_discovery_cfg(self.zd)\n logging.info(\"Limited ZD discovery cfg: %s\" % self.zd_discovery_cfg)\n except Exception, e:\n self.errmsg = \"Fail to get limited ZD discovery: %s\" % e.message\n \n if self.errmsg:\n logging.debug(self.errmsg)\n return self.returnResult(\"FAIL\", self.errmsg)\n else:\n self._update_carrier_bag()\n self.passmsg = \"Get limited ZD discovery correctly: %s\" % (self.zd_discovery_cfg)\n return self.returnResult(\"PASS\", self.passmsg)\n \n def cleanup(self):\n pass\n \n def _retrive_carrier_bag(self):\n pass\n \n def _update_carrier_bag(self):\n self.carrierbag['gui_zd_discovery_cfg'] = self.zd_discovery_cfg\n \n def _init_test_params(self, conf):\n self.conf = dict(zd_tag = '')\n self.conf.update(conf)\n \n zd_tag = self.conf.pop('zd_tag')\n if zd_tag:\n self.zd = self.carrierbag[zd_tag]\n else:\n self.zd = self.testbed.components['ZoneDirector']\n \n self.errmsg = ''\n self.passmsg = ''",
"step-2": null,
"step-3": null,
"step-4": null,
"step-5": null,
"step-ids": [
0
]
}
|
[
0
] |
<|reserved_special_token_0|>
class SideEnum(str, Enum):
BUY = 'B'
SELL = 'S'
class BaseClient:
def __init__(self, client: 'StakeClient'):
self._client = weakref.proxy(client)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if TYPE_CHECKING:
from stake.client import StakeClient
<|reserved_special_token_0|>
class SideEnum(str, Enum):
BUY = 'B'
SELL = 'S'
class BaseClient:
def __init__(self, client: 'StakeClient'):
self._client = weakref.proxy(client)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
if TYPE_CHECKING:
from stake.client import StakeClient
camelcase = partial(inflection.camelize, uppercase_first_letter=False)
__all__ = ['SideEnum']
class SideEnum(str, Enum):
BUY = 'B'
SELL = 'S'
class BaseClient:
def __init__(self, client: 'StakeClient'):
self._client = weakref.proxy(client)
<|reserved_special_token_1|>
import weakref
from enum import Enum
from functools import partial
from typing import TYPE_CHECKING
import inflection
if TYPE_CHECKING:
from stake.client import StakeClient
camelcase = partial(inflection.camelize, uppercase_first_letter=False)
__all__ = ['SideEnum']
class SideEnum(str, Enum):
BUY = 'B'
SELL = 'S'
class BaseClient:
def __init__(self, client: 'StakeClient'):
self._client = weakref.proxy(client)
<|reserved_special_token_1|>
import weakref
from enum import Enum
from functools import partial
from typing import TYPE_CHECKING

import inflection

if TYPE_CHECKING:
    # Imported only for static type checking; avoids a circular import of
    # stake.client at runtime.
    from stake.client import StakeClient

# snake_case -> camelCase converter (lowercase first letter), presumably used
# when talking to a camelCase JSON API — confirm against callers.
camelcase = partial(inflection.camelize, uppercase_first_letter=False)

# Public API of this module.
__all__ = ["SideEnum"]
class SideEnum(str, Enum):
    """Order side marker: 'B' for buy, 'S' for sell.

    Subclasses str so members compare and serialize as their string values.
    """
    BUY = "B"
    SELL = "S"
class BaseClient:
    """Common base for API sub-clients that delegate to a shared StakeClient."""

    # flake8: noqa
    def __init__(self, client: "StakeClient"):
        # Hold a weak proxy so sub-clients do not form a reference cycle with
        # the owning StakeClient and do not keep it alive on their own.
        self._client = weakref.proxy(client)
|
flexible
|
{
"blob_id": "f13ccbfb27788deca0d4f4b58a4e9e8c7e8e0306",
"index": 1644,
"step-1": "<mask token>\n\n\nclass SideEnum(str, Enum):\n BUY = 'B'\n SELL = 'S'\n\n\nclass BaseClient:\n\n def __init__(self, client: 'StakeClient'):\n self._client = weakref.proxy(client)\n",
"step-2": "<mask token>\nif TYPE_CHECKING:\n from stake.client import StakeClient\n<mask token>\n\n\nclass SideEnum(str, Enum):\n BUY = 'B'\n SELL = 'S'\n\n\nclass BaseClient:\n\n def __init__(self, client: 'StakeClient'):\n self._client = weakref.proxy(client)\n",
"step-3": "<mask token>\nif TYPE_CHECKING:\n from stake.client import StakeClient\ncamelcase = partial(inflection.camelize, uppercase_first_letter=False)\n__all__ = ['SideEnum']\n\n\nclass SideEnum(str, Enum):\n BUY = 'B'\n SELL = 'S'\n\n\nclass BaseClient:\n\n def __init__(self, client: 'StakeClient'):\n self._client = weakref.proxy(client)\n",
"step-4": "import weakref\nfrom enum import Enum\nfrom functools import partial\nfrom typing import TYPE_CHECKING\nimport inflection\nif TYPE_CHECKING:\n from stake.client import StakeClient\ncamelcase = partial(inflection.camelize, uppercase_first_letter=False)\n__all__ = ['SideEnum']\n\n\nclass SideEnum(str, Enum):\n BUY = 'B'\n SELL = 'S'\n\n\nclass BaseClient:\n\n def __init__(self, client: 'StakeClient'):\n self._client = weakref.proxy(client)\n",
"step-5": "import weakref\nfrom enum import Enum\nfrom functools import partial\nfrom typing import TYPE_CHECKING\n\nimport inflection\n\nif TYPE_CHECKING:\n from stake.client import StakeClient\n\ncamelcase = partial(inflection.camelize, uppercase_first_letter=False)\n\n__all__ = [\"SideEnum\"]\n\n\nclass SideEnum(str, Enum):\n BUY = \"B\"\n SELL = \"S\"\n\n\nclass BaseClient:\n # flake8: noqa\n def __init__(self, client: \"StakeClient\"):\n self._client = weakref.proxy(client)\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
#
# @lc app=leetcode id=67 lang=python3
#
# [67] Add Binary
#
# https://leetcode.com/problems/add-binary/description/
#
# algorithms
# Easy (46.70%)
# Likes: 2566
# Dislikes: 331
# Total Accepted: 572.1K
# Total Submissions: 1.2M
# Testcase Example: '"11"\n"1"'
#
# Given two binary strings a and b, return their sum as a binary string.
#
#
# Example 1:
# Input: a = "11", b = "1"
# Output: "100"
# Example 2:
# Input: a = "1010", b = "1011"
# Output: "10101"
#
#
# Constraints:
#
#
# 1 <= a.length, b.length <= 10^4
# a and b consist only of '0' or '1' characters.
# Each string does not contain leading zeros except for the zero itself.
#
#
#
# @lc code=start
class Solution:
    def addBinary(self, a: str, b: str) -> str:
        """Return the sum of two binary strings as a binary string.

        Ripple-carry addition from the least-significant digit. Digits are
        appended and the result reversed once at the end — O(n) instead of
        the O(n^2) cost of the previous repeated ``list.insert(0, ...)``.

        Args:
            a: binary string consisting of '0'/'1' characters.
            b: binary string consisting of '0'/'1' characters.

        Returns:
            Binary-string sum. Like the original implementation, leading
            zeros present in the wider input are preserved in the result.
        """
        # Left-pad the shorter operand with zeros so both have equal length.
        width = max(len(a), len(b))
        a = a.zfill(width)
        b = b.zfill(width)

        digits = []  # result digits, least-significant first
        carry = 0
        for da, db in zip(reversed(a), reversed(b)):
            # divmod folds the carry computation and digit into one step.
            carry, digit = divmod(int(da) + int(db) + carry, 2)
            digits.append(str(digit))
        if carry:
            digits.append(str(carry))
        return "".join(reversed(digits))
# @lc code=end
|
normal
|
{
"blob_id": "227a56c970a74d515ab694d2c0924885e2209cfe",
"index": 7089,
"step-1": "<mask token>\n",
"step-2": "class Solution:\n <mask token>\n",
"step-3": "class Solution:\n\n def addBinary(self, a: str, b: str) ->str:\n if len(a) < len(b):\n a = '0' * (len(b) - len(a)) + a\n else:\n b = '0' * (len(a) - len(b)) + b\n last_pointer = len(a) - 1\n mark = 0\n res = []\n while last_pointer >= 0:\n tmp = int(a[last_pointer]) + int(b[last_pointer]) + mark\n if tmp >= 2:\n mark = tmp // 2\n res.insert(0, str(tmp % 2))\n else:\n res.insert(0, str(tmp))\n mark = 0\n last_pointer -= 1\n if last_pointer == -1 and mark != 0:\n res.insert(0, str(mark))\n return ''.join(res)\n",
"step-4": "#\n# @lc app=leetcode id=67 lang=python3\n#\n# [67] Add Binary\n#\n# https://leetcode.com/problems/add-binary/description/\n#\n# algorithms\n# Easy (46.70%)\n# Likes: 2566\n# Dislikes: 331\n# Total Accepted: 572.1K\n# Total Submissions: 1.2M\n# Testcase Example: '\"11\"\\n\"1\"'\n#\n# Given two binary strings a and b, return their sum as a binary string.\n# \n# \n# Example 1:\n# Input: a = \"11\", b = \"1\"\n# Output: \"100\"\n# Example 2:\n# Input: a = \"1010\", b = \"1011\"\n# Output: \"10101\"\n# \n# \n# Constraints:\n# \n# \n# 1 <= a.length, b.length <= 10^4\n# a and b consist only of '0' or '1' characters.\n# Each string does not contain leading zeros except for the zero itself.\n# \n# \n#\n\n# @lc code=start\nclass Solution:\n def addBinary(self, a: str, b: str) -> str:\n if len(a) < len(b):\n a = \"0\" * (len(b) - len(a)) + a\n else:\n b = \"0\" * (len(a) - len(b)) + b\n\n last_pointer = len(a) - 1\n mark = 0\n res = []\n while last_pointer >= 0:\n tmp = int(a[last_pointer]) + int(b[last_pointer]) + mark\n if tmp >= 2:\n mark = tmp // 2\n res.insert(0, str(tmp % 2))\n else:\n res.insert(0, str(tmp))\n mark = 0\n\n\n last_pointer -= 1\n\n if last_pointer == -1 and mark != 0:\n res.insert(0, str(mark))\n\n return \"\".join(res)\n \n# @lc code=end\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
import csv
import Feature_extraction as urlfeature
import trainer as tr
import warnings
warnings.filterwarnings("ignore")
def resultwriter(feature, output_dest):
    """Write extracted URL features to a CSV file.

    Args:
        feature: list of ``[url, feature_dict]`` pairs; the dicts are the
            rows, and the header comes from the first row's keys.
        output_dest: path of the CSV file to (over)write.

    A DictWriter is still built per row from that row's own keys, preserving
    the original tolerance for rows whose key sets differ (each is emitted
    in its own key order).
    """
    header_pending = True
    # newline='' is required by the csv module; without it, platforms that
    # translate newlines insert stray blank lines into the output.
    with open(output_dest, 'w', newline='') as f:
        for item in feature:
            w = csv.DictWriter(f, item[1].keys())
            if header_pending:
                w.writeheader()
                header_pending = False
            w.writerow(item[1])
def process_URL_list(file_dest, output_dest):
    """Extract features for a labelled URL list and write them to CSV.

    Each input line is ``<url>,<label>``; the label is stored in the feature
    dict under the 'malicious' key.

    :param file_dest: path to the labelled URL list
    :param output_dest: CSV output path
    """
    rows = []
    with open(file_dest) as handle:
        for raw_line in handle:
            fields = raw_line.split(',')
            url = fields[0].strip()
            label = fields[1].strip()
            if url:
                print('working on: ' + url)  # progress indicator
                extracted = urlfeature.feature_extract(url)
                extracted['malicious'] = label
                rows.append([url, extracted])
    resultwriter(rows, output_dest)
def process_test_list(file_dest, output_dest):
    """Extract features for an unlabelled URL list (e.g. query.txt).

    Unlike process_URL_list, input lines carry no label, so no 'malicious'
    column is added to the output CSV.

    :param file_dest: path to a text file with one URL per line
    :param output_dest: CSV output path
    """
    # The original declared ``global f`` here, but ``f`` is never assigned in
    # this function, so the declaration was dead code and has been removed.
    feature = []
    with open(file_dest) as file:
        for line in file:
            url = line.strip()
            if url != '':
                print('working on: ' + url)  # progress indicator
                ret_dict = urlfeature.feature_extract(url)
                feature.append([url, ret_dict])

    resultwriter(feature, output_dest)
# change
def process_test_url(url, output_dest):
    """Extract features for a single URL and write them as a one-row CSV.

    A blank/whitespace-only url still produces an (empty) output file, as in
    the original implementation.

    :param url: URL to analyze
    :param output_dest: CSV output path
    """
    rows = []
    target = url.strip()
    if target:
        print('working on: ' + target)
        rows.append([target, urlfeature.feature_extract(target)])
    resultwriter(rows, output_dest)
def main():
    """Run training over the five prepared train/test CSV splits in ./comp."""
    for split in range(1, 6):
        train_csv = 'comp/train_Data%d.csv' % split
        test_csv = 'comp/test_features%d.csv' % split
        tr.train(train_csv, test_csv)
        print(' -------------------------------------------------------- ')
|
normal
|
{
"blob_id": "9d190face528d1a237f4c92bfb94a399f61a5af2",
"index": 9317,
"step-1": "<mask token>\n\n\ndef resultwriter(feature, output_dest):\n flag = True\n with open(output_dest, 'w') as f:\n for item in feature:\n w = csv.DictWriter(f, item[1].keys())\n if flag:\n w.writeheader()\n flag = False\n w.writerow(item[1])\n\n\n<mask token>\n\n\ndef process_test_list(file_dest, output_dest):\n global f\n feature = []\n with open(file_dest) as file:\n for line in file:\n url = line.strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef process_test_url(url, output_dest):\n feature = []\n url = url.strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef main():\n for i in range(1, 6):\n s = 'comp/train_Data' + str(i) + '.csv'\n k = 'comp/test_features' + str(i) + '.csv'\n tr.train(s, k)\n print(' -------------------------------------------------------- ')\n",
"step-2": "<mask token>\n\n\ndef resultwriter(feature, output_dest):\n flag = True\n with open(output_dest, 'w') as f:\n for item in feature:\n w = csv.DictWriter(f, item[1].keys())\n if flag:\n w.writeheader()\n flag = False\n w.writerow(item[1])\n\n\ndef process_URL_list(file_dest, output_dest):\n feature = []\n with open(file_dest) as file:\n for line in file:\n url = line.split(',')[0].strip()\n malicious_bool = line.split(',')[1].strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n ret_dict['malicious'] = malicious_bool\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef process_test_list(file_dest, output_dest):\n global f\n feature = []\n with open(file_dest) as file:\n for line in file:\n url = line.strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef process_test_url(url, output_dest):\n feature = []\n url = url.strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef main():\n for i in range(1, 6):\n s = 'comp/train_Data' + str(i) + '.csv'\n k = 'comp/test_features' + str(i) + '.csv'\n tr.train(s, k)\n print(' -------------------------------------------------------- ')\n",
"step-3": "<mask token>\nwarnings.filterwarnings('ignore')\n\n\ndef resultwriter(feature, output_dest):\n flag = True\n with open(output_dest, 'w') as f:\n for item in feature:\n w = csv.DictWriter(f, item[1].keys())\n if flag:\n w.writeheader()\n flag = False\n w.writerow(item[1])\n\n\ndef process_URL_list(file_dest, output_dest):\n feature = []\n with open(file_dest) as file:\n for line in file:\n url = line.split(',')[0].strip()\n malicious_bool = line.split(',')[1].strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n ret_dict['malicious'] = malicious_bool\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef process_test_list(file_dest, output_dest):\n global f\n feature = []\n with open(file_dest) as file:\n for line in file:\n url = line.strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef process_test_url(url, output_dest):\n feature = []\n url = url.strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef main():\n for i in range(1, 6):\n s = 'comp/train_Data' + str(i) + '.csv'\n k = 'comp/test_features' + str(i) + '.csv'\n tr.train(s, k)\n print(' -------------------------------------------------------- ')\n",
"step-4": "import csv\nimport Feature_extraction as urlfeature\nimport trainer as tr\nimport warnings\nwarnings.filterwarnings('ignore')\n\n\ndef resultwriter(feature, output_dest):\n flag = True\n with open(output_dest, 'w') as f:\n for item in feature:\n w = csv.DictWriter(f, item[1].keys())\n if flag:\n w.writeheader()\n flag = False\n w.writerow(item[1])\n\n\ndef process_URL_list(file_dest, output_dest):\n feature = []\n with open(file_dest) as file:\n for line in file:\n url = line.split(',')[0].strip()\n malicious_bool = line.split(',')[1].strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n ret_dict['malicious'] = malicious_bool\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef process_test_list(file_dest, output_dest):\n global f\n feature = []\n with open(file_dest) as file:\n for line in file:\n url = line.strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef process_test_url(url, output_dest):\n feature = []\n url = url.strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef main():\n for i in range(1, 6):\n s = 'comp/train_Data' + str(i) + '.csv'\n k = 'comp/test_features' + str(i) + '.csv'\n tr.train(s, k)\n print(' -------------------------------------------------------- ')\n",
"step-5": "import csv\nimport Feature_extraction as urlfeature\nimport trainer as tr\nimport warnings\nwarnings.filterwarnings(\"ignore\")\n\n\ndef resultwriter(feature, output_dest):\n flag = True\n with open(output_dest, 'w') as f:\n for item in feature:\n w = csv.DictWriter(f, item[1].keys())\n if flag:\n w.writeheader()\n flag = False\n w.writerow(item[1])\n\n\ndef process_URL_list(file_dest,\n output_dest):\n feature = []\n with open(file_dest) as file:\n for line in file:\n url = line.split(',')[0].strip()\n malicious_bool = line.split(',')[1].strip()\n if url != '':\n print('working on: ' + url) # showoff\n ret_dict = urlfeature.feature_extract(url)\n ret_dict['malicious'] = malicious_bool\n feature.append([url, ret_dict]);\n resultwriter(feature, output_dest)\n\n\ndef process_test_list(file_dest,\n output_dest): # i think this takes whole file of urls without given malicious to extract their feature and doest not provide malicious column like this will take query.txt\n global f\n feature = []\n with open(file_dest) as file:\n for line in file:\n url = line.strip()\n if url != '':\n print('working on: ' + url) # showoff\n ret_dict = urlfeature.feature_extract(url)\n feature.append([url, ret_dict]);\n\n resultwriter(feature, output_dest)\n\n\n# change\ndef process_test_url(url,\n output_dest):\n feature = []\n url = url.strip()\n if url != '':\n print('working on: ' + url)\n ret_dict = urlfeature.feature_extract(url)\n feature.append([url, ret_dict])\n resultwriter(feature, output_dest)\n\n\ndef main():\n for i in range(1, 6):\n s = 'comp/train_Data' + str(i) + '.csv'\n k = 'comp/test_features' + str(i) + '.csv'\n tr.train(s, k)\n print(' -------------------------------------------------------- ')\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
from trac.db import DatabaseManager
def do_upgrade(env, ver, cursor):
    """Rename the schema key from taskboard_schema to agiletools_version.

    :param env: Trac environment (unused by this upgrade step)
    :param ver: target schema version (unused by this upgrade step)
    :param cursor: database cursor used to execute the rename
    """
    rename_sql = 'UPDATE system SET name=%s WHERE name=%s'
    cursor.execute(rename_sql, ("agiletools_version", "taskboard_schema"))
normal
|
{
"blob_id": "56ed5bb22d77f4d8c061f97d832a60ed9a106549",
"index": 5231,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef do_upgrade(env, ver, cursor):\n \"\"\"Change schema name from taskboard_schema to agiletools_version\n \"\"\"\n cursor.execute('UPDATE system SET name=%s WHERE name=%s', (\n 'agiletools_version', 'taskboard_schema'))\n",
"step-3": "from trac.db import DatabaseManager\n\n\ndef do_upgrade(env, ver, cursor):\n \"\"\"Change schema name from taskboard_schema to agiletools_version\n \"\"\"\n cursor.execute('UPDATE system SET name=%s WHERE name=%s', (\n 'agiletools_version', 'taskboard_schema'))\n",
"step-4": "from trac.db import DatabaseManager\n\ndef do_upgrade(env, ver, cursor):\n \"\"\"Change schema name from taskboard_schema to agiletools_version\n \"\"\"\n cursor.execute('UPDATE system SET name=%s WHERE name=%s',\n (\"agiletools_version\", \"taskboard_schema\"))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
def dir_create(path):
"""创造新的文件夹。
:param path: 文件夹路径
:return:
"""
if os.path.exists(path) and os.listdir(path) != []:
shutil.rmtree(path)
os.makedirs(path)
if not os.path.exists(path):
os.makedirs(path)
def read_dicom(path):
"""读取一个病例所有的slices,并转成一个720*720*720的numpy.array.
:param path: 一个病例dcm路径
:return:
"""
print(os.path.basename(path))
pi = os.path.basename(path).split('_')[1]
dcm_size = len(glob.glob(path + '/*.dcm'))
dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for
dicom_slicei in range(1, dcm_size + 1)]
length = int(len(dcms))
print(length)
dcm_f = pydicom.read_file(dcms[0]).pixel_array
dcm_size = max(max(dcm_f.shape), 720)
dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)
for dcmi in range(len(dcms)):
cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)
cdcm -= np.mean(cdcm)
cdcm /= np.std(cdcm)
dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.
shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +
cdcm.shape[1] // 2, dcmi] = cdcm
return dcm_img
def show_image(input_dir):
"""随机展示一个病例一些病理图像。
:param input_dir:
:return:
"""
for casei in os.listdir(input_dir)[5:6]:
pi = casei.split('_')[1]
dcm_img = read_dicom(input_dir + '/' + casei)
print('Dcm shape: ', dcm_img.shape)
choices = range(330, 350)
for i in choices:
fig = plt.figure(num=i, figsize=(10, 10))
ax = fig.add_subplot(111)
img = ax.imshow(dcm_img[:, :, i], cmap='gray')
ax.set_title(pi + '_' + str(i))
plt.colorbar(img)
plt.show()
def show_image_avail(input_dir):
"""随机展示一个位置的一些有标注的病例图像。
:param input_dir:
:return:
"""
choices = random.sample(os.listdir(input_dir), 15)
for file in choices:
image_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(figsize=(10, 5))
ax1 = fig.add_subplot(111)
img1 = ax1.imshow(image_numpy, cmap='gray')
ax1.set_title(str(file))
plt.colorbar(img1)
plt.show()
def show_mask(input_dir):
"""随机展示一个位置标注的mask,2个channels.
:param input_dir:
:return:
"""
index = 0
choices = random.sample(os.listdir(input_dir), 10)
for file in choices:
mask_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(num=index, figsize=(10, 5))
ax1 = fig.add_subplot(211)
ax1.imshow(mask_numpy[:, :, 0], cmap='gray')
ax1.set_title(str(file) + '_outer')
ax2 = fig.add_subplot(212)
ax2.imshow(mask_numpy[:, :, 1], cmap='gray')
ax2.set_title(str(file) + '_luman')
plt.show()
index += 1
def show_mask_circle(input_dir):
"""随机展示一个位置标注的mask环。
:param input_dir:
:return:
"""
choices = random.sample(os.listdir(input_dir), 10)
for file in choices:
mask_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(figsize=(10, 5))
ax1 = fig.add_subplot(111)
img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')
ax1.set_title(str(file) + '_circle')
plt.colorbar(img1)
plt.show()
<|reserved_special_token_0|>
def main(args):
image_input_dir = args.datasets_path
circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'
show_mask_circle(circle_mask_dir)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def dir_create(path):
"""创造新的文件夹。
:param path: 文件夹路径
:return:
"""
if os.path.exists(path) and os.listdir(path) != []:
shutil.rmtree(path)
os.makedirs(path)
if not os.path.exists(path):
os.makedirs(path)
def read_dicom(path):
"""读取一个病例所有的slices,并转成一个720*720*720的numpy.array.
:param path: 一个病例dcm路径
:return:
"""
print(os.path.basename(path))
pi = os.path.basename(path).split('_')[1]
dcm_size = len(glob.glob(path + '/*.dcm'))
dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for
dicom_slicei in range(1, dcm_size + 1)]
length = int(len(dcms))
print(length)
dcm_f = pydicom.read_file(dcms[0]).pixel_array
dcm_size = max(max(dcm_f.shape), 720)
dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)
for dcmi in range(len(dcms)):
cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)
cdcm -= np.mean(cdcm)
cdcm /= np.std(cdcm)
dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.
shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +
cdcm.shape[1] // 2, dcmi] = cdcm
return dcm_img
def show_image(input_dir):
"""随机展示一个病例一些病理图像。
:param input_dir:
:return:
"""
for casei in os.listdir(input_dir)[5:6]:
pi = casei.split('_')[1]
dcm_img = read_dicom(input_dir + '/' + casei)
print('Dcm shape: ', dcm_img.shape)
choices = range(330, 350)
for i in choices:
fig = plt.figure(num=i, figsize=(10, 10))
ax = fig.add_subplot(111)
img = ax.imshow(dcm_img[:, :, i], cmap='gray')
ax.set_title(pi + '_' + str(i))
plt.colorbar(img)
plt.show()
def show_image_avail(input_dir):
"""随机展示一个位置的一些有标注的病例图像。
:param input_dir:
:return:
"""
choices = random.sample(os.listdir(input_dir), 15)
for file in choices:
image_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(figsize=(10, 5))
ax1 = fig.add_subplot(111)
img1 = ax1.imshow(image_numpy, cmap='gray')
ax1.set_title(str(file))
plt.colorbar(img1)
plt.show()
def show_mask(input_dir):
"""随机展示一个位置标注的mask,2个channels.
:param input_dir:
:return:
"""
index = 0
choices = random.sample(os.listdir(input_dir), 10)
for file in choices:
mask_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(num=index, figsize=(10, 5))
ax1 = fig.add_subplot(211)
ax1.imshow(mask_numpy[:, :, 0], cmap='gray')
ax1.set_title(str(file) + '_outer')
ax2 = fig.add_subplot(212)
ax2.imshow(mask_numpy[:, :, 1], cmap='gray')
ax2.set_title(str(file) + '_luman')
plt.show()
index += 1
def show_mask_circle(input_dir):
"""随机展示一个位置标注的mask环。
:param input_dir:
:return:
"""
choices = random.sample(os.listdir(input_dir), 10)
for file in choices:
mask_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(figsize=(10, 5))
ax1 = fig.add_subplot(111)
img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')
ax1.set_title(str(file) + '_circle')
plt.colorbar(img1)
plt.show()
def show_image_mask(image_path, mask_path):
"""随机展示一个位置的病例图像及其标注。
:param image_path:
:param mask_path:
:return:
"""
files_choice = random.sample(os.listdir(image_path), 10)
for file_name in files_choice:
image_numpy = np.load(image_path + '/' + file_name)
mask_numpy = np.load(mask_path + '/' + file_name)
fig = plt.figure(figsize=(10, 5))
ax1 = fig.add_subplot(211)
img1 = ax1.imshow(image_numpy, cmap='gray')
ax1.set_title(str(file_name))
plt.colorbar(img1)
ax2 = fig.add_subplot(212)
img2 = ax2.imshow(mask_numpy, cmap='gray')
plt.colorbar(img2)
plt.show()
def main(args):
image_input_dir = args.datasets_path
circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'
show_mask_circle(circle_mask_dir)
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def dir_create(path):
"""创造新的文件夹。
:param path: 文件夹路径
:return:
"""
if os.path.exists(path) and os.listdir(path) != []:
shutil.rmtree(path)
os.makedirs(path)
if not os.path.exists(path):
os.makedirs(path)
def read_dicom(path):
"""读取一个病例所有的slices,并转成一个720*720*720的numpy.array.
:param path: 一个病例dcm路径
:return:
"""
print(os.path.basename(path))
pi = os.path.basename(path).split('_')[1]
dcm_size = len(glob.glob(path + '/*.dcm'))
dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for
dicom_slicei in range(1, dcm_size + 1)]
length = int(len(dcms))
print(length)
dcm_f = pydicom.read_file(dcms[0]).pixel_array
dcm_size = max(max(dcm_f.shape), 720)
dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)
for dcmi in range(len(dcms)):
cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)
cdcm -= np.mean(cdcm)
cdcm /= np.std(cdcm)
dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.
shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +
cdcm.shape[1] // 2, dcmi] = cdcm
return dcm_img
def show_image(input_dir):
"""随机展示一个病例一些病理图像。
:param input_dir:
:return:
"""
for casei in os.listdir(input_dir)[5:6]:
pi = casei.split('_')[1]
dcm_img = read_dicom(input_dir + '/' + casei)
print('Dcm shape: ', dcm_img.shape)
choices = range(330, 350)
for i in choices:
fig = plt.figure(num=i, figsize=(10, 10))
ax = fig.add_subplot(111)
img = ax.imshow(dcm_img[:, :, i], cmap='gray')
ax.set_title(pi + '_' + str(i))
plt.colorbar(img)
plt.show()
def show_image_avail(input_dir):
"""随机展示一个位置的一些有标注的病例图像。
:param input_dir:
:return:
"""
choices = random.sample(os.listdir(input_dir), 15)
for file in choices:
image_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(figsize=(10, 5))
ax1 = fig.add_subplot(111)
img1 = ax1.imshow(image_numpy, cmap='gray')
ax1.set_title(str(file))
plt.colorbar(img1)
plt.show()
def show_mask(input_dir):
"""随机展示一个位置标注的mask,2个channels.
:param input_dir:
:return:
"""
index = 0
choices = random.sample(os.listdir(input_dir), 10)
for file in choices:
mask_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(num=index, figsize=(10, 5))
ax1 = fig.add_subplot(211)
ax1.imshow(mask_numpy[:, :, 0], cmap='gray')
ax1.set_title(str(file) + '_outer')
ax2 = fig.add_subplot(212)
ax2.imshow(mask_numpy[:, :, 1], cmap='gray')
ax2.set_title(str(file) + '_luman')
plt.show()
index += 1
def show_mask_circle(input_dir):
"""随机展示一个位置标注的mask环。
:param input_dir:
:return:
"""
choices = random.sample(os.listdir(input_dir), 10)
for file in choices:
mask_numpy = np.load(input_dir + '/' + file)
fig = plt.figure(figsize=(10, 5))
ax1 = fig.add_subplot(111)
img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')
ax1.set_title(str(file) + '_circle')
plt.colorbar(img1)
plt.show()
def show_image_mask(image_path, mask_path):
"""随机展示一个位置的病例图像及其标注。
:param image_path:
:param mask_path:
:return:
"""
files_choice = random.sample(os.listdir(image_path), 10)
for file_name in files_choice:
image_numpy = np.load(image_path + '/' + file_name)
mask_numpy = np.load(mask_path + '/' + file_name)
fig = plt.figure(figsize=(10, 5))
ax1 = fig.add_subplot(211)
img1 = ax1.imshow(image_numpy, cmap='gray')
ax1.set_title(str(file_name))
plt.colorbar(img1)
ax2 = fig.add_subplot(212)
img2 = ax2.imshow(mask_numpy, cmap='gray')
plt.colorbar(img2)
plt.show()
def main(args):
image_input_dir = args.datasets_path
circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'
show_mask_circle(circle_mask_dir)
if __name__ == '__main__':
args = parse_args()
main(args)
<|reserved_special_token_1|>
import os
import pydicom
import glob
import shutil
import random
import numpy as np
import cv2
import skimage.io as io
from data_Parameter import parse_args
import matplotlib.pyplot as plt
def dir_create(path):
    """Ensure *path* exists as an empty directory.

    A non-empty existing directory is wiped and recreated; a missing path is
    created; an existing empty directory is left untouched.

    :param path: directory path
    :return: None
    """
    existed = os.path.exists(path)
    if existed and os.listdir(path) != []:
        # Wipe a non-empty directory and start fresh.
        shutil.rmtree(path)
        os.makedirs(path)
    elif not existed:
        os.makedirs(path)
def read_dicom(path):
    """Load every DICOM slice of one case into a cubic float32 volume.

    (Translated from the original Chinese docstring: reads all slices of a
    case and stacks them into a 720^3-or-larger numpy array.)

    The case directory name is expected to look like ``<prefix>_<pid>...``
    and to contain files named ``E<pid>S101I<i>.dcm`` for i = 1..N. Each
    slice is z-score normalized individually, then pasted centered into the
    (x, y) plane of the output at depth i.

    :param path: directory holding the .dcm files of a single case
    :return: float32 array of shape (S, S, S), S = max(first slice dim, 720)
    """
    print(os.path.basename(path))
    # Patient id is the second '_'-separated token of the directory name.
    pi = os.path.basename(path).split('_')[1]
    dcm_size = len(glob.glob(path + '/*.dcm'))
    dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for
            dicom_slicei in range(1, dcm_size + 1)]
    length = int(len(dcms))
    print(length)
    # Size the cube from the first slice, but never smaller than 720.
    dcm_f = pydicom.read_file(dcms[0]).pixel_array
    dcm_size = max(max(dcm_f.shape), 720)
    dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)
    for dcmi in range(len(dcms)):
        cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)
        # Per-slice z-score normalization (zero mean, unit std).
        cdcm -= np.mean(cdcm)
        cdcm /= np.std(cdcm)
        # Paste the slice centered in the (x, y) plane at depth dcmi.
        # NOTE(review): both bounds use floor division, so an odd slice
        # dimension yields a (dim-1)-long window and this assignment would
        # raise — presumably slices are even-sized (e.g. 512x512); confirm
        # against the source data.
        dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.shape[0] // 2,
                dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 + cdcm.shape[1] // 2,
                dcmi] = cdcm
    return dcm_img
def show_image(input_dir):
    """Show slices 330-349 of the sixth case directory under *input_dir*.

    :param input_dir: directory containing per-case DICOM sub-directories
    :return: None
    """
    for case_name in os.listdir(input_dir)[5:6]:
        patient_id = case_name.split('_')[1]
        volume = read_dicom(input_dir + '/' + case_name)
        print('Dcm shape: ', volume.shape)
        for slice_idx in range(330, 350):
            figure = plt.figure(num=slice_idx, figsize=(10, 10))
            axis = figure.add_subplot(111)
            handle = axis.imshow(volume[:, :, slice_idx], cmap='gray')
            axis.set_title(patient_id + '_' + str(slice_idx))
            plt.colorbar(handle)
            plt.show()
def show_image_avail(input_dir):
    """Display 15 randomly sampled annotated case images from *input_dir*.

    :param input_dir: directory of image .npy files
    :return: None
    """
    for fname in random.sample(os.listdir(input_dir), 15):
        arr = np.load(input_dir + '/' + fname)
        figure = plt.figure(figsize=(10, 5))
        axis = figure.add_subplot(111)
        handle = axis.imshow(arr, cmap='gray')
        axis.set_title(str(fname))
        plt.colorbar(handle)
        plt.show()
def show_mask(input_dir):
    """Display 10 random two-channel masks (channel 0: outer, channel 1: lumen).

    :param input_dir: directory of mask .npy files
    :return: None
    """
    for fig_num, fname in enumerate(random.sample(os.listdir(input_dir), 10)):
        mask = np.load(input_dir + '/' + fname)
        figure = plt.figure(num=fig_num, figsize=(10, 5))
        upper = figure.add_subplot(211)
        upper.imshow(mask[:, :, 0], cmap='gray')
        upper.set_title(str(fname) + '_outer')
        lower = figure.add_subplot(212)
        lower.imshow(mask[:, :, 1], cmap='gray')
        lower.set_title(str(fname) + '_luman')
        plt.show()
def show_mask_circle(input_dir):
    """Display 10 random ring-style mask arrays from *input_dir*.

    :param input_dir: directory of single-channel mask .npy files
    :return: None
    """
    for fname in random.sample(os.listdir(input_dir), 10):
        ring = np.load(input_dir + '/' + fname)
        figure = plt.figure(figsize=(10, 5))
        axis = figure.add_subplot(111)
        handle = axis.imshow(ring[:, :], cmap='gray')
        axis.set_title(str(fname) + '_circle')
        plt.colorbar(handle)
        plt.show()
def show_image_mask(image_path, mask_path):
    """Display 10 randomly chosen case images together with their masks.

    :param image_path: directory of image .npy files
    :param mask_path: directory of mask .npy files with matching file names
    :return: None
    """
    for name in random.sample(os.listdir(image_path), 10):
        img_arr = np.load(image_path + '/' + name)
        msk_arr = np.load(mask_path + '/' + name)
        figure = plt.figure(figsize=(10, 5))
        top = figure.add_subplot(211)
        shown = top.imshow(img_arr, cmap='gray')
        top.set_title(str(name))
        plt.colorbar(shown)
        bottom = figure.add_subplot(212)
        plt.colorbar(bottom.imshow(msk_arr, cmap='gray'))
        plt.show()
def main(args):
    """Entry point: display randomly sampled ring masks from the positive ICAR split.

    :param args: parsed CLI namespace; must provide ``datasets_path`` and
        ``circle_mask_save_sep`` (see data_Parameter.parse_args).
    """
    image_input_dir = args.datasets_path  # NOTE(review): unused here — presumably kept for other show_* calls; confirm
    circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'
    show_mask_circle(circle_mask_dir)
if __name__ == '__main__':
    # Parse command-line options and run the viewer.
    args = parse_args()
    main(args)
<|reserved_special_token_1|>
# !/usr/bin/env python3
# -*- coding:utf-8 -*-
# @Time : 2021/05/08 20:06
# @Author : Yi
# @FileName: show_slices.py
import os
import pydicom
import glob
import shutil
import random
import numpy as np
import cv2
import skimage.io as io
from data_Parameter import parse_args
import matplotlib.pyplot as plt
def dir_create(path):
    """Create a fresh directory at *path*.

    A non-empty existing directory is removed first, so the result is always
    an existing (possibly emptied) directory; an empty existing directory is
    kept as-is, matching the original behavior.

    :param path: directory path to (re)create
    :return: None
    """
    # Wipe only a non-empty existing directory.
    if os.path.exists(path) and os.listdir(path):
        shutil.rmtree(path)
    # exist_ok covers the "already exists but empty" case in one call,
    # replacing the original's redundant second existence check.
    os.makedirs(path, exist_ok=True)
def read_dicom(path):
    """Read every DICOM slice of one case and stack them into a cubic array.

    Each slice is z-score normalised and pasted centred in the (row, col)
    plane, with the slice number on the third axis.

    :param path: directory of one case, named like ``*_<patientID>`` and
        containing files ``E<pi>S101I<n>.dcm``
    :return: np.float32 array of shape (size, size, size),
        where size = max(first slice dimension, 720)
    """
    print(os.path.basename(path))
    # Patient id is the token after the first underscore in the directory name.
    pi = os.path.basename(path).split("_")[1]
    dcm_size = len(glob.glob(path + "/*.dcm"))
    dcms = [
        path + "/E" + pi + "S101I%d.dcm" % dicom_slicei
        for dicom_slicei in range(1, dcm_size + 1)
    ]
    length = int(len(dcms))
    print(length)
    # The first slice decides the cube edge (at least 720 pixels).
    dcm_f = pydicom.read_file(dcms[0]).pixel_array
    dcm_size = max(max(dcm_f.shape), 720)
    # print(dcm_f.shape)
    dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)
    for dcmi in range(len(dcms)):
        cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)
        # Per-slice z-score normalisation (zero mean, unit variance).
        cdcm -= np.mean(cdcm)
        cdcm /= np.std(cdcm)
        # Paste the slice centred in the plane at depth index dcmi.
        dcm_img[
            dcm_size // 2 - cdcm.shape[0] // 2: dcm_size // 2 + cdcm.shape[0] // 2,
            dcm_size // 2 - cdcm.shape[1] // 2: dcm_size // 2 + cdcm.shape[1] // 2,
            dcmi,
        ] = cdcm
    return dcm_img
def show_image(input_dir):
    """Show slices 330-349 of the sixth case directory under *input_dir*.

    :param input_dir: directory containing per-case DICOM sub-directories
    :return: None
    """
    # Original note: cases "P556", "P576", "P887" are 160*640*640 specials.
    for case_name in os.listdir(input_dir)[5:6]:
        patient_id = case_name.split("_")[1]
        volume = read_dicom(input_dir + "/" + case_name)
        print("Dcm shape: ", volume.shape)
        for slice_idx in range(330, 350):
            figure = plt.figure(num=slice_idx, figsize=(10, 10))
            axis = figure.add_subplot(111)
            handle = axis.imshow(volume[:, :, slice_idx], cmap='gray')
            axis.set_title(patient_id + '_' + str(slice_idx))
            plt.colorbar(handle)
            plt.show()
def show_image_avail(input_dir):
    """Display 15 randomly sampled annotated case images from *input_dir*.

    :param input_dir: directory of image .npy files
    :return: None
    """
    for fname in random.sample(os.listdir(input_dir), 15):
        arr = np.load(input_dir + '/' + fname)
        figure = plt.figure(figsize=(10, 5))
        axis = figure.add_subplot(111)
        handle = axis.imshow(arr, cmap='gray')
        axis.set_title(str(fname))
        plt.colorbar(handle)
        plt.show()
def show_mask(input_dir):
    """Display 10 random two-channel masks (channel 0: outer, channel 1: lumen).

    :param input_dir: directory of mask .npy files
    :return: None
    """
    for fig_num, fname in enumerate(random.sample(os.listdir(input_dir), 10)):
        mask = np.load(input_dir + '/' + fname)
        figure = plt.figure(num=fig_num, figsize=(10, 5))
        upper = figure.add_subplot(211)
        upper.imshow(mask[:, :, 0], cmap='gray')
        upper.set_title(str(fname) + '_outer')
        lower = figure.add_subplot(212)
        lower.imshow(mask[:, :, 1], cmap='gray')
        lower.set_title(str(fname) + '_luman')
        plt.show()
def show_mask_circle(input_dir):
    """Display 10 random ring-style mask arrays from *input_dir*.

    :param input_dir: directory of single-channel mask .npy files
    :return: None
    """
    for fname in random.sample(os.listdir(input_dir), 10):
        ring = np.load(input_dir + '/' + fname)
        figure = plt.figure(figsize=(10, 5))
        axis = figure.add_subplot(111)
        handle = axis.imshow(ring[:, :], cmap='gray')
        axis.set_title(str(fname) + '_circle')
        plt.colorbar(handle)
        plt.show()
def show_image_mask(image_path, mask_path):
    """Display 10 randomly chosen case images together with their masks.

    :param image_path: directory of image .npy files
    :param mask_path: directory of mask .npy files with matching file names
    :return: None
    """
    for name in random.sample(os.listdir(image_path), 10):
        img_arr = np.load(image_path + '/' + name)
        msk_arr = np.load(mask_path + '/' + name)
        figure = plt.figure(figsize=(10, 5))
        top = figure.add_subplot(211)
        shown = top.imshow(img_arr, cmap='gray')
        top.set_title(str(name))
        plt.colorbar(shown)
        bottom = figure.add_subplot(212)
        plt.colorbar(bottom.imshow(msk_arr, cmap='gray'))
        plt.show()
def main(args):
    """Entry point: display randomly sampled ring masks from the positive ICAR split.

    :param args: parsed CLI namespace; must provide ``datasets_path`` and
        ``circle_mask_save_sep`` (see data_Parameter.parse_args).
    """
    image_input_dir = args.datasets_path  # NOTE(review): unused unless show_image below is re-enabled

    # image_avail_dir = args.image_save_sep_position + '/ICAR/positive'
    # image_avail_dir = args.image_save_sep_position + '/ICAR/negative'

    # circle_mask_dir=args.circle_mask_save_sep+'/ICAR/positive'
    circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'

    # show_image(image_input_dir)  # randomly display some case images
    # show_image_avail(image_avail_dir)
    show_mask_circle(circle_mask_dir)

    # show_image_mask(image_avail_dir,circle_mask_dir)


if __name__ == '__main__':
    args = parse_args()
    main(args)
|
flexible
|
{
"blob_id": "4905b820f33619a80a9915d0603bc39e0d0368d9",
"index": 6175,
"step-1": "<mask token>\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n 
ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\n<mask token>\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n 
ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_image_mask(image_path, mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n files_choice = random.sample(os.listdir(image_path), 10)\n for file_name in files_choice:\n image_numpy = np.load(image_path + '/' + file_name)\n mask_numpy = np.load(mask_path + '/' + file_name)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file_name))\n plt.colorbar(img1)\n ax2 = fig.add_subplot(212)\n img2 = ax2.imshow(mask_numpy, cmap='gray')\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n 
ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_image_mask(image_path, mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n files_choice = random.sample(os.listdir(image_path), 10)\n for file_name in files_choice:\n image_numpy = np.load(image_path + '/' + file_name)\n mask_numpy = np.load(mask_path + '/' + file_name)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file_name))\n plt.colorbar(img1)\n ax2 = fig.add_subplot(212)\n img2 = ax2.imshow(mask_numpy, cmap='gray')\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\nif __name__ == '__main__':\n args = parse_args()\n main(args)\n",
"step-4": "import os\nimport pydicom\nimport glob\nimport shutil\nimport random\nimport numpy as np\nimport cv2\nimport skimage.io as io\nfrom data_Parameter import parse_args\nimport matplotlib.pyplot as plt\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if os.path.exists(path) and os.listdir(path) != []:\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n pi = os.path.basename(path).split('_')[1]\n dcm_size = len(glob.glob(path + '/*.dcm'))\n dcms = [(path + '/E' + pi + 'S101I%d.dcm' % dicom_slicei) for\n dicom_slicei in range(1, dcm_size + 1)]\n length = int(len(dcms))\n print(length)\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n dcm_img[dcm_size // 2 - cdcm.shape[0] // 2:dcm_size // 2 + cdcm.\n shape[0] // 2, dcm_size // 2 - cdcm.shape[1] // 2:dcm_size // 2 +\n cdcm.shape[1] // 2, dcmi] = cdcm\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split('_')[1]\n dcm_img = read_dicom(input_dir + '/' + casei)\n print('Dcm shape: ', dcm_img.shape)\n choices = range(330, 350)\n for i in choices:\n fig = plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img = ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 15)\n for file 
in choices:\n image_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1 = ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_image_mask(image_path, mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n files_choice = random.sample(os.listdir(image_path), 10)\n for file_name in files_choice:\n image_numpy = np.load(image_path + '/' + file_name)\n mask_numpy = np.load(mask_path + '/' + file_name)\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n img1 = ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file_name))\n plt.colorbar(img1)\n ax2 = fig.add_subplot(212)\n img2 = ax2.imshow(mask_numpy, cmap='gray')\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n show_mask_circle(circle_mask_dir)\n\n\nif __name__ == '__main__':\n 
args = parse_args()\n main(args)\n",
"step-5": "# !/usr/bin/env python3\n# -*- coding:utf-8 -*-\n\n# @Time : 2021/05/08 20:06\n# @Author : Yi\n# @FileName: show_slices.py\n\nimport os\nimport pydicom\nimport glob\nimport shutil\nimport random\nimport numpy as np\nimport cv2\nimport skimage.io as io\n\nfrom data_Parameter import parse_args\nimport matplotlib.pyplot as plt\n\n\ndef dir_create(path):\n \"\"\"创造新的文件夹。\n\n :param path: 文件夹路径\n :return:\n \"\"\"\n if (os.path.exists(path)) and (os.listdir(path) != []):\n shutil.rmtree(path)\n os.makedirs(path)\n if not os.path.exists(path):\n os.makedirs(path)\n\n\ndef read_dicom(path):\n \"\"\"读取一个病例所有的slices,并转成一个720*720*720的numpy.array.\n\n :param path: 一个病例dcm路径\n :return:\n \"\"\"\n print(os.path.basename(path))\n\n pi = os.path.basename(path).split(\"_\")[1]\n dcm_size = len(glob.glob(path + \"/*.dcm\"))\n dcms = [\n path + \"/E\" + pi + \"S101I%d.dcm\" % dicom_slicei\n for dicom_slicei in range(1, dcm_size + 1)\n ]\n\n length = int(len(dcms))\n print(length)\n\n dcm_f = pydicom.read_file(dcms[0]).pixel_array\n dcm_size = max(max(dcm_f.shape), 720)\n # print(dcm_f.shape)\n\n dcm_img = np.zeros((dcm_size, dcm_size, dcm_size), dtype=np.float32)\n\n for dcmi in range(len(dcms)):\n cdcm = pydicom.read_file(dcms[dcmi]).pixel_array.astype(np.float32)\n\n cdcm -= np.mean(cdcm)\n cdcm /= np.std(cdcm)\n\n dcm_img[\n dcm_size // 2 - cdcm.shape[0] // 2: dcm_size // 2 + cdcm.shape[0] // 2,\n dcm_size // 2 - cdcm.shape[1] // 2: dcm_size // 2 + cdcm.shape[1] // 2,\n dcmi,\n ] = cdcm\n\n return dcm_img\n\n\ndef show_image(input_dir):\n \"\"\"随机展示一个病例一些病理图像。\n\n :param input_dir:\n :return:\n \"\"\"\n\n # special cases: \"P556\", \"P576\", \"P887\",160*640*640\n for casei in os.listdir(input_dir)[5:6]:\n pi = casei.split(\"_\")[1]\n dcm_img = read_dicom(input_dir + \"/\" + casei)\n print(\"Dcm shape: \", dcm_img.shape)\n\n # choices = random.sample(list(np.arange(0, 720, 1)), 10)\n # choices.append(316)\n\n choices = range(330,350)\n\n for i in choices:\n fig = 
plt.figure(num=i, figsize=(10, 10))\n ax = fig.add_subplot(111)\n img=ax.imshow(dcm_img[:, :, i], cmap='gray')\n ax.set_title(pi + '_' + str(i))\n plt.colorbar(img)\n plt.show()\n\n\ndef show_image_avail(input_dir):\n \"\"\"随机展示一个位置的一些有标注的病例图像。\n\n :param input_dir:\n :return:\n \"\"\"\n\n choices = random.sample(os.listdir(input_dir), 15)\n for file in choices:\n image_numpy = np.load(input_dir + '/' + file)\n\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1=ax1.imshow(image_numpy, cmap='gray')\n ax1.set_title(str(file))\n plt.colorbar(img1)\n plt.show()\n\n\ndef show_mask(input_dir):\n \"\"\"随机展示一个位置标注的mask,2个channels.\n\n :param input_dir:\n :return:\n \"\"\"\n\n index = 0\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n\n fig = plt.figure(num=index, figsize=(10, 5))\n ax1 = fig.add_subplot(211)\n ax1.imshow(mask_numpy[:, :, 0], cmap='gray')\n ax1.set_title(str(file) + '_outer')\n ax2 = fig.add_subplot(212)\n ax2.imshow(mask_numpy[:, :, 1], cmap='gray')\n ax2.set_title(str(file) + '_luman')\n plt.show()\n index += 1\n\n\ndef show_mask_circle(input_dir):\n \"\"\"随机展示一个位置标注的mask环。\n\n :param input_dir:\n :return:\n \"\"\"\n\n choices = random.sample(os.listdir(input_dir), 10)\n for file in choices:\n mask_numpy = np.load(input_dir + '/' + file)\n\n fig = plt.figure(figsize=(10, 5))\n ax1 = fig.add_subplot(111)\n img1=ax1.imshow(mask_numpy[:, :], cmap='gray')\n ax1.set_title(str(file) + '_circle')\n plt.colorbar(img1)\n\n plt.show()\n\n\ndef show_image_mask(image_path,mask_path):\n \"\"\"随机展示一个位置的病例图像及其标注。\n\n :param image_path:\n :param mask_path:\n :return:\n \"\"\"\n\n files_choice=random.sample(os.listdir(image_path),10)\n\n for file_name in files_choice:\n image_numpy=np.load(image_path+'/'+file_name)\n mask_numpy =np.load(mask_path+'/'+file_name)\n\n fig =plt.figure(figsize=(10,5))\n ax1 =fig.add_subplot(211)\n img1=ax1.imshow(image_numpy,cmap='gray')\n 
ax1.set_title(str(file_name))\n plt.colorbar(img1)\n\n ax2=fig.add_subplot(212)\n img2=ax2.imshow(mask_numpy,cmap='gray')\n # ax2.set_title(str(file_name))\n plt.colorbar(img2)\n plt.show()\n\n\ndef main(args):\n image_input_dir = args.datasets_path\n\n # image_avail_dir = args.image_save_sep_position + '/ICAR/positive'\n # image_avail_dir = args.image_save_sep_position + '/ICAR/negative'\n\n # circle_mask_dir=args.circle_mask_save_sep+'/ICAR/positive'\n circle_mask_dir = args.circle_mask_save_sep + '/ICAR/positive'\n\n # show_image(image_input_dir) # 随机展示一些病例图像。\n # show_image_avail(image_avail_dir)\n show_mask_circle(circle_mask_dir)\n\n # show_image_mask(image_avail_dir,circle_mask_dir)\n\n\nif __name__ == '__main__':\n args = parse_args()\n main(args)",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
import operator
def group_by_owners(files):
    """Invert a ``{filename: owner}`` mapping into ``{owner: [filenames]}``.

    The original body only printed debug output and returned ``None``; this
    implements the grouping the function's name promises. File names keep
    the dict's insertion order within each owner's list.

    :param files: mapping of file name to owner name
    :return: dict mapping each owner to the list of their file names
    """
    owners = {}
    for filename, owner in files.items():
        # setdefault creates the owner's bucket on first sight.
        owners.setdefault(owner, []).append(filename)
    return owners
# Sample mapping of file name -> owner used for a quick manual check.
files = {'Input.txt': 'Randy', 'Code.py': 'Stan', 'Output.txt': 'Randy'}
print(group_by_owners(files))
|
normal
|
{
"blob_id": "4843239a41fe1ecff6c8c3a97aceef76a3785647",
"index": 7334,
"step-1": "<mask token>\n\n\ndef group_by_owners(files):\n print(files, type(files))\n for k, v in files.items():\n print(k, v)\n for f in files:\n print(f[0])\n for g in v:\n print(g)\n _files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)\n print('Sorted: ', _files, type(_files))\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef group_by_owners(files):\n print(files, type(files))\n for k, v in files.items():\n print(k, v)\n for f in files:\n print(f[0])\n for g in v:\n print(g)\n _files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)\n print('Sorted: ', _files, type(_files))\n\n\n<mask token>\nprint(group_by_owners(files))\n",
"step-3": "<mask token>\n\n\ndef group_by_owners(files):\n print(files, type(files))\n for k, v in files.items():\n print(k, v)\n for f in files:\n print(f[0])\n for g in v:\n print(g)\n _files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)\n print('Sorted: ', _files, type(_files))\n\n\nfiles = {'Input.txt': 'Randy', 'Code.py': 'Stan', 'Output.txt': 'Randy'}\nprint(group_by_owners(files))\n",
"step-4": "import operator\n\n\ndef group_by_owners(files):\n print(files, type(files))\n for k, v in files.items():\n print(k, v)\n for f in files:\n print(f[0])\n for g in v:\n print(g)\n _files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)\n print('Sorted: ', _files, type(_files))\n\n\nfiles = {'Input.txt': 'Randy', 'Code.py': 'Stan', 'Output.txt': 'Randy'}\nprint(group_by_owners(files))\n",
"step-5": "import operator\n\n\ndef group_by_owners(files):\n print(files, type(files))\n for k, v in files.items():\n # for v in k:\n print(k, v)\n # if k[v] == k[v]:\n # print(\"same\", v)\n for f in files:\n print(f[0])\n for g in v:\n print(g)\n _files = sorted(files.items(), key=operator.itemgetter(1), reverse=False)\n print(\"Sorted: \", _files, type(_files))\n\n # files = files.items()\n # print(files, type(files))\n\n # return None\n\n\nfiles = {\n 'Input.txt': 'Randy',\n 'Code.py': 'Stan',\n 'Output.txt': 'Randy'\n}\nprint(group_by_owners(files))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
"""
Urls for CAE_Web Audio_Visual app.
"""
from django.conf.urls import url
from . import views
# URL namespace for this app, used with reverse()/{% url %} lookups.
app_name = 'cae_web_audio_visual'
# No routes are registered yet; add url()/path() entries here as views land.
urlpatterns = [
]
|
normal
|
{
"blob_id": "5debc97e99bbd78b17e545896d718d4b0eac8519",
"index": 2430,
"step-1": "<mask token>\n",
"step-2": "<mask token>\napp_name = 'cae_web_audio_visual'\nurlpatterns = []\n",
"step-3": "<mask token>\nfrom django.conf.urls import url\nfrom . import views\napp_name = 'cae_web_audio_visual'\nurlpatterns = []\n",
"step-4": "\"\"\"\nUrls for CAE_Web Audio_Visual app.\n\"\"\"\n\nfrom django.conf.urls import url\n\nfrom . import views\n\n\napp_name = 'cae_web_audio_visual'\nurlpatterns = [\n\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
np.set_printoptions(suppress=True)
<|reserved_special_token_0|>
for image in path:
n1 = cv2.imread(image)
n2 = cv2.resize(n1, (244, 244))
images.append(n2)
print(image)
<|reserved_special_token_0|>
if prediction[0][0] > 0.8:
print('2분음표')
elif prediction[0][1] > 0.8:
print('4분음표')
elif prediction[0][2] > 0.8:
print('8분음표')
elif prediction[0][3] > 0.8:
print('16분음표')
else:
print('음표아님')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Suppress scientific notation when printing prediction arrays.
np.set_printoptions(suppress=True)
# Load the exported Keras note-classifier model.
model = tensorflow.keras.models.load_model('./converted_keras/keras_model.h5')
# Batch of one 224x224 RGB image, the model's expected input shape.
data = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)
path = glob.glob(
    '/Users/zjisuoo/Documents/zjisuoo_git/OurChord/00_NOTE_DATA/TEST/*.png')
images = []
for image in path:
    n1 = cv2.imread(image)
    # NOTE(review): resized to 244x244 while the model input above is 224x224 — confirm.
    n2 = cv2.resize(n1, (244, 244))
    images.append(n2)
    print(image)
# NOTE(review): runs after the loop, so only the last image is classified.
image_array = np.array(n2)
# Scale uint8 pixels to the [-1, 1] range.
normalized_image_array = image_array.astype(dtype=np.float32) / 127.0 - 1
# NOTE(review): overwrites the batched buffer with an unbatched array — verify predict() accepts this shape.
data = normalized_image_array
prediction = model.predict(data)
# Class order appears to be: half, quarter, eighth, sixteenth note, else "not a note".
if prediction[0][0] > 0.8:
    print('2분음표')
elif prediction[0][1] > 0.8:
    print('4분음표')
elif prediction[0][2] > 0.8:
    print('8분음표')
elif prediction[0][3] > 0.8:
    print('16분음표')
else:
    print('음표아님')
<|reserved_special_token_1|>
import tensorflow.keras
from PIL import Image, ImageOps
from os import listdir
from os.path import isfile, join
import numpy as np
import glob
import cv2
# Suppress scientific notation when printing prediction arrays.
np.set_printoptions(suppress=True)
# Load the exported Keras note-classifier model.
model = tensorflow.keras.models.load_model('./converted_keras/keras_model.h5')
# Batch of one 224x224 RGB image, the model's expected input shape.
data = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)
path = glob.glob(
    '/Users/zjisuoo/Documents/zjisuoo_git/OurChord/00_NOTE_DATA/TEST/*.png')
images = []
for image in path:
    n1 = cv2.imread(image)
    # NOTE(review): resized to 244x244 while the model input above is 224x224 — confirm.
    n2 = cv2.resize(n1, (244, 244))
    images.append(n2)
    print(image)
# NOTE(review): runs after the loop, so only the last image is classified.
image_array = np.array(n2)
# Scale uint8 pixels to the [-1, 1] range.
normalized_image_array = image_array.astype(dtype=np.float32) / 127.0 - 1
# NOTE(review): overwrites the batched buffer with an unbatched array — verify predict() accepts this shape.
data = normalized_image_array
prediction = model.predict(data)
# Class order appears to be: half, quarter, eighth, sixteenth note, else "not a note".
if prediction[0][0] > 0.8:
    print('2분음표')
elif prediction[0][1] > 0.8:
    print('4분음표')
elif prediction[0][2] > 0.8:
    print('8분음표')
elif prediction[0][3] > 0.8:
    print('16분음표')
else:
    print('음표아님')
<|reserved_special_token_1|>
import tensorflow.keras
from PIL import Image, ImageOps
from os import listdir
from os.path import isfile, join
import numpy as np
import glob
import cv2

np.set_printoptions(suppress=True)

# Load the exported Keras classifier for note-head images.
model = tensorflow.keras.models.load_model('./converted_keras/keras_model.h5')

path = glob.glob("/Users/zjisuoo/Documents/zjisuoo_git/OurChord/00_NOTE_DATA/TEST/*.png")
images = []

for image in path:
    n1 = cv2.imread(image)
    # BUGFIX: the model input was declared as 224x224, but frames were
    # resized to 244x244 — a shape the model cannot accept.
    n2 = cv2.resize(n1, (224, 224))
    images.append(n2)
    print(image)

# Turn the (last) image into a numpy array and normalize to [-1, 1].
image_array = np.array(n2)
normalized_image_array = (image_array.astype(dtype=np.float32) / 127.0) - 1

# BUGFIX: Keras predicts on batches — add the leading batch axis so the
# input has shape (1, 224, 224, 3) instead of (224, 224, 3).
data = np.expand_dims(normalized_image_array, axis=0)

# Run the inference. NOTE(review): only the last globbed image is
# classified; if every file should be classified, move this into the loop.
prediction = model.predict(data)

# First class with confidence > 0.8 wins; else "not a note".
if prediction[0][0] > 0.8:
    print("2분음표")
elif prediction[0][1] > 0.8:
    print("4분음표")
elif prediction[0][2] > 0.8:
    print("8분음표")
elif prediction[0][3] > 0.8:
    print("16분음표")
else:
    print("음표아님")
|
flexible
|
{
"blob_id": "13b69ec61d6b2129f1974ce7cae91c84100b3b58",
"index": 449,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nnp.set_printoptions(suppress=True)\n<mask token>\nfor image in path:\n n1 = cv2.imread(image)\n n2 = cv2.resize(n1, (244, 244))\n images.append(n2)\n print(image)\n<mask token>\nif prediction[0][0] > 0.8:\n print('2분음표')\nelif prediction[0][1] > 0.8:\n print('4분음표')\nelif prediction[0][2] > 0.8:\n print('8분음표')\nelif prediction[0][3] > 0.8:\n print('16분음표')\nelse:\n print('음표아님')\n",
"step-3": "<mask token>\nnp.set_printoptions(suppress=True)\nmodel = tensorflow.keras.models.load_model('./converted_keras/keras_model.h5')\ndata = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)\npath = glob.glob(\n '/Users/zjisuoo/Documents/zjisuoo_git/OurChord/00_NOTE_DATA/TEST/*.png')\nimages = []\nfor image in path:\n n1 = cv2.imread(image)\n n2 = cv2.resize(n1, (244, 244))\n images.append(n2)\n print(image)\nimage_array = np.array(n2)\nnormalized_image_array = image_array.astype(dtype=np.float32) / 127.0 - 1\ndata = normalized_image_array\nprediction = model.predict(data)\nif prediction[0][0] > 0.8:\n print('2분음표')\nelif prediction[0][1] > 0.8:\n print('4분음표')\nelif prediction[0][2] > 0.8:\n print('8분음표')\nelif prediction[0][3] > 0.8:\n print('16분음표')\nelse:\n print('음표아님')\n",
"step-4": "import tensorflow.keras\nfrom PIL import Image, ImageOps\nfrom os import listdir\nfrom os.path import isfile, join\nimport numpy as np\nimport glob\nimport cv2\nnp.set_printoptions(suppress=True)\nmodel = tensorflow.keras.models.load_model('./converted_keras/keras_model.h5')\ndata = np.ndarray(shape=(1, 224, 224, 3), dtype=np.float32)\npath = glob.glob(\n '/Users/zjisuoo/Documents/zjisuoo_git/OurChord/00_NOTE_DATA/TEST/*.png')\nimages = []\nfor image in path:\n n1 = cv2.imread(image)\n n2 = cv2.resize(n1, (244, 244))\n images.append(n2)\n print(image)\nimage_array = np.array(n2)\nnormalized_image_array = image_array.astype(dtype=np.float32) / 127.0 - 1\ndata = normalized_image_array\nprediction = model.predict(data)\nif prediction[0][0] > 0.8:\n print('2분음표')\nelif prediction[0][1] > 0.8:\n print('4분음표')\nelif prediction[0][2] > 0.8:\n print('8분음표')\nelif prediction[0][3] > 0.8:\n print('16분음표')\nelse:\n print('음표아님')\n",
"step-5": "import tensorflow.keras\nfrom PIL import Image, ImageOps\nfrom os import listdir\nfrom os.path import isfile, join\nimport numpy as np\nimport glob\nimport cv2\n\nnp.set_printoptions(suppress = True)\n\n# Load the model\nmodel = tensorflow.keras.models.load_model('./converted_keras/keras_model.h5')\n\n# Create the array of the right shape to feed into the keras model\n# The 'length' or number of images you can put into the array is\n# determined by the first position in the shape tuple, in this case 1.\ndata = np.ndarray(shape = (1, 224, 224, 3), dtype = np.float32)\n\npath = glob.glob(\"/Users/zjisuoo/Documents/zjisuoo_git/OurChord/00_NOTE_DATA/TEST/*.png\")\nimages = []\n\nfor image in path :\n n1 = cv2.imread(image)\n n2 = cv2.resize(n1, (244, 244))\n images.append(n2)\n\n print(image)\n\n#turn the image int a numpy array\nimage_array = np.array(n2)\n\n# Normalize the image\nnormalized_image_array = (image_array.astype(dtype = np.float32) / 127.0) - 1\n\n# Load the image into the array\ndata = normalized_image_array\n\n# run the inference\nprediction = model.predict(data)\n# print(prediction)\n\nif(prediction[0][0] > 0.8):\n print(\"2분음표\")\nelif(prediction[0][1] > 0.8):\n print(\"4분음표\")\nelif(prediction[0][2] > 0.8):\n print(\"8분음표\")\nelif(prediction[0][3] > 0.8):\n print(\"16분음표\")\nelse:\n print(\"음표아님\")",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Demo: default progress bar (100 steps, ~3 s at 0.03 s/step).
with alive_bar(100) as bar:
    for i in range(100):
        sleep(0.03)
        bar()
    # NOTE(review): this themed bar is opened while the first bar's context
    # is still active (nested), not after it — confirm that is intended.
    with alive_bar(200, bar='bubbles', spinner='notes2') as bar:
        for i in range(200):
            sleep(0.03)
            bar()
<|reserved_special_token_1|>
from alive_progress import alive_bar
from time import sleep
# Demo: a default-styled progress bar, then a themed one (bubbles bar,
# notes2 spinner) opened while the first context is still active —
# matching the original nesting exactly.
with alive_bar(100) as advance:
    for _ in range(100):
        sleep(0.03)
        advance()
    with alive_bar(200, bar='bubbles', spinner='notes2') as advance_themed:
        for _ in range(200):
            sleep(0.03)
            advance_themed()
<|reserved_special_token_1|>
from alive_progress import alive_bar
from time import sleep
with alive_bar(100) as bar:  # default bar style, 100 steps
    for i in range(100):
        sleep(0.03)
        bar()  # advance the bar by one consumed item
    # Themed bar: 'bubbles' bar with the 'notes2' spinner. NOTE(review):
    # opened inside the first bar's context (nested) — confirm intended.
    with alive_bar(200, bar='bubbles', spinner='notes2') as bar:
        for i in range(200):
            sleep(0.03)
            bar()  # advance the bar by one consumed item
|
flexible
|
{
"blob_id": "06f961c07695d1c312cb943afbfa64508a709c7e",
"index": 1076,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwith alive_bar(100) as bar:\n for i in range(100):\n sleep(0.03)\n bar()\n with alive_bar(200, bar='bubbles', spinner='notes2') as bar:\n for i in range(200):\n sleep(0.03)\n bar()\n",
"step-3": "from alive_progress import alive_bar\nfrom time import sleep\nwith alive_bar(100) as bar:\n for i in range(100):\n sleep(0.03)\n bar()\n with alive_bar(200, bar='bubbles', spinner='notes2') as bar:\n for i in range(200):\n sleep(0.03)\n bar()\n",
"step-4": "from alive_progress import alive_bar\nfrom time import sleep\n\nwith alive_bar(100) as bar: # default setting\n for i in range(100):\n sleep(0.03)\n bar() # call after consuming one item\n\n # using bubble bar and notes spinner\n with alive_bar(200, bar='bubbles', spinner='notes2') as bar:\n for i in range(200):\n sleep(0.03)\n bar() # call after consuming one item\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Exercise 28 - Sheet VI (5) - Report each student's weighted final grade
# using the given criterion (theory 60%, practice 40%) for a user-supplied
# number of students. User-facing prompts stay in Spanish by design.
numalumnos=int(input("Introduce el número total de alumnos:\n"))
print("Usa el punto '.' para los decimales")
# One iteration per student: read both partial grades and combine 60/40.
for contador in range(1,numalumnos+1):
    print(f"\nDatos del alumno número {contador} de {numalumnos}:")
    teorica=float(input("- Introduce la nota de la parte teórica: "))
    practica=float(input("- Introduce la nota de la parte practica: "))
    # Weighted final grade: 60% theory + 40% practice.
    nota=(teorica*60/100)+(practica*40/100)
    print(f"La nota final del alumno número {contador} es {nota:.2f}.\n")
print("Ya se han calculado todas las notas.")
|
normal
|
{
"blob_id": "f2056ff46ce6e38c3b6ca553bbdec7f59d60b198",
"index": 1417,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(\"Usa el punto '.' para los decimales\")\nfor contador in range(1, numalumnos + 1):\n print(f'\\nDatos del alumno número {contador} de {numalumnos}:')\n teorica = float(input('- Introduce la nota de la parte teórica: '))\n practica = float(input('- Introduce la nota de la parte practica: '))\n nota = teorica * 60 / 100 + practica * 40 / 100\n print(f'La nota final del alumno número {contador} es {nota:.2f}.\\n')\nprint('Ya se han calculado todas las notas.')\n",
"step-3": "numalumnos = int(input('Introduce el número total de alumnos:\\n'))\nprint(\"Usa el punto '.' para los decimales\")\nfor contador in range(1, numalumnos + 1):\n print(f'\\nDatos del alumno número {contador} de {numalumnos}:')\n teorica = float(input('- Introduce la nota de la parte teórica: '))\n practica = float(input('- Introduce la nota de la parte practica: '))\n nota = teorica * 60 / 100 + practica * 40 / 100\n print(f'La nota final del alumno número {contador} es {nota:.2f}.\\n')\nprint('Ya se han calculado todas las notas.')\n",
"step-4": "# Ejercicio 28 - Hoja VI (5) - Indicar la nota ponderada según el criterio dado\n# (parte teórica 60%, práctica 40%) de cada uno de un número determinado de alumnos\n\nnumalumnos=int(input(\"Introduce el número total de alumnos:\\n\"))\nprint(\"Usa el punto '.' para los decimales\")\nfor contador in range(1,numalumnos+1):\n print(f\"\\nDatos del alumno número {contador} de {numalumnos}:\")\n teorica=float(input(\"- Introduce la nota de la parte teórica: \"))\n practica=float(input(\"- Introduce la nota de la parte practica: \"))\n nota=(teorica*60/100)+(practica*40/100)\n print(f\"La nota final del alumno número {contador} es {nota:.2f}.\\n\")\nprint(\"Ya se han calculado todas las notas.\")\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def uppercase_first_letter(string: str) -> str:
    """Return *string* with its first character upper-cased.

    The empty string is returned unchanged.
    """
    if not string:
        return string
    return string[0].upper() + string[1:]
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def uppercase_first_letter(string: str) ->str:
    """Return *string* with its first character upper-cased (empty-safe)."""
    return string[0:1].upper() + string[1:]


# Read a space-separated line, capitalize each word, and re-join with single
# spaces: the immediately-invoked lambda yields '' before the first word and
# ' ' before every later one.
string_list: List[str] = input('Please, input string: ').split(' ')
result: str = ''
for i, value in enumerate(string_list):
    result += (lambda index: '' if index == 0 else ' ')(i
        ) + uppercase_first_letter(value)
print(result)
<|reserved_special_token_1|>
from typing import List
def uppercase_first_letter(string: str) -> str:
    """Upper-case the first character of *string*, leaving the rest intact.

    Slicing keeps this safe on the empty string.
    """
    head, tail = string[:1], string[1:]
    return head.upper() + tail
# Read a line, capitalize the first letter of every space-separated word,
# and print the words re-joined by single spaces. This replaces the old
# accumulate-with-an-immediately-invoked-lambda loop (quadratic string
# concatenation) with a single ' '.join — the output is identical.
string_list: List[str] = input('Please, input string: ').split(' ')
result: str = ' '.join(uppercase_first_letter(word) for word in string_list)
print(result)
|
flexible
|
{
"blob_id": "0555c577a8fb746cf2debb929d02b46cd3be4d7b",
"index": 1062,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef uppercase_first_letter(string: str) ->str:\n return string[0:1].upper() + string[1:]\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef uppercase_first_letter(string: str) ->str:\n return string[0:1].upper() + string[1:]\n\n\nstring_list: List[str] = input('Please, input string: ').split(' ')\nresult: str = ''\nfor i, value in enumerate(string_list):\n result += (lambda index: '' if index == 0 else ' ')(i\n ) + uppercase_first_letter(value)\nprint(result)\n",
"step-4": "from typing import List\n\n\ndef uppercase_first_letter(string: str) ->str:\n return string[0:1].upper() + string[1:]\n\n\nstring_list: List[str] = input('Please, input string: ').split(' ')\nresult: str = ''\nfor i, value in enumerate(string_list):\n result += (lambda index: '' if index == 0 else ' ')(i\n ) + uppercase_first_letter(value)\nprint(result)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from quantopian.algorithm import order_optimal_portfolio
from quantopian.algorithm import attach_pipeline, pipeline_output
from quantopian.pipeline import Pipeline
from quantopian.pipeline.data.builtin import USEquityPricing
from quantopian.pipeline.factors import SimpleMovingAverage
from quantopian.pipeline.filters import QTradableStocksUS
import quantopian.optimize as opt
from quantopian.pipeline.factors import Returns
def initialize(context):
    """One-time Quantopian algorithm setup: schedule the daily rebalance
    and attach the screening pipeline.

    NOTE(review): ``schedule_function``, ``date_rules`` and ``time_rules``
    are injected by the Quantopian runtime, not imported in this file.
    """
    # Run my_rebalance once per day, one minute before the market closes.
    #set_slippage(slippage.FixedSlippage(spread=0.0, volume_limit=1))
    #set_slippage(slippage.FixedBasisPointsSlippage(basis_points=0, volume_limit=100))
    #set_slippage(slippage.VolumeShareSlippage(0))
    schedule_function(
        my_rebalance,
        date_rules.every_day(),
        time_rules.market_close(minutes=1 )
    )
    # Create our pipeline and attach it to our algorithm.
    my_pipe = make_pipeline()
    attach_pipeline(my_pipe, 'my_pipeline')
def make_pipeline():
    """Build the daily screening pipeline.

    Flags the single worst 1-day return as the long candidate and the
    single best 1-day return as the short candidate, restricted to the
    QTradableStocksUS universe (window_length=2 spans two consecutive
    closes, i.e. one trading day's return).
    """
    #longs = Returns(window_length=2).percentile_between(0,20,mask=QTradableStocksUS())
    #shorts = Returns(window_length=2).percentile_between(80,100,mask=QTradableStocksUS())
    longs = Returns(window_length=2).bottom(1,mask=QTradableStocksUS())
    shorts = Returns(window_length=2).top(1,mask=QTradableStocksUS())
    return Pipeline(
        columns={
            'longs': longs,
            'shorts': shorts,
        },
        screen=QTradableStocksUS()& (shorts | longs)
    )
def compute_target_weights(context, data):
    """Map each security to its target portfolio weight.

    Longs split +50% of the portfolio equally, shorts split -50% equally,
    and any currently-held security that is in neither list (and is
    tradable) gets weight 0 so the rebalance exits it.

    :param context: algorithm context; reads ``longs``, ``shorts`` and
        ``portfolio.positions``.
    :param data: Quantopian data object; used only for ``can_trade``.
    :return: dict mapping security -> target weight.
    """
    weights = {}

    # Even weight per side; default to 0.0 so an empty side can never
    # leave the name unbound.
    long_weight = 0.5 / len(context.longs) if context.longs else 0.0
    short_weight = -0.5 / len(context.shorts) if context.shorts else 0.0

    # Exit held positions that are no longer in either target list.
    for security in context.portfolio.positions:
        if (security not in context.longs
                and security not in context.shorts
                and data.can_trade(security)):
            weights[security] = 0

    for security in context.longs:
        weights[security] = long_weight
    for security in context.shorts:
        weights[security] = short_weight

    return weights
def before_trading_start(context, data):
    """Refresh ``context.longs`` / ``context.shorts`` from the pipeline.

    Runs before each trading day: pulls the 'my_pipeline' output and keeps
    only the flagged securities that are currently tradable.
    """
    # Gets our pipeline output every day.
    pipe_results = pipeline_output('my_pipeline')

    # Securities flagged 'longs' that can actually be traded today.
    context.longs = [
        sec
        for sec in pipe_results[pipe_results['longs']].index.tolist()
        if data.can_trade(sec)
    ]

    # Securities flagged 'shorts' that can actually be traded today.
    context.shorts = [
        sec
        for sec in pipe_results[pipe_results['shorts']].index.tolist()
        if data.can_trade(sec)
    ]
def my_rebalance(context, data):
    """Daily rebalance: flatten every open position, then hand the fresh
    target weights to the optimizer.

    NOTE(review): positions are zeroed with order_target_percent and then
    re-established via order_optimal_portfolio in the same call — confirm
    this does not double-trade names that stay in the target lists.
    """
    for stock in context.portfolio.positions:
        order_target_percent(stock, 0.0)
    # Calculate target weights to rebalance.
    target_weights = compute_target_weights(context, data)
    # If we have target weights, rebalance our portfolio.
    if target_weights:
        order_optimal_portfolio(
            objective=opt.TargetWeights(target_weights),
            constraints=[],
        )
|
normal
|
{
"blob_id": "c447d1fe38a4af43de39e05d46dacbe88249d427",
"index": 3654,
"step-1": "<mask token>\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef initialize(context):\n schedule_function(my_rebalance, date_rules.every_day(), time_rules.\n market_close(minutes=1))\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\n<mask token>\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef initialize(context):\n schedule_function(my_rebalance, date_rules.every_day(), time_rules.\n market_close(minutes=1))\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\n<mask token>\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\ndef before_trading_start(context, data):\n \"\"\"\n Get pipeline results.\n \"\"\"\n pipe_results = pipeline_output('my_pipeline')\n context.longs = []\n for sec in pipe_results[pipe_results['longs']].index.tolist():\n if data.can_trade(sec):\n context.longs.append(sec)\n context.shorts = []\n for sec in pipe_results[pipe_results['shorts']].index.tolist():\n if data.can_trade(sec):\n context.shorts.append(sec)\n\n\ndef my_rebalance(context, data):\n \"\"\"\n Rebalance daily\n \"\"\"\n for stock in context.portfolio.positions:\n order_target_percent(stock, 0.0)\n target_weights = compute_target_weights(context, data)\n if target_weights:\n order_optimal_portfolio(objective=opt.TargetWeights(target_weights),\n constraints=[])\n",
"step-4": "from quantopian.algorithm import order_optimal_portfolio\nfrom quantopian.algorithm import attach_pipeline, pipeline_output\nfrom quantopian.pipeline import Pipeline\nfrom quantopian.pipeline.data.builtin import USEquityPricing\nfrom quantopian.pipeline.factors import SimpleMovingAverage\nfrom quantopian.pipeline.filters import QTradableStocksUS\nimport quantopian.optimize as opt\nfrom quantopian.pipeline.factors import Returns\n\n\ndef initialize(context):\n schedule_function(my_rebalance, date_rules.every_day(), time_rules.\n market_close(minutes=1))\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\ndef make_pipeline():\n longs = Returns(window_length=2).bottom(1, mask=QTradableStocksUS())\n shorts = Returns(window_length=2).top(1, mask=QTradableStocksUS())\n return Pipeline(columns={'longs': longs, 'shorts': shorts}, screen=\n QTradableStocksUS() & (shorts | longs))\n\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n weights = {}\n if context.longs:\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n for security in context.portfolio.positions:\n if (security not in context.longs and security not in context.\n shorts and data.can_trade(security)):\n weights[security] = 0\n for security in context.longs:\n weights[security] = long_weight\n for security in context.shorts:\n weights[security] = short_weight\n return weights\n\n\ndef before_trading_start(context, data):\n \"\"\"\n Get pipeline results.\n \"\"\"\n pipe_results = pipeline_output('my_pipeline')\n context.longs = []\n for sec in pipe_results[pipe_results['longs']].index.tolist():\n if data.can_trade(sec):\n context.longs.append(sec)\n context.shorts = []\n for sec in pipe_results[pipe_results['shorts']].index.tolist():\n if data.can_trade(sec):\n context.shorts.append(sec)\n\n\ndef my_rebalance(context, data):\n \"\"\"\n Rebalance daily\n \"\"\"\n for stock in 
context.portfolio.positions:\n order_target_percent(stock, 0.0)\n target_weights = compute_target_weights(context, data)\n if target_weights:\n order_optimal_portfolio(objective=opt.TargetWeights(target_weights),\n constraints=[])\n",
"step-5": "from quantopian.algorithm import order_optimal_portfolio\nfrom quantopian.algorithm import attach_pipeline, pipeline_output\nfrom quantopian.pipeline import Pipeline\nfrom quantopian.pipeline.data.builtin import USEquityPricing\nfrom quantopian.pipeline.factors import SimpleMovingAverage\nfrom quantopian.pipeline.filters import QTradableStocksUS\nimport quantopian.optimize as opt\nfrom quantopian.pipeline.factors import Returns\n\ndef initialize(context):\n # Schedule our rebalance function to run at the end of\n # each day, when the market closes\n #set_slippage(slippage.FixedSlippage(spread=0.0, volume_limit=1))\n #set_slippage(slippage.FixedBasisPointsSlippage(basis_points=0, volume_limit=100))\n #set_slippage(slippage.VolumeShareSlippage(0))\n schedule_function(\n my_rebalance,\n date_rules.every_day(),\n time_rules.market_close(minutes=1 )\n )\n\n # Create our pipeline and attach it to our algorithm.\n my_pipe = make_pipeline()\n attach_pipeline(my_pipe, 'my_pipeline')\n\n\n\ndef make_pipeline():\n \n #longs = Returns(window_length=2).percentile_between(0,20,mask=QTradableStocksUS())\n #shorts = Returns(window_length=2).percentile_between(80,100,mask=QTradableStocksUS())\n longs = Returns(window_length=2).bottom(1,mask=QTradableStocksUS())\n shorts = Returns(window_length=2).top(1,mask=QTradableStocksUS()) \n\n return Pipeline(\n columns={\n 'longs': longs,\n 'shorts': shorts,\n },\n screen=QTradableStocksUS()& (shorts | longs)\n )\n\ndef compute_target_weights(context, data):\n \"\"\"\n Compute ordering weights.\n \"\"\"\n\n # Initialize empty target weights dictionary.\n # This will map securities to their target weight.\n weights = {}\n\n # If there are securities in our longs and shorts lists,\n # compute even target weights for each security.\n if context.longs :\n long_weight = 0.5 / len(context.longs)\n if context.shorts:\n short_weight = -0.5 / len(context.shorts)\n #if ~(context.longs & context.shorts):\n # return weights\n\n # Exit 
positions in our portfolio if they are not\n # in our longs or shorts lists.\n for security in context.portfolio.positions:\n if security not in context.longs and security not in context.shorts and data.can_trade(security):\n weights[security] = 0\n\n for security in context.longs:\n weights[security] = long_weight\n\n for security in context.shorts:\n weights[security] = short_weight\n\n return weights\n\ndef before_trading_start(context, data):\n \"\"\"\n Get pipeline results.\n \"\"\"\n\n # Gets our pipeline output every day.\n pipe_results = pipeline_output('my_pipeline')\n\n # Go long in securities for which the 'longs' value is True,\n # and check if they can be traded.\n context.longs = []\n for sec in pipe_results[pipe_results['longs']].index.tolist():\n if data.can_trade(sec):\n context.longs.append(sec)\n #print(context.longs)\n #print('Longs: ') \n #print(context.longs)\n # Go short in securities for which the 'shorts' value is True,\n # and check if they can be traded.\n context.shorts = []\n for sec in pipe_results[pipe_results['shorts']].index.tolist():\n if data.can_trade(sec):\n context.shorts.append(sec)\n #print('Shorts: ')\n #print(context.shorts)\n \n \n \ndef my_rebalance(context, data):\n \"\"\"\n Rebalance daily\n \"\"\"\n for stock in context.portfolio.positions:\n #print('selling everything')\n #print(stock)\n order_target_percent(stock, 0.0) \n # Calculate target weights to rebalance\n #print(context)\n target_weights = compute_target_weights(context, data)\n #print(target_weights)\n\n # If we have target weights, rebalance our portfolio\n if target_weights:\n order_optimal_portfolio(\n objective=opt.TargetWeights(target_weights),\n constraints=[],\n )\n",
"step-ids": [
1,
2,
4,
6,
7
]
}
|
[
1,
2,
4,
6,
7
] |
import re

# Raw string so the \s escape is explicit (avoids invalid-escape lint
# warnings); matches ==/!= comparisons against True/False/None, which
# PEP 8 says should use `is` / `is not`.
IS_WITH_SINGLETON_REGEX = re.compile(r"(!=|==)\s*(True|False|None)")


def check_is_with_singleton(physical_line, line_number):
    """Flag an equality comparison with a singleton on one physical line.

    :param physical_line: raw source line to scan.
    :param line_number: line number, echoed back in the result.
    :return: ``(0, 12, (line_number, offset), message)`` for the first
        match on the line, or ``None`` when the line is clean.
    """
    match_obj = IS_WITH_SINGLETON_REGEX.search(physical_line)
    if match_obj is not None:
        offset = match_obj.span()[0]
        return (0, 12, (line_number, offset), "Use equal with singleton")


# Plugin registry: this checker runs per physical line.
plugins = {
    "physical_line": [check_is_with_singleton],
    "logical_line": [],
    "ast": []
}
|
normal
|
{
"blob_id": "cf6d3a0fbf2a2daf8432622f780e138784ec505d",
"index": 8300,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef check_is_with_singleton(physical_line, line_number):\n match_obj = IS_WITH_SINGLETON_REGEX.search(physical_line)\n if match_obj is not None:\n offset = match_obj.span()[0]\n return 0, 12, (line_number, offset), 'Use equal with singleton'\n\n\n<mask token>\n",
"step-3": "<mask token>\nIS_WITH_SINGLETON_REGEX = re.compile('(!=|==)\\\\s*(True|False|None)')\n\n\ndef check_is_with_singleton(physical_line, line_number):\n match_obj = IS_WITH_SINGLETON_REGEX.search(physical_line)\n if match_obj is not None:\n offset = match_obj.span()[0]\n return 0, 12, (line_number, offset), 'Use equal with singleton'\n\n\nplugins = {'physical_line': [check_is_with_singleton], 'logical_line': [],\n 'ast': []}\n",
"step-4": "import re\nIS_WITH_SINGLETON_REGEX = re.compile('(!=|==)\\\\s*(True|False|None)')\n\n\ndef check_is_with_singleton(physical_line, line_number):\n match_obj = IS_WITH_SINGLETON_REGEX.search(physical_line)\n if match_obj is not None:\n offset = match_obj.span()[0]\n return 0, 12, (line_number, offset), 'Use equal with singleton'\n\n\nplugins = {'physical_line': [check_is_with_singleton], 'logical_line': [],\n 'ast': []}\n",
"step-5": "import re\n\nIS_WITH_SINGLETON_REGEX = re.compile(\"(!=|==)\\s*(True|False|None)\")\n\ndef check_is_with_singleton(physical_line, line_number):\n match_obj = IS_WITH_SINGLETON_REGEX.search(physical_line)\n\n if match_obj is not None:\n offset = match_obj.span()[0]\n return (0, 12, (line_number, offset), \"Use equal with singleton\")\n\nplugins = {\n \"physical_line\": [check_is_with_singleton],\n \"logical_line\": [],\n \"ast\": []\n}",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from __future__ import print_function
from itertools import permutations
s, space, k = raw_input().partition(' ')
for t in sorted(list(permutations(s, int(k)))):
print(*t, sep='')
|
normal
|
{
"blob_id": "37580939a0e58bdffb8cfad8252f339a7da4446e",
"index": 1130,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor t in sorted(list(permutations(s, int(k)))):\n print(*t, sep='')\n",
"step-3": "<mask token>\ns, space, k = raw_input().partition(' ')\nfor t in sorted(list(permutations(s, int(k)))):\n print(*t, sep='')\n",
"step-4": "from __future__ import print_function\nfrom itertools import permutations\ns, space, k = raw_input().partition(' ')\nfor t in sorted(list(permutations(s, int(k)))):\n print(*t, sep='')\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
def get_ecgs_by_query(json_data, query):
    """Return the ids of all cases whose record satisfies *query*.

    Each inspected case id is echoed to stdout, exactly like the
    original implementation.
    """
    matching_ids = []
    for case_id, record in json_data.items():
        print(case_id)
        if query.is_query_ok(record):
            matching_ids.append(case_id)
    return matching_ids
def save_new_dataset_by_ids(old_json, ecg_ids_to_save, name_new_dataset):
    """Write a copy of *old_json* containing only the selected patients.

    :param old_json: full dataset dict keyed by ecg id
    :param ecg_ids_to_save: ids of the patients to keep
    :param name_new_dataset: output file name, string, ends with .json
    :return: None
    """
    import json
    from settings import PATH_TO_METADATASETS_FOLDER

    kept = {ecg_id: record
            for ecg_id, record in old_json.items()
            if ecg_id in ecg_ids_to_save}
    # Path assembled with a literal backslash, as in the original
    # (Windows-style folder layout).
    result_file_path = PATH_TO_METADATASETS_FOLDER + "\\" + name_new_dataset
    with open(result_file_path, 'w') as outfile:
        json.dump(kept, outfile)
|
normal
|
{
"blob_id": "445ae195edfe9fe9ee58c6c5a14ec787719d698c",
"index": 7454,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef save_new_dataset_by_ids(old_json, ecg_ids_to_save, name_new_dataset):\n \"\"\"\n Saves json only with selected (by id) patients.\n :param old_json: initail dataset dict\n :param ecg_ids_to_save: which patient we want to keep for new dataset\n :param name_new_dataset: name of file, string, ends with .json\n :return:\n \"\"\"\n import json\n from settings import PATH_TO_METADATASETS_FOLDER\n new_json_data = {}\n for ecg_id in old_json.keys():\n if ecg_id in ecg_ids_to_save:\n new_json_data[ecg_id] = old_json[ecg_id]\n result_file_path = PATH_TO_METADATASETS_FOLDER + '\\\\' + name_new_dataset\n with open(result_file_path, 'w') as outfile:\n json.dump(new_json_data, outfile)\n",
"step-3": "def get_ecgs_by_query(json_data, query):\n ecgs_ids = []\n for case_id in json_data.keys():\n print(case_id)\n if query.is_query_ok(json_data[case_id]):\n ecgs_ids.append(case_id)\n return ecgs_ids\n\n\ndef save_new_dataset_by_ids(old_json, ecg_ids_to_save, name_new_dataset):\n \"\"\"\n Saves json only with selected (by id) patients.\n :param old_json: initail dataset dict\n :param ecg_ids_to_save: which patient we want to keep for new dataset\n :param name_new_dataset: name of file, string, ends with .json\n :return:\n \"\"\"\n import json\n from settings import PATH_TO_METADATASETS_FOLDER\n new_json_data = {}\n for ecg_id in old_json.keys():\n if ecg_id in ecg_ids_to_save:\n new_json_data[ecg_id] = old_json[ecg_id]\n result_file_path = PATH_TO_METADATASETS_FOLDER + '\\\\' + name_new_dataset\n with open(result_file_path, 'w') as outfile:\n json.dump(new_json_data, outfile)\n",
"step-4": "\n\ndef get_ecgs_by_query(json_data, query):\n ecgs_ids = []\n for case_id in json_data.keys():\n print(case_id)\n if query.is_query_ok(json_data[case_id]):\n ecgs_ids.append(case_id)\n return ecgs_ids\n\ndef save_new_dataset_by_ids(old_json, ecg_ids_to_save, name_new_dataset):\n \"\"\"\n Saves json only with selected (by id) patients.\n :param old_json: initail dataset dict\n :param ecg_ids_to_save: which patient we want to keep for new dataset\n :param name_new_dataset: name of file, string, ends with .json\n :return:\n \"\"\"\n import json\n from settings import PATH_TO_METADATASETS_FOLDER\n new_json_data = {}\n for ecg_id in old_json.keys():\n if ecg_id in ecg_ids_to_save:\n new_json_data[ecg_id]= old_json[ecg_id]\n result_file_path = PATH_TO_METADATASETS_FOLDER + \"\\\\\" + name_new_dataset\n with open(result_file_path, 'w') as outfile:\n json.dump(new_json_data, outfile)\n\n\n\n\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
# Lahman.py
# Convert to/from web native JSON and Python/RDB types.
import json
# Include Flask packages
from flask import Flask
from flask import request
import copy
import SimpleBO
# The main program that executes. This call creates an instance of a
# class and the constructor starts the runtime.
app = Flask(__name__)
def parse_and_print_args():
    """Pull the common query parameters out of the current Flask request.

    Extracts and removes ``fields``, ``offset`` and ``limit`` from the
    query-string dict, parses an optional JSON body, and returns
    ``(in_args, fields, body, offset, limit)``.

    BUGFIX: the original returned ``..., limit, offset`` while every
    caller unpacks ``..., offset, limit`` — the two values were silently
    swapped. The return order now matches the callers' unpack order.
    """
    fields = None
    in_args = None
    # Default these so the return statement cannot raise NameError if
    # request.args is ever None (the original left them unbound).
    offset = None
    limit = None
    if request.args is not None:
        in_args = dict(copy.copy(request.args))
        fields = copy.copy(in_args.get('fields', None))
        if fields:
            del in_args['fields']
        offset = copy.copy(in_args.get('offset', None))
        if offset:
            del in_args['offset']
        limit = copy.copy(in_args.get('limit', None))
        if limit:
            del in_args['limit']

    try:
        body = json.loads(request.data) if request.data else None
    except Exception as e:
        # Tolerate an unparseable body: log it and fall back to None.
        print("exception here is: ", e)
        body = None

    print("Request.args : ", json.dumps(in_args))
    return in_args, fields, body, offset, limit
@app.route('/api/<resource>',methods = ['GET','POST'])
def Basic_resource(resource):
    """Collection endpoint.

    GET: query *resource* by template via SimpleBO.find_by_template and
    wrap the rows in a {"data": ..., "links": ...} envelope built by
    SimpleBO.generate_links. POST: insert the JSON body. Other verbs: 501.

    NOTE(review): parse_and_print_args returns (..., limit, offset) but is
    unpacked here as (..., offset, limit) — the two values look swapped;
    confirm against SimpleBO's signatures.
    """
    in_args,fields,body,offset,limit = parse_and_print_args()
    if request.method == 'GET':
        result = SimpleBO.find_by_template(resource,in_args,fields,limit,offset)
        url = request.url
        url_root = request.url_root
        links = SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)
        # Envelope: data plus pagination/navigation links.
        output=[{"data":result,
                 "links":links}]
        return json.dumps(output), 200, \
               {"content-type": "application/json; charset:utf-8"}
    elif request.method == 'POST':
        result = SimpleBO.Insert(resource,body)
        return result
    else:
        return "Method " + request.method + " on resource " + resource + \
               " not implemented!", 501, {"content-type": "text/plain; charset: utf-8"}
@app.route('/api/<resource>/<primary_key>',methods = ['GET','PUT','DELETE'])
def Specific_resource(resource,primary_key):
in_args,fields,body,offset,limit = parse_and_print_args()
if request.method == 'GET':
result = SimpleBO.find_by_primary_key(resource,primary_key,fields)
return json.dumps(result), 200, \
{"content-type": "application/json; charset:utf-8"}
elif request.method == 'PUT':
result = SimpleBO.Update(resource,body,primary_key)
return json.dumps(result), 200, \
{"content-type": "application/json; charset:utf-8"}
elif request.method == 'DELETE':
result = SimpleBO.Delete(resource,primary_key)
return result
else:
return "Method " + request.method + " on resource " + resource + \
" not implemented!", 501, {"content-type": "text/plain; charset: utf-8"}
@app.route('/api/<resource>/<primary_key>/<related_resource>',methods = ['GET','POST'])
def related_resource(resource,primary_key,related_resource):
in_args,fields,body,offset,limit = parse_and_print_args()
if request.method == 'GET':
result = SimpleBO.find_by_fk(resource,primary_key,related_resource,in_args,fields,limit,offset)
url = request.url
url_root = request.url_root
all_resource = resource+"/"+primary_key+"/"+related_resource
links=SimpleBO.generate_links(url,url_root,all_resource,in_args,fields,offset,limit,result)
output=[{"data":result,
"links":links}]
return json.dumps(output), 200, \
{"content-type": "application/json; charset:utf-8"}
elif request.method == 'POST':
result = SimpleBO.Insert(related_resource,body)
return json.dumps(result), 200, \
{"content-type": "application/json; charset:utf-8"}
else:
return "Method " + request.method + " on resource " + resource + \
" not implemented!", 501, {"content-type": "text/plain; charset: utf-8"}
@app.route('/api/teammates/<playerid>', methods=['GET'])
def get_teammates(playerid):
in_args,fields,body,offset,limit = parse_and_print_args()
if request.method == 'GET':
result = SimpleBO.find_teammates(playerid,limit,offset)
url = request.url
url_root = request.url_root
resource = 'teammates/'+playerid
links=SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)
output=[{"data":result,
"links":links}]
return json.dumps(output), 200, \
{"content-type": "application/json; charset:utf-8"}
@app.route('/api/people/<playerid>/career_stats', methods=['GET'])
def get_career_stats(playerid):
in_args,fields,body,offset,limit = parse_and_print_args()
if request.method == 'GET':
result = SimpleBO.find_career_stats(playerid,limit,offset)
url = request.url
url_root = request.url_root
resource = 'people/'+playerid+'/career_stats'
links=SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)
output=[{"data":result,
"links":links}]
return json.dumps(output), 200, \
{"content-type": "application/json; charset:utf-8"}
else:
return "Method " + request.method + " on resource " + resource + \
" not implemented!", 501, {"content-type": "text/plain; charset: utf-8"}
@app.route('/api/roster', methods=['GET'])
def get_roster():
in_args,fields,body,offset,limit = parse_and_print_args()
if request.method == 'GET':
result = SimpleBO.find_roster(in_args,limit,offset)
url = request.url
url_root = request.url_root
resource = 'roster'
links=SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)
output=[{"data":result,
"links":links}]
return json.dumps(output), 200, \
{"content-type": "application/json; charset:utf-8"}
else:
return "Method " + request.method + " on resource " + resource + \
" not implemented!", 501, {"content-type": "text/plain; charset: utf-8"}
if __name__ == '__main__':
app.run()
|
normal
|
{
"blob_id": "d03a8076b77851ae4df5cf657ff898eb132c49c3",
"index": 5616,
"step-1": "<mask token>\n\n\ndef parse_and_print_args():\n fields = None\n in_args = None\n if request.args is not None:\n in_args = dict(copy.copy(request.args))\n fields = copy.copy(in_args.get('fields', None))\n if fields:\n del in_args['fields']\n offset = copy.copy(in_args.get('offset', None))\n if offset:\n del in_args['offset']\n limit = copy.copy(in_args.get('limit', None))\n if limit:\n del in_args['limit']\n try:\n if request.data:\n body = json.loads(request.data)\n else:\n body = None\n except Exception as e:\n print('exception here is: ', e)\n body = None\n print('Request.args : ', json.dumps(in_args))\n return in_args, fields, body, limit, offset\n\n\n@app.route('/api/<resource>', methods=['GET', 'POST'])\ndef Basic_resource(resource):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_template(resource, in_args, fields, limit,\n offset)\n url = request.url\n url_root = request.url_root\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'POST':\n result = SimpleBO.Insert(resource, body)\n return result\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/<resource>/<primary_key>', methods=['GET', 'PUT', 'DELETE'])\ndef Specific_resource(resource, primary_key):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_primary_key(resource, primary_key, fields)\n return json.dumps(result), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'PUT':\n result = SimpleBO.Update(resource, body, primary_key)\n return json.dumps(result), 200, {'content-type':\n 
'application/json; charset:utf-8'}\n elif request.method == 'DELETE':\n result = SimpleBO.Delete(resource, primary_key)\n return result\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/<resource>/<primary_key>/<related_resource>', methods=[\n 'GET', 'POST'])\ndef related_resource(resource, primary_key, related_resource):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_fk(resource, primary_key,\n related_resource, in_args, fields, limit, offset)\n url = request.url\n url_root = request.url_root\n all_resource = resource + '/' + primary_key + '/' + related_resource\n links = SimpleBO.generate_links(url, url_root, all_resource,\n in_args, fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'POST':\n result = SimpleBO.Insert(related_resource, body)\n return json.dumps(result), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/teammates/<playerid>', methods=['GET'])\ndef get_teammates(playerid):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_teammates(playerid, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'teammates/' + playerid\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n\n\n@app.route('/api/people/<playerid>/career_stats', methods=['GET'])\ndef 
get_career_stats(playerid):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_career_stats(playerid, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'people/' + playerid + '/career_stats'\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/roster', methods=['GET'])\ndef get_roster():\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_roster(in_args, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'roster'\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef parse_and_print_args():\n fields = None\n in_args = None\n if request.args is not None:\n in_args = dict(copy.copy(request.args))\n fields = copy.copy(in_args.get('fields', None))\n if fields:\n del in_args['fields']\n offset = copy.copy(in_args.get('offset', None))\n if offset:\n del in_args['offset']\n limit = copy.copy(in_args.get('limit', None))\n if limit:\n del in_args['limit']\n try:\n if request.data:\n body = json.loads(request.data)\n else:\n body = None\n except Exception as e:\n print('exception here is: ', e)\n body = None\n print('Request.args : ', json.dumps(in_args))\n return in_args, fields, body, limit, offset\n\n\n@app.route('/api/<resource>', methods=['GET', 'POST'])\ndef Basic_resource(resource):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_template(resource, in_args, fields, limit,\n offset)\n url = request.url\n url_root = request.url_root\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'POST':\n result = SimpleBO.Insert(resource, body)\n return result\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/<resource>/<primary_key>', methods=['GET', 'PUT', 'DELETE'])\ndef Specific_resource(resource, primary_key):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_primary_key(resource, primary_key, fields)\n return json.dumps(result), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'PUT':\n result = SimpleBO.Update(resource, body, primary_key)\n return json.dumps(result), 200, {'content-type':\n 
'application/json; charset:utf-8'}\n elif request.method == 'DELETE':\n result = SimpleBO.Delete(resource, primary_key)\n return result\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/<resource>/<primary_key>/<related_resource>', methods=[\n 'GET', 'POST'])\ndef related_resource(resource, primary_key, related_resource):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_fk(resource, primary_key,\n related_resource, in_args, fields, limit, offset)\n url = request.url\n url_root = request.url_root\n all_resource = resource + '/' + primary_key + '/' + related_resource\n links = SimpleBO.generate_links(url, url_root, all_resource,\n in_args, fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'POST':\n result = SimpleBO.Insert(related_resource, body)\n return json.dumps(result), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/teammates/<playerid>', methods=['GET'])\ndef get_teammates(playerid):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_teammates(playerid, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'teammates/' + playerid\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n\n\n@app.route('/api/people/<playerid>/career_stats', methods=['GET'])\ndef 
get_career_stats(playerid):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_career_stats(playerid, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'people/' + playerid + '/career_stats'\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/roster', methods=['GET'])\ndef get_roster():\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_roster(in_args, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'roster'\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-3": "<mask token>\napp = Flask(__name__)\n\n\ndef parse_and_print_args():\n fields = None\n in_args = None\n if request.args is not None:\n in_args = dict(copy.copy(request.args))\n fields = copy.copy(in_args.get('fields', None))\n if fields:\n del in_args['fields']\n offset = copy.copy(in_args.get('offset', None))\n if offset:\n del in_args['offset']\n limit = copy.copy(in_args.get('limit', None))\n if limit:\n del in_args['limit']\n try:\n if request.data:\n body = json.loads(request.data)\n else:\n body = None\n except Exception as e:\n print('exception here is: ', e)\n body = None\n print('Request.args : ', json.dumps(in_args))\n return in_args, fields, body, limit, offset\n\n\n@app.route('/api/<resource>', methods=['GET', 'POST'])\ndef Basic_resource(resource):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_template(resource, in_args, fields, limit,\n offset)\n url = request.url\n url_root = request.url_root\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'POST':\n result = SimpleBO.Insert(resource, body)\n return result\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/<resource>/<primary_key>', methods=['GET', 'PUT', 'DELETE'])\ndef Specific_resource(resource, primary_key):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_primary_key(resource, primary_key, fields)\n return json.dumps(result), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'PUT':\n result = SimpleBO.Update(resource, body, primary_key)\n return json.dumps(result), 200, 
{'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'DELETE':\n result = SimpleBO.Delete(resource, primary_key)\n return result\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/<resource>/<primary_key>/<related_resource>', methods=[\n 'GET', 'POST'])\ndef related_resource(resource, primary_key, related_resource):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_fk(resource, primary_key,\n related_resource, in_args, fields, limit, offset)\n url = request.url\n url_root = request.url_root\n all_resource = resource + '/' + primary_key + '/' + related_resource\n links = SimpleBO.generate_links(url, url_root, all_resource,\n in_args, fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'POST':\n result = SimpleBO.Insert(related_resource, body)\n return json.dumps(result), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/teammates/<playerid>', methods=['GET'])\ndef get_teammates(playerid):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_teammates(playerid, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'teammates/' + playerid\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n\n\n@app.route('/api/people/<playerid>/career_stats', methods=['GET'])\ndef 
get_career_stats(playerid):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_career_stats(playerid, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'people/' + playerid + '/career_stats'\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/roster', methods=['GET'])\ndef get_roster():\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_roster(in_args, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'roster'\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-4": "import json\nfrom flask import Flask\nfrom flask import request\nimport copy\nimport SimpleBO\napp = Flask(__name__)\n\n\ndef parse_and_print_args():\n fields = None\n in_args = None\n if request.args is not None:\n in_args = dict(copy.copy(request.args))\n fields = copy.copy(in_args.get('fields', None))\n if fields:\n del in_args['fields']\n offset = copy.copy(in_args.get('offset', None))\n if offset:\n del in_args['offset']\n limit = copy.copy(in_args.get('limit', None))\n if limit:\n del in_args['limit']\n try:\n if request.data:\n body = json.loads(request.data)\n else:\n body = None\n except Exception as e:\n print('exception here is: ', e)\n body = None\n print('Request.args : ', json.dumps(in_args))\n return in_args, fields, body, limit, offset\n\n\n@app.route('/api/<resource>', methods=['GET', 'POST'])\ndef Basic_resource(resource):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_template(resource, in_args, fields, limit,\n offset)\n url = request.url\n url_root = request.url_root\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'POST':\n result = SimpleBO.Insert(resource, body)\n return result\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/<resource>/<primary_key>', methods=['GET', 'PUT', 'DELETE'])\ndef Specific_resource(resource, primary_key):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_primary_key(resource, primary_key, fields)\n return json.dumps(result), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'PUT':\n result = 
SimpleBO.Update(resource, body, primary_key)\n return json.dumps(result), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'DELETE':\n result = SimpleBO.Delete(resource, primary_key)\n return result\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/<resource>/<primary_key>/<related_resource>', methods=[\n 'GET', 'POST'])\ndef related_resource(resource, primary_key, related_resource):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_fk(resource, primary_key,\n related_resource, in_args, fields, limit, offset)\n url = request.url\n url_root = request.url_root\n all_resource = resource + '/' + primary_key + '/' + related_resource\n links = SimpleBO.generate_links(url, url_root, all_resource,\n in_args, fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n elif request.method == 'POST':\n result = SimpleBO.Insert(related_resource, body)\n return json.dumps(result), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/teammates/<playerid>', methods=['GET'])\ndef get_teammates(playerid):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_teammates(playerid, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'teammates/' + playerid\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; 
charset:utf-8'}\n\n\n@app.route('/api/people/<playerid>/career_stats', methods=['GET'])\ndef get_career_stats(playerid):\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_career_stats(playerid, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'people/' + playerid + '/career_stats'\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\n@app.route('/api/roster', methods=['GET'])\ndef get_roster():\n in_args, fields, body, offset, limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_roster(in_args, limit, offset)\n url = request.url\n url_root = request.url_root\n resource = 'roster'\n links = SimpleBO.generate_links(url, url_root, resource, in_args,\n fields, offset, limit, result)\n output = [{'data': result, 'links': links}]\n return json.dumps(output), 200, {'content-type':\n 'application/json; charset:utf-8'}\n else:\n return ('Method ' + request.method + ' on resource ' + resource +\n ' not implemented!', 501, {'content-type':\n 'text/plain; charset: utf-8'})\n\n\nif __name__ == '__main__':\n app.run()\n",
"step-5": "# Lahman.py\n\n# Convert to/from web native JSON and Python/RDB types.\nimport json\n\n# Include Flask packages\nfrom flask import Flask\nfrom flask import request\nimport copy\n\nimport SimpleBO\n\n# The main program that executes. This call creates an instance of a\n# class and the constructor starts the runtime.\napp = Flask(__name__)\n\ndef parse_and_print_args():\n fields = None\n in_args = None\n if request.args is not None:\n in_args = dict(copy.copy(request.args))\n fields = copy.copy(in_args.get('fields',None))\n if fields:\n del(in_args['fields'])\n offset = copy.copy(in_args.get('offset',None))\n if offset:\n del(in_args['offset'])\n limit = copy.copy(in_args.get('limit',None))\n if limit:\n del(in_args['limit'])\n try:\n if request.data:\n body = json.loads(request.data)\n else:\n body = None\n except Exception as e:\n print(\"exception here is: \", e)\n body = None\n\n\n\n print(\"Request.args : \", json.dumps(in_args))\n return in_args,fields,body,limit,offset\n\n\n\n\n\n@app.route('/api/<resource>',methods = ['GET','POST'])\ndef Basic_resource(resource):\n\n in_args,fields,body,offset,limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_template(resource,in_args,fields,limit,offset)\n url = request.url\n url_root = request.url_root\n links = SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)\n output=[{\"data\":result,\n \"links\":links}]\n return json.dumps(output), 200, \\\n {\"content-type\": \"application/json; charset:utf-8\"}\n\n elif request.method == 'POST':\n result = SimpleBO.Insert(resource,body)\n return result\n\n else:\n return \"Method \" + request.method + \" on resource \" + resource + \\\n \" not implemented!\", 501, {\"content-type\": \"text/plain; charset: utf-8\"}\n\n@app.route('/api/<resource>/<primary_key>',methods = ['GET','PUT','DELETE'])\ndef Specific_resource(resource,primary_key):\n\n in_args,fields,body,offset,limit = parse_and_print_args()\n if 
request.method == 'GET':\n result = SimpleBO.find_by_primary_key(resource,primary_key,fields)\n return json.dumps(result), 200, \\\n {\"content-type\": \"application/json; charset:utf-8\"}\n\n elif request.method == 'PUT':\n result = SimpleBO.Update(resource,body,primary_key)\n return json.dumps(result), 200, \\\n {\"content-type\": \"application/json; charset:utf-8\"}\n\n elif request.method == 'DELETE':\n result = SimpleBO.Delete(resource,primary_key)\n return result\n\n else:\n return \"Method \" + request.method + \" on resource \" + resource + \\\n \" not implemented!\", 501, {\"content-type\": \"text/plain; charset: utf-8\"}\n\n@app.route('/api/<resource>/<primary_key>/<related_resource>',methods = ['GET','POST'])\ndef related_resource(resource,primary_key,related_resource):\n\n in_args,fields,body,offset,limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_by_fk(resource,primary_key,related_resource,in_args,fields,limit,offset)\n url = request.url\n url_root = request.url_root\n all_resource = resource+\"/\"+primary_key+\"/\"+related_resource\n links=SimpleBO.generate_links(url,url_root,all_resource,in_args,fields,offset,limit,result)\n output=[{\"data\":result,\n \"links\":links}]\n return json.dumps(output), 200, \\\n {\"content-type\": \"application/json; charset:utf-8\"}\n\n elif request.method == 'POST':\n result = SimpleBO.Insert(related_resource,body)\n return json.dumps(result), 200, \\\n {\"content-type\": \"application/json; charset:utf-8\"}\n\n else:\n return \"Method \" + request.method + \" on resource \" + resource + \\\n \" not implemented!\", 501, {\"content-type\": \"text/plain; charset: utf-8\"}\n\n@app.route('/api/teammates/<playerid>', methods=['GET'])\ndef get_teammates(playerid):\n\n in_args,fields,body,offset,limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_teammates(playerid,limit,offset)\n url = request.url\n url_root = request.url_root\n resource = 
'teammates/'+playerid\n links=SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)\n output=[{\"data\":result,\n \"links\":links}]\n return json.dumps(output), 200, \\\n {\"content-type\": \"application/json; charset:utf-8\"}\n\n@app.route('/api/people/<playerid>/career_stats', methods=['GET'])\ndef get_career_stats(playerid):\n\n in_args,fields,body,offset,limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_career_stats(playerid,limit,offset)\n url = request.url\n url_root = request.url_root\n resource = 'people/'+playerid+'/career_stats'\n links=SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)\n output=[{\"data\":result,\n \"links\":links}]\n return json.dumps(output), 200, \\\n {\"content-type\": \"application/json; charset:utf-8\"}\n\n else:\n return \"Method \" + request.method + \" on resource \" + resource + \\\n \" not implemented!\", 501, {\"content-type\": \"text/plain; charset: utf-8\"}\n\n@app.route('/api/roster', methods=['GET'])\ndef get_roster():\n\n in_args,fields,body,offset,limit = parse_and_print_args()\n if request.method == 'GET':\n result = SimpleBO.find_roster(in_args,limit,offset)\n url = request.url\n url_root = request.url_root\n resource = 'roster'\n links=SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)\n output=[{\"data\":result,\n \"links\":links}]\n return json.dumps(output), 200, \\\n {\"content-type\": \"application/json; charset:utf-8\"}\n\n else:\n return \"Method \" + request.method + \" on resource \" + resource + \\\n \" not implemented!\", 501, {\"content-type\": \"text/plain; charset: utf-8\"}\n\nif __name__ == '__main__':\n app.run()\n",
"step-ids": [
7,
8,
9,
10,
11
]
}
|
[
7,
8,
9,
10,
11
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
while x <= 24:
if x % 5 == 0:
x = x + 1
continue
print(x)
x = x + 1
<|reserved_special_token_1|>
x = 1
while x <= 24:
if x % 5 == 0:
x = x + 1
continue
print(x)
x = x + 1
|
flexible
|
{
"blob_id": "61cfc583cd87ac0528cb07f4e051392167414920",
"index": 1960,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nwhile x <= 24:\n if x % 5 == 0:\n x = x + 1\n continue\n print(x)\n x = x + 1\n",
"step-3": "x = 1\nwhile x <= 24:\n if x % 5 == 0:\n x = x + 1\n continue\n print(x)\n x = x + 1\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
"""A number can be broken into different contiguous sub-subsequence parts.
Suppose, a number 3245 can be broken into parts like 3 2 4 5 32 24 45 324 245.
And this number is a COLORFUL number, since product of every digit of a contiguous subsequence is different
"""
def colorful(A):
sA = str(A)
len_sA = len(sA)
if len_sA == 1:
return (1)
dig_list = []
for i in range(len_sA):
for j in range(i, len_sA):
dig_list.append(int(sA[i:j + 1]))
mul = {}
for val in dig_list:
m = 1
for v in str(val):
m *= int(v)
if m in mul:
return (0)
else:
mul[m] = 1
return (1)
print (colorful(0))
print (colorful(111))
print (colorful(3245))
|
normal
|
{
"blob_id": "41013469e65e45f6c909d66c2a54eaf11dfd474c",
"index": 3077,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef colorful(A):\n sA = str(A)\n len_sA = len(sA)\n if len_sA == 1:\n return 1\n dig_list = []\n for i in range(len_sA):\n for j in range(i, len_sA):\n dig_list.append(int(sA[i:j + 1]))\n mul = {}\n for val in dig_list:\n m = 1\n for v in str(val):\n m *= int(v)\n if m in mul:\n return 0\n else:\n mul[m] = 1\n return 1\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef colorful(A):\n sA = str(A)\n len_sA = len(sA)\n if len_sA == 1:\n return 1\n dig_list = []\n for i in range(len_sA):\n for j in range(i, len_sA):\n dig_list.append(int(sA[i:j + 1]))\n mul = {}\n for val in dig_list:\n m = 1\n for v in str(val):\n m *= int(v)\n if m in mul:\n return 0\n else:\n mul[m] = 1\n return 1\n\n\nprint(colorful(0))\nprint(colorful(111))\nprint(colorful(3245))\n",
"step-4": "\"\"\"A number can be broken into different contiguous sub-subsequence parts. \nSuppose, a number 3245 can be broken into parts like 3 2 4 5 32 24 45 324 245. \nAnd this number is a COLORFUL number, since product of every digit of a contiguous subsequence is different\n\"\"\"\n\ndef colorful(A):\n sA = str(A)\n len_sA = len(sA)\n if len_sA == 1:\n return (1)\n dig_list = []\n for i in range(len_sA):\n for j in range(i, len_sA):\n dig_list.append(int(sA[i:j + 1]))\n mul = {}\n for val in dig_list:\n m = 1\n for v in str(val):\n m *= int(v)\n if m in mul:\n return (0)\n else:\n mul[m] = 1\n return (1)\n\nprint (colorful(0))\nprint (colorful(111))\nprint (colorful(3245))\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
run=[] #Creating a empty list
no_players=int(input("enter the number of the players in the team :"))
for i in range (no_players):
run_score=int(input("Enter the runs scored by the player "+str(i+1)+":"))
run.append(run_score)
#code for the average score of the team
def average(run):
print("____________________________________")
sum=0
for i in range (0,len(run)):
sum+=run[i]
avg=sum/len(run)
print("Average score of the team is :",avg)
#code for the maximun runs scored by the players in the team
def high(run):
print("______________________________________")
max=run[0]
for i in range(len(run)):
if max<run[i]:
max=run[i]
print("Highest run score by the player is :",max)
#code for the minimum runs scored by the players in the team
def low(run):
print("____________________________________")
mim=run[0]
for i in range(len(run)):
if mim>run[i]:
mim=run[i]
print("Lowest runs scored by the player is :",mim)
#code for the runs scored more than 50 runs in the the team
def check(run):
print("_______________________________________")
count=0
for i in range(0,len(run)):
if run[i]>=50:
count+=1
else:
pass
print("Count of the player score more than '50' are :",count)
#code for the runs scored for higher number of the frequency
def feq(run):
print("___________________________________")
max=0
result=run[0]
for i in run:
freq=run.count(i)
if freq>max:
max=freq
result=i
print(f"run scored with the highest frequncy {result} is",max)
print("-------------'THANKYOU---------------")
average(run)
high(run)
low(run)
check(run)
feq(run)
|
normal
|
{
"blob_id": "3d7ca468a1f7aa1602bff22167e9550ad515fa79",
"index": 4777,
"step-1": "<mask token>\n\n\ndef average(run):\n print('____________________________________')\n sum = 0\n for i in range(0, len(run)):\n sum += run[i]\n avg = sum / len(run)\n print('Average score of the team is :', avg)\n\n\ndef high(run):\n print('______________________________________')\n max = run[0]\n for i in range(len(run)):\n if max < run[i]:\n max = run[i]\n print('Highest run score by the player is :', max)\n\n\ndef low(run):\n print('____________________________________')\n mim = run[0]\n for i in range(len(run)):\n if mim > run[i]:\n mim = run[i]\n print('Lowest runs scored by the player is :', mim)\n\n\ndef check(run):\n print('_______________________________________')\n count = 0\n for i in range(0, len(run)):\n if run[i] >= 50:\n count += 1\n else:\n pass\n print(\"Count of the player score more than '50' are :\", count)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef average(run):\n print('____________________________________')\n sum = 0\n for i in range(0, len(run)):\n sum += run[i]\n avg = sum / len(run)\n print('Average score of the team is :', avg)\n\n\ndef high(run):\n print('______________________________________')\n max = run[0]\n for i in range(len(run)):\n if max < run[i]:\n max = run[i]\n print('Highest run score by the player is :', max)\n\n\ndef low(run):\n print('____________________________________')\n mim = run[0]\n for i in range(len(run)):\n if mim > run[i]:\n mim = run[i]\n print('Lowest runs scored by the player is :', mim)\n\n\ndef check(run):\n print('_______________________________________')\n count = 0\n for i in range(0, len(run)):\n if run[i] >= 50:\n count += 1\n else:\n pass\n print(\"Count of the player score more than '50' are :\", count)\n\n\ndef feq(run):\n print('___________________________________')\n max = 0\n result = run[0]\n for i in run:\n freq = run.count(i)\n if freq > max:\n max = freq\n result = i\n print(f'run scored with the highest frequncy {result} is', max)\n print(\"-------------'THANKYOU---------------\")\n\n\n<mask token>\n",
"step-3": "<mask token>\nfor i in range(no_players):\n run_score = int(input('Enter the runs scored by the player ' + str(i + \n 1) + ':'))\n run.append(run_score)\n\n\ndef average(run):\n print('____________________________________')\n sum = 0\n for i in range(0, len(run)):\n sum += run[i]\n avg = sum / len(run)\n print('Average score of the team is :', avg)\n\n\ndef high(run):\n print('______________________________________')\n max = run[0]\n for i in range(len(run)):\n if max < run[i]:\n max = run[i]\n print('Highest run score by the player is :', max)\n\n\ndef low(run):\n print('____________________________________')\n mim = run[0]\n for i in range(len(run)):\n if mim > run[i]:\n mim = run[i]\n print('Lowest runs scored by the player is :', mim)\n\n\ndef check(run):\n print('_______________________________________')\n count = 0\n for i in range(0, len(run)):\n if run[i] >= 50:\n count += 1\n else:\n pass\n print(\"Count of the player score more than '50' are :\", count)\n\n\ndef feq(run):\n print('___________________________________')\n max = 0\n result = run[0]\n for i in run:\n freq = run.count(i)\n if freq > max:\n max = freq\n result = i\n print(f'run scored with the highest frequncy {result} is', max)\n print(\"-------------'THANKYOU---------------\")\n\n\naverage(run)\nhigh(run)\nlow(run)\ncheck(run)\nfeq(run)\n",
"step-4": "run = []\nno_players = int(input('enter the number of the players in the team :'))\nfor i in range(no_players):\n run_score = int(input('Enter the runs scored by the player ' + str(i + \n 1) + ':'))\n run.append(run_score)\n\n\ndef average(run):\n print('____________________________________')\n sum = 0\n for i in range(0, len(run)):\n sum += run[i]\n avg = sum / len(run)\n print('Average score of the team is :', avg)\n\n\ndef high(run):\n print('______________________________________')\n max = run[0]\n for i in range(len(run)):\n if max < run[i]:\n max = run[i]\n print('Highest run score by the player is :', max)\n\n\ndef low(run):\n print('____________________________________')\n mim = run[0]\n for i in range(len(run)):\n if mim > run[i]:\n mim = run[i]\n print('Lowest runs scored by the player is :', mim)\n\n\ndef check(run):\n print('_______________________________________')\n count = 0\n for i in range(0, len(run)):\n if run[i] >= 50:\n count += 1\n else:\n pass\n print(\"Count of the player score more than '50' are :\", count)\n\n\ndef feq(run):\n print('___________________________________')\n max = 0\n result = run[0]\n for i in run:\n freq = run.count(i)\n if freq > max:\n max = freq\n result = i\n print(f'run scored with the highest frequncy {result} is', max)\n print(\"-------------'THANKYOU---------------\")\n\n\naverage(run)\nhigh(run)\nlow(run)\ncheck(run)\nfeq(run)\n",
"step-5": "run=[] #Creating a empty list \r\nno_players=int(input(\"enter the number of the players in the team :\")) \r\nfor i in range (no_players):\r\n run_score=int(input(\"Enter the runs scored by the player \"+str(i+1)+\":\"))\r\n run.append(run_score)\r\n#code for the average score of the team\r\ndef average(run):\r\n print(\"____________________________________\")\r\n sum=0\r\n for i in range (0,len(run)):\r\n sum+=run[i]\r\n avg=sum/len(run)\r\n print(\"Average score of the team is :\",avg)\r\n#code for the maximun runs scored by the players in the team\r\ndef high(run):\r\n print(\"______________________________________\")\r\n max=run[0]\r\n for i in range(len(run)):\r\n if max<run[i]:\r\n max=run[i]\r\n print(\"Highest run score by the player is :\",max)\r\n#code for the minimum runs scored by the players in the team\r\ndef low(run):\r\n print(\"____________________________________\")\r\n mim=run[0]\r\n for i in range(len(run)):\r\n if mim>run[i]:\r\n mim=run[i]\r\n print(\"Lowest runs scored by the player is :\",mim)\r\n#code for the runs scored more than 50 runs in the the team\r\ndef check(run): \r\n print(\"_______________________________________\") \r\n count=0\r\n for i in range(0,len(run)):\r\n if run[i]>=50:\r\n count+=1\r\n else:\r\n pass\r\n print(\"Count of the player score more than '50' are :\",count)\r\n#code for the runs scored for higher number of the frequency\r\ndef feq(run):\r\n print(\"___________________________________\")\r\n max=0\r\n result=run[0]\r\n for i in run:\r\n freq=run.count(i)\r\n if freq>max:\r\n max=freq\r\n result=i\r\n \r\n print(f\"run scored with the highest frequncy {result} is\",max)\r\n print(\"-------------'THANKYOU---------------\")\r\n\r\naverage(run)\r\nhigh(run)\r\nlow(run)\r\ncheck(run)\r\nfeq(run)\r\n\r\n\r\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
#Las listas son similares a las tuplas
# con la diferencia de que permiten modificar los datos una vez creados
miLista = ['cadena', 21, 2.8, 'nuevo dato', 25]
print (miLista)
miLista[2] = 3.8 #el tercer elemento ahora es 3.8
print(miLista)
miLista.append('NuevoDato')
print(miLista)
|
normal
|
{
"blob_id": "27ec06d084bf819383801be0351c04e7d1fc1752",
"index": 5176,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(miLista)\n<mask token>\nprint(miLista)\nmiLista.append('NuevoDato')\nprint(miLista)\n",
"step-3": "miLista = ['cadena', 21, 2.8, 'nuevo dato', 25]\nprint(miLista)\nmiLista[2] = 3.8\nprint(miLista)\nmiLista.append('NuevoDato')\nprint(miLista)\n",
"step-4": "#Las listas son similares a las tuplas\n# con la diferencia de que permiten modificar los datos una vez creados\nmiLista = ['cadena', 21, 2.8, 'nuevo dato', 25]\nprint (miLista)\nmiLista[2] = 3.8 #el tercer elemento ahora es 3.8\nprint(miLista)\nmiLista.append('NuevoDato')\nprint(miLista)\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def test_swap():
cds = np.load('h3o_data/ffinal_h3o.npy')
dws = np.load('h3o_data/ffinal_h3o_dw.npy')
cds = cds[:10]
a = symm.swap_two_atoms(cds, dws, atm_1=1, atm_2=2)
b = symm.swap_group(cds, dws, atm_list_1=[0, 1], atm_list_2=[2, 3])
assert True
<|reserved_special_token_1|>
import pytest
import numpy as np
from GSPA_DMC import SymmetrizeWfn as symm
def test_swap():
cds = np.load('h3o_data/ffinal_h3o.npy')
dws = np.load('h3o_data/ffinal_h3o_dw.npy')
cds = cds[:10]
a = symm.swap_two_atoms(cds, dws, atm_1=1, atm_2=2)
b = symm.swap_group(cds, dws, atm_list_1=[0, 1], atm_list_2=[2, 3])
assert True
|
flexible
|
{
"blob_id": "4ecd756b94b0cbab47a8072e9bccf26e2dd716d0",
"index": 7833,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef test_swap():\n cds = np.load('h3o_data/ffinal_h3o.npy')\n dws = np.load('h3o_data/ffinal_h3o_dw.npy')\n cds = cds[:10]\n a = symm.swap_two_atoms(cds, dws, atm_1=1, atm_2=2)\n b = symm.swap_group(cds, dws, atm_list_1=[0, 1], atm_list_2=[2, 3])\n assert True\n",
"step-3": "import pytest\nimport numpy as np\nfrom GSPA_DMC import SymmetrizeWfn as symm\n\n\ndef test_swap():\n cds = np.load('h3o_data/ffinal_h3o.npy')\n dws = np.load('h3o_data/ffinal_h3o_dw.npy')\n cds = cds[:10]\n a = symm.swap_two_atoms(cds, dws, atm_1=1, atm_2=2)\n b = symm.swap_group(cds, dws, atm_list_1=[0, 1], atm_list_2=[2, 3])\n assert True\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
def getNativeBlockNum(n, k):
"""Get number of native blocks."""
return k * (n - k)
<|reserved_special_token_0|>
def getNodeIdList(n, k):
"""Find the node id for a segment of blocks."""
"""Return a list of node id for the blocks."""
nodeidList = []
segmentSize = n - k
blockNum = getParityBlockNum(n, k)
for i in range(int(blockNum / segmentSize)):
for j in range(segmentSize):
nodeidList.append(i)
return nodeidList
<|reserved_special_token_0|>
def encode(n, k, src, parityCoeff, setting, metadata):
"""Encode src file to parity chunks."""
nativeBlockNum = getNativeBlockNum(n, k)
parityBlockNum = getParityBlockNum(n, k)
infile = open(src, 'rb')
indatalist = infile.read()
infile.close()
totalchunk = nativeBlockNum
filesize = len(indatalist)
for i in range(metadata.totalnode):
fileNode = common.FileNodeMetadata(i)
fileNode.nodekey = setting.nodeInfo[i].nodekey
fileNode.nodetype = setting.nodeInfo[i].nodetype
fileNode.bucketname = setting.nodeInfo[i].bucketname
fileNode.bigchunksize = 0
fileNode.chunknum = 0
metadata.fileNodeInfo.append(fileNode)
if filesize > 0:
chunksize = filesize / totalchunk + 1
indatalist += '\x00' * (chunksize * totalchunk - filesize)
parityCoeff_temp = ''.join([chr(parity) for parity in parityCoeff])
outdatalist = codings.clibfmsr.clibfmsr.encodeComputation(indatalist,
parityCoeff_temp, nativeBlockNum, parityBlockNum, chunksize)
else:
chunksize = 0
nodeIdList = getNodeIdList(n, k)
for i in range(parityBlockNum):
chunk = common.ChunkMetadata(i)
chunk.chunkname = metadata.filename + '.chunk' + str(i)
chunk.chunksize = chunksize
chunk.chunktype = 'parity'
chunk.chunkpath = setting.chunkdir + '/' + chunk.chunkname
nodeid = nodeIdList[i]
chunk.nodeid = nodeid
chunk.nodekey = setting.nodeInfo[nodeid].nodekey
chunk.nodetype = setting.nodeInfo[nodeid].nodetype
chunk.bucketname = setting.nodeInfo[nodeid].bucketname
chunk.action = 'upload'
chunk.position = metadata.fileNodeInfo[nodeid].chunknum
metadata.chunkInfo.append(chunk)
metadata.fileNodeInfo[nodeid].bigchunksize += chunk.chunksize
metadata.fileNodeInfo[nodeid].chunknum += 1
metadata.totalchunk = parityBlockNum
metadata.parityCoeff = parityCoeff[:]
startchunk = 0
writelen = 1048576
for i in range(metadata.totalnode):
dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)
if chunksize > 0:
f = open(dest, 'wb')
numchunks = nodeIdList.count(i)
writenext = startchunk * chunksize
for j in range(startchunk * chunksize, (startchunk + numchunks) *
chunksize - writelen, writelen):
writenext = j + writelen
f.write(outdatalist[j:writenext])
f.write(outdatalist[writenext:(startchunk + numchunks) * chunksize]
)
f.close()
startchunk += numchunks
else:
open(dest, 'wb').close()
metadata.fileNodeInfo[i].bigchunkpath = dest
metadata.fileNodeInfo[i
].bigchunkname = metadata.filename + '.node' + str(i)
metadata.fileNodeInfo[i].action = 'upload'
def reversematrix(n, k, gj_matrix):
"""Reverse matrix."""
nativeBlockNum = getNativeBlockNum(n, k)
parityBlockNum = getParityBlockNum(n, k)
for rowNo in range(nativeBlockNum):
A = GF256int(0)
for i in range(rowNo, nativeBlockNum, 1):
if gj_matrix[i][rowNo] != 0:
A = gj_matrix[i][rowNo]
break
temp_vector = [GF256int(0)] * (nativeBlockNum * 2)
if i != rowNo:
for j in range(nativeBlockNum * 2):
temp_vector[j] = gj_matrix[i][j]
gj_matrix[i][j] = gj_matrix[rowNo][j]
gj_matrix[rowNo][j] = temp_vector[j]
for m in range(nativeBlockNum * 2):
gj_matrix[rowNo][m] = gj_matrix[rowNo][m] / A
for j in range(rowNo + 1, nativeBlockNum, 1):
B = gj_matrix[j][rowNo]
for m in range(rowNo, nativeBlockNum * 2, 1):
gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * B
for rowNo in range(nativeBlockNum - 1, 0, -1):
for j in range(0, rowNo, 1):
C = gj_matrix[j][rowNo]
for m in range(nativeBlockNum * 2):
gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * C
def decode(n, k, src, blocknums, parityCoeff, dest, filesize, setting):
"""Decode chunk files to dest file."""
if filesize <= 0:
open(dest, 'wb').close()
return
cv_temp = []
nativeBlockNum = getNativeBlockNum(n, k)
parityBlockNum = getParityBlockNum(n, k)
enc_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
range(parityBlockNum)]
dec_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
range(nativeBlockNum)]
rev_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
range(nativeBlockNum)]
gj_matrix = [[GF256int(0) for col in range(nativeBlockNum * 2)] for row in
range(nativeBlockNum)]
counter = 0
for i in range(parityBlockNum):
for j in range(nativeBlockNum):
enc_matrix[i][j] = GF256int(parityCoeff[counter])
counter += 1
cm1 = CoeffMatrix(nativeBlockNum)
for i in range(parityBlockNum):
cv_temp.append(CoeffVector(nativeBlockNum))
for j in range(nativeBlockNum):
cv_temp[i].coeff_[j] = enc_matrix[i][j]
cv_temp[i].first()
cm1.addcoeffvector(cv_temp[i])
i = 0
for selectChunkNo in blocknums:
for j in range(nativeBlockNum):
dec_matrix[i][j] = enc_matrix[selectChunkNo][j]
i += 1
for i in range(nativeBlockNum):
for j in range(nativeBlockNum):
if j == i:
rev_matrix[i][j] = GF256int(1)
for i in range(nativeBlockNum):
for j in range(nativeBlockNum * 2):
if j < nativeBlockNum:
gj_matrix[i][j] = dec_matrix[i][j]
else:
gj_matrix[i][j] = rev_matrix[i][j - nativeBlockNum]
reversematrix(n, k, gj_matrix)
for i in range(nativeBlockNum):
for j in range(nativeBlockNum):
dec_matrix[i][j] = gj_matrix[i][j + nativeBlockNum]
selectchunk = []
for filename in src:
infile = open(filename, 'rb')
selectchunk.append(infile.read())
infile.close()
chunksize = os.path.getsize(src[0])
indatalist = ''.join(selectchunk)
parityCoeff_temp = ''.join([chr(dec_matrix[i][j]) for i in range(
nativeBlockNum) for j in range(nativeBlockNum)])
outdatalist = codings.clibfmsr.clibfmsr.decodeComputation(indatalist,
parityCoeff_temp, nativeBlockNum, chunksize)
outfile = open(dest, 'wb')
writelen = 1048576
writenext = 0
for i in range(0, filesize - writelen, writelen):
writenext = i + writelen
outfile.write(outdatalist[i:writenext])
outfile.write(outdatalist[writenext:filesize])
outfile.close()
def getCheckNum(parityBlockNum):
"""Get check number for checking strong MDS, for fmsr(k=n-2) only."""
return int((parityBlockNum - 2) * (parityBlockNum - 2 - 1) / 2 - (
parityBlockNum / 2 - 1))
def getStrongMDSPropertyDegree(repairNodeno, nativeBlockNum, parityBlockNum,
checkNum, enc_matrix):
"""Get strong MDS property degree."""
currentStrongMDSPropertyDegree = 0
survivalcoeffvectorset = []
flag = 0
for i in range(parityBlockNum):
if int(i / 2) != repairNodeno:
survivalcoeffvectorset.append(CoeffVector(nativeBlockNum))
for j in range(nativeBlockNum):
survivalcoeffvectorset[i - flag * 2].coeff_[j] = enc_matrix[i][
j]
survivalcoeffvectorset[i - flag * 2].first()
else:
flag = 1
s = 0
for i in range(parityBlockNum - 2):
for j in range(parityBlockNum - 2):
if i < j:
checkmatrix = CoeffMatrix(nativeBlockNum)
for k in range(parityBlockNum - 2):
if k != i and k != j:
checkmatrix.addcoeffvector(survivalcoeffvectorset[k
].copy())
if checkmatrix.rank_ == nativeBlockNum:
currentStrongMDSPropertyDegree += 1
s += 1
return currentStrongMDSPropertyDegree
<|reserved_special_token_0|>
def checkstongMDS(n, k, nativeBlockNum, parityBlockNum, enc_matrix):
"""Check strong MDS property, for fmsr(k=n-2) only."""
"""Return list of MDS property degrees."""
strongMDSPropertyDegrees = []
checkNum = getCheckNum(parityBlockNum)
for i in range(n):
strongMDSPropertyDegrees.append(getStrongMDSPropertyDegree(i,
nativeBlockNum, parityBlockNum, checkNum, enc_matrix))
return strongMDSPropertyDegrees
def testStrongMDSProperty(strongMDSPropertyDegrees, checkNum, n):
"""Decide whether the current parity coefficient set passes the strong MDS property."""
result = True
threshold = 2 * (n - 1) * (n - 2) - (n - 2) * (n - 3) / 2
for degree in strongMDSPropertyDegrees:
if degree < threshold:
result = False
return result
def functionalRepair(n, k, src, blocknums, failedNode, parityCoeff,
repairChunks, setting, metadata):
"""Functional repair by generating new parity chunks."""
nativeBlockNum = getNativeBlockNum(n, k)
parityBlockNum = getParityBlockNum(n, k)
checkNum = getCheckNum(parityBlockNum)
enc_matrix = metadata.enc_matrix
repairCodingCoeff = metadata.repairCodingCoeff
indatalist = []
for filepath in src:
infile = open(filepath, 'rb')
indatalist.append(infile.read())
infile.close()
chunksize = os.path.getsize(src[0])
if chunksize > 0:
indatalist_temp = ''.join(indatalist)
parityCoeff_temp = []
for i in range(n - k):
for j in range(n - 1):
parityCoeff_temp.append(chr(repairCodingCoeff[i][j]))
parityCoeff_temp = ''.join(parityCoeff_temp)
outdatalist = codings.clibfmsr.clibfmsr.repairComputation(
indatalist_temp, parityCoeff_temp, n, k, chunksize)
counter = 0
for i in range(parityBlockNum):
for j in range(nativeBlockNum):
parityCoeff[counter] = enc_matrix[i][j]
counter += 1
writelen = 1048576
writenext = 0
for i in range(metadata.totalnode):
if setting.nodeInfo[i].healthy == False:
dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i
)
filesize = metadata.fileNodeInfo[i].bigchunksize
if chunksize <= 0:
open(dest, 'wb').close()
else:
outfile = open(dest, 'wb')
for j in range(0, filesize - writelen, writelen):
writenext = j + writelen
outfile.write(outdatalist[j:writenext])
outfile.write(outdatalist[writenext:filesize])
outfile.close()
metadata.fileNodeInfo[i].bigchunkpath = dest
metadata.fileNodeInfo[i
].bigchunkname = metadata.filename + '.node' + str(i)
metadata.fileNodeInfo[i].action = 'upload'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def getNativeBlockNum(n, k):
"""Get number of native blocks."""
return k * (n - k)
def getParityBlockNum(n, k):
"""Get number of parity blocks."""
return n * (n - k)
def getNodeIdList(n, k):
"""Find the node id for a segment of blocks."""
"""Return a list of node id for the blocks."""
nodeidList = []
segmentSize = n - k
blockNum = getParityBlockNum(n, k)
for i in range(int(blockNum / segmentSize)):
for j in range(segmentSize):
nodeidList.append(i)
return nodeidList
<|reserved_special_token_0|>
def encode(n, k, src, parityCoeff, setting, metadata):
"""Encode src file to parity chunks."""
nativeBlockNum = getNativeBlockNum(n, k)
parityBlockNum = getParityBlockNum(n, k)
infile = open(src, 'rb')
indatalist = infile.read()
infile.close()
totalchunk = nativeBlockNum
filesize = len(indatalist)
for i in range(metadata.totalnode):
fileNode = common.FileNodeMetadata(i)
fileNode.nodekey = setting.nodeInfo[i].nodekey
fileNode.nodetype = setting.nodeInfo[i].nodetype
fileNode.bucketname = setting.nodeInfo[i].bucketname
fileNode.bigchunksize = 0
fileNode.chunknum = 0
metadata.fileNodeInfo.append(fileNode)
if filesize > 0:
chunksize = filesize / totalchunk + 1
indatalist += '\x00' * (chunksize * totalchunk - filesize)
parityCoeff_temp = ''.join([chr(parity) for parity in parityCoeff])
outdatalist = codings.clibfmsr.clibfmsr.encodeComputation(indatalist,
parityCoeff_temp, nativeBlockNum, parityBlockNum, chunksize)
else:
chunksize = 0
nodeIdList = getNodeIdList(n, k)
for i in range(parityBlockNum):
chunk = common.ChunkMetadata(i)
chunk.chunkname = metadata.filename + '.chunk' + str(i)
chunk.chunksize = chunksize
chunk.chunktype = 'parity'
chunk.chunkpath = setting.chunkdir + '/' + chunk.chunkname
nodeid = nodeIdList[i]
chunk.nodeid = nodeid
chunk.nodekey = setting.nodeInfo[nodeid].nodekey
chunk.nodetype = setting.nodeInfo[nodeid].nodetype
chunk.bucketname = setting.nodeInfo[nodeid].bucketname
chunk.action = 'upload'
chunk.position = metadata.fileNodeInfo[nodeid].chunknum
metadata.chunkInfo.append(chunk)
metadata.fileNodeInfo[nodeid].bigchunksize += chunk.chunksize
metadata.fileNodeInfo[nodeid].chunknum += 1
metadata.totalchunk = parityBlockNum
metadata.parityCoeff = parityCoeff[:]
startchunk = 0
writelen = 1048576
for i in range(metadata.totalnode):
dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)
if chunksize > 0:
f = open(dest, 'wb')
numchunks = nodeIdList.count(i)
writenext = startchunk * chunksize
for j in range(startchunk * chunksize, (startchunk + numchunks) *
chunksize - writelen, writelen):
writenext = j + writelen
f.write(outdatalist[j:writenext])
f.write(outdatalist[writenext:(startchunk + numchunks) * chunksize]
)
f.close()
startchunk += numchunks
else:
open(dest, 'wb').close()
metadata.fileNodeInfo[i].bigchunkpath = dest
metadata.fileNodeInfo[i
].bigchunkname = metadata.filename + '.node' + str(i)
metadata.fileNodeInfo[i].action = 'upload'
def reversematrix(n, k, gj_matrix):
"""Reverse matrix."""
nativeBlockNum = getNativeBlockNum(n, k)
parityBlockNum = getParityBlockNum(n, k)
for rowNo in range(nativeBlockNum):
A = GF256int(0)
for i in range(rowNo, nativeBlockNum, 1):
if gj_matrix[i][rowNo] != 0:
A = gj_matrix[i][rowNo]
break
temp_vector = [GF256int(0)] * (nativeBlockNum * 2)
if i != rowNo:
for j in range(nativeBlockNum * 2):
temp_vector[j] = gj_matrix[i][j]
gj_matrix[i][j] = gj_matrix[rowNo][j]
gj_matrix[rowNo][j] = temp_vector[j]
for m in range(nativeBlockNum * 2):
gj_matrix[rowNo][m] = gj_matrix[rowNo][m] / A
for j in range(rowNo + 1, nativeBlockNum, 1):
B = gj_matrix[j][rowNo]
for m in range(rowNo, nativeBlockNum * 2, 1):
gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * B
for rowNo in range(nativeBlockNum - 1, 0, -1):
for j in range(0, rowNo, 1):
C = gj_matrix[j][rowNo]
for m in range(nativeBlockNum * 2):
gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * C
def decode(n, k, src, blocknums, parityCoeff, dest, filesize, setting):
"""Decode chunk files to dest file."""
if filesize <= 0:
open(dest, 'wb').close()
return
cv_temp = []
nativeBlockNum = getNativeBlockNum(n, k)
parityBlockNum = getParityBlockNum(n, k)
enc_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
range(parityBlockNum)]
dec_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
range(nativeBlockNum)]
rev_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
range(nativeBlockNum)]
gj_matrix = [[GF256int(0) for col in range(nativeBlockNum * 2)] for row in
range(nativeBlockNum)]
counter = 0
for i in range(parityBlockNum):
for j in range(nativeBlockNum):
enc_matrix[i][j] = GF256int(parityCoeff[counter])
counter += 1
cm1 = CoeffMatrix(nativeBlockNum)
for i in range(parityBlockNum):
cv_temp.append(CoeffVector(nativeBlockNum))
for j in range(nativeBlockNum):
cv_temp[i].coeff_[j] = enc_matrix[i][j]
cv_temp[i].first()
cm1.addcoeffvector(cv_temp[i])
i = 0
for selectChunkNo in blocknums:
for j in range(nativeBlockNum):
dec_matrix[i][j] = enc_matrix[selectChunkNo][j]
i += 1
for i in range(nativeBlockNum):
for j in range(nativeBlockNum):
if j == i:
rev_matrix[i][j] = GF256int(1)
for i in range(nativeBlockNum):
for j in range(nativeBlockNum * 2):
if j < nativeBlockNum:
gj_matrix[i][j] = dec_matrix[i][j]
else:
gj_matrix[i][j] = rev_matrix[i][j - nativeBlockNum]
reversematrix(n, k, gj_matrix)
for i in range(nativeBlockNum):
for j in range(nativeBlockNum):
dec_matrix[i][j] = gj_matrix[i][j + nativeBlockNum]
selectchunk = []
for filename in src:
infile = open(filename, 'rb')
selectchunk.append(infile.read())
infile.close()
chunksize = os.path.getsize(src[0])
indatalist = ''.join(selectchunk)
parityCoeff_temp = ''.join([chr(dec_matrix[i][j]) for i in range(
nativeBlockNum) for j in range(nativeBlockNum)])
outdatalist = codings.clibfmsr.clibfmsr.decodeComputation(indatalist,
parityCoeff_temp, nativeBlockNum, chunksize)
outfile = open(dest, 'wb')
writelen = 1048576
writenext = 0
for i in range(0, filesize - writelen, writelen):
writenext = i + writelen
outfile.write(outdatalist[i:writenext])
outfile.write(outdatalist[writenext:filesize])
outfile.close()
def getCheckNum(parityBlockNum):
"""Get check number for checking strong MDS, for fmsr(k=n-2) only."""
return int((parityBlockNum - 2) * (parityBlockNum - 2 - 1) / 2 - (
parityBlockNum / 2 - 1))
def getStrongMDSPropertyDegree(repairNodeno, nativeBlockNum, parityBlockNum,
checkNum, enc_matrix):
"""Get strong MDS property degree."""
currentStrongMDSPropertyDegree = 0
survivalcoeffvectorset = []
flag = 0
for i in range(parityBlockNum):
if int(i / 2) != repairNodeno:
survivalcoeffvectorset.append(CoeffVector(nativeBlockNum))
for j in range(nativeBlockNum):
survivalcoeffvectorset[i - flag * 2].coeff_[j] = enc_matrix[i][
j]
survivalcoeffvectorset[i - flag * 2].first()
else:
flag = 1
s = 0
for i in range(parityBlockNum - 2):
for j in range(parityBlockNum - 2):
if i < j:
checkmatrix = CoeffMatrix(nativeBlockNum)
for k in range(parityBlockNum - 2):
if k != i and k != j:
checkmatrix.addcoeffvector(survivalcoeffvectorset[k
].copy())
if checkmatrix.rank_ == nativeBlockNum:
currentStrongMDSPropertyDegree += 1
s += 1
return currentStrongMDSPropertyDegree
def checkMDS(MSR_n, MSR_k, enc_matrix):
"""Check MDS property, for fmsr(k=n-2) only."""
"""Return a MDS property value."""
nativeBlockNum = getNativeBlockNum(MSR_n, MSR_k)
parityBlockNum = getParityBlockNum(MSR_n, MSR_k)
MDSpropery = True
allcoeffvectors = []
for i in range(parityBlockNum):
allcoeffvectors.append(CoeffVector(nativeBlockNum))
for j in range(nativeBlockNum):
allcoeffvectors[i].coeff_[j] = enc_matrix[i][j]
allcoeffvectors[i].first()
permutation = int(MSR_n * (MSR_n - 1) / 2)
checkmatrix = [CoeffMatrix(nativeBlockNum) for col in range(permutation)]
s = 0
for i in range(MSR_n):
for j in range(MSR_n):
if i < j:
for b in range(MSR_n):
if b != i and b != j:
checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2
].copy())
checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2 +
1].copy())
if checkmatrix[s].rank_ != nativeBlockNum:
MDSpropery = False
s += 1
return MDSpropery
def checkstongMDS(n, k, nativeBlockNum, parityBlockNum, enc_matrix):
"""Check strong MDS property, for fmsr(k=n-2) only."""
"""Return list of MDS property degrees."""
strongMDSPropertyDegrees = []
checkNum = getCheckNum(parityBlockNum)
for i in range(n):
strongMDSPropertyDegrees.append(getStrongMDSPropertyDegree(i,
nativeBlockNum, parityBlockNum, checkNum, enc_matrix))
return strongMDSPropertyDegrees
def testStrongMDSProperty(strongMDSPropertyDegrees, checkNum, n):
"""Decide whether the current parity coefficient set passes the strong MDS property."""
result = True
threshold = 2 * (n - 1) * (n - 2) - (n - 2) * (n - 3) / 2
for degree in strongMDSPropertyDegrees:
if degree < threshold:
result = False
return result
def functionalRepair(n, k, src, blocknums, failedNode, parityCoeff,
    repairChunks, setting, metadata):
    """Functional repair by generating new parity chunks."""
    # Reads the surviving chunks listed in `src`, asks the C library to
    # compute replacement parity data, rewrites the big-chunk files of every
    # unhealthy node, and refreshes `parityCoeff` from metadata.enc_matrix.
    # NOTE(review): `blocknums`, `failedNode`, `repairChunks` and `checkNum`
    # are not used in this body -- presumably interface compatibility; confirm.
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)
    checkNum = getCheckNum(parityBlockNum)
    enc_matrix = metadata.enc_matrix
    repairCodingCoeff = metadata.repairCodingCoeff
    indatalist = []
    # Slurp every survivor chunk into memory (Python 2: str is bytes).
    for filepath in src:
        infile = open(filepath, 'rb')
        indatalist.append(infile.read())
        infile.close()
    chunksize = os.path.getsize(src[0])
    if chunksize > 0:
        indatalist_temp = ''.join(indatalist)
        parityCoeff_temp = []
        # Serialize the (n-k) x (n-1) repair coefficients as raw bytes.
        for i in range(n - k):
            for j in range(n - 1):
                parityCoeff_temp.append(chr(repairCodingCoeff[i][j]))
        parityCoeff_temp = ''.join(parityCoeff_temp)
        outdatalist = codings.clibfmsr.clibfmsr.repairComputation(
            indatalist_temp, parityCoeff_temp, n, k, chunksize)
    # Flatten the encoding matrix back into the caller-supplied parityCoeff.
    counter = 0
    for i in range(parityBlockNum):
        for j in range(nativeBlockNum):
            parityCoeff[counter] = enc_matrix[i][j]
            counter += 1
    writelen = 1048576  # write in 1 MiB slices
    writenext = 0
    for i in range(metadata.totalnode):
        if setting.nodeInfo[i].healthy == False:
            dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i
                )
            filesize = metadata.fileNodeInfo[i].bigchunksize
            if chunksize <= 0:
                # Zero-byte file: just (re)create an empty big-chunk.
                open(dest, 'wb').close()
            else:
                outfile = open(dest, 'wb')
                for j in range(0, filesize - writelen, writelen):
                    writenext = j + writelen
                    outfile.write(outdatalist[j:writenext])
                # NOTE(review): `writenext` carries over between nodes; fine
                # when exactly one node is repaired, looks stale if several
                # nodes are unhealthy -- confirm intended behavior.
                outfile.write(outdatalist[writenext:filesize])
                outfile.close()
            metadata.fileNodeInfo[i].bigchunkpath = dest
            metadata.fileNodeInfo[i
                ].bigchunkname = metadata.filename + '.node' + str(i)
            metadata.fileNodeInfo[i].action = 'upload'
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def getNativeBlockNum(n, k):
    """Return the number of native (data) blocks: k * (n - k)."""
    return (n - k) * k
def getParityBlockNum(n, k):
    """Return the number of parity blocks: n * (n - k)."""
    return (n - k) * n
def getNodeIdList(n, k):
    """Return, for every parity block, the id of the node storing it.

    Blocks are laid out in segments of n-k consecutive blocks; segment i
    lives entirely on node i.
    """
    segmentSize = n - k
    blockNum = n * segmentSize  # inlined getParityBlockNum(n, k)
    return [nodeid
            for nodeid in range(int(blockNum / segmentSize))
            for _ in range(segmentSize)]
def getParityCoeff(n, k):
    """Return the flattened (row-major) parity coefficient matrix.

    Row i holds the GF(2^8) powers (i+1)**j for j = 0..nativeBlockNum-1.
    """
    rowCount = getParityBlockNum(n, k)
    colCount = getNativeBlockNum(n, k)
    return [GF256int(row + 1) ** col
            for row in range(rowCount)
            for col in range(colCount)]
def encode(n, k, src, parityCoeff, setting, metadata):
    """Encode src file to parity chunks."""
    # Reads the whole source file, pads it to a multiple of the chunk size,
    # runs the C-library encoder, and writes one "big-chunk" file per node
    # while filling in per-chunk and per-node metadata.
    # NOTE(review): Python 2 semantics assumed throughout (str is bytes,
    # `/` is integer division) -- confirm before running under Python 3.
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)
    infile = open(src, 'rb')
    indatalist = infile.read()
    infile.close()
    totalchunk = nativeBlockNum
    filesize = len(indatalist)
    # Initialize one big-chunk metadata record per storage node.
    for i in range(metadata.totalnode):
        fileNode = common.FileNodeMetadata(i)
        fileNode.nodekey = setting.nodeInfo[i].nodekey
        fileNode.nodetype = setting.nodeInfo[i].nodetype
        fileNode.bucketname = setting.nodeInfo[i].bucketname
        fileNode.bigchunksize = 0
        fileNode.chunknum = 0
        metadata.fileNodeInfo.append(fileNode)
    if filesize > 0:
        # +1 so the padded size covers the division remainder.
        chunksize = filesize / totalchunk + 1
        indatalist += '\x00' * (chunksize * totalchunk - filesize)
        parityCoeff_temp = ''.join([chr(parity) for parity in parityCoeff])
        outdatalist = codings.clibfmsr.clibfmsr.encodeComputation(indatalist,
            parityCoeff_temp, nativeBlockNum, parityBlockNum, chunksize)
    else:
        chunksize = 0
    # Per-parity-chunk metadata; each chunk maps to a node via nodeIdList.
    nodeIdList = getNodeIdList(n, k)
    for i in range(parityBlockNum):
        chunk = common.ChunkMetadata(i)
        chunk.chunkname = metadata.filename + '.chunk' + str(i)
        chunk.chunksize = chunksize
        chunk.chunktype = 'parity'
        chunk.chunkpath = setting.chunkdir + '/' + chunk.chunkname
        nodeid = nodeIdList[i]
        chunk.nodeid = nodeid
        chunk.nodekey = setting.nodeInfo[nodeid].nodekey
        chunk.nodetype = setting.nodeInfo[nodeid].nodetype
        chunk.bucketname = setting.nodeInfo[nodeid].bucketname
        chunk.action = 'upload'
        # Position of this chunk inside its node's big-chunk file.
        chunk.position = metadata.fileNodeInfo[nodeid].chunknum
        metadata.chunkInfo.append(chunk)
        metadata.fileNodeInfo[nodeid].bigchunksize += chunk.chunksize
        metadata.fileNodeInfo[nodeid].chunknum += 1
    metadata.totalchunk = parityBlockNum
    metadata.parityCoeff = parityCoeff[:]
    # Write each node's big-chunk in 1 MiB slices.
    startchunk = 0
    writelen = 1048576
    for i in range(metadata.totalnode):
        dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)
        if chunksize > 0:
            f = open(dest, 'wb')
            numchunks = nodeIdList.count(i)
            writenext = startchunk * chunksize
            for j in range(startchunk * chunksize, (startchunk + numchunks) *
                chunksize - writelen, writelen):
                writenext = j + writelen
                f.write(outdatalist[j:writenext])
            # Flush the tail that the sliced loop did not cover.
            f.write(outdatalist[writenext:(startchunk + numchunks) * chunksize]
                )
            f.close()
            startchunk += numchunks
        else:
            # Empty input: create empty big-chunk files.
            open(dest, 'wb').close()
        metadata.fileNodeInfo[i].bigchunkpath = dest
        metadata.fileNodeInfo[i
            ].bigchunkname = metadata.filename + '.node' + str(i)
        metadata.fileNodeInfo[i].action = 'upload'
def reversematrix(n, k, gj_matrix):
    """Reverse matrix."""
    # In-place Gauss-Jordan elimination over GF(2^8) on the augmented matrix
    # gj_matrix = [dec_matrix | identity]; on return the right half holds the
    # inverse of the original left half.
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)  # NOTE(review): unused here
    # Forward elimination: left half -> upper triangular with unit pivots.
    for rowNo in range(nativeBlockNum):
        # Find a pivot row with a non-zero entry in column rowNo.
        A = GF256int(0)
        for i in range(rowNo, nativeBlockNum, 1):
            if gj_matrix[i][rowNo] != 0:
                A = gj_matrix[i][rowNo]
                break
        # NOTE(review): relies on `i` leaking from the loop above; if no pivot
        # is found, A stays 0 and the division below fails -- assumes the
        # selected rows are linearly independent (TODO confirm).
        temp_vector = [GF256int(0)] * (nativeBlockNum * 2)
        if i != rowNo:
            # Swap the pivot row into position rowNo.
            for j in range(nativeBlockNum * 2):
                temp_vector[j] = gj_matrix[i][j]
                gj_matrix[i][j] = gj_matrix[rowNo][j]
                gj_matrix[rowNo][j] = temp_vector[j]
        # Normalize the pivot row so the pivot entry becomes 1.
        for m in range(nativeBlockNum * 2):
            gj_matrix[rowNo][m] = gj_matrix[rowNo][m] / A
        # Eliminate the pivot column from all rows below.
        for j in range(rowNo + 1, nativeBlockNum, 1):
            B = gj_matrix[j][rowNo]
            for m in range(rowNo, nativeBlockNum * 2, 1):
                gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * B
    # Back substitution: clear the entries above each pivot.
    for rowNo in range(nativeBlockNum - 1, 0, -1):
        for j in range(0, rowNo, 1):
            C = gj_matrix[j][rowNo]
            for m in range(nativeBlockNum * 2):
                gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * C
def decode(n, k, src, blocknums, parityCoeff, dest, filesize, setting):
    """Decode chunk files to dest file."""
    # Special case: a 0-byte file decodes to an empty output file.
    if filesize <= 0:
        open(dest, 'wb').close()
        return
    cv_temp = []
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)
    # enc_matrix: full encoding matrix rebuilt from parityCoeff;
    # dec_matrix: rows selected by blocknums; rev_matrix: identity;
    # gj_matrix: augmented [dec | rev] fed to reversematrix().
    enc_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
        range(parityBlockNum)]
    dec_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
        range(nativeBlockNum)]
    rev_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
        range(nativeBlockNum)]
    gj_matrix = [[GF256int(0) for col in range(nativeBlockNum * 2)] for row in
        range(nativeBlockNum)]
    counter = 0
    for i in range(parityBlockNum):
        for j in range(nativeBlockNum):
            enc_matrix[i][j] = GF256int(parityCoeff[counter])
            counter += 1
    # NOTE(review): cm1 is built but never read afterwards -- confirm whether
    # it is kept only for CoeffMatrix side effects.
    cm1 = CoeffMatrix(nativeBlockNum)
    for i in range(parityBlockNum):
        cv_temp.append(CoeffVector(nativeBlockNum))
        for j in range(nativeBlockNum):
            cv_temp[i].coeff_[j] = enc_matrix[i][j]
        cv_temp[i].first()
        cm1.addcoeffvector(cv_temp[i])
    # Select the rows of the chunks we actually downloaded.
    i = 0
    for selectChunkNo in blocknums:
        for j in range(nativeBlockNum):
            dec_matrix[i][j] = enc_matrix[selectChunkNo][j]
        i += 1
    # Identity matrix for the augmented right half.
    for i in range(nativeBlockNum):
        for j in range(nativeBlockNum):
            if j == i:
                rev_matrix[i][j] = GF256int(1)
    for i in range(nativeBlockNum):
        for j in range(nativeBlockNum * 2):
            if j < nativeBlockNum:
                gj_matrix[i][j] = dec_matrix[i][j]
            else:
                gj_matrix[i][j] = rev_matrix[i][j - nativeBlockNum]
    # Invert in place; the right half of gj_matrix becomes dec_matrix^-1.
    reversematrix(n, k, gj_matrix)
    for i in range(nativeBlockNum):
        for j in range(nativeBlockNum):
            dec_matrix[i][j] = gj_matrix[i][j + nativeBlockNum]
    # Concatenate the selected chunk payloads (Python 2: str is bytes).
    selectchunk = []
    for filename in src:
        infile = open(filename, 'rb')
        selectchunk.append(infile.read())
        infile.close()
    chunksize = os.path.getsize(src[0])
    indatalist = ''.join(selectchunk)
    # Serialize the inverse matrix as raw bytes for the C decoder.
    parityCoeff_temp = ''.join([chr(dec_matrix[i][j]) for i in range(
        nativeBlockNum) for j in range(nativeBlockNum)])
    outdatalist = codings.clibfmsr.clibfmsr.decodeComputation(indatalist,
        parityCoeff_temp, nativeBlockNum, chunksize)
    # Write the decoded file in 1 MiB slices, truncating the padding.
    outfile = open(dest, 'wb')
    writelen = 1048576
    writenext = 0
    for i in range(0, filesize - writelen, writelen):
        writenext = i + writelen
        outfile.write(outdatalist[i:writenext])
    outfile.write(outdatalist[writenext:filesize])
    outfile.close()
def getCheckNum(parityBlockNum):
    """Return how many subset combinations the strong-MDS test examines.

    Valid for fmsr with k = n-2 only.
    """
    survivors = parityBlockNum - 2
    pairCount = survivors * (survivors - 1) / 2
    return int(pairCount - (parityBlockNum / 2 - 1))
def getStrongMDSPropertyDegree(repairNodeno, nativeBlockNum, parityBlockNum,
    checkNum, enc_matrix):
    """Get strong MDS property degree."""
    # Counts how many subsets of the blocks surviving the failure of node
    # `repairNodeno` (dropping two more survivors at a time) still have full
    # rank. NOTE(review): `checkNum` and `s` are unused here -- confirm.
    currentStrongMDSPropertyDegree = 0
    survivalcoeffvectorset = []
    flag = 0
    for i in range(parityBlockNum):
        # Node int(i/2) owns block i: two blocks per node (k = n-2 layout).
        if int(i / 2) != repairNodeno:
            survivalcoeffvectorset.append(CoeffVector(nativeBlockNum))
            # `flag*2` re-compacts indices after the failed node's two
            # blocks are skipped (valid because exactly one node is skipped).
            for j in range(nativeBlockNum):
                survivalcoeffvectorset[i - flag * 2].coeff_[j] = enc_matrix[i][
                    j]
            survivalcoeffvectorset[i - flag * 2].first()
        else:
            flag = 1
    s = 0
    for i in range(parityBlockNum - 2):
        for j in range(parityBlockNum - 2):
            if i < j:
                # Drop survivors i and j; check whether the remaining
                # vectors still span the native blocks.
                checkmatrix = CoeffMatrix(nativeBlockNum)
                for k in range(parityBlockNum - 2):
                    if k != i and k != j:
                        checkmatrix.addcoeffvector(survivalcoeffvectorset[k
                            ].copy())
                if checkmatrix.rank_ == nativeBlockNum:
                    currentStrongMDSPropertyDegree += 1
                s += 1
    return currentStrongMDSPropertyDegree
def checkMDS(MSR_n, MSR_k, enc_matrix):
    """Check MDS property, for fmsr(k=n-2) only."""
    """Return a MDS property value."""
    # (The second string above is a bare no-op statement, kept byte-identical.)
    # Returns True iff every choice of n-2 surviving nodes still spans all
    # native blocks, i.e. any two node failures are tolerable.
    nativeBlockNum = getNativeBlockNum(MSR_n, MSR_k)
    parityBlockNum = getParityBlockNum(MSR_n, MSR_k)
    MDSpropery = True  # (sic) original variable name kept
    allcoeffvectors = []
    # Wrap each encoding-matrix row in a CoeffVector for rank computation.
    for i in range(parityBlockNum):
        allcoeffvectors.append(CoeffVector(nativeBlockNum))
        for j in range(nativeBlockNum):
            allcoeffvectors[i].coeff_[j] = enc_matrix[i][j]
        allcoeffvectors[i].first()
    # One check matrix per unordered pair (i, j) of hypothetically failed nodes.
    permutation = int(MSR_n * (MSR_n - 1) / 2)
    checkmatrix = [CoeffMatrix(nativeBlockNum) for col in range(permutation)]
    s = 0
    for i in range(MSR_n):
        for j in range(MSR_n):
            if i < j:
                # Gather both parity vectors of every surviving node b;
                # node b owns blocks 2b and 2b+1 (assumes k = n-2 layout).
                for b in range(MSR_n):
                    if b != i and b != j:
                        checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2
                            ].copy())
                        checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2 +
                            1].copy())
                # Full rank means the survivors can rebuild the native data.
                if checkmatrix[s].rank_ != nativeBlockNum:
                    MDSpropery = False
                s += 1
    return MDSpropery
def checkstongMDS(n, k, nativeBlockNum, parityBlockNum, enc_matrix):
    """Check strong MDS property, for fmsr(k=n-2) only.

    Returns the list of strong-MDS property degrees, one entry per node.
    """
    checkNum = getCheckNum(parityBlockNum)
    return [getStrongMDSPropertyDegree(node, nativeBlockNum, parityBlockNum,
                                       checkNum, enc_matrix)
            for node in range(n)]
def testStrongMDSProperty(strongMDSPropertyDegrees, checkNum, n):
"""Decide whether the current parity coefficient set passes the strong MDS property."""
result = True
threshold = 2 * (n - 1) * (n - 2) - (n - 2) * (n - 3) / 2
for degree in strongMDSPropertyDegrees:
if degree < threshold:
result = False
return result
def functionalRepair(n, k, src, blocknums, failedNode, parityCoeff,
    repairChunks, setting, metadata):
    """Functional repair by generating new parity chunks."""
    # Reads the surviving chunks listed in `src`, asks the C library to
    # compute replacement parity data, rewrites the big-chunk files of every
    # unhealthy node, and refreshes `parityCoeff` from metadata.enc_matrix.
    # NOTE(review): `blocknums`, `failedNode`, `repairChunks` and `checkNum`
    # are not used in this body -- presumably interface compatibility; confirm.
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)
    checkNum = getCheckNum(parityBlockNum)
    enc_matrix = metadata.enc_matrix
    repairCodingCoeff = metadata.repairCodingCoeff
    indatalist = []
    # Slurp every survivor chunk into memory (Python 2: str is bytes).
    for filepath in src:
        infile = open(filepath, 'rb')
        indatalist.append(infile.read())
        infile.close()
    chunksize = os.path.getsize(src[0])
    if chunksize > 0:
        indatalist_temp = ''.join(indatalist)
        parityCoeff_temp = []
        # Serialize the (n-k) x (n-1) repair coefficients as raw bytes.
        for i in range(n - k):
            for j in range(n - 1):
                parityCoeff_temp.append(chr(repairCodingCoeff[i][j]))
        parityCoeff_temp = ''.join(parityCoeff_temp)
        outdatalist = codings.clibfmsr.clibfmsr.repairComputation(
            indatalist_temp, parityCoeff_temp, n, k, chunksize)
    # Flatten the encoding matrix back into the caller-supplied parityCoeff.
    counter = 0
    for i in range(parityBlockNum):
        for j in range(nativeBlockNum):
            parityCoeff[counter] = enc_matrix[i][j]
            counter += 1
    writelen = 1048576  # write in 1 MiB slices
    writenext = 0
    for i in range(metadata.totalnode):
        if setting.nodeInfo[i].healthy == False:
            dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i
                )
            filesize = metadata.fileNodeInfo[i].bigchunksize
            if chunksize <= 0:
                # Zero-byte file: just (re)create an empty big-chunk.
                open(dest, 'wb').close()
            else:
                outfile = open(dest, 'wb')
                for j in range(0, filesize - writelen, writelen):
                    writenext = j + writelen
                    outfile.write(outdatalist[j:writenext])
                # NOTE(review): `writenext` carries over between nodes; fine
                # when exactly one node is repaired, looks stale if several
                # nodes are unhealthy -- confirm intended behavior.
                outfile.write(outdatalist[writenext:filesize])
                outfile.close()
            metadata.fileNodeInfo[i].bigchunkpath = dest
            metadata.fileNodeInfo[i
                ].bigchunkname = metadata.filename + '.node' + str(i)
            metadata.fileNodeInfo[i].action = 'upload'
<|reserved_special_token_1|>
import sys
import os
import random
from finitefield import GF256int
from coeffvector import CoeffVector
from coeffvector import CoeffMatrix
import common
import codings.clibfmsr.clibfmsr
useClibfmsr = True
def getNativeBlockNum(n, k):
    """Return the number of native (data) blocks: k * (n - k)."""
    return (n - k) * k
def getParityBlockNum(n, k):
    """Return the number of parity blocks: n * (n - k)."""
    return (n - k) * n
def getNodeIdList(n, k):
    """Return, for every parity block, the id of the node storing it.

    Blocks are laid out in segments of n-k consecutive blocks; segment i
    lives entirely on node i.
    """
    segmentSize = n - k
    blockNum = n * segmentSize  # inlined getParityBlockNum(n, k)
    return [nodeid
            for nodeid in range(int(blockNum / segmentSize))
            for _ in range(segmentSize)]
def getParityCoeff(n, k):
    """Return the flattened (row-major) parity coefficient matrix.

    Row i holds the GF(2^8) powers (i+1)**j for j = 0..nativeBlockNum-1.
    """
    rowCount = getParityBlockNum(n, k)
    colCount = getNativeBlockNum(n, k)
    return [GF256int(row + 1) ** col
            for row in range(rowCount)
            for col in range(colCount)]
def encode(n, k, src, parityCoeff, setting, metadata):
    """Encode src file to parity chunks."""
    # Reads the whole source file, pads it to a multiple of the chunk size,
    # runs the C-library encoder, and writes one "big-chunk" file per node
    # while filling in per-chunk and per-node metadata.
    # NOTE(review): Python 2 semantics assumed throughout (str is bytes,
    # `/` is integer division) -- confirm before running under Python 3.
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)
    infile = open(src, 'rb')
    indatalist = infile.read()
    infile.close()
    totalchunk = nativeBlockNum
    filesize = len(indatalist)
    # Initialize one big-chunk metadata record per storage node.
    for i in range(metadata.totalnode):
        fileNode = common.FileNodeMetadata(i)
        fileNode.nodekey = setting.nodeInfo[i].nodekey
        fileNode.nodetype = setting.nodeInfo[i].nodetype
        fileNode.bucketname = setting.nodeInfo[i].bucketname
        fileNode.bigchunksize = 0
        fileNode.chunknum = 0
        metadata.fileNodeInfo.append(fileNode)
    if filesize > 0:
        # +1 so the padded size covers the division remainder.
        chunksize = filesize / totalchunk + 1
        indatalist += '\x00' * (chunksize * totalchunk - filesize)
        parityCoeff_temp = ''.join([chr(parity) for parity in parityCoeff])
        outdatalist = codings.clibfmsr.clibfmsr.encodeComputation(indatalist,
            parityCoeff_temp, nativeBlockNum, parityBlockNum, chunksize)
    else:
        chunksize = 0
    # Per-parity-chunk metadata; each chunk maps to a node via nodeIdList.
    nodeIdList = getNodeIdList(n, k)
    for i in range(parityBlockNum):
        chunk = common.ChunkMetadata(i)
        chunk.chunkname = metadata.filename + '.chunk' + str(i)
        chunk.chunksize = chunksize
        chunk.chunktype = 'parity'
        chunk.chunkpath = setting.chunkdir + '/' + chunk.chunkname
        nodeid = nodeIdList[i]
        chunk.nodeid = nodeid
        chunk.nodekey = setting.nodeInfo[nodeid].nodekey
        chunk.nodetype = setting.nodeInfo[nodeid].nodetype
        chunk.bucketname = setting.nodeInfo[nodeid].bucketname
        chunk.action = 'upload'
        # Position of this chunk inside its node's big-chunk file.
        chunk.position = metadata.fileNodeInfo[nodeid].chunknum
        metadata.chunkInfo.append(chunk)
        metadata.fileNodeInfo[nodeid].bigchunksize += chunk.chunksize
        metadata.fileNodeInfo[nodeid].chunknum += 1
    metadata.totalchunk = parityBlockNum
    metadata.parityCoeff = parityCoeff[:]
    # Write each node's big-chunk in 1 MiB slices.
    startchunk = 0
    writelen = 1048576
    for i in range(metadata.totalnode):
        dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)
        if chunksize > 0:
            f = open(dest, 'wb')
            numchunks = nodeIdList.count(i)
            writenext = startchunk * chunksize
            for j in range(startchunk * chunksize, (startchunk + numchunks) *
                chunksize - writelen, writelen):
                writenext = j + writelen
                f.write(outdatalist[j:writenext])
            # Flush the tail that the sliced loop did not cover.
            f.write(outdatalist[writenext:(startchunk + numchunks) * chunksize]
                )
            f.close()
            startchunk += numchunks
        else:
            # Empty input: create empty big-chunk files.
            open(dest, 'wb').close()
        metadata.fileNodeInfo[i].bigchunkpath = dest
        metadata.fileNodeInfo[i
            ].bigchunkname = metadata.filename + '.node' + str(i)
        metadata.fileNodeInfo[i].action = 'upload'
def reversematrix(n, k, gj_matrix):
    """Reverse matrix."""
    # In-place Gauss-Jordan elimination over GF(2^8) on the augmented matrix
    # gj_matrix = [dec_matrix | identity]; on return the right half holds the
    # inverse of the original left half.
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)  # NOTE(review): unused here
    # Forward elimination: left half -> upper triangular with unit pivots.
    for rowNo in range(nativeBlockNum):
        # Find a pivot row with a non-zero entry in column rowNo.
        A = GF256int(0)
        for i in range(rowNo, nativeBlockNum, 1):
            if gj_matrix[i][rowNo] != 0:
                A = gj_matrix[i][rowNo]
                break
        # NOTE(review): relies on `i` leaking from the loop above; if no pivot
        # is found, A stays 0 and the division below fails -- assumes the
        # selected rows are linearly independent (TODO confirm).
        temp_vector = [GF256int(0)] * (nativeBlockNum * 2)
        if i != rowNo:
            # Swap the pivot row into position rowNo.
            for j in range(nativeBlockNum * 2):
                temp_vector[j] = gj_matrix[i][j]
                gj_matrix[i][j] = gj_matrix[rowNo][j]
                gj_matrix[rowNo][j] = temp_vector[j]
        # Normalize the pivot row so the pivot entry becomes 1.
        for m in range(nativeBlockNum * 2):
            gj_matrix[rowNo][m] = gj_matrix[rowNo][m] / A
        # Eliminate the pivot column from all rows below.
        for j in range(rowNo + 1, nativeBlockNum, 1):
            B = gj_matrix[j][rowNo]
            for m in range(rowNo, nativeBlockNum * 2, 1):
                gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * B
    # Back substitution: clear the entries above each pivot.
    for rowNo in range(nativeBlockNum - 1, 0, -1):
        for j in range(0, rowNo, 1):
            C = gj_matrix[j][rowNo]
            for m in range(nativeBlockNum * 2):
                gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * C
def decode(n, k, src, blocknums, parityCoeff, dest, filesize, setting):
    """Decode chunk files to dest file."""
    # Special case: a 0-byte file decodes to an empty output file.
    if filesize <= 0:
        open(dest, 'wb').close()
        return
    cv_temp = []
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)
    # enc_matrix: full encoding matrix rebuilt from parityCoeff;
    # dec_matrix: rows selected by blocknums; rev_matrix: identity;
    # gj_matrix: augmented [dec | rev] fed to reversematrix().
    enc_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
        range(parityBlockNum)]
    dec_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
        range(nativeBlockNum)]
    rev_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in
        range(nativeBlockNum)]
    gj_matrix = [[GF256int(0) for col in range(nativeBlockNum * 2)] for row in
        range(nativeBlockNum)]
    counter = 0
    for i in range(parityBlockNum):
        for j in range(nativeBlockNum):
            enc_matrix[i][j] = GF256int(parityCoeff[counter])
            counter += 1
    # NOTE(review): cm1 is built but never read afterwards -- confirm whether
    # it is kept only for CoeffMatrix side effects.
    cm1 = CoeffMatrix(nativeBlockNum)
    for i in range(parityBlockNum):
        cv_temp.append(CoeffVector(nativeBlockNum))
        for j in range(nativeBlockNum):
            cv_temp[i].coeff_[j] = enc_matrix[i][j]
        cv_temp[i].first()
        cm1.addcoeffvector(cv_temp[i])
    # Select the rows of the chunks we actually downloaded.
    i = 0
    for selectChunkNo in blocknums:
        for j in range(nativeBlockNum):
            dec_matrix[i][j] = enc_matrix[selectChunkNo][j]
        i += 1
    # Identity matrix for the augmented right half.
    for i in range(nativeBlockNum):
        for j in range(nativeBlockNum):
            if j == i:
                rev_matrix[i][j] = GF256int(1)
    for i in range(nativeBlockNum):
        for j in range(nativeBlockNum * 2):
            if j < nativeBlockNum:
                gj_matrix[i][j] = dec_matrix[i][j]
            else:
                gj_matrix[i][j] = rev_matrix[i][j - nativeBlockNum]
    # Invert in place; the right half of gj_matrix becomes dec_matrix^-1.
    reversematrix(n, k, gj_matrix)
    for i in range(nativeBlockNum):
        for j in range(nativeBlockNum):
            dec_matrix[i][j] = gj_matrix[i][j + nativeBlockNum]
    # Concatenate the selected chunk payloads (Python 2: str is bytes).
    selectchunk = []
    for filename in src:
        infile = open(filename, 'rb')
        selectchunk.append(infile.read())
        infile.close()
    chunksize = os.path.getsize(src[0])
    indatalist = ''.join(selectchunk)
    # Serialize the inverse matrix as raw bytes for the C decoder.
    parityCoeff_temp = ''.join([chr(dec_matrix[i][j]) for i in range(
        nativeBlockNum) for j in range(nativeBlockNum)])
    outdatalist = codings.clibfmsr.clibfmsr.decodeComputation(indatalist,
        parityCoeff_temp, nativeBlockNum, chunksize)
    # Write the decoded file in 1 MiB slices, truncating the padding.
    outfile = open(dest, 'wb')
    writelen = 1048576
    writenext = 0
    for i in range(0, filesize - writelen, writelen):
        writenext = i + writelen
        outfile.write(outdatalist[i:writenext])
    outfile.write(outdatalist[writenext:filesize])
    outfile.close()
def getCheckNum(parityBlockNum):
    """Return how many subset combinations the strong-MDS test examines.

    Valid for fmsr with k = n-2 only.
    """
    survivors = parityBlockNum - 2
    pairCount = survivors * (survivors - 1) / 2
    return int(pairCount - (parityBlockNum / 2 - 1))
def getStrongMDSPropertyDegree(repairNodeno, nativeBlockNum, parityBlockNum,
    checkNum, enc_matrix):
    """Get strong MDS property degree."""
    # Counts how many subsets of the blocks surviving the failure of node
    # `repairNodeno` (dropping two more survivors at a time) still have full
    # rank. NOTE(review): `checkNum` and `s` are unused here -- confirm.
    currentStrongMDSPropertyDegree = 0
    survivalcoeffvectorset = []
    flag = 0
    for i in range(parityBlockNum):
        # Node int(i/2) owns block i: two blocks per node (k = n-2 layout).
        if int(i / 2) != repairNodeno:
            survivalcoeffvectorset.append(CoeffVector(nativeBlockNum))
            # `flag*2` re-compacts indices after the failed node's two
            # blocks are skipped (valid because exactly one node is skipped).
            for j in range(nativeBlockNum):
                survivalcoeffvectorset[i - flag * 2].coeff_[j] = enc_matrix[i][
                    j]
            survivalcoeffvectorset[i - flag * 2].first()
        else:
            flag = 1
    s = 0
    for i in range(parityBlockNum - 2):
        for j in range(parityBlockNum - 2):
            if i < j:
                # Drop survivors i and j; check whether the remaining
                # vectors still span the native blocks.
                checkmatrix = CoeffMatrix(nativeBlockNum)
                for k in range(parityBlockNum - 2):
                    if k != i and k != j:
                        checkmatrix.addcoeffvector(survivalcoeffvectorset[k
                            ].copy())
                if checkmatrix.rank_ == nativeBlockNum:
                    currentStrongMDSPropertyDegree += 1
                s += 1
    return currentStrongMDSPropertyDegree
def checkMDS(MSR_n, MSR_k, enc_matrix):
    """Check MDS property, for fmsr(k=n-2) only."""
    """Return a MDS property value."""
    # (The second string above is a bare no-op statement, kept byte-identical.)
    # Returns True iff every choice of n-2 surviving nodes still spans all
    # native blocks, i.e. any two node failures are tolerable.
    nativeBlockNum = getNativeBlockNum(MSR_n, MSR_k)
    parityBlockNum = getParityBlockNum(MSR_n, MSR_k)
    MDSpropery = True  # (sic) original variable name kept
    allcoeffvectors = []
    # Wrap each encoding-matrix row in a CoeffVector for rank computation.
    for i in range(parityBlockNum):
        allcoeffvectors.append(CoeffVector(nativeBlockNum))
        for j in range(nativeBlockNum):
            allcoeffvectors[i].coeff_[j] = enc_matrix[i][j]
        allcoeffvectors[i].first()
    # One check matrix per unordered pair (i, j) of hypothetically failed nodes.
    permutation = int(MSR_n * (MSR_n - 1) / 2)
    checkmatrix = [CoeffMatrix(nativeBlockNum) for col in range(permutation)]
    s = 0
    for i in range(MSR_n):
        for j in range(MSR_n):
            if i < j:
                # Gather both parity vectors of every surviving node b;
                # node b owns blocks 2b and 2b+1 (assumes k = n-2 layout).
                for b in range(MSR_n):
                    if b != i and b != j:
                        checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2
                            ].copy())
                        checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2 +
                            1].copy())
                # Full rank means the survivors can rebuild the native data.
                if checkmatrix[s].rank_ != nativeBlockNum:
                    MDSpropery = False
                s += 1
    return MDSpropery
def checkstongMDS(n, k, nativeBlockNum, parityBlockNum, enc_matrix):
    """Check strong MDS property, for fmsr(k=n-2) only.

    Returns the list of strong-MDS property degrees, one entry per node.
    """
    checkNum = getCheckNum(parityBlockNum)
    return [getStrongMDSPropertyDegree(node, nativeBlockNum, parityBlockNum,
                                       checkNum, enc_matrix)
            for node in range(n)]
def testStrongMDSProperty(strongMDSPropertyDegrees, checkNum, n):
"""Decide whether the current parity coefficient set passes the strong MDS property."""
result = True
threshold = 2 * (n - 1) * (n - 2) - (n - 2) * (n - 3) / 2
for degree in strongMDSPropertyDegrees:
if degree < threshold:
result = False
return result
def functionalRepair(n, k, src, blocknums, failedNode, parityCoeff,
    repairChunks, setting, metadata):
    """Functional repair by generating new parity chunks."""
    # Reads the surviving chunks listed in `src`, asks the C library to
    # compute replacement parity data, rewrites the big-chunk files of every
    # unhealthy node, and refreshes `parityCoeff` from metadata.enc_matrix.
    # NOTE(review): `blocknums`, `failedNode`, `repairChunks` and `checkNum`
    # are not used in this body -- presumably interface compatibility; confirm.
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)
    checkNum = getCheckNum(parityBlockNum)
    enc_matrix = metadata.enc_matrix
    repairCodingCoeff = metadata.repairCodingCoeff
    indatalist = []
    # Slurp every survivor chunk into memory (Python 2: str is bytes).
    for filepath in src:
        infile = open(filepath, 'rb')
        indatalist.append(infile.read())
        infile.close()
    chunksize = os.path.getsize(src[0])
    if chunksize > 0:
        indatalist_temp = ''.join(indatalist)
        parityCoeff_temp = []
        # Serialize the (n-k) x (n-1) repair coefficients as raw bytes.
        for i in range(n - k):
            for j in range(n - 1):
                parityCoeff_temp.append(chr(repairCodingCoeff[i][j]))
        parityCoeff_temp = ''.join(parityCoeff_temp)
        outdatalist = codings.clibfmsr.clibfmsr.repairComputation(
            indatalist_temp, parityCoeff_temp, n, k, chunksize)
    # Flatten the encoding matrix back into the caller-supplied parityCoeff.
    counter = 0
    for i in range(parityBlockNum):
        for j in range(nativeBlockNum):
            parityCoeff[counter] = enc_matrix[i][j]
            counter += 1
    writelen = 1048576  # write in 1 MiB slices
    writenext = 0
    for i in range(metadata.totalnode):
        if setting.nodeInfo[i].healthy == False:
            dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i
                )
            filesize = metadata.fileNodeInfo[i].bigchunksize
            if chunksize <= 0:
                # Zero-byte file: just (re)create an empty big-chunk.
                open(dest, 'wb').close()
            else:
                outfile = open(dest, 'wb')
                for j in range(0, filesize - writelen, writelen):
                    writenext = j + writelen
                    outfile.write(outdatalist[j:writenext])
                # NOTE(review): `writenext` carries over between nodes; fine
                # when exactly one node is repaired, looks stale if several
                # nodes are unhealthy -- confirm intended behavior.
                outfile.write(outdatalist[writenext:filesize])
                outfile.close()
            metadata.fileNodeInfo[i].bigchunkpath = dest
            metadata.fileNodeInfo[i
                ].bigchunkname = metadata.filename + '.node' + str(i)
            metadata.fileNodeInfo[i].action = 'upload'
<|reserved_special_token_1|>
#!/usr/bin/python
#
# @name = 'fmsrutil.py'
#
# @description = "F-MSR utilities module."
#
# @author = ['YU Chiu Man', 'HU Yuchong', 'TANG Yang']
#
import sys
import os
import random
from finitefield import GF256int
from coeffvector import CoeffVector
from coeffvector import CoeffMatrix
import common
#Check if C library of F-MSR is installed:
import codings.clibfmsr.clibfmsr
useClibfmsr = True
def getNativeBlockNum(n, k):
    '''Return the number of native (data) blocks: k * (n - k).'''
    return (n - k) * k
def getParityBlockNum(n, k):
    '''Return the number of parity blocks: n * (n - k).'''
    return (n - k) * n
def getNodeIdList(n, k):
    '''Return, for every parity block, the id of the node storing it.

    Blocks are laid out in segments of n-k consecutive blocks; segment i
    lives entirely on node i.
    '''
    segmentSize = n - k
    blockNum = n * segmentSize  # inlined getParityBlockNum(n, k)
    return [nodeid
            for nodeid in range(int(blockNum / segmentSize))
            for _ in range(segmentSize)]
def getParityCoeff(n, k):
    '''Return the flattened (row-major) parity coefficient matrix.

    Row i holds the GF(2^8) powers (i+1)**j for j = 0..nativeBlockNum-1.
    '''
    rowCount = getParityBlockNum(n, k)
    colCount = getNativeBlockNum(n, k)
    return [GF256int(row + 1) ** col
            for row in range(rowCount)
            for col in range(colCount)]
def encode(n, k, src, parityCoeff, setting, metadata):
    '''Encode src file to parity chunks.'''
    # Reads the whole source file, pads it to a multiple of the chunk size,
    # runs the C-library encoder, and writes one "big-chunk" file per node
    # while filling in per-chunk and per-node metadata.
    # NOTE(review): Python 2 semantics assumed (str is bytes, `/` is integer
    # division) -- confirm before running under Python 3.
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)
    infile = open(src, 'rb')
    indatalist = infile.read()
    infile.close()
    totalchunk = nativeBlockNum
    filesize = len(indatalist)
    #Generate info for big-chunk:
    for i in range(metadata.totalnode):
        fileNode = common.FileNodeMetadata(i)
        fileNode.nodekey = setting.nodeInfo[i].nodekey
        fileNode.nodetype = setting.nodeInfo[i].nodetype
        fileNode.bucketname = setting.nodeInfo[i].bucketname
        fileNode.bigchunksize = 0
        fileNode.chunknum = 0
        metadata.fileNodeInfo.append(fileNode)
    #Encode indatalist to outdatalist
    if filesize > 0:
        # +1 so the padded size covers the division remainder.
        chunksize = filesize/totalchunk + 1
        indatalist += '\0'*(chunksize*totalchunk - filesize)
        parityCoeff_temp = ''.join([chr(parity) for parity in parityCoeff])
        outdatalist = codings.clibfmsr.clibfmsr.encodeComputation(indatalist, \
            parityCoeff_temp, nativeBlockNum, parityBlockNum, chunksize)
    else:
        chunksize = 0
    #Generate info for small chunks:
    nodeIdList = getNodeIdList(n, k)
    for i in range(parityBlockNum):
        chunk = common.ChunkMetadata(i)
        chunk.chunkname = metadata.filename + '.chunk' + str(i)
        chunk.chunksize = chunksize
        chunk.chunktype = 'parity'
        chunk.chunkpath = setting.chunkdir + '/' + chunk.chunkname
        nodeid = nodeIdList[i]
        chunk.nodeid = nodeid
        chunk.nodekey = setting.nodeInfo[nodeid].nodekey
        chunk.nodetype = setting.nodeInfo[nodeid].nodetype
        chunk.bucketname = setting.nodeInfo[nodeid].bucketname
        chunk.action = 'upload'
        #Add chunk position inside big-chunk:
        chunk.position = metadata.fileNodeInfo[nodeid].chunknum
        metadata.chunkInfo.append(chunk)
        #Add support for big-chunk:
        metadata.fileNodeInfo[nodeid].bigchunksize += chunk.chunksize
        metadata.fileNodeInfo[nodeid].chunknum += 1
    metadata.totalchunk = parityBlockNum
    metadata.parityCoeff = parityCoeff[:]
    #Generate big-chunks:
    # Write each node's big-chunk in 1 MiB slices.
    startchunk = 0
    writelen = 1048576
    for i in range(metadata.totalnode):
        dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)
        if chunksize > 0:
            f = open(dest, 'wb')
            numchunks = nodeIdList.count(i)
            writenext = startchunk*chunksize
            for j in range(startchunk*chunksize, (startchunk+numchunks)*chunksize-writelen, writelen):
                writenext = j+writelen
                f.write(outdatalist[j:writenext])
            # Flush the tail that the sliced loop did not cover.
            f.write(outdatalist[writenext:(startchunk+numchunks)*chunksize])
            f.close()
            startchunk += numchunks
        else:
            # Empty input: create empty big-chunk files.
            open(dest, 'wb').close()
        metadata.fileNodeInfo[i].bigchunkpath = dest
        metadata.fileNodeInfo[i].bigchunkname = metadata.filename + '.node' + str(i)
        metadata.fileNodeInfo[i].action = 'upload'
def reversematrix(n, k, gj_matrix):
    '''Reverse matrix.'''
    # In-place Gauss-Jordan elimination over GF(2^8) on the augmented matrix
    # gj_matrix = [dec_matrix | identity]; on return the right half holds the
    # inverse of the original left half.
    ## The first elimination: decoding matrix -> lower unit triangular matrix
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)  # NOTE(review): unused here
    for rowNo in range(nativeBlockNum):
        ##1.find the rowNo row vector with 1st-coeff of valve non-zero
        A = GF256int(0)
        for i in range(rowNo,nativeBlockNum,1):
            if gj_matrix[i][rowNo]!=0:
                A = gj_matrix[i][rowNo]
                break
        # NOTE(review): relies on `i` leaking from the loop above; if no pivot
        # is found, A stays 0 and the division below fails -- assumes the
        # selected rows are linearly independent (TODO confirm).
        ##2. permutation between the rowNo row vector and the ith row vector
        temp_vector = [GF256int(0)]*(nativeBlockNum*2)
        if i!= rowNo:
            for j in range(nativeBlockNum*2):
                temp_vector[j] = gj_matrix[i][j]
                gj_matrix[i][j] = gj_matrix[rowNo][j]
                gj_matrix[rowNo][j] = temp_vector[j]
        ##3. in rowNo-th row vector, all the coeffs/1st coeff
        for m in range(nativeBlockNum*2):
            gj_matrix[rowNo][m] = gj_matrix[rowNo][m]/A
        ##4. The row vectors below rowNo-th row vector eliminate the rowNo-th coeff
        for j in range(rowNo+1,nativeBlockNum,1):
            B = gj_matrix[j][rowNo]
            for m in range(rowNo,nativeBlockNum*2,1):
                gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m]*B
    # The second elimination: decoding matrix -> unit matrix
    ##5. The row vectors above rowNo-th row vector eliminate the rowNo-th coeff
    for rowNo in range(nativeBlockNum-1,0,-1):
        for j in range(0,rowNo,1):
            C = gj_matrix[j][rowNo]
            for m in range(nativeBlockNum*2):
                gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m]*C
def decode(n, k, src, blocknums, parityCoeff, dest, filesize, setting):
    '''Decode chunk files to dest file.'''
    ## special handling for 0B files
    if filesize <= 0:
        open(dest,'wb').close()
        return
    cv_temp=[]
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)
    # enc_matrix: full encoding matrix rebuilt from parityCoeff;
    # dec_matrix: rows selected by blocknums; rev_matrix: identity;
    # gj_matrix: augmented [dec | rev] fed to reversematrix().
    enc_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in range(parityBlockNum)]
    dec_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in range(nativeBlockNum)]
    rev_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in range(nativeBlockNum)]
    gj_matrix = [[GF256int(0) for col in range(nativeBlockNum*2)] for row in range(nativeBlockNum)]
    ## generate the encoding matrix
    counter = 0
    for i in range(parityBlockNum):
        for j in range(nativeBlockNum):
            enc_matrix[i][j] = GF256int(parityCoeff[counter])
            counter += 1
    # NOTE(review): cm1 is built but never read afterwards -- confirm whether
    # it is kept only for CoeffMatrix side effects.
    cm1 = CoeffMatrix(nativeBlockNum)
    for i in range(parityBlockNum):
        cv_temp.append(CoeffVector(nativeBlockNum))
        for j in range(nativeBlockNum):
            cv_temp[i].coeff_[j] = enc_matrix[i][j]
        cv_temp[i].first()
        cm1.addcoeffvector(cv_temp[i])
    ## generate the decoding matrix
    i=0
    for selectChunkNo in blocknums:
        for j in range(nativeBlockNum):
            dec_matrix[i][j]=enc_matrix[selectChunkNo][j]
        i += 1
    ## initialize the reverse matrix
    for i in range(nativeBlockNum):
        for j in range(nativeBlockNum):
            if j==i:
                rev_matrix[i][j]= GF256int(1)
    ## initialize the Gauss-Jordan matrix = [decoding,reverse]
    for i in range(nativeBlockNum):
        for j in range(nativeBlockNum*2):
            if j<nativeBlockNum:
                gj_matrix[i][j]= dec_matrix[i][j]
            else:
                gj_matrix[i][j]= rev_matrix[i][j-nativeBlockNum]
    # Invert in place; the right half of gj_matrix becomes dec_matrix^-1.
    reversematrix(n, k, gj_matrix)
    for i in range(nativeBlockNum):
        for j in range(nativeBlockNum):
            dec_matrix[i][j] = gj_matrix[i][j+nativeBlockNum]
    ##generate decode data chunks
    selectchunk=[]
    for filename in src:
        infile = open(filename,'rb')
        selectchunk.append(infile.read())
        infile.close()
    chunksize = os.path.getsize(src[0])
    indatalist = ''.join(selectchunk)
    ##rebuild the original chunks
    # Serialize the inverse matrix as raw bytes for the C decoder.
    parityCoeff_temp = ''.join([chr(dec_matrix[i][j]) \
        for i in range(nativeBlockNum) \
        for j in range(nativeBlockNum)])
    outdatalist = codings.clibfmsr.clibfmsr.decodeComputation(indatalist, \
        parityCoeff_temp, nativeBlockNum, chunksize)
    # Write the decoded file in 1 MiB slices, truncating the padding.
    outfile = open(dest,'wb')
    writelen = 1048576
    writenext = 0
    for i in range(0,filesize-writelen,writelen):
        writenext = i+writelen
        outfile.write(outdatalist[i:writenext])
    outfile.write(outdatalist[writenext:filesize])
    outfile.close()
def getCheckNum(parityBlockNum):
    '''Return how many subset combinations the strong-MDS test examines.

    Valid for fmsr with k = n-2 only.
    '''
    survivors = parityBlockNum - 2
    pairCount = survivors * (survivors - 1) / 2
    return int(pairCount - (parityBlockNum / 2 - 1))
def getStrongMDSPropertyDegree(repairNodeno, nativeBlockNum, parityBlockNum, checkNum, enc_matrix):
    '''Get strong MDS property degree.'''
    # Counts how many subsets of the blocks surviving the failure of node
    # `repairNodeno` (dropping two more survivors at a time) still have full
    # rank. NOTE(review): `checkNum` and `s` are unused here -- confirm.
    currentStrongMDSPropertyDegree = 0
    survivalcoeffvectorset = []
    flag = 0
    for i in range(parityBlockNum):
        #get coeff vectors of survival parity blocks
        # Node int(i/2) owns block i: two blocks per node (k = n-2 layout).
        if int(i/2)!= repairNodeno:
            survivalcoeffvectorset.append(CoeffVector(nativeBlockNum))
            # `flag*2` re-compacts indices after the failed node's two
            # blocks are skipped (valid because exactly one node is skipped).
            for j in range(nativeBlockNum):
                survivalcoeffvectorset[i - flag*2].coeff_[j] = enc_matrix[i][j]
            survivalcoeffvectorset[i - flag*2].first()
        else:
            flag =1
    s = 0
    for i in range(parityBlockNum-2):
        for j in range(parityBlockNum-2):
            if i<j:
                # Drop survivors i and j; check whether the remaining
                # vectors still span the native blocks.
                checkmatrix = CoeffMatrix(nativeBlockNum)
                for k in range (parityBlockNum-2):
                    if k!=i and k!=j:
                        checkmatrix.addcoeffvector(survivalcoeffvectorset[k].copy())
                if checkmatrix.rank_ == nativeBlockNum:
                    currentStrongMDSPropertyDegree += 1
                s += 1
    return currentStrongMDSPropertyDegree
def checkMDS(MSR_n, MSR_k, enc_matrix):
    '''Check the MDS property, for fmsr(k=n-2) only.

    Returns True iff, for every way of choosing n-2 surviving nodes out
    of n (i.e. every pair of failed nodes), the parity blocks held by the
    survivors span the full native-block space.
    '''
    nativeBlockNum = getNativeBlockNum(MSR_n, MSR_k)
    parityBlockNum = getParityBlockNum(MSR_n, MSR_k)
    # Build the coefficient vector of every parity block once up front.
    vectors = []
    for row in range(parityBlockNum):
        vector = CoeffVector(nativeBlockNum)
        for col in range(nativeBlockNum):
            vector.coeff_[col] = enc_matrix[row][col]
        vector.first()
        vectors.append(vector)
    # One candidate matrix per way of picking two failed nodes from n.
    numPairs = int(MSR_n * (MSR_n - 1) / 2)
    candidates = [CoeffMatrix(nativeBlockNum) for _ in range(numPairs)]
    isMDS = True
    pairIdx = 0
    for firstFail in range(MSR_n):
        for secondFail in range(firstFail + 1, MSR_n):
            # Add both parity blocks (2*node, 2*node+1) of each survivor.
            for node in range(MSR_n):
                if node == firstFail or node == secondFail:
                    continue
                candidates[pairIdx].addcoeffvector(vectors[node * 2].copy())
                candidates[pairIdx].addcoeffvector(vectors[node * 2 + 1].copy())
            if candidates[pairIdx].rank_ != nativeBlockNum:
                isMDS = False
            pairIdx += 1
    return isMDS
def checkstongMDS(n, k, nativeBlockNum, parityBlockNum, enc_matrix):
    '''Check strong MDS property, for fmsr(k=n-2) only.

    Returns the list of strong-MDS property degrees, one entry per node.
    '''
    # Number of check combinations, shared by every per-node evaluation.
    checkNum = getCheckNum(parityBlockNum)
    return [getStrongMDSPropertyDegree(nodeNo, nativeBlockNum,
                                       parityBlockNum, checkNum, enc_matrix)
            for nodeNo in range(n)]
def testStrongMDSProperty(strongMDSPropertyDegrees, checkNum,n):
'''Decide whether the current parity coefficient set passes the strong MDS property.'''
result = True
#threshold = checkNum
threshold = 2*(n-1)*(n-2)-(n-2)*(n-3)/2
#Important: currently the threshold value is hardcode
for degree in strongMDSPropertyDegrees:
if degree < threshold:
result = False
return result
def functionalRepair(n, k, src, blocknums, failedNode, parityCoeff, repairChunks, setting, metadata):
    '''Functional repair by generating new parity chunks.

    Reads the surviving chunk files in src, runs the native repair
    computation with the coefficients in metadata.repairCodingCoeff,
    copies metadata.enc_matrix into the caller-supplied parityCoeff list
    (mutated in place), and writes one regenerated big-chunk file per
    unhealthy node, updating that node's entry in metadata.fileNodeInfo.

    NOTE(review): blocknums, failedNode, repairChunks and the locally
    computed checkNum are never read in this body -- presumably kept for
    interface compatibility; confirm against callers.
    '''
    nativeBlockNum = getNativeBlockNum(n, k)
    parityBlockNum = getParityBlockNum(n, k)
    checkNum = getCheckNum(parityBlockNum)
    ## read the encoding matrix and repair
    enc_matrix = metadata.enc_matrix
    repairCodingCoeff = metadata.repairCodingCoeff
    # Slurp every surviving chunk file into memory, one string per file.
    indatalist = []
    for filepath in src:
        infile = open(filepath, 'rb')
        indatalist.append(infile.read())
        infile.close()
    # Chunk size is taken from the first source file; assumes all
    # surviving chunks are the same size -- TODO confirm.
    chunksize = os.path.getsize(src[0])
    if chunksize > 0:
        #Repair computation:
        indatalist_temp = ''.join(indatalist)
        # Flatten the (n-k) x (n-1) repair coefficients into a character
        # string (one byte per coefficient) for the C library.
        parityCoeff_temp = []
        for i in range(n-k):
            for j in range(n-1):
                parityCoeff_temp.append(chr(repairCodingCoeff[i][j]))
        parityCoeff_temp = ''.join(parityCoeff_temp)
        # outdatalist holds the regenerated chunk data; note it is only
        # defined on this branch (chunksize > 0).
        outdatalist = codings.clibfmsr.clibfmsr.repairComputation(indatalist_temp, \
                parityCoeff_temp, n, k, chunksize)
    # Copy enc_matrix row-major into the caller-supplied parityCoeff
    # list, mutating it in place for the caller to observe.
    counter = 0
    for i in range(parityBlockNum):
        for j in range(nativeBlockNum):
            parityCoeff[counter] = enc_matrix[i][j]
            counter += 1
    #Add support for big-chunk:
    # Write the regenerated data in 1 MiB slices to bound the size of
    # each individual write call.
    writelen = 1048576
    writenext = 0
    for i in range(metadata.totalnode):
        if setting.nodeInfo[i].healthy == False:
            dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)
            filesize = metadata.fileNodeInfo[i].bigchunksize
            if chunksize <= 0:
                # Zero-byte source: just (re)create an empty big chunk.
                open(dest,'wb').close()
            else:
                # NOTE(review): every unhealthy node is written starting
                # from offset 0 of outdatalist, so this looks correct
                # only when a single node is repaired per call -- confirm.
                outfile = open(dest, 'wb')
                for j in range(0,filesize-writelen,writelen):
                    writenext = j+writelen
                    outfile.write(outdatalist[j:writenext])
                outfile.write(outdatalist[writenext:filesize])
                outfile.close()
            metadata.fileNodeInfo[i].bigchunkpath = dest
            metadata.fileNodeInfo[i].bigchunkname = metadata.filename + '.node' + str(i)
            metadata.fileNodeInfo[i].action = 'upload'
|
flexible
|
{
"blob_id": "0ebd19079a16a6e3da34da2ecfda0d159b8580b2",
"index": 9527,
"step-1": "<mask token>\n\n\ndef getNativeBlockNum(n, k):\n \"\"\"Get number of native blocks.\"\"\"\n return k * (n - k)\n\n\n<mask token>\n\n\ndef getNodeIdList(n, k):\n \"\"\"Find the node id for a segment of blocks.\"\"\"\n \"\"\"Return a list of node id for the blocks.\"\"\"\n nodeidList = []\n segmentSize = n - k\n blockNum = getParityBlockNum(n, k)\n for i in range(int(blockNum / segmentSize)):\n for j in range(segmentSize):\n nodeidList.append(i)\n return nodeidList\n\n\n<mask token>\n\n\ndef encode(n, k, src, parityCoeff, setting, metadata):\n \"\"\"Encode src file to parity chunks.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n infile = open(src, 'rb')\n indatalist = infile.read()\n infile.close()\n totalchunk = nativeBlockNum\n filesize = len(indatalist)\n for i in range(metadata.totalnode):\n fileNode = common.FileNodeMetadata(i)\n fileNode.nodekey = setting.nodeInfo[i].nodekey\n fileNode.nodetype = setting.nodeInfo[i].nodetype\n fileNode.bucketname = setting.nodeInfo[i].bucketname\n fileNode.bigchunksize = 0\n fileNode.chunknum = 0\n metadata.fileNodeInfo.append(fileNode)\n if filesize > 0:\n chunksize = filesize / totalchunk + 1\n indatalist += '\\x00' * (chunksize * totalchunk - filesize)\n parityCoeff_temp = ''.join([chr(parity) for parity in parityCoeff])\n outdatalist = codings.clibfmsr.clibfmsr.encodeComputation(indatalist,\n parityCoeff_temp, nativeBlockNum, parityBlockNum, chunksize)\n else:\n chunksize = 0\n nodeIdList = getNodeIdList(n, k)\n for i in range(parityBlockNum):\n chunk = common.ChunkMetadata(i)\n chunk.chunkname = metadata.filename + '.chunk' + str(i)\n chunk.chunksize = chunksize\n chunk.chunktype = 'parity'\n chunk.chunkpath = setting.chunkdir + '/' + chunk.chunkname\n nodeid = nodeIdList[i]\n chunk.nodeid = nodeid\n chunk.nodekey = setting.nodeInfo[nodeid].nodekey\n chunk.nodetype = setting.nodeInfo[nodeid].nodetype\n chunk.bucketname = setting.nodeInfo[nodeid].bucketname\n 
chunk.action = 'upload'\n chunk.position = metadata.fileNodeInfo[nodeid].chunknum\n metadata.chunkInfo.append(chunk)\n metadata.fileNodeInfo[nodeid].bigchunksize += chunk.chunksize\n metadata.fileNodeInfo[nodeid].chunknum += 1\n metadata.totalchunk = parityBlockNum\n metadata.parityCoeff = parityCoeff[:]\n startchunk = 0\n writelen = 1048576\n for i in range(metadata.totalnode):\n dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)\n if chunksize > 0:\n f = open(dest, 'wb')\n numchunks = nodeIdList.count(i)\n writenext = startchunk * chunksize\n for j in range(startchunk * chunksize, (startchunk + numchunks) *\n chunksize - writelen, writelen):\n writenext = j + writelen\n f.write(outdatalist[j:writenext])\n f.write(outdatalist[writenext:(startchunk + numchunks) * chunksize]\n )\n f.close()\n startchunk += numchunks\n else:\n open(dest, 'wb').close()\n metadata.fileNodeInfo[i].bigchunkpath = dest\n metadata.fileNodeInfo[i\n ].bigchunkname = metadata.filename + '.node' + str(i)\n metadata.fileNodeInfo[i].action = 'upload'\n\n\ndef reversematrix(n, k, gj_matrix):\n \"\"\"Reverse matrix.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n for rowNo in range(nativeBlockNum):\n A = GF256int(0)\n for i in range(rowNo, nativeBlockNum, 1):\n if gj_matrix[i][rowNo] != 0:\n A = gj_matrix[i][rowNo]\n break\n temp_vector = [GF256int(0)] * (nativeBlockNum * 2)\n if i != rowNo:\n for j in range(nativeBlockNum * 2):\n temp_vector[j] = gj_matrix[i][j]\n gj_matrix[i][j] = gj_matrix[rowNo][j]\n gj_matrix[rowNo][j] = temp_vector[j]\n for m in range(nativeBlockNum * 2):\n gj_matrix[rowNo][m] = gj_matrix[rowNo][m] / A\n for j in range(rowNo + 1, nativeBlockNum, 1):\n B = gj_matrix[j][rowNo]\n for m in range(rowNo, nativeBlockNum * 2, 1):\n gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * B\n for rowNo in range(nativeBlockNum - 1, 0, -1):\n for j in range(0, rowNo, 1):\n C = gj_matrix[j][rowNo]\n for m in 
range(nativeBlockNum * 2):\n gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * C\n\n\ndef decode(n, k, src, blocknums, parityCoeff, dest, filesize, setting):\n \"\"\"Decode chunk files to dest file.\"\"\"\n if filesize <= 0:\n open(dest, 'wb').close()\n return\n cv_temp = []\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n enc_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(parityBlockNum)]\n dec_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(nativeBlockNum)]\n rev_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(nativeBlockNum)]\n gj_matrix = [[GF256int(0) for col in range(nativeBlockNum * 2)] for row in\n range(nativeBlockNum)]\n counter = 0\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n enc_matrix[i][j] = GF256int(parityCoeff[counter])\n counter += 1\n cm1 = CoeffMatrix(nativeBlockNum)\n for i in range(parityBlockNum):\n cv_temp.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n cv_temp[i].coeff_[j] = enc_matrix[i][j]\n cv_temp[i].first()\n cm1.addcoeffvector(cv_temp[i])\n i = 0\n for selectChunkNo in blocknums:\n for j in range(nativeBlockNum):\n dec_matrix[i][j] = enc_matrix[selectChunkNo][j]\n i += 1\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum):\n if j == i:\n rev_matrix[i][j] = GF256int(1)\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum * 2):\n if j < nativeBlockNum:\n gj_matrix[i][j] = dec_matrix[i][j]\n else:\n gj_matrix[i][j] = rev_matrix[i][j - nativeBlockNum]\n reversematrix(n, k, gj_matrix)\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum):\n dec_matrix[i][j] = gj_matrix[i][j + nativeBlockNum]\n selectchunk = []\n for filename in src:\n infile = open(filename, 'rb')\n selectchunk.append(infile.read())\n infile.close()\n chunksize = os.path.getsize(src[0])\n indatalist = ''.join(selectchunk)\n parityCoeff_temp = 
''.join([chr(dec_matrix[i][j]) for i in range(\n nativeBlockNum) for j in range(nativeBlockNum)])\n outdatalist = codings.clibfmsr.clibfmsr.decodeComputation(indatalist,\n parityCoeff_temp, nativeBlockNum, chunksize)\n outfile = open(dest, 'wb')\n writelen = 1048576\n writenext = 0\n for i in range(0, filesize - writelen, writelen):\n writenext = i + writelen\n outfile.write(outdatalist[i:writenext])\n outfile.write(outdatalist[writenext:filesize])\n outfile.close()\n\n\ndef getCheckNum(parityBlockNum):\n \"\"\"Get check number for checking strong MDS, for fmsr(k=n-2) only.\"\"\"\n return int((parityBlockNum - 2) * (parityBlockNum - 2 - 1) / 2 - (\n parityBlockNum / 2 - 1))\n\n\ndef getStrongMDSPropertyDegree(repairNodeno, nativeBlockNum, parityBlockNum,\n checkNum, enc_matrix):\n \"\"\"Get strong MDS property degree.\"\"\"\n currentStrongMDSPropertyDegree = 0\n survivalcoeffvectorset = []\n flag = 0\n for i in range(parityBlockNum):\n if int(i / 2) != repairNodeno:\n survivalcoeffvectorset.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n survivalcoeffvectorset[i - flag * 2].coeff_[j] = enc_matrix[i][\n j]\n survivalcoeffvectorset[i - flag * 2].first()\n else:\n flag = 1\n s = 0\n for i in range(parityBlockNum - 2):\n for j in range(parityBlockNum - 2):\n if i < j:\n checkmatrix = CoeffMatrix(nativeBlockNum)\n for k in range(parityBlockNum - 2):\n if k != i and k != j:\n checkmatrix.addcoeffvector(survivalcoeffvectorset[k\n ].copy())\n if checkmatrix.rank_ == nativeBlockNum:\n currentStrongMDSPropertyDegree += 1\n s += 1\n return currentStrongMDSPropertyDegree\n\n\n<mask token>\n\n\ndef checkstongMDS(n, k, nativeBlockNum, parityBlockNum, enc_matrix):\n \"\"\"Check strong MDS property, for fmsr(k=n-2) only.\"\"\"\n \"\"\"Return list of MDS property degrees.\"\"\"\n strongMDSPropertyDegrees = []\n checkNum = getCheckNum(parityBlockNum)\n for i in range(n):\n strongMDSPropertyDegrees.append(getStrongMDSPropertyDegree(i,\n nativeBlockNum, 
parityBlockNum, checkNum, enc_matrix))\n return strongMDSPropertyDegrees\n\n\ndef testStrongMDSProperty(strongMDSPropertyDegrees, checkNum, n):\n \"\"\"Decide whether the current parity coefficient set passes the strong MDS property.\"\"\"\n result = True\n threshold = 2 * (n - 1) * (n - 2) - (n - 2) * (n - 3) / 2\n for degree in strongMDSPropertyDegrees:\n if degree < threshold:\n result = False\n return result\n\n\ndef functionalRepair(n, k, src, blocknums, failedNode, parityCoeff,\n repairChunks, setting, metadata):\n \"\"\"Functional repair by generating new parity chunks.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n checkNum = getCheckNum(parityBlockNum)\n enc_matrix = metadata.enc_matrix\n repairCodingCoeff = metadata.repairCodingCoeff\n indatalist = []\n for filepath in src:\n infile = open(filepath, 'rb')\n indatalist.append(infile.read())\n infile.close()\n chunksize = os.path.getsize(src[0])\n if chunksize > 0:\n indatalist_temp = ''.join(indatalist)\n parityCoeff_temp = []\n for i in range(n - k):\n for j in range(n - 1):\n parityCoeff_temp.append(chr(repairCodingCoeff[i][j]))\n parityCoeff_temp = ''.join(parityCoeff_temp)\n outdatalist = codings.clibfmsr.clibfmsr.repairComputation(\n indatalist_temp, parityCoeff_temp, n, k, chunksize)\n counter = 0\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n parityCoeff[counter] = enc_matrix[i][j]\n counter += 1\n writelen = 1048576\n writenext = 0\n for i in range(metadata.totalnode):\n if setting.nodeInfo[i].healthy == False:\n dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i\n )\n filesize = metadata.fileNodeInfo[i].bigchunksize\n if chunksize <= 0:\n open(dest, 'wb').close()\n else:\n outfile = open(dest, 'wb')\n for j in range(0, filesize - writelen, writelen):\n writenext = j + writelen\n outfile.write(outdatalist[j:writenext])\n outfile.write(outdatalist[writenext:filesize])\n outfile.close()\n 
metadata.fileNodeInfo[i].bigchunkpath = dest\n metadata.fileNodeInfo[i\n ].bigchunkname = metadata.filename + '.node' + str(i)\n metadata.fileNodeInfo[i].action = 'upload'\n",
"step-2": "<mask token>\n\n\ndef getNativeBlockNum(n, k):\n \"\"\"Get number of native blocks.\"\"\"\n return k * (n - k)\n\n\ndef getParityBlockNum(n, k):\n \"\"\"Get number of parity blocks.\"\"\"\n return n * (n - k)\n\n\ndef getNodeIdList(n, k):\n \"\"\"Find the node id for a segment of blocks.\"\"\"\n \"\"\"Return a list of node id for the blocks.\"\"\"\n nodeidList = []\n segmentSize = n - k\n blockNum = getParityBlockNum(n, k)\n for i in range(int(blockNum / segmentSize)):\n for j in range(segmentSize):\n nodeidList.append(i)\n return nodeidList\n\n\n<mask token>\n\n\ndef encode(n, k, src, parityCoeff, setting, metadata):\n \"\"\"Encode src file to parity chunks.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n infile = open(src, 'rb')\n indatalist = infile.read()\n infile.close()\n totalchunk = nativeBlockNum\n filesize = len(indatalist)\n for i in range(metadata.totalnode):\n fileNode = common.FileNodeMetadata(i)\n fileNode.nodekey = setting.nodeInfo[i].nodekey\n fileNode.nodetype = setting.nodeInfo[i].nodetype\n fileNode.bucketname = setting.nodeInfo[i].bucketname\n fileNode.bigchunksize = 0\n fileNode.chunknum = 0\n metadata.fileNodeInfo.append(fileNode)\n if filesize > 0:\n chunksize = filesize / totalchunk + 1\n indatalist += '\\x00' * (chunksize * totalchunk - filesize)\n parityCoeff_temp = ''.join([chr(parity) for parity in parityCoeff])\n outdatalist = codings.clibfmsr.clibfmsr.encodeComputation(indatalist,\n parityCoeff_temp, nativeBlockNum, parityBlockNum, chunksize)\n else:\n chunksize = 0\n nodeIdList = getNodeIdList(n, k)\n for i in range(parityBlockNum):\n chunk = common.ChunkMetadata(i)\n chunk.chunkname = metadata.filename + '.chunk' + str(i)\n chunk.chunksize = chunksize\n chunk.chunktype = 'parity'\n chunk.chunkpath = setting.chunkdir + '/' + chunk.chunkname\n nodeid = nodeIdList[i]\n chunk.nodeid = nodeid\n chunk.nodekey = setting.nodeInfo[nodeid].nodekey\n chunk.nodetype = 
setting.nodeInfo[nodeid].nodetype\n chunk.bucketname = setting.nodeInfo[nodeid].bucketname\n chunk.action = 'upload'\n chunk.position = metadata.fileNodeInfo[nodeid].chunknum\n metadata.chunkInfo.append(chunk)\n metadata.fileNodeInfo[nodeid].bigchunksize += chunk.chunksize\n metadata.fileNodeInfo[nodeid].chunknum += 1\n metadata.totalchunk = parityBlockNum\n metadata.parityCoeff = parityCoeff[:]\n startchunk = 0\n writelen = 1048576\n for i in range(metadata.totalnode):\n dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)\n if chunksize > 0:\n f = open(dest, 'wb')\n numchunks = nodeIdList.count(i)\n writenext = startchunk * chunksize\n for j in range(startchunk * chunksize, (startchunk + numchunks) *\n chunksize - writelen, writelen):\n writenext = j + writelen\n f.write(outdatalist[j:writenext])\n f.write(outdatalist[writenext:(startchunk + numchunks) * chunksize]\n )\n f.close()\n startchunk += numchunks\n else:\n open(dest, 'wb').close()\n metadata.fileNodeInfo[i].bigchunkpath = dest\n metadata.fileNodeInfo[i\n ].bigchunkname = metadata.filename + '.node' + str(i)\n metadata.fileNodeInfo[i].action = 'upload'\n\n\ndef reversematrix(n, k, gj_matrix):\n \"\"\"Reverse matrix.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n for rowNo in range(nativeBlockNum):\n A = GF256int(0)\n for i in range(rowNo, nativeBlockNum, 1):\n if gj_matrix[i][rowNo] != 0:\n A = gj_matrix[i][rowNo]\n break\n temp_vector = [GF256int(0)] * (nativeBlockNum * 2)\n if i != rowNo:\n for j in range(nativeBlockNum * 2):\n temp_vector[j] = gj_matrix[i][j]\n gj_matrix[i][j] = gj_matrix[rowNo][j]\n gj_matrix[rowNo][j] = temp_vector[j]\n for m in range(nativeBlockNum * 2):\n gj_matrix[rowNo][m] = gj_matrix[rowNo][m] / A\n for j in range(rowNo + 1, nativeBlockNum, 1):\n B = gj_matrix[j][rowNo]\n for m in range(rowNo, nativeBlockNum * 2, 1):\n gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * B\n for rowNo in range(nativeBlockNum - 
1, 0, -1):\n for j in range(0, rowNo, 1):\n C = gj_matrix[j][rowNo]\n for m in range(nativeBlockNum * 2):\n gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * C\n\n\ndef decode(n, k, src, blocknums, parityCoeff, dest, filesize, setting):\n \"\"\"Decode chunk files to dest file.\"\"\"\n if filesize <= 0:\n open(dest, 'wb').close()\n return\n cv_temp = []\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n enc_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(parityBlockNum)]\n dec_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(nativeBlockNum)]\n rev_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(nativeBlockNum)]\n gj_matrix = [[GF256int(0) for col in range(nativeBlockNum * 2)] for row in\n range(nativeBlockNum)]\n counter = 0\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n enc_matrix[i][j] = GF256int(parityCoeff[counter])\n counter += 1\n cm1 = CoeffMatrix(nativeBlockNum)\n for i in range(parityBlockNum):\n cv_temp.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n cv_temp[i].coeff_[j] = enc_matrix[i][j]\n cv_temp[i].first()\n cm1.addcoeffvector(cv_temp[i])\n i = 0\n for selectChunkNo in blocknums:\n for j in range(nativeBlockNum):\n dec_matrix[i][j] = enc_matrix[selectChunkNo][j]\n i += 1\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum):\n if j == i:\n rev_matrix[i][j] = GF256int(1)\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum * 2):\n if j < nativeBlockNum:\n gj_matrix[i][j] = dec_matrix[i][j]\n else:\n gj_matrix[i][j] = rev_matrix[i][j - nativeBlockNum]\n reversematrix(n, k, gj_matrix)\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum):\n dec_matrix[i][j] = gj_matrix[i][j + nativeBlockNum]\n selectchunk = []\n for filename in src:\n infile = open(filename, 'rb')\n selectchunk.append(infile.read())\n infile.close()\n chunksize = 
os.path.getsize(src[0])\n indatalist = ''.join(selectchunk)\n parityCoeff_temp = ''.join([chr(dec_matrix[i][j]) for i in range(\n nativeBlockNum) for j in range(nativeBlockNum)])\n outdatalist = codings.clibfmsr.clibfmsr.decodeComputation(indatalist,\n parityCoeff_temp, nativeBlockNum, chunksize)\n outfile = open(dest, 'wb')\n writelen = 1048576\n writenext = 0\n for i in range(0, filesize - writelen, writelen):\n writenext = i + writelen\n outfile.write(outdatalist[i:writenext])\n outfile.write(outdatalist[writenext:filesize])\n outfile.close()\n\n\ndef getCheckNum(parityBlockNum):\n \"\"\"Get check number for checking strong MDS, for fmsr(k=n-2) only.\"\"\"\n return int((parityBlockNum - 2) * (parityBlockNum - 2 - 1) / 2 - (\n parityBlockNum / 2 - 1))\n\n\ndef getStrongMDSPropertyDegree(repairNodeno, nativeBlockNum, parityBlockNum,\n checkNum, enc_matrix):\n \"\"\"Get strong MDS property degree.\"\"\"\n currentStrongMDSPropertyDegree = 0\n survivalcoeffvectorset = []\n flag = 0\n for i in range(parityBlockNum):\n if int(i / 2) != repairNodeno:\n survivalcoeffvectorset.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n survivalcoeffvectorset[i - flag * 2].coeff_[j] = enc_matrix[i][\n j]\n survivalcoeffvectorset[i - flag * 2].first()\n else:\n flag = 1\n s = 0\n for i in range(parityBlockNum - 2):\n for j in range(parityBlockNum - 2):\n if i < j:\n checkmatrix = CoeffMatrix(nativeBlockNum)\n for k in range(parityBlockNum - 2):\n if k != i and k != j:\n checkmatrix.addcoeffvector(survivalcoeffvectorset[k\n ].copy())\n if checkmatrix.rank_ == nativeBlockNum:\n currentStrongMDSPropertyDegree += 1\n s += 1\n return currentStrongMDSPropertyDegree\n\n\ndef checkMDS(MSR_n, MSR_k, enc_matrix):\n \"\"\"Check MDS property, for fmsr(k=n-2) only.\"\"\"\n \"\"\"Return a MDS property value.\"\"\"\n nativeBlockNum = getNativeBlockNum(MSR_n, MSR_k)\n parityBlockNum = getParityBlockNum(MSR_n, MSR_k)\n MDSpropery = True\n allcoeffvectors = []\n for i in 
range(parityBlockNum):\n allcoeffvectors.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n allcoeffvectors[i].coeff_[j] = enc_matrix[i][j]\n allcoeffvectors[i].first()\n permutation = int(MSR_n * (MSR_n - 1) / 2)\n checkmatrix = [CoeffMatrix(nativeBlockNum) for col in range(permutation)]\n s = 0\n for i in range(MSR_n):\n for j in range(MSR_n):\n if i < j:\n for b in range(MSR_n):\n if b != i and b != j:\n checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2\n ].copy())\n checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2 +\n 1].copy())\n if checkmatrix[s].rank_ != nativeBlockNum:\n MDSpropery = False\n s += 1\n return MDSpropery\n\n\ndef checkstongMDS(n, k, nativeBlockNum, parityBlockNum, enc_matrix):\n \"\"\"Check strong MDS property, for fmsr(k=n-2) only.\"\"\"\n \"\"\"Return list of MDS property degrees.\"\"\"\n strongMDSPropertyDegrees = []\n checkNum = getCheckNum(parityBlockNum)\n for i in range(n):\n strongMDSPropertyDegrees.append(getStrongMDSPropertyDegree(i,\n nativeBlockNum, parityBlockNum, checkNum, enc_matrix))\n return strongMDSPropertyDegrees\n\n\ndef testStrongMDSProperty(strongMDSPropertyDegrees, checkNum, n):\n \"\"\"Decide whether the current parity coefficient set passes the strong MDS property.\"\"\"\n result = True\n threshold = 2 * (n - 1) * (n - 2) - (n - 2) * (n - 3) / 2\n for degree in strongMDSPropertyDegrees:\n if degree < threshold:\n result = False\n return result\n\n\ndef functionalRepair(n, k, src, blocknums, failedNode, parityCoeff,\n repairChunks, setting, metadata):\n \"\"\"Functional repair by generating new parity chunks.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n checkNum = getCheckNum(parityBlockNum)\n enc_matrix = metadata.enc_matrix\n repairCodingCoeff = metadata.repairCodingCoeff\n indatalist = []\n for filepath in src:\n infile = open(filepath, 'rb')\n indatalist.append(infile.read())\n infile.close()\n chunksize = os.path.getsize(src[0])\n if 
chunksize > 0:\n indatalist_temp = ''.join(indatalist)\n parityCoeff_temp = []\n for i in range(n - k):\n for j in range(n - 1):\n parityCoeff_temp.append(chr(repairCodingCoeff[i][j]))\n parityCoeff_temp = ''.join(parityCoeff_temp)\n outdatalist = codings.clibfmsr.clibfmsr.repairComputation(\n indatalist_temp, parityCoeff_temp, n, k, chunksize)\n counter = 0\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n parityCoeff[counter] = enc_matrix[i][j]\n counter += 1\n writelen = 1048576\n writenext = 0\n for i in range(metadata.totalnode):\n if setting.nodeInfo[i].healthy == False:\n dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i\n )\n filesize = metadata.fileNodeInfo[i].bigchunksize\n if chunksize <= 0:\n open(dest, 'wb').close()\n else:\n outfile = open(dest, 'wb')\n for j in range(0, filesize - writelen, writelen):\n writenext = j + writelen\n outfile.write(outdatalist[j:writenext])\n outfile.write(outdatalist[writenext:filesize])\n outfile.close()\n metadata.fileNodeInfo[i].bigchunkpath = dest\n metadata.fileNodeInfo[i\n ].bigchunkname = metadata.filename + '.node' + str(i)\n metadata.fileNodeInfo[i].action = 'upload'\n",
"step-3": "<mask token>\n\n\ndef getNativeBlockNum(n, k):\n \"\"\"Get number of native blocks.\"\"\"\n return k * (n - k)\n\n\ndef getParityBlockNum(n, k):\n \"\"\"Get number of parity blocks.\"\"\"\n return n * (n - k)\n\n\ndef getNodeIdList(n, k):\n \"\"\"Find the node id for a segment of blocks.\"\"\"\n \"\"\"Return a list of node id for the blocks.\"\"\"\n nodeidList = []\n segmentSize = n - k\n blockNum = getParityBlockNum(n, k)\n for i in range(int(blockNum / segmentSize)):\n for j in range(segmentSize):\n nodeidList.append(i)\n return nodeidList\n\n\ndef getParityCoeff(n, k):\n \"\"\"Get the parity coefficients of the blocks.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n parityCoeff = []\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n parityCoeff.append(GF256int(i + 1) ** j)\n return parityCoeff\n\n\ndef encode(n, k, src, parityCoeff, setting, metadata):\n \"\"\"Encode src file to parity chunks.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n infile = open(src, 'rb')\n indatalist = infile.read()\n infile.close()\n totalchunk = nativeBlockNum\n filesize = len(indatalist)\n for i in range(metadata.totalnode):\n fileNode = common.FileNodeMetadata(i)\n fileNode.nodekey = setting.nodeInfo[i].nodekey\n fileNode.nodetype = setting.nodeInfo[i].nodetype\n fileNode.bucketname = setting.nodeInfo[i].bucketname\n fileNode.bigchunksize = 0\n fileNode.chunknum = 0\n metadata.fileNodeInfo.append(fileNode)\n if filesize > 0:\n chunksize = filesize / totalchunk + 1\n indatalist += '\\x00' * (chunksize * totalchunk - filesize)\n parityCoeff_temp = ''.join([chr(parity) for parity in parityCoeff])\n outdatalist = codings.clibfmsr.clibfmsr.encodeComputation(indatalist,\n parityCoeff_temp, nativeBlockNum, parityBlockNum, chunksize)\n else:\n chunksize = 0\n nodeIdList = getNodeIdList(n, k)\n for i in range(parityBlockNum):\n chunk = common.ChunkMetadata(i)\n 
chunk.chunkname = metadata.filename + '.chunk' + str(i)\n chunk.chunksize = chunksize\n chunk.chunktype = 'parity'\n chunk.chunkpath = setting.chunkdir + '/' + chunk.chunkname\n nodeid = nodeIdList[i]\n chunk.nodeid = nodeid\n chunk.nodekey = setting.nodeInfo[nodeid].nodekey\n chunk.nodetype = setting.nodeInfo[nodeid].nodetype\n chunk.bucketname = setting.nodeInfo[nodeid].bucketname\n chunk.action = 'upload'\n chunk.position = metadata.fileNodeInfo[nodeid].chunknum\n metadata.chunkInfo.append(chunk)\n metadata.fileNodeInfo[nodeid].bigchunksize += chunk.chunksize\n metadata.fileNodeInfo[nodeid].chunknum += 1\n metadata.totalchunk = parityBlockNum\n metadata.parityCoeff = parityCoeff[:]\n startchunk = 0\n writelen = 1048576\n for i in range(metadata.totalnode):\n dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)\n if chunksize > 0:\n f = open(dest, 'wb')\n numchunks = nodeIdList.count(i)\n writenext = startchunk * chunksize\n for j in range(startchunk * chunksize, (startchunk + numchunks) *\n chunksize - writelen, writelen):\n writenext = j + writelen\n f.write(outdatalist[j:writenext])\n f.write(outdatalist[writenext:(startchunk + numchunks) * chunksize]\n )\n f.close()\n startchunk += numchunks\n else:\n open(dest, 'wb').close()\n metadata.fileNodeInfo[i].bigchunkpath = dest\n metadata.fileNodeInfo[i\n ].bigchunkname = metadata.filename + '.node' + str(i)\n metadata.fileNodeInfo[i].action = 'upload'\n\n\ndef reversematrix(n, k, gj_matrix):\n \"\"\"Reverse matrix.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n for rowNo in range(nativeBlockNum):\n A = GF256int(0)\n for i in range(rowNo, nativeBlockNum, 1):\n if gj_matrix[i][rowNo] != 0:\n A = gj_matrix[i][rowNo]\n break\n temp_vector = [GF256int(0)] * (nativeBlockNum * 2)\n if i != rowNo:\n for j in range(nativeBlockNum * 2):\n temp_vector[j] = gj_matrix[i][j]\n gj_matrix[i][j] = gj_matrix[rowNo][j]\n gj_matrix[rowNo][j] = temp_vector[j]\n for m in 
range(nativeBlockNum * 2):\n gj_matrix[rowNo][m] = gj_matrix[rowNo][m] / A\n for j in range(rowNo + 1, nativeBlockNum, 1):\n B = gj_matrix[j][rowNo]\n for m in range(rowNo, nativeBlockNum * 2, 1):\n gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * B\n for rowNo in range(nativeBlockNum - 1, 0, -1):\n for j in range(0, rowNo, 1):\n C = gj_matrix[j][rowNo]\n for m in range(nativeBlockNum * 2):\n gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * C\n\n\ndef decode(n, k, src, blocknums, parityCoeff, dest, filesize, setting):\n \"\"\"Decode chunk files to dest file.\"\"\"\n if filesize <= 0:\n open(dest, 'wb').close()\n return\n cv_temp = []\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n enc_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(parityBlockNum)]\n dec_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(nativeBlockNum)]\n rev_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(nativeBlockNum)]\n gj_matrix = [[GF256int(0) for col in range(nativeBlockNum * 2)] for row in\n range(nativeBlockNum)]\n counter = 0\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n enc_matrix[i][j] = GF256int(parityCoeff[counter])\n counter += 1\n cm1 = CoeffMatrix(nativeBlockNum)\n for i in range(parityBlockNum):\n cv_temp.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n cv_temp[i].coeff_[j] = enc_matrix[i][j]\n cv_temp[i].first()\n cm1.addcoeffvector(cv_temp[i])\n i = 0\n for selectChunkNo in blocknums:\n for j in range(nativeBlockNum):\n dec_matrix[i][j] = enc_matrix[selectChunkNo][j]\n i += 1\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum):\n if j == i:\n rev_matrix[i][j] = GF256int(1)\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum * 2):\n if j < nativeBlockNum:\n gj_matrix[i][j] = dec_matrix[i][j]\n else:\n gj_matrix[i][j] = rev_matrix[i][j - nativeBlockNum]\n 
reversematrix(n, k, gj_matrix)\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum):\n dec_matrix[i][j] = gj_matrix[i][j + nativeBlockNum]\n selectchunk = []\n for filename in src:\n infile = open(filename, 'rb')\n selectchunk.append(infile.read())\n infile.close()\n chunksize = os.path.getsize(src[0])\n indatalist = ''.join(selectchunk)\n parityCoeff_temp = ''.join([chr(dec_matrix[i][j]) for i in range(\n nativeBlockNum) for j in range(nativeBlockNum)])\n outdatalist = codings.clibfmsr.clibfmsr.decodeComputation(indatalist,\n parityCoeff_temp, nativeBlockNum, chunksize)\n outfile = open(dest, 'wb')\n writelen = 1048576\n writenext = 0\n for i in range(0, filesize - writelen, writelen):\n writenext = i + writelen\n outfile.write(outdatalist[i:writenext])\n outfile.write(outdatalist[writenext:filesize])\n outfile.close()\n\n\ndef getCheckNum(parityBlockNum):\n \"\"\"Get check number for checking strong MDS, for fmsr(k=n-2) only.\"\"\"\n return int((parityBlockNum - 2) * (parityBlockNum - 2 - 1) / 2 - (\n parityBlockNum / 2 - 1))\n\n\ndef getStrongMDSPropertyDegree(repairNodeno, nativeBlockNum, parityBlockNum,\n checkNum, enc_matrix):\n \"\"\"Get strong MDS property degree.\"\"\"\n currentStrongMDSPropertyDegree = 0\n survivalcoeffvectorset = []\n flag = 0\n for i in range(parityBlockNum):\n if int(i / 2) != repairNodeno:\n survivalcoeffvectorset.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n survivalcoeffvectorset[i - flag * 2].coeff_[j] = enc_matrix[i][\n j]\n survivalcoeffvectorset[i - flag * 2].first()\n else:\n flag = 1\n s = 0\n for i in range(parityBlockNum - 2):\n for j in range(parityBlockNum - 2):\n if i < j:\n checkmatrix = CoeffMatrix(nativeBlockNum)\n for k in range(parityBlockNum - 2):\n if k != i and k != j:\n checkmatrix.addcoeffvector(survivalcoeffvectorset[k\n ].copy())\n if checkmatrix.rank_ == nativeBlockNum:\n currentStrongMDSPropertyDegree += 1\n s += 1\n return currentStrongMDSPropertyDegree\n\n\ndef 
checkMDS(MSR_n, MSR_k, enc_matrix):\n \"\"\"Check MDS property, for fmsr(k=n-2) only.\"\"\"\n \"\"\"Return a MDS property value.\"\"\"\n nativeBlockNum = getNativeBlockNum(MSR_n, MSR_k)\n parityBlockNum = getParityBlockNum(MSR_n, MSR_k)\n MDSpropery = True\n allcoeffvectors = []\n for i in range(parityBlockNum):\n allcoeffvectors.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n allcoeffvectors[i].coeff_[j] = enc_matrix[i][j]\n allcoeffvectors[i].first()\n permutation = int(MSR_n * (MSR_n - 1) / 2)\n checkmatrix = [CoeffMatrix(nativeBlockNum) for col in range(permutation)]\n s = 0\n for i in range(MSR_n):\n for j in range(MSR_n):\n if i < j:\n for b in range(MSR_n):\n if b != i and b != j:\n checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2\n ].copy())\n checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2 +\n 1].copy())\n if checkmatrix[s].rank_ != nativeBlockNum:\n MDSpropery = False\n s += 1\n return MDSpropery\n\n\ndef checkstongMDS(n, k, nativeBlockNum, parityBlockNum, enc_matrix):\n \"\"\"Check strong MDS property, for fmsr(k=n-2) only.\"\"\"\n \"\"\"Return list of MDS property degrees.\"\"\"\n strongMDSPropertyDegrees = []\n checkNum = getCheckNum(parityBlockNum)\n for i in range(n):\n strongMDSPropertyDegrees.append(getStrongMDSPropertyDegree(i,\n nativeBlockNum, parityBlockNum, checkNum, enc_matrix))\n return strongMDSPropertyDegrees\n\n\ndef testStrongMDSProperty(strongMDSPropertyDegrees, checkNum, n):\n \"\"\"Decide whether the current parity coefficient set passes the strong MDS property.\"\"\"\n result = True\n threshold = 2 * (n - 1) * (n - 2) - (n - 2) * (n - 3) / 2\n for degree in strongMDSPropertyDegrees:\n if degree < threshold:\n result = False\n return result\n\n\ndef functionalRepair(n, k, src, blocknums, failedNode, parityCoeff,\n repairChunks, setting, metadata):\n \"\"\"Functional repair by generating new parity chunks.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n 
checkNum = getCheckNum(parityBlockNum)\n enc_matrix = metadata.enc_matrix\n repairCodingCoeff = metadata.repairCodingCoeff\n indatalist = []\n for filepath in src:\n infile = open(filepath, 'rb')\n indatalist.append(infile.read())\n infile.close()\n chunksize = os.path.getsize(src[0])\n if chunksize > 0:\n indatalist_temp = ''.join(indatalist)\n parityCoeff_temp = []\n for i in range(n - k):\n for j in range(n - 1):\n parityCoeff_temp.append(chr(repairCodingCoeff[i][j]))\n parityCoeff_temp = ''.join(parityCoeff_temp)\n outdatalist = codings.clibfmsr.clibfmsr.repairComputation(\n indatalist_temp, parityCoeff_temp, n, k, chunksize)\n counter = 0\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n parityCoeff[counter] = enc_matrix[i][j]\n counter += 1\n writelen = 1048576\n writenext = 0\n for i in range(metadata.totalnode):\n if setting.nodeInfo[i].healthy == False:\n dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i\n )\n filesize = metadata.fileNodeInfo[i].bigchunksize\n if chunksize <= 0:\n open(dest, 'wb').close()\n else:\n outfile = open(dest, 'wb')\n for j in range(0, filesize - writelen, writelen):\n writenext = j + writelen\n outfile.write(outdatalist[j:writenext])\n outfile.write(outdatalist[writenext:filesize])\n outfile.close()\n metadata.fileNodeInfo[i].bigchunkpath = dest\n metadata.fileNodeInfo[i\n ].bigchunkname = metadata.filename + '.node' + str(i)\n metadata.fileNodeInfo[i].action = 'upload'\n",
"step-4": "import sys\nimport os\nimport random\nfrom finitefield import GF256int\nfrom coeffvector import CoeffVector\nfrom coeffvector import CoeffMatrix\nimport common\nimport codings.clibfmsr.clibfmsr\nuseClibfmsr = True\n\n\ndef getNativeBlockNum(n, k):\n \"\"\"Get number of native blocks.\"\"\"\n return k * (n - k)\n\n\ndef getParityBlockNum(n, k):\n \"\"\"Get number of parity blocks.\"\"\"\n return n * (n - k)\n\n\ndef getNodeIdList(n, k):\n \"\"\"Find the node id for a segment of blocks.\"\"\"\n \"\"\"Return a list of node id for the blocks.\"\"\"\n nodeidList = []\n segmentSize = n - k\n blockNum = getParityBlockNum(n, k)\n for i in range(int(blockNum / segmentSize)):\n for j in range(segmentSize):\n nodeidList.append(i)\n return nodeidList\n\n\ndef getParityCoeff(n, k):\n \"\"\"Get the parity coefficients of the blocks.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n parityCoeff = []\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n parityCoeff.append(GF256int(i + 1) ** j)\n return parityCoeff\n\n\ndef encode(n, k, src, parityCoeff, setting, metadata):\n \"\"\"Encode src file to parity chunks.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n infile = open(src, 'rb')\n indatalist = infile.read()\n infile.close()\n totalchunk = nativeBlockNum\n filesize = len(indatalist)\n for i in range(metadata.totalnode):\n fileNode = common.FileNodeMetadata(i)\n fileNode.nodekey = setting.nodeInfo[i].nodekey\n fileNode.nodetype = setting.nodeInfo[i].nodetype\n fileNode.bucketname = setting.nodeInfo[i].bucketname\n fileNode.bigchunksize = 0\n fileNode.chunknum = 0\n metadata.fileNodeInfo.append(fileNode)\n if filesize > 0:\n chunksize = filesize / totalchunk + 1\n indatalist += '\\x00' * (chunksize * totalchunk - filesize)\n parityCoeff_temp = ''.join([chr(parity) for parity in parityCoeff])\n outdatalist = 
codings.clibfmsr.clibfmsr.encodeComputation(indatalist,\n parityCoeff_temp, nativeBlockNum, parityBlockNum, chunksize)\n else:\n chunksize = 0\n nodeIdList = getNodeIdList(n, k)\n for i in range(parityBlockNum):\n chunk = common.ChunkMetadata(i)\n chunk.chunkname = metadata.filename + '.chunk' + str(i)\n chunk.chunksize = chunksize\n chunk.chunktype = 'parity'\n chunk.chunkpath = setting.chunkdir + '/' + chunk.chunkname\n nodeid = nodeIdList[i]\n chunk.nodeid = nodeid\n chunk.nodekey = setting.nodeInfo[nodeid].nodekey\n chunk.nodetype = setting.nodeInfo[nodeid].nodetype\n chunk.bucketname = setting.nodeInfo[nodeid].bucketname\n chunk.action = 'upload'\n chunk.position = metadata.fileNodeInfo[nodeid].chunknum\n metadata.chunkInfo.append(chunk)\n metadata.fileNodeInfo[nodeid].bigchunksize += chunk.chunksize\n metadata.fileNodeInfo[nodeid].chunknum += 1\n metadata.totalchunk = parityBlockNum\n metadata.parityCoeff = parityCoeff[:]\n startchunk = 0\n writelen = 1048576\n for i in range(metadata.totalnode):\n dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)\n if chunksize > 0:\n f = open(dest, 'wb')\n numchunks = nodeIdList.count(i)\n writenext = startchunk * chunksize\n for j in range(startchunk * chunksize, (startchunk + numchunks) *\n chunksize - writelen, writelen):\n writenext = j + writelen\n f.write(outdatalist[j:writenext])\n f.write(outdatalist[writenext:(startchunk + numchunks) * chunksize]\n )\n f.close()\n startchunk += numchunks\n else:\n open(dest, 'wb').close()\n metadata.fileNodeInfo[i].bigchunkpath = dest\n metadata.fileNodeInfo[i\n ].bigchunkname = metadata.filename + '.node' + str(i)\n metadata.fileNodeInfo[i].action = 'upload'\n\n\ndef reversematrix(n, k, gj_matrix):\n \"\"\"Reverse matrix.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n for rowNo in range(nativeBlockNum):\n A = GF256int(0)\n for i in range(rowNo, nativeBlockNum, 1):\n if gj_matrix[i][rowNo] != 0:\n A = 
gj_matrix[i][rowNo]\n break\n temp_vector = [GF256int(0)] * (nativeBlockNum * 2)\n if i != rowNo:\n for j in range(nativeBlockNum * 2):\n temp_vector[j] = gj_matrix[i][j]\n gj_matrix[i][j] = gj_matrix[rowNo][j]\n gj_matrix[rowNo][j] = temp_vector[j]\n for m in range(nativeBlockNum * 2):\n gj_matrix[rowNo][m] = gj_matrix[rowNo][m] / A\n for j in range(rowNo + 1, nativeBlockNum, 1):\n B = gj_matrix[j][rowNo]\n for m in range(rowNo, nativeBlockNum * 2, 1):\n gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * B\n for rowNo in range(nativeBlockNum - 1, 0, -1):\n for j in range(0, rowNo, 1):\n C = gj_matrix[j][rowNo]\n for m in range(nativeBlockNum * 2):\n gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m] * C\n\n\ndef decode(n, k, src, blocknums, parityCoeff, dest, filesize, setting):\n \"\"\"Decode chunk files to dest file.\"\"\"\n if filesize <= 0:\n open(dest, 'wb').close()\n return\n cv_temp = []\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n enc_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(parityBlockNum)]\n dec_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(nativeBlockNum)]\n rev_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in\n range(nativeBlockNum)]\n gj_matrix = [[GF256int(0) for col in range(nativeBlockNum * 2)] for row in\n range(nativeBlockNum)]\n counter = 0\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n enc_matrix[i][j] = GF256int(parityCoeff[counter])\n counter += 1\n cm1 = CoeffMatrix(nativeBlockNum)\n for i in range(parityBlockNum):\n cv_temp.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n cv_temp[i].coeff_[j] = enc_matrix[i][j]\n cv_temp[i].first()\n cm1.addcoeffvector(cv_temp[i])\n i = 0\n for selectChunkNo in blocknums:\n for j in range(nativeBlockNum):\n dec_matrix[i][j] = enc_matrix[selectChunkNo][j]\n i += 1\n for i in range(nativeBlockNum):\n for j in 
range(nativeBlockNum):\n if j == i:\n rev_matrix[i][j] = GF256int(1)\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum * 2):\n if j < nativeBlockNum:\n gj_matrix[i][j] = dec_matrix[i][j]\n else:\n gj_matrix[i][j] = rev_matrix[i][j - nativeBlockNum]\n reversematrix(n, k, gj_matrix)\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum):\n dec_matrix[i][j] = gj_matrix[i][j + nativeBlockNum]\n selectchunk = []\n for filename in src:\n infile = open(filename, 'rb')\n selectchunk.append(infile.read())\n infile.close()\n chunksize = os.path.getsize(src[0])\n indatalist = ''.join(selectchunk)\n parityCoeff_temp = ''.join([chr(dec_matrix[i][j]) for i in range(\n nativeBlockNum) for j in range(nativeBlockNum)])\n outdatalist = codings.clibfmsr.clibfmsr.decodeComputation(indatalist,\n parityCoeff_temp, nativeBlockNum, chunksize)\n outfile = open(dest, 'wb')\n writelen = 1048576\n writenext = 0\n for i in range(0, filesize - writelen, writelen):\n writenext = i + writelen\n outfile.write(outdatalist[i:writenext])\n outfile.write(outdatalist[writenext:filesize])\n outfile.close()\n\n\ndef getCheckNum(parityBlockNum):\n \"\"\"Get check number for checking strong MDS, for fmsr(k=n-2) only.\"\"\"\n return int((parityBlockNum - 2) * (parityBlockNum - 2 - 1) / 2 - (\n parityBlockNum / 2 - 1))\n\n\ndef getStrongMDSPropertyDegree(repairNodeno, nativeBlockNum, parityBlockNum,\n checkNum, enc_matrix):\n \"\"\"Get strong MDS property degree.\"\"\"\n currentStrongMDSPropertyDegree = 0\n survivalcoeffvectorset = []\n flag = 0\n for i in range(parityBlockNum):\n if int(i / 2) != repairNodeno:\n survivalcoeffvectorset.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n survivalcoeffvectorset[i - flag * 2].coeff_[j] = enc_matrix[i][\n j]\n survivalcoeffvectorset[i - flag * 2].first()\n else:\n flag = 1\n s = 0\n for i in range(parityBlockNum - 2):\n for j in range(parityBlockNum - 2):\n if i < j:\n checkmatrix = CoeffMatrix(nativeBlockNum)\n 
for k in range(parityBlockNum - 2):\n if k != i and k != j:\n checkmatrix.addcoeffvector(survivalcoeffvectorset[k\n ].copy())\n if checkmatrix.rank_ == nativeBlockNum:\n currentStrongMDSPropertyDegree += 1\n s += 1\n return currentStrongMDSPropertyDegree\n\n\ndef checkMDS(MSR_n, MSR_k, enc_matrix):\n \"\"\"Check MDS property, for fmsr(k=n-2) only.\"\"\"\n \"\"\"Return a MDS property value.\"\"\"\n nativeBlockNum = getNativeBlockNum(MSR_n, MSR_k)\n parityBlockNum = getParityBlockNum(MSR_n, MSR_k)\n MDSpropery = True\n allcoeffvectors = []\n for i in range(parityBlockNum):\n allcoeffvectors.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n allcoeffvectors[i].coeff_[j] = enc_matrix[i][j]\n allcoeffvectors[i].first()\n permutation = int(MSR_n * (MSR_n - 1) / 2)\n checkmatrix = [CoeffMatrix(nativeBlockNum) for col in range(permutation)]\n s = 0\n for i in range(MSR_n):\n for j in range(MSR_n):\n if i < j:\n for b in range(MSR_n):\n if b != i and b != j:\n checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2\n ].copy())\n checkmatrix[s].addcoeffvector(allcoeffvectors[b * 2 +\n 1].copy())\n if checkmatrix[s].rank_ != nativeBlockNum:\n MDSpropery = False\n s += 1\n return MDSpropery\n\n\ndef checkstongMDS(n, k, nativeBlockNum, parityBlockNum, enc_matrix):\n \"\"\"Check strong MDS property, for fmsr(k=n-2) only.\"\"\"\n \"\"\"Return list of MDS property degrees.\"\"\"\n strongMDSPropertyDegrees = []\n checkNum = getCheckNum(parityBlockNum)\n for i in range(n):\n strongMDSPropertyDegrees.append(getStrongMDSPropertyDegree(i,\n nativeBlockNum, parityBlockNum, checkNum, enc_matrix))\n return strongMDSPropertyDegrees\n\n\ndef testStrongMDSProperty(strongMDSPropertyDegrees, checkNum, n):\n \"\"\"Decide whether the current parity coefficient set passes the strong MDS property.\"\"\"\n result = True\n threshold = 2 * (n - 1) * (n - 2) - (n - 2) * (n - 3) / 2\n for degree in strongMDSPropertyDegrees:\n if degree < threshold:\n result = False\n return 
result\n\n\ndef functionalRepair(n, k, src, blocknums, failedNode, parityCoeff,\n repairChunks, setting, metadata):\n \"\"\"Functional repair by generating new parity chunks.\"\"\"\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n checkNum = getCheckNum(parityBlockNum)\n enc_matrix = metadata.enc_matrix\n repairCodingCoeff = metadata.repairCodingCoeff\n indatalist = []\n for filepath in src:\n infile = open(filepath, 'rb')\n indatalist.append(infile.read())\n infile.close()\n chunksize = os.path.getsize(src[0])\n if chunksize > 0:\n indatalist_temp = ''.join(indatalist)\n parityCoeff_temp = []\n for i in range(n - k):\n for j in range(n - 1):\n parityCoeff_temp.append(chr(repairCodingCoeff[i][j]))\n parityCoeff_temp = ''.join(parityCoeff_temp)\n outdatalist = codings.clibfmsr.clibfmsr.repairComputation(\n indatalist_temp, parityCoeff_temp, n, k, chunksize)\n counter = 0\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n parityCoeff[counter] = enc_matrix[i][j]\n counter += 1\n writelen = 1048576\n writenext = 0\n for i in range(metadata.totalnode):\n if setting.nodeInfo[i].healthy == False:\n dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i\n )\n filesize = metadata.fileNodeInfo[i].bigchunksize\n if chunksize <= 0:\n open(dest, 'wb').close()\n else:\n outfile = open(dest, 'wb')\n for j in range(0, filesize - writelen, writelen):\n writenext = j + writelen\n outfile.write(outdatalist[j:writenext])\n outfile.write(outdatalist[writenext:filesize])\n outfile.close()\n metadata.fileNodeInfo[i].bigchunkpath = dest\n metadata.fileNodeInfo[i\n ].bigchunkname = metadata.filename + '.node' + str(i)\n metadata.fileNodeInfo[i].action = 'upload'\n",
"step-5": "#!/usr/bin/python\n#\n# @name = 'fmsrutil.py'\n# \n# @description = \"F-MSR utilities module.\"\n#\n# @author = ['YU Chiu Man', 'HU Yuchong', 'TANG Yang']\n#\n\nimport sys\nimport os\nimport random\n\nfrom finitefield import GF256int\nfrom coeffvector import CoeffVector\nfrom coeffvector import CoeffMatrix\n\nimport common\n\n#Check if C library of F-MSR is installed:\nimport codings.clibfmsr.clibfmsr\nuseClibfmsr = True\n\n\ndef getNativeBlockNum(n, k):\n '''Get number of native blocks.'''\n return k*(n-k)\n\n\ndef getParityBlockNum(n, k):\n '''Get number of parity blocks.'''\n return n*(n-k)\n\n\ndef getNodeIdList(n, k):\n '''Find the node id for a segment of blocks.'''\n '''Return a list of node id for the blocks.'''\n nodeidList = []\n segmentSize = n-k\n blockNum = getParityBlockNum(n, k)\n for i in range(int(blockNum/segmentSize)):\n for j in range(segmentSize):\n nodeidList.append(i)\n return nodeidList\n\n\ndef getParityCoeff(n, k):\n '''Get the parity coefficients of the blocks.'''\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n parityCoeff = []\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n parityCoeff.append(GF256int(i+1)**j)\n return parityCoeff\n\n\ndef encode(n, k, src, parityCoeff, setting, metadata):\n '''Encode src file to parity chunks.'''\n\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n\n infile = open(src, 'rb')\n indatalist = infile.read()\n infile.close()\n totalchunk = nativeBlockNum\n filesize = len(indatalist)\n\n #Generate info for big-chunk:\n for i in range(metadata.totalnode):\n fileNode = common.FileNodeMetadata(i)\n fileNode.nodekey = setting.nodeInfo[i].nodekey\n fileNode.nodetype = setting.nodeInfo[i].nodetype\n fileNode.bucketname = setting.nodeInfo[i].bucketname\n fileNode.bigchunksize = 0\n fileNode.chunknum = 0\n metadata.fileNodeInfo.append(fileNode)\n\n #Encode indatalist to outdatalist\n if filesize > 0:\n 
chunksize = filesize/totalchunk + 1\n indatalist += '\\0'*(chunksize*totalchunk - filesize)\n parityCoeff_temp = ''.join([chr(parity) for parity in parityCoeff])\n outdatalist = codings.clibfmsr.clibfmsr.encodeComputation(indatalist, \\\n parityCoeff_temp, nativeBlockNum, parityBlockNum, chunksize)\n else:\n chunksize = 0\n\n #Generate info for small chunks:\n nodeIdList = getNodeIdList(n, k)\n for i in range(parityBlockNum):\n chunk = common.ChunkMetadata(i)\n chunk.chunkname = metadata.filename + '.chunk' + str(i)\n chunk.chunksize = chunksize\n chunk.chunktype = 'parity'\n chunk.chunkpath = setting.chunkdir + '/' + chunk.chunkname\n nodeid = nodeIdList[i]\n chunk.nodeid = nodeid\n chunk.nodekey = setting.nodeInfo[nodeid].nodekey\n chunk.nodetype = setting.nodeInfo[nodeid].nodetype\n chunk.bucketname = setting.nodeInfo[nodeid].bucketname\n chunk.action = 'upload'\n #Add chunk position inside big-chunk:\n chunk.position = metadata.fileNodeInfo[nodeid].chunknum\n metadata.chunkInfo.append(chunk)\n #Add support for big-chunk:\n metadata.fileNodeInfo[nodeid].bigchunksize += chunk.chunksize\n metadata.fileNodeInfo[nodeid].chunknum += 1\n metadata.totalchunk = parityBlockNum\n metadata.parityCoeff = parityCoeff[:]\n\n #Generate big-chunks:\n startchunk = 0\n writelen = 1048576\n for i in range(metadata.totalnode):\n dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)\n if chunksize > 0:\n f = open(dest, 'wb')\n numchunks = nodeIdList.count(i)\n writenext = startchunk*chunksize\n for j in range(startchunk*chunksize, (startchunk+numchunks)*chunksize-writelen, writelen):\n writenext = j+writelen\n f.write(outdatalist[j:writenext])\n f.write(outdatalist[writenext:(startchunk+numchunks)*chunksize])\n f.close()\n startchunk += numchunks\n else:\n open(dest, 'wb').close()\n metadata.fileNodeInfo[i].bigchunkpath = dest\n metadata.fileNodeInfo[i].bigchunkname = metadata.filename + '.node' + str(i)\n metadata.fileNodeInfo[i].action = 'upload'\n\n\ndef 
reversematrix(n, k, gj_matrix):\n '''Reverse matrix.'''\n\n ## The first elimination: decoding matrix -> lower unit triangular matrix\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n\n for rowNo in range(nativeBlockNum): \n ##1.find the rowNo row vector with 1st-coeff of valve non-zero \n A = GF256int(0) \n for i in range(rowNo,nativeBlockNum,1):\n if gj_matrix[i][rowNo]!=0:\n A = gj_matrix[i][rowNo]\n break\n\n ##2. permutation between the rowNo row vector and the ith row vector\n temp_vector = [GF256int(0)]*(nativeBlockNum*2)\n\n if i!= rowNo:\n for j in range(nativeBlockNum*2):\n temp_vector[j] = gj_matrix[i][j] \n gj_matrix[i][j] = gj_matrix[rowNo][j]\n gj_matrix[rowNo][j] = temp_vector[j] \n ##3. in rowNo-th row vector, all the coeffs/1st coeff\n\n for m in range(nativeBlockNum*2):\n gj_matrix[rowNo][m] = gj_matrix[rowNo][m]/A \n\n ##4. The row vectors below rowNo-th row vector eliminate the rowNo-th coeff\n for j in range(rowNo+1,nativeBlockNum,1):\n B = gj_matrix[j][rowNo]\n for m in range(rowNo,nativeBlockNum*2,1):\n gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m]*B\n\n # The second elimination: decoding matrix -> unit matrix\n ##5. 
The row vectors above rowNo-th row vector eliminate the rowNo-th coeff \n for rowNo in range(nativeBlockNum-1,0,-1): \n for j in range(0,rowNo,1):\n C = gj_matrix[j][rowNo]\n for m in range(nativeBlockNum*2):\n gj_matrix[j][m] = gj_matrix[j][m] - gj_matrix[rowNo][m]*C\n\n\ndef decode(n, k, src, blocknums, parityCoeff, dest, filesize, setting):\n '''Decode chunk files to dest file.'''\n\n ## special handling for 0B files\n if filesize <= 0:\n open(dest,'wb').close()\n return\n\n cv_temp=[]\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n enc_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in range(parityBlockNum)]\n dec_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in range(nativeBlockNum)]\n rev_matrix = [[GF256int(0) for col in range(nativeBlockNum)] for row in range(nativeBlockNum)]\n gj_matrix = [[GF256int(0) for col in range(nativeBlockNum*2)] for row in range(nativeBlockNum)]\n\n ## generate the encoding matrix\n counter = 0\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n enc_matrix[i][j] = GF256int(parityCoeff[counter])\n counter += 1\n\n cm1 = CoeffMatrix(nativeBlockNum)\n for i in range(parityBlockNum):\n cv_temp.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n cv_temp[i].coeff_[j] = enc_matrix[i][j]\n cv_temp[i].first()\n cm1.addcoeffvector(cv_temp[i])\n\n ## generate the decoding matrix\n i=0\n for selectChunkNo in blocknums:\n for j in range(nativeBlockNum):\n dec_matrix[i][j]=enc_matrix[selectChunkNo][j]\n i += 1\n\n ## initialize the reverse matrix\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum):\n if j==i:\n rev_matrix[i][j]= GF256int(1)\n\n ## initialize the Gauss-Jordan matrix = [decoding,reverse]\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum*2):\n if j<nativeBlockNum:\n gj_matrix[i][j]= dec_matrix[i][j]\n else:\n gj_matrix[i][j]= rev_matrix[i][j-nativeBlockNum]\n\n reversematrix(n, k, 
gj_matrix)\n\n for i in range(nativeBlockNum):\n for j in range(nativeBlockNum):\n dec_matrix[i][j] = gj_matrix[i][j+nativeBlockNum]\n\n ##generate decode data chunks\n selectchunk=[]\n for filename in src:\n infile = open(filename,'rb')\n selectchunk.append(infile.read())\n infile.close()\n chunksize = os.path.getsize(src[0])\n indatalist = ''.join(selectchunk)\n\n ##rebuild the original chunks\n parityCoeff_temp = ''.join([chr(dec_matrix[i][j]) \\\n for i in range(nativeBlockNum) \\\n for j in range(nativeBlockNum)])\n outdatalist = codings.clibfmsr.clibfmsr.decodeComputation(indatalist, \\\n parityCoeff_temp, nativeBlockNum, chunksize)\n\n outfile = open(dest,'wb')\n writelen = 1048576\n writenext = 0\n for i in range(0,filesize-writelen,writelen):\n writenext = i+writelen\n outfile.write(outdatalist[i:writenext])\n outfile.write(outdatalist[writenext:filesize])\n outfile.close()\n\n\ndef getCheckNum(parityBlockNum):\n '''Get check number for checking strong MDS, for fmsr(k=n-2) only.'''\n return int((parityBlockNum-2)*(parityBlockNum-2-1)/2 - ((parityBlockNum/2)-1))\n\ndef getStrongMDSPropertyDegree(repairNodeno, nativeBlockNum, parityBlockNum, checkNum, enc_matrix):\n '''Get strong MDS property degree.'''\n\n currentStrongMDSPropertyDegree = 0\n survivalcoeffvectorset = []\n flag = 0\n for i in range(parityBlockNum):\n #get coeff vectors of survival parity blocks\n if int(i/2)!= repairNodeno:\n survivalcoeffvectorset.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n survivalcoeffvectorset[i - flag*2].coeff_[j] = enc_matrix[i][j]\n survivalcoeffvectorset[i - flag*2].first() \n else:\n flag =1\n\n s = 0\n for i in range(parityBlockNum-2):\n for j in range(parityBlockNum-2):\n if i<j:\n checkmatrix = CoeffMatrix(nativeBlockNum)\n for k in range (parityBlockNum-2):\n if k!=i and k!=j:\n checkmatrix.addcoeffvector(survivalcoeffvectorset[k].copy())\n if checkmatrix.rank_ == nativeBlockNum:\n currentStrongMDSPropertyDegree += 1\n s += 1\n return 
currentStrongMDSPropertyDegree\n\ndef checkMDS(MSR_n, MSR_k, enc_matrix):\n '''Check MDS property, for fmsr(k=n-2) only.'''\n '''Return a MDS property value.'''\n\n nativeBlockNum = getNativeBlockNum(MSR_n, MSR_k)\n parityBlockNum = getParityBlockNum(MSR_n, MSR_k)\n MDSpropery = True\n allcoeffvectors = []\n for i in range(parityBlockNum):\n allcoeffvectors.append(CoeffVector(nativeBlockNum))\n for j in range(nativeBlockNum):\n allcoeffvectors[i].coeff_[j] = enc_matrix[i][j]\n allcoeffvectors[i].first()\n permutation = int(MSR_n * (MSR_n - 1) / 2)\n #permutation of selecting n-2 nodes from n nodes\n checkmatrix = [CoeffMatrix(nativeBlockNum) for col in range(permutation)]\n s = 0\n for i in range (MSR_n):\n for j in range(MSR_n):\n if i<j:\n for b in range(MSR_n):\n if b !=i and b!=j:\n checkmatrix[s].addcoeffvector(allcoeffvectors[b*2].copy())\n checkmatrix[s].addcoeffvector(allcoeffvectors[b*2+1].copy())\n if checkmatrix[s].rank_ != nativeBlockNum:\n MDSpropery = False\n s += 1\n return MDSpropery\n\ndef checkstongMDS(n, k, nativeBlockNum, parityBlockNum, enc_matrix):\n '''Check strong MDS property, for fmsr(k=n-2) only.'''\n '''Return list of MDS property degrees.'''\n\n strongMDSPropertyDegrees = []\n #get check-combination number\n checkNum = getCheckNum(parityBlockNum)\n #Calculate total strong MDS property degree\n for i in range(n):\n strongMDSPropertyDegrees.append(getStrongMDSPropertyDegree(i, \\\n nativeBlockNum, parityBlockNum, checkNum, enc_matrix))\n return strongMDSPropertyDegrees\n\n\ndef testStrongMDSProperty(strongMDSPropertyDegrees, checkNum,n):\n '''Decide whether the current parity coefficient set passes the strong MDS property.'''\n\n result = True\n #threshold = checkNum\n threshold = 2*(n-1)*(n-2)-(n-2)*(n-3)/2\n #Important: currently the threshold value is hardcode\n for degree in strongMDSPropertyDegrees:\n if degree < threshold:\n result = False\n return result\n\n\ndef functionalRepair(n, k, src, blocknums, failedNode, parityCoeff, 
repairChunks, setting, metadata):\n '''Functional repair by generating new parity chunks.'''\n\n nativeBlockNum = getNativeBlockNum(n, k)\n parityBlockNum = getParityBlockNum(n, k)\n checkNum = getCheckNum(parityBlockNum)\n\n ## read the encoding matrix and repair\n enc_matrix = metadata.enc_matrix\n repairCodingCoeff = metadata.repairCodingCoeff\n\n indatalist = []\n for filepath in src:\n infile = open(filepath, 'rb')\n indatalist.append(infile.read())\n infile.close()\n chunksize = os.path.getsize(src[0])\n\n if chunksize > 0:\n #Repair computation:\n indatalist_temp = ''.join(indatalist)\n parityCoeff_temp = []\n for i in range(n-k):\n for j in range(n-1):\n parityCoeff_temp.append(chr(repairCodingCoeff[i][j]))\n parityCoeff_temp = ''.join(parityCoeff_temp)\n outdatalist = codings.clibfmsr.clibfmsr.repairComputation(indatalist_temp, \\\n parityCoeff_temp, n, k, chunksize)\n\n counter = 0\n for i in range(parityBlockNum):\n for j in range(nativeBlockNum):\n parityCoeff[counter] = enc_matrix[i][j]\n counter += 1\n\n #Add support for big-chunk:\n writelen = 1048576\n writenext = 0\n for i in range(metadata.totalnode):\n if setting.nodeInfo[i].healthy == False:\n dest = setting.chunkdir + '/' + metadata.filename + '.node' + str(i)\n filesize = metadata.fileNodeInfo[i].bigchunksize\n if chunksize <= 0:\n open(dest,'wb').close()\n else:\n outfile = open(dest, 'wb')\n for j in range(0,filesize-writelen,writelen):\n writenext = j+writelen\n outfile.write(outdatalist[j:writenext])\n outfile.write(outdatalist[writenext:filesize])\n outfile.close()\n metadata.fileNodeInfo[i].bigchunkpath = dest\n metadata.fileNodeInfo[i].bigchunkname = metadata.filename + '.node' + str(i)\n metadata.fileNodeInfo[i].action = 'upload'\n\n",
"step-ids": [
10,
12,
13,
15,
16
]
}
|
[
10,
12,
13,
15,
16
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Migration(migrations.Migration):
dependencies = [('notifications', '0011_auto_20171229_1747')]
operations = [migrations.AlterField(model_name='notification', name=
'date', field=models.DateTimeField(auto_now=True, verbose_name=
'Dato')), migrations.AlterField(model_name='notification', name=
'priority', field=models.PositiveIntegerField(choices=[(0, 'Low'),
(1, 'Medium'), (2, 'High')], default=1, verbose_name='priority')),
migrations.AlterField(model_name='notification', name='sent_mail',
field=models.BooleanField(default=False, verbose_name='sent mail'))]
<|reserved_special_token_1|>
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter field options on the ``Notification`` model.

    Adjusts the ``date``, ``priority`` and ``sent_mail`` fields; no data
    is migrated, only field definitions change.
    """

    dependencies = [
        ('notifications', '0011_auto_20171229_1747'),
    ]

    operations = [
        # Timestamp is refreshed automatically on every save.
        migrations.AlterField(
            model_name='notification',
            name='date',
            field=models.DateTimeField(auto_now=True, verbose_name='Dato'),
        ),
        migrations.AlterField(
            model_name='notification',
            name='priority',
            field=models.PositiveIntegerField(
                choices=[(0, 'Low'), (1, 'Medium'), (2, 'High')],
                default=1,
                verbose_name='priority',
            ),
        ),
        migrations.AlterField(
            model_name='notification',
            name='sent_mail',
            field=models.BooleanField(default=False, verbose_name='sent mail'),
        ),
    ]
<|reserved_special_token_1|>
# Generated by Django 2.2.13 on 2021-08-11 15:38
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter field options on the ``Notification`` model.

    Changes only the field definitions (``date``, ``priority``,
    ``sent_mail``); no data migration is performed.
    """

    dependencies = [
        ("notifications", "0011_auto_20171229_1747"),
    ]

    operations = [
        # Timestamp is refreshed automatically on every save (auto_now=True).
        migrations.AlterField(
            model_name="notification",
            name="date",
            field=models.DateTimeField(auto_now=True, verbose_name="Dato"),
        ),
        # Stored as an integer; choices constrain it to 0/1/2 in forms.
        migrations.AlterField(
            model_name="notification",
            name="priority",
            field=models.PositiveIntegerField(
                choices=[(0, "Low"), (1, "Medium"), (2, "High")],
                default=1,
                verbose_name="priority",
            ),
        ),
        migrations.AlterField(
            model_name="notification",
            name="sent_mail",
            field=models.BooleanField(default=False, verbose_name="sent mail"),
        ),
    ]
|
flexible
|
{
"blob_id": "fa045ccd4e54332f6c05bf64e3318e05b8123a10",
"index": 3317,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\nclass Migration(migrations.Migration):\n <mask token>\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass Migration(migrations.Migration):\n dependencies = [('notifications', '0011_auto_20171229_1747')]\n operations = [migrations.AlterField(model_name='notification', name=\n 'date', field=models.DateTimeField(auto_now=True, verbose_name=\n 'Dato')), migrations.AlterField(model_name='notification', name=\n 'priority', field=models.PositiveIntegerField(choices=[(0, 'Low'),\n (1, 'Medium'), (2, 'High')], default=1, verbose_name='priority')),\n migrations.AlterField(model_name='notification', name='sent_mail',\n field=models.BooleanField(default=False, verbose_name='sent mail'))]\n",
"step-4": "from django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n dependencies = [('notifications', '0011_auto_20171229_1747')]\n operations = [migrations.AlterField(model_name='notification', name=\n 'date', field=models.DateTimeField(auto_now=True, verbose_name=\n 'Dato')), migrations.AlterField(model_name='notification', name=\n 'priority', field=models.PositiveIntegerField(choices=[(0, 'Low'),\n (1, 'Medium'), (2, 'High')], default=1, verbose_name='priority')),\n migrations.AlterField(model_name='notification', name='sent_mail',\n field=models.BooleanField(default=False, verbose_name='sent mail'))]\n",
"step-5": "# Generated by Django 2.2.13 on 2021-08-11 15:38\n\nfrom django.db import migrations, models\n\n\nclass Migration(migrations.Migration):\n\n dependencies = [\n (\"notifications\", \"0011_auto_20171229_1747\"),\n ]\n\n operations = [\n migrations.AlterField(\n model_name=\"notification\",\n name=\"date\",\n field=models.DateTimeField(auto_now=True, verbose_name=\"Dato\"),\n ),\n migrations.AlterField(\n model_name=\"notification\",\n name=\"priority\",\n field=models.PositiveIntegerField(\n choices=[(0, \"Low\"), (1, \"Medium\"), (2, \"High\")],\n default=1,\n verbose_name=\"priority\",\n ),\n ),\n migrations.AlterField(\n model_name=\"notification\",\n name=\"sent_mail\",\n field=models.BooleanField(default=False, verbose_name=\"sent mail\"),\n ),\n ]\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
"""
Auteur:Fayçal Chena
Date : 07 avril 2020
Consignes :
Écrire une fonction alea_dice(s) qui génère trois nombres (pseudo) aléatoires à l’aide
de la fonction randint du module random, représentant trois dés (à six faces avec
les valeurs de 1 à 6), et qui renvoie la valeur booléenne True si les dés forment un 421,
et la valeur booléenne False sinon.
Le paramètre s de la fonction est un nombre entier, qui sera passé en argument
à la fonction random.seed au début du code de la fonction. Cela permettra de
générer la même suite de nombres aléatoires à chaque appel de la fonction,
et ainsi de pouvoir tester son fonctionnement.
"""
def foo_6(x, y):
return y, x
a = 4
b = 8
foo_6(a, b)
print(a, b)
|
normal
|
{
"blob_id": "ad5a9e353d065eee477381aa6b1f233f975ea0ed",
"index": 3374,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n\n\ndef foo_6(x, y):\n return y, x\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef foo_6(x, y):\n return y, x\n\n\n<mask token>\nfoo_6(a, b)\nprint(a, b)\n",
"step-4": "<mask token>\n\n\ndef foo_6(x, y):\n return y, x\n\n\na = 4\nb = 8\nfoo_6(a, b)\nprint(a, b)\n",
"step-5": "\"\"\"\nAuteur:Fayçal Chena\nDate : 07 avril 2020\nConsignes :\nÉcrire une fonction alea_dice(s) qui génère trois nombres (pseudo) aléatoires à l’aide\nde la fonction randint du module random, représentant trois dés (à six faces avec\nles valeurs de 1 à 6), et qui renvoie la valeur booléenne True si les dés forment un 421,\net la valeur booléenne False sinon.\nLe paramètre s de la fonction est un nombre entier, qui sera passé en argument\nà la fonction random.seed au début du code de la fonction. Cela permettra de\ngénérer la même suite de nombres aléatoires à chaque appel de la fonction,\net ainsi de pouvoir tester son fonctionnement.\n\"\"\"\n\n\ndef foo_6(x, y):\n return y, x\n\n\na = 4\nb = 8\nfoo_6(a, b)\nprint(a, b)\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
from subprocess import check_output
import json
import datetime
date = datetime.datetime.now()
mo = date.month
day = date.day
year = date.year
str = '{0}-{1}-{2}'.format(mo, day, year)
instances = json.loads(check_output("aws lightsail get-instances", shell=True))
inst_names = []
inst_dict = {}
for instance in instances['instances']:
inst_names.append(instance['name'])
inst_dict[instance['name']] = []
print(inst_names)
snapshots = json.loads(check_output("aws lightsail get-instance-snapshots", shell=True))
for snapshot in snapshots['instanceSnapshots']:
inst_dict[snapshot['fromInstanceName']].append(snapshot)
for instance, snapshots in inst_dict.items():
print(json.dumps(json.loads(
check_output("aws lightsail create-instance-snapshot --instance-name " + instance + " --instance-snapshot-name " + instance + "-" + str,
shell=True))))
if len(snapshots) > 1:
sorted_snapshots = sorted(snapshots, key=lambda k: k['createdAt'])
print(json.dumps(json.loads(check_output("aws lightsail delete-instance-snapshot --instance-snapshot-name " + sorted_snapshots[0]['name'], shell=True))))
|
normal
|
{
"blob_id": "2023e0b749338488e63cbbb475b7a915bccccce0",
"index": 7531,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor instance in instances['instances']:\n inst_names.append(instance['name'])\n inst_dict[instance['name']] = []\nprint(inst_names)\n<mask token>\nfor snapshot in snapshots['instanceSnapshots']:\n inst_dict[snapshot['fromInstanceName']].append(snapshot)\nfor instance, snapshots in inst_dict.items():\n print(json.dumps(json.loads(check_output(\n 'aws lightsail create-instance-snapshot --instance-name ' +\n instance + ' --instance-snapshot-name ' + instance + '-' + str,\n shell=True))))\n if len(snapshots) > 1:\n sorted_snapshots = sorted(snapshots, key=lambda k: k['createdAt'])\n print(json.dumps(json.loads(check_output(\n 'aws lightsail delete-instance-snapshot --instance-snapshot-name '\n + sorted_snapshots[0]['name'], shell=True))))\n",
"step-3": "<mask token>\ndate = datetime.datetime.now()\nmo = date.month\nday = date.day\nyear = date.year\nstr = '{0}-{1}-{2}'.format(mo, day, year)\ninstances = json.loads(check_output('aws lightsail get-instances', shell=True))\ninst_names = []\ninst_dict = {}\nfor instance in instances['instances']:\n inst_names.append(instance['name'])\n inst_dict[instance['name']] = []\nprint(inst_names)\nsnapshots = json.loads(check_output('aws lightsail get-instance-snapshots',\n shell=True))\nfor snapshot in snapshots['instanceSnapshots']:\n inst_dict[snapshot['fromInstanceName']].append(snapshot)\nfor instance, snapshots in inst_dict.items():\n print(json.dumps(json.loads(check_output(\n 'aws lightsail create-instance-snapshot --instance-name ' +\n instance + ' --instance-snapshot-name ' + instance + '-' + str,\n shell=True))))\n if len(snapshots) > 1:\n sorted_snapshots = sorted(snapshots, key=lambda k: k['createdAt'])\n print(json.dumps(json.loads(check_output(\n 'aws lightsail delete-instance-snapshot --instance-snapshot-name '\n + sorted_snapshots[0]['name'], shell=True))))\n",
"step-4": "from subprocess import check_output\nimport json\nimport datetime\ndate = datetime.datetime.now()\nmo = date.month\nday = date.day\nyear = date.year\nstr = '{0}-{1}-{2}'.format(mo, day, year)\ninstances = json.loads(check_output('aws lightsail get-instances', shell=True))\ninst_names = []\ninst_dict = {}\nfor instance in instances['instances']:\n inst_names.append(instance['name'])\n inst_dict[instance['name']] = []\nprint(inst_names)\nsnapshots = json.loads(check_output('aws lightsail get-instance-snapshots',\n shell=True))\nfor snapshot in snapshots['instanceSnapshots']:\n inst_dict[snapshot['fromInstanceName']].append(snapshot)\nfor instance, snapshots in inst_dict.items():\n print(json.dumps(json.loads(check_output(\n 'aws lightsail create-instance-snapshot --instance-name ' +\n instance + ' --instance-snapshot-name ' + instance + '-' + str,\n shell=True))))\n if len(snapshots) > 1:\n sorted_snapshots = sorted(snapshots, key=lambda k: k['createdAt'])\n print(json.dumps(json.loads(check_output(\n 'aws lightsail delete-instance-snapshot --instance-snapshot-name '\n + sorted_snapshots[0]['name'], shell=True))))\n",
"step-5": "from subprocess import check_output\nimport json\nimport datetime\n\ndate = datetime.datetime.now()\nmo = date.month\nday = date.day\nyear = date.year\nstr = '{0}-{1}-{2}'.format(mo, day, year)\ninstances = json.loads(check_output(\"aws lightsail get-instances\", shell=True))\n\ninst_names = []\ninst_dict = {}\nfor instance in instances['instances']:\n inst_names.append(instance['name'])\n inst_dict[instance['name']] = []\n\nprint(inst_names)\nsnapshots = json.loads(check_output(\"aws lightsail get-instance-snapshots\", shell=True))\n\nfor snapshot in snapshots['instanceSnapshots']:\n inst_dict[snapshot['fromInstanceName']].append(snapshot)\n\nfor instance, snapshots in inst_dict.items():\n print(json.dumps(json.loads(\n check_output(\"aws lightsail create-instance-snapshot --instance-name \" + instance + \" --instance-snapshot-name \" + instance + \"-\" + str,\n shell=True))))\n if len(snapshots) > 1:\n sorted_snapshots = sorted(snapshots, key=lambda k: k['createdAt'])\n print(json.dumps(json.loads(check_output(\"aws lightsail delete-instance-snapshot --instance-snapshot-name \" + sorted_snapshots[0]['name'], shell=True))))\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
<|reserved_special_token_0|>
class GlibConan(ConanFile):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def build(self):
args = ['--disable-static']
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(args=args, configure_dir=
f'{self.name}-{self.version}')
autotools.make()
autotools.install()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GlibConan(ConanFile):
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_0|>
def source(self):
tools.get(
f'ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz'
)
def build(self):
args = ['--disable-static']
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(args=args, configure_dir=
f'{self.name}-{self.version}')
autotools.make()
autotools.install()
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class GlibConan(ConanFile):
name = 'glib'
description = 'Common C routines used by Gtk+ and other libs'
license = 'LGPL'
settings = {'os': ['Linux'], 'arch': ['x86_64', 'armv8']}
build_requires = 'generators/1.0.0', 'autotools/1.0.0'
requires = 'glibc/[>=2.31]', 'sh/[>=]'
def source(self):
tools.get(
f'ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz'
)
def build(self):
args = ['--disable-static']
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(args=args, configure_dir=
f'{self.name}-{self.version}')
autotools.make()
autotools.install()
<|reserved_special_token_1|>
from conans import *
class GlibConan(ConanFile):
name = 'glib'
description = 'Common C routines used by Gtk+ and other libs'
license = 'LGPL'
settings = {'os': ['Linux'], 'arch': ['x86_64', 'armv8']}
build_requires = 'generators/1.0.0', 'autotools/1.0.0'
requires = 'glibc/[>=2.31]', 'sh/[>=]'
def source(self):
tools.get(
f'ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz'
)
def build(self):
args = ['--disable-static']
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(args=args, configure_dir=
f'{self.name}-{self.version}')
autotools.make()
autotools.install()
<|reserved_special_token_1|>
from conans import *
class GlibConan(ConanFile):
name = "glib"
description = "Common C routines used by Gtk+ and other libs"
license = "LGPL"
settings = {"os": ["Linux"], "arch": ["x86_64", "armv8"]}
build_requires = (
"generators/1.0.0",
"autotools/1.0.0",
)
requires = (
"glibc/[>=2.31]",
"sh/[>=]",
)
def source(self):
tools.get(f"ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz")
def build(self):
args = [
"--disable-static",
]
autotools = AutoToolsBuildEnvironment(self)
autotools.configure(args=args, configure_dir=f"{self.name}-{self.version}")
autotools.make()
autotools.install()
|
flexible
|
{
"blob_id": "e49c5c6475a1210a9657d7bbd0490c8d20863718",
"index": 2285,
"step-1": "<mask token>\n\n\nclass GlibConan(ConanFile):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def build(self):\n args = ['--disable-static']\n autotools = AutoToolsBuildEnvironment(self)\n autotools.configure(args=args, configure_dir=\n f'{self.name}-{self.version}')\n autotools.make()\n autotools.install()\n",
"step-2": "<mask token>\n\n\nclass GlibConan(ConanFile):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n def source(self):\n tools.get(\n f'ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz'\n )\n\n def build(self):\n args = ['--disable-static']\n autotools = AutoToolsBuildEnvironment(self)\n autotools.configure(args=args, configure_dir=\n f'{self.name}-{self.version}')\n autotools.make()\n autotools.install()\n",
"step-3": "<mask token>\n\n\nclass GlibConan(ConanFile):\n name = 'glib'\n description = 'Common C routines used by Gtk+ and other libs'\n license = 'LGPL'\n settings = {'os': ['Linux'], 'arch': ['x86_64', 'armv8']}\n build_requires = 'generators/1.0.0', 'autotools/1.0.0'\n requires = 'glibc/[>=2.31]', 'sh/[>=]'\n\n def source(self):\n tools.get(\n f'ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz'\n )\n\n def build(self):\n args = ['--disable-static']\n autotools = AutoToolsBuildEnvironment(self)\n autotools.configure(args=args, configure_dir=\n f'{self.name}-{self.version}')\n autotools.make()\n autotools.install()\n",
"step-4": "from conans import *\n\n\nclass GlibConan(ConanFile):\n name = 'glib'\n description = 'Common C routines used by Gtk+ and other libs'\n license = 'LGPL'\n settings = {'os': ['Linux'], 'arch': ['x86_64', 'armv8']}\n build_requires = 'generators/1.0.0', 'autotools/1.0.0'\n requires = 'glibc/[>=2.31]', 'sh/[>=]'\n\n def source(self):\n tools.get(\n f'ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz'\n )\n\n def build(self):\n args = ['--disable-static']\n autotools = AutoToolsBuildEnvironment(self)\n autotools.configure(args=args, configure_dir=\n f'{self.name}-{self.version}')\n autotools.make()\n autotools.install()\n",
"step-5": "from conans import *\n\nclass GlibConan(ConanFile):\n name = \"glib\"\n description = \"Common C routines used by Gtk+ and other libs\"\n license = \"LGPL\"\n settings = {\"os\": [\"Linux\"], \"arch\": [\"x86_64\", \"armv8\"]}\n build_requires = (\n \"generators/1.0.0\",\n \"autotools/1.0.0\",\n )\n requires = (\n \"glibc/[>=2.31]\",\n \"sh/[>=]\",\n )\n\n def source(self):\n tools.get(f\"ftp://ftp.gnome.org/pub/gnome/sources/glib/1.2/glib-{self.version}.tar.gz\")\n\n def build(self):\n args = [\n \"--disable-static\",\n ]\n autotools = AutoToolsBuildEnvironment(self)\n autotools.configure(args=args, configure_dir=f\"{self.name}-{self.version}\")\n autotools.make()\n autotools.install()\n",
"step-ids": [
2,
3,
4,
5,
6
]
}
|
[
2,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
def executeUpgrade():
shell.executeCommand('pkg upgrade')
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def executeUpgrade():
shell.executeCommand('pkg upgrade')
<|reserved_special_token_0|>
def executeFindByName(name):
shell.executeCommand('pkg search ' + name)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def executeUpgrade():
shell.executeCommand('pkg upgrade')
<|reserved_special_token_0|>
def executeRemove(pkg_name):
shell.executeCommand('pkg remove ' + pkg_name)
shell.executeCommand('pkg autoremove')
def executeFindByName(name):
shell.executeCommand('pkg search ' + name)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def executeUpgrade():
shell.executeCommand('pkg upgrade')
def executeInstall(pkg_name):
shell.executeCommand('pkg install ' + pkg_name)
def executeRemove(pkg_name):
shell.executeCommand('pkg remove ' + pkg_name)
shell.executeCommand('pkg autoremove')
def executeFindByName(name):
shell.executeCommand('pkg search ' + name)
<|reserved_special_token_1|>
import shell
def executeUpgrade():
shell.executeCommand('pkg upgrade')
def executeInstall(pkg_name):
shell.executeCommand('pkg install ' + pkg_name)
def executeRemove(pkg_name):
shell.executeCommand('pkg remove ' + pkg_name)
shell.executeCommand('pkg autoremove')
def executeFindByName(name):
shell.executeCommand('pkg search ' + name)
|
flexible
|
{
"blob_id": "db55a603615c7d896569ada84f3110dd6c0ce45f",
"index": 1250,
"step-1": "<mask token>\n\n\ndef executeUpgrade():\n shell.executeCommand('pkg upgrade')\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef executeUpgrade():\n shell.executeCommand('pkg upgrade')\n\n\n<mask token>\n\n\ndef executeFindByName(name):\n shell.executeCommand('pkg search ' + name)\n",
"step-3": "<mask token>\n\n\ndef executeUpgrade():\n shell.executeCommand('pkg upgrade')\n\n\n<mask token>\n\n\ndef executeRemove(pkg_name):\n shell.executeCommand('pkg remove ' + pkg_name)\n shell.executeCommand('pkg autoremove')\n\n\ndef executeFindByName(name):\n shell.executeCommand('pkg search ' + name)\n",
"step-4": "<mask token>\n\n\ndef executeUpgrade():\n shell.executeCommand('pkg upgrade')\n\n\ndef executeInstall(pkg_name):\n shell.executeCommand('pkg install ' + pkg_name)\n\n\ndef executeRemove(pkg_name):\n shell.executeCommand('pkg remove ' + pkg_name)\n shell.executeCommand('pkg autoremove')\n\n\ndef executeFindByName(name):\n shell.executeCommand('pkg search ' + name)\n",
"step-5": "import shell\n\n\ndef executeUpgrade():\n shell.executeCommand('pkg upgrade')\n\n\ndef executeInstall(pkg_name):\n shell.executeCommand('pkg install ' + pkg_name)\n\n\ndef executeRemove(pkg_name):\n shell.executeCommand('pkg remove ' + pkg_name)\n shell.executeCommand('pkg autoremove')\n\n\ndef executeFindByName(name):\n shell.executeCommand('pkg search ' + name)\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
print(D.cumsum())
print(D.rolling(2).sum())
<|reserved_special_token_1|>
<|reserved_special_token_0|>
D = pd.Series(range(0, 20))
print(D.cumsum())
print(D.rolling(2).sum())
<|reserved_special_token_1|>
import pandas as pd
D = pd.Series(range(0, 20))
print(D.cumsum())
print(D.rolling(2).sum())
<|reserved_special_token_1|>
# 代码3-14 pandas累积统计特征函数、移动窗口统计函数示例
import pandas as pd
D = pd.Series(range(0, 20)) # 构造Series,内容为0~19共20个整数
print(D.cumsum()) # 给出前n项和
print(D.rolling(2).sum()) # 依次对相邻两项求和
|
flexible
|
{
"blob_id": "7639b80c9e6e1b2e1e55a47a862c433b64168cf6",
"index": 7475,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nprint(D.cumsum())\nprint(D.rolling(2).sum())\n",
"step-3": "<mask token>\nD = pd.Series(range(0, 20))\nprint(D.cumsum())\nprint(D.rolling(2).sum())\n",
"step-4": "import pandas as pd\nD = pd.Series(range(0, 20))\nprint(D.cumsum())\nprint(D.rolling(2).sum())\n",
"step-5": "# 代码3-14 pandas累积统计特征函数、移动窗口统计函数示例\n\nimport pandas as pd\n\nD = pd.Series(range(0, 20)) # 构造Series,内容为0~19共20个整数\nprint(D.cumsum()) # 给出前n项和\nprint(D.rolling(2).sum()) # 依次对相邻两项求和\n",
"step-ids": [
0,
1,
2,
3,
4
]
}
|
[
0,
1,
2,
3,
4
] |
class subset:
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class subset:
def __init__(self, weight, itemSet, size, setNum):
self.weight = weight
self.itemSet = itemSet
self.size = size
self.setNum = setNum
def findCover(base, arr):
uniq = []
uni = []
if len(base.itemSet) == rangeOfVal:
return base
remain = rangeOfVal
for i in arr:
if base.itemSet.isdisjoint(i.itemSet) == True:
uniq.append(i)
remain = remain - len(i.itemSet)
addedSub = subset(base.weight + i.weight, base.itemSet.union(i.
itemSet), base.size + i.size, str(base.setNum) + ' ' + str(
i.setNum))
uni.append(addedSub)
print('added:', addedSub.itemSet)
if addedSub.size == rangeOfVal:
return addedSub
print()
for j in uni:
if remain == len(base.itemSet):
findCover(j, uniq)
return
<|reserved_special_token_0|>
<|reserved_special_token_1|>
class subset:
def __init__(self, weight, itemSet, size, setNum):
self.weight = weight
self.itemSet = itemSet
self.size = size
self.setNum = setNum
def findCover(base, arr):
uniq = []
uni = []
if len(base.itemSet) == rangeOfVal:
return base
remain = rangeOfVal
for i in arr:
if base.itemSet.isdisjoint(i.itemSet) == True:
uniq.append(i)
remain = remain - len(i.itemSet)
addedSub = subset(base.weight + i.weight, base.itemSet.union(i.
itemSet), base.size + i.size, str(base.setNum) + ' ' + str(
i.setNum))
uni.append(addedSub)
print('added:', addedSub.itemSet)
if addedSub.size == rangeOfVal:
return addedSub
print()
for j in uni:
if remain == len(base.itemSet):
findCover(j, uniq)
return
<|reserved_special_token_0|>
while True:
itemSet = f.readline()
if itemSet == '':
break
else:
weight = int(f.readline())
arrItems = itemSet.split(' ')
i = 0
for item in arrItems:
if item != '\n':
arrItems[i] = int(item)
i += 1
else:
arrItems.remove('\n')
arrItems.sort()
s = subset(weight, set(arrItems), len(arrItems), num)
subsetList.append(s)
num += 1
<|reserved_special_token_0|>
for base in subsetList:
print('base:', base.setNum)
o = findCover(base, subsetList[inc:len(subsetList)])
if o != None:
print('here!')
covers.append(o)
inc += 1
for w in covers:
if w.weight < minWeight:
minWeight = w.weight
minCover = w.setNum
print(minWeight)
print(minCover)
<|reserved_special_token_1|>
class subset:
def __init__(self, weight, itemSet, size, setNum):
self.weight = weight
self.itemSet = itemSet
self.size = size
self.setNum = setNum
def findCover(base, arr):
uniq = []
uni = []
if len(base.itemSet) == rangeOfVal:
return base
remain = rangeOfVal
for i in arr:
if base.itemSet.isdisjoint(i.itemSet) == True:
uniq.append(i)
remain = remain - len(i.itemSet)
addedSub = subset(base.weight + i.weight, base.itemSet.union(i.
itemSet), base.size + i.size, str(base.setNum) + ' ' + str(
i.setNum))
uni.append(addedSub)
print('added:', addedSub.itemSet)
if addedSub.size == rangeOfVal:
return addedSub
print()
for j in uni:
if remain == len(base.itemSet):
findCover(j, uniq)
return
fileName = 'Input_attempt3.txt'
f = open(fileName, 'r')
rangeOfVal = int(f.readline())
numOfSub = int(f.readline())
num = 0
minWeight = 500001
minCover = []
subsetList = []
while True:
itemSet = f.readline()
if itemSet == '':
break
else:
weight = int(f.readline())
arrItems = itemSet.split(' ')
i = 0
for item in arrItems:
if item != '\n':
arrItems[i] = int(item)
i += 1
else:
arrItems.remove('\n')
arrItems.sort()
s = subset(weight, set(arrItems), len(arrItems), num)
subsetList.append(s)
num += 1
covers = []
inc = 1
for base in subsetList:
print('base:', base.setNum)
o = findCover(base, subsetList[inc:len(subsetList)])
if o != None:
print('here!')
covers.append(o)
inc += 1
for w in covers:
if w.weight < minWeight:
minWeight = w.weight
minCover = w.setNum
print(minWeight)
print(minCover)
<|reserved_special_token_1|>
class subset:
def __init__(self, weight, itemSet, size, setNum):
self.weight = weight
self.itemSet = itemSet
self.size = size
self.setNum = setNum
def findCover(base, arr):
uniq = [] #array that can be union
uni = [] #array has been unionized w/ base
if len(base.itemSet) == rangeOfVal:
# print("COVER:", base.itemSet)
return base
remain = rangeOfVal
# Search through arr to find all potential subsets
for i in arr:
# print("compare: ", i.itemSet)
if base.itemSet.isdisjoint(i.itemSet) == True:
# Unique array
uniq.append(i)
remain = remain - len(i.itemSet)
# print("uniq: ", len(uniq))
addedSub = subset(base.weight + i.weight,
base.itemSet.union(i.itemSet),
base.size + i.size,
str(base.setNum) + " " + str(i.setNum))
# Union array
uni.append(addedSub)
print("added:", addedSub.itemSet)
if addedSub.size == rangeOfVal:
# print("COVER:", addedSub.itemSet)
return addedSub
print()
for j in uni:
# print(j.setNum)
if remain == len(base.itemSet):
findCover(j, uniq)
# print("_____________________________NONE_______________________________")
return
# fileName="./inputs/input_group115.txt"
fileName="Input_attempt3.txt"
f=open(fileName, "r")
rangeOfVal=int(f.readline()) # n
numOfSub=int(f.readline()) # m
num=0
minWeight=500001
minCover=[]
subsetList=[]
# Loop to read through file and set up the data structures
# to hold all the values
while True:
itemSet=f.readline()
if itemSet == "":
break
else:
weight=int(f.readline())
arrItems=itemSet.split(" ")
i=0
# Convert each item into an int and delete any \n
for item in arrItems:
if item != "\n":
arrItems[i]=int(item)
i += 1
else:
arrItems.remove("\n")
arrItems.sort()
s=subset(weight, set(arrItems), len(arrItems), num)
subsetList.append(s)
num += 1
# print("---------------------------------------------")
# for s in subsetList:
# print(s.itemSet)
# print("---------------------------------------------")
covers = []
inc = 1
for base in subsetList:
# print()
print("base:", base.setNum)
o = findCover(base, subsetList[inc:len(subsetList)])
if o != None:
print("here!")
covers.append(o)
# print(o.setNum)
inc += 1
for w in covers:
if w.weight < minWeight:
minWeight = w.weight
# if type(s.setNum) == int: continue
# else: minCover = (s.setNum).split(" ").sort()
minCover = w.setNum
print(minWeight)
print(minCover)
# for cov in covers:
# print(cov.itemSet)
# #
|
flexible
|
{
"blob_id": "b865c37623f405f67592d1eabc620d11ff87827e",
"index": 3378,
"step-1": "class subset:\n <mask token>\n\n\n<mask token>\n",
"step-2": "class subset:\n\n def __init__(self, weight, itemSet, size, setNum):\n self.weight = weight\n self.itemSet = itemSet\n self.size = size\n self.setNum = setNum\n\n\ndef findCover(base, arr):\n uniq = []\n uni = []\n if len(base.itemSet) == rangeOfVal:\n return base\n remain = rangeOfVal\n for i in arr:\n if base.itemSet.isdisjoint(i.itemSet) == True:\n uniq.append(i)\n remain = remain - len(i.itemSet)\n addedSub = subset(base.weight + i.weight, base.itemSet.union(i.\n itemSet), base.size + i.size, str(base.setNum) + ' ' + str(\n i.setNum))\n uni.append(addedSub)\n print('added:', addedSub.itemSet)\n if addedSub.size == rangeOfVal:\n return addedSub\n print()\n for j in uni:\n if remain == len(base.itemSet):\n findCover(j, uniq)\n return\n\n\n<mask token>\n",
"step-3": "class subset:\n\n def __init__(self, weight, itemSet, size, setNum):\n self.weight = weight\n self.itemSet = itemSet\n self.size = size\n self.setNum = setNum\n\n\ndef findCover(base, arr):\n uniq = []\n uni = []\n if len(base.itemSet) == rangeOfVal:\n return base\n remain = rangeOfVal\n for i in arr:\n if base.itemSet.isdisjoint(i.itemSet) == True:\n uniq.append(i)\n remain = remain - len(i.itemSet)\n addedSub = subset(base.weight + i.weight, base.itemSet.union(i.\n itemSet), base.size + i.size, str(base.setNum) + ' ' + str(\n i.setNum))\n uni.append(addedSub)\n print('added:', addedSub.itemSet)\n if addedSub.size == rangeOfVal:\n return addedSub\n print()\n for j in uni:\n if remain == len(base.itemSet):\n findCover(j, uniq)\n return\n\n\n<mask token>\nwhile True:\n itemSet = f.readline()\n if itemSet == '':\n break\n else:\n weight = int(f.readline())\n arrItems = itemSet.split(' ')\n i = 0\n for item in arrItems:\n if item != '\\n':\n arrItems[i] = int(item)\n i += 1\n else:\n arrItems.remove('\\n')\n arrItems.sort()\n s = subset(weight, set(arrItems), len(arrItems), num)\n subsetList.append(s)\n num += 1\n<mask token>\nfor base in subsetList:\n print('base:', base.setNum)\n o = findCover(base, subsetList[inc:len(subsetList)])\n if o != None:\n print('here!')\n covers.append(o)\n inc += 1\nfor w in covers:\n if w.weight < minWeight:\n minWeight = w.weight\n minCover = w.setNum\nprint(minWeight)\nprint(minCover)\n",
"step-4": "class subset:\n\n def __init__(self, weight, itemSet, size, setNum):\n self.weight = weight\n self.itemSet = itemSet\n self.size = size\n self.setNum = setNum\n\n\ndef findCover(base, arr):\n uniq = []\n uni = []\n if len(base.itemSet) == rangeOfVal:\n return base\n remain = rangeOfVal\n for i in arr:\n if base.itemSet.isdisjoint(i.itemSet) == True:\n uniq.append(i)\n remain = remain - len(i.itemSet)\n addedSub = subset(base.weight + i.weight, base.itemSet.union(i.\n itemSet), base.size + i.size, str(base.setNum) + ' ' + str(\n i.setNum))\n uni.append(addedSub)\n print('added:', addedSub.itemSet)\n if addedSub.size == rangeOfVal:\n return addedSub\n print()\n for j in uni:\n if remain == len(base.itemSet):\n findCover(j, uniq)\n return\n\n\nfileName = 'Input_attempt3.txt'\nf = open(fileName, 'r')\nrangeOfVal = int(f.readline())\nnumOfSub = int(f.readline())\nnum = 0\nminWeight = 500001\nminCover = []\nsubsetList = []\nwhile True:\n itemSet = f.readline()\n if itemSet == '':\n break\n else:\n weight = int(f.readline())\n arrItems = itemSet.split(' ')\n i = 0\n for item in arrItems:\n if item != '\\n':\n arrItems[i] = int(item)\n i += 1\n else:\n arrItems.remove('\\n')\n arrItems.sort()\n s = subset(weight, set(arrItems), len(arrItems), num)\n subsetList.append(s)\n num += 1\ncovers = []\ninc = 1\nfor base in subsetList:\n print('base:', base.setNum)\n o = findCover(base, subsetList[inc:len(subsetList)])\n if o != None:\n print('here!')\n covers.append(o)\n inc += 1\nfor w in covers:\n if w.weight < minWeight:\n minWeight = w.weight\n minCover = w.setNum\nprint(minWeight)\nprint(minCover)\n",
"step-5": "class subset:\n\tdef __init__(self, weight, itemSet, size, setNum):\n\t\tself.weight = weight\n\t\tself.itemSet = itemSet\n\t\tself.size = size\n\t\tself.setNum = setNum\n\n\ndef findCover(base, arr):\n\tuniq = [] #array that can be union\n\tuni = [] #array has been unionized w/ base\n\tif len(base.itemSet) == rangeOfVal:\n\t\t# print(\"COVER:\", base.itemSet)\n\t\treturn base\n\tremain = rangeOfVal\n\t# Search through arr to find all potential subsets\n\tfor i in arr:\n\t\t# print(\"compare: \", i.itemSet)\n\t\tif base.itemSet.isdisjoint(i.itemSet) == True:\n\t\t\t# Unique array\n\t\t\tuniq.append(i)\n\t\t\tremain = remain - len(i.itemSet)\n\t\t\t# print(\"uniq: \", len(uniq))\n\t\t\taddedSub = subset(base.weight + i.weight,\n\t\t\t\t\t\t\tbase.itemSet.union(i.itemSet),\n\t\t\t\t\t\t\tbase.size + i.size,\n\t\t\t\t\t\t\tstr(base.setNum) + \" \" + str(i.setNum))\n\t\t\t# Union array\n\t\t\tuni.append(addedSub)\n\t\t\tprint(\"added:\", addedSub.itemSet)\n\t\t\tif addedSub.size == rangeOfVal:\n\t\t\t\t# print(\"COVER:\", addedSub.itemSet)\n\t\t\t\treturn addedSub\n\tprint()\n\tfor j in uni:\n\t\t# print(j.setNum)\n\t\tif remain == len(base.itemSet):\n\t\t\tfindCover(j, uniq)\n\t# print(\"_____________________________NONE_______________________________\")\t\t \n\treturn\n\n\n\n# fileName=\"./inputs/input_group115.txt\"\nfileName=\"Input_attempt3.txt\"\nf=open(fileName, \"r\")\n\nrangeOfVal=int(f.readline()) # n\nnumOfSub=int(f.readline()) # m\nnum=0\nminWeight=500001\nminCover=[]\nsubsetList=[]\n# Loop to read through file and set up the data structures\n# to hold all the values\nwhile True:\n\titemSet=f.readline()\n\tif itemSet == \"\":\n\t\tbreak\n\telse:\n\t\tweight=int(f.readline())\n\t\tarrItems=itemSet.split(\" \")\n\t\ti=0\n\t\t# Convert each item into an int and delete any \\n\n\t\tfor item in arrItems:\n\t\t\tif item != \"\\n\":\n\t\t\t\tarrItems[i]=int(item)\n\t\t\t\ti += 
1\n\t\t\telse:\n\t\t\t\tarrItems.remove(\"\\n\")\n\t\tarrItems.sort()\n\t\ts=subset(weight, set(arrItems), len(arrItems), num)\n\t\tsubsetList.append(s)\n\tnum += 1\n\n# print(\"---------------------------------------------\")\n# for s in subsetList:\n# \tprint(s.itemSet)\n# print(\"---------------------------------------------\")\n\ncovers = []\ninc = 1\nfor base in subsetList:\n\t# print()\n\tprint(\"base:\", base.setNum)\n\to = findCover(base, subsetList[inc:len(subsetList)])\n\tif o != None:\n\t\tprint(\"here!\")\n\t\tcovers.append(o)\n\t\t# print(o.setNum)\n\tinc += 1\nfor w in covers:\n\tif w.weight < minWeight:\n\t\tminWeight = w.weight\n\t\t# if type(s.setNum) == int: continue\n\t\t# else: minCover = (s.setNum).split(\" \").sort()\n\t\tminCover = w.setNum\n\nprint(minWeight)\nprint(minCover)\n\n\n# for cov in covers:\n# \tprint(cov.itemSet)\n\n# # \n",
"step-ids": [
1,
3,
4,
5,
6
]
}
|
[
1,
3,
4,
5,
6
] |
<|reserved_special_token_0|>
class Detector(object):
<|reserved_special_token_0|>
def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=
600, max_size=1000, transpose=(2, 0, 1), mean=[102.9801, 115.9465,
122.7717]):
if gpu_id < 0:
caffe.set_mode_cpu()
else:
caffe.set_mode_gpu()
caffe.set_device(gpu_id)
self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)
print('[{name}] Loaded network {model}'.format(name=self.__class__.
__name__, model=caffemodel))
self.scale = scale
self.max_size = max_size
self.transpose = transpose
self.mean = np.array(mean, dtype=np.float32)[None, None, :]
self.classes = CLASSES[dataset]
self.colormap = []
for i in range(len(self.classes)):
self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))
def preprocess(self, im):
im = im.astype(np.float32) - self.mean
short_size, long_size = sorted(im.shape[:2])
factor = min(self.scale / short_size, self.max_size / long_size)
im = cv2.resize(im, None, None, fx=factor, fy=factor)
im = im.transpose(self.transpose)
info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)
return im, info, factor
def detect(self, im):
im, info, factor = self.preprocess(im)
self.net.blobs['data'].reshape(1, *im.shape)
self.net.blobs['data'].data[0, ...] = im
self.net.blobs['im_info'].data[...] = info
dets = self.net.forward()['rcnn_out']
if dets.ndim != 2:
return np.empty((0, 6), dtype=np.float32)
else:
return dets
def demo(self, image):
im = cv2.imread(image)
timer = Timer()
timer.tic()
dets = self.detect(im)
timer.toc()
print('Detection took {:.3f}s for {:d} objects'.format(timer.
total_time, len(dets)))
return self.plot(im, dets)
def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
im = im[:, :, (2, 1, 0)]
ax.imshow(im.astype(np.uint8))
if len(dets) == 0:
return ax
print(dets.shape)
for det in dets:
score = det[1]
if score < thresh:
continue
class_id = int(det[0])
x, y = det[2:4]
w, h = det[4:6] - det[2:4]
rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.
colormap[class_id], linewidth=linewidth)
ax.add_patch(rect)
ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],
score), bbox=dict(facecolor=self.colormap[class_id], alpha=
0.5), fontsize=12, color='white')
return ax
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class Detector(object):
    """Faster R-CNN detector wrapping a Caffe network.

    Loads a trained network once, then offers single-image detection
    (`detect`), a timed file-based demo (`demo`), and matplotlib
    visualization of the resulting boxes (`plot`).
    """

    def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=
        600, max_size=1000, transpose=(2, 0, 1), mean=(102.9801, 115.9465,
        122.7717)):
        """Build the network and cache preprocessing parameters.

        Args:
            prototxt: path to the deploy prototxt describing the net.
            caffemodel: path to the trained weights file.
            gpu_id: GPU device id; a negative value selects CPU mode.
            dataset: key into CLASSES ('voc' or 'coco').
            scale: target length of the shorter image side.
            max_size: upper bound on the longer image side.
            transpose: axis order converting HWC images to network CHW.
            mean: per-channel pixel mean subtracted before resizing
                (was a mutable list default; now an immutable tuple).
        """
        if gpu_id < 0:
            caffe.set_mode_cpu()
        else:
            caffe.set_mode_gpu()
            caffe.set_device(gpu_id)
        self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)
        print('[{name}] Loaded network {model}'.format(name=self.__class__.
            __name__, model=caffemodel))
        self.scale = scale
        self.max_size = max_size
        self.transpose = transpose
        # Broadcastable (1, 1, 3) mean so subtraction works per channel.
        self.mean = np.array(mean, dtype=np.float32)[None, None, :]
        self.classes = CLASSES[dataset]
        # One distinct hue per class for box/label rendering.
        self.colormap = [plt.get_cmap('hsv')(i / len(self.classes))
                         for i in range(len(self.classes))]

    def preprocess(self, im):
        """Mean-subtract, resize, and transpose an HWC image to CHW.

        Returns:
            (im, info, factor): the CHW float32 image, the (h, w, scale)
            array the net's 'im_info' blob expects, and the resize factor.
        """
        im = im.astype(np.float32) - self.mean
        short_size, long_size = sorted(im.shape[:2])
        # Short side targets self.scale, capped by max_size on the long side.
        factor = min(self.scale / short_size, self.max_size / long_size)
        im = cv2.resize(im, None, None, fx=factor, fy=factor)
        im = im.transpose(self.transpose)
        # After the transpose, shape[1:] is (height, width).
        info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)
        return im, info, factor

    def detect(self, im):
        """Run the network on one image.

        Returns:
            (N, 6) float32 detections [class_id, score, x1, y1, x2, y2];
            an empty (0, 6) array when the net produced no valid output.
        """
        im, info, factor = self.preprocess(im)
        self.net.blobs['data'].reshape(1, *im.shape)
        self.net.blobs['data'].data[0, ...] = im
        self.net.blobs['im_info'].data[...] = info
        dets = self.net.forward()['rcnn_out']
        if dets.ndim != 2:
            # A degenerate blob means "no detections"; normalize the shape.
            return np.empty((0, 6), dtype=np.float32)
        return dets

    def demo(self, image):
        """Detect objects in an image file, timing the call, then plot."""
        im = cv2.imread(image)
        timer = Timer()
        timer.tic()
        dets = self.detect(im)
        timer.toc()
        print('Detection took {:.3f}s for {:d} objects'.format(timer.
            total_time, len(dets)))
        return self.plot(im, dets)

    def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):
        """Draw detections with score >= thresh onto matplotlib axes.

        Args:
            im: BGR image the detections refer to.
            dets: (N, 6) array rows [class_id, score, x1, y1, x2, y2].
            thresh: minimum score for a box to be drawn.
            ax: axes to draw on; a new figure/axes is created when None.
            linewidth: rectangle edge width.

        Returns:
            The matplotlib axes holding the rendered image and boxes.
        """
        if ax is None:
            fig = plt.figure()
            ax = fig.add_subplot(1, 1, 1)
        im = im[:, :, (2, 1, 0)]  # BGR -> RGB for matplotlib
        ax.imshow(im.astype(np.uint8))
        if len(dets) == 0:
            return ax
        # (dropped a stray debug print of dets.shape)
        for det in dets:
            score = det[1]
            if score < thresh:
                continue
            class_id = int(det[0])
            x, y = det[2:4]
            w, h = det[4:6] - det[2:4]
            rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.
                colormap[class_id], linewidth=linewidth)
            ax.add_patch(rect)
            ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],
                score), bbox=dict(facecolor=self.colormap[class_id], alpha=
                0.5), fontsize=12, color='white')
        return ax
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Public names exported by `from <module> import *`.
__all__ = ['Detector']
# Class-name tuples keyed by dataset id: 20 PASCAL VOC categories and
# 80 COCO categories. Detector.plot looks names up by the integer class
# id taken from the net output, so tuple order is significant.
CLASSES = dict(voc=('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus',
    'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse',
    'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train',
    'tvmonitor'), coco=('person', 'bicycle', 'car', 'motorcycle',
    'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',
    'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat',
    'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe',
    'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis',
    'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove',
    'skateboard', 'surfboard', 'tennis racket', 'bottle', 'wine glass',
    'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich',
    'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake',
    'chair', 'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',
    'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',
    'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',
    'scissors', 'teddy bear', 'hair drier', 'toothbrush'))
class Detector(object):
    """Faster R-CNN Detector backed by a Caffe network."""

    def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=
        600, max_size=1000, transpose=(2, 0, 1), mean=[102.9801, 115.9465,
        122.7717]):
        if gpu_id < 0:
            caffe.set_mode_cpu()
        else:
            caffe.set_mode_gpu()
            caffe.set_device(gpu_id)
        self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)
        banner = '[{name}] Loaded network {model}'
        print(banner.format(name=self.__class__.__name__, model=caffemodel))
        self.scale = scale
        self.max_size = max_size
        self.transpose = transpose
        # Reshape the per-channel mean so it broadcasts over H x W x C.
        self.mean = np.array(mean, dtype=np.float32).reshape(1, 1, -1)
        self.classes = CLASSES[dataset]
        n_classes = len(self.classes)
        hsv = plt.get_cmap('hsv')
        # One distinct hue per class, used when rendering boxes/labels.
        self.colormap = [hsv(idx / n_classes) for idx in range(n_classes)]

    def preprocess(self, im):
        """Subtract the mean, rescale, and transpose to network layout."""
        shifted = im.astype(np.float32) - self.mean
        short_side, long_side = sorted(shifted.shape[:2])
        ratio = min(self.scale / short_side, self.max_size / long_side)
        resized = cv2.resize(shifted, None, None, fx=ratio, fy=ratio)
        chw = resized.transpose(self.transpose)
        meta = np.array((chw.shape[1], chw.shape[2], ratio), dtype=np.float32)
        return chw, meta, ratio

    def detect(self, im):
        """Forward one image through the net; return (N, 6) detections."""
        blob, meta, _ = self.preprocess(im)
        data = self.net.blobs['data']
        data.reshape(1, *blob.shape)
        data.data[0, ...] = blob
        self.net.blobs['im_info'].data[...] = meta
        out = self.net.forward()['rcnn_out']
        if out.ndim == 2:
            return out
        return np.empty((0, 6), dtype=np.float32)

    def demo(self, image):
        """Read an image file, time a detection pass, and plot the result."""
        frame = cv2.imread(image)
        clock = Timer()
        clock.tic()
        found = self.detect(frame)
        clock.toc()
        print('Detection took {:.3f}s for {:d} objects'.format(
            clock.total_time, len(found)))
        return self.plot(frame, found)

    def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):
        """Render detections above `thresh` and return the matplotlib axes."""
        if ax is None:
            ax = plt.figure().add_subplot(1, 1, 1)
        rgb = im[:, :, (2, 1, 0)]
        ax.imshow(rgb.astype(np.uint8))
        if len(dets) == 0:
            return ax
        print(dets.shape)
        for row in dets:
            confidence = row[1]
            if confidence < thresh:
                continue
            label = int(row[0])
            left, top = row[2:4]
            width, height = row[4:6] - row[2:4]
            color = self.colormap[label]
            box = plt.Rectangle((left, top), width, height, fill=False,
                edgecolor=color, linewidth=linewidth)
            ax.add_patch(box)
            caption = '{:s} {:.3f}'.format(self.classes[label], confidence)
            ax.text(left, top - 2, caption,
                bbox=dict(facecolor=color, alpha=0.5),
                fontsize=12, color='white')
        return ax
<|reserved_special_token_1|>
import sys
import cv2
import numpy as np
import matplotlib.pyplot as plt
from .caffe_path import caffe
from .timer import Timer
# Only Detector is part of this module's public API.
__all__ = ['Detector']
# Dataset-keyed category name tuples (20 for PASCAL VOC, 80 for COCO);
# Detector indexes into the chosen tuple by the class id the net emits.
CLASSES = dict(voc=('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus',
    'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse',
    'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train',
    'tvmonitor'), coco=('person', 'bicycle', 'car', 'motorcycle',
    'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',
    'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat',
    'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe',
    'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis',
    'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove',
    'skateboard', 'surfboard', 'tennis racket', 'bottle', 'wine glass',
    'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich',
    'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake',
    'chair', 'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',
    'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',
    'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',
    'scissors', 'teddy bear', 'hair drier', 'toothbrush'))
class Detector(object):
    """Faster R-CNN detector wrapping a Caffe network.

    Loads a trained network once, then offers single-image detection
    (`detect`), a timed file-based demo (`demo`), and matplotlib
    visualization of the resulting boxes (`plot`).
    """

    def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=
        600, max_size=1000, transpose=(2, 0, 1), mean=(102.9801, 115.9465,
        122.7717)):
        """Build the network and cache preprocessing parameters.

        Args:
            prototxt: path to the deploy prototxt describing the net.
            caffemodel: path to the trained weights file.
            gpu_id: GPU device id; a negative value selects CPU mode.
            dataset: key into CLASSES ('voc' or 'coco').
            scale: target length of the shorter image side.
            max_size: upper bound on the longer image side.
            transpose: axis order converting HWC images to network CHW.
            mean: per-channel pixel mean subtracted before resizing
                (was a mutable list default; now an immutable tuple).
        """
        if gpu_id < 0:
            caffe.set_mode_cpu()
        else:
            caffe.set_mode_gpu()
            caffe.set_device(gpu_id)
        self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)
        print('[{name}] Loaded network {model}'.format(name=self.__class__.
            __name__, model=caffemodel))
        self.scale = scale
        self.max_size = max_size
        self.transpose = transpose
        # Broadcastable (1, 1, 3) mean so subtraction works per channel.
        self.mean = np.array(mean, dtype=np.float32)[None, None, :]
        self.classes = CLASSES[dataset]
        # One distinct hue per class for box/label rendering.
        self.colormap = [plt.get_cmap('hsv')(i / len(self.classes))
                         for i in range(len(self.classes))]

    def preprocess(self, im):
        """Mean-subtract, resize, and transpose an HWC image to CHW.

        Returns:
            (im, info, factor): the CHW float32 image, the (h, w, scale)
            array the net's 'im_info' blob expects, and the resize factor.
        """
        im = im.astype(np.float32) - self.mean
        short_size, long_size = sorted(im.shape[:2])
        # Short side targets self.scale, capped by max_size on the long side.
        factor = min(self.scale / short_size, self.max_size / long_size)
        im = cv2.resize(im, None, None, fx=factor, fy=factor)
        im = im.transpose(self.transpose)
        # After the transpose, shape[1:] is (height, width).
        info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)
        return im, info, factor

    def detect(self, im):
        """Run the network on one image.

        Returns:
            (N, 6) float32 detections [class_id, score, x1, y1, x2, y2];
            an empty (0, 6) array when the net produced no valid output.
        """
        im, info, factor = self.preprocess(im)
        self.net.blobs['data'].reshape(1, *im.shape)
        self.net.blobs['data'].data[0, ...] = im
        self.net.blobs['im_info'].data[...] = info
        dets = self.net.forward()['rcnn_out']
        if dets.ndim != 2:
            # A degenerate blob means "no detections"; normalize the shape.
            return np.empty((0, 6), dtype=np.float32)
        return dets

    def demo(self, image):
        """Detect objects in an image file, timing the call, then plot."""
        im = cv2.imread(image)
        timer = Timer()
        timer.tic()
        dets = self.detect(im)
        timer.toc()
        print('Detection took {:.3f}s for {:d} objects'.format(timer.
            total_time, len(dets)))
        return self.plot(im, dets)

    def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):
        """Draw detections with score >= thresh onto matplotlib axes.

        Args:
            im: BGR image the detections refer to.
            dets: (N, 6) array rows [class_id, score, x1, y1, x2, y2].
            thresh: minimum score for a box to be drawn.
            ax: axes to draw on; a new figure/axes is created when None.
            linewidth: rectangle edge width.

        Returns:
            The matplotlib axes holding the rendered image and boxes.
        """
        if ax is None:
            fig = plt.figure()
            ax = fig.add_subplot(1, 1, 1)
        im = im[:, :, (2, 1, 0)]  # BGR -> RGB for matplotlib
        ax.imshow(im.astype(np.uint8))
        if len(dets) == 0:
            return ax
        # (dropped a stray debug print of dets.shape)
        for det in dets:
            score = det[1]
            if score < thresh:
                continue
            class_id = int(det[0])
            x, y = det[2:4]
            w, h = det[4:6] - det[2:4]
            rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.
                colormap[class_id], linewidth=linewidth)
            ax.add_patch(rect)
            ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],
                score), bbox=dict(facecolor=self.colormap[class_id], alpha=
                0.5), fontsize=12, color='white')
        return ax
<|reserved_special_token_1|>
import sys
import cv2
import numpy as np
import matplotlib.pyplot as plt
from .caffe_path import caffe
from .timer import Timer
# Only Detector is exported from this module.
__all__ = ['Detector']

# Category-name tables keyed by dataset: PASCAL VOC (20 classes) and
# COCO (80 classes). Order matters: Detector.plot indexes the tuple by
# the integer class id taken from the network output.
CLASSES = dict(
    voc = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car',
           'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike',
           'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor'),
    coco = ('person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train',
            'truck', 'boat', 'traffic light', 'fire hydrant', 'stop sign',
            'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep',
            'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella',
            'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard',
            'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard',
            'surfboard', 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork',
            'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich', 'orange',
            'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair',
            'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',
            'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',
            'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',
            'scissors', 'teddy bear', 'hair drier', 'toothbrush')
)
class Detector(object):
    """Faster R-CNN Detector backed by a Caffe network."""

    def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco',
                 scale=600, max_size=1000, transpose=(2, 0, 1),
                 mean=[102.9801, 115.9465, 122.7717]):
        if gpu_id < 0:
            caffe.set_mode_cpu()
        else:
            caffe.set_mode_gpu()
            caffe.set_device(gpu_id)
        self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)
        banner = '[{name}] Loaded network {model}'
        print(banner.format(name=self.__class__.__name__, model=caffemodel))
        self.scale = scale
        self.max_size = max_size
        self.transpose = transpose
        # Reshape the per-channel mean so it broadcasts over H x W x C.
        self.mean = np.array(mean, dtype=np.float32).reshape(1, 1, -1)
        self.classes = CLASSES[dataset]
        n_classes = len(self.classes)
        hsv = plt.get_cmap('hsv')
        # One distinct hue per class, used when rendering boxes/labels.
        self.colormap = [hsv(idx / n_classes) for idx in range(n_classes)]

    def preprocess(self, im):
        """Subtract the mean, rescale, and transpose to network layout."""
        shifted = im.astype(np.float32) - self.mean
        short_side, long_side = sorted(shifted.shape[:2])
        ratio = min(self.scale / short_side, self.max_size / long_side)
        resized = cv2.resize(shifted, None, None, fx=ratio, fy=ratio)
        chw = resized.transpose(self.transpose)
        meta = np.array((chw.shape[1], chw.shape[2], ratio), dtype=np.float32)
        return chw, meta, ratio

    def detect(self, im):
        """Forward one image through the net; return (N, 6) detections."""
        blob, meta, _ = self.preprocess(im)
        data = self.net.blobs['data']
        data.reshape(1, *blob.shape)
        data.data[0, ...] = blob
        self.net.blobs['im_info'].data[...] = meta
        out = self.net.forward()['rcnn_out']
        if out.ndim == 2:
            return out
        return np.empty((0, 6), dtype=np.float32)

    def demo(self, image):
        """Read an image file, time a detection pass, and plot the result."""
        frame = cv2.imread(image)
        clock = Timer()
        clock.tic()
        found = self.detect(frame)
        clock.toc()
        print('Detection took {:.3f}s for {:d} objects'.format(
            clock.total_time, len(found)))
        return self.plot(frame, found)

    def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):
        """Render detections above `thresh` and return the matplotlib axes."""
        if ax is None:
            ax = plt.figure().add_subplot(1, 1, 1)
        rgb = im[:, :, (2, 1, 0)]
        ax.imshow(rgb.astype(np.uint8))
        if len(dets) == 0:
            return ax
        print(dets.shape)
        for row in dets:
            confidence = row[1]
            if confidence < thresh:
                continue
            label = int(row[0])
            left, top = row[2:4]
            width, height = row[4:6] - row[2:4]
            color = self.colormap[label]
            box = plt.Rectangle((left, top), width, height, fill=False,
                edgecolor=color, linewidth=linewidth)
            ax.add_patch(box)
            caption = '{:s} {:.3f}'.format(self.classes[label], confidence)
            ax.text(left, top - 2, caption,
                bbox=dict(facecolor=color, alpha=0.5),
                fontsize=12, color='white')
        return ax
|
flexible
|
{
"blob_id": "de12c6d78c0144978ffc651829364de16930b173",
"index": 2078,
"step-1": "<mask token>\n\n\nclass Detector(object):\n <mask token>\n\n def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=\n 600, max_size=1000, transpose=(2, 0, 1), mean=[102.9801, 115.9465, \n 122.7717]):\n if gpu_id < 0:\n caffe.set_mode_cpu()\n else:\n caffe.set_mode_gpu()\n caffe.set_device(gpu_id)\n self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)\n print('[{name}] Loaded network {model}'.format(name=self.__class__.\n __name__, model=caffemodel))\n self.scale = scale\n self.max_size = max_size\n self.transpose = transpose\n self.mean = np.array(mean, dtype=np.float32)[None, None, :]\n self.classes = CLASSES[dataset]\n self.colormap = []\n for i in range(len(self.classes)):\n self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))\n\n def preprocess(self, im):\n im = im.astype(np.float32) - self.mean\n short_size, long_size = sorted(im.shape[:2])\n factor = min(self.scale / short_size, self.max_size / long_size)\n im = cv2.resize(im, None, None, fx=factor, fy=factor)\n im = im.transpose(self.transpose)\n info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)\n return im, info, factor\n\n def detect(self, im):\n im, info, factor = self.preprocess(im)\n self.net.blobs['data'].reshape(1, *im.shape)\n self.net.blobs['data'].data[0, ...] = im\n self.net.blobs['im_info'].data[...] 
= info\n dets = self.net.forward()['rcnn_out']\n if dets.ndim != 2:\n return np.empty((0, 6), dtype=np.float32)\n else:\n return dets\n\n def demo(self, image):\n im = cv2.imread(image)\n timer = Timer()\n timer.tic()\n dets = self.detect(im)\n timer.toc()\n print('Detection took {:.3f}s for {:d} objects'.format(timer.\n total_time, len(dets)))\n return self.plot(im, dets)\n\n def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):\n if ax is None:\n fig = plt.figure()\n ax = fig.add_subplot(1, 1, 1)\n im = im[:, :, (2, 1, 0)]\n ax.imshow(im.astype(np.uint8))\n if len(dets) == 0:\n return ax\n print(dets.shape)\n for det in dets:\n score = det[1]\n if score < thresh:\n continue\n class_id = int(det[0])\n x, y = det[2:4]\n w, h = det[4:6] - det[2:4]\n rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.\n colormap[class_id], linewidth=linewidth)\n ax.add_patch(rect)\n ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],\n score), bbox=dict(facecolor=self.colormap[class_id], alpha=\n 0.5), fontsize=12, color='white')\n return ax\n",
"step-2": "<mask token>\n\n\nclass Detector(object):\n \"\"\"Faster R-CNN Detector\"\"\"\n\n def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=\n 600, max_size=1000, transpose=(2, 0, 1), mean=[102.9801, 115.9465, \n 122.7717]):\n if gpu_id < 0:\n caffe.set_mode_cpu()\n else:\n caffe.set_mode_gpu()\n caffe.set_device(gpu_id)\n self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)\n print('[{name}] Loaded network {model}'.format(name=self.__class__.\n __name__, model=caffemodel))\n self.scale = scale\n self.max_size = max_size\n self.transpose = transpose\n self.mean = np.array(mean, dtype=np.float32)[None, None, :]\n self.classes = CLASSES[dataset]\n self.colormap = []\n for i in range(len(self.classes)):\n self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))\n\n def preprocess(self, im):\n im = im.astype(np.float32) - self.mean\n short_size, long_size = sorted(im.shape[:2])\n factor = min(self.scale / short_size, self.max_size / long_size)\n im = cv2.resize(im, None, None, fx=factor, fy=factor)\n im = im.transpose(self.transpose)\n info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)\n return im, info, factor\n\n def detect(self, im):\n im, info, factor = self.preprocess(im)\n self.net.blobs['data'].reshape(1, *im.shape)\n self.net.blobs['data'].data[0, ...] = im\n self.net.blobs['im_info'].data[...] 
= info\n dets = self.net.forward()['rcnn_out']\n if dets.ndim != 2:\n return np.empty((0, 6), dtype=np.float32)\n else:\n return dets\n\n def demo(self, image):\n im = cv2.imread(image)\n timer = Timer()\n timer.tic()\n dets = self.detect(im)\n timer.toc()\n print('Detection took {:.3f}s for {:d} objects'.format(timer.\n total_time, len(dets)))\n return self.plot(im, dets)\n\n def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):\n if ax is None:\n fig = plt.figure()\n ax = fig.add_subplot(1, 1, 1)\n im = im[:, :, (2, 1, 0)]\n ax.imshow(im.astype(np.uint8))\n if len(dets) == 0:\n return ax\n print(dets.shape)\n for det in dets:\n score = det[1]\n if score < thresh:\n continue\n class_id = int(det[0])\n x, y = det[2:4]\n w, h = det[4:6] - det[2:4]\n rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.\n colormap[class_id], linewidth=linewidth)\n ax.add_patch(rect)\n ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],\n score), bbox=dict(facecolor=self.colormap[class_id], alpha=\n 0.5), fontsize=12, color='white')\n return ax\n",
"step-3": "<mask token>\n__all__ = ['Detector']\nCLASSES = dict(voc=('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus',\n 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse',\n 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train',\n 'tvmonitor'), coco=('person', 'bicycle', 'car', 'motorcycle',\n 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',\n 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat',\n 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe',\n 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis',\n 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove',\n 'skateboard', 'surfboard', 'tennis racket', 'bottle', 'wine glass',\n 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich',\n 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake',\n 'chair', 'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',\n 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',\n 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',\n 'scissors', 'teddy bear', 'hair drier', 'toothbrush'))\n\n\nclass Detector(object):\n \"\"\"Faster R-CNN Detector\"\"\"\n\n def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=\n 600, max_size=1000, transpose=(2, 0, 1), mean=[102.9801, 115.9465, \n 122.7717]):\n if gpu_id < 0:\n caffe.set_mode_cpu()\n else:\n caffe.set_mode_gpu()\n caffe.set_device(gpu_id)\n self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)\n print('[{name}] Loaded network {model}'.format(name=self.__class__.\n __name__, model=caffemodel))\n self.scale = scale\n self.max_size = max_size\n self.transpose = transpose\n self.mean = np.array(mean, dtype=np.float32)[None, None, :]\n self.classes = CLASSES[dataset]\n self.colormap = []\n for i in range(len(self.classes)):\n self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))\n\n def preprocess(self, im):\n im = 
im.astype(np.float32) - self.mean\n short_size, long_size = sorted(im.shape[:2])\n factor = min(self.scale / short_size, self.max_size / long_size)\n im = cv2.resize(im, None, None, fx=factor, fy=factor)\n im = im.transpose(self.transpose)\n info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)\n return im, info, factor\n\n def detect(self, im):\n im, info, factor = self.preprocess(im)\n self.net.blobs['data'].reshape(1, *im.shape)\n self.net.blobs['data'].data[0, ...] = im\n self.net.blobs['im_info'].data[...] = info\n dets = self.net.forward()['rcnn_out']\n if dets.ndim != 2:\n return np.empty((0, 6), dtype=np.float32)\n else:\n return dets\n\n def demo(self, image):\n im = cv2.imread(image)\n timer = Timer()\n timer.tic()\n dets = self.detect(im)\n timer.toc()\n print('Detection took {:.3f}s for {:d} objects'.format(timer.\n total_time, len(dets)))\n return self.plot(im, dets)\n\n def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):\n if ax is None:\n fig = plt.figure()\n ax = fig.add_subplot(1, 1, 1)\n im = im[:, :, (2, 1, 0)]\n ax.imshow(im.astype(np.uint8))\n if len(dets) == 0:\n return ax\n print(dets.shape)\n for det in dets:\n score = det[1]\n if score < thresh:\n continue\n class_id = int(det[0])\n x, y = det[2:4]\n w, h = det[4:6] - det[2:4]\n rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.\n colormap[class_id], linewidth=linewidth)\n ax.add_patch(rect)\n ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],\n score), bbox=dict(facecolor=self.colormap[class_id], alpha=\n 0.5), fontsize=12, color='white')\n return ax\n",
"step-4": "import sys\nimport cv2\nimport numpy as np\nimport matplotlib.pyplot as plt\nfrom .caffe_path import caffe\nfrom .timer import Timer\n__all__ = ['Detector']\nCLASSES = dict(voc=('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus',\n 'car', 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse',\n 'motorbike', 'person', 'pottedplant', 'sheep', 'sofa', 'train',\n 'tvmonitor'), coco=('person', 'bicycle', 'car', 'motorcycle',\n 'airplane', 'bus', 'train', 'truck', 'boat', 'traffic light',\n 'fire hydrant', 'stop sign', 'parking meter', 'bench', 'bird', 'cat',\n 'dog', 'horse', 'sheep', 'cow', 'elephant', 'bear', 'zebra', 'giraffe',\n 'backpack', 'umbrella', 'handbag', 'tie', 'suitcase', 'frisbee', 'skis',\n 'snowboard', 'sports ball', 'kite', 'baseball bat', 'baseball glove',\n 'skateboard', 'surfboard', 'tennis racket', 'bottle', 'wine glass',\n 'cup', 'fork', 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich',\n 'orange', 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake',\n 'chair', 'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',\n 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',\n 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',\n 'scissors', 'teddy bear', 'hair drier', 'toothbrush'))\n\n\nclass Detector(object):\n \"\"\"Faster R-CNN Detector\"\"\"\n\n def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco', scale=\n 600, max_size=1000, transpose=(2, 0, 1), mean=[102.9801, 115.9465, \n 122.7717]):\n if gpu_id < 0:\n caffe.set_mode_cpu()\n else:\n caffe.set_mode_gpu()\n caffe.set_device(gpu_id)\n self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)\n print('[{name}] Loaded network {model}'.format(name=self.__class__.\n __name__, model=caffemodel))\n self.scale = scale\n self.max_size = max_size\n self.transpose = transpose\n self.mean = np.array(mean, dtype=np.float32)[None, None, :]\n self.classes = CLASSES[dataset]\n self.colormap = []\n for i in 
range(len(self.classes)):\n self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))\n\n def preprocess(self, im):\n im = im.astype(np.float32) - self.mean\n short_size, long_size = sorted(im.shape[:2])\n factor = min(self.scale / short_size, self.max_size / long_size)\n im = cv2.resize(im, None, None, fx=factor, fy=factor)\n im = im.transpose(self.transpose)\n info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)\n return im, info, factor\n\n def detect(self, im):\n im, info, factor = self.preprocess(im)\n self.net.blobs['data'].reshape(1, *im.shape)\n self.net.blobs['data'].data[0, ...] = im\n self.net.blobs['im_info'].data[...] = info\n dets = self.net.forward()['rcnn_out']\n if dets.ndim != 2:\n return np.empty((0, 6), dtype=np.float32)\n else:\n return dets\n\n def demo(self, image):\n im = cv2.imread(image)\n timer = Timer()\n timer.tic()\n dets = self.detect(im)\n timer.toc()\n print('Detection took {:.3f}s for {:d} objects'.format(timer.\n total_time, len(dets)))\n return self.plot(im, dets)\n\n def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):\n if ax is None:\n fig = plt.figure()\n ax = fig.add_subplot(1, 1, 1)\n im = im[:, :, (2, 1, 0)]\n ax.imshow(im.astype(np.uint8))\n if len(dets) == 0:\n return ax\n print(dets.shape)\n for det in dets:\n score = det[1]\n if score < thresh:\n continue\n class_id = int(det[0])\n x, y = det[2:4]\n w, h = det[4:6] - det[2:4]\n rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.\n colormap[class_id], linewidth=linewidth)\n ax.add_patch(rect)\n ax.text(x, y - 2, '{:s} {:.3f}'.format(self.classes[class_id],\n score), bbox=dict(facecolor=self.colormap[class_id], alpha=\n 0.5), fontsize=12, color='white')\n return ax\n",
"step-5": "import sys\nimport cv2\nimport numpy as np\nimport matplotlib.pyplot as plt\n\nfrom .caffe_path import caffe\nfrom .timer import Timer\n\n__all__ = ['Detector']\n\n# VOC Class list\nCLASSES = dict(\n voc = ('aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car',\n 'cat', 'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike',\n 'person', 'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor'),\n coco = ('person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train',\n 'truck', 'boat', 'traffic light', 'fire hydrant', 'stop sign',\n 'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep',\n 'cow', 'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella',\n 'handbag', 'tie', 'suitcase', 'frisbee', 'skis', 'snowboard',\n 'sports ball', 'kite', 'baseball bat', 'baseball glove', 'skateboard',\n 'surfboard', 'tennis racket', 'bottle', 'wine glass', 'cup', 'fork',\n 'knife', 'spoon', 'bowl', 'banana', 'apple', 'sandwich', 'orange',\n 'broccoli', 'carrot', 'hot dog', 'pizza', 'donut', 'cake', 'chair',\n 'couch', 'potted plant', 'bed', 'dining table', 'toilet', 'tv',\n 'laptop', 'mouse', 'remote', 'keyboard', 'cell phone', 'microwave',\n 'oven', 'toaster', 'sink', 'refrigerator', 'book', 'clock', 'vase',\n 'scissors', 'teddy bear', 'hair drier', 'toothbrush')\n)\n\nclass Detector(object):\n \"\"\"Faster R-CNN Detector\"\"\"\n def __init__(self, prototxt, caffemodel, gpu_id, dataset='coco',\n scale=600, max_size=1000, transpose=(2, 0, 1),\n mean=[102.9801, 115.9465, 122.7717]):\n if gpu_id < 0:\n caffe.set_mode_cpu()\n else:\n caffe.set_mode_gpu()\n caffe.set_device(gpu_id)\n self.net = caffe.Net(prototxt, caffe.TEST, weights=caffemodel)\n print('[{name}] Loaded network {model}'.format(\n name=self.__class__.__name__, model=caffemodel))\n\n self.scale = scale\n self.max_size = max_size\n self.transpose = transpose\n self.mean = np.array(mean, dtype=np.float32)[None,None,:]\n self.classes = CLASSES[dataset]\n\n # colormap for 
visualization\n self.colormap = []\n for i in range(len(self.classes)):\n self.colormap.append(plt.get_cmap('hsv')(i / len(self.classes)))\n\n def preprocess(self, im):\n im = im.astype(np.float32) - self.mean\n short_size, long_size = sorted(im.shape[:2])\n factor = min(self.scale/short_size, self.max_size/long_size)\n im = cv2.resize(im, None, None, fx=factor, fy=factor)\n im = im.transpose(self.transpose)\n info = np.array((im.shape[1], im.shape[2], factor), dtype=np.float32)\n return im, info, factor\n\n def detect(self, im):\n im, info, factor = self.preprocess(im)\n self.net.blobs['data'].reshape(1, *(im.shape))\n self.net.blobs['data'].data[0,...] = im\n self.net.blobs['im_info'].data[...] = info\n dets = self.net.forward()['rcnn_out']\n if dets.ndim != 2:\n return np.empty((0,6), dtype=np.float32)\n else:\n return dets\n\n def demo(self, image):\n im = cv2.imread(image)\n timer = Timer()\n timer.tic()\n dets = self.detect(im)\n timer.toc()\n print ('Detection took {:.3f}s for {:d} objects'.format(timer.total_time, len(dets)))\n return self.plot(im, dets)\n \n def plot(self, im, dets, thresh=0, ax=None, linewidth=2.5):\n # create image axes\n if ax is None:\n fig = plt.figure()\n ax = fig.add_subplot(1,1,1)\n im = im[:, :, (2, 1, 0)] # to rgb\n ax.imshow(im.astype(np.uint8))\n if len(dets) == 0:\n return ax\n\n print(dets.shape)\n for det in dets:\n score = det[1]\n if score < thresh:\n continue\n class_id = int(det[0])\n x, y = det[2:4]\n w, h = det[4:6] - det[2:4]\n rect = plt.Rectangle((x, y), w, h, fill=False, edgecolor=self.colormap[class_id], linewidth=linewidth)\n ax.add_patch(rect)\n ax.text(x, y-2, '{:s} {:.3f}'.format(self.classes[class_id], score),\n bbox=dict(facecolor=self.colormap[class_id], alpha=0.5), fontsize=12, color='white')\n return ax",
"step-ids": [
6,
7,
8,
9,
10
]
}
|
[
6,
7,
8,
9,
10
] |
"""
Code for Alexa skill to check PB tracking
"""
from __future__ import print_function
import traceback
import requests
import os
import json
# --------------- Helpers that build all of the responses ----------------------
def build_speechlet_response(title, output, reprompt_text, should_end_session):
    """Assemble the speech/card/reprompt payload for one Alexa response."""
    speech = {'type': 'PlainText', 'text': output}
    card = {
        'type': 'Simple',
        'title': "SessionSpeechlet - " + title,
        'content': "SessionSpeechlet - " + output,
    }
    reprompt = {'outputSpeech': {'type': 'PlainText', 'text': reprompt_text}}
    return {
        'outputSpeech': speech,
        'card': card,
        'reprompt': reprompt,
        'shouldEndSession': should_end_session,
    }
def build_response(session_attributes, speechlet_response):
    """Wrap a speechlet response in the top-level Alexa response envelope."""
    return dict(version='1.0',
                sessionAttributes=session_attributes,
                response=speechlet_response)
# --------------- Functions that control the skill's behavior ------------------
def get_welcome_response():
    """Open the session by asking for the first ten tracking digits.

    The same prompt is used for the reprompt; the session stays open.
    """
    prompt = "Please give first 10 digits of tracking number"
    speechlet = build_speechlet_response(
        "Welcome to PB Parcel Tracker", prompt, prompt, False)
    return build_response({}, speechlet)
def handle_session_end_request():
    """Close the session with a goodbye message (no reprompt)."""
    goodbye = ("Thank you for trying the Alexa Skills Kit sample. "
               "Have a nice day! ")
    speechlet = build_speechlet_response("Session Ended", goodbye, None, True)
    return build_response({}, speechlet)
#----- get tracking ------
def setFirstEleven(intent, session):
    """Capture the first ten tracking digits from the intent slots.

    Joins slots 'One'..'Ten' into a string stored as 'first_ten' in the
    session attributes so the follow-up intent can complete the number.
    On any failure (e.g. a missing slot) it re-prompts the user instead.

    NOTE(review): despite the name, this intent carries ten digits, not
    eleven; the public name is kept for the skill's intent mapping.
    """
    session_attributes = {}
    should_end_session = False
    speech_output = "Now give remaining digits"
    reprompt_text = "Now give the next eleven numbers"
    slot_names = ('One', 'Two', 'Three', 'Four', 'Five',
                  'Six', 'Seven', 'Eight', 'Nine', 'Ten')
    try:
        first_ten = ''.join(str(intent['slots'][name]['value'])
                            for name in slot_names)
        session_attributes['first_ten'] = first_ten
        print("session after adding first ten--->")
        print(session_attributes)
    except Exception:
        # Bug fix: was `traceback.print_tb` — a bare attribute access that
        # never ran; actually print the active exception's traceback.
        traceback.print_exc()
        speech_output = "There was some problem, Please provide first ten digits of the tracking number"
        reprompt_text = "Please say first ten digits of the tracking number"
    return build_response(session_attributes, build_speechlet_response(
        intent['name'], speech_output, reprompt_text, should_end_session))
#----- get tracking ------
def getParcelStatus(intent, session):
    """Complete the tracking number and query Pitney Bowes for parcel status.

    Joins the previously captured digits (session['attributes']['first_ten'])
    with the twelve digits from slots 'Eleven'..'TwentyTwo', then calls the
    PB sandbox tracking API using the OAuth token cached on the session.

    :param intent: Alexa intent dict with slots 'Eleven'..'TwentyTwo'.
    :param session: Alexa session dict; the happy path requires
        'attributes'['first_ten'] and 'access_token' to be present.
    :return: Alexa response dict; ends the session on success.
    """
    session_attributes = {}
    should_end_session = True
    speech_output = "There was some problem in taking your input"
    reprompt_text = "Please say remaining digits of the tracking number"
    try:
        # Missing slots or session keys raise KeyError -> except branch.
        slots = intent['slots']
        remaining = ''.join(slots[name]['value']
                            for name in ('Eleven', 'Twelve', 'Thirteen',
                                         'Fourteen', 'Fifteen', 'Sixteen',
                                         'Seventeen', 'Eighteen', 'Nineteen',
                                         'Twenty', 'TwentyOne', 'TwentyTwo'))
        print("'first_ten' not in session['attributes']--->")
        print('first_ten' not in session['attributes'])
        full_tracking_number = "%s%s" % (session['attributes']['first_ten'],
                                         remaining)
        bearer = "Bearer %s" % (session['access_token'])
        print("USPS FULL Tracking Number ----> %s" % (full_tracking_number))
        url = "https://api-sandbox.pitneybowes.com/shippingservices/v1/tracking/%s?packageIdentifierType=TrackingNumber&carrier=USPS" % (full_tracking_number)
        r = requests.get(url, headers={"Authorization": bearer})
        tracking_response = json.loads(r.content)
        if r.status_code == 200:
            speech_output = "The status of the parcel is " + tracking_response['status']
            reprompt_text = "The status of the parcel is " + tracking_response['status']
        else:
            # API error: speak back the service's own error description.
            speech_output = tracking_response['errors'][0]['errorDescription']
            reprompt_text = tracking_response['errors'][0]['errorDescription']
            print(r.content)
    except Exception:
        # Bug fix: the original evaluated `traceback.print_tb` without
        # calling it (a no-op); print_exc() actually logs the stack trace.
        traceback.print_exc()
        should_end_session = False
        # Distinguish "first half never captured" from "second half garbled".
        # (The original's `'attributes' in session and` clause was redundant.)
        if ('attributes' not in session
                or 'first_ten' not in session['attributes']):
            speech_output = "Please provide only first ten digits of the tracking number"
            reprompt_text = "Please provide only first ten digits of the tracking number"
        else:
            speech_output = "There was some problem, Please say remaining digits of the tracking number"
            reprompt_text = "Please say remaining digits of the tracking number"
    return build_response(session_attributes, build_speechlet_response(
        intent['name'], speech_output, reprompt_text, should_end_session))
# --------------- Events ------------------
def on_session_started(session_started_request, session):
    """Log that a new Alexa session has begun."""
    request_id = session_started_request['requestId']
    session_id = session['sessionId']
    print("on_session_started requestId=" + request_id
          + ", sessionId=" + session_id)
def on_launch(launch_request, session):
    """Handle a bare skill launch (no intent specified)."""
    req_id = launch_request['requestId']
    sess_id = session['sessionId']
    print("on_launch requestId=" + req_id + ", sessionId=" + sess_id)
    # A plain launch gets the greeting that asks for the first ten digits.
    return get_welcome_response()
def oauth_request(session):
    """Fetch a Pitney Bowes sandbox OAuth token and cache it on the session.

    Reads the pre-encoded client credentials from the 'key' environment
    variable. On HTTP 200 the token is stored as session['access_token'];
    any other status leaves the session untouched.
    """
    credentials = "Basic " + os.environ['key']
    token_url = 'https://api-sandbox.pitneybowes.com/oauth/token'
    headers = {"Authorization": credentials,
               "Content-Type": "application/x-www-form-urlencoded"}
    response = requests.post(token_url, headers=headers,
                             data={"grant_type": "client_credentials"})
    print(response.status_code)
    if response.status_code == 200:
        payload = json.loads(response.content)
        print(payload)
        session['access_token'] = payload['access_token']
def on_intent(intent_request, session):
    """Route an IntentRequest to the matching intent handler."""
    print("on_intent requestId=" + intent_request['requestId'] +
          ", sessionId=" + session['sessionId'])
    intent = intent_request['intent']
    intent_name = intent['name']
    # Lazily acquire the PB OAuth token the first time this session
    # reaches an intent.
    if 'access_token' not in session:
        oauth_request(session)
    print(session['access_token'])
    # Dispatch to the skill's intent handlers.
    if intent_name == "Tracking":
        return setFirstEleven(intent, session)
    if intent_name == "TrackingSecond":
        return getParcelStatus(intent, session)
    if intent_name == "AMAZON.HelpIntent":
        return get_welcome_response()
    if intent_name in ("AMAZON.CancelIntent", "AMAZON.StopIntent"):
        return handle_session_end_request()
    raise ValueError("Invalid intent")
def on_session_ended(session_ended_request, session):
    """Log the end of a session.

    Not invoked when the skill itself returns should_end_session=true.
    """
    req_id = session_ended_request['requestId']
    sess_id = session['sessionId']
    print("on_session_ended requestId=" + req_id + ", sessionId=" + sess_id)
    # add cleanup logic here
# --------------- Main handler ------------------
def lambda_handler(event, context):
    """AWS Lambda entry point: dispatch the incoming Alexa request by type.

    The JSON body of the request is provided in the `event` parameter
    (LaunchRequest, IntentRequest, SessionEndedRequest).
    """
    print("event.session.application.applicationId=" +
          event['session']['application']['applicationId'])
    # Optionally pin this skill's application ID here to stop other skills
    # from sending requests to this function:
    # if (event['session']['application']['applicationId'] !=
    #         "amzn1.echo-sdk-ams.app.[unique-value-here]"):
    #     raise ValueError("Invalid Application ID")
    session = event['session']
    request = event['request']
    if session['new']:
        on_session_started({'requestId': request['requestId']}, session)
    request_type = request['type']
    if request_type == "LaunchRequest":
        return on_launch(request, session)
    if request_type == "IntentRequest":
        return on_intent(request, session)
    if request_type == "SessionEndedRequest":
        return on_session_ended(request, session)
|
normal
|
{
"blob_id": "a5ef2adbf85b5ab80c59697340f94bc57d60952e",
"index": 4463,
"step-1": "<mask token>\n\n\ndef build_response(session_attributes, speechlet_response):\n return {'version': '1.0', 'sessionAttributes': session_attributes,\n 'response': speechlet_response}\n\n\ndef get_welcome_response():\n \"\"\" If we wanted to initialize the session to have some attributes we could\n add those here\n \"\"\"\n session_attributes = {}\n card_title = 'Welcome to PB Parcel Tracker'\n speech_output = 'Please give first 10 digits of tracking number'\n reprompt_text = 'Please give first 10 digits of tracking number'\n should_end_session = False\n return build_response(session_attributes, build_speechlet_response(\n card_title, speech_output, reprompt_text, should_end_session))\n\n\ndef handle_session_end_request():\n card_title = 'Session Ended'\n speech_output = (\n 'Thank you for trying the Alexa Skills Kit sample. Have a nice day! ')\n should_end_session = True\n return build_response({}, build_speechlet_response(card_title,\n speech_output, None, should_end_session))\n\n\n<mask token>\n\n\ndef on_session_started(session_started_request, session):\n \"\"\" Called when the session starts \"\"\"\n print('on_session_started requestId=' + session_started_request[\n 'requestId'] + ', sessionId=' + session['sessionId'])\n\n\ndef on_launch(launch_request, session):\n \"\"\" Called when the user launches the skill without specifying what they\n want\n \"\"\"\n print('on_launch requestId=' + launch_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n return get_welcome_response()\n\n\ndef oauth_request(session):\n access_key = os.environ['key']\n access_key_value = 'Basic ' + access_key\n url = 'https://api-sandbox.pitneybowes.com/oauth/token'\n r = requests.post(url, headers={'Authorization': access_key_value,\n 'Content-Type': 'application/x-www-form-urlencoded'}, data={\n 'grant_type': 'client_credentials'})\n print(r.status_code)\n if r.status_code == 200:\n j = json.loads(r.content)\n print(j)\n session['access_token'] = 
j['access_token']\n\n\ndef on_intent(intent_request, session):\n \"\"\" Called when the user specifies an intent for this skill \"\"\"\n print('on_intent requestId=' + intent_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n intent = intent_request['intent']\n intent_name = intent_request['intent']['name']\n if 'access_token' not in session:\n oauth_request(session)\n print(session['access_token'])\n if intent_name == 'Tracking':\n return setFirstEleven(intent, session)\n elif intent_name == 'TrackingSecond':\n return getParcelStatus(intent, session)\n elif intent_name == 'AMAZON.HelpIntent':\n return get_welcome_response()\n elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent':\n return handle_session_end_request()\n else:\n raise ValueError('Invalid intent')\n\n\ndef on_session_ended(session_ended_request, session):\n \"\"\" Called when the user ends the session.\n\n Is not called when the skill returns should_end_session=true\n \"\"\"\n print('on_session_ended requestId=' + session_ended_request['requestId'\n ] + ', sessionId=' + session['sessionId'])\n\n\ndef lambda_handler(event, context):\n \"\"\" Route the incoming request based on type (LaunchRequest, IntentRequest,\n etc.) 
The JSON body of the request is provided in the event parameter.\n \"\"\"\n print('event.session.application.applicationId=' + event['session'][\n 'application']['applicationId'])\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n if event['request']['type'] == 'LaunchRequest':\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == 'IntentRequest':\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == 'SessionEndedRequest':\n return on_session_ended(event['request'], event['session'])\n",
"step-2": "<mask token>\n\n\ndef build_response(session_attributes, speechlet_response):\n return {'version': '1.0', 'sessionAttributes': session_attributes,\n 'response': speechlet_response}\n\n\ndef get_welcome_response():\n \"\"\" If we wanted to initialize the session to have some attributes we could\n add those here\n \"\"\"\n session_attributes = {}\n card_title = 'Welcome to PB Parcel Tracker'\n speech_output = 'Please give first 10 digits of tracking number'\n reprompt_text = 'Please give first 10 digits of tracking number'\n should_end_session = False\n return build_response(session_attributes, build_speechlet_response(\n card_title, speech_output, reprompt_text, should_end_session))\n\n\ndef handle_session_end_request():\n card_title = 'Session Ended'\n speech_output = (\n 'Thank you for trying the Alexa Skills Kit sample. Have a nice day! ')\n should_end_session = True\n return build_response({}, build_speechlet_response(card_title,\n speech_output, None, should_end_session))\n\n\n<mask token>\n\n\ndef getParcelStatus(intent, session):\n session_attributes = {}\n should_end_session = True\n speech_output = 'There was some problem in taking your input'\n reprompt_text = 'Please say remaining digits of the tracking number'\n try:\n tracking_number_11 = intent['slots']['Eleven']['value']\n tracking_number_12 = intent['slots']['Twelve']['value']\n tracking_number_13 = intent['slots']['Thirteen']['value']\n tracking_number_14 = intent['slots']['Fourteen']['value']\n tracking_number_15 = intent['slots']['Fifteen']['value']\n tracking_number_16 = intent['slots']['Sixteen']['value']\n tracking_number_17 = intent['slots']['Seventeen']['value']\n tracking_number_18 = intent['slots']['Eighteen']['value']\n tracking_number_19 = intent['slots']['Nineteen']['value']\n tracking_number_20 = intent['slots']['Twenty']['value']\n tracking_number_21 = intent['slots']['TwentyOne']['value']\n tracking_number_22 = intent['slots']['TwentyTwo']['value']\n tracking_number = 
'%s%s%s%s%s%s%s%s%s%s%s%s' % (tracking_number_11,\n tracking_number_12, tracking_number_13, tracking_number_14,\n tracking_number_15, tracking_number_16, tracking_number_17,\n tracking_number_18, tracking_number_19, tracking_number_20,\n tracking_number_21, tracking_number_22)\n print(\"'first_ten' not in session['attributes']--->\")\n print('first_ten' not in session['attributes'])\n full_tracking_number = '%s%s' % (session['attributes']['first_ten'],\n tracking_number)\n bearer = 'Bearer %s' % session['access_token']\n print('USPS FULL Tracking Number ----> %s' % full_tracking_number)\n url = (\n 'https://api-sandbox.pitneybowes.com/shippingservices/v1/tracking/%s?packageIdentifierType=TrackingNumber&carrier=USPS'\n % full_tracking_number)\n r = requests.get(url, headers={'Authorization': bearer})\n tracking_response = {}\n tracking_response = json.loads(r.content)\n if r.status_code == 200:\n speech_output = 'The status of the parcel is ' + tracking_response[\n 'status']\n reprompt_text = 'The status of the parcel is ' + tracking_response[\n 'status']\n else:\n speech_output = tracking_response['errors'][0]['errorDescription']\n reprompt_text = tracking_response['errors'][0]['errorDescription']\n print(r.content)\n except Exception as app_exception:\n traceback.print_tb\n should_end_session = False\n if ('attributes' not in session or 'attributes' in session and \n 'first_ten' not in session['attributes']):\n speech_output = (\n 'Please provide only first ten digits of the tracking number')\n reprompt_text = (\n 'Please provide only first ten digits of the tracking number')\n else:\n speech_output = (\n 'There was some problem, Please say remaining digits of the tracking number'\n )\n reprompt_text = (\n 'Please say remaining digits of the tracking number')\n return build_response(session_attributes, build_speechlet_response(\n intent['name'], speech_output, reprompt_text, should_end_session))\n\n\ndef on_session_started(session_started_request, session):\n 
\"\"\" Called when the session starts \"\"\"\n print('on_session_started requestId=' + session_started_request[\n 'requestId'] + ', sessionId=' + session['sessionId'])\n\n\ndef on_launch(launch_request, session):\n \"\"\" Called when the user launches the skill without specifying what they\n want\n \"\"\"\n print('on_launch requestId=' + launch_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n return get_welcome_response()\n\n\ndef oauth_request(session):\n access_key = os.environ['key']\n access_key_value = 'Basic ' + access_key\n url = 'https://api-sandbox.pitneybowes.com/oauth/token'\n r = requests.post(url, headers={'Authorization': access_key_value,\n 'Content-Type': 'application/x-www-form-urlencoded'}, data={\n 'grant_type': 'client_credentials'})\n print(r.status_code)\n if r.status_code == 200:\n j = json.loads(r.content)\n print(j)\n session['access_token'] = j['access_token']\n\n\ndef on_intent(intent_request, session):\n \"\"\" Called when the user specifies an intent for this skill \"\"\"\n print('on_intent requestId=' + intent_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n intent = intent_request['intent']\n intent_name = intent_request['intent']['name']\n if 'access_token' not in session:\n oauth_request(session)\n print(session['access_token'])\n if intent_name == 'Tracking':\n return setFirstEleven(intent, session)\n elif intent_name == 'TrackingSecond':\n return getParcelStatus(intent, session)\n elif intent_name == 'AMAZON.HelpIntent':\n return get_welcome_response()\n elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent':\n return handle_session_end_request()\n else:\n raise ValueError('Invalid intent')\n\n\ndef on_session_ended(session_ended_request, session):\n \"\"\" Called when the user ends the session.\n\n Is not called when the skill returns should_end_session=true\n \"\"\"\n print('on_session_ended requestId=' + session_ended_request['requestId'\n ] + ', sessionId=' + 
session['sessionId'])\n\n\ndef lambda_handler(event, context):\n \"\"\" Route the incoming request based on type (LaunchRequest, IntentRequest,\n etc.) The JSON body of the request is provided in the event parameter.\n \"\"\"\n print('event.session.application.applicationId=' + event['session'][\n 'application']['applicationId'])\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n if event['request']['type'] == 'LaunchRequest':\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == 'IntentRequest':\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == 'SessionEndedRequest':\n return on_session_ended(event['request'], event['session'])\n",
"step-3": "<mask token>\n\n\ndef build_speechlet_response(title, output, reprompt_text, should_end_session):\n return {'outputSpeech': {'type': 'PlainText', 'text': output}, 'card':\n {'type': 'Simple', 'title': 'SessionSpeechlet - ' + title,\n 'content': 'SessionSpeechlet - ' + output}, 'reprompt': {\n 'outputSpeech': {'type': 'PlainText', 'text': reprompt_text}},\n 'shouldEndSession': should_end_session}\n\n\ndef build_response(session_attributes, speechlet_response):\n return {'version': '1.0', 'sessionAttributes': session_attributes,\n 'response': speechlet_response}\n\n\ndef get_welcome_response():\n \"\"\" If we wanted to initialize the session to have some attributes we could\n add those here\n \"\"\"\n session_attributes = {}\n card_title = 'Welcome to PB Parcel Tracker'\n speech_output = 'Please give first 10 digits of tracking number'\n reprompt_text = 'Please give first 10 digits of tracking number'\n should_end_session = False\n return build_response(session_attributes, build_speechlet_response(\n card_title, speech_output, reprompt_text, should_end_session))\n\n\ndef handle_session_end_request():\n card_title = 'Session Ended'\n speech_output = (\n 'Thank you for trying the Alexa Skills Kit sample. Have a nice day! 
')\n should_end_session = True\n return build_response({}, build_speechlet_response(card_title,\n speech_output, None, should_end_session))\n\n\ndef setFirstEleven(intent, session):\n session_attributes = {}\n should_end_session = False\n speech_output = 'Now give remaining digits'\n reprompt_text = 'Now give the next eleven numbers'\n try:\n tracking_number_1 = intent['slots']['One']['value']\n tracking_number_2 = intent['slots']['Two']['value']\n tracking_number_3 = intent['slots']['Three']['value']\n tracking_number_4 = intent['slots']['Four']['value']\n tracking_number_5 = intent['slots']['Five']['value']\n tracking_number_6 = intent['slots']['Six']['value']\n tracking_number_7 = intent['slots']['Seven']['value']\n tracking_number_8 = intent['slots']['Eight']['value']\n tracking_number_9 = intent['slots']['Nine']['value']\n tracking_number_10 = intent['slots']['Ten']['value']\n first_ten = '%s%s%s%s%s%s%s%s%s%s' % (tracking_number_1,\n tracking_number_2, tracking_number_3, tracking_number_4,\n tracking_number_5, tracking_number_6, tracking_number_7,\n tracking_number_8, tracking_number_9, tracking_number_10)\n session_attributes['first_ten'] = first_ten\n print('session after adding first ten--->')\n print(session_attributes)\n except Exception as app_exception:\n traceback.print_tb\n speech_output = (\n 'There was some problem, Please provide first ten digits of the tracking number'\n )\n reprompt_text = 'Please say first ten digits of the tracking number'\n return build_response(session_attributes, build_speechlet_response(\n intent['name'], speech_output, reprompt_text, should_end_session))\n\n\ndef getParcelStatus(intent, session):\n session_attributes = {}\n should_end_session = True\n speech_output = 'There was some problem in taking your input'\n reprompt_text = 'Please say remaining digits of the tracking number'\n try:\n tracking_number_11 = intent['slots']['Eleven']['value']\n tracking_number_12 = intent['slots']['Twelve']['value']\n 
tracking_number_13 = intent['slots']['Thirteen']['value']\n tracking_number_14 = intent['slots']['Fourteen']['value']\n tracking_number_15 = intent['slots']['Fifteen']['value']\n tracking_number_16 = intent['slots']['Sixteen']['value']\n tracking_number_17 = intent['slots']['Seventeen']['value']\n tracking_number_18 = intent['slots']['Eighteen']['value']\n tracking_number_19 = intent['slots']['Nineteen']['value']\n tracking_number_20 = intent['slots']['Twenty']['value']\n tracking_number_21 = intent['slots']['TwentyOne']['value']\n tracking_number_22 = intent['slots']['TwentyTwo']['value']\n tracking_number = '%s%s%s%s%s%s%s%s%s%s%s%s' % (tracking_number_11,\n tracking_number_12, tracking_number_13, tracking_number_14,\n tracking_number_15, tracking_number_16, tracking_number_17,\n tracking_number_18, tracking_number_19, tracking_number_20,\n tracking_number_21, tracking_number_22)\n print(\"'first_ten' not in session['attributes']--->\")\n print('first_ten' not in session['attributes'])\n full_tracking_number = '%s%s' % (session['attributes']['first_ten'],\n tracking_number)\n bearer = 'Bearer %s' % session['access_token']\n print('USPS FULL Tracking Number ----> %s' % full_tracking_number)\n url = (\n 'https://api-sandbox.pitneybowes.com/shippingservices/v1/tracking/%s?packageIdentifierType=TrackingNumber&carrier=USPS'\n % full_tracking_number)\n r = requests.get(url, headers={'Authorization': bearer})\n tracking_response = {}\n tracking_response = json.loads(r.content)\n if r.status_code == 200:\n speech_output = 'The status of the parcel is ' + tracking_response[\n 'status']\n reprompt_text = 'The status of the parcel is ' + tracking_response[\n 'status']\n else:\n speech_output = tracking_response['errors'][0]['errorDescription']\n reprompt_text = tracking_response['errors'][0]['errorDescription']\n print(r.content)\n except Exception as app_exception:\n traceback.print_tb\n should_end_session = False\n if ('attributes' not in session or 'attributes' in 
session and \n 'first_ten' not in session['attributes']):\n speech_output = (\n 'Please provide only first ten digits of the tracking number')\n reprompt_text = (\n 'Please provide only first ten digits of the tracking number')\n else:\n speech_output = (\n 'There was some problem, Please say remaining digits of the tracking number'\n )\n reprompt_text = (\n 'Please say remaining digits of the tracking number')\n return build_response(session_attributes, build_speechlet_response(\n intent['name'], speech_output, reprompt_text, should_end_session))\n\n\ndef on_session_started(session_started_request, session):\n \"\"\" Called when the session starts \"\"\"\n print('on_session_started requestId=' + session_started_request[\n 'requestId'] + ', sessionId=' + session['sessionId'])\n\n\ndef on_launch(launch_request, session):\n \"\"\" Called when the user launches the skill without specifying what they\n want\n \"\"\"\n print('on_launch requestId=' + launch_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n return get_welcome_response()\n\n\ndef oauth_request(session):\n access_key = os.environ['key']\n access_key_value = 'Basic ' + access_key\n url = 'https://api-sandbox.pitneybowes.com/oauth/token'\n r = requests.post(url, headers={'Authorization': access_key_value,\n 'Content-Type': 'application/x-www-form-urlencoded'}, data={\n 'grant_type': 'client_credentials'})\n print(r.status_code)\n if r.status_code == 200:\n j = json.loads(r.content)\n print(j)\n session['access_token'] = j['access_token']\n\n\ndef on_intent(intent_request, session):\n \"\"\" Called when the user specifies an intent for this skill \"\"\"\n print('on_intent requestId=' + intent_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n intent = intent_request['intent']\n intent_name = intent_request['intent']['name']\n if 'access_token' not in session:\n oauth_request(session)\n print(session['access_token'])\n if intent_name == 'Tracking':\n return setFirstEleven(intent, 
session)\n elif intent_name == 'TrackingSecond':\n return getParcelStatus(intent, session)\n elif intent_name == 'AMAZON.HelpIntent':\n return get_welcome_response()\n elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent':\n return handle_session_end_request()\n else:\n raise ValueError('Invalid intent')\n\n\ndef on_session_ended(session_ended_request, session):\n \"\"\" Called when the user ends the session.\n\n Is not called when the skill returns should_end_session=true\n \"\"\"\n print('on_session_ended requestId=' + session_ended_request['requestId'\n ] + ', sessionId=' + session['sessionId'])\n\n\ndef lambda_handler(event, context):\n \"\"\" Route the incoming request based on type (LaunchRequest, IntentRequest,\n etc.) The JSON body of the request is provided in the event parameter.\n \"\"\"\n print('event.session.application.applicationId=' + event['session'][\n 'application']['applicationId'])\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n if event['request']['type'] == 'LaunchRequest':\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == 'IntentRequest':\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == 'SessionEndedRequest':\n return on_session_ended(event['request'], event['session'])\n",
"step-4": "<mask token>\nfrom __future__ import print_function\nimport traceback\nimport requests\nimport os\nimport json\n\n\ndef build_speechlet_response(title, output, reprompt_text, should_end_session):\n return {'outputSpeech': {'type': 'PlainText', 'text': output}, 'card':\n {'type': 'Simple', 'title': 'SessionSpeechlet - ' + title,\n 'content': 'SessionSpeechlet - ' + output}, 'reprompt': {\n 'outputSpeech': {'type': 'PlainText', 'text': reprompt_text}},\n 'shouldEndSession': should_end_session}\n\n\ndef build_response(session_attributes, speechlet_response):\n return {'version': '1.0', 'sessionAttributes': session_attributes,\n 'response': speechlet_response}\n\n\ndef get_welcome_response():\n \"\"\" If we wanted to initialize the session to have some attributes we could\n add those here\n \"\"\"\n session_attributes = {}\n card_title = 'Welcome to PB Parcel Tracker'\n speech_output = 'Please give first 10 digits of tracking number'\n reprompt_text = 'Please give first 10 digits of tracking number'\n should_end_session = False\n return build_response(session_attributes, build_speechlet_response(\n card_title, speech_output, reprompt_text, should_end_session))\n\n\ndef handle_session_end_request():\n card_title = 'Session Ended'\n speech_output = (\n 'Thank you for trying the Alexa Skills Kit sample. Have a nice day! 
')\n should_end_session = True\n return build_response({}, build_speechlet_response(card_title,\n speech_output, None, should_end_session))\n\n\ndef setFirstEleven(intent, session):\n session_attributes = {}\n should_end_session = False\n speech_output = 'Now give remaining digits'\n reprompt_text = 'Now give the next eleven numbers'\n try:\n tracking_number_1 = intent['slots']['One']['value']\n tracking_number_2 = intent['slots']['Two']['value']\n tracking_number_3 = intent['slots']['Three']['value']\n tracking_number_4 = intent['slots']['Four']['value']\n tracking_number_5 = intent['slots']['Five']['value']\n tracking_number_6 = intent['slots']['Six']['value']\n tracking_number_7 = intent['slots']['Seven']['value']\n tracking_number_8 = intent['slots']['Eight']['value']\n tracking_number_9 = intent['slots']['Nine']['value']\n tracking_number_10 = intent['slots']['Ten']['value']\n first_ten = '%s%s%s%s%s%s%s%s%s%s' % (tracking_number_1,\n tracking_number_2, tracking_number_3, tracking_number_4,\n tracking_number_5, tracking_number_6, tracking_number_7,\n tracking_number_8, tracking_number_9, tracking_number_10)\n session_attributes['first_ten'] = first_ten\n print('session after adding first ten--->')\n print(session_attributes)\n except Exception as app_exception:\n traceback.print_tb\n speech_output = (\n 'There was some problem, Please provide first ten digits of the tracking number'\n )\n reprompt_text = 'Please say first ten digits of the tracking number'\n return build_response(session_attributes, build_speechlet_response(\n intent['name'], speech_output, reprompt_text, should_end_session))\n\n\ndef getParcelStatus(intent, session):\n session_attributes = {}\n should_end_session = True\n speech_output = 'There was some problem in taking your input'\n reprompt_text = 'Please say remaining digits of the tracking number'\n try:\n tracking_number_11 = intent['slots']['Eleven']['value']\n tracking_number_12 = intent['slots']['Twelve']['value']\n 
tracking_number_13 = intent['slots']['Thirteen']['value']\n tracking_number_14 = intent['slots']['Fourteen']['value']\n tracking_number_15 = intent['slots']['Fifteen']['value']\n tracking_number_16 = intent['slots']['Sixteen']['value']\n tracking_number_17 = intent['slots']['Seventeen']['value']\n tracking_number_18 = intent['slots']['Eighteen']['value']\n tracking_number_19 = intent['slots']['Nineteen']['value']\n tracking_number_20 = intent['slots']['Twenty']['value']\n tracking_number_21 = intent['slots']['TwentyOne']['value']\n tracking_number_22 = intent['slots']['TwentyTwo']['value']\n tracking_number = '%s%s%s%s%s%s%s%s%s%s%s%s' % (tracking_number_11,\n tracking_number_12, tracking_number_13, tracking_number_14,\n tracking_number_15, tracking_number_16, tracking_number_17,\n tracking_number_18, tracking_number_19, tracking_number_20,\n tracking_number_21, tracking_number_22)\n print(\"'first_ten' not in session['attributes']--->\")\n print('first_ten' not in session['attributes'])\n full_tracking_number = '%s%s' % (session['attributes']['first_ten'],\n tracking_number)\n bearer = 'Bearer %s' % session['access_token']\n print('USPS FULL Tracking Number ----> %s' % full_tracking_number)\n url = (\n 'https://api-sandbox.pitneybowes.com/shippingservices/v1/tracking/%s?packageIdentifierType=TrackingNumber&carrier=USPS'\n % full_tracking_number)\n r = requests.get(url, headers={'Authorization': bearer})\n tracking_response = {}\n tracking_response = json.loads(r.content)\n if r.status_code == 200:\n speech_output = 'The status of the parcel is ' + tracking_response[\n 'status']\n reprompt_text = 'The status of the parcel is ' + tracking_response[\n 'status']\n else:\n speech_output = tracking_response['errors'][0]['errorDescription']\n reprompt_text = tracking_response['errors'][0]['errorDescription']\n print(r.content)\n except Exception as app_exception:\n traceback.print_tb\n should_end_session = False\n if ('attributes' not in session or 'attributes' in 
session and \n 'first_ten' not in session['attributes']):\n speech_output = (\n 'Please provide only first ten digits of the tracking number')\n reprompt_text = (\n 'Please provide only first ten digits of the tracking number')\n else:\n speech_output = (\n 'There was some problem, Please say remaining digits of the tracking number'\n )\n reprompt_text = (\n 'Please say remaining digits of the tracking number')\n return build_response(session_attributes, build_speechlet_response(\n intent['name'], speech_output, reprompt_text, should_end_session))\n\n\ndef on_session_started(session_started_request, session):\n \"\"\" Called when the session starts \"\"\"\n print('on_session_started requestId=' + session_started_request[\n 'requestId'] + ', sessionId=' + session['sessionId'])\n\n\ndef on_launch(launch_request, session):\n \"\"\" Called when the user launches the skill without specifying what they\n want\n \"\"\"\n print('on_launch requestId=' + launch_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n return get_welcome_response()\n\n\ndef oauth_request(session):\n access_key = os.environ['key']\n access_key_value = 'Basic ' + access_key\n url = 'https://api-sandbox.pitneybowes.com/oauth/token'\n r = requests.post(url, headers={'Authorization': access_key_value,\n 'Content-Type': 'application/x-www-form-urlencoded'}, data={\n 'grant_type': 'client_credentials'})\n print(r.status_code)\n if r.status_code == 200:\n j = json.loads(r.content)\n print(j)\n session['access_token'] = j['access_token']\n\n\ndef on_intent(intent_request, session):\n \"\"\" Called when the user specifies an intent for this skill \"\"\"\n print('on_intent requestId=' + intent_request['requestId'] +\n ', sessionId=' + session['sessionId'])\n intent = intent_request['intent']\n intent_name = intent_request['intent']['name']\n if 'access_token' not in session:\n oauth_request(session)\n print(session['access_token'])\n if intent_name == 'Tracking':\n return setFirstEleven(intent, 
session)\n elif intent_name == 'TrackingSecond':\n return getParcelStatus(intent, session)\n elif intent_name == 'AMAZON.HelpIntent':\n return get_welcome_response()\n elif intent_name == 'AMAZON.CancelIntent' or intent_name == 'AMAZON.StopIntent':\n return handle_session_end_request()\n else:\n raise ValueError('Invalid intent')\n\n\ndef on_session_ended(session_ended_request, session):\n \"\"\" Called when the user ends the session.\n\n Is not called when the skill returns should_end_session=true\n \"\"\"\n print('on_session_ended requestId=' + session_ended_request['requestId'\n ] + ', sessionId=' + session['sessionId'])\n\n\ndef lambda_handler(event, context):\n \"\"\" Route the incoming request based on type (LaunchRequest, IntentRequest,\n etc.) The JSON body of the request is provided in the event parameter.\n \"\"\"\n print('event.session.application.applicationId=' + event['session'][\n 'application']['applicationId'])\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n if event['request']['type'] == 'LaunchRequest':\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == 'IntentRequest':\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == 'SessionEndedRequest':\n return on_session_ended(event['request'], event['session'])\n",
"step-5": "\"\"\"\nCode for Alexa skill to check PB tracking\n\"\"\"\n\nfrom __future__ import print_function\nimport traceback\nimport requests\nimport os\nimport json\n\n\n# --------------- Helpers that build all of the responses ----------------------\n\ndef build_speechlet_response(title, output, reprompt_text, should_end_session):\n return {\n 'outputSpeech': {\n 'type': 'PlainText',\n 'text': output\n },\n 'card': {\n 'type': 'Simple',\n 'title': \"SessionSpeechlet - \" + title,\n 'content': \"SessionSpeechlet - \" + output\n },\n 'reprompt': {\n 'outputSpeech': {\n 'type': 'PlainText',\n 'text': reprompt_text\n }\n },\n 'shouldEndSession': should_end_session\n }\n\n\ndef build_response(session_attributes, speechlet_response):\n return {\n 'version': '1.0',\n 'sessionAttributes': session_attributes,\n 'response': speechlet_response\n }\n\n\n# --------------- Functions that control the skill's behavior ------------------\n\ndef get_welcome_response():\n \"\"\" If we wanted to initialize the session to have some attributes we could\n add those here\n \"\"\"\n\n session_attributes = {}\n card_title = \"Welcome to PB Parcel Tracker\"\n speech_output = \"Please give first 10 digits of tracking number\"\n # If the user either does not reply to the welcome message or says something\n # that is not understood, they will be prompted again with this text.\n reprompt_text = \"Please give first 10 digits of tracking number\"\n should_end_session = False\n return build_response(session_attributes, build_speechlet_response(\n card_title, speech_output, reprompt_text, should_end_session))\n\n\ndef handle_session_end_request():\n card_title = \"Session Ended\"\n speech_output = \"Thank you for trying the Alexa Skills Kit sample. \" \\\n \"Have a nice day! 
\"\n # Setting this to true ends the session and exits the skill.\n should_end_session = True\n return build_response({}, build_speechlet_response(\n card_title, speech_output, None, should_end_session))\n\n#----- get tracking ------\n\ndef setFirstEleven(intent, session):\n session_attributes = {}\n should_end_session = False\n speech_output = \"Now give remaining digits\"\n reprompt_text = \"Now give the next eleven numbers\"\n try:\n tracking_number_1 = intent['slots']['One']['value']\n tracking_number_2 = intent['slots']['Two']['value']\n tracking_number_3 = intent['slots']['Three']['value']\n tracking_number_4 = intent['slots']['Four']['value']\n tracking_number_5 = intent['slots']['Five']['value']\n tracking_number_6 = intent['slots']['Six']['value']\n tracking_number_7 = intent['slots']['Seven']['value']\n tracking_number_8 = intent['slots']['Eight']['value']\n tracking_number_9 = intent['slots']['Nine']['value']\n tracking_number_10 = intent['slots']['Ten']['value']\n first_ten = \"%s%s%s%s%s%s%s%s%s%s\" % (tracking_number_1, tracking_number_2,tracking_number_3, tracking_number_4,tracking_number_5, tracking_number_6,tracking_number_7, tracking_number_8,tracking_number_9, tracking_number_10)\n session_attributes['first_ten'] = first_ten\n print(\"session after adding first ten--->\")\n print(session_attributes)\n except Exception as app_exception:\n traceback.print_tb\n speech_output = \"There was some problem, Please provide first ten digits of the tracking number\"\n reprompt_text = \"Please say first ten digits of the tracking number\"\n return build_response(session_attributes, build_speechlet_response(\n intent['name'], speech_output, reprompt_text, should_end_session))\n\n#----- get tracking ------\n\ndef getParcelStatus(intent, session):\n session_attributes = {}\n should_end_session = True\n speech_output = \"There was some problem in taking your input\"\n reprompt_text = \"Please say remaining digits of the tracking number\"\n try:\n 
tracking_number_11= intent['slots']['Eleven']['value']\n tracking_number_12 = intent['slots']['Twelve']['value']\n tracking_number_13 = intent['slots']['Thirteen']['value']\n tracking_number_14 = intent['slots']['Fourteen']['value']\n tracking_number_15 = intent['slots']['Fifteen']['value']\n tracking_number_16 = intent['slots']['Sixteen']['value']\n tracking_number_17 = intent['slots']['Seventeen']['value']\n tracking_number_18 = intent['slots']['Eighteen']['value']\n tracking_number_19 = intent['slots']['Nineteen']['value']\n tracking_number_20 = intent['slots']['Twenty']['value']\n tracking_number_21 = intent['slots']['TwentyOne']['value']\n tracking_number_22 = intent['slots']['TwentyTwo']['value']\n tracking_number = \"%s%s%s%s%s%s%s%s%s%s%s%s\" % (tracking_number_11,tracking_number_12, tracking_number_13, tracking_number_14,tracking_number_15, tracking_number_16,tracking_number_17, tracking_number_18,tracking_number_19, tracking_number_20,tracking_number_21, tracking_number_22)\n print(\"'first_ten' not in session['attributes']--->\")\n print('first_ten' not in session['attributes'])\n full_tracking_number = \"%s%s\" % (session['attributes']['first_ten'], tracking_number)\n bearer = \"Bearer %s\" % (session['access_token'])\n print(\"USPS FULL Tracking Number ----> %s\" % (full_tracking_number))\n url = \"https://api-sandbox.pitneybowes.com/shippingservices/v1/tracking/%s?packageIdentifierType=TrackingNumber&carrier=USPS\" %(full_tracking_number)\n r=requests.get(url, headers={\"Authorization\" : bearer})\n tracking_response = {}\n tracking_response = json.loads(r.content)\n if(r.status_code == 200):\n speech_output = \"The status of the parcel is \"+tracking_response['status']\n reprompt_text = \"The status of the parcel is \"+tracking_response['status']\n else:\n speech_output = tracking_response['errors'][0]['errorDescription']\n reprompt_text = tracking_response['errors'][0]['errorDescription']\n print(r.content)\n except Exception as app_exception:\n 
traceback.print_tb\n should_end_session = False\n if ('attributes' not in session or ('attributes' in session and 'first_ten' not in session['attributes'])):\n speech_output = \"Please provide only first ten digits of the tracking number\"\n reprompt_text = \"Please provide only first ten digits of the tracking number\"\n else:\n speech_output = \"There was some problem, Please say remaining digits of the tracking number\"\n reprompt_text = \"Please say remaining digits of the tracking number\"\n return build_response(session_attributes, build_speechlet_response(\n intent['name'], speech_output, reprompt_text, should_end_session))\n\n# --------------- Events ------------------\n\ndef on_session_started(session_started_request, session):\n \"\"\" Called when the session starts \"\"\"\n print(\"on_session_started requestId=\" + session_started_request['requestId']\n + \", sessionId=\" + session['sessionId'])\n\n\ndef on_launch(launch_request, session):\n \"\"\" Called when the user launches the skill without specifying what they\n want\n \"\"\"\n\n print(\"on_launch requestId=\" + launch_request['requestId'] +\n \", sessionId=\" + session['sessionId'])\n # Dispatch to your skill's launch\n return get_welcome_response()\n\n\ndef oauth_request(session):\n access_key = os.environ['key']\n access_key_value = \"Basic \"+access_key\n url = 'https://api-sandbox.pitneybowes.com/oauth/token'\n r = requests.post(url, headers={\"Authorization\": access_key_value,\n \"Content-Type\": \"application/x-www-form-urlencoded\"},\n data={\"grant_type\": \"client_credentials\"})\n print(r.status_code)\n if(r.status_code == 200):\n j = json.loads(r.content)\n print(j)\n session['access_token'] = j['access_token']\n\n\ndef on_intent(intent_request, session):\n \"\"\" Called when the user specifies an intent for this skill \"\"\"\n\n print(\"on_intent requestId=\" + intent_request['requestId'] +\n \", sessionId=\" + session['sessionId'])\n\n intent = intent_request['intent']\n intent_name 
= intent_request['intent']['name']\n if('access_token' not in session):\n oauth_request(session)\n print(session['access_token'])\n # Dispatch to your skill's intent handlers\n if intent_name == \"Tracking\":\n return setFirstEleven(intent, session)\n elif intent_name == \"TrackingSecond\":\n return getParcelStatus(intent, session)\n elif intent_name == \"AMAZON.HelpIntent\":\n return get_welcome_response()\n elif intent_name == \"AMAZON.CancelIntent\" or intent_name == \"AMAZON.StopIntent\":\n return handle_session_end_request()\n else:\n raise ValueError(\"Invalid intent\")\n\n\ndef on_session_ended(session_ended_request, session):\n \"\"\" Called when the user ends the session.\n\n Is not called when the skill returns should_end_session=true\n \"\"\"\n print(\"on_session_ended requestId=\" + session_ended_request['requestId'] +\n \", sessionId=\" + session['sessionId'])\n # add cleanup logic here\n\n\n# --------------- Main handler ------------------\n\ndef lambda_handler(event, context):\n \"\"\" Route the incoming request based on type (LaunchRequest, IntentRequest,\n etc.) 
The JSON body of the request is provided in the event parameter.\n \"\"\"\n print(\"event.session.application.applicationId=\" +\n event['session']['application']['applicationId'])\n\n \"\"\"\n Uncomment this if statement and populate with your skill's application ID to\n prevent someone else from configuring a skill that sends requests to this\n function.\n \"\"\"\n # if (event['session']['application']['applicationId'] !=\n # \"amzn1.echo-sdk-ams.app.[unique-value-here]\"):\n # raise ValueError(\"Invalid Application ID\")\n\n if event['session']['new']:\n on_session_started({'requestId': event['request']['requestId']},\n event['session'])\n\n if event['request']['type'] == \"LaunchRequest\":\n return on_launch(event['request'], event['session'])\n elif event['request']['type'] == \"IntentRequest\":\n return on_intent(event['request'], event['session'])\n elif event['request']['type'] == \"SessionEndedRequest\":\n return on_session_ended(event['request'], event['session'])\n",
"step-ids": [
9,
10,
12,
13,
14
]
}
|
[
9,
10,
12,
13,
14
] |
# -*- coding: utf-8 -*-
import base64
import logging
from decimal import Decimal
import requests
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from currencies.currencies import decimal_round
from payments.systems import base
from payments.systems.bankusd import display_amount_usd
from payments.systems.base import CommissionCalculationResult
name = _("Neteller")
logo = "neteller.png"
slug = __name__.rsplit(".", 1)[-1]
currencies = ["USD"]
mt4_payment_slug = "NETELLER"
transfer_details = {
"deposit": {
"fee": "3.5% min $1",
"time": _("Within day"),
"min_amount": display_amount_usd(10),
},
"withdraw": {
"fee": _("2.5% min $1 max $30"),
"time": _("Within day"),
"min_amount": display_amount_usd(10),
}
}
templates = {
"deposit": "payments/forms/deposit/neteller.html",
"withdraw": "payments/forms/withdraw/electronic.html",
}
log = logging.getLogger(__name__)
class DepositForm(base.DepositForm):
    """Automatic deposit through the Neteller "transferIn" REST API.

    The customer provides their Neteller account (``purse``) and a
    one-time 6-digit ``secure_id``; the deposit is then charged via the
    Neteller API using OAuth2 client-credentials authentication.
    """

    purse = forms.CharField(max_length=100, label=_("Net account"),
                            help_text=_("Your Neteller's 12-digit Account ID or email address that is "
                                        "associated with their NETELLER account"))
    secure_id = forms.IntegerField(label=_("Secure ID"), help_text=_("Your Neteller's 6-digit Secure ID"))

    # Neteller REST endpoints.
    bill_address = "https://api.neteller.com/v1/transferIn"
    get_token_url = "https://api.neteller.com/v1/oauth2/token?grant_type=client_credentials"
    # 3.5% deposit fee; a $1 floor is applied in _calculate_commission.
    commission_rate = Decimal("0.035")
    MIN_AMOUNT = (10, 'USD')

    @classmethod
    def is_automatic(cls, instance):
        # Deposits are always charged through the API, never manually.
        return True

    def get_neteller_token(self):
        """
        :return: tuple. ('accessToken', 'Auth method'). Example: ("0.AQAAAU3in", "Bearer")
        or None if can't get token.
        """
        headers = {'Content-Type': 'application/json',
                   'Cache-Control': 'no-cache',
                   'Authorization': 'Basic ' + base64.b64encode(
                       settings.NETELLER_MERCHANT_ID + ':' + settings.NETELLER_SECRET_KEY)}

        result = requests.post(self.get_token_url, headers=headers)

        if result.status_code == 200:
            result = result.json()
        else:
            return None

        if result.get("accessToken"):
            return result.get("accessToken"), result.get("tokenType")
        else:
            return None

    def make_request(self):
        """Charge the deposit via the Neteller API.

        :return: ``None`` on success (the payment request is marked payed);
            an error-message string on failure (the request is rolled back
            and the message stored in ``public_comment``).
        """
        import json

        # Neteller expects the ISO currency code "RUB"; we store "RUR" internally.
        currency = {
            "RUR": "RUB"
        }.get(self.instance.currency, self.instance.currency)
        # The API takes the amount in minor units (cents).
        amount = int(decimal_round(self.instance.amount) * 100)
        token_tuple = self.get_neteller_token()

        if not token_tuple:
            return "Can't get the token."

        data = {
            "paymentMethod": {
                "type": "neteller",
                "value": self.instance.purse
            },
            "transaction": {
                "merchantRefId": unicode(self.instance.pk),
                "amount": amount,
                "currency": currency
            },
            "verificationCode": unicode(self.instance.params["secure_id"]),
        }

        headers = {'Content-Type': 'application/json', 'Authorization': token_tuple[1] + " " + token_tuple[0]}

        # Renamed from "request" to avoid shadowing the usual request object name.
        response = requests.post(self.bill_address, data=json.dumps(data), headers=headers)
        response = response.json()

        if response.get("transaction") and response.get("transaction").get("status") == "accepted":
            self.instance.refresh_state()
            self.instance.is_payed = True
            self.instance.params["transaction"] = response.get("transaction").get("id")
            self.instance.save()
            return None
        else:
            error_message = response.get("error").get("message") if response.get("error") else \
                "Automatic payment failed."
            self.instance.is_committed = False
            self.instance.is_payed = False
            self.instance.public_comment = error_message
            self.instance.save()
            return error_message

    @classmethod
    def generate_mt4_comment(cls, payment_request):
        # Comment written into the MT4 trading platform for this deposit.
        return "{NETELLER}[%s]" % payment_request.pk

    def clean(self):
        # Amount/currency limits are validated by the base form. The previous
        # direct lookups of cleaned_data["amount"]/["currency"] were unused and
        # raised KeyError when either field had already failed field validation,
        # so they (and the unused convert_currency import) are removed.
        return super(DepositForm, self).clean()

    def confirmed_response_data(self, request):
        """Run the payment once the user confirms and map the result to (payload, status)."""
        error = self.make_request()
        if error:
            return {'detail': "Error: %s" % error}, 400
        else:
            return {"success": True}, None

    @classmethod
    def _calculate_commission(cls, request, full_commission=False):
        # 3.5% of the amount, but never less than $1.
        commission = request.amount * cls.commission_rate
        min_comm = Decimal("1")
        commission = max(min_comm, commission)
        return CommissionCalculationResult(
            amount=request.amount,
            commission=commission,
            currency=request.currency
        )
class DetailsForm(base.DetailsForm):
    """Payment-details form that relabels the generic ``purse`` field for Neteller."""

    def __init__(self, *args, **kwargs):
        super(DetailsForm, self).__init__(*args, **kwargs)
        purse_field = self.fields["purse"]
        purse_field.label = _("Net account")
        purse_field.help_text = _("Your Neteller's 12-digit Account ID or email address that is "
                                  "associated with their NETELLER account")
class WithdrawForm(base.WithdrawForm):
    """Withdrawal via Neteller: 2.5% fee clamped into the $1..$30 range."""

    MIN_AMOUNT = (10, 'USD')
    commission_rate = Decimal("0.025")

    @classmethod
    def _calculate_commission(cls, request, full_commission=False):
        # Percentage fee first, then clamp into the [$1, $30] corridor.
        fee = request.amount * cls.commission_rate
        lower_bound = Decimal("1")
        upper_bound = Decimal("30")
        if fee < lower_bound:
            fee = lower_bound
        elif fee > upper_bound:
            fee = upper_bound
        return CommissionCalculationResult(
            amount=request.amount,
            commission=fee,
            currency=request.currency
        )
|
normal
|
{
"blob_id": "15c1db535beb115c45aeba433a946255f70fa86e",
"index": 7845,
"step-1": "<mask token>\n\n\nclass DepositForm(base.DepositForm):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def is_automatic(cls, instance):\n return True\n <mask token>\n <mask token>\n <mask token>\n\n def clean(self):\n from platforms.converter import convert_currency\n amount = self.cleaned_data['amount']\n currency = self.cleaned_data['currency']\n return super(DepositForm, self).clean()\n <mask token>\n\n @classmethod\n def _calculate_commission(cls, request, full_commission=False):\n commission = request.amount * cls.commission_rate\n min_comm = Decimal('1')\n commission = max(min_comm, commission)\n return CommissionCalculationResult(amount=request.amount,\n commission=commission, currency=request.currency)\n\n\nclass DetailsForm(base.DetailsForm):\n\n def __init__(self, *args, **kwargs):\n super(DetailsForm, self).__init__(*args, **kwargs)\n self.fields['purse'].label = _('Net account')\n self.fields['purse'].help_text = _(\n \"Your Neteller's 12-digit Account ID or email address that is associated with their NETELLER account\"\n )\n\n\nclass WithdrawForm(base.WithdrawForm):\n MIN_AMOUNT = 10, 'USD'\n commission_rate = Decimal('0.025')\n\n @classmethod\n def _calculate_commission(cls, request, full_commission=False):\n commission = request.amount * cls.commission_rate\n min_comm = Decimal('1')\n max_comm = Decimal('30')\n commission = min(max_comm, max(min_comm, commission))\n return CommissionCalculationResult(amount=request.amount,\n commission=commission, currency=request.currency)\n",
"step-2": "<mask token>\n\n\nclass DepositForm(base.DepositForm):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def is_automatic(cls, instance):\n return True\n <mask token>\n\n def make_request(self):\n import json\n currency = {'RUR': 'RUB'}.get(self.instance.currency, self.instance\n .currency)\n amount = int(decimal_round(self.instance.amount) * 100)\n token_tuple = self.get_neteller_token()\n if not token_tuple:\n return \"Can't get the token.\"\n data = {'paymentMethod': {'type': 'neteller', 'value': self.\n instance.purse}, 'transaction': {'merchantRefId': unicode(self.\n instance.pk), 'amount': amount, 'currency': currency},\n 'verificationCode': unicode(self.instance.params['secure_id'])}\n headers = {'Content-Type': 'application/json', 'Authorization': \n token_tuple[1] + ' ' + token_tuple[0]}\n request = requests.post(self.bill_address, data=json.dumps(data),\n headers=headers)\n request = request.json()\n if request.get('transaction') and request.get('transaction').get(\n 'status') == 'accepted':\n self.instance.refresh_state()\n self.instance.is_payed = True\n self.instance.params['transaction'] = request.get('transaction'\n ).get('id')\n self.instance.save()\n return None\n else:\n error_message = request.get('error').get('message') if request.get(\n 'error') else 'Automatic payment failed.'\n self.instance.is_committed = False\n self.instance.is_payed = False\n self.instance.public_comment = error_message\n self.instance.save()\n return error_message\n <mask token>\n\n def clean(self):\n from platforms.converter import convert_currency\n amount = self.cleaned_data['amount']\n currency = self.cleaned_data['currency']\n return super(DepositForm, self).clean()\n <mask token>\n\n @classmethod\n def _calculate_commission(cls, request, full_commission=False):\n commission = request.amount * cls.commission_rate\n min_comm = Decimal('1')\n commission = max(min_comm, commission)\n return 
CommissionCalculationResult(amount=request.amount,\n commission=commission, currency=request.currency)\n\n\nclass DetailsForm(base.DetailsForm):\n\n def __init__(self, *args, **kwargs):\n super(DetailsForm, self).__init__(*args, **kwargs)\n self.fields['purse'].label = _('Net account')\n self.fields['purse'].help_text = _(\n \"Your Neteller's 12-digit Account ID or email address that is associated with their NETELLER account\"\n )\n\n\nclass WithdrawForm(base.WithdrawForm):\n MIN_AMOUNT = 10, 'USD'\n commission_rate = Decimal('0.025')\n\n @classmethod\n def _calculate_commission(cls, request, full_commission=False):\n commission = request.amount * cls.commission_rate\n min_comm = Decimal('1')\n max_comm = Decimal('30')\n commission = min(max_comm, max(min_comm, commission))\n return CommissionCalculationResult(amount=request.amount,\n commission=commission, currency=request.currency)\n",
"step-3": "<mask token>\n\n\nclass DepositForm(base.DepositForm):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def is_automatic(cls, instance):\n return True\n <mask token>\n\n def make_request(self):\n import json\n currency = {'RUR': 'RUB'}.get(self.instance.currency, self.instance\n .currency)\n amount = int(decimal_round(self.instance.amount) * 100)\n token_tuple = self.get_neteller_token()\n if not token_tuple:\n return \"Can't get the token.\"\n data = {'paymentMethod': {'type': 'neteller', 'value': self.\n instance.purse}, 'transaction': {'merchantRefId': unicode(self.\n instance.pk), 'amount': amount, 'currency': currency},\n 'verificationCode': unicode(self.instance.params['secure_id'])}\n headers = {'Content-Type': 'application/json', 'Authorization': \n token_tuple[1] + ' ' + token_tuple[0]}\n request = requests.post(self.bill_address, data=json.dumps(data),\n headers=headers)\n request = request.json()\n if request.get('transaction') and request.get('transaction').get(\n 'status') == 'accepted':\n self.instance.refresh_state()\n self.instance.is_payed = True\n self.instance.params['transaction'] = request.get('transaction'\n ).get('id')\n self.instance.save()\n return None\n else:\n error_message = request.get('error').get('message') if request.get(\n 'error') else 'Automatic payment failed.'\n self.instance.is_committed = False\n self.instance.is_payed = False\n self.instance.public_comment = error_message\n self.instance.save()\n return error_message\n\n @classmethod\n def generate_mt4_comment(cls, payment_request):\n return '{NETELLER}[%s]' % payment_request.pk\n\n def clean(self):\n from platforms.converter import convert_currency\n amount = self.cleaned_data['amount']\n currency = self.cleaned_data['currency']\n return super(DepositForm, self).clean()\n <mask token>\n\n @classmethod\n def _calculate_commission(cls, request, full_commission=False):\n commission = request.amount * 
cls.commission_rate\n min_comm = Decimal('1')\n commission = max(min_comm, commission)\n return CommissionCalculationResult(amount=request.amount,\n commission=commission, currency=request.currency)\n\n\nclass DetailsForm(base.DetailsForm):\n\n def __init__(self, *args, **kwargs):\n super(DetailsForm, self).__init__(*args, **kwargs)\n self.fields['purse'].label = _('Net account')\n self.fields['purse'].help_text = _(\n \"Your Neteller's 12-digit Account ID or email address that is associated with their NETELLER account\"\n )\n\n\nclass WithdrawForm(base.WithdrawForm):\n MIN_AMOUNT = 10, 'USD'\n commission_rate = Decimal('0.025')\n\n @classmethod\n def _calculate_commission(cls, request, full_commission=False):\n commission = request.amount * cls.commission_rate\n min_comm = Decimal('1')\n max_comm = Decimal('30')\n commission = min(max_comm, max(min_comm, commission))\n return CommissionCalculationResult(amount=request.amount,\n commission=commission, currency=request.currency)\n",
"step-4": "<mask token>\n\n\nclass DepositForm(base.DepositForm):\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n <mask token>\n\n @classmethod\n def is_automatic(cls, instance):\n return True\n\n def get_neteller_token(self):\n \"\"\"\n :return: tuple. ('accessToken', 'Auth method'). Example: (\"0.AQAAAU3in\", \"Bearer\")\n or None if can't get token.\n \"\"\"\n headers = {'Content-Type': 'application/json', 'Cache-Control':\n 'no-cache', 'Authorization': 'Basic ' + base64.b64encode(\n settings.NETELLER_MERCHANT_ID + ':' + settings.NETELLER_SECRET_KEY)\n }\n result = requests.post(self.get_token_url, headers=headers)\n if result.status_code == 200:\n result = result.json()\n else:\n return None\n if result.get('accessToken'):\n return result.get('accessToken'), result.get('tokenType')\n else:\n return None\n\n def make_request(self):\n import json\n currency = {'RUR': 'RUB'}.get(self.instance.currency, self.instance\n .currency)\n amount = int(decimal_round(self.instance.amount) * 100)\n token_tuple = self.get_neteller_token()\n if not token_tuple:\n return \"Can't get the token.\"\n data = {'paymentMethod': {'type': 'neteller', 'value': self.\n instance.purse}, 'transaction': {'merchantRefId': unicode(self.\n instance.pk), 'amount': amount, 'currency': currency},\n 'verificationCode': unicode(self.instance.params['secure_id'])}\n headers = {'Content-Type': 'application/json', 'Authorization': \n token_tuple[1] + ' ' + token_tuple[0]}\n request = requests.post(self.bill_address, data=json.dumps(data),\n headers=headers)\n request = request.json()\n if request.get('transaction') and request.get('transaction').get(\n 'status') == 'accepted':\n self.instance.refresh_state()\n self.instance.is_payed = True\n self.instance.params['transaction'] = request.get('transaction'\n ).get('id')\n self.instance.save()\n return None\n else:\n error_message = request.get('error').get('message') if request.get(\n 'error') else 'Automatic payment 
failed.'\n self.instance.is_committed = False\n self.instance.is_payed = False\n self.instance.public_comment = error_message\n self.instance.save()\n return error_message\n\n @classmethod\n def generate_mt4_comment(cls, payment_request):\n return '{NETELLER}[%s]' % payment_request.pk\n\n def clean(self):\n from platforms.converter import convert_currency\n amount = self.cleaned_data['amount']\n currency = self.cleaned_data['currency']\n return super(DepositForm, self).clean()\n\n def confirmed_response_data(self, request):\n error = self.make_request()\n if error:\n return {'detail': 'Error: %s' % error}, 400\n else:\n return {'success': True}, None\n\n @classmethod\n def _calculate_commission(cls, request, full_commission=False):\n commission = request.amount * cls.commission_rate\n min_comm = Decimal('1')\n commission = max(min_comm, commission)\n return CommissionCalculationResult(amount=request.amount,\n commission=commission, currency=request.currency)\n\n\nclass DetailsForm(base.DetailsForm):\n\n def __init__(self, *args, **kwargs):\n super(DetailsForm, self).__init__(*args, **kwargs)\n self.fields['purse'].label = _('Net account')\n self.fields['purse'].help_text = _(\n \"Your Neteller's 12-digit Account ID or email address that is associated with their NETELLER account\"\n )\n\n\nclass WithdrawForm(base.WithdrawForm):\n MIN_AMOUNT = 10, 'USD'\n commission_rate = Decimal('0.025')\n\n @classmethod\n def _calculate_commission(cls, request, full_commission=False):\n commission = request.amount * cls.commission_rate\n min_comm = Decimal('1')\n max_comm = Decimal('30')\n commission = min(max_comm, max(min_comm, commission))\n return CommissionCalculationResult(amount=request.amount,\n commission=commission, currency=request.currency)\n",
"step-5": "# -*- coding: utf-8 -*-\nimport base64\nimport logging\nfrom decimal import Decimal\n\nimport requests\nfrom django import forms\nfrom django.conf import settings\nfrom django.utils.translation import ugettext_lazy as _\n\nfrom currencies.currencies import decimal_round\nfrom payments.systems import base\nfrom payments.systems.bankusd import display_amount_usd\nfrom payments.systems.base import CommissionCalculationResult\n\nname = _(\"Neteller\")\nlogo = \"neteller.png\"\nslug = __name__.rsplit(\".\", 1)[-1]\ncurrencies = [\"USD\"]\nmt4_payment_slug = \"NETELLER\"\n\ntransfer_details = {\n \"deposit\": {\n \"fee\": \"3.5% min $1\",\n \"time\": _(\"Within day\"),\n \"min_amount\": display_amount_usd(10),\n },\n \"withdraw\": {\n \"fee\": _(\"2.5% min $1 max $30\"),\n \"time\": _(\"Within day\"),\n \"min_amount\": display_amount_usd(10),\n }\n}\n\ntemplates = {\n \"deposit\": \"payments/forms/deposit/neteller.html\",\n \"withdraw\": \"payments/forms/withdraw/electronic.html\",\n}\n\nlog = logging.getLogger(__name__)\n\n\nclass DepositForm(base.DepositForm):\n\n purse = forms.CharField(max_length=100, label=_(\"Net account\"),\n help_text=_(\"Your Neteller's 12-digit Account ID or email address that is \"\n \"associated with their NETELLER account\"))\n secure_id = forms.IntegerField(label=_(\"Secure ID\"), help_text=_(\"Your Neteller's 6-digit Secure ID\"))\n\n bill_address = \"https://api.neteller.com/v1/transferIn\"\n get_token_url = \"https://api.neteller.com/v1/oauth2/token?grant_type=client_credentials\"\n commission_rate = Decimal(\"0.035\")\n MIN_AMOUNT = (10, 'USD')\n\n @classmethod\n def is_automatic(cls, instance):\n return True\n\n def get_neteller_token(self):\n \"\"\"\n :return: tuple. ('accessToken', 'Auth method'). 
Example: (\"0.AQAAAU3in\", \"Bearer\")\n or None if can't get token.\n \"\"\"\n\n headers = {'Content-Type': 'application/json',\n 'Cache-Control': 'no-cache',\n 'Authorization': 'Basic ' + base64.b64encode(\n settings.NETELLER_MERCHANT_ID + ':' + settings.NETELLER_SECRET_KEY)}\n\n\n result = requests.post(self.get_token_url, headers = headers)\n\n if result.status_code == 200:\n result = result.json()\n else:\n return None\n\n if result.get(\"accessToken\"):\n return result.get(\"accessToken\"), result.get(\"tokenType\")\n else:\n return None\n\n def make_request(self):\n import json\n\n currency = {\n \"RUR\": \"RUB\"\n }.get(self.instance.currency, self.instance.currency)\n amount = int(decimal_round(self.instance.amount) * 100)\n token_tuple = self.get_neteller_token()\n\n if not token_tuple:\n return \"Can't get the token.\"\n\n data = {\n \"paymentMethod\": {\n \"type\": \"neteller\",\n \"value\": self.instance.purse\n },\n \"transaction\": {\n \"merchantRefId\": unicode(self.instance.pk),\n \"amount\": amount,\n \"currency\": currency\n },\n \"verificationCode\": unicode(self.instance.params[\"secure_id\"]),\n }\n\n headers = {'Content-Type': 'application/json', 'Authorization': token_tuple[1] + \" \" + token_tuple[0]}\n\n request = requests.post(self.bill_address, data=json.dumps(data), headers=headers)\n\n request = request.json()\n\n if request.get(\"transaction\") and request.get(\"transaction\").get(\"status\") == \"accepted\":\n self.instance.refresh_state()\n self.instance.is_payed = True\n self.instance.params[\"transaction\"] = request.get(\"transaction\").get(\"id\")\n self.instance.save()\n return None\n else:\n error_message = request.get(\"error\").get(\"message\") if request.get(\"error\") else \\\n \"Automatic payment failed.\"\n self.instance.is_committed = False\n self.instance.is_payed = False\n self.instance.public_comment = error_message\n self.instance.save()\n return error_message\n\n @classmethod\n def generate_mt4_comment(cls, 
payment_request):\n return \"{NETELLER}[%s]\" % payment_request.pk\n\n def clean(self):\n from platforms.converter import convert_currency\n amount = self.cleaned_data[\"amount\"]\n currency = self.cleaned_data[\"currency\"]\n return super(DepositForm, self).clean()\n\n def confirmed_response_data(self, request):\n error = self.make_request()\n if error:\n return {'detail': \"Error: %s\" % error}, 400\n else:\n return {\"success\": True}, None\n\n @classmethod\n def _calculate_commission(cls, request, full_commission=False):\n commission = request.amount * cls.commission_rate\n min_comm = Decimal(\"1\")\n commission = max(min_comm, commission)\n return CommissionCalculationResult(\n amount=request.amount,\n commission=commission,\n currency=request.currency\n )\n\nclass DetailsForm(base.DetailsForm):\n\n def __init__(self, *args, **kwargs):\n super(DetailsForm, self).__init__(*args, **kwargs)\n self.fields[\"purse\"].label = _(\"Net account\")\n self.fields[\"purse\"].help_text = _(\"Your Neteller's 12-digit Account ID or email address that is \"\n \"associated with their NETELLER account\")\n\n\nclass WithdrawForm(base.WithdrawForm):\n MIN_AMOUNT = (10, 'USD')\n commission_rate = Decimal(\"0.025\")\n\n @classmethod\n def _calculate_commission(cls, request, full_commission=False):\n commission = request.amount * cls.commission_rate\n min_comm = Decimal(\"1\")\n max_comm = Decimal(\"30\")\n commission = min(max_comm, max(min_comm, commission))\n return CommissionCalculationResult(\n amount=request.amount,\n commission=commission,\n currency=request.currency\n )\n",
"step-ids": [
9,
10,
11,
13,
17
]
}
|
[
9,
10,
11,
13,
17
] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# ==================================================
# @Author : Copyright@Ryuchen
# ==================================================
# Package initializer: re-export the package version string.
from .version import VERSION
# Only VERSION is part of this package's public API.
__all__ = [
    "VERSION"
]
|
normal
|
{
"blob_id": "d815c6e233d81dfb144442a83e6006aa4e29bfce",
"index": 100,
"step-1": "<mask token>\n",
"step-2": "<mask token>\n__all__ = ['VERSION']\n",
"step-3": "from .version import VERSION\n__all__ = ['VERSION']\n",
"step-4": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n# ==================================================\n# @Author : Copyright@Ryuchen\n# ==================================================\n\nfrom .version import VERSION\n\n__all__ = [\n \"VERSION\"\n]\n",
"step-5": null,
"step-ids": [
0,
1,
2,
3
]
}
|
[
0,
1,
2,
3
] |
from fastapi import APIRouter, Depends, status, Response
from typing import List
import schemas, database
from sqlalchemy.orm import Session
import repository.blog as blog
from .oauth2 import get_current_user
router = APIRouter(
    prefix="/blog",
    tags=['Blog'])
@router.get('/', status_code=status.HTTP_200_OK, response_model=List[schemas.ShowBlog])
def all_blog(db: Session = Depends(database.get_db), current_user: schemas.User=Depends(get_current_user)):
    """List every blog post (HTTP 200); requires an authenticated user."""
    return blog.all_blog(db)
@router.post('/', status_code=status.HTTP_201_CREATED)
def create(request:schemas.Blog, db: Session = Depends(database.get_db), current_user: schemas.User=Depends(get_current_user)):
    """Create a new blog post from the request body (HTTP 201)."""
    return blog.create(request, db)
@router.delete('/{id}', status_code=status.HTTP_200_OK)
def destroy(id, db: Session = Depends(database.get_db), current_user: schemas.User=Depends(get_current_user)):
    """Delete the blog post with the given path id (HTTP 200)."""
    return blog.destroy(id, db)
@router.put('/{id}', status_code=status.HTTP_202_ACCEPTED)
def update(id, request: schemas.Blog, db: Session = Depends(database.get_db), current_user: schemas.User=Depends(get_current_user)):
    """Replace the blog post with the given path id using the request body (HTTP 202)."""
    return blog.update(id, request, db)
@router.get('/{id}', status_code=status.HTTP_200_OK, response_model=schemas.ShowBlog)
def show(id, response: Response, db: Session = Depends(database.get_db), current_user: schemas.User=Depends(get_current_user)):
    """Fetch a single blog post by path id (HTTP 200)."""
    return blog.show(id, response, db)
|
normal
|
{
"blob_id": "7fd5e83d28e919e7b94cea290c6b4db3378938b6",
"index": 4600,
"step-1": "<mask token>\n\n\n@router.get('/', status_code=status.HTTP_200_OK, response_model=List[\n schemas.ShowBlog])\ndef all_blog(db: Session=Depends(database.get_db), current_user: schemas.\n User=Depends(get_current_user)):\n return blog.all_blog(db)\n\n\n@router.post('/', status_code=status.HTTP_201_CREATED)\ndef create(request: schemas.Blog, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.create(request, db)\n\n\n@router.delete('/{id}', status_code=status.HTTP_200_OK)\ndef destroy(id, db: Session=Depends(database.get_db), current_user: schemas\n .User=Depends(get_current_user)):\n return blog.destroy(id, db)\n\n\n@router.put('/{id}', status_code=status.HTTP_202_ACCEPTED)\ndef update(id, request: schemas.Blog, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.update(id, request, db)\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\n@router.get('/', status_code=status.HTTP_200_OK, response_model=List[\n schemas.ShowBlog])\ndef all_blog(db: Session=Depends(database.get_db), current_user: schemas.\n User=Depends(get_current_user)):\n return blog.all_blog(db)\n\n\n@router.post('/', status_code=status.HTTP_201_CREATED)\ndef create(request: schemas.Blog, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.create(request, db)\n\n\n@router.delete('/{id}', status_code=status.HTTP_200_OK)\ndef destroy(id, db: Session=Depends(database.get_db), current_user: schemas\n .User=Depends(get_current_user)):\n return blog.destroy(id, db)\n\n\n@router.put('/{id}', status_code=status.HTTP_202_ACCEPTED)\ndef update(id, request: schemas.Blog, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.update(id, request, db)\n\n\n@router.get('/{id}', status_code=status.HTTP_200_OK, response_model=schemas\n .ShowBlog)\ndef show(id, response: Response, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.show(id, response, db)\n",
"step-3": "<mask token>\nrouter = APIRouter(prefix='/blog', tags=['Blog'])\n\n\n@router.get('/', status_code=status.HTTP_200_OK, response_model=List[\n schemas.ShowBlog])\ndef all_blog(db: Session=Depends(database.get_db), current_user: schemas.\n User=Depends(get_current_user)):\n return blog.all_blog(db)\n\n\n@router.post('/', status_code=status.HTTP_201_CREATED)\ndef create(request: schemas.Blog, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.create(request, db)\n\n\n@router.delete('/{id}', status_code=status.HTTP_200_OK)\ndef destroy(id, db: Session=Depends(database.get_db), current_user: schemas\n .User=Depends(get_current_user)):\n return blog.destroy(id, db)\n\n\n@router.put('/{id}', status_code=status.HTTP_202_ACCEPTED)\ndef update(id, request: schemas.Blog, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.update(id, request, db)\n\n\n@router.get('/{id}', status_code=status.HTTP_200_OK, response_model=schemas\n .ShowBlog)\ndef show(id, response: Response, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.show(id, response, db)\n",
"step-4": "from fastapi import APIRouter, Depends, status, Response\nfrom typing import List\nimport schemas, database\nfrom sqlalchemy.orm import Session\nimport repository.blog as blog\nfrom .oauth2 import get_current_user\nrouter = APIRouter(prefix='/blog', tags=['Blog'])\n\n\n@router.get('/', status_code=status.HTTP_200_OK, response_model=List[\n schemas.ShowBlog])\ndef all_blog(db: Session=Depends(database.get_db), current_user: schemas.\n User=Depends(get_current_user)):\n return blog.all_blog(db)\n\n\n@router.post('/', status_code=status.HTTP_201_CREATED)\ndef create(request: schemas.Blog, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.create(request, db)\n\n\n@router.delete('/{id}', status_code=status.HTTP_200_OK)\ndef destroy(id, db: Session=Depends(database.get_db), current_user: schemas\n .User=Depends(get_current_user)):\n return blog.destroy(id, db)\n\n\n@router.put('/{id}', status_code=status.HTTP_202_ACCEPTED)\ndef update(id, request: schemas.Blog, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.update(id, request, db)\n\n\n@router.get('/{id}', status_code=status.HTTP_200_OK, response_model=schemas\n .ShowBlog)\ndef show(id, response: Response, db: Session=Depends(database.get_db),\n current_user: schemas.User=Depends(get_current_user)):\n return blog.show(id, response, db)\n",
"step-5": "from fastapi import APIRouter, Depends, status, Response\nfrom typing import List\nimport schemas, database\nfrom sqlalchemy.orm import Session\nimport repository.blog as blog\nfrom .oauth2 import get_current_user\n\nrouter = APIRouter(\n prefix=\"/blog\",\n tags=['Blog'])\n\n@router.get('/', status_code=status.HTTP_200_OK, response_model=List[schemas.ShowBlog])\ndef all_blog(db: Session = Depends(database.get_db), current_user: schemas.User=Depends(get_current_user)):\n return blog.all_blog(db)\n\n@router.post('/', status_code=status.HTTP_201_CREATED)\ndef create(request:schemas.Blog, db: Session = Depends(database.get_db), current_user: schemas.User=Depends(get_current_user)):\n return blog.create(request, db)\n\n@router.delete('/{id}', status_code=status.HTTP_200_OK)\ndef destroy(id, db: Session = Depends(database.get_db), current_user: schemas.User=Depends(get_current_user)):\n return blog.destroy(id, db)\n\n@router.put('/{id}', status_code=status.HTTP_202_ACCEPTED)\ndef update(id, request: schemas.Blog, db: Session = Depends(database.get_db), current_user: schemas.User=Depends(get_current_user)):\n return blog.update(id, request, db)\n\n@router.get('/{id}', status_code=status.HTTP_200_OK, response_model=schemas.ShowBlog)\ndef show(id, response: Response, db: Session = Depends(database.get_db), current_user: schemas.User=Depends(get_current_user)):\n return blog.show(id, response, db)",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
class EUDataCenter(DataCenter):
<|reserved_special_token_0|>
@classmethod
def PRODUCTION(cls):
"""
This method represents the Zoho CRM Production environment in EU domain
:return: An instance of Environments
"""
return DataCenter.Environment('https://www.zohoapis.eu', cls().
get_iam_url(), cls().get_file_upload_url())
@classmethod
def SANDBOX(cls):
"""
This method represents the Zoho CRM Sandbox environment in EU domain
:return: An instance of Environment
"""
return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().
get_iam_url(), cls().get_file_upload_url())
@classmethod
def DEVELOPER(cls):
"""
This method represents the Zoho CRM Developer environment in EU domain
:return: An instance of Environment
"""
return DataCenter.Environment('https://developer.zohoapis.eu', cls(
).get_iam_url(), cls().get_file_upload_url())
<|reserved_special_token_0|>
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class EUDataCenter(DataCenter):
<|reserved_special_token_0|>
@classmethod
def PRODUCTION(cls):
"""
This method represents the Zoho CRM Production environment in EU domain
:return: An instance of Environments
"""
return DataCenter.Environment('https://www.zohoapis.eu', cls().
get_iam_url(), cls().get_file_upload_url())
@classmethod
def SANDBOX(cls):
"""
This method represents the Zoho CRM Sandbox environment in EU domain
:return: An instance of Environment
"""
return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().
get_iam_url(), cls().get_file_upload_url())
@classmethod
def DEVELOPER(cls):
"""
This method represents the Zoho CRM Developer environment in EU domain
:return: An instance of Environment
"""
return DataCenter.Environment('https://developer.zohoapis.eu', cls(
).get_iam_url(), cls().get_file_upload_url())
def get_iam_url(self):
return 'https://accounts.zoho.eu/oauth/v2/token'
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
class EUDataCenter(DataCenter):
"""
This class represents the properties of Zoho CRM in EU Domain.
"""
@classmethod
def PRODUCTION(cls):
"""
This method represents the Zoho CRM Production environment in EU domain
:return: An instance of Environments
"""
return DataCenter.Environment('https://www.zohoapis.eu', cls().
get_iam_url(), cls().get_file_upload_url())
@classmethod
def SANDBOX(cls):
"""
This method represents the Zoho CRM Sandbox environment in EU domain
:return: An instance of Environment
"""
return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().
get_iam_url(), cls().get_file_upload_url())
@classmethod
def DEVELOPER(cls):
"""
This method represents the Zoho CRM Developer environment in EU domain
:return: An instance of Environment
"""
return DataCenter.Environment('https://developer.zohoapis.eu', cls(
).get_iam_url(), cls().get_file_upload_url())
def get_iam_url(self):
return 'https://accounts.zoho.eu/oauth/v2/token'
def get_file_upload_url(self):
return 'https://content.zohoapis.eu'
<|reserved_special_token_1|>
try:
from zcrmsdk.src.com.zoho.crm.api.dc.data_center import DataCenter
except Exception as e:
from .data_center import DataCenter
class EUDataCenter(DataCenter):
    """Data-center properties of Zoho CRM for the EU domain."""

    @classmethod
    def _environment(cls, api_url):
        # Helper: pair the given API root with the EU-wide IAM and upload URLs.
        dc = cls()
        return DataCenter.Environment(api_url, dc.get_iam_url(), dc.get_file_upload_url())

    @classmethod
    def PRODUCTION(cls):
        """Return the Environment for the Zoho CRM Production EU domain."""
        return cls._environment('https://www.zohoapis.eu')

    @classmethod
    def SANDBOX(cls):
        """Return the Environment for the Zoho CRM Sandbox EU domain."""
        return cls._environment('https://sandbox.zohoapis.eu')

    @classmethod
    def DEVELOPER(cls):
        """Return the Environment for the Zoho CRM Developer EU domain."""
        return cls._environment('https://developer.zohoapis.eu')

    def get_iam_url(self):
        """OAuth2 token endpoint for EU accounts."""
        return 'https://accounts.zoho.eu/oauth/v2/token'

    def get_file_upload_url(self):
        """Content upload host for the EU domain."""
        return 'https://content.zohoapis.eu'
<|reserved_special_token_1|>
try:
from zcrmsdk.src.com.zoho.crm.api.dc.data_center import DataCenter
except Exception as e:
from .data_center import DataCenter
class EUDataCenter(DataCenter):
    """Zoho CRM properties for the EU data center."""

    @classmethod
    def PRODUCTION(cls):
        """Zoho CRM Production environment in the EU domain."""
        dc = cls()
        return DataCenter.Environment("https://www.zohoapis.eu",
                                      dc.get_iam_url(),
                                      dc.get_file_upload_url())

    @classmethod
    def SANDBOX(cls):
        """Zoho CRM Sandbox environment in the EU domain."""
        dc = cls()
        return DataCenter.Environment("https://sandbox.zohoapis.eu",
                                      dc.get_iam_url(),
                                      dc.get_file_upload_url())

    @classmethod
    def DEVELOPER(cls):
        """Zoho CRM Developer environment in the EU domain."""
        dc = cls()
        return DataCenter.Environment("https://developer.zohoapis.eu",
                                      dc.get_iam_url(),
                                      dc.get_file_upload_url())

    def get_iam_url(self):
        """OAuth2 token endpoint for EU accounts."""
        return "https://accounts.zoho.eu/oauth/v2/token"

    def get_file_upload_url(self):
        """File-upload host for the EU domain."""
        return "https://content.zohoapis.eu"
|
flexible
|
{
"blob_id": "27c364ccf4a6703f74c95ebb386f8ced38b1eafd",
"index": 4960,
"step-1": "<mask token>\n\n\nclass EUDataCenter(DataCenter):\n <mask token>\n\n @classmethod\n def PRODUCTION(cls):\n \"\"\"\n This method represents the Zoho CRM Production environment in EU domain\n :return: An instance of Environments\n \"\"\"\n return DataCenter.Environment('https://www.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def SANDBOX(cls):\n \"\"\"\n This method represents the Zoho CRM Sandbox environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def DEVELOPER(cls):\n \"\"\"\n This method represents the Zoho CRM Developer environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://developer.zohoapis.eu', cls(\n ).get_iam_url(), cls().get_file_upload_url())\n <mask token>\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass EUDataCenter(DataCenter):\n <mask token>\n\n @classmethod\n def PRODUCTION(cls):\n \"\"\"\n This method represents the Zoho CRM Production environment in EU domain\n :return: An instance of Environments\n \"\"\"\n return DataCenter.Environment('https://www.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def SANDBOX(cls):\n \"\"\"\n This method represents the Zoho CRM Sandbox environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def DEVELOPER(cls):\n \"\"\"\n This method represents the Zoho CRM Developer environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://developer.zohoapis.eu', cls(\n ).get_iam_url(), cls().get_file_upload_url())\n\n def get_iam_url(self):\n return 'https://accounts.zoho.eu/oauth/v2/token'\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass EUDataCenter(DataCenter):\n \"\"\"\n This class represents the properties of Zoho CRM in EU Domain.\n \"\"\"\n\n @classmethod\n def PRODUCTION(cls):\n \"\"\"\n This method represents the Zoho CRM Production environment in EU domain\n :return: An instance of Environments\n \"\"\"\n return DataCenter.Environment('https://www.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def SANDBOX(cls):\n \"\"\"\n This method represents the Zoho CRM Sandbox environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def DEVELOPER(cls):\n \"\"\"\n This method represents the Zoho CRM Developer environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://developer.zohoapis.eu', cls(\n ).get_iam_url(), cls().get_file_upload_url())\n\n def get_iam_url(self):\n return 'https://accounts.zoho.eu/oauth/v2/token'\n\n def get_file_upload_url(self):\n return 'https://content.zohoapis.eu'\n",
"step-4": "try:\n from zcrmsdk.src.com.zoho.crm.api.dc.data_center import DataCenter\nexcept Exception as e:\n from .data_center import DataCenter\n\n\nclass EUDataCenter(DataCenter):\n \"\"\"\n This class represents the properties of Zoho CRM in EU Domain.\n \"\"\"\n\n @classmethod\n def PRODUCTION(cls):\n \"\"\"\n This method represents the Zoho CRM Production environment in EU domain\n :return: An instance of Environments\n \"\"\"\n return DataCenter.Environment('https://www.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def SANDBOX(cls):\n \"\"\"\n This method represents the Zoho CRM Sandbox environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://sandbox.zohoapis.eu', cls().\n get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def DEVELOPER(cls):\n \"\"\"\n This method represents the Zoho CRM Developer environment in EU domain\n :return: An instance of Environment\n \"\"\"\n return DataCenter.Environment('https://developer.zohoapis.eu', cls(\n ).get_iam_url(), cls().get_file_upload_url())\n\n def get_iam_url(self):\n return 'https://accounts.zoho.eu/oauth/v2/token'\n\n def get_file_upload_url(self):\n return 'https://content.zohoapis.eu'\n",
"step-5": "try:\n from zcrmsdk.src.com.zoho.crm.api.dc.data_center import DataCenter\nexcept Exception as e:\n from .data_center import DataCenter\n\n\nclass EUDataCenter(DataCenter):\n\n \"\"\"\n This class represents the properties of Zoho CRM in EU Domain.\n \"\"\"\n\n @classmethod\n def PRODUCTION(cls):\n\n \"\"\"\n This method represents the Zoho CRM Production environment in EU domain\n :return: An instance of Environments\n \"\"\"\n\n return DataCenter.Environment(\"https://www.zohoapis.eu\", cls().get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def SANDBOX(cls):\n\n \"\"\"\n This method represents the Zoho CRM Sandbox environment in EU domain\n :return: An instance of Environment\n \"\"\"\n\n return DataCenter.Environment(\"https://sandbox.zohoapis.eu\", cls().get_iam_url(), cls().get_file_upload_url())\n\n @classmethod\n def DEVELOPER(cls):\n\n \"\"\"\n This method represents the Zoho CRM Developer environment in EU domain\n :return: An instance of Environment\n \"\"\"\n\n return DataCenter.Environment(\"https://developer.zohoapis.eu\", cls().get_iam_url(), cls().get_file_upload_url())\n\n def get_iam_url(self):\n return \"https://accounts.zoho.eu/oauth/v2/token\"\n\n def get_file_upload_url(self):\n return \"https://content.zohoapis.eu\"\n",
"step-ids": [
4,
5,
7,
8,
9
]
}
|
[
4,
5,
7,
8,
9
] |
import pygame
import time
from menus import MainMenu
from scenes import TestWorldGen
from scenes import TestAnimation
from scenes import TestLevel2
from scenes import MainGame
import random
class GameManager:
    """Owns the pygame window, the active scene, joysticks and the main loop."""

    def __init__(self):
        self.screen = pygame.display.set_mode((1280, 720),
                                              flags=pygame.FULLSCREEN |
                                              pygame.HWSURFACE |
                                              pygame.DOUBLEBUF)  # type: pygame.Surface
        self.running = True
        # Seconds elapsed during the previous frame; scenes use this for
        # frame-rate-independent movement.
        self.delta_time = 1
        self.active_scene = None
        # self.load_scene(MainMenu.MainMenu, (self,))
        # self.load_scene(TestWorldGen.TestWorldGen, (self,))
        # self.load_scene(TestAnimation.TestAnimation, (self,))
        # self.load_scene(TestLevel2.TestLevel, (self, ))
        self.load_scene(MainGame.MainGame, (self,))
        self.fps_font = pygame.font.Font("game_data/fonts/calling_code.ttf", 14)
        self.pygame_clock = pygame.time.Clock()  # type: pygame
        self.pygame_clock.tick()
        pygame.joystick.init()
        self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.joystick.get_count())]
        for joystick in self.joystick:
            joystick.init()
        random.seed(time.time())
        # Index of the joystick controlling the player; -1 means "none yet".
        self.player_joy = -1

    def __del__(self):
        self.exit()

    def main_loop(self):
        """Run the game loop until exit() is called: pump events, tick the
        active scene at a 60 FPS cap and draw an FPS counter overlay."""
        while self.running:
            events = pygame.event.get()
            for event in events:
                if event.type == pygame.QUIT:
                    self.exit()
            self.delta_time = float(self.pygame_clock.tick(60)) / (10 ** 3)
            # BUG FIX: tick() may return 0 ms on a very fast frame, which made
            # `1 / self.delta_time` raise ZeroDivisionError. Guard the readout.
            fps = round(1 / self.delta_time) if self.delta_time > 0 else 0
            fps_text = self.fps_font.render("FPS: {}".format(fps), False, (255, 255, 255))
            self.active_scene.main_loop(events)
            self.screen.blit(fps_text, (self.screen.get_width() - fps_text.get_width(), 0))
            pygame.display.flip()

    def load_scene(self, scene_object, scene_parameters):
        """Instantiate scene_object(*scene_parameters) and make it the active scene."""
        self.active_scene = scene_object(*scene_parameters)

    def exit(self):
        """Stop the main loop on its next iteration."""
        self.running = False
|
normal
|
{
"blob_id": "91806afea92587476ac743346b88098b197a033c",
"index": 9706,
"step-1": "<mask token>\n\n\nclass GameManager:\n\n def __init__(self):\n self.screen = pygame.display.set_mode((1280, 720), flags=pygame.\n FULLSCREEN | pygame.HWSURFACE | pygame.DOUBLEBUF)\n self.running = True\n self.delta_time = 1\n self.active_scene = None\n self.load_scene(MainGame.MainGame, (self,))\n self.fps_font = pygame.font.Font('game_data/fonts/calling_code.ttf', 14\n )\n self.pygame_clock = pygame.time.Clock()\n self.pygame_clock.tick()\n pygame.joystick.init()\n self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.\n joystick.get_count())]\n for joystick in self.joystick:\n joystick.init()\n random.seed(time.time())\n self.player_joy = -1\n <mask token>\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n self.delta_time = float(self.pygame_clock.tick(60)) / 10 ** 3\n fps_text = self.fps_font.render('FPS: {}'.format(round(1 / self\n .delta_time)), False, (255, 255, 255))\n self.active_scene.main_loop(events)\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.\n get_width(), 0))\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n <mask token>\n",
"step-2": "<mask token>\n\n\nclass GameManager:\n\n def __init__(self):\n self.screen = pygame.display.set_mode((1280, 720), flags=pygame.\n FULLSCREEN | pygame.HWSURFACE | pygame.DOUBLEBUF)\n self.running = True\n self.delta_time = 1\n self.active_scene = None\n self.load_scene(MainGame.MainGame, (self,))\n self.fps_font = pygame.font.Font('game_data/fonts/calling_code.ttf', 14\n )\n self.pygame_clock = pygame.time.Clock()\n self.pygame_clock.tick()\n pygame.joystick.init()\n self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.\n joystick.get_count())]\n for joystick in self.joystick:\n joystick.init()\n random.seed(time.time())\n self.player_joy = -1\n\n def __del__(self):\n self.exit()\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n self.delta_time = float(self.pygame_clock.tick(60)) / 10 ** 3\n fps_text = self.fps_font.render('FPS: {}'.format(round(1 / self\n .delta_time)), False, (255, 255, 255))\n self.active_scene.main_loop(events)\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.\n get_width(), 0))\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n <mask token>\n",
"step-3": "<mask token>\n\n\nclass GameManager:\n\n def __init__(self):\n self.screen = pygame.display.set_mode((1280, 720), flags=pygame.\n FULLSCREEN | pygame.HWSURFACE | pygame.DOUBLEBUF)\n self.running = True\n self.delta_time = 1\n self.active_scene = None\n self.load_scene(MainGame.MainGame, (self,))\n self.fps_font = pygame.font.Font('game_data/fonts/calling_code.ttf', 14\n )\n self.pygame_clock = pygame.time.Clock()\n self.pygame_clock.tick()\n pygame.joystick.init()\n self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.\n joystick.get_count())]\n for joystick in self.joystick:\n joystick.init()\n random.seed(time.time())\n self.player_joy = -1\n\n def __del__(self):\n self.exit()\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n self.delta_time = float(self.pygame_clock.tick(60)) / 10 ** 3\n fps_text = self.fps_font.render('FPS: {}'.format(round(1 / self\n .delta_time)), False, (255, 255, 255))\n self.active_scene.main_loop(events)\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.\n get_width(), 0))\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n\n def exit(self):\n self.running = False\n",
"step-4": "import pygame\nimport time\nfrom menus import MainMenu\nfrom scenes import TestWorldGen\nfrom scenes import TestAnimation\nfrom scenes import TestLevel2\nfrom scenes import MainGame\nimport random\n\n\nclass GameManager:\n\n def __init__(self):\n self.screen = pygame.display.set_mode((1280, 720), flags=pygame.\n FULLSCREEN | pygame.HWSURFACE | pygame.DOUBLEBUF)\n self.running = True\n self.delta_time = 1\n self.active_scene = None\n self.load_scene(MainGame.MainGame, (self,))\n self.fps_font = pygame.font.Font('game_data/fonts/calling_code.ttf', 14\n )\n self.pygame_clock = pygame.time.Clock()\n self.pygame_clock.tick()\n pygame.joystick.init()\n self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.\n joystick.get_count())]\n for joystick in self.joystick:\n joystick.init()\n random.seed(time.time())\n self.player_joy = -1\n\n def __del__(self):\n self.exit()\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n self.delta_time = float(self.pygame_clock.tick(60)) / 10 ** 3\n fps_text = self.fps_font.render('FPS: {}'.format(round(1 / self\n .delta_time)), False, (255, 255, 255))\n self.active_scene.main_loop(events)\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.\n get_width(), 0))\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n\n def exit(self):\n self.running = False\n",
"step-5": "import pygame\nimport time\nfrom menus import MainMenu\nfrom scenes import TestWorldGen\nfrom scenes import TestAnimation\nfrom scenes import TestLevel2\nfrom scenes import MainGame\nimport random\n\n\nclass GameManager:\n def __init__(self):\n self.screen = pygame.display.set_mode((1280, 720),\n flags=pygame.FULLSCREEN |\n pygame.HWSURFACE |\n pygame.DOUBLEBUF) # type: pygame.Surface\n\n self.running = True\n\n self.delta_time = 1\n\n self.active_scene = None\n # self.load_scene(MainMenu.MainMenu, (self,))\n # self.load_scene(TestWorldGen.TestWorldGen, (self,))\n # self.load_scene(TestAnimation.TestAnimation, (self,))\n # self.load_scene(TestLevel2.TestLevel, (self, ))\n self.load_scene(MainGame.MainGame, (self,))\n\n self.fps_font = pygame.font.Font(\"game_data/fonts/calling_code.ttf\", 14)\n\n self.pygame_clock = pygame.time.Clock() # type: pygame\n self.pygame_clock.tick()\n pygame.joystick.init()\n self.joystick = [pygame.joystick.Joystick(i) for i in range(pygame.joystick.get_count())]\n for joystick in self.joystick:\n joystick.init()\n\n random.seed(time.time())\n\n self.player_joy = -1\n\n def __del__(self):\n self.exit()\n\n def main_loop(self):\n while self.running:\n events = pygame.event.get()\n for event in events:\n if event.type == pygame.QUIT:\n self.exit()\n\n self.delta_time = float(self.pygame_clock.tick(60)) / (10 ** 3)\n\n fps_text = self.fps_font.render(\"FPS: {}\".format(round(1 / self.delta_time)), False, (255, 255, 255))\n\n self.active_scene.main_loop(events)\n\n self.screen.blit(fps_text, (self.screen.get_width() - fps_text.get_width(), 0))\n\n pygame.display.flip()\n\n def load_scene(self, scene_object, scene_parameters):\n self.active_scene = scene_object(*scene_parameters)\n\n def exit(self):\n self.running = False\n",
"step-ids": [
4,
5,
6,
7,
8
]
}
|
[
4,
5,
6,
7,
8
] |
<|reserved_special_token_0|>
def lambda_handler(event, context):
current_date = datetime.now(pytz.timezone('US/Central'))
yesterday_date = current_date - timedleta(days=1)
yesterday_date_string = yesterday_date.strftime('%Y-%m-%dT')
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('AppStreamDynamoDB1')
response = table.scan(FilterExpression=Attr('formData').contains(
yesterday_date_string))
items = response['Items']
print(items)
print('testing')
print(yesterday_date_string)
if len(items) != 0:
print(items)
return items
saving_backup()
delete_entires()
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def lambda_handler(event, context):
current_date = datetime.now(pytz.timezone('US/Central'))
yesterday_date = current_date - timedleta(days=1)
yesterday_date_string = yesterday_date.strftime('%Y-%m-%dT')
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('AppStreamDynamoDB1')
response = table.scan(FilterExpression=Attr('formData').contains(
yesterday_date_string))
items = response['Items']
print(items)
print('testing')
print(yesterday_date_string)
if len(items) != 0:
print(items)
return items
saving_backup()
delete_entires()
def saving_backup():
s3_client = boto3.client('s3')
key = datetime.now(pytz.timezone('US/Central')).strftime('%Y-%m-%dT')
bucket = 'REPLACE_WITH_BUCKET_NAME'
data = []
serializedData = json.dumps(data)
try:
response = s3.put_object(Bucket=bucket, Key=key, Body=serializedData)
except ClientError as e:
logging.error(e)
return False
return True
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def lambda_handler(event, context):
current_date = datetime.now(pytz.timezone('US/Central'))
yesterday_date = current_date - timedleta(days=1)
yesterday_date_string = yesterday_date.strftime('%Y-%m-%dT')
dynamodb = boto3.resource('dynamodb')
table = dynamodb.Table('AppStreamDynamoDB1')
response = table.scan(FilterExpression=Attr('formData').contains(
yesterday_date_string))
items = response['Items']
print(items)
print('testing')
print(yesterday_date_string)
if len(items) != 0:
print(items)
return items
saving_backup()
delete_entires()
def saving_backup():
s3_client = boto3.client('s3')
key = datetime.now(pytz.timezone('US/Central')).strftime('%Y-%m-%dT')
bucket = 'REPLACE_WITH_BUCKET_NAME'
data = []
serializedData = json.dumps(data)
try:
response = s3.put_object(Bucket=bucket, Key=key, Body=serializedData)
except ClientError as e:
logging.error(e)
return False
return True
def delete_entires():
saving_backup() == True
print('Attempting a conditional delete...')
try:
response = table.delete_item(Key={'date': yesterday_date_string})
except ClientError as e:
if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
print(e.response['Error']['Message'])
else:
raise
else:
print('DeleteItem succeeded:')
<|reserved_special_token_1|>
import csv
import boto3
import pytz
import time
from datetime import datetime, timedelta
from boto3.dynamodb.conditions import Key, Attr
def lambda_handler(event, context):
    """Scan AppStreamDynamoDB1 for rows whose formData contains yesterday's
    date; return them when found, otherwise back the table up to S3 and
    purge the old entries.

    Args:
        event: AWS Lambda event payload (unused).
        context: AWS Lambda context object (unused).

    Returns:
        The list of matching items, or None when the table had no matches.
    """
    current_date = datetime.now(pytz.timezone('US/Central'))
    # BUG FIX: was `timedleta`, a NameError at runtime.
    yesterday_date = current_date - timedelta(days=1)
    yesterday_date_string = yesterday_date.strftime('%Y-%m-%dT')
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table('AppStreamDynamoDB1')
    response = table.scan(FilterExpression=Attr('formData').contains(
        yesterday_date_string))
    items = response['Items']
    print(items)
    print('testing')
    print(yesterday_date_string)
    if len(items) != 0:
        print(items)
        return items
    saving_backup()
    delete_entires()
def saving_backup():
    """Serialize a (currently empty) payload to JSON and store it in S3
    under a key named after today's US/Central date.

    Returns True on success, False when the upload fails.
    """
    import json     # local imports: the module header imports neither
    import logging  # json nor logging
    s3_client = boto3.client('s3')
    key = datetime.now(pytz.timezone('US/Central')).strftime('%Y-%m-%dT')
    bucket = 'REPLACE_WITH_BUCKET_NAME'  # placeholder -- must be configured
    data = []  # TODO: this backs up nothing; pass the scanned items in
    serializedData = json.dumps(data)
    try:
        # BUG FIX: the original referenced the undefined name `s3`;
        # the client created above is `s3_client`.
        response = s3_client.put_object(Bucket=bucket, Key=key,
                                        Body=serializedData)
    except ClientError as e:
        # NOTE(review): ClientError is not imported at module level --
        # needs `from botocore.exceptions import ClientError`.
        logging.error(e)
        return False
    return True
def delete_entires():
    """Back the table up to S3, then delete yesterday's entry from
    DynamoDB.

    (Function name kept as-is -- sic, "entires" -- so existing callers
    keep working.)
    """
    # BUG FIX: the original evaluated `saving_backup() == True` and
    # discarded the result; the comparison clearly intended to gate the
    # delete on a successful backup, so do that -- never delete data
    # that has no archived copy.
    if not saving_backup():
        print('Backup failed; skipping delete.')
        return
    print('Attempting a conditional delete...')
    try:
        # NOTE(review): `table` and `yesterday_date_string` are locals
        # of lambda_handler, not globals -- this raises NameError as
        # written; they should be passed in as parameters.
        response = table.delete_item(Key={'date': yesterday_date_string})
    except ClientError as e:
        if e.response['Error']['Code'] == 'ConditionalCheckFailedException':
            print(e.response['Error']['Message'])
        else:
            raise
    else:
        print('DeleteItem succeeded:')
<|reserved_special_token_1|>
import csv
import boto3
import pytz
import time
from datetime import datetime, timedelta
# current_time = int(datetime.now())
from boto3.dynamodb.conditions import Key, Attr
# Original (pre-cleanup) revision of the handler, kept with its inline
# developer comments.
# NOTE(review): `timedleta` is a typo for `timedelta` (NameError on
# every invocation).
def lambda_handler(event, context):
    current_date = datetime.now(pytz.timezone('US/Central'))
    yesterday_date = current_date - timedleta(days=1)
    yesterday_date_string = yesterday_date.strftime("%Y-%m-%dT")
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table('AppStreamDynamoDB1')
    response = table.scan(
        FilterExpression=Attr('formData').contains(yesterday_date_string)
    )
    items = response['Items']
    print(items) # it should print out the values
    print("testing")
    print(yesterday_date_string)
    if len(items) != 0:
        print(items) # it should print null
        return items
    saving_backup()
    delete_entires()
# NOTE(review): `s3` below is undefined (the client is `s3_client`),
# and `json`/`logging`/`ClientError` are not imported in this file.
def saving_backup():
    s3_client = boto3.client('s3')
    key = datetime.now(pytz.timezone('US/Central')).strftime("%Y-%m-%dT")
    bucket = 'REPLACE_WITH_BUCKET_NAME'
    data = []
    serializedData = json.dumps(data)
    try:
        # response = s3_client.upload_file(file_name, bucket, object_name)
        response = s3.put_object(Bucket=bucket, Key=key, Body=serializedData)
    except ClientError as e:
        logging.error(e)
        return False
    return True
# NOTE(review): the comparison result on the first line below is
# discarded, and `table`/`yesterday_date_string` are locals of
# lambda_handler, not names visible here.
def delete_entires():
    saving_backup() == True
    #----------------------Delete Items inside the dynamo db---------------------------------------------
    print("Attempting a conditional delete...")
    try:
        response = table.delete_item(
            Key={
                'date': yesterday_date_string ,
            },
            # ConditionExpression="info.rating <= :val",
            # ExpressionAttributeValues= {
            #     ":val": decimal.Decimal(5)
            # }
        )
    except ClientError as e:
        if e.response['Error']['Code'] == "ConditionalCheckFailedException":
            print(e.response['Error']['Message'])
        else:
            raise
    else:
        print("DeleteItem succeeded:")
        # print(json.dumps(response, indent=4, cls=DecimalEncoder))
|
flexible
|
{
"blob_id": "64d955d568a6bfec50aad36c9c4f1e36998e4d74",
"index": 7467,
"step-1": "<mask token>\n\n\ndef lambda_handler(event, context):\n current_date = datetime.now(pytz.timezone('US/Central'))\n yesterday_date = current_date - timedleta(days=1)\n yesterday_date_string = yesterday_date.strftime('%Y-%m-%dT')\n dynamodb = boto3.resource('dynamodb')\n table = dynamodb.Table('AppStreamDynamoDB1')\n response = table.scan(FilterExpression=Attr('formData').contains(\n yesterday_date_string))\n items = response['Items']\n print(items)\n print('testing')\n print(yesterday_date_string)\n if len(items) != 0:\n print(items)\n return items\n saving_backup()\n delete_entires()\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef lambda_handler(event, context):\n current_date = datetime.now(pytz.timezone('US/Central'))\n yesterday_date = current_date - timedleta(days=1)\n yesterday_date_string = yesterday_date.strftime('%Y-%m-%dT')\n dynamodb = boto3.resource('dynamodb')\n table = dynamodb.Table('AppStreamDynamoDB1')\n response = table.scan(FilterExpression=Attr('formData').contains(\n yesterday_date_string))\n items = response['Items']\n print(items)\n print('testing')\n print(yesterday_date_string)\n if len(items) != 0:\n print(items)\n return items\n saving_backup()\n delete_entires()\n\n\ndef saving_backup():\n s3_client = boto3.client('s3')\n key = datetime.now(pytz.timezone('US/Central')).strftime('%Y-%m-%dT')\n bucket = 'REPLACE_WITH_BUCKET_NAME'\n data = []\n serializedData = json.dumps(data)\n try:\n response = s3.put_object(Bucket=bucket, Key=key, Body=serializedData)\n except ClientError as e:\n logging.error(e)\n return False\n return True\n\n\n<mask token>\n",
"step-3": "<mask token>\n\n\ndef lambda_handler(event, context):\n current_date = datetime.now(pytz.timezone('US/Central'))\n yesterday_date = current_date - timedleta(days=1)\n yesterday_date_string = yesterday_date.strftime('%Y-%m-%dT')\n dynamodb = boto3.resource('dynamodb')\n table = dynamodb.Table('AppStreamDynamoDB1')\n response = table.scan(FilterExpression=Attr('formData').contains(\n yesterday_date_string))\n items = response['Items']\n print(items)\n print('testing')\n print(yesterday_date_string)\n if len(items) != 0:\n print(items)\n return items\n saving_backup()\n delete_entires()\n\n\ndef saving_backup():\n s3_client = boto3.client('s3')\n key = datetime.now(pytz.timezone('US/Central')).strftime('%Y-%m-%dT')\n bucket = 'REPLACE_WITH_BUCKET_NAME'\n data = []\n serializedData = json.dumps(data)\n try:\n response = s3.put_object(Bucket=bucket, Key=key, Body=serializedData)\n except ClientError as e:\n logging.error(e)\n return False\n return True\n\n\ndef delete_entires():\n saving_backup() == True\n print('Attempting a conditional delete...')\n try:\n response = table.delete_item(Key={'date': yesterday_date_string})\n except ClientError as e:\n if e.response['Error']['Code'] == 'ConditionalCheckFailedException':\n print(e.response['Error']['Message'])\n else:\n raise\n else:\n print('DeleteItem succeeded:')\n",
"step-4": "import csv\nimport boto3\nimport pytz\nimport time\nfrom datetime import datetime, timedelta\nfrom boto3.dynamodb.conditions import Key, Attr\n\n\ndef lambda_handler(event, context):\n current_date = datetime.now(pytz.timezone('US/Central'))\n yesterday_date = current_date - timedleta(days=1)\n yesterday_date_string = yesterday_date.strftime('%Y-%m-%dT')\n dynamodb = boto3.resource('dynamodb')\n table = dynamodb.Table('AppStreamDynamoDB1')\n response = table.scan(FilterExpression=Attr('formData').contains(\n yesterday_date_string))\n items = response['Items']\n print(items)\n print('testing')\n print(yesterday_date_string)\n if len(items) != 0:\n print(items)\n return items\n saving_backup()\n delete_entires()\n\n\ndef saving_backup():\n s3_client = boto3.client('s3')\n key = datetime.now(pytz.timezone('US/Central')).strftime('%Y-%m-%dT')\n bucket = 'REPLACE_WITH_BUCKET_NAME'\n data = []\n serializedData = json.dumps(data)\n try:\n response = s3.put_object(Bucket=bucket, Key=key, Body=serializedData)\n except ClientError as e:\n logging.error(e)\n return False\n return True\n\n\ndef delete_entires():\n saving_backup() == True\n print('Attempting a conditional delete...')\n try:\n response = table.delete_item(Key={'date': yesterday_date_string})\n except ClientError as e:\n if e.response['Error']['Code'] == 'ConditionalCheckFailedException':\n print(e.response['Error']['Message'])\n else:\n raise\n else:\n print('DeleteItem succeeded:')\n",
"step-5": "import csv\nimport boto3 \nimport pytz\nimport time\nfrom datetime import datetime, timedelta\n# current_time = int(datetime.now())\nfrom boto3.dynamodb.conditions import Key, Attr\n\n\ndef lambda_handler(event, context):\n current_date = datetime.now(pytz.timezone('US/Central'))\n yesterday_date = current_date - timedleta(days=1)\n yesterday_date_string = yesterday_date.strftime(\"%Y-%m-%dT\")\n\n dynamodb = boto3.resource('dynamodb')\n\n table = dynamodb.Table('AppStreamDynamoDB1')\n\n\n response = table.scan(\n FilterExpression=Attr('formData').contains(yesterday_date_string)\n )\n items = response['Items']\n print(items) # it should print out the values\n print(\"testing\")\n print(yesterday_date_string)\n\n if len(items) != 0:\n print(items) # it should print null\n return items\n\n \n saving_backup()\n delete_entires()\n\ndef saving_backup():\n s3_client = boto3.client('s3')\n key = datetime.now(pytz.timezone('US/Central')).strftime(\"%Y-%m-%dT\")\n bucket = 'REPLACE_WITH_BUCKET_NAME'\n data = []\n serializedData = json.dumps(data)\n try:\n # response = s3_client.upload_file(file_name, bucket, object_name)\n response = s3.put_object(Bucket=bucket, Key=key, Body=serializedData)\n except ClientError as e:\n logging.error(e)\n return False\n return True\n\n\ndef delete_entires():\n saving_backup() == True\n #----------------------Delete Items inside the dynamo db---------------------------------------------\n\n print(\"Attempting a conditional delete...\")\n\n try:\n response = table.delete_item(\n Key={\n 'date': yesterday_date_string ,\n \n },\n # ConditionExpression=\"info.rating <= :val\",\n # ExpressionAttributeValues= {\n # \":val\": decimal.Decimal(5)\n # }\n )\n except ClientError as e:\n if e.response['Error']['Code'] == \"ConditionalCheckFailedException\":\n print(e.response['Error']['Message'])\n else:\n raise\n else:\n print(\"DeleteItem succeeded:\")\n # print(json.dumps(response, indent=4, cls=DecimalEncoder))\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
def split(string):
    """Split a comma-separated string into a list of pieces.

    The input is expected to be of the form ``"a, b, c"`` -- a comma
    followed by one space between items.  Whenever a comma is seen, the
    text accumulated since the previous boundary is emitted and the
    next piece is assumed to start two characters later (skipping the
    space).  Reaching the final character always emits the remaining
    text, whatever it is.
    """
    pieces = []
    start = 0
    final = len(string) - 1
    for position, character in enumerate(string):
        if character == ',' or position == final:
            # The last character is included in its piece; a comma
            # elsewhere is excluded.
            end = position + 1 if position == final else position
            pieces.append(string[start:end])
            start = position + 2  # skip the ", " separator
    return pieces
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
def split(string):
    """Split a comma-separated string into a list of substrings.

    The input is expected to look like ``"a, b, c"``: a comma plus a
    single space between items.  A comma emits the text accumulated
    since the previous boundary; the final character always emits the
    remainder of the string.
    """
    names = []
    index = 0
    last = 0
    for letter in string:
        if letter == ',' or index == len(string) - 1:
            if index == len(string) - 1:
                # Final character: include it in the last piece.
                names.append(string[last:index + 1])
            else:
                # Comma: emit everything since the previous boundary.
                names.append(string[last:index])
            # Skip the ", " separator (comma plus one space).
            last = index + 2
        index += 1
    return names
<|reserved_special_token_0|>
# Pass 1: collect each distinct author name exactly once.
# NOTE(review): `author in unique_authors` is an O(n) list scan per
# author -- a set would make this pass linear overall.
for name in books_data['authors']:
    if count % 1000 == 0:
        print(count)
    split_names = split(name)
    for author in split_names:
        if author in unique_authors:
            pass
        else:
            unique_authors.append(author)
    count += 1
<|reserved_special_token_0|>
# Pass 2: for every unique author, re-scan all rows and collect the
# ids of the books they appear on (O(authors x books) overall).
for author in unique_authors:
    if count % 100 == 0:
        print(str(count) + '/' + str(length_2))
    books = []
    for i in range(length):
        split_names = split(books_data['authors'][i])
        if author in split_names:
            books.append(books_data['goodreads_book_id'][i])
    authors_books.append(books)
    count += 1
<|reserved_special_token_0|>
# Persist the author -> book-id mapping built above.
books_by_author.to_csv(write_path, index=False)
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Build a CSV mapping each unique author to the list of Goodreads book
# ids they appear on, from Processed_Data\new_books_data.csv.
# Path is hard-coded to the author's machine.
loc = (
    'C:\\Users\\james\\OneDrive\\Documents\\University\\2017-18 Southampton\\Data Mining\\Group Coursework\\Data'
    )
path = os.path.join(loc, 'Processed_Data\\new_books_data.csv')
books_data = pd.read_csv(path)
def split(string):
    """
    Function takes input of a string and returns an array of strings
    the original string should be comma separated with a space after
    the comma in order for this function to be accurate.
    """
    names = []
    index = 0
    last = 0
    for letter in string:
        if letter == ',' or index == len(string) - 1:
            if index == len(string) - 1:
                names.append(string[last:index + 1])
            else:
                names.append(string[last:index])
            # Skip the ", " separator (comma plus one space).
            last = index + 2
        index += 1
    return names
# Pass 1: collect each distinct author name exactly once.
unique_authors = []
count = 0
for name in books_data['authors']:
    if count % 1000 == 0:
        print(count)
    split_names = split(name)
    for author in split_names:
        if author in unique_authors:
            pass
        else:
            unique_authors.append(author)
    count += 1
# Pass 2: for every author, re-scan all rows for their book ids
# (O(authors x books) -- slow on large inputs).
authors_books = []
length = len(books_data.index)
count = 0
length_2 = len(unique_authors)
for author in unique_authors:
    if count % 100 == 0:
        print(str(count) + '/' + str(length_2))
    books = []
    for i in range(length):
        split_names = split(books_data['authors'][i])
        if author in split_names:
            books.append(books_data['goodreads_book_id'][i])
    authors_books.append(books)
    count += 1
# Persist the author -> book-id mapping.
d = {'author': unique_authors, 'book_id': authors_books}
books_by_author = pd.DataFrame(data=d)
write_path = os.path.join(loc, 'Processed_Data\\books_by_author.csv')
books_by_author.to_csv(write_path, index=False)
<|reserved_special_token_1|>
import pandas as pd
import os
<|reserved_special_token_0|>
# Build a CSV mapping each unique author to the list of Goodreads book
# ids they appear on, from Processed_Data\new_books_data.csv.
# Path is hard-coded to the author's machine.
loc = (
    'C:\\Users\\james\\OneDrive\\Documents\\University\\2017-18 Southampton\\Data Mining\\Group Coursework\\Data'
    )
path = os.path.join(loc, 'Processed_Data\\new_books_data.csv')
books_data = pd.read_csv(path)
def split(string):
    """
    Function takes input of a string and returns an array of strings
    the original string should be comma separated with a space after
    the comma in order for this function to be accurate.
    """
    names = []
    index = 0
    last = 0
    for letter in string:
        if letter == ',' or index == len(string) - 1:
            if index == len(string) - 1:
                names.append(string[last:index + 1])
            else:
                names.append(string[last:index])
            # Skip the ", " separator (comma plus one space).
            last = index + 2
        index += 1
    return names
# Pass 1: collect each distinct author name exactly once.
unique_authors = []
count = 0
for name in books_data['authors']:
    if count % 1000 == 0:
        print(count)
    split_names = split(name)
    for author in split_names:
        if author in unique_authors:
            pass
        else:
            unique_authors.append(author)
    count += 1
# Pass 2: for every author, re-scan all rows for their book ids
# (O(authors x books) -- slow on large inputs).
authors_books = []
length = len(books_data.index)
count = 0
length_2 = len(unique_authors)
for author in unique_authors:
    if count % 100 == 0:
        print(str(count) + '/' + str(length_2))
    books = []
    for i in range(length):
        split_names = split(books_data['authors'][i])
        if author in split_names:
            books.append(books_data['goodreads_book_id'][i])
    authors_books.append(books)
    count += 1
# Persist the author -> book-id mapping.
d = {'author': unique_authors, 'book_id': authors_books}
books_by_author = pd.DataFrame(data=d)
write_path = os.path.join(loc, 'Processed_Data\\books_by_author.csv')
books_by_author.to_csv(write_path, index=False)
<|reserved_special_token_1|>
import pandas as pd
import os
"""
This code relies heavily on the form of the data. Namely it will fail if
the authors of the same book are not comma separated. It will also be inaccurate
or even fail if the same author for different books is not spelt in exactly the
same way.
"""
# Original (pre-cleanup) revision of the books_by_author script, kept
# with its raw-string paths and commented-out Sample paths.
loc = r'C:\Users\james\OneDrive\Documents\University\2017-18 Southampton\Data Mining\Group Coursework\Data'
#path = os.path.join(loc, r'Sample\new_books_data.csv')
path = os.path.join(loc, r'Processed_Data\new_books_data.csv')
books_data = pd.read_csv(path)
def split(string):
    """
    Function takes input of a string and returns an array of strings
    the original string should be comma separated with a space after
    the comma in order for this function to be accurate.
    """
    names = []
    index = 0
    last = 0
    for letter in string:
        if ((letter == ',') or (index == (len(string) - 1))):
            if (index == (len(string) - 1)):
                names.append(string[last:(index+1)])
            else:
                names.append(string[last:index])
            # Skip the ", " separator (comma plus one space).
            last = index+2
        index += 1
    return names
# Pass 1: collect each distinct author name exactly once.
unique_authors = []
count = 0
for name in books_data['authors']:
    if (count%1000 == 0):
        print(count)
    split_names = split(name)
    for author in split_names:
        if (author in unique_authors):
            pass
        else:
            unique_authors.append(author)
    count += 1
# Pass 2: for every author, re-scan all rows for their book ids
# (O(authors x books) -- slow on large inputs).
authors_books = []
length = len(books_data.index)
count = 0
length_2 = len(unique_authors)
for author in unique_authors:
    if (count%100 == 0):
        print(str(count)+'/'+str(length_2))
    books = []
    for i in range(length):
        split_names = split(books_data['authors'][i])
        if (author in split_names):
            books.append(books_data['goodreads_book_id'][i])
    authors_books.append(books)
    count += 1
# Persist the author -> book-id mapping.
d = {'author': unique_authors, 'book_id': authors_books}
books_by_author = pd.DataFrame(data=d)
#write_path = os.path.join(loc, r'Sample\books_by_author.csv')
write_path = os.path.join(loc, r'Processed_Data\books_by_author.csv')
books_by_author.to_csv(write_path, index=False)
|
flexible
|
{
"blob_id": "f57490c8f4a5ba76824c3b41eb18905eb2213c23",
"index": 5107,
"step-1": "<mask token>\n\n\ndef split(string):\n \"\"\"\n Function takes input of a string and returns an array of strings\n the original string should be comma separated with a space after\n the comma in order for this function to be accurate.\n \"\"\"\n names = []\n index = 0\n last = 0\n for letter in string:\n if letter == ',' or index == len(string) - 1:\n if index == len(string) - 1:\n names.append(string[last:index + 1])\n else:\n names.append(string[last:index])\n last = index + 2\n index += 1\n return names\n\n\n<mask token>\n",
"step-2": "<mask token>\n\n\ndef split(string):\n \"\"\"\n Function takes input of a string and returns an array of strings\n the original string should be comma separated with a space after\n the comma in order for this function to be accurate.\n \"\"\"\n names = []\n index = 0\n last = 0\n for letter in string:\n if letter == ',' or index == len(string) - 1:\n if index == len(string) - 1:\n names.append(string[last:index + 1])\n else:\n names.append(string[last:index])\n last = index + 2\n index += 1\n return names\n\n\n<mask token>\nfor name in books_data['authors']:\n if count % 1000 == 0:\n print(count)\n split_names = split(name)\n for author in split_names:\n if author in unique_authors:\n pass\n else:\n unique_authors.append(author)\n count += 1\n<mask token>\nfor author in unique_authors:\n if count % 100 == 0:\n print(str(count) + '/' + str(length_2))\n books = []\n for i in range(length):\n split_names = split(books_data['authors'][i])\n if author in split_names:\n books.append(books_data['goodreads_book_id'][i])\n authors_books.append(books)\n count += 1\n<mask token>\nbooks_by_author.to_csv(write_path, index=False)\n",
"step-3": "<mask token>\nloc = (\n 'C:\\\\Users\\\\james\\\\OneDrive\\\\Documents\\\\University\\\\2017-18 Southampton\\\\Data Mining\\\\Group Coursework\\\\Data'\n )\npath = os.path.join(loc, 'Processed_Data\\\\new_books_data.csv')\nbooks_data = pd.read_csv(path)\n\n\ndef split(string):\n \"\"\"\n Function takes input of a string and returns an array of strings\n the original string should be comma separated with a space after\n the comma in order for this function to be accurate.\n \"\"\"\n names = []\n index = 0\n last = 0\n for letter in string:\n if letter == ',' or index == len(string) - 1:\n if index == len(string) - 1:\n names.append(string[last:index + 1])\n else:\n names.append(string[last:index])\n last = index + 2\n index += 1\n return names\n\n\nunique_authors = []\ncount = 0\nfor name in books_data['authors']:\n if count % 1000 == 0:\n print(count)\n split_names = split(name)\n for author in split_names:\n if author in unique_authors:\n pass\n else:\n unique_authors.append(author)\n count += 1\nauthors_books = []\nlength = len(books_data.index)\ncount = 0\nlength_2 = len(unique_authors)\nfor author in unique_authors:\n if count % 100 == 0:\n print(str(count) + '/' + str(length_2))\n books = []\n for i in range(length):\n split_names = split(books_data['authors'][i])\n if author in split_names:\n books.append(books_data['goodreads_book_id'][i])\n authors_books.append(books)\n count += 1\nd = {'author': unique_authors, 'book_id': authors_books}\nbooks_by_author = pd.DataFrame(data=d)\nwrite_path = os.path.join(loc, 'Processed_Data\\\\books_by_author.csv')\nbooks_by_author.to_csv(write_path, index=False)\n",
"step-4": "import pandas as pd\nimport os\n<mask token>\nloc = (\n 'C:\\\\Users\\\\james\\\\OneDrive\\\\Documents\\\\University\\\\2017-18 Southampton\\\\Data Mining\\\\Group Coursework\\\\Data'\n )\npath = os.path.join(loc, 'Processed_Data\\\\new_books_data.csv')\nbooks_data = pd.read_csv(path)\n\n\ndef split(string):\n \"\"\"\n Function takes input of a string and returns an array of strings\n the original string should be comma separated with a space after\n the comma in order for this function to be accurate.\n \"\"\"\n names = []\n index = 0\n last = 0\n for letter in string:\n if letter == ',' or index == len(string) - 1:\n if index == len(string) - 1:\n names.append(string[last:index + 1])\n else:\n names.append(string[last:index])\n last = index + 2\n index += 1\n return names\n\n\nunique_authors = []\ncount = 0\nfor name in books_data['authors']:\n if count % 1000 == 0:\n print(count)\n split_names = split(name)\n for author in split_names:\n if author in unique_authors:\n pass\n else:\n unique_authors.append(author)\n count += 1\nauthors_books = []\nlength = len(books_data.index)\ncount = 0\nlength_2 = len(unique_authors)\nfor author in unique_authors:\n if count % 100 == 0:\n print(str(count) + '/' + str(length_2))\n books = []\n for i in range(length):\n split_names = split(books_data['authors'][i])\n if author in split_names:\n books.append(books_data['goodreads_book_id'][i])\n authors_books.append(books)\n count += 1\nd = {'author': unique_authors, 'book_id': authors_books}\nbooks_by_author = pd.DataFrame(data=d)\nwrite_path = os.path.join(loc, 'Processed_Data\\\\books_by_author.csv')\nbooks_by_author.to_csv(write_path, index=False)\n",
"step-5": "import pandas as pd\nimport os\n\n\"\"\"\nThis code relies heavily on the form of the data. Namely it will fail if \nthe authors of the same book are not comma separated. It will also be inaccurate\nor even fail if the same author for different books is not spelt in exactly the\nsame way.\n\"\"\"\n\n\nloc = r'C:\\Users\\james\\OneDrive\\Documents\\University\\2017-18 Southampton\\Data Mining\\Group Coursework\\Data'\n \n#path = os.path.join(loc, r'Sample\\new_books_data.csv')\npath = os.path.join(loc, r'Processed_Data\\new_books_data.csv')\n\nbooks_data = pd.read_csv(path)\n\n\ndef split(string):\n \"\"\"\n Function takes input of a string and returns an array of strings\n the original string should be comma separated with a space after\n the comma in order for this function to be accurate.\n \"\"\"\n names = []\n index = 0\n last = 0\n for letter in string:\n if ((letter == ',') or (index == (len(string) - 1))):\n if (index == (len(string) - 1)):\n names.append(string[last:(index+1)])\n else:\n names.append(string[last:index])\n last = index+2\n index += 1\n return names\n\n\nunique_authors = []\ncount = 0\nfor name in books_data['authors']:\n if (count%1000 == 0):\n print(count)\n split_names = split(name)\n for author in split_names:\n if (author in unique_authors):\n pass\n else:\n unique_authors.append(author)\n count += 1\n\nauthors_books = []\nlength = len(books_data.index)\n\ncount = 0\nlength_2 = len(unique_authors)\nfor author in unique_authors:\n if (count%100 == 0):\n print(str(count)+'/'+str(length_2))\n books = []\n for i in range(length):\n split_names = split(books_data['authors'][i])\n if (author in split_names):\n books.append(books_data['goodreads_book_id'][i])\n authors_books.append(books)\n count += 1\n\nd = {'author': unique_authors, 'book_id': authors_books}\nbooks_by_author = pd.DataFrame(data=d)\n\n#write_path = os.path.join(loc, r'Sample\\books_by_author.csv')\nwrite_path = os.path.join(loc, 
r'Processed_Data\\books_by_author.csv')\nbooks_by_author.to_csv(write_path, index=False)\n\n\n\n",
"step-ids": [
1,
2,
3,
4,
5
]
}
|
[
1,
2,
3,
4,
5
] |
<|reserved_special_token_0|>
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# Register the Blog model with the Django admin using the default ModelAdmin.
admin.site.register(Blog)
<|reserved_special_token_1|>
from django.contrib import admin
from pages.blog.models import Blog
# Register the Blog model with the Django admin using the default ModelAdmin.
admin.site.register(Blog)
|
flexible
|
{
"blob_id": "534aaf8371707089522af014a93f3ff6c4f913ff",
"index": 8510,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nadmin.site.register(Blog)\n",
"step-3": "from django.contrib import admin\nfrom pages.blog.models import Blog\nadmin.site.register(Blog)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
# Midterm Review Class!
'''
This is a Multi line comment:
'''
# Break and Continue
# for i in range(10):
# if i == 5:
# continue
# print(i)
# Prints 0-4, 6-9
# # Structure
# Some MCQ
# Some T/F
# Some short answer
# # Lists
# Append
# remove
# del
# sort
# #Strings
# replace
# join
# split
# upper
# lower
# # Variables as conditions:
#
# a = 8
# if a:
# print("This evaluated to true")
# else:
# print("False")
#
# # General rule: Any variable that is either 0 or empty (empty string "" or []) will evaluate to false
# # Anything else will evaluate to true
# # Immutables vs. Mutables
# # Lists are mutable
#
# words = ["burrito", "pad thai", "hot dogs"]
# print(words)
# words[0] = "pizza"
# print(words)
#
# # Strings are not
#
# pet = "Mocha"
# print(pet)
# print(pet[1])
# # pet [1] = "a" # CANNOT DO THIS
# pet = pet.replace("o", "a")
# print(pet)
# Functions
# Write a function called countWords
# Count how many times a word appears in a list
# Input: list of strings, and a string to search for
# Output: an integer representing how many times the searched word was found
# def countWords(wordList, searchWord):
# counter = 0
# for word in wordList:
# if word == searchWord:
# counter += 1
#
# return counter
#
# # Now write a main function to use the countWords function to create a list of words and use countWords
# # to figure out how many times word is in wordList
#
# def main():
# vegetables = ["celery", "carrot", "celery", "spinach", "celery", "kale"]
# veggieWord1 = "celery"
#
# veggieCount1 = countWords(vegetables, veggieWord1)
# print(veggieWord1,"appears",veggieCount1,"times.")
#
#
#
# main()
# # Delimiters
# # Character that separates elements
# msg = "hi, brandon, is, not, feeling, well"
# wordList = msg.split(",")
# for item in wordList:
# print(item, end="")
# print()
# print(wordList)
#
# newMsg = " ".join(wordList)
# print(newMsg)
# File Reading/Writing
# def main():
# fileIn = open("cities.txt", "r")
# for line in fileIn:
# line = line.strip()
# print(line)
#
#
# main()
# Prints 10, 20, ..., 100: loop values 0..90 in steps of 10, plus 10.
for i in range(0,100,10):
    print(i + 10)
|
normal
|
{
"blob_id": "3d3b77630d275f830daf9f6e0d50a77ef624521e",
"index": 7139,
"step-1": "<mask token>\n",
"step-2": "<mask token>\nfor i in range(0, 100, 10):\n print(i + 10)\n",
"step-3": "# Midterm Review Class!\n\n'''\nThis is a Multi line comment:\n'''\n\n# Break and Continue\n # for i in range(10):\n # if i == 5:\n # continue\n # print(i)\n # Prints 0-4, 6-9\n\n# # Structure\n # Some MCQ\n # Some T/F\n # Some short answer\n\n# # Lists\n# Append\n# remove\n# del\n# sort\n# #Strings\n# replace\n# join\n# split\n# upper\n# lower\n\n\n\n\n# # Variables as conditions:\n#\n# a = 8\n# if a:\n# print(\"This evaluted to true\")\n# else:\n# print(\"False\")\n#\n# # General rule: Any variable that is either 0 or empty (empty string \"\" or []) will evaluate to false\n# # Anything else will evaluate to true\n\n\n\n\n# # Immutables vs. Mutables\n# # Lists are mutable\n#\n# words = [\"burrito\", \"pad thai\", \"hot dogs\"]\n# print(words)\n# words[0] = \"pizza\"\n# print(words)\n#\n# # Strings are not\n#\n# pet = \"Mocha\"\n# print(pet)\n# print(pet[1])\n# # pet [1] = \"a\" # CANNOT DO THIS\n# pet = pet.replace(\"o\", \"a\")\n# print(pet)\n\n\n\n\n# Functions\n # Write a function called countWords\n # Count how many times a word appears in a list\n # Input: list of strings, and a string to search for\n # Output: an integer representing how many times the searched word was found\n\n# def countWords(wordList, searchWord):\n# counter = 0\n# for word in wordList:\n# if word == searchWord:\n# counter += 1\n#\n# return counter\n#\n# # Now write a main function to use the countWords function to create a list of words and use countWords\n# # to figure out how many times word is in wordList\n#\n# def main():\n# vegetables = [\"celery\", \"carrot\", \"celery\", \"spinach\", \"celery\", \"kale\"]\n# veggieWord1 = \"celery\"\n#\n# veggieCount1 = countWords(vegetables, veggieWord1)\n# print(veggieWord1,\"appears\",veggieCount1,\"times.\")\n#\n#\n#\n# main()\n\n\n\n# # Delimiters\n# # Character that separates elements\n# msg = \"hi, brandon, is, not, feeling, well\"\n# wordList = msg.split(\",\")\n# for item in wordList:\n# print(item, end=\"\")\n# print()\n# 
print(wordList)\n#\n# newMsg = \" \".join(wordList)\n# print(newMsg)\n\n\n# File Reading/Writing\n# def main():\n# fileIn = open(\"cities.txt\", \"r\")\n# for line in fileIn:\n# line = line.strip()\n# print(line)\n#\n#\n# main()\n\nfor i in range(0,100,10):\n print(i + 10)\n",
"step-4": null,
"step-5": null,
"step-ids": [
0,
1,
2
]
}
|
[
0,
1,
2
] |
<|reserved_special_token_0|>
# Test suite for a compact `json(value)` serializer (four revisions of
# the same suite, separated by dataset mask/separator tokens).  The
# expected strings pin the serializer's contract: no whitespace, object
# keys emitted in sorted order, non-string keys coerced to strings, and
# TypeError raised for unserializable objects.
# NOTE(review): `json` and the nose-style `assert_equals` /
# `assert_raises` helpers are defined outside this fragment.
def test_json_float():
    assert_equals(json(1.234), '1.234')
def test_json_array():
    data = [1, 2, 3]
    assert_equals(json(data), '[1,2,3]')
def test_json_array02():
    data = ['bla', 1, 1.2]
    assert_equals(json(data), '["bla",1,1.2]')
def test_json_dict():
    data = {'foo': 'bar'}
    assert_equals(json(data), '{"foo":"bar"}')
<|reserved_special_token_0|>
def test_json_dict_int_key():
    # Integer keys serialize as strings, matching JSON's object rules.
    data = {(1): [1, 2, 3]}
    assert_equals(json(data), '{"1":[1,2,3]}')
def test_json_dictindict():
    data = {'foo': {'fizz': 'buzz'}}
    assert_equals(json(data), '{"foo":{"fizz":"buzz"}}')
def test_json_2_dict():
    # Keys come out sorted ("bar" before "foo"), not insertion order.
    data = {'foo': 'fizz', 'bar': 'buzz'}
    assert_equals(json(data), '{"bar":"buzz","foo":"fizz"}')
<|reserved_special_token_0|>
def test_list_with_empty_dict():
    data = [{}]
    assert_equals(json(data), '[{}]')
<|reserved_special_token_0|>
def test_object():
    # Arbitrary objects are not serializable.
    def closure():
        json(object())
    assert_raises(TypeError, closure)
<|reserved_special_token_0|>
def test_object_in_dict():
    def closure():
        json({'a': object()})
    assert_raises(TypeError, closure)
def test_object_class():
    def closure():
        json(object)
    assert_raises(TypeError, closure)
def test_escape():
    # Double quotes inside strings are backslash-escaped.
    assert_equals(json('"'), '"\\""')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# --- revision 2 of the suite (adds the basestring test) ---
def test_json_basestring():
    assert_equals(json('Hello World'), '"Hello World"')
<|reserved_special_token_0|>
def test_json_float():
    assert_equals(json(1.234), '1.234')
def test_json_array():
    data = [1, 2, 3]
    assert_equals(json(data), '[1,2,3]')
def test_json_array02():
    data = ['bla', 1, 1.2]
    assert_equals(json(data), '["bla",1,1.2]')
def test_json_dict():
    data = {'foo': 'bar'}
    assert_equals(json(data), '{"foo":"bar"}')
<|reserved_special_token_0|>
def test_json_dict_int_key():
    data = {(1): [1, 2, 3]}
    assert_equals(json(data), '{"1":[1,2,3]}')
def test_json_dictindict():
    data = {'foo': {'fizz': 'buzz'}}
    assert_equals(json(data), '{"foo":{"fizz":"buzz"}}')
def test_json_2_dict():
    data = {'foo': 'fizz', 'bar': 'buzz'}
    assert_equals(json(data), '{"bar":"buzz","foo":"fizz"}')
<|reserved_special_token_0|>
def test_list_with_empty_dict():
    data = [{}]
    assert_equals(json(data), '[{}]')
<|reserved_special_token_0|>
def test_object():
    def closure():
        json(object())
    assert_raises(TypeError, closure)
<|reserved_special_token_0|>
def test_object_in_dict():
    def closure():
        json({'a': object()})
    assert_raises(TypeError, closure)
def test_object_class():
    def closure():
        json(object)
    assert_raises(TypeError, closure)
def test_escape():
    assert_equals(json('"'), '"\\""')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# --- revision 3 (adds dict_list, 2_dict_2, empty_dict, rangie2,
# none and bool tests) ---
def test_json_basestring():
    assert_equals(json('Hello World'), '"Hello World"')
<|reserved_special_token_0|>
def test_json_float():
    assert_equals(json(1.234), '1.234')
def test_json_array():
    data = [1, 2, 3]
    assert_equals(json(data), '[1,2,3]')
def test_json_array02():
    data = ['bla', 1, 1.2]
    assert_equals(json(data), '["bla",1,1.2]')
def test_json_dict():
    data = {'foo': 'bar'}
    assert_equals(json(data), '{"foo":"bar"}')
def test_json_dict_list():
    data = {'foo': [1, 2, 3]}
    assert_equals(json(data), '{"foo":[1,2,3]}')
def test_json_dict_int_key():
    data = {(1): [1, 2, 3]}
    assert_equals(json(data), '{"1":[1,2,3]}')
def test_json_dictindict():
    data = {'foo': {'fizz': 'buzz'}}
    assert_equals(json(data), '{"foo":{"fizz":"buzz"}}')
def test_json_2_dict():
    data = {'foo': 'fizz', 'bar': 'buzz'}
    assert_equals(json(data), '{"bar":"buzz","foo":"fizz"}')
def test_json_2_dict_2():
    data = {'foo': 'fizz', 'bar': 'buzz', 'a': [1, 2, 3]}
    assert_equals(json(data), '{"a":[1,2,3],"bar":"buzz","foo":"fizz"}')
<|reserved_special_token_0|>
def test_empty_dict():
    data = {}
    assert_equals(json(data), '{}')
def test_list_with_empty_dict():
    data = [{}]
    assert_equals(json(data), '[{}]')
def test_rangie2():
    # Empty string is a legal key; zero is a legal value.
    data = {'': 0}
    assert_equals(json(data), '{"":0}')
def test_none():
    assert_equals(json(None), 'null')
def test_object():
    def closure():
        json(object())
    assert_raises(TypeError, closure)
def test_bool():
    assert_equals(json(True), 'true')
<|reserved_special_token_0|>
def test_object_in_dict():
    def closure():
        json({'a': object()})
    assert_raises(TypeError, closure)
def test_object_class():
    def closure():
        json(object)
    assert_raises(TypeError, closure)
def test_escape():
    assert_equals(json('"'), '"\\""')
<|reserved_special_token_1|>
<|reserved_special_token_0|>
# --- revision 4 (most complete: adds integer and empty_list tests;
# the trailing test_object_in_array is truncated in this fragment) ---
def test_json_basestring():
    assert_equals(json('Hello World'), '"Hello World"')
def test_json_integer():
    assert_equals(json(9), '9')
def test_json_float():
    assert_equals(json(1.234), '1.234')
def test_json_array():
    data = [1, 2, 3]
    assert_equals(json(data), '[1,2,3]')
def test_json_array02():
    data = ['bla', 1, 1.2]
    assert_equals(json(data), '["bla",1,1.2]')
def test_json_dict():
    data = {'foo': 'bar'}
    assert_equals(json(data), '{"foo":"bar"}')
def test_json_dict_list():
    data = {'foo': [1, 2, 3]}
    assert_equals(json(data), '{"foo":[1,2,3]}')
def test_json_dict_int_key():
    data = {(1): [1, 2, 3]}
    assert_equals(json(data), '{"1":[1,2,3]}')
def test_json_dictindict():
    data = {'foo': {'fizz': 'buzz'}}
    assert_equals(json(data), '{"foo":{"fizz":"buzz"}}')
def test_json_2_dict():
    data = {'foo': 'fizz', 'bar': 'buzz'}
    assert_equals(json(data), '{"bar":"buzz","foo":"fizz"}')
def test_json_2_dict_2():
    data = {'foo': 'fizz', 'bar': 'buzz', 'a': [1, 2, 3]}
    assert_equals(json(data), '{"a":[1,2,3],"bar":"buzz","foo":"fizz"}')
def test_empty_list():
    data = []
    assert_equals(json(data), '[]')
def test_empty_dict():
    data = {}
    assert_equals(json(data), '{}')
def test_list_with_empty_dict():
    data = [{}]
    assert_equals(json(data), '[{}]')
def test_rangie2():
    data = {'': 0}
    assert_equals(json(data), '{"":0}')
def test_none():
    assert_equals(json(None), 'null')
def test_object():
    def closure():
        json(object())
    assert_raises(TypeError, closure)
def test_bool():
    assert_equals(json(True), 'true')
def test_object_in_array():
def closure():
json([object()])
assert_raises(TypeError, closure)
def test_object_in_dict():
def closure():
json({'a': object()})
assert_raises(TypeError, closure)
def test_object_class():
def closure():
json(object)
assert_raises(TypeError, closure)
def test_escape():
assert_equals(json('"'), '"\\""')
<|reserved_special_token_1|>
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from nose.tools import *
from json import json
def test_json_basestring():
    # A plain string serializes to a double-quoted JSON string.
    expected = '"Hello World"'
    assert_equals(json("Hello World"), expected)
def test_json_integer():
    # Integers serialize to their bare decimal representation.
    result = json(9)
    assert_equals(result, "9")
def test_json_float():
    # Floats keep their plain decimal form.
    result = json(1.234)
    assert_equals(result, "1.234")
def test_json_array():
    # Lists become JSON arrays with no spaces after the commas.
    assert_equals(json([1, 2, 3]), '[1,2,3]')
def test_json_array02():
    # Mixed-type lists serialize each element by its own rule.
    assert_equals(json(['bla', 1, 1.2]), '["bla",1,1.2]')
def test_json_dict():
    # Dicts become JSON objects with double-quoted keys.
    assert_equals(json({'foo': 'bar'}), '{"foo":"bar"}')
def test_json_dict_list():
    # A list value nests as a JSON array inside the object.
    assert_equals(json({'foo': [1, 2, 3]}), '{"foo":[1,2,3]}')
def test_json_dict_int_key():
    # Non-string keys are coerced to strings in the output.
    payload = {1: [1, 2, 3]}
    assert_equals(json(payload), '{"1":[1,2,3]}')
def test_json_dictindict():
    # Nested dicts serialize as nested JSON objects.
    inner = {'fizz': 'buzz'}
    assert_equals(json({'foo': inner}), '{"foo":{"fizz":"buzz"}}')
def test_json_2_dict():
    # Keys come out in sorted order, not insertion order.
    payload = {'foo': 'fizz', 'bar': 'buzz'}
    assert_equals(json(payload), '{"bar":"buzz","foo":"fizz"}')
def test_json_2_dict_2():
    # Sorted-key ordering still holds with three keys and a nested list.
    payload = {'foo': 'fizz', 'bar': 'buzz', 'a': [1, 2, 3]}
    assert_equals(json(payload), '{"a":[1,2,3],"bar":"buzz","foo":"fizz"}')
def test_empty_list():
    # An empty list serializes to a bare bracket pair.
    assert_equals(json([]), "[]")
def test_empty_dict():
    # An empty dict serializes to a bare brace pair.
    assert_equals(json({}), "{}")
def test_list_with_empty_dict():
    # An empty object nests correctly inside an array.
    assert_equals(json([{}]), "[{}]")
def test_rangie2():
    # The empty string is a legal object key.
    assert_equals(json({"": 0}), '{"":0}')
def test_none():
    # None maps to the JSON literal null.
    encoded = json(None)
    assert_equals(encoded, "null")
def test_object():
    # Arbitrary object instances are unserializable and must raise.
    # assert_raises forwards the extra args to the callable, so no
    # wrapper closure is needed.
    assert_raises(TypeError, json, object())
def test_bool():
    # True maps to the lowercase JSON literal.
    encoded = json(True)
    assert_equals(encoded, 'true')
def test_object_in_array():
    # An unserializable element inside an array raises TypeError.
    assert_raises(TypeError, json, [object()])
def test_object_in_dict():
    # An unserializable value inside an object raises TypeError.
    assert_raises(TypeError, json, {'a': object()})
def test_object_class():
    # Class objects themselves are not serializable either.
    assert_raises(TypeError, json, object)
def test_escape():
    # A double quote inside a string is backslash-escaped.
    encoded = json('"')
    assert_equals(encoded, '"\\""')
|
flexible
|
{
"blob_id": "09ce2aeccfd1f3f4f130fd79001db47485cc95c2",
"index": 9891,
"step-1": "<mask token>\n\n\ndef test_json_float():\n assert_equals(json(1.234), '1.234')\n\n\ndef test_json_array():\n data = [1, 2, 3]\n assert_equals(json(data), '[1,2,3]')\n\n\ndef test_json_array02():\n data = ['bla', 1, 1.2]\n assert_equals(json(data), '[\"bla\",1,1.2]')\n\n\ndef test_json_dict():\n data = {'foo': 'bar'}\n assert_equals(json(data), '{\"foo\":\"bar\"}')\n\n\n<mask token>\n\n\ndef test_json_dict_int_key():\n data = {(1): [1, 2, 3]}\n assert_equals(json(data), '{\"1\":[1,2,3]}')\n\n\ndef test_json_dictindict():\n data = {'foo': {'fizz': 'buzz'}}\n assert_equals(json(data), '{\"foo\":{\"fizz\":\"buzz\"}}')\n\n\ndef test_json_2_dict():\n data = {'foo': 'fizz', 'bar': 'buzz'}\n assert_equals(json(data), '{\"bar\":\"buzz\",\"foo\":\"fizz\"}')\n\n\n<mask token>\n\n\ndef test_list_with_empty_dict():\n data = [{}]\n assert_equals(json(data), '[{}]')\n\n\n<mask token>\n\n\ndef test_object():\n\n def closure():\n json(object())\n assert_raises(TypeError, closure)\n\n\n<mask token>\n\n\ndef test_object_in_dict():\n\n def closure():\n json({'a': object()})\n assert_raises(TypeError, closure)\n\n\ndef test_object_class():\n\n def closure():\n json(object)\n assert_raises(TypeError, closure)\n\n\ndef test_escape():\n assert_equals(json('\"'), '\"\\\\\"\"')\n",
"step-2": "<mask token>\n\n\ndef test_json_basestring():\n assert_equals(json('Hello World'), '\"Hello World\"')\n\n\n<mask token>\n\n\ndef test_json_float():\n assert_equals(json(1.234), '1.234')\n\n\ndef test_json_array():\n data = [1, 2, 3]\n assert_equals(json(data), '[1,2,3]')\n\n\ndef test_json_array02():\n data = ['bla', 1, 1.2]\n assert_equals(json(data), '[\"bla\",1,1.2]')\n\n\ndef test_json_dict():\n data = {'foo': 'bar'}\n assert_equals(json(data), '{\"foo\":\"bar\"}')\n\n\n<mask token>\n\n\ndef test_json_dict_int_key():\n data = {(1): [1, 2, 3]}\n assert_equals(json(data), '{\"1\":[1,2,3]}')\n\n\ndef test_json_dictindict():\n data = {'foo': {'fizz': 'buzz'}}\n assert_equals(json(data), '{\"foo\":{\"fizz\":\"buzz\"}}')\n\n\ndef test_json_2_dict():\n data = {'foo': 'fizz', 'bar': 'buzz'}\n assert_equals(json(data), '{\"bar\":\"buzz\",\"foo\":\"fizz\"}')\n\n\n<mask token>\n\n\ndef test_list_with_empty_dict():\n data = [{}]\n assert_equals(json(data), '[{}]')\n\n\n<mask token>\n\n\ndef test_object():\n\n def closure():\n json(object())\n assert_raises(TypeError, closure)\n\n\n<mask token>\n\n\ndef test_object_in_dict():\n\n def closure():\n json({'a': object()})\n assert_raises(TypeError, closure)\n\n\ndef test_object_class():\n\n def closure():\n json(object)\n assert_raises(TypeError, closure)\n\n\ndef test_escape():\n assert_equals(json('\"'), '\"\\\\\"\"')\n",
"step-3": "<mask token>\n\n\ndef test_json_basestring():\n assert_equals(json('Hello World'), '\"Hello World\"')\n\n\n<mask token>\n\n\ndef test_json_float():\n assert_equals(json(1.234), '1.234')\n\n\ndef test_json_array():\n data = [1, 2, 3]\n assert_equals(json(data), '[1,2,3]')\n\n\ndef test_json_array02():\n data = ['bla', 1, 1.2]\n assert_equals(json(data), '[\"bla\",1,1.2]')\n\n\ndef test_json_dict():\n data = {'foo': 'bar'}\n assert_equals(json(data), '{\"foo\":\"bar\"}')\n\n\ndef test_json_dict_list():\n data = {'foo': [1, 2, 3]}\n assert_equals(json(data), '{\"foo\":[1,2,3]}')\n\n\ndef test_json_dict_int_key():\n data = {(1): [1, 2, 3]}\n assert_equals(json(data), '{\"1\":[1,2,3]}')\n\n\ndef test_json_dictindict():\n data = {'foo': {'fizz': 'buzz'}}\n assert_equals(json(data), '{\"foo\":{\"fizz\":\"buzz\"}}')\n\n\ndef test_json_2_dict():\n data = {'foo': 'fizz', 'bar': 'buzz'}\n assert_equals(json(data), '{\"bar\":\"buzz\",\"foo\":\"fizz\"}')\n\n\ndef test_json_2_dict_2():\n data = {'foo': 'fizz', 'bar': 'buzz', 'a': [1, 2, 3]}\n assert_equals(json(data), '{\"a\":[1,2,3],\"bar\":\"buzz\",\"foo\":\"fizz\"}')\n\n\n<mask token>\n\n\ndef test_empty_dict():\n data = {}\n assert_equals(json(data), '{}')\n\n\ndef test_list_with_empty_dict():\n data = [{}]\n assert_equals(json(data), '[{}]')\n\n\ndef test_rangie2():\n data = {'': 0}\n assert_equals(json(data), '{\"\":0}')\n\n\ndef test_none():\n assert_equals(json(None), 'null')\n\n\ndef test_object():\n\n def closure():\n json(object())\n assert_raises(TypeError, closure)\n\n\ndef test_bool():\n assert_equals(json(True), 'true')\n\n\n<mask token>\n\n\ndef test_object_in_dict():\n\n def closure():\n json({'a': object()})\n assert_raises(TypeError, closure)\n\n\ndef test_object_class():\n\n def closure():\n json(object)\n assert_raises(TypeError, closure)\n\n\ndef test_escape():\n assert_equals(json('\"'), '\"\\\\\"\"')\n",
"step-4": "<mask token>\n\n\ndef test_json_basestring():\n assert_equals(json('Hello World'), '\"Hello World\"')\n\n\ndef test_json_integer():\n assert_equals(json(9), '9')\n\n\ndef test_json_float():\n assert_equals(json(1.234), '1.234')\n\n\ndef test_json_array():\n data = [1, 2, 3]\n assert_equals(json(data), '[1,2,3]')\n\n\ndef test_json_array02():\n data = ['bla', 1, 1.2]\n assert_equals(json(data), '[\"bla\",1,1.2]')\n\n\ndef test_json_dict():\n data = {'foo': 'bar'}\n assert_equals(json(data), '{\"foo\":\"bar\"}')\n\n\ndef test_json_dict_list():\n data = {'foo': [1, 2, 3]}\n assert_equals(json(data), '{\"foo\":[1,2,3]}')\n\n\ndef test_json_dict_int_key():\n data = {(1): [1, 2, 3]}\n assert_equals(json(data), '{\"1\":[1,2,3]}')\n\n\ndef test_json_dictindict():\n data = {'foo': {'fizz': 'buzz'}}\n assert_equals(json(data), '{\"foo\":{\"fizz\":\"buzz\"}}')\n\n\ndef test_json_2_dict():\n data = {'foo': 'fizz', 'bar': 'buzz'}\n assert_equals(json(data), '{\"bar\":\"buzz\",\"foo\":\"fizz\"}')\n\n\ndef test_json_2_dict_2():\n data = {'foo': 'fizz', 'bar': 'buzz', 'a': [1, 2, 3]}\n assert_equals(json(data), '{\"a\":[1,2,3],\"bar\":\"buzz\",\"foo\":\"fizz\"}')\n\n\ndef test_empty_list():\n data = []\n assert_equals(json(data), '[]')\n\n\ndef test_empty_dict():\n data = {}\n assert_equals(json(data), '{}')\n\n\ndef test_list_with_empty_dict():\n data = [{}]\n assert_equals(json(data), '[{}]')\n\n\ndef test_rangie2():\n data = {'': 0}\n assert_equals(json(data), '{\"\":0}')\n\n\ndef test_none():\n assert_equals(json(None), 'null')\n\n\ndef test_object():\n\n def closure():\n json(object())\n assert_raises(TypeError, closure)\n\n\ndef test_bool():\n assert_equals(json(True), 'true')\n\n\ndef test_object_in_array():\n\n def closure():\n json([object()])\n assert_raises(TypeError, closure)\n\n\ndef test_object_in_dict():\n\n def closure():\n json({'a': object()})\n assert_raises(TypeError, closure)\n\n\ndef test_object_class():\n\n def closure():\n json(object)\n 
assert_raises(TypeError, closure)\n\n\ndef test_escape():\n assert_equals(json('\"'), '\"\\\\\"\"')\n",
"step-5": "#!/usr/bin/env python\n# -*- coding: utf-8 -*-\n\nfrom nose.tools import *\n\nfrom json import json\n\ndef test_json_basestring():\n assert_equals(json(\"Hello World\"), '\"Hello World\"')\n\ndef test_json_integer():\n assert_equals(json(9), \"9\")\n\ndef test_json_float():\n assert_equals(json(1.234), \"1.234\")\n \ndef test_json_array():\n data = [1, 2, 3]\n assert_equals(json(data), '[1,2,3]')\n\ndef test_json_array02():\n data = ['bla', 1, 1.2]\n assert_equals(json(data), '[\"bla\",1,1.2]')\n\ndef test_json_dict():\n data = { 'foo': 'bar' }\n assert_equals(json(data), '{\"foo\":\"bar\"}')\n \ndef test_json_dict_list():\n data = { 'foo': [1, 2, 3] }\n assert_equals(json(data), '{\"foo\":[1,2,3]}')\n \ndef test_json_dict_int_key():\n data = {1:[1, 2, 3] }\n assert_equals(json(data), '{\"1\":[1,2,3]}')\n \ndef test_json_dictindict():\n data = { 'foo': {'fizz' : 'buzz'} }\n assert_equals(json(data), '{\"foo\":{\"fizz\":\"buzz\"}}')\n\ndef test_json_2_dict():\n data = { 'foo': 'fizz', 'bar' : 'buzz'}\n assert_equals(json(data), '{\"bar\":\"buzz\",\"foo\":\"fizz\"}')\n\ndef test_json_2_dict_2():\n data = { 'foo': 'fizz', 'bar' : 'buzz', 'a': [1, 2, 3]}\n assert_equals(json(data), '{\"a\":[1,2,3],\"bar\":\"buzz\",\"foo\":\"fizz\"}')\n\ndef test_empty_list():\n data = []\n assert_equals(json(data), \"[]\")\n \ndef test_empty_dict():\n data = {}\n assert_equals(json(data), \"{}\")\n \ndef test_list_with_empty_dict():\n data = [{}]\n assert_equals(json(data), \"[{}]\")\n \ndef test_rangie2():\n data = {\"\": 0}\n assert_equals(json(data), '{\"\":0}')\n \ndef test_none():\n assert_equals(json(None), \"null\")\n\ndef test_object():\n def closure():\n json(object())\n assert_raises(TypeError, closure)\n \ndef test_bool():\n assert_equals(json(True), 'true')\n \ndef test_object_in_array():\n def closure():\n json([object()])\n assert_raises(TypeError, closure)\n\ndef test_object_in_dict():\n def closure():\n json({'a': object()})\n assert_raises(TypeError, 
closure)\n \ndef test_object_class():\n def closure():\n json(object)\n assert_raises(TypeError, closure)\n\ndef test_escape():\n assert_equals(json('\"') , '\"\\\\\"\"')\n \n",
"step-ids": [
12,
13,
19,
22,
24
]
}
|
[
12,
13,
19,
22,
24
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.