max_stars_repo_path stringlengths 4 286 | max_stars_repo_name stringlengths 5 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.03M | content_cleaned stringlengths 6 1.03M | language stringclasses 111 values | language_score float64 0.03 1 | comments stringlengths 0 556k | edu_score float64 0.32 5.03 | edu_int_score int64 0 5 |
|---|---|---|---|---|---|---|---|---|---|---|
binarysearch/numOfBits.py | Ry4nW/python-wars | 1 | 6615851 | <gh_stars>1-10
class Solution:
    def solve(self, n):
        """Return the number of set bits ('1' digits) in bin(n).

        Equivalent to the original character-scanning loop for every int,
        including negatives: str.count('1') never matches '-', '0' or 'b'.
        """
        return bin(n).count('1')
| class Solution:
def solve(self, n):
bitCount = 0
intInBinary = bin(n)
for i in range(2, len(intInBinary)):
if intInBinary[i] == '1':
bitCount += 1
return bitCount | none | 1 | 3.036272 | 3 | |
src/bot.py | IlyaEp/MatchCoverAesthetics | 0 | 6615852 | import telebot
import spotipy
from typing import List, Dict
from src.model.api import MatchCoverAPI
import base64
import requests
import io
from PIL import Image
import credentials as cred
class Answer:
    """
    Per-chat conversation state for one Telegram user.
    """

    def __init__(self, file_path):
        # Telegram file path of the picture the user uploaded.
        self.file_path = file_path
        # True -> rank playlist covers; set False when the user picks albums.
        self.use_playlists = True
TOKEN = cred.TELEGRAM_TOKEN
BOT = telebot.TeleBot(TOKEN)
# Two rankers over the same ViT backbone; they load different weight files
# (playlists.pt / albums.pt) in the __main__ block below.
PLAYLIST_MODEL = MatchCoverAPI("facebook/deit-tiny-distilled-patch16-224")
ALBUM_MODEL = MatchCoverAPI("facebook/deit-tiny-distilled-patch16-224")
# chat.id -> Answer holding the in-progress conversation state.
USERS: Dict[int, Answer] = {}
def get_playlist_link(track_ids: List[str], image_url: str) -> str:
    """
    Create a public Spotify playlist with the given tracks, upload the
    user's picture as its cover, and return a share link.

    :param track_ids: ID tracks in spotify
    :param image_url: link to picture for download
    :return: link to playlist
    """
    scope = "playlist-modify-public ugc-image-upload"
    bot_id = cred.SPOTIFY_BOT_ID
    client_id = cred.SPOTIFY_CLIENT_ID
    client_secret = cred.SPOTIFY_CLIENT_SECRET
    auth_manager = spotipy.SpotifyOAuth(
        client_id=client_id, client_secret=client_secret, scope=scope, redirect_uri="http://localhost:8888/callback/"
    )
    sp = spotipy.Spotify(auth_manager=auth_manager)
    playlist_name = "Playlist for you"
    playlist = sp.user_playlist_create(bot_id, public=True, name=playlist_name)
    playlist_id = playlist["id"]
    sp.playlist_add_items(playlist_id, track_ids)
    # prepare cover: downscale and re-encode as base64 JPEG
    # (presumably to fit Spotify's cover-upload size limit -- TODO confirm).
    img = Image.open(requests.get(image_url, stream=True).raw)
    img.thumbnail((256, 256))
    img_byte_arr = io.BytesIO()
    img.save(img_byte_arr, format="JPEG")
    img_byte_arr = img_byte_arr.getvalue()
    coded_img = base64.b64encode(img_byte_arr)
    sp.playlist_upload_cover_image(playlist_id, coded_img)
    return playlist["external_urls"]["spotify"]
def get_picture(message):
    """
    Conversation step 1: receive the user's picture and ask which cover
    set (albums or playlists) to search against.
    """
    if message.photo is None:
        # Re-prompt until the message actually contains a photo.
        msg = BOT.reply_to(message, "Мне нужна картинка 🥺")
        BOT.register_next_step_handler(msg, get_picture)
        return
    # photo[-1] is the last size variant -- presumably the largest one
    # Telegram offers; verify against the Bot API docs.
    photo = BOT.get_file(message.photo[-1].file_id)
    USERS[message.chat.id] = Answer(photo.file_path)
    markup = telebot.types.ReplyKeyboardMarkup(one_time_keyboard=True)
    markup.add("Альбом", "Плейлист")
    msg = BOT.reply_to(
        message, "Отлично! А откуда мне смотреть картинки, из альбомов или из плейлистов?", reply_markup=markup
    )
    BOT.register_next_step_handler(msg, get_type_model)
def get_type_model(message):
    """
    Conversation step 2: record the album/playlist choice, then ask for the
    desired number of songs.

    NOTE(review): assumes get_picture already stored USERS[message.chat.id];
    a user entering this step out of order would raise KeyError -- confirm.
    """
    if message.text != "Альбом" and message.text != "Плейлист":
        # Unrecognised answer: re-show the keyboard and repeat this step.
        markup = telebot.types.ReplyKeyboardMarkup(one_time_keyboard=True)
        markup.add("Альбом", "Плейлист")
        msg = BOT.reply_to(
            message,
            "Я не знаю такое🙁\nОткуда мне смотреть картинки, из альбомов или из плейлистов?",
            reply_markup=markup,
        )
        BOT.register_next_step_handler(msg, get_type_model)
        return
    if message.text == "Альбом":
        USERS[message.chat.id].use_playlists = False
    markup = telebot.types.ReplyKeyboardRemove(selective=True)
    msg = BOT.reply_to(message, "Отлично! Теперь скажи, сколько песен ты хочешь?", reply_markup=markup)
    BOT.register_next_step_handler(msg, get_number_of_songs)
@BOT.message_handler(commands=["start"])
def start(message):
    """
    Handle /start: greet the user and wait for a picture (step 1).
    """
    msg = BOT.reply_to(
        message, "Привет ✨\nОтправь картинку, а я сделаю для тебя плейлист с подходящей по настроению музыкой 🧙"
    )
    BOT.register_next_step_handler(msg, get_picture)
def get_number_of_songs(message):
    """
    Conversation step 3: validate the requested song count (1..100), run the
    chosen model on the stored picture and reply with a playlist link.

    NOTE(review): the error text says "от 0 до 100" but the check below
    excludes 0 -- the accepted range is actually 1..100.
    """
    if str(message.text).isdigit() and 0 < int(message.text) <= 100:
        BOT.send_message(message.from_user.id, "Дай мне немного времени...")
        # Direct-download URL for the picture stored in get_picture.
        file_url = f"https://api.telegram.org/file/bot{TOKEN}/{USERS[message.chat.id].file_path}"
        if USERS[message.chat.id].use_playlists:
            songs = get_playlist_link(PLAYLIST_MODEL.predict(file_url, int(message.text)), file_url)
        else:
            songs = get_playlist_link(ALBUM_MODEL.predict(file_url, int(message.text)), file_url)
        BOT.reply_to(message, f"Готово! Вот что получилось: {songs}")
        msg = BOT.send_message(
            message.from_user.id,
            "Надеюсь, тебе понравится 🥰\nЕсли хочешь еще один плейлист," " пришли мне новую картинку",
        )
        # Loop back to step 1 for the next picture.
        BOT.register_next_step_handler(msg, get_picture)
    else:
        msg = BOT.reply_to(
            message,
            "Что-то пошло не так 😔 Cколько песен ты хочешь? (я жду от тебя некоторое число от 0 до 100)",
        )
        BOT.register_next_step_handler(msg, get_number_of_songs)
if __name__ == "__main__":
    # Load pre-trained weights for both rankers before accepting traffic.
    PLAYLIST_MODEL.load_from_disk("../data/playlists.pt")
    ALBUM_MODEL.load_from_disk("../data/albums.pt")
    print("I'm ready")
    BOT.set_my_commands(
        [
            telebot.types.BotCommand("/start", "Для знакомства"),
        ]
    )
    # none_stop=True keeps polling across transient network errors.
    BOT.polling(none_stop=True, interval=0)
| import telebot
import spotipy
from typing import List, Dict
from src.model.api import MatchCoverAPI
import base64
import requests
import io
from PIL import Image
import credentials as cred
class Answer:
"""
This class represents user answer.
"""
def __init__(self, file_path):
self.file_path = file_path
self.use_playlists = True
TOKEN = cred.TELEGRAM_TOKEN
BOT = telebot.TeleBot(TOKEN)
PLAYLIST_MODEL = MatchCoverAPI("facebook/deit-tiny-distilled-patch16-224")
ALBUM_MODEL = MatchCoverAPI("facebook/deit-tiny-distilled-patch16-224")
USERS: Dict[int, Answer] = {}
def get_playlist_link(track_ids: List[str], image_url: str) -> str:
"""
This function generates a spotify playlist and returns a link to it
:param track_ids: ID tracks in spotify
:param image_url: link to picture for download
:return: link to playlist
"""
scope = "playlist-modify-public ugc-image-upload"
bot_id = cred.SPOTIFY_BOT_ID
client_id = cred.SPOTIFY_CLIENT_ID
client_secret = cred.SPOTIFY_CLIENT_SECRET
auth_manager = spotipy.SpotifyOAuth(
client_id=client_id, client_secret=client_secret, scope=scope, redirect_uri="http://localhost:8888/callback/"
)
sp = spotipy.Spotify(auth_manager=auth_manager)
playlist_name = "Playlist for you"
playlist = sp.user_playlist_create(bot_id, public=True, name=playlist_name)
playlist_id = playlist["id"]
sp.playlist_add_items(playlist_id, track_ids)
# prepare cover
img = Image.open(requests.get(image_url, stream=True).raw)
img.thumbnail((256, 256))
img_byte_arr = io.BytesIO()
img.save(img_byte_arr, format="JPEG")
img_byte_arr = img_byte_arr.getvalue()
coded_img = base64.b64encode(img_byte_arr)
sp.playlist_upload_cover_image(playlist_id, coded_img)
return playlist["external_urls"]["spotify"]
def get_picture(message):
"""
This function is responsible for getting a link to the user's picture.
"""
if message.photo is None:
msg = BOT.reply_to(message, "Мне нужна картинка 🥺")
BOT.register_next_step_handler(msg, get_picture)
return
photo = BOT.get_file(message.photo[-1].file_id)
USERS[message.chat.id] = Answer(photo.file_path)
markup = telebot.types.ReplyKeyboardMarkup(one_time_keyboard=True)
markup.add("Альбом", "Плейлист")
msg = BOT.reply_to(
message, "Отлично! А откуда мне смотреть картинки, из альбомов или из плейлистов?", reply_markup=markup
)
BOT.register_next_step_handler(msg, get_type_model)
def get_type_model(message):
"""
This function is responsible for model selection (album/playlist).
"""
if message.text != "Альбом" and message.text != "Плейлист":
markup = telebot.types.ReplyKeyboardMarkup(one_time_keyboard=True)
markup.add("Альбом", "Плейлист")
msg = BOT.reply_to(
message,
"Я не знаю такое🙁\nОткуда мне смотреть картинки, из альбомов или из плейлистов?",
reply_markup=markup,
)
BOT.register_next_step_handler(msg, get_type_model)
return
if message.text == "Альбом":
USERS[message.chat.id].use_playlists = False
markup = telebot.types.ReplyKeyboardRemove(selective=True)
msg = BOT.reply_to(message, "Отлично! Теперь скажи, сколько песен ты хочешь?", reply_markup=markup)
BOT.register_next_step_handler(msg, get_number_of_songs)
@BOT.message_handler(commands=["start"])
def start(message):
"""
This function handles the start command.
"""
msg = BOT.reply_to(
message, "Привет ✨\nОтправь картинку, а я сделаю для тебя плейлист с подходящей по настроению музыкой 🧙"
)
BOT.register_next_step_handler(msg, get_picture)
def get_number_of_songs(message):
"""
This function remembers the number of musical compositions that the user requires and calls the model to get the result.
"""
if str(message.text).isdigit() and 0 < int(message.text) <= 100:
BOT.send_message(message.from_user.id, "Дай мне немного времени...")
file_url = f"https://api.telegram.org/file/bot{TOKEN}/{USERS[message.chat.id].file_path}"
if USERS[message.chat.id].use_playlists:
songs = get_playlist_link(PLAYLIST_MODEL.predict(file_url, int(message.text)), file_url)
else:
songs = get_playlist_link(ALBUM_MODEL.predict(file_url, int(message.text)), file_url)
BOT.reply_to(message, f"Готово! Вот что получилось: {songs}")
msg = BOT.send_message(
message.from_user.id,
"Надеюсь, тебе понравится 🥰\nЕсли хочешь еще один плейлист," " пришли мне новую картинку",
)
BOT.register_next_step_handler(msg, get_picture)
else:
msg = BOT.reply_to(
message,
"Что-то пошло не так 😔 Cколько песен ты хочешь? (я жду от тебя некоторое число от 0 до 100)",
)
BOT.register_next_step_handler(msg, get_number_of_songs)
if __name__ == "__main__":
PLAYLIST_MODEL.load_from_disk("../data/playlists.pt")
ALBUM_MODEL.load_from_disk("../data/albums.pt")
print("I'm ready")
BOT.set_my_commands(
[
telebot.types.BotCommand("/start", "Для знакомства"),
]
)
BOT.polling(none_stop=True, interval=0)
| en | 0.857292 | This class represents user answer. This function generates a spotify playlist and returns a link to it :param track_ids: ID tracks in spotify :param image_url: link to picture for download :return: link to playlist # prepare cover This function is responsible for getting a link to the user's picture. This function is responsible for model selection (album/playlist). This function handles the start command. This function remembers the number of musical compositions that the user requires and calls the model to get the result. | 2.692566 | 3 |
extractdata.py | Gela1117/CL-research | 0 | 6615853 | from cStringIO import StringIO
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import TextConverter
from pdfminer.layout import LAParams
from pdfminer.pdfpage import PDFPage
import os
import sys, getopt
import nltk.tokenize
import nltk
from pathlib import Path
import codecs
#converts pdf, returns its text content as a string
#converts pdf, returns its text content as a string
def convert(fname, pages=None):
    """Extract the text of a PDF and return it as one string.

    :param fname: path of the PDF file
    :param pages: optional iterable of 0-based page numbers; all pages
        are extracted when omitted.
    """
    if not pages:
        pagenums = set()
    else:
        pagenums = set(pages)
    output = StringIO()
    manager = PDFResourceManager()
    converter = TextConverter(manager, output, laparams=LAParams())
    interpreter = PDFPageInterpreter(manager, converter)
    infile = file(fname, 'rb')  # Python 2 builtin open
    for page in PDFPage.get_pages(infile, pagenums):
        interpreter.process_page(page)
    infile.close()
    converter.close()
    text = output.getvalue()
    # Fix: was a bare `output.close` (attribute reference, no call), so the
    # buffer was never closed.
    output.close()
    return text
def convertMultiple(pdfDir, txtDir):
    """Convert every *.pdf under pdfDir into a same-named .txt in txtDir."""
    if pdfDir == "": pdfDir = os.getcwd() + "\\" #if no pdfDir passed in
    for pdf in os.listdir(pdfDir): #iterate through pdfs in pdf directory
        fileExtension = pdf.split(".")[-1]
        if fileExtension == "pdf":
            pdfFilename = pdfDir + pdf
            text = convert(pdfFilename) #get string of text content of pdf
            textFilename = txtDir + pdf + ".txt"
            textFile = open(textFilename, "w") #make text file
            try:
                textFile.write(text) #write text to text file
            finally:
                # Fix: the handle was previously leaked (the close call was
                # commented out), exhausting file descriptors on large runs.
                textFile.close()
# Absolute source (PDF) and target (text) folders for the batch run below.
pdfDir = "C:/Users/86136/Desktop/HW/IntroNLP/Final Project/con27/pdf/"
txtDir = "C:/Users/86136/Desktop/HW/IntroNLP/Final Project/Data/Papers_txt/"
convertMultiple(pdfDir, txtDir)
# pick out those without space
def pickout(path):
    """Delete extracted .txt files containing a token of 100+ characters.

    Such an over-long "word" means the PDF text layer had no spaces, i.e.
    the extraction produced unusable output.
    """
    file_lst = [f.name for f in Path(path).iterdir() if f.is_file()]
    for filename in file_lst:
        full_path = path + filename
        # Read first, decide afterwards: removing a file that is still open
        # inside the `with` block is fragile (fails on Windows).
        with codecs.open(full_path, 'r', encoding="utf-8") as f:
            r = f.read()
        w_lst = nltk.word_tokenize(r)
        if any(len(w) >= 100 for w in w_lst):
            os.remove(full_path)
pickout(txtDir) | from cStringIO import StringIO
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import TextConverter
from pdfminer.layout import LAParams
from pdfminer.pdfpage import PDFPage
import os
import sys, getopt
import nltk.tokenize
import nltk
from pathlib import Path
import codecs
#converts pdf, returns its text content as a string
def convert(fname, pages=None):
if not pages:
pagenums = set()
else:
pagenums = set(pages)
output = StringIO()
manager = PDFResourceManager()
converter = TextConverter(manager, output, laparams=LAParams())
interpreter = PDFPageInterpreter(manager, converter)
infile = file(fname, 'rb')
for page in PDFPage.get_pages(infile, pagenums):
interpreter.process_page(page)
infile.close()
converter.close()
text = output.getvalue()
output.close
return text
def convertMultiple(pdfDir, txtDir):
if pdfDir == "": pdfDir = os.getcwd() + "\\" #if no pdfDir passed in
for pdf in os.listdir(pdfDir): #iterate through pdfs in pdf directory
fileExtension = pdf.split(".")[-1]
if fileExtension == "pdf":
pdfFilename = pdfDir + pdf
text = convert(pdfFilename) #get string of text content of pdf
textFilename = txtDir + pdf + ".txt"
textFile = open(textFilename, "w") #make text file
textFile.write(text) #write text to text file
#textFile.close
pdfDir = "C:/Users/86136/Desktop/HW/IntroNLP/Final Project/con27/pdf/"
txtDir = "C:/Users/86136/Desktop/HW/IntroNLP/Final Project/Data/Papers_txt/"
convertMultiple(pdfDir, txtDir)
# pick out those without space
def pickout(path):
file_lst = [f.name for f in Path(path).iterdir() if f.is_file()]
for filename in file_lst:
full_path = path+filename
with codecs.open(full_path, 'r', encoding="utf-8") as f:
r = f.read()
w_lst= nltk.word_tokenize(r)
for w in w_lst:
if len(w) >= 100:
f.close()
os.remove(full_path)
break
pickout(txtDir) | en | 0.75568 | #converts pdf, returns its text content as a string #if no pdfDir passed in #iterate through pdfs in pdf directory #get string of text content of pdf #make text file #write text to text file #textFile.close # pick out those without space | 2.839546 | 3 |
def get_config():
    """Build distutils (metadata, ext_options) for compiling _mysql on Windows.

    Reads the build configuration via setup_common and returns the package
    metadata dict plus the keyword arguments for the Extension.
    (Fix: the original `def` line was fused with dataset-dump metadata.)
    """
    import os, sys
    from setup_common import get_metadata_and_options, enabled, create_release_file

    metadata, options = get_metadata_and_options()

    mysql_root = options['mysql_location']

    extra_objects = []
    static = enabled(options, 'static')
    # XXX static doesn't actually do anything on Windows

    # Embedded builds link against the in-process server library.
    if enabled(options, 'embedded'):
        client = "mysqld"
    else:
        client = "mysqlclient"

    library_dirs = [ os.path.join(mysql_root, r'lib') ]
    libraries = [ 'kernel32', 'advapi32', 'wsock32', client ]
    include_dirs = [ os.path.join(mysql_root, r'include') ]
    extra_compile_args = [ '/Zl' ]

    name = "MySQL-python"
    if enabled(options, 'embedded'):
        name = name + "-embedded"
    metadata['name'] = name

    define_macros = [
        ('version_info', metadata['version_info']),
        ('__version__', metadata['version']),
    ]
    create_release_file(metadata)
    # version_info is only needed for the release file, not setup metadata.
    del metadata['version_info']
    ext_options = dict(
        name = "_mysql",
        library_dirs = library_dirs,
        libraries = libraries,
        extra_compile_args = extra_compile_args,
        include_dirs = include_dirs,
        extra_objects = extra_objects,
        define_macros = define_macros,
    )
    return metadata, ext_options
if __name__ == "__main__":
    # This module only provides get_config() for setup.py; it has no CLI.
    print ("""You shouldn't be running this directly; it is used by setup.py.""")
| def get_config():
import os, sys
from setup_common import get_metadata_and_options, enabled, create_release_file
metadata, options = get_metadata_and_options()
mysql_root = options['mysql_location']
extra_objects = []
static = enabled(options, 'static')
# XXX static doesn't actually do anything on Windows
if enabled(options, 'embedded'):
client = "mysqld"
else:
client = "mysqlclient"
library_dirs = [ os.path.join(mysql_root, r'lib') ]
libraries = [ 'kernel32', 'advapi32', 'wsock32', client ]
include_dirs = [ os.path.join(mysql_root, r'include') ]
extra_compile_args = [ '/Zl' ]
name = "MySQL-python"
if enabled(options, 'embedded'):
name = name + "-embedded"
metadata['name'] = name
define_macros = [
('version_info', metadata['version_info']),
('__version__', metadata['version']),
]
create_release_file(metadata)
del metadata['version_info']
ext_options = dict(
name = "_mysql",
library_dirs = library_dirs,
libraries = libraries,
extra_compile_args = extra_compile_args,
include_dirs = include_dirs,
extra_objects = extra_objects,
define_macros = define_macros,
)
return metadata, ext_options
if __name__ == "__main__":
print ("""You shouldn't be running this directly; it is used by setup.py.""")
| en | 0.96225 | # XXX static doesn't actually do anything on Windows You shouldn't be running this directly; it is used by setup.py. | 2.005959 | 2 |
game.py | prim/SZMUD | 1 | 6615855 | <reponame>prim/SZMUD
# encoding: utf-8
# TODO(Prim): 考虑是不是要移除condition
from SZMUD.databases.database import ItemDB, RoomDB, AccountDB, PortalDB, CharacterDB, RegionDB
from SZMUD.utils.jsdict import Action, Event, Condition
from SZMUD.utils.timer import Timer
from SZMUD.utils.queue import PriorityQueue
from SZMUD.databases.script import CommandDB, LogicDB
# TODO(Prim): hook sysytem
from SZMUD.event import TimeEvent
# TODO: 中毒的实现,Game.AddLogic,为实体中毒的事件,在Game.ExecuteLoop定时处理,扣血
# 在DoEvent扣除相应血量,再添加实体中毒的定时事件,形成实体持续中毒的状态
# Event.valid去定有效性,LogicEntity.KillHook()另一个定时事件失效
class Game:
    """Top-level world controller: event dispatch, chat, movement, items
    and timed events. A single module-level instance (`game`) is created
    at the bottom of this file."""
    def __init__(self):
        # Characters that are online players (subset of self.characters).
        self.players = []
        # Every live character in the world, players and NPCs alike.
        self.characters = []
        self.is_running = True
        self.timer = Timer()
        # Pending TimeEvents, ordered by fire time (see ExecuteLoop).
        self.eventqueue = PriorityQueue()
#------------------------------------------------------------------------------
# NOTE(Prim): 基础的Event优势可以对应单条Command
# 无论时Command或者Event的实习都需要自己检查逻辑错误,保证正确性
    def ProcessEvent(self, event, **kw):
        """Central dispatcher: route an event to its handler method.

        *event* may also be a type-name string, in which case an Event is
        built from it and **kw for convenience.
        """
        # Each event type can be handled inline here or delegated to a method.
        if type(event) == str:
            event = Event(event, **kw)
        if event.type == 'Announce':
            self.ReactWorldPlayers(event)
        # NOTE(review): this second test is `if`, not `elif`, so 'Announce'
        # events also reach it; harmless today but inconsistent with the rest.
        if event.type == 'AttemptSay':
            self.Say(event)
        elif event.type == 'EnterWorld':
            self.EnterWorld(event)
        elif event.type == 'LeaveWorld':
            self.LeaveWorld(event)
        elif event.type == 'SpawnCharacter':
            self.SpawnCharacter(event)
        elif event.type == 'SpawnItem':
            self.SpawnItem(event)
        elif event.type == 'DestoryCharacter':
            self.DestoryCharacter(event)
        elif event.type == 'DestoryItem':
            self.DestoryItem(event)
        elif event.type == 'AttemptGetItem':
            self.GetItem(event)
        elif event.type == 'AttemptDropItem':
            self.DropItem(event)
        elif event.type == 'AttemptGiveItem':
            self.GiveItem(event)
        elif event.type == 'AttemptEnterPortal':
            self.EnterPortal(event)
        elif event.type == 'AttemptTransport':
            self.AttemptTransport(event)
        elif event.type == 'ForceTransport':
            self.ForceTransport(event)
        # NOTE(review): the three branches below look like copy-paste bugs --
        # 'MessageLogic' dispatches to EnterPortal, 'AddLogic' to
        # AttemptTransport and 'DelLogic' to ForceTransport; none matches its
        # event name (the intended logic handlers are commented out further
        # down). Confirm the intended mapping before relying on them.
        elif event.type == 'MessageLogic':
            self.EnterPortal(event)
        elif event.type == 'AddLogic':
            self.AttemptTransport(event)
        elif event.type == 'DelLogic':
            self.ForceTransport(event)
        else:
            # Unknown event types are silently ignored.
            pass
    def ExecuteCommand(self, player, command):
        """Parse and run a single command line sent by *player*.

        In quiet mode every input line is parsed as a command and the player
        can only talk via `say ...`. In normal mode only lines prefixed with
        `/` are commands; anything else is treated as chat. A bare `/`
        repeats the player's last command.
        """
        c = player
        if command == u'/':
            # Repeat last command. NOTE(review): assumes c.lastcommand was
            # set at least once -- confirm it is initialised on the player.
            command = c.lastcommand
        else:
            c.lastcommand = command
        if u' ' in command:
            cmd, args = command.split(u' ', 1)
        else:
            cmd, args = command, u''
        args = args.split()
        # Normal-mode special case: unprefixed input is chat, not a command.
        if not c.is_quiet and cmd[0] != u'/':
            string = [cmd]
            string.extend(args)
            # NOTE(review): 'AttempSay' is a typo for 'AttemptSay'; benign
            # only because Say() never inspects event.type.
            self.Say(Event('AttempSay', who=c, content=u' '.join(string)))
            return
        if cmd[0] == u'/':
            cmd = cmd[1:]
        try:
            cmd_i = c.FindCommand(cmd)
            if not cmd_i:
                c.React(Action('Error', message=u'输入的命令不能识别:%s' % cmd))
                return
            cmd_i.Execute(*args) # NOTE: runs scripted command code -- may raise
        except Exception, error:
            # TODO(Prim): log error
            import traceback; traceback.print_exc()
            c.React(Action('Error', message=u'执行命令时发生严重错误,请通知管理员。\r\n错误信息:%s' % error))
#------------------------------------------------------------------------------
def ReactWorldPlayers(self, action):
for player in self.players:
player.React(action)
def ReactWorldCharacters(self, action):
for character in self.characters:
character.React(action)
def ReactRoomCharacters(self, action, room):
for character in room.characters:
character.React(action)
def ReactRoomItems(self, action, room):
for item in room.items:
item.React(action)
# TODO(Prim): ActionRegion?
#------------------------------------------------------------------------------
    def _GetEntity(self, entityid, type):
        """Look up a live entity by id in the database matching *type*.

        NOTE(review): the parameter name `type` shadows the builtin.
        """
        if type == 'Item':
            return ItemDB[entityid]
        elif type == 'Character':
            return CharacterDB[entityid]
        elif type == 'Room':
            return RoomDB[entityid]
        elif type == 'Region':
            return RegionDB[entityid]
        elif type == 'Portal':
            return PortalDB[entityid]
        raise Exception, u'尝试获取未知类型的实体:%s' % type
    def _DeleteItem(self, item):
        """Detach *item* from its container and erase it from ItemDB.

        An item lying in a room has both `room` and `region` set; an item
        carried by a character has `room` set to that character and
        `region` set to None (see GetItem/DropItem).
        """
        i = item
        if i.region:
            i.region.DelItem(i)
            i.room.DelItem(i)
        else:
            i.room.DelItem(i)
        i.room = None
        i.region = None
        i.ClearHooks()
        ItemDB.Delete(i)
def _DoJoinQuantities(self, character, keepitem):
for item in character.items:
if item != keepitem and item.templateid == keepitem.templateid:
keepitem.quantity += item.quantity
self._DeleteItem(item)
#------------------------------------------------------------------------------
def AddCharacter(self, c):
self.characters.append(c)
def AddPlayer(self, p):
self.players.append(p)
def RemoveCharacter(self, c):
self.characters.remove(c)
def RemovePlayer(self, p):
self.players.remove(p)
#------------------------------------------------------------------------------
def Say(self, event):
import time
print '============in Say'
print time.time()
print '============in Say'
"""某个角色在某个房间中发言"""
c = event.who
r = c.room
reg = c.region
content = event.content
if content:
condition = Condition('CanSay', who=c, content=content)
if c.Query(condition) and r.Query(condition) and reg.Query(condition):
action = Action('Say', who=c, content=content)
self.ReactRoomCharacters(action, r)
r.React(action)
reg.React(action)
else:
c.React(Action('Error', message=u'发言时必须要有内容!\r\n'))
    def EnterWorld(self, event):
        """Bring a player online: register him in world/region/room lists
        and announce the arrival at every level."""
        c = event.who
        r = c.room
        reg = c.region
        c.is_loggin = True
        # Register in all containers before broadcasting, so the player
        # himself also receives the announcements below.
        self.AddCharacter(c)
        self.AddPlayer(c)
        r.AddCharacter(c)
        reg.AddCharacter(c)
        self.ReactWorldPlayers(Action('EnterWorld', who=c))
        regaction = Action('EnterRegion', who=c)
        reg.React(regaction)
        c.React(regaction)
        # portal=None marks a login (no physical movement through a portal).
        raction = Action('EnterRoom', who=c, room=r, portal=None)
        r.React(raction)
        self.ReactRoomCharacters(raction, r)
        self.ReactRoomItems(raction, r)
        c.React(Action('News'))
    def LeaveWorld(self, event):
        """Take a player offline: announce the departure at every level,
        then unregister him (mirror image of EnterWorld)."""
        c = event.who
        r = c.room
        reg = c.region
        raction = Action('LeaveRoom', who=c, room=r, portal=None)
        self.ReactRoomItems(raction, r)
        self.ReactRoomCharacters(raction, r)
        r.React(raction)
        regaction = Action('LeaveRegion', who=c)
        reg.React(regaction)
        c.React(regaction)
        self.ReactWorldPlayers(Action('LeaveWorld', who=c))
        # Unregister only after broadcasting, so the player still receives
        # his own departure notifications.
        r.DelCharacter(c)
        reg.DelCharacter(c)
        self.RemovePlayer(c)
        self.RemoveCharacter(c)
        c.is_loggin = False
    def EnterPortal(self, event):
        """Move a character through a portal into the adjacent room.

        Queries leave/enter conditions on the regions (only when the move
        crosses a region boundary), both rooms, the portal and the character
        itself; if any check fails the move is silently cancelled.
        """
        c = event.who
        p = event.portal
        direction = event.direction
        oldroom = c.room
        newroom = p.GetDestination(c.room, direction)
        oldreg = oldroom.region
        newreg = newroom.region
        change_reg = oldreg != newreg
        if change_reg:
            lcondition = Condition('CanLeaveRegion', who=c, region=oldreg)
            econdition = Condition('CanEnterRegion', who=c, region=newreg)
            if not(
                oldreg.Query(lcondition) and newreg.Query(econdition) and \
                c.Query(lcondition) and c.Query(econdition)
            ):
                return
        lrcondition = Condition('CanLeaveRoom', who=c, portal=p)
        ercondition = Condition('CanEnterRoom', who=c, portal=p)
        pcondition = Condition('CanEnterPortal', who=c, portal=p)
        if oldroom.Query(lrcondition) and newroom.Query(ercondition) and \
            c.Query(lrcondition) and c.Query(ercondition) and \
            c.Query(pcondition) and p.Query(pcondition):
            # Departure notifications.
            if change_reg:
                action = Action('LeaveRegion', who=c)
                oldreg.React(action)
                c.React(action)
            action = Action('LeaveRoom', who=c, portal=p)
            self.ReactRoomCharacters(action, oldroom)
            self.ReactRoomItems(action, oldroom)
            oldroom.React(action)
            action = Action('EnterPortal', who=c, portal=p)
            p.React(action)
            c.React(action)
            # Actual relocation.
            if change_reg:
                oldreg.DelCharacter(c)
                c.region = newreg
                newreg.AddCharacter(c)
            oldroom.DelCharacter(c)
            c.room = newroom
            newroom.AddCharacter(c)
            # Arrival notifications.
            if change_reg:
                action = Action('EnterRegion', who=c)
                newreg.React(action)
                c.React(action)
            action = Action('EnterRoom', who=c, portal=p)
            newroom.React(action)
            self.ReactRoomCharacters(action, newroom)
            self.ReactRoomItems(action, newroom)
def AttemptTransport(self, event):
"""角色没有借助通道,直接在房间间移动"""
c = event.who
newroom = event.room
oldroom = c.room
oldreg = oldroom.region
newreg = newroom.region
change_reg = oldreg != newreg
if change_reg:
lcondition = Condition('CanLeaveRegion', who=c, region=oldreg)
econdition = Condition('CanEnterRegion', who=c, region=newreg)
if not(
oldreg.Query(lcondition) and newreg.Query(econdition) and \
c.Query(lcondition) and c.Query(econdition)
):
return
lrcondition = Condition('CanLeaveRoom', who=c, portal=p)
ercondition = Condition('CanEnterRoom', who=c, portal=p)
if oldroom.Query(lrcondition) and newroom.Query(ercondition) and \
c.Query(lrcondition) and c.Query(ercondition):
if change_reg:
action = Action('LeaveRegion', who=c)
oldreg.React(action)
c.React(action)
action = Action('LeaveRoom', who=c, portal=p)
self.ReactRoomCharacters(action, oldroom)
self.ReactRoomItems(action, oldroom)
oldroom.React(action)
if change_reg:
oldreg.DelCharacter(c)
c.region = newreg
newreg.AddCharacter(c)
oldroom.DelCharacter(c)
c.room = newroom
newroom.AddCharacter(c)
if change_reg:
action = Action('EnterRegion', who=c)
newreg.React(action)
c.React(action)
action = Action('EnterRoom', who=c, portal=p)
newroom.React(action)
self.ReactRoomCharacters(action, newroom)
self.ReactRoomItems(action, newroom)
def ForceTransport(self, event):
"""角色没有借助通道,直接在房间间移动,特权,不做可行性检查"""
# TODO(Prim): 合并函数
c = event.who
newroom = event.room
oldroom = c.room
oldreg = oldroom.region
newreg = newroom.region
change_reg = oldreg != newreg
if change_reg:
action = Action('LeaveRegion', who=c)
oldreg.React(action)
c.React(action)
action = Action('LeaveRoom', who=c, portal=p)
self.ReactRoomCharacters(action, oldroom)
self.ReactRoomItems(action, oldroom)
oldroom.React(action)
if change_reg:
oldreg.DelCharacter(c)
c.region = newreg
newreg.AddCharacter(c)
oldroom.DelCharacter(c)
c.room = newroom
newroom.AddCharacter(c)
if change_reg:
action = Action('EnterRegion', who=c)
newreg.React(action)
c.React(action)
action = Action('EnterRoom', who=c, portal=p)
newroom.React(action)
self.ReactRoomCharacters(action, newroom)
self.ReactRoomItems(action, newroom)
def GetItem(self, event):
c = event.who
i = event.item
quantity = event.quantity
r = c.room
reg = c.region
if i.room != c.room or i.region is None:
raise Exception, u'该物品在一个角色身上'
if i.is_quantity and quantity < 1:
c.React(Action('Error', message=u'输入的数目太少!'))
if i.is_quantity and quantity > i.quantity:
c.React(Action('Error', message=u'输入的数目太多!'))
condition = Condition('CanGetItem', who=c, item=i, quantity=quantity)
if i.Query(condition) and r.Query(condition) and reg.Query(condition) and c.Query(condition):
if i.is_quantity and quantity != i.quantity:
newitem = ItemDB.Generate(i.templateid)
newitem.quantity = quantity
i.quantity -= quantity
else:
r.DelItem(i)
reg.DelItem(i)
newitem = i
newitem.room = c
newitem.region = None
c.AddItem(newitem)
action = Action('GetItem', who=c, item=newitem)
r.React(action)
self.ReactRoomCharacters(action, c.room)
self.ReactRoomItems(action, c.room)
newitem.React(action)
if newitem.is_quantity:
self._DoJoinQuantities(c, newitem)
def DropItem(self, event):
c = event.who
i = event.item
quantity = event.quantity
r = c.room
reg = c.region
if i.room != c and i.region is None:
raise Exception, u'该物品不属于你!'
if i.is_quantity and quantity < 1:
c.React(Action('Error', message=u'输入的数目太少!'))
if i.is_quantity and quantity > i.quantity:
c.React(Action('Error', message=u'输入的数目太多!'))
condition = Condition('CanDroptem', who=c, item=i, quantity=quantity)
if i.Query(condition) and r.Query(condition) and reg.Query(condition) and c.Query(condition):
if i.is_quantity and quantity != i.quantity:
newitem = ItemDB.Generate(i.templateid)
newitem.quantity = quantity
i.quantity -= quantity
else:
c.DelItem(i)
newitem = i
newitem.room = r
newitem.region = reg
r.AddItem(newitem)
reg.AddItem(newitem)
action = Action('DropItem', who=c, item=newitem, quantity=quantity)
r.React(action)
self.ReactRoomCharacters(action, c.room)
self.ReactRoomItems(action, c.room)
newitem.React(action)
if newitem.is_quantity:
self._DoJoinQuantities(r, newitem)
    def GiveItem(self, event):
        """Hand an item (or part of a stack) from one character to another
        in the same room.

        NOTE(review): here `r` is the *receiving character*, not a room as
        elsewhere in this class -- a naming hazard.
        """
        g = event.giver
        r = event.receiver
        i = event.item
        quantity = event.quantity
        if g.room != r.room:
            raise Exception, u'传递物品时两个角色必须在同一个房间!'
        if i.is_quantity and quantity < 1:
            raise Exception, u'输入的数目太少!'
        if i.is_quantity and quantity > i.quantity:
            raise Exception, u'输入的数目太多!'
        gcondition = Condition('CanGiveItem', character=r, item=i, quantity=quantity)
        rcondition = Condition('CanReceiveItem', character=g, item=i, quantity=quantity)
        # NOTE(review): both conditions are queried on the *giver* only;
        # `g.Query(rcondition)` looks like it should be `r.Query(rcondition)`
        # -- confirm the intended contract before changing it.
        if i.Query(rcondition) and i.Query(gcondition) and \
            g.Query(gcondition) and g.Query(rcondition):
            if i.is_quantity and quantity != i.quantity:
                # Partial transfer: split the stack into a new item.
                newitem = ItemDB.Generate(i.templateid)
                newitem.quantity = quantity
                i.quantity -= quantity
            else:
                g.DelItem(i)
                newitem = i
            newitem.room = r
            r.AddItem(newitem)
            self.ReactRoomCharacters(event, g.room)
            self.ReactRoomItems(event, g.room)
            if newitem.is_quantity:
                self._DoJoinQuantities(r, newitem)
    def SpawnItem(self, event):
        """Spawn an item from a template into a room (event.region set) or
        directly into a character's inventory (event.region falsy, in which
        case event.room holds the character).

        NOTE(review): this method mutates *event* in place (deletes
        attributes, adds `item`) before broadcasting it.
        """
        i = ItemDB.Generate(event.templateid)
        if event.region:
            r = event.room
            reg = event.region
            i.room = r
            i.region = reg
            r.AddItem(i)
            reg.AddItem(i)
            del event.templateid
            event.item = i
            r.React(event)
            reg.React(event)
        else:
            # event.room is actually the receiving character here.
            c = event.room
            i.room = c
            i.region = None
            c.AddItem(i)
            del event.templateid
            del event.region
            del event.room
            event.item = i
            c.React(event)
    def DestoryItem(self, event):
        """Destroy a single item, whether carried or lying in a room
        (e.g. items that fade away on a timer).

        NOTE(review): the item travels in `event.i`, unlike the `event.item`
        attribute used by the spawn/get/drop paths -- confirm callers agree.
        """
        i = event.i
        if not i.region:
            # Carried item: room is the holding character.
            c = i.room
            i.React(event)
            c.React(event)
        else:
            i.React(event)
            i.room.React(event)
            i.region.React(event)
        self._DeleteItem(i)
def SpawnCharacter(self, event):
""""在某个房间刷新NPC"""
c = CharacterDB.Generate(event.templateid)
r = event.room
reg = r.region
c.rooom = r
c.region = reg
r.AddCharacter(c)
reg.AddCharacter(c)
action = Action('SpawnCharacter', character=c)
r.DoAction(action)
reg.DoAction(action)
    def DestoryCharacter(self, event):
        """Delete a single NPC; everything it carried drops into its room.

        Players may never be deleted through this path.
        """
        c = event.character
        r = c.room
        reg = c.region
        if c.IsPlayer():
            raise Exception, u'不可以删除玩家!'
        c.React(event)
        r.React(event)
        reg.React(event)
        # Re-home every carried item into the room and announce the drops.
        for i in c.items:
            r.AddItem(i)
            reg.AddItem(i)
            i.room = r
            i.region = reg
            action = Action('DropItem', who=c, item=i, quantity=i.quantity)
            r.React(action)
            reg.React(action)
        r.DelCharacter(c)
        reg.DelCharacter(c)
        c.ClearHooks()
        c.room = None
        c.region = None
        CharacterDB.Erase(c)
#------------------------------------------------------------------------------
# def LogicAction(self, entityid, type_, logicname, action):
# """触发特定实体的提定逻辑"""
# e = self._GetEntity(entityid, type_)
# # TODO(Prim):
# logic = e.GetLogic(logicname)
# logic.Execute(action)
# def AddLogic(self, entityid, logicname):
# """为特定实体添加提定逻辑"""
# e = self._GetEntity(entityid, type_)
# e.AddLogic(logicname)
#
# def DeleteLogic(self, entityid, logicname):
# """为特定实体删除提定逻辑"""
# e = self._GetEntity(entityid, type_)
# e.Deletelogic(logicname)
#------------------------------------------------------------------------------
# public misc
    def ShutDown(self):
        """Ask the main loop to stop by clearing the running flag."""
        self.is_running = False
    def CleanUp(self):
        ItemDB.CleanUp()
        # NOTE(review): ``ClearnUp`` looks like a typo for ``CleanUp`` --
        # confirm the actual method name on CharacterDB before changing it.
        CharacterDB.ClearnUp()
    def SaveAll(self):
        """Persist accounts, player characters and all regions."""
        AccountDB.Save()
        CharacterDB.SavePlayers()
        RegionDB.SaveAll()
        # TODO(Prim):
        # self.SaveTimers()
    def LoadAll(self):
        """Load every database: templates, scripts, world data and players."""
        CharacterDB.LoadTemplates()
        ItemDB.LoadTemplates()
        CommandDB.Load()
        LogicDB.Load()
        RegionDB.LoadAll()
        CharacterDB.LoadPlayers()
        AccountDB.Load()
        # TODO(Prim):
        # self.LoadTimers()
    def SavePlayers(self):
        """Persist accounts and player characters only (not world data)."""
        AccountDB.Save()
        CharacterDB.SavePlayers()
    def SaveSingleRegion(self, regionid):
        # Persist a single region, addressed by id.
        RegionDB.SaveRegion(regionid)
# def ReloadItemTemplates(self, filename):
# ItemDB.LoadTemplates(filename)
# def ReloadCharacterTempaltes(self, filename):
# CharacterDB.LoadTemplates(filename)
# def ReloadRegion(self, filename):
# RegionDB.LoadRegion(filename)
# def ReloadCommandScript(self, filename, mode):
# pass
# def ReloadLogicSript(self, filename, mode):
# pass
    def LoadTimers(self):
        # Placeholder: timer persistence is not implemented yet.
        pass
    def SaveTimers(self):
        # Placeholder: timer persistence is not implemented yet.
        pass
    def _GetTime(self):
        # Current game time in seconds, as reported by the engine timer.
        return self.timer.GetS()
#------------------------------------------------------------------------------
# NOTE(Prim): 对于定时事件,你无法预知未来会怎么样
# 你约定的处理方法所需要的各个实体可能都会不再存在
# TODO(Prim):
# def AddEventAbsolute(self, time, event):
# # 某个游戏时间发生某个事件
# pass
def AddEventRelative(self, second, event, **kw):
if kw:
tevent = TimeEvent(self._GetTime()+second, event, **kw)
else:
tevent = TimeEvent(self._GetTime()+second, event)
self.eventqueue.push(tevent)
tevent.Hook()
    def ExecuteLoop(self):
        """Fire every queued TimeEvent whose due time has passed.

        The queue is assumed to yield events in due-time order; the first
        not-yet-due (or invalidated) event is pushed back and iteration
        stops.

        NOTE(review): iterating the priority queue while pushing back into
        it assumes PriorityQueue iteration *pops* entries -- confirm that
        contract.  Also note an invalid event hit first re-queues itself
        and blocks later due events until it expires.
        """
        time = self._GetTime()
        for tevent in self.eventqueue:
            if tevent.time < time and tevent.is_valid:
                tevent.Unhook()
                # Dispatch through the module-level singleton.
                game.ProcessEvent(tevent.event)
            else:
                self.eventqueue.push(tevent)
                break
# Module-level singleton: the rest of the server imports and drives this.
game = Game()
| # encoding: utf-8
# TODO(Prim): 考虑是不是要移除condition
from SZMUD.databases.database import ItemDB, RoomDB, AccountDB, PortalDB, CharacterDB, RegionDB
from SZMUD.utils.jsdict import Action, Event, Condition
from SZMUD.utils.timer import Timer
from SZMUD.utils.queue import PriorityQueue
from SZMUD.databases.script import CommandDB, LogicDB
# TODO(Prim): hook sysytem
from SZMUD.event import TimeEvent
# TODO: 中毒的实现,Game.AddLogic,为实体中毒的事件,在Game.ExecuteLoop定时处理,扣血
# 在DoEvent扣除相应血量,再添加实体中毒的定时事件,形成实体持续中毒的状态
# Event.valid去定有效性,LogicEntity.KillHook()另一个定时事件失效
class Game:
    def __init__(self):
        # Online player characters (a subset of ``characters``).
        self.players = []
        # Every character currently in the world, players and NPCs alike.
        self.characters = []
        self.is_running = True
        self.timer = Timer()
        # Priority queue of pending TimeEvents, ordered by due time.
        self.eventqueue = PriorityQueue()
#------------------------------------------------------------------------------
# NOTE(Prim): 基础的Event优势可以对应单条Command
# 无论时Command或者Event的实习都需要自己检查逻辑错误,保证正确性
def ProcessEvent(self, event, **kw):
# 可以在这里针对event的类型写响应的处理代码
# 也可以自定义个方法调用
if type(event) == str:
event = Event(event, **kw)
if event.type == 'Announce':
self.ReactWorldPlayers(event)
if event.type == 'AttemptSay':
self.Say(event)
elif event.type == 'EnterWorld':
self.EnterWorld(event)
elif event.type == 'LeaveWorld':
self.LeaveWorld(event)
elif event.type == 'SpawnCharacter':
self.SpawnCharacter(event)
elif event.type == 'SpawnItem':
self.SpawnItem(event)
elif event.type == 'DestoryCharacter':
self.DestoryCharacter(event)
elif event.type == 'DestoryItem':
self.DestoryItem(event)
elif event.type == 'AttemptGetItem':
self.GetItem(event)
elif event.type == 'AttemptDropItem':
self.DropItem(event)
elif event.type == 'AttemptGiveItem':
self.GiveItem(event)
elif event.type == 'AttemptEnterPortal':
self.EnterPortal(event)
elif event.type == 'AttemptTransport':
self.AttemptTransport(event)
elif event.type == 'ForceTransport':
self.ForceTransport(event)
elif event.type == 'MessageLogic':
self.EnterPortal(event)
elif event.type == 'AddLogic':
self.AttemptTransport(event)
elif event.type == 'DelLogic':
self.ForceTransport(event)
else:
pass
    def ExecuteCommand(self, player, command):
        """Parse and run one raw input line from *player*.

        In quiet mode every input line is treated as a command; the player
        can only speak via ``say xxx``.  In normal mode input is treated as
        speech unless it is prefixed with ``/``.  A bare ``/`` repeats the
        player's previous command.
        """
        c = player
        if command == u'/':
            command = c.lastcommand
        else:
            c.lastcommand = command
        if u' ' in command:
            cmd, args = command.split(u' ', 1)
        else:
            cmd, args = command, u''
        args = args.split()
        # Normal mode: an unprefixed line is speech, not a command.
        if not c.is_quiet and cmd[0] != u'/':
            string = [cmd]
            string.extend(args)
            self.Say(Event('AttempSay', who=c, content=u' '.join(string)))
            return
        if cmd[0] == u'/':
            cmd = cmd[1:]
        try:
            cmd_i = c.FindCommand(cmd)
            if not cmd_i:
                c.React(Action('Error', message=u'输入的命令不能识别:%s' % cmd))
                return
            cmd_i.Execute(*args) # NOTE: !!!
        except Exception, error:
            # TODO(Prim): log error
            import traceback; traceback.print_exc()
            c.React(Action('Error', message=u'执行命令时发生严重错误,请通知管理员。\r\n错误信息:%s' % error))
#------------------------------------------------------------------------------
def ReactWorldPlayers(self, action):
for player in self.players:
player.React(action)
def ReactWorldCharacters(self, action):
for character in self.characters:
character.React(action)
def ReactRoomCharacters(self, action, room):
for character in room.characters:
character.React(action)
def ReactRoomItems(self, action, room):
for item in room.items:
item.React(action)
# TODO(Prim): ActionRegion?
#------------------------------------------------------------------------------
    def _GetEntity(self, entityid, type):
        """Look up an entity by id in the database matching *type*.

        *type* is one of 'Item', 'Character', 'Room', 'Region', 'Portal';
        anything else raises.  NOTE(review): the parameter shadows the
        ``type`` builtin; kept as-is for compatibility.
        """
        if type == 'Item':
            return ItemDB[entityid]
        elif type == 'Character':
            return CharacterDB[entityid]
        elif type == 'Room':
            return RoomDB[entityid]
        elif type == 'Region':
            return RegionDB[entityid]
        elif type == 'Portal':
            return PortalDB[entityid]
        raise Exception, u'尝试获取未知类型的实体:%s' % type
    def _DeleteItem(self, item):
        """Detach *item* from its container(s) and erase it from ItemDB.

        An item lying in a room has both ``room`` and ``region`` set; an
        item carried by a character has ``region`` == None and ``room``
        set to the carrier, so only one DelItem call is needed then.
        """
        i = item
        if i.region:
            i.region.DelItem(i)
            i.room.DelItem(i)
        else:
            i.room.DelItem(i)
        i.room = None
        i.region = None
        i.ClearHooks()
        ItemDB.Delete(i)
def _DoJoinQuantities(self, character, keepitem):
for item in character.items:
if item != keepitem and item.templateid == keepitem.templateid:
keepitem.quantity += item.quantity
self._DeleteItem(item)
#------------------------------------------------------------------------------
    def AddCharacter(self, c):
        # Register a character (player or NPC) with the world-wide list.
        self.characters.append(c)
    def AddPlayer(self, p):
        # Register an online player (callers also call AddCharacter).
        self.players.append(p)
    def RemoveCharacter(self, c):
        # Drop a character from the world-wide list.
        self.characters.remove(c)
    def RemovePlayer(self, p):
        # Drop a player from the online-players list.
        self.players.remove(p)
#------------------------------------------------------------------------------
def Say(self, event):
import time
print '============in Say'
print time.time()
print '============in Say'
"""某个角色在某个房间中发言"""
c = event.who
r = c.room
reg = c.region
content = event.content
if content:
condition = Condition('CanSay', who=c, content=content)
if c.Query(condition) and r.Query(condition) and reg.Query(condition):
action = Action('Say', who=c, content=content)
self.ReactRoomCharacters(action, r)
r.React(action)
reg.React(action)
else:
c.React(Action('Error', message=u'发言时必须要有内容!\r\n'))
    def EnterWorld(self, event):
        """A player comes online.

        Registers the character with the world, its region and its room,
        then broadcasts EnterWorld / EnterRegion / EnterRoom actions and
        finally shows the news to the player.
        """
        c = event.who
        r = c.room
        reg = c.region
        # NOTE(review): ``is_loggin`` looks like a typo for ``is_login`` /
        # ``is_logged_in``; kept because other modules read this name.
        c.is_loggin = True
        self.AddCharacter(c)
        self.AddPlayer(c)
        r.AddCharacter(c)
        reg.AddCharacter(c)
        self.ReactWorldPlayers(Action('EnterWorld', who=c))
        regaction = Action('EnterRegion', who=c)
        reg.React(regaction)
        c.React(regaction)
        # portal=None marks a login rather than a walk-in through a portal.
        raction = Action('EnterRoom', who=c, room=r, portal=None)
        r.React(raction)
        self.ReactRoomCharacters(raction, r)
        self.ReactRoomItems(raction, r)
        c.React(Action('News'))
    def LeaveWorld(self, event):
        """A player goes offline.

        Mirrors EnterWorld in reverse order: broadcast the leave actions
        first, then unregister the character from room, region and world.
        """
        c = event.who
        r = c.room
        reg = c.region
        raction = Action('LeaveRoom', who=c, room=r, portal=None)
        self.ReactRoomItems(raction, r)
        self.ReactRoomCharacters(raction, r)
        r.React(raction)
        regaction = Action('LeaveRegion', who=c)
        reg.React(regaction)
        c.React(regaction)
        self.ReactWorldPlayers(Action('LeaveWorld', who=c))
        r.DelCharacter(c)
        reg.DelCharacter(c)
        self.RemovePlayer(c)
        self.RemoveCharacter(c)
        c.is_loggin = False
    def EnterPortal(self, event):
        """A character walks through a portal into another room.

        Queries the Can{Leave,Enter}{Region,Room} and CanEnterPortal
        conditions first; on success fires Leave* actions, moves the
        character, then fires Enter* actions.  Region bookkeeping only
        happens when the destination lies in a different region.
        """
        c = event.who
        p = event.portal
        direction = event.direction
        oldroom = c.room
        newroom = p.GetDestination(c.room, direction)
        oldreg = oldroom.region
        newreg = newroom.region
        change_reg = oldreg != newreg
        if change_reg:
            lcondition = Condition('CanLeaveRegion', who=c, region=oldreg)
            econdition = Condition('CanEnterRegion', who=c, region=newreg)
            if not(
                oldreg.Query(lcondition) and newreg.Query(econdition) and \
                c.Query(lcondition) and c.Query(econdition)
            ):
                return
        lrcondition = Condition('CanLeaveRoom', who=c, portal=p)
        ercondition = Condition('CanEnterRoom', who=c, portal=p)
        pcondition = Condition('CanEnterPortal', who=c, portal=p)
        if oldroom.Query(lrcondition) and newroom.Query(ercondition) and \
            c.Query(lrcondition) and c.Query(ercondition) and \
            c.Query(pcondition) and p.Query(pcondition):
            # Phase 1: leave notifications.
            if change_reg:
                action = Action('LeaveRegion', who=c)
                oldreg.React(action)
                c.React(action)
            action = Action('LeaveRoom', who=c, portal=p)
            self.ReactRoomCharacters(action, oldroom)
            self.ReactRoomItems(action, oldroom)
            oldroom.React(action)
            action = Action('EnterPortal', who=c, portal=p)
            p.React(action)
            c.React(action)
            # Phase 2: actually relocate the character.
            if change_reg:
                oldreg.DelCharacter(c)
                c.region = newreg
                newreg.AddCharacter(c)
            oldroom.DelCharacter(c)
            c.room = newroom
            newroom.AddCharacter(c)
            # Phase 3: arrival notifications.
            if change_reg:
                action = Action('EnterRegion', who=c)
                newreg.React(action)
                c.React(action)
            action = Action('EnterRoom', who=c, portal=p)
            newroom.React(action)
            self.ReactRoomCharacters(action, newroom)
            self.ReactRoomItems(action, newroom)
def AttemptTransport(self, event):
"""角色没有借助通道,直接在房间间移动"""
c = event.who
newroom = event.room
oldroom = c.room
oldreg = oldroom.region
newreg = newroom.region
change_reg = oldreg != newreg
if change_reg:
lcondition = Condition('CanLeaveRegion', who=c, region=oldreg)
econdition = Condition('CanEnterRegion', who=c, region=newreg)
if not(
oldreg.Query(lcondition) and newreg.Query(econdition) and \
c.Query(lcondition) and c.Query(econdition)
):
return
lrcondition = Condition('CanLeaveRoom', who=c, portal=p)
ercondition = Condition('CanEnterRoom', who=c, portal=p)
if oldroom.Query(lrcondition) and newroom.Query(ercondition) and \
c.Query(lrcondition) and c.Query(ercondition):
if change_reg:
action = Action('LeaveRegion', who=c)
oldreg.React(action)
c.React(action)
action = Action('LeaveRoom', who=c, portal=p)
self.ReactRoomCharacters(action, oldroom)
self.ReactRoomItems(action, oldroom)
oldroom.React(action)
if change_reg:
oldreg.DelCharacter(c)
c.region = newreg
newreg.AddCharacter(c)
oldroom.DelCharacter(c)
c.room = newroom
newroom.AddCharacter(c)
if change_reg:
action = Action('EnterRegion', who=c)
newreg.React(action)
c.React(action)
action = Action('EnterRoom', who=c, portal=p)
newroom.React(action)
self.ReactRoomCharacters(action, newroom)
self.ReactRoomItems(action, newroom)
def ForceTransport(self, event):
"""角色没有借助通道,直接在房间间移动,特权,不做可行性检查"""
# TODO(Prim): 合并函数
c = event.who
newroom = event.room
oldroom = c.room
oldreg = oldroom.region
newreg = newroom.region
change_reg = oldreg != newreg
if change_reg:
action = Action('LeaveRegion', who=c)
oldreg.React(action)
c.React(action)
action = Action('LeaveRoom', who=c, portal=p)
self.ReactRoomCharacters(action, oldroom)
self.ReactRoomItems(action, oldroom)
oldroom.React(action)
if change_reg:
oldreg.DelCharacter(c)
c.region = newreg
newreg.AddCharacter(c)
oldroom.DelCharacter(c)
c.room = newroom
newroom.AddCharacter(c)
if change_reg:
action = Action('EnterRegion', who=c)
newreg.React(action)
c.React(action)
action = Action('EnterRoom', who=c, portal=p)
newroom.React(action)
self.ReactRoomCharacters(action, newroom)
self.ReactRoomItems(action, newroom)
def GetItem(self, event):
c = event.who
i = event.item
quantity = event.quantity
r = c.room
reg = c.region
if i.room != c.room or i.region is None:
raise Exception, u'该物品在一个角色身上'
if i.is_quantity and quantity < 1:
c.React(Action('Error', message=u'输入的数目太少!'))
if i.is_quantity and quantity > i.quantity:
c.React(Action('Error', message=u'输入的数目太多!'))
condition = Condition('CanGetItem', who=c, item=i, quantity=quantity)
if i.Query(condition) and r.Query(condition) and reg.Query(condition) and c.Query(condition):
if i.is_quantity and quantity != i.quantity:
newitem = ItemDB.Generate(i.templateid)
newitem.quantity = quantity
i.quantity -= quantity
else:
r.DelItem(i)
reg.DelItem(i)
newitem = i
newitem.room = c
newitem.region = None
c.AddItem(newitem)
action = Action('GetItem', who=c, item=newitem)
r.React(action)
self.ReactRoomCharacters(action, c.room)
self.ReactRoomItems(action, c.room)
newitem.React(action)
if newitem.is_quantity:
self._DoJoinQuantities(c, newitem)
def DropItem(self, event):
c = event.who
i = event.item
quantity = event.quantity
r = c.room
reg = c.region
if i.room != c and i.region is None:
raise Exception, u'该物品不属于你!'
if i.is_quantity and quantity < 1:
c.React(Action('Error', message=u'输入的数目太少!'))
if i.is_quantity and quantity > i.quantity:
c.React(Action('Error', message=u'输入的数目太多!'))
condition = Condition('CanDroptem', who=c, item=i, quantity=quantity)
if i.Query(condition) and r.Query(condition) and reg.Query(condition) and c.Query(condition):
if i.is_quantity and quantity != i.quantity:
newitem = ItemDB.Generate(i.templateid)
newitem.quantity = quantity
i.quantity -= quantity
else:
c.DelItem(i)
newitem = i
newitem.room = r
newitem.region = reg
r.AddItem(newitem)
reg.AddItem(newitem)
action = Action('DropItem', who=c, item=newitem, quantity=quantity)
r.React(action)
self.ReactRoomCharacters(action, c.room)
self.ReactRoomItems(action, c.room)
newitem.React(action)
if newitem.is_quantity:
self._DoJoinQuantities(r, newitem)
    def GiveItem(self, event):
        """Transfer an item (or part of a stack) from giver to receiver.

        Both characters must be in the same room.

        NOTE(review): the condition kwargs look swapped ('CanGiveItem'
        carries the *receiver* as ``character`` and vice versa), and only
        ``g.Query`` is consulted -- the receiver is never asked.  Also
        ``newitem.region`` is not reset for a freshly Generated stack.
        Confirm intent before relying on these checks.
        """
        g = event.giver
        r = event.receiver
        i = event.item
        quantity = event.quantity
        if g.room != r.room:
            raise Exception, u'传递物品时两个角色必须在同一个房间!'
        if i.is_quantity and quantity < 1:
            raise Exception, u'输入的数目太少!'
        if i.is_quantity and quantity > i.quantity:
            raise Exception, u'输入的数目太多!'
        gcondition = Condition('CanGiveItem', character=r, item=i, quantity=quantity)
        rcondition = Condition('CanReceiveItem', character=g, item=i, quantity=quantity)
        if i.Query(rcondition) and i.Query(gcondition) and \
            g.Query(gcondition) and g.Query(rcondition):
            if i.is_quantity and quantity != i.quantity:
                # Partial transfer: split a new stack off the giver's stack.
                newitem = ItemDB.Generate(i.templateid)
                newitem.quantity = quantity
                i.quantity -= quantity
            else:
                g.DelItem(i)
                newitem = i
            # A carried item stores its carrier in ``room``.
            newitem.room = r
            r.AddItem(newitem)
            self.ReactRoomCharacters(event, g.room)
            self.ReactRoomItems(event, g.room)
            if newitem.is_quantity:
                self._DoJoinQuantities(r, newitem)
    def SpawnItem(self, event):
        """Spawn an item from a template into a room or onto a character.

        If ``event.region`` is set, ``event.room`` is a real room and the
        item is registered with both room and region; otherwise
        ``event.room`` is actually a character and the item goes into its
        inventory.  ``templateid`` (and, in the character case, the stale
        room/region fields) are stripped from the event before it is
        re-broadcast with the spawned item attached.
        """
        i = ItemDB.Generate(event.templateid)
        if event.region:
            r = event.room
            reg = event.region
            i.room = r
            i.region = reg
            r.AddItem(i)
            reg.AddItem(i)
            del event.templateid
            event.item = i
            r.React(event)
            reg.React(event)
        else:
            # ``event.room`` is a character here; carried items keep their
            # carrier in ``room`` and have region == None.
            c = event.room
            i.room = c
            i.region = None
            c.AddItem(i)
            del event.templateid
            del event.region
            del event.room
            event.item = i
            c.React(event)
    def DestoryItem(self, event):
        """Destroy a single item (sic: "Destory").

        The item may live in a room or be carried by a character (a carried
        item has ``region`` set to None and ``room`` set to the carrier).
        Typical use: items that dissolve on a timer.

        NOTE(review): this handler reads ``event.i`` while the other item
        handlers use ``event.item`` -- confirm which attribute callers set.
        """
        i = event.i
        if not i.region:
            # Item is on a character: notify the item and its carrier only.
            c = i.room
            i.React(event)
            c.React(event)
        else:
            # Item lies in a room: notify item, room and region.
            i.React(event)
            i.room.React(event)
            i.region.React(event)
        self._DeleteItem(i)
def SpawnCharacter(self, event):
""""在某个房间刷新NPC"""
c = CharacterDB.Generate(event.templateid)
r = event.room
reg = r.region
c.rooom = r
c.region = reg
r.AddCharacter(c)
reg.AddCharacter(c)
action = Action('SpawnCharacter', character=c)
r.DoAction(action)
reg.DoAction(action)
def DestoryCharacter(self, event):
"""删除单个NPC, NPC身上物品掉落房间内"""
c = event.character
r = c.room
reg = c.region
if c.IsPlayer():
raise Exception, u'不可以删除玩家!'
c.React(event)
r.React(event)
reg.React(event)
for i in c.items:
r.AddItem(i)
reg.AddItem(i)
i.room = r
i.region = reg
action = Action('DropItem', who=c, item=i, quantity=i.quantity)
r.React(action)
reg.React(action)
r.DelCharacter(c)
reg.DelCharacter(c)
c.ClearHooks()
c.room = None
c.region = None
CharacterDB.Erase(c)
#------------------------------------------------------------------------------
# def LogicAction(self, entityid, type_, logicname, action):
# """触发特定实体的提定逻辑"""
# e = self._GetEntity(entityid, type_)
# # TODO(Prim):
# logic = e.GetLogic(logicname)
# logic.Execute(action)
# def AddLogic(self, entityid, logicname):
# """为特定实体添加提定逻辑"""
# e = self._GetEntity(entityid, type_)
# e.AddLogic(logicname)
#
# def DeleteLogic(self, entityid, logicname):
# """为特定实体删除提定逻辑"""
# e = self._GetEntity(entityid, type_)
# e.Deletelogic(logicname)
#------------------------------------------------------------------------------
# public misc
def ShutDown(self):
self.is_running = False
def CleanUp(self):
ItemDB.CleanUp()
CharacterDB.ClearnUp()
def SaveAll(self):
AccountDB.Save()
CharacterDB.SavePlayers()
RegionDB.SaveAll()
# TODO(Prim):
# self.SaveTimers()
def LoadAll(self):
CharacterDB.LoadTemplates()
ItemDB.LoadTemplates()
CommandDB.Load()
LogicDB.Load()
RegionDB.LoadAll()
CharacterDB.LoadPlayers()
AccountDB.Load()
# TODO(Prim):
# self.LoadTimers()
def SavePlayers(self):
AccountDB.Save()
CharacterDB.SavePlayers()
def SaveSingleRegion(self, regionid):
RegionDB.SaveRegion(regionid)
# def ReloadItemTemplates(self, filename):
# ItemDB.LoadTemplates(filename)
# def ReloadCharacterTempaltes(self, filename):
# CharacterDB.LoadTemplates(filename)
# def ReloadRegion(self, filename):
# RegionDB.LoadRegion(filename)
# def ReloadCommandScript(self, filename, mode):
# pass
# def ReloadLogicSript(self, filename, mode):
# pass
def LoadTimers(self):
pass
def SaveTimers(self):
pass
def _GetTime(self):
return self.timer.GetS()
#------------------------------------------------------------------------------
# NOTE(Prim): 对于定时事件,你无法预知未来会怎么样
# 你约定的处理方法所需要的各个实体可能都会不再存在
# TODO(Prim):
# def AddEventAbsolute(self, time, event):
# # 某个游戏时间发生某个事件
# pass
def AddEventRelative(self, second, event, **kw):
if kw:
tevent = TimeEvent(self._GetTime()+second, event, **kw)
else:
tevent = TimeEvent(self._GetTime()+second, event)
self.eventqueue.push(tevent)
tevent.Hook()
    def ExecuteLoop(self):
        """Fire every queued TimeEvent whose due time has passed.

        The queue is assumed to yield events in due-time order; the first
        not-yet-due (or invalidated) event is pushed back and iteration
        stops.

        NOTE(review): iterating the priority queue while pushing back into
        it assumes PriorityQueue iteration *pops* entries -- confirm that
        contract.
        """
        time = self._GetTime()
        for tevent in self.eventqueue:
            if tevent.time < time and tevent.is_valid:
                tevent.Unhook()
                # Dispatch through the module-level singleton.
                game.ProcessEvent(tevent.event)
            else:
                self.eventqueue.push(tevent)
                break
# Module-level singleton: the rest of the server imports and drives this.
game = Game()
ReloadRegion(self, filename): # RegionDB.LoadRegion(filename) # def ReloadCommandScript(self, filename, mode): # pass # def ReloadLogicSript(self, filename, mode): # pass #------------------------------------------------------------------------------ # NOTE(Prim): 对于定时事件,你无法预知未来会怎么样 # 你约定的处理方法所需要的各个实体可能都会不再存在 # TODO(Prim): # def AddEventAbsolute(self, time, event): # # 某个游戏时间发生某个事件 # pass | 2.023187 | 2 |
tests/test_finder_sidebar.py | Ajordat/FinderSidebarEditor | 12 | 6615856 | <reponame>Ajordat/FinderSidebarEditor<filename>tests/test_finder_sidebar.py<gh_stars>10-100
import unittest
from finder_sidebar_editor import FinderSidebar
class TestFinderSidebar(unittest.TestCase):
    # Integration tests against the real macOS Finder sidebar; each test
    # starts from an empty sidebar and tearDown restores the user's items.
    def setUp(self):
        # Snapshot the current favorites (as filesystem paths) so tearDown
        # can restore them, then clear the sidebar.
        self.finder = FinderSidebar()
        self.pre_items = [
            str(uri).split("file://")[1]
            for uri in self.finder.favorites.values()
        ]
        self.finder.remove_all()
    def tearDown(self):
        # Restore the snapshot; reversed because add() inserts at the top.
        self.finder.remove_all()
        for uri in reversed(self.pre_items):
            self.finder.add(uri)
    def test_add(self):
        # Adding a path exposes it by basename in the favorites mapping.
        self.finder.add("/tmp")
        self.assertIn('tmp', self.finder.favorites.keys())
    def test_get_index(self):
        # New entries are inserted at index 0, pushing older ones down.
        self.finder.add("/tmp")
        self.assertEqual(self.finder.get_index_from_name("tmp"), 0)
        self.finder.add("/usr")
        self.assertEqual(self.finder.get_index_from_name("usr"), 0)
        self.assertEqual(self.finder.get_index_from_name("tmp"), 1)
    def test_remove_all(self):
        self.finder.add("/tmp")
        self.finder.remove_all()
        self.assertFalse(self.finder.favorites)
    def test_remove(self):
        # Removal by display name.
        self.finder.add("/usr")
        self.finder.remove("usr")
        self.assertFalse(self.finder.favorites)
    def test_remove_by_path(self):
        # Removal by full filesystem path.
        self.finder.add("/usr")
        self.finder.remove_by_path("/usr")
        self.assertFalse(self.finder.favorites)
    def test_move_items(self):
        # move(a, b) relocates entry a to entry b's position.
        self.finder.add("/usr")
        self.finder.add("/tmp")
        self.assertEqual(self.finder.get_index_from_name('tmp'), 0)
        self.assertEqual(self.finder.get_index_from_name('usr'), 1)
        self.finder.move("tmp", "usr")
        self.assertEqual(self.finder.get_index_from_name('usr'), 0)
        self.assertEqual(self.finder.get_index_from_name('tmp'), 1)
if __name__ == '__main__':
unittest.main()
| import unittest
from finder_sidebar_editor import FinderSidebar
class TestFinderSidebar(unittest.TestCase):
def setUp(self):
self.finder = FinderSidebar()
self.pre_items = [
str(uri).split("file://")[1]
for uri in self.finder.favorites.values()
]
self.finder.remove_all()
def tearDown(self):
self.finder.remove_all()
for uri in reversed(self.pre_items):
self.finder.add(uri)
def test_add(self):
self.finder.add("/tmp")
self.assertIn('tmp', self.finder.favorites.keys())
def test_get_index(self):
self.finder.add("/tmp")
self.assertEqual(self.finder.get_index_from_name("tmp"), 0)
self.finder.add("/usr")
self.assertEqual(self.finder.get_index_from_name("usr"), 0)
self.assertEqual(self.finder.get_index_from_name("tmp"), 1)
def test_remove_all(self):
self.finder.add("/tmp")
self.finder.remove_all()
self.assertFalse(self.finder.favorites)
def test_remove(self):
self.finder.add("/usr")
self.finder.remove("usr")
self.assertFalse(self.finder.favorites)
def test_remove_by_path(self):
self.finder.add("/usr")
self.finder.remove_by_path("/usr")
self.assertFalse(self.finder.favorites)
def test_move_items(self):
self.finder.add("/usr")
self.finder.add("/tmp")
self.assertEqual(self.finder.get_index_from_name('tmp'), 0)
self.assertEqual(self.finder.get_index_from_name('usr'), 1)
self.finder.move("tmp", "usr")
self.assertEqual(self.finder.get_index_from_name('usr'), 0)
self.assertEqual(self.finder.get_index_from_name('tmp'), 1)
if __name__ == '__main__':
unittest.main() | none | 1 | 2.607998 | 3 | |
src/embedding/test/face_detect/align/__init__.py | mykiscool/DeepCamera | 914 | 6615857 | <reponame>mykiscool/DeepCamera
# from . import detect_face | # from . import detect_face | en | 0.685736 | # from . import detect_face | 1.009604 | 1 |
onlineShop/Library/models.py | alirezaryahi/django-onlineShop | 0 | 6615858 | from django.db import models
# Create your models here.
class Category(models.Model):
    # Book topic/category; Persian verbose names for the Django admin.
    title = models.CharField(max_length=200, verbose_name='عنوان')
    class Meta:
        verbose_name = 'موضوع'
        verbose_name_plural = 'موضوع ها'
    def __str__(self):
        return self.title
class Author(models.Model):
    # Book author; displayed by last name.
    first_name = models.CharField(max_length=100, verbose_name='نام')
    last_name = models.CharField(max_length=100, verbose_name='نام خانوادگی')
    class Meta:
        verbose_name = 'نویسنده'
        verbose_name_plural = 'نویسندگان'
    def __str__(self):
        return self.last_name
class Book(models.Model):
    # A book for sale: belongs to one Category and one Author (deleting
    # either cascades to the book).
    category = models.ForeignKey(Category, on_delete=models.CASCADE, verbose_name='موضوع')
    author = models.ForeignKey(Author, on_delete=models.CASCADE, verbose_name='نویسنده')
    title = models.CharField(max_length=200, verbose_name='عنوان کتاب')
    description = models.TextField(verbose_name='توضیحات', null=True, blank=True)
    price = models.IntegerField(default=0, verbose_name='قیمت')
    image = models.ImageField(upload_to='books/', null=True, blank=True, verbose_name='تصویر')
    vote = models.IntegerField(default=0)
    is_exist = models.BooleanField(default=True, verbose_name='موجود')
    # NOTE(review): ``select`` looks like a content-type discriminator
    # (default 'book') -- confirm its intended use.
    select = models.CharField(max_length=100, default='book')
    class Meta:
        verbose_name = 'کتاب'
        verbose_name_plural = 'کتاب ها'
        ordering = ['-vote']  # highest-voted books first
    def __str__(self):
        return self.title
| from django.db import models
# Create your models here.
class Category(models.Model):
title = models.CharField(max_length=200, verbose_name='عنوان')
class Meta:
verbose_name = 'موضوع'
verbose_name_plural = 'موضوع ها'
def __str__(self):
return self.title
class Author(models.Model):
first_name = models.CharField(max_length=100, verbose_name='نام')
last_name = models.CharField(max_length=100, verbose_name='نام خانوادگی')
class Meta:
verbose_name = 'نویسنده'
verbose_name_plural = 'نویسندگان'
def __str__(self):
return self.last_name
class Book(models.Model):
category = models.ForeignKey(Category, on_delete=models.CASCADE, verbose_name='موضوع')
author = models.ForeignKey(Author, on_delete=models.CASCADE, verbose_name='نویسنده')
title = models.CharField(max_length=200, verbose_name='عنوان کتاب')
description = models.TextField(verbose_name='توضیحات', null=True, blank=True)
price = models.IntegerField(default=0, verbose_name='قیمت')
image = models.ImageField(upload_to='books/', null=True, blank=True, verbose_name='تصویر')
vote = models.IntegerField(default=0)
is_exist = models.BooleanField(default=True, verbose_name='موجود')
select = models.CharField(max_length=100, default='book')
class Meta:
verbose_name = 'کتاب'
verbose_name_plural = 'کتاب ها'
ordering = ['-vote']
def __str__(self):
return self.title
| en | 0.963489 | # Create your models here. | 2.387708 | 2 |
scripts/KD_REID.py | fremigereau/MTDA_KD_REID | 0 | 6615859 | <reponame>fremigereau/MTDA_KD_REID
import torch as torch
import torchreid
from torchreid.utils import Logger
import sys
import os.path as osp
import torch.optim
import torch.nn as nn
import argparse
def main():
    """Run knowledge-distillation multi-target domain adaptation for re-ID.

    Builds a log directory name from the CLI options, constructs the data
    manager, one student model and one teacher model per target dataset,
    then runs the selected KD-MTDA engine.
    """
    args = parser.parse_args()
    # Encode the experiment configuration into the log directory path.
    log_dir = 'log/KD_ReID_{stda}/{sources}2{targets}{cuhk_split}/{teacher}_t_{student}_s/{target_alt}{kd_style}{target_order}{lkd_s_w}{lda_w}'.format(
        stda=args.stda,
        target_alt='' if args.target_alt == 'full' else '_target_alt_' + args.target_alt,
        kd_style='_kd_style_outs_feats' if args.kd_style == 'all' else '_kd_style_' + args.kd_style,
        target_order='' if args.target_order == 'random' else '_' + args.target_order + '_order',
        sources=args.dataset_source if isinstance(args.dataset_source, str) else
        '+'.join([str(elem) for elem in args.dataset_source]),
        targets=args.dataset_target if isinstance(args.dataset_target, str) else
        '+'.join([str(elem) for elem in args.dataset_target]),
        teacher=args.arch_teacher,
        student=args.arch_student,
        lkd_s_w='' if args.lkds_weight == 0 else '_lkds_' + str(args.lkds_weight),
        lda_w='' if args.lda_weight == 0 else '_lda_' + str(args.lda_weight),
        cuhk_split='_new_cuhk' if args.new_cuhk else ''
    )
    log_name = 'console_txt.log'
    # Tee stdout into the experiment log file.
    sys.stdout = Logger(osp.join(log_dir, log_name))
    print("Saving experiment data to : {}".format(log_dir))
    print("==========\nArgs:{}\n==========".format(args))
    # One classification head per target when multi-head is requested.
    if args.multi_head:
        multi_head = len(args.dataset_target)
    else:
        multi_head = 0
    if args.new_cuhk:
        cuhk_classic = False
    else:
        cuhk_classic = True
    datamanager = torchreid.data.ImageDataManager(
        root=args.data_dir,
        sources=args.dataset_source,
        targets=args.dataset_target,
        height=args.height,
        width=args.width,
        batch_size_train=args.batch_size,
        batch_size_test=100,
        transforms=['random_flip', 'random_crop', 'random_erasing'],
        num_instances=args.num_instances,
        train_sampler='RandomIdentitySampler',
        load_train_targets=True,
        workers=args.workers,
        cuhk03_labeled=True,
        cuhk03_classic_split=cuhk_classic
    )
    # D-MMD teachers are classifiers over the source ids; SPCL teachers
    # are trained without a classification head (num_classes == 0).
    if args.stda == 'D-MMD':
        num_classes = datamanager.num_train_pids
        spcl = False
    else:
        num_classes = 0
        spcl = True
    print("Initialize model student")
    model_student, optimizer_student, scheduler_student, start_epoch = torchreid.initialize_model_optimizer_scheduler(
        name=args.arch_student, num_classes=datamanager.num_train_pids,
        loss='kd_reid', pretrained=True,
        optimizer_type=args.optimizer, lr=args.lr,
        lr_scheduler=args.scheduler, stepsize=args.step_size,
        path_model=args.model_path_student,
        teacher_arch=args.arch_teacher,
        spcl=False,
        load_optim=False,
        pcgrad=False,
        fc_dim=args.features,
        multi_head=multi_head
    )
    print("Initialize model(s) teacher")
    # One pre-trained teacher per target dataset, loaded from the paths
    # given on the command line (same order as the targets).
    models_teacher_list = list()
    optimizer_teacher_list = list()
    scheduler_teacher_list = list()
    for i in range(0, len(datamanager.targets)):
        model, optimizer, scheduler, start_epoch = torchreid.initialize_model_optimizer_scheduler(
            name=args.arch_teacher, num_classes=num_classes,
            loss='kd_reid', pretrained=True,
            optimizer_type=args.optimizer, lr=args.lr,
            lr_scheduler=args.scheduler, stepsize=args.step_size,
            path_model=args.model_path_teachers[i],
            teacher_arch=None,
            spcl=spcl,
            load_optim=False,
            fc_dim=args.features
        )
        models_teacher_list.append(model)
        optimizer_teacher_list.append(optimizer)
        scheduler_teacher_list.append(scheduler)
    # Select the target-alternation strategy (argparse restricts choices).
    if args.target_alt == 'full':
        engine = torchreid.engine.KDMTDAEngineOnebyOne(
            datamanager=datamanager,
            model_student=model_student,
            optimizer_student=optimizer_student,
            scheduler_student=scheduler_student,
            models_teacher_list=models_teacher_list,
            optimizer_teacher_list=optimizer_teacher_list,
            scheduler_teacher_list=scheduler_teacher_list,
            label_smooth=True,
            mmd_only=False,
            kd_style=args.kd_style,
            lda_weight=args.lda_weight,
            lkds_weight=args.lkds_weight,
            lkdt_weight=args.lkdt_weight,
            target_order=args.target_order,
            log_loss=args.log_loss
        )
    elif args.target_alt == 'batch':
        engine = torchreid.engine.MTDAEnginePerBatch(
            datamanager=datamanager,
            model_student=model_student,
            optimizer_student=optimizer_student,
            scheduler_student=scheduler_student,
            models_teacher_list=models_teacher_list,
            optimizer_teacher_list=optimizer_teacher_list,
            scheduler_teacher_list=scheduler_teacher_list,
            label_smooth=True,
            mmd_only=False,
            kd_style=args.kd_style,
            lda_weight=args.lda_weight,
            lkds_weight=args.lkds_weight,
            lkdt_weight=args.lkdt_weight,
            target_order=args.target_order,
            log_loss=args.log_loss
        )
    else:
        # NOTE(review): unreachable while argparse 'choices' only allows
        # 'full'/'batch'/'combined'; 'combined' would crash on engine.run.
        engine = None
    # engine.run(
    #     save_dir=log_dir,
    #     test_only=True
    # )
    # NOTE(review): this discards any start_epoch loaded from checkpoints.
    start_epoch = 0
    # Start the domain adaptation
    engine.run(
        save_dir=log_dir,
        max_epoch=args.epochs,
        eval_freq=args.eval_freq,
        print_freq=args.print_freq,
        test_only=False,
        visrank=False,
        start_epoch=start_epoch,
        use_tensorboard=args.tensorboard,
        eval_teachers=False,
        use_metric_cuhk03=True
    )
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Pre-training on source for D-MMD")
# data
parser.add_argument('-ds', '--dataset-source', type=str, default='msmt17')
parser.add_argument('-dt', '--dataset-target',type=str, nargs='+', default='market1501')
parser.add_argument('--new-cuhk', action='store_true', default=False)
parser.add_argument('-b', '--batch-size', type=int, default=32)
parser.add_argument('-j', '--workers', type=int, default=4)
parser.add_argument('--height', type=int, default=256, help="input height")
parser.add_argument('--width', type=int, default=128, help="input width")
parser.add_argument('--num-instances', type=int, default=4,
help="each minibatch consist of "
"(batch_size // num_instances) identities, and "
"each identity has num_instances instances, "
"default: 0 (NOT USE)")
# model
parser.add_argument('-at', '--arch-teacher', type=str, default='resnet50')
parser.add_argument('-as', '--arch-student', type=str, default='resnet50')
parser.add_argument('--features', type=int, default=2048)
parser.add_argument('--dropout', type=float, default=0)
parser.add_argument('--momentum', type=float, default=0.2,
help="update momentum for the hybrid memory")
parser.add_argument('--multi-head', action='store_true', default=False)
# optimizer
parser.add_argument('--optimizer', type=str, default='sgd')
parser.add_argument('--lr', type=float, default=0.01,
help="learning rate")
parser.add_argument('--epochs', type=int, default=50)
parser.add_argument('--scheduler', type=str, default='single_step')
parser.add_argument('--step-size', type=int, default=5)
# training configs
parser.add_argument('--print-freq', type=int, default=200)
parser.add_argument('--eval-freq', type=int, default=5)
parser.add_argument('--tensorboard', action='store_true', default=False)
parser.add_argument('--target-alt', type=str ,choices=['full','batch','combined'], default='batch')
parser.add_argument('--kd-style', type=str ,choices=['all', 'only_feats', 'only_outputs'], default='only_outputs')
parser.add_argument('--lda_weight', type=float, default=0)
parser.add_argument('--lkds_weight', type=float, default=0)
parser.add_argument('--lkdt_weight', type=float, default=1)
parser.add_argument('--target-order', type=str ,choices=['random', 'fixed', 'shift'], default='random')
parser.add_argument('--stda', type=str ,choices=['D-MMD', 'SPCL'], default='D-MMD')
parser.add_argument('--log-loss', action='store_true', default=False)
# path
working_dir = osp.dirname(osp.abspath(__file__))
parser.add_argument('--data-dir', type=str, metavar='PATH', default=osp.join(working_dir, 'reid-data'))
parser.add_argument('-mps', '--model-path-student', type=str, metavar='PATH')
parser.add_argument('-mpt', '--model-path-teachers',type=str, nargs='+', metavar='PATH')
main() | import torch as torch
import torchreid
from torchreid.utils import Logger
import sys
import os.path as osp
import torch.optim
import torch.nn as nn
import argparse
def main():
    """CLI entry point: multi-teacher knowledge distillation for person re-ID.

    Builds source/target dataloaders, one student model and one teacher model
    per target dataset, then runs the selected multi-target domain-adaptation
    (MTDA) engine.  Relies on the module-level ``parser`` constructed in the
    ``__main__`` block below.
    """
    args = parser.parse_args()
    # Encode the full experiment configuration in the log-directory path so a
    # run is identifiable from its folder name alone.
    log_dir = 'log/KD_ReID_{stda}/{sources}2{targets}{cuhk_split}/{teacher}_t_{student}_s/{target_alt}{kd_style}{target_order}{lkd_s_w}{lda_w}'.format(
        stda=args.stda,
        target_alt='' if args.target_alt == 'full' else '_target_alt_' + args.target_alt,
        kd_style='_kd_style_outs_feats' if args.kd_style == 'all' else '_kd_style_' + args.kd_style,
        target_order='' if args.target_order == 'random' else '_' + args.target_order + '_order',
        sources=args.dataset_source if isinstance(args.dataset_source, str) else
        '+'.join([str(elem) for elem in args.dataset_source]),
        targets=args.dataset_target if isinstance(args.dataset_target, str) else
        '+'.join([str(elem) for elem in args.dataset_target]),
        teacher=args.arch_teacher,
        student=args.arch_student,
        lkd_s_w='' if args.lkds_weight == 0 else '_lkds_' + str(args.lkds_weight),
        lda_w='' if args.lda_weight == 0 else '_lda_' + str(args.lda_weight),
        cuhk_split='_new_cuhk' if args.new_cuhk else ''
    )
    log_name = 'console_txt.log'
    # Mirror stdout into the run's log file.
    sys.stdout = Logger(osp.join(log_dir, log_name))
    print("Saving experiment data to : {}".format(log_dir))
    print("==========\nArgs:{}\n==========".format(args))
    # One readout head per target dataset when --multi-head is set;
    # 0 disables the multi-head path.
    if args.multi_head:
        multi_head = len(args.dataset_target)
    else:
        multi_head = 0
    # CUHK03 protocol: classic 20-split unless --new-cuhk selects the newer split.
    if args.new_cuhk:
        cuhk_classic = False
    else:
        cuhk_classic = True
    datamanager = torchreid.data.ImageDataManager(
        root=args.data_dir,
        sources=args.dataset_source,
        targets=args.dataset_target,
        height=args.height,
        width=args.width,
        batch_size_train=args.batch_size,
        batch_size_test=100,
        transforms=['random_flip', 'random_crop', 'random_erasing'],
        num_instances=args.num_instances,
        train_sampler='RandomIdentitySampler',
        load_train_targets=True,
        workers=args.workers,
        cuhk03_labeled=True,
        cuhk03_classic_split=cuhk_classic
    )
    # D-MMD teachers keep a source-identity classifier head; SPCL teachers
    # are built without one (0 classes).
    if args.stda == 'D-MMD':
        num_classes = datamanager.num_train_pids
        spcl = False
    else:
        num_classes = 0
        spcl = True
    print("Initialize model student")
    model_student, optimizer_student, scheduler_student, start_epoch = torchreid.initialize_model_optimizer_scheduler(
        name=args.arch_student, num_classes=datamanager.num_train_pids,
        loss='kd_reid', pretrained=True,
        optimizer_type=args.optimizer, lr=args.lr,
        lr_scheduler=args.scheduler, stepsize=args.step_size,
        path_model=args.model_path_student,
        teacher_arch=args.arch_teacher,
        spcl=False,
        load_optim=False,
        pcgrad=False,
        fc_dim=args.features,
        multi_head=multi_head
    )
    print("Initialize model(s) teacher")
    # One (model, optimizer, scheduler) triple per target dataset; the i-th
    # teacher is restored from args.model_path_teachers[i].
    models_teacher_list = list()
    optimizer_teacher_list = list()
    scheduler_teacher_list = list()
    for i in range(0, len(datamanager.targets)):
        model, optimizer, scheduler, start_epoch = torchreid.initialize_model_optimizer_scheduler(
            name=args.arch_teacher, num_classes=num_classes,
            loss='kd_reid', pretrained=True,
            optimizer_type=args.optimizer, lr=args.lr,
            lr_scheduler=args.scheduler, stepsize=args.step_size,
            path_model=args.model_path_teachers[i],
            teacher_arch=None,
            spcl=spcl,
            load_optim=False,
            fc_dim=args.features
        )
        models_teacher_list.append(model)
        optimizer_teacher_list.append(optimizer)
        scheduler_teacher_list.append(scheduler)
    # Engine selection: 'full' alternates targets one-by-one over full passes,
    # 'batch' alternates per mini-batch.
    if args.target_alt == 'full':
        engine = torchreid.engine.KDMTDAEngineOnebyOne(
            datamanager=datamanager,
            model_student=model_student,
            optimizer_student=optimizer_student,
            scheduler_student=scheduler_student,
            models_teacher_list=models_teacher_list,
            optimizer_teacher_list=optimizer_teacher_list,
            scheduler_teacher_list=scheduler_teacher_list,
            label_smooth=True,
            mmd_only=False,
            kd_style=args.kd_style,
            lda_weight=args.lda_weight,
            lkds_weight=args.lkds_weight,
            lkdt_weight=args.lkdt_weight,
            target_order=args.target_order,
            log_loss=args.log_loss
        )
    elif args.target_alt == 'batch':
        engine = torchreid.engine.MTDAEnginePerBatch(
            datamanager=datamanager,
            model_student=model_student,
            optimizer_student=optimizer_student,
            scheduler_student=scheduler_student,
            models_teacher_list=models_teacher_list,
            optimizer_teacher_list=optimizer_teacher_list,
            scheduler_teacher_list=scheduler_teacher_list,
            label_smooth=True,
            mmd_only=False,
            kd_style=args.kd_style,
            lda_weight=args.lda_weight,
            lkds_weight=args.lkds_weight,
            lkdt_weight=args.lkdt_weight,
            target_order=args.target_order,
            log_loss=args.log_loss
        )
    else:
        # NOTE(review): --target-alt=combined is accepted by the parser but
        # has no engine implementation; engine stays None and engine.run()
        # below will raise AttributeError.
        engine = None
    # engine.run(
    #     save_dir=log_dir,
    #     test_only=True
    # )
    # Always restart from epoch 0: the start_epoch restored by the model
    # loaders above is deliberately discarded.
    start_epoch = 0
    # Start the domain adaptation
    engine.run(
        save_dir=log_dir,
        max_epoch=args.epochs,
        eval_freq=args.eval_freq,
        print_freq=args.print_freq,
        test_only=False,
        visrank=False,
        start_epoch=start_epoch,
        use_tensorboard=args.tensorboard,
        eval_teachers=False,
        use_metric_cuhk03=True
    )
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="Pre-training on source for D-MMD")
    # data
    parser.add_argument('-ds', '--dataset-source', type=str, default='msmt17')
    # one or more target datasets; one teacher model is built per target
    parser.add_argument('-dt', '--dataset-target', type=str, nargs='+', default='market1501')
    parser.add_argument('--new-cuhk', action='store_true', default=False)
    parser.add_argument('-b', '--batch-size', type=int, default=32)
    parser.add_argument('-j', '--workers', type=int, default=4)
    parser.add_argument('--height', type=int, default=256, help="input height")
    parser.add_argument('--width', type=int, default=128, help="input width")
    parser.add_argument('--num-instances', type=int, default=4,
                        help="each minibatch consist of "
                             "(batch_size // num_instances) identities, and "
                             "each identity has num_instances instances, "
                             "default: 0 (NOT USE)")
    # model
    parser.add_argument('-at', '--arch-teacher', type=str, default='resnet50')
    parser.add_argument('-as', '--arch-student', type=str, default='resnet50')
    parser.add_argument('--features', type=int, default=2048)
    parser.add_argument('--dropout', type=float, default=0)
    parser.add_argument('--momentum', type=float, default=0.2,
                        help="update momentum for the hybrid memory")
    parser.add_argument('--multi-head', action='store_true', default=False)
    # optimizer
    parser.add_argument('--optimizer', type=str, default='sgd')
    parser.add_argument('--lr', type=float, default=0.01,
                        help="learning rate")
    parser.add_argument('--epochs', type=int, default=50)
    parser.add_argument('--scheduler', type=str, default='single_step')
    parser.add_argument('--step-size', type=int, default=5)
    # training configs
    parser.add_argument('--print-freq', type=int, default=200)
    parser.add_argument('--eval-freq', type=int, default=5)
    parser.add_argument('--tensorboard', action='store_true', default=False)
    # how targets alternate during training (NOTE(review): 'combined' is
    # accepted here but not implemented in main())
    parser.add_argument('--target-alt', type=str, choices=['full', 'batch', 'combined'], default='batch')
    # which signals are distilled: teacher outputs, features, or both
    parser.add_argument('--kd-style', type=str, choices=['all', 'only_feats', 'only_outputs'], default='only_outputs')
    # loss weights: domain adaptation, student KD, teacher KD
    parser.add_argument('--lda_weight', type=float, default=0)
    parser.add_argument('--lkds_weight', type=float, default=0)
    parser.add_argument('--lkdt_weight', type=float, default=1)
    parser.add_argument('--target-order', type=str, choices=['random', 'fixed', 'shift'], default='random')
    # single-target DA method used to pre-train the teachers
    parser.add_argument('--stda', type=str, choices=['D-MMD', 'SPCL'], default='D-MMD')
    parser.add_argument('--log-loss', action='store_true', default=False)
    # path
    working_dir = osp.dirname(osp.abspath(__file__))
    parser.add_argument('--data-dir', type=str, metavar='PATH', default=osp.join(working_dir, 'reid-data'))
    parser.add_argument('-mps', '--model-path-student', type=str, metavar='PATH')
    # one checkpoint path per target dataset, same order as --dataset-target
    parser.add_argument('-mpt', '--model-path-teachers', type=str, nargs='+', metavar='PATH')
main() | en | 0.42947 | # engine.run( # save_dir=log_dir, # test_only=True # ) # Start the domain adaptation # data # model # optimizer # training configs # path | 2.15026 | 2 |
main.py | liqinnetgain/redenv | 0 | 6615860 | <reponame>liqinnetgain/redenv<gh_stars>0
# -*- coding: utf-8 -*-
"""
@author: aoqingy
"""
import os
import sys
import cv2
import json
import time
import model
def detect(path):
    """Run the OCR text-detection model on the screenshot at ``path``.

    Returns the model's per-line result list; each item is a dict with at
    least a ``'text'`` key, as consumed by the parse_* helpers below.
    """
    _, result, _ = model.model(cv2.imread(path),
                               detectAngle=False,
                               config=dict(MAX_HORIZONTAL_GAP=50,  # max gap between characters when merging text lines
                                           MIN_V_OVERLAPS=0.6,
                                           MIN_SIZE_SIM=0.6,
                                           TEXT_PROPOSALS_MIN_SCORE=0.1,
                                           TEXT_PROPOSALS_NMS_THRESH=0.3,
                                           TEXT_LINE_NMS_THRESH=0.7,  # IoU threshold between text lines
                                           ),
                               leftAdjust=True,  # extend detected text lines to the left
                               rightAdjust=True,  # extend detected text lines to the right
                               alph=0.01,  # extension factor for the left/right stretch
                               )
    return result
def parse_text(result):
    """Debug helper: print the recognised text of every OCR result line."""
    texts = (entry['text'] for entry in result)
    for text in texts:
        print(text)
def parse_sender(result):
    """Return the red-packet sender's name, or '' when no sender line exists.

    The sender line is the first OCR line containing "的红包"; its last four
    characters are stripped to leave the name (matching the original parsing
    rule, which assumes a fixed-length suffix in the OCR output).
    """
    marker = u"的红包"
    sender_line = next(
        (entry['text'] for entry in result if marker in entry['text']), None)
    return '' if sender_line is None else sender_line[:-4]
def parse_speed(result):
    """Return ``(total, speed)`` parsed from the "grabbed out" summary line.

    The summary line is the first OCR line containing "被抢光" and must hold
    exactly one comma; the two halves are returned with their fixed-width
    suffixes stripped (2 and 3 characters respectively — presumably unit
    text; verify against real screenshots).  Returns ('', '') when no such
    line is present.
    """
    marker = u"被抢光"
    for entry in result:
        text = entry['text']
        if marker not in text:
            continue
        total_part, speed_part = text.split(',')
        return total_part[:-2], speed_part[:-3]
    return '', ''
def parse_players(result):
    """Parse per-player entries from the OCR result lines.

    Skips everything up to the summary line containing "被抢光", then walks
    the remaining lines assuming an alternating name/amount layout.  A line
    containing "手气" ("best luck") marks the current player as the biggest
    winner, and timestamp lines (containing ':') are skipped.

    Returns a list of dicts with keys 'player', 'amount' (as a str of a
    float) and, optionally, 'largest'.

    NOTE(review): the exact line layout is an assumption inferred from the
    index arithmetic below — confirm against real screenshots.  Raises
    IndexError/ValueError on layouts it does not expect (the caller wraps
    this function in a try/except).
    """
    rplayers = []
    start = 0
    found = 0  # NOTE(review): never used
    index = 0
    while True:
        # Phase 1: advance past the "被抢光" summary line before parsing.
        if not start:
            if u"被抢光" in result[index]['text']:
                start = index + 1
            index += 1
            continue
        print('---', index, '---')
        rplayer = {}
        # An entry is a pair of lines: (amount, name) or (name, amount);
        # amount lines end with the currency character "元".
        if result[index]['text'].endswith(u'元'):
            try:
                rplayer['amount'] = str(float(result[index]['text'][:-1]))
                rplayer['player'] = result[index+1]['text']
                index += 2
            except:
                # float() failed: the "元" line was not a clean amount, so
                # treat this line as the name and the next as the amount.
                rplayer['player'] = result[index]['text']
                rplayer['amount'] = str(float(result[index+1]['text'][:-1]))
                index += 2
        else:
            rplayer['player'] = result[index]['text']
            rplayer['amount'] = str(float(result[index+1]['text'][:-1]))
            index += 2
        # Skip one stray OCR line that is neither a luck marker, an amount,
        # nor a timestamp.
        if (index < len(result) and
                not u'手气' in result[index]['text'] and
                not u'元' in result[index]['text'] and
                not ':' in result[index]['text']):
            index += 1
        # "手气" marks this player as the luckiest (largest share).
        if ((index < len(result) and u'手气' in result[index]['text']) or
                (index+1 < len(result) and u'手气' in result[index+1]['text'])):
            rplayer['largest'] = 'True'
            index += 1
        # Skip a timestamp line (contains ':').
        if ((index < len(result) and ':' in result[index]['text']) or
                (index+1 < len(result) and ':' in result[index+1]['text'])):
            index += 1
            tflag = True  # NOTE(review): never used
        # The luck marker may also appear after the timestamp line.
        if ((index < len(result) and u'手气' in result[index]['text']) or
                (index+1 < len(result) and u'手气' in result[index+1]['text'])):
            rplayer['largest'] = 'True'
            index += 1
        rplayers.append(rplayer)
        if index >= len(result):
            break
    return rplayers
if __name__ == "__main__":
    # Usage: python main.py <directory-of-screenshots>.  The directory name
    # is the date (YYYYMMDD) and is reused to name the output files.
    if len(sys.argv) != 2:
        print("No path specified!")
        sys.exit(1)
    print(len(sys.argv))
    if not os.path.isdir(sys.argv[1]):
        print("Path invalid!")
        sys.exit(1)
    print(os.path.dirname(sys.argv[1]))
    print(os.path.basename(sys.argv[1]))
    _path = sys.argv[1]
    _date = os.path.basename(sys.argv[1])
    # Outputs: one Excel summary plus two HTML bar charts (senders/grabbers).
    _xlsx = os.path.join(os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1])+'.xlsx')
    _send = os.path.join(os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1])+'.send.html')
    _play = os.path.join(os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1])+'.play.html')
    print(_xlsx)
    import xlrd  # NOTE(review): imported but never used
    import xlsxwriter
    book = xlsxwriter.Workbook(_xlsx)
    sheet = book.add_worksheet()
    # Default cell format (black text).
    iformat = book.add_format()
    iformat.set_text_wrap()
    iformat.set_font_name('Microsoft YaHei')
    iformat.set_bold(False)
    iformat.set_align('left')
    iformat.set_align('vcenter')
    iformat.set_font_color('black')
    # Highlight format (red text) for the luckiest player of each packet.
    vformat = book.add_format()
    vformat.set_text_wrap()
    vformat.set_font_name('Microsoft YaHei')
    vformat.set_bold(False)
    vformat.set_align('left')
    vformat.set_align('vcenter')
    vformat.set_font_color('red')
    sheet.set_column('A:A', 6)
    sheet.set_column('B:B', 18)
    sheet.set_column('C:C', 10)
    sheet.set_column('D:M', 12)
    # Write the header row (serial no. / sender / grab time / grabbers 1-10).
    sheet.write('A1', u"序号", iformat)
    sheet.write('B1', u"发红包", iformat)
    sheet.write('C1', u"抢包时间", iformat)
    sheet.write('D1', u"抢包一", iformat)
    sheet.write('E1', u"抢包二", iformat)
    sheet.write('F1', u"抢包三", iformat)
    sheet.write('G1', u"抢包四", iformat)
    sheet.write('H1', u"抢包五", iformat)
    sheet.write('I1', u"抢包六", iformat)
    sheet.write('J1', u"抢包七", iformat)
    sheet.write('K1', u"抢包八", iformat)
    sheet.write('L1', u"抢包九", iformat)
    sheet.write('M1', u"抢包十", iformat)
    sdict = {}  # senders leaderboard: name -> total amount sent
    pdict = {}  # grabbers leaderboard: name -> total amount received
    count = 1  # packet serial number; also indexes the spreadsheet row
    for _file in sorted(os.listdir(_path), reverse=False):
        print("=============================================")
        print(_file)
        result = detect(os.path.join(_path, _file))
        print(parse_text(result))
        sheet.write('A'+str(count+1), str(count), iformat)
        sender = parse_sender(result)
        sheet.write('B'+str(count+1), sender, iformat)
        # Each packet credits the sender with 10 — presumably a fixed
        # 10-yuan packet size; confirm against the group's rules.
        sdict[sender] = sdict.get(sender, 0) + 10
        sheet.write('C'+str(count+1), '/'.join(parse_speed(result)), iformat)
        try:
            players = parse_players(result)
        except Exception as e:  # NOTE(review): exception value is discarded
            # Parsing failed for this screenshot: leave the grabber cells
            # blank and move on to the next file.
            print("********************************************")
            print("********************************************")
            print("********************************************")
            count += 1
            continue
        print(players)
        LABELS = ['D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M']
        for i in range(0, 10):
            if len(players) <= i:
                continue
            player = players[i].get('player', '无名氏')
            amount = players[i].get('amount', 0.0)
            # The luckiest grabber of the packet is written in red.
            if players[i].get('largest', ''):
                sheet.write(LABELS[i]+str(count+1), player + '/' + str(amount), vformat)
            else:
                sheet.write(LABELS[i]+str(count+1), player + '/' + str(amount), iformat)
            pdict[player] = round(pdict.get(player, 0) + float(amount), 2)
        count += 1
        #if count == 10:
        #    break
    book.close()
    from pyecharts import options as opts
    from pyecharts.charts import Bar
    # Senders chart, sorted by total amount sent, descending.
    slist = list(sdict.items())
    slist.sort(key=lambda x: x[1], reverse=True)
    sbar = Bar()
    sbar.add_xaxis([x[0] for x in slist])
    sbar.add_yaxis("交大校友交流学习群"+_date[:4]+"年"+_date[4:6]+"月"+_date[6:]+"日红包爱心榜", [x[1] for x in slist])
    sbar.set_global_opts(xaxis_opts=opts.AxisOpts(axislabel_opts=opts.LabelOpts(interval=0, rotate=30)))
    sbar.render(_send)
    # Grabbers chart, sorted by total amount received, ascending.
    plist = list(pdict.items())
    plist.sort(key=lambda x: x[1], reverse=False)
    pbar = Bar()
    pbar.add_xaxis([x[0] for x in plist])
    pbar.add_yaxis("交大校友交流学习群"+_date[:4]+"年"+_date[4:6]+"月"+_date[6:]+"日红包福利榜", [x[1] for x in plist])
    pbar.set_global_opts(xaxis_opts=opts.AxisOpts(axislabel_opts=opts.LabelOpts(interval=0, rotate=45)))
    pbar.render(_play)
| # -*- coding: utf-8 -*-
"""
@author: aoqingy
"""
import os
import sys
import cv2
import json
import time
import model
def detect(path):
_,result,_ = model.model(cv2.imread(path),
detectAngle = False,
config = dict(MAX_HORIZONTAL_GAP = 50,#字符之间最大间隔,用于文本行合并
MIN_V_OVERLAPS = 0.6,
MIN_SIZE_SIM = 0.6,
TEXT_PROPOSALS_MIN_SCORE = 0.1,
TEXT_PROPOSALS_NMS_THRESH = 0.3,
TEXT_LINE_NMS_THRESH = 0.7,#文本行之间测iou值
),
leftAdjust = True, #对检测的文本行进行向左延伸
rightAdjust = True,#对检测的文本行进行向右延伸
alph = 0.01, #对检测的文本行进行向右、左延伸的倍数
)
return result
def parse_text(result):
for item in result:
print(item['text'])
def parse_sender(result):
for item in result:
if u"的红包" in item['text']:
return item['text'][:-4]
return ''
def parse_speed(result):
for item in result:
if u"被抢光" in item['text']:
total,speed = item['text'].split(',')
return total[:-2], speed[:-3]
return '', ''
def parse_players(result):
rplayers = []
start = 0
found = 0
index = 0
while True:
if not start:
if u"被抢光" in result[index]['text']:
start = index + 1
index += 1
continue
print('---', index, '---')
rplayer = {}
if result[index]['text'].endswith(u'元'):
try:
rplayer['amount'] = str(float(result[index]['text'][:-1]))
rplayer['player'] = result[index+1]['text']
index += 2
except:
rplayer['player'] = result[index]['text']
rplayer['amount'] = str(float(result[index+1]['text'][:-1]))
index += 2
else:
rplayer['player'] = result[index]['text']
rplayer['amount'] = str(float(result[index+1]['text'][:-1]))
index += 2
if (index < len(result) and
not u'手气' in result[index]['text'] and
not u'元' in result[index]['text'] and
not ':' in result[index]['text']):
index += 1
if ((index < len(result) and u'手气' in result[index]['text']) or
(index+1 < len(result) and u'手气' in result[index+1]['text'])):
rplayer['largest'] = 'True'
index += 1
if ((index < len(result) and ':' in result[index]['text']) or
(index+1 < len(result) and ':' in result[index+1]['text'])):
index += 1
tflag = True
if ((index < len(result) and u'手气' in result[index]['text']) or
(index+1 < len(result) and u'手气' in result[index+1]['text'])):
rplayer['largest'] = 'True'
index += 1
rplayers.append(rplayer)
if index >= len(result):
break
return rplayers
if __name__ == "__main__":
if len(sys.argv) != 2:
print("No path specified!")
sys.exit(1)
print(len(sys.argv))
if not os.path.isdir(sys.argv[1]):
print("Path invalid!")
sys.exit(1)
print(os.path.dirname(sys.argv[1]))
print(os.path.basename(sys.argv[1]))
_path = sys.argv[1]
_date = os.path.basename(sys.argv[1])
_xlsx = os.path.join(os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1])+'.xlsx')
_send = os.path.join(os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1])+'.send.html')
_play = os.path.join(os.path.dirname(sys.argv[1]), os.path.basename(sys.argv[1])+'.play.html')
print(_xlsx)
import xlrd
import xlsxwriter
book = xlsxwriter.Workbook(_xlsx)
sheet = book.add_worksheet()
iformat = book.add_format()
iformat.set_text_wrap()
iformat.set_font_name('Microsoft YaHei')
iformat.set_bold(False)
iformat.set_align('left')
iformat.set_align('vcenter')
iformat.set_font_color('black')
vformat = book.add_format()
vformat.set_text_wrap()
vformat.set_font_name('Microsoft YaHei')
vformat.set_bold(False)
vformat.set_align('left')
vformat.set_align('vcenter')
vformat.set_font_color('red')
sheet.set_column('A:A',6)
sheet.set_column('B:B',18)
sheet.set_column('C:C',10)
sheet.set_column('D:M',12)
#写标题行
sheet.write('A1', u"序号", iformat)
sheet.write('B1', u"发红包", iformat)
sheet.write('C1', u"抢包时间", iformat)
sheet.write('D1', u"抢包一", iformat)
sheet.write('E1', u"抢包二", iformat)
sheet.write('F1', u"抢包三", iformat)
sheet.write('G1', u"抢包四", iformat)
sheet.write('H1', u"抢包五", iformat)
sheet.write('I1', u"抢包六", iformat)
sheet.write('J1', u"抢包七", iformat)
sheet.write('K1', u"抢包八", iformat)
sheet.write('L1', u"抢包九", iformat)
sheet.write('M1', u"抢包十", iformat)
sdict = {} #发红包榜
pdict = {} #抢红包榜
count = 1
for _file in sorted(os.listdir(_path), reverse=False):
print("=============================================")
print(_file)
result = detect(os.path.join(_path, _file))
print(parse_text(result))
sheet.write('A'+str(count+1), str(count), iformat)
sender = parse_sender(result)
sheet.write('B'+str(count+1), sender, iformat)
sdict[sender] = sdict.get(sender, 0) + 10
sheet.write('C'+str(count+1), '/'.join(parse_speed(result)), iformat)
try:
players = parse_players(result)
except Exception as e:
print("********************************************")
print("********************************************")
print("********************************************")
count += 1
continue
print(players)
LABELS = ['D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M']
for i in range(0, 10):
if len(players) <= i:
continue
player = players[i].get('player', '无名氏')
amount = players[i].get('amount', 0.0)
if players[i].get('largest', ''):
sheet.write(LABELS[i]+str(count+1), player + '/' + str(amount), vformat)
else:
sheet.write(LABELS[i]+str(count+1), player + '/' + str(amount), iformat)
pdict[player] = round(pdict.get(player, 0) + float(amount), 2)
count += 1
#if count == 10:
# break
book.close()
from pyecharts import options as opts
from pyecharts.charts import Bar
#按从大到小的顺序显示红包发放榜
slist = list(sdict.items())
slist.sort(key=lambda x:x[1],reverse=True)
sbar = Bar()
sbar.add_xaxis([x[0] for x in slist])
sbar.add_yaxis("交大校友交流学习群"+_date[:4]+"年"+_date[4:6]+"月"+_date[6:]+"日红包爱心榜", [x[1] for x in slist])
sbar.set_global_opts(xaxis_opts=opts.AxisOpts(axislabel_opts=opts.LabelOpts(interval=0, rotate=30)))
sbar.render(_send)
#按从小到大的顺序显示红包收益榜
plist = list(pdict.items())
plist.sort(key=lambda x:x[1],reverse=False)
pbar = Bar()
pbar.add_xaxis([x[0] for x in plist])
pbar.add_yaxis("交大校友交流学习群"+_date[:4]+"年"+_date[4:6]+"月"+_date[6:]+"日红包福利榜", [x[1] for x in plist])
pbar.set_global_opts(xaxis_opts=opts.AxisOpts(axislabel_opts=opts.LabelOpts(interval=0, rotate=45)))
pbar.render(_play) | zh | 0.873289 | # -*- coding: utf-8 -*- @author: aoqingy #字符之间最大间隔,用于文本行合并 #文本行之间测iou值 #对检测的文本行进行向左延伸 #对检测的文本行进行向右延伸 #对检测的文本行进行向右、左延伸的倍数 #写标题行 #发红包榜 #抢红包榜 #if count == 10: # break #按从大到小的顺序显示红包发放榜 #按从小到大的顺序显示红包收益榜 | 2.369799 | 2 |
telemetry/telemetry/internal/backends/chrome_inspector/native_profiling_backend.py | BearerPipelineTest/catapult | 0 | 6615861 | # Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import absolute_import
import logging
import threading
import traceback
from telemetry.internal.backends.chrome_inspector import inspector_websocket
from telemetry.internal.backends.chrome_inspector import websocket
class NativeProfilingTimeoutException(Exception):
  """Raised when a NativeProfiling DevTools request times out."""
  pass


class NativeProfilingUnrecoverableException(Exception):
  """Raised when a NativeProfiling DevTools request fails unrecoverably."""
  pass


class NativeProfilingUnexpectedResponseException(Exception):
  """Raised when the browser returns an unexpected NativeProfiling response."""
  pass
class NativeProfilingBackend(object):
  """Issues NativeProfiling DevTools requests over an inspector websocket."""

  def __init__(self, inspector_socket):
    self._inspector_websocket = inspector_socket

  def DumpProfilingDataOfAllProcesses(self, timeout=120):
    """Causes all profiling data of all Chrome processes to be dumped to disk.

    Sends the 'NativeProfiling.dumpProfilingDataOfAllProcesses' DevTools
    request asynchronously and blocks until the browser acknowledges it or
    |timeout| seconds elapse.

    Args:
      timeout: Seconds to wait for the browser's acknowledgement.

    Raises:
      NativeProfilingTimeoutException: if the websocket request times out.
      NativeProfilingUnrecoverableException: on any other websocket error.
    """
    method = 'NativeProfiling.dumpProfilingDataOfAllProcesses'
    request = {'method': method}
    try:
      logging.warning('Requesting PGO profiles to be dumped')
      response_event = threading.Event()
      def ws_callback(unused_response):
        logging.warning('PGO profile dump done')
        response_event.set()
      self._inspector_websocket.AsyncRequest(request, ws_callback)
      # NOTE: a False return here (callback never fired within |timeout|)
      # is deliberately not escalated; only websocket exceptions are.
      response_event.wait(timeout)
    except inspector_websocket.WebSocketException as err:
      if issubclass(
          err.websocket_error_type, websocket.WebSocketTimeoutException):
        raise NativeProfilingTimeoutException(
            'Exception raised while sending a %s request:\n%s' %
            (method, traceback.format_exc()))
      # Fix: this used to fall through to an unconditional raise that also
      # executed on the success path (the method could never return
      # normally); any non-timeout websocket error is escalated here
      # instead.
      raise NativeProfilingUnrecoverableException(
          'Exception raised while sending a %s request:\n%s' %
          (method, traceback.format_exc()))

  def Close(self):
    """Drops the reference to the inspector websocket."""
    self._inspector_websocket = None
| # Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import absolute_import
import logging
import threading
import traceback
from telemetry.internal.backends.chrome_inspector import inspector_websocket
from telemetry.internal.backends.chrome_inspector import websocket
class NativeProfilingTimeoutException(Exception):
pass
class NativeProfilingUnrecoverableException(Exception):
pass
class NativeProfilingUnexpectedResponseException(Exception):
pass
class NativeProfilingBackend(object):
def __init__(self, inspector_socket):
self._inspector_websocket = inspector_socket
def DumpProfilingDataOfAllProcesses(self, timeout=120):
"""Causes all profiling data of all Chrome processes to be dumped to disk.
"""
method = 'NativeProfiling.dumpProfilingDataOfAllProcesses'
request = {'method': method}
try:
logging.warning('Requesting PGO profiles to be dumped')
response_event = threading.Event()
def ws_callback(unused_response):
logging.warning('PGO profile dump done')
response_event.set()
self._inspector_websocket.AsyncRequest(request, ws_callback)
response_event.wait(timeout)
except inspector_websocket.WebSocketException as err:
if issubclass(
err.websocket_error_type, websocket.WebSocketTimeoutException):
raise NativeProfilingTimeoutException(
'Exception raised while sending a %s request:\n%s' %
(method, traceback.format_exc()))
else:
raise NativeProfilingUnrecoverableException(
'Exception raised while sending a %s request:\n%s' %
(method, traceback.format_exc()))
raise NativeProfilingUnrecoverableException(
'Exception raised while sending a %s request:\n%s' %
(method, traceback.format_exc()))
def Close(self):
self._inspector_websocket = None
| en | 0.893331 | # Copyright 2021 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. Causes all profiling data of all Chrome processes to be dumped to disk. | 1.687901 | 2 |
Python/Random/File-Handling/02-read-Function.py | shihab4t/Books-Code | 0 | 6615862 | <filename>Python/Random/File-Handling/02-read-Function.py
# Demonstrates reading a text file in fixed-size chunks with read(size).
with open("text.txt", 'r') as file:
    # print(file.read())      # read the whole file at once
    # print(file.read(100))   # read the next 100 characters
    # print(file.read(100))
    size_to_read = 10
    content = file.read(size_to_read)
    # read(size) returns '' at end-of-file, which ends the loop.
    while content != "":
        print(content, end="*")
        content = file.read(size_to_read)
| <filename>Python/Random/File-Handling/02-read-Function.py
with open("text.txt", 'r') as file:
# print(file.read())
# print(file.read(100))
# print(file.read(100))
size_to_read = 10
content = file.read(size_to_read)
while content != "":
print(content, end="*")
content = file.read(size_to_read)
| sr | 0.108946 | # print(file.read()) # print(file.read(100)) # print(file.read(100)) | 3.579904 | 4 |
neural_cotraining/models/neural_model_builder.py | Shahdsaf/neural_cotraining | 5 | 6615863 | from nnvision.models.models import se_core_gauss_readout, se_core_point_readout
from neural_cotraining.models.utils import get_model_parameters
def neural_cnn_builder(data_loaders, seed: int = 1000, **config):
    """Build a neural-prediction CNN (SE core + readout) from a config dict.

    Args:
        data_loaders: dataloaders forwarded to the nnvision model builders.
        seed: random seed forwarded to the model builders.
        **config: model configuration. ``readout_type`` ('point' or 'gauss')
            selects the readout; ``comment`` is stripped and ignored; all
            remaining keys are passed through to the builder.

    Returns:
        The constructed model.

    Raises:
        ValueError: if ``readout_type`` is missing or unrecognised.
    """
    config.pop("comment", None)
    readout_type = config.pop("readout_type", None)
    if readout_type == "point":
        model = se_core_point_readout(dataloaders=data_loaders, seed=seed, **config)
    elif readout_type == "gauss":
        model = se_core_gauss_readout(dataloaders=data_loaders, seed=seed, **config)
    else:
        # Previously an unknown readout_type crashed later with an opaque
        # UnboundLocalError on `model`; fail fast with a clear message.
        raise ValueError(
            "Unknown readout_type {!r}; expected 'point' or 'gauss'".format(
                readout_type))
    print("Model with {} parameters.".format(get_model_parameters(model)))
    return model
| from nnvision.models.models import se_core_gauss_readout, se_core_point_readout
from neural_cotraining.models.utils import get_model_parameters
def neural_cnn_builder(data_loaders, seed: int = 1000, **config):
config.pop("comment", None)
readout_type = config.pop("readout_type", None)
if readout_type == "point":
model = se_core_point_readout(dataloaders=data_loaders, seed=seed, **config)
elif readout_type == "gauss":
model = se_core_gauss_readout(dataloaders=data_loaders, seed=seed, **config)
print("Model with {} parameters.".format(get_model_parameters(model)))
return model
| none | 1 | 2.667974 | 3 | |
master_code/otherMiscCode/tools_da_riordinare/correct_substructures_coords.py | brunetto/MasterThesisCode | 0 | 6615864 | <reponame>brunetto/MasterThesisCode<filename>master_code/otherMiscCode/tools_da_riordinare/correct_substructures_coords.py
#!/usr/bin/env python
import numpy as np
import tables as tb
import time
import sys
"""Reads the file with the ids and centres of haloes that have
substructures; for each halo (id) it opens Sub.3.<id>.053.gv, reads the
coordinates from columns 8, 9, 10 (in kpc!) and corrects them (taking
periodic boundary conditions into account) with the halo-centre
coordinates taken from the initial halo list.  The coordinates are
appended to an array that is finally saved to an HDF5 file.
(Translated from the original Italian docstring.  NOTE: Python 2 script.)
"""
t = time.time()
print "Start"
halo_centers_file = 'haloes_with_substructures_centers.h5'
h5 = tb.openFile(halo_centers_file, 'r')
haloes = h5.root.data.read()
h5.close()
haloes = haloes.astype(float)
# Accumulator for the global (box-frame) sub-halo coordinates, one row per
# sub-halo: columns are x, y, z.
substructure_coord = np.empty((0, 3))
files_num = haloes[:, 0].shape[0]
void_files = 0
for i in xrange(files_num):
    print "Loop ", i, " di ", files_num
    # haloes[:, 0] is the halo id, zero-padded to 7 digits in the filename.
    sub_file = "cm/Sub.3."+'%07d'%int(haloes[i,0])+".053.gv"
    try:
        sub_coord = np.genfromtxt(sub_file, dtype='float', usecols=(8, 9, 10))
        file_check = True
    except:  # NOTE(review): bare except; presumably guards empty/missing files
        print "Void file ", sub_file
        file_check = False
        void_files += 1
    if file_check:
        # A single sub-halo comes back as a 1-D row; promote it to 2-D.
        if sub_coord.ndim == 1:
            sub_coord = sub_coord.reshape((1, sub_coord.shape[0]))
        #print "sh min ", np.amin(sub_coord, 0)
        #print "sh min ", np.amax(sub_coord, 0)
        try:
            # /1000.: sub-halo offsets are in kpc, halo centres presumably in
            # Mpc (box size 110) — TODO confirm units.  Out-of-box values are
            # wrapped back in (periodic boundary conditions), then a sanity
            # check aborts on anything still outside [0, 110].
            sub_x = sub_coord[:, 0]/1000. + haloes[i,1]
            if not np.all(sub_x > 0):
                sub_x[sub_x<0]+=110  # periodic boundary conditions
            if not np.all(sub_x < 110):
                sub_x[sub_x>110]-=110
            if not (np.all(sub_x > 0) and np.all(sub_x < 110)):
                print "negative or too big x"
                print np.all(sub_x > 0)
                print sub_x
                print haloes[i, 1:3]
                sys.exit()
            sub_y = sub_coord[:, 1]/1000. + haloes[i,2]
            if not np.all(sub_y > 0):
                sub_y[sub_y<0]+=110  # periodic boundary conditions
            if not np.all(sub_y < 110):
                sub_y[sub_y>110]-=110
            if not (np.all(sub_y > 0) and np.all(sub_y < 110)):
                print "negative or too big y"
                print np.all(sub_y > 0)
                print sub_y
                print haloes[i, 1:3]
                sys.exit()
            sub_z = sub_coord[:, 2]/1000. + haloes[i,3]
            if not np.all(sub_z > 0):
                sub_z[sub_z<0]+=110  # periodic boundary conditions
            if not np.all(sub_z < 110):
                sub_z[sub_z>110]-=110
            if not (np.all(sub_z > 0) and np.all(sub_z < 110)):
                print "negative or too big z"
                print np.all(sub_z > 0)
                print sub_z
                print haloes[i, 1:3]
                sys.exit()
            # Append this halo's sub-halo rows to the global accumulator.
            substructure_coord = np.vstack((substructure_coord, np.hstack((sub_x.reshape((sub_x.shape[0], 1)),
                                                                           sub_y.reshape((sub_y.shape[0], 1)),
                                                                           sub_z.reshape((sub_z.shape[0], 1))))))
        except:  # NOTE(review): bare except also hides the sys.exit above
            print "file ", sub_file
            print "sub_coord.shape ", sub_coord.shape
            print "sub_coord ", sub_coord
            print "haloes coord ", haloes[i, :]
            print "exit"
            sys.exit()
    else:
        pass
h5 = tb.openFile('sub_haloes_global_coords.h5', 'w')
h5.createArray(h5.root, 'data', substructure_coord)
h5.flush()
h5.close()
print "Done in ", time.time()-t, " with ", void_files, " void files"
| #!/usr/bin/env python
import numpy as np
import tables as tb
import time
import sys
"""Legge il file con id e centri degli aloni con sottostrutture, per
ogni alone (id) apre il file Sub.3.<id>.053.gv, legge le coordinate
alle colonne 8, 9, 10 (in kpc!!!) e le corregge (tenendo conto delle
condizioni periodiche) con le coordinate del centro prese dalla lista
degli aloni iniziale. Le coordinate vengono aggiunte ad un vettore
che alla fine viene salvato in un file hdf5.
"""
t = time.time()
print "Start"
halo_centers_file = 'haloes_with_substructures_centers.h5'
h5 = tb.openFile(halo_centers_file, 'r')
haloes = h5.root.data.read()
h5.close()
haloes = haloes.astype(float)
substructure_coord = np.empty((0, 3))
files_num = haloes[:, 0].shape[0]
void_files = 0
for i in xrange(files_num):
print "Loop ", i, " di ", files_num
sub_file = "cm/Sub.3."+'%07d'%int(haloes[i,0])+".053.gv"
try:
sub_coord = np.genfromtxt(sub_file, dtype='float', usecols=(8, 9, 10))
file_check = True
except:
print "Void file ", sub_file
file_check = False
void_files += 1
if file_check:
if sub_coord.ndim == 1:
sub_coord = sub_coord.reshape((1, sub_coord.shape[0]))
#print "sh min ", np.amin(sub_coord, 0)
#print "sh min ", np.amax(sub_coord, 0)
try:
sub_x = sub_coord[:, 0]/1000. + haloes[i,1]
if not np.all(sub_x > 0):
sub_x[sub_x<0]+=110 # condizioni periodiche
if not np.all(sub_x < 110):
sub_x[sub_x>110]-=110
if not (np.all(sub_x > 0) and np.all(sub_x < 110)):
print "negative or too big x"
print np.all(sub_x > 0)
print sub_x
print haloes[i, 1:3]
sys.exit()
sub_y = sub_coord[:, 1]/1000. + haloes[i,2]
if not np.all(sub_y > 0):
sub_y[sub_y<0]+=110 # condizioni periodiche
if not np.all(sub_y < 110):
sub_y[sub_y>110]-=110
if not (np.all(sub_y > 0) and np.all(sub_y < 110)):
print "negative or too big y"
print np.all(sub_y > 0)
print sub_y
print haloes[i, 1:3]
sys.exit()
sub_z = sub_coord[:, 2]/1000. + haloes[i,3]
if not np.all(sub_z > 0):
sub_z[sub_z<0]+=110 # condizioni periodiche
if not np.all(sub_z < 110):
sub_z[sub_z>110]-=110
if not (np.all(sub_z > 0) and np.all(sub_z < 110)):
print "negative or too big z"
print np.all(sub_z > 0)
print sub_z
print haloes[i, 1:3]
sys.exit()
substructure_coord = np.vstack((substructure_coord, np.hstack((sub_x.reshape((sub_x.shape[0], 1)),
sub_y.reshape((sub_y.shape[0], 1)),
sub_z.reshape((sub_z.shape[0], 1))))))
except:
print "file ", sub_file
print "sub_coord.shape ", sub_coord.shape
print "sub_coord ", sub_coord
print "haloes coord ", haloes[i, :]
print "exit"
sys.exit()
else:
pass
h5 = tb.openFile('sub_haloes_global_coords.h5', 'w')
h5.createArray(h5.root, 'data', substructure_coord)
h5.flush()
h5.close()
print "Done in ", time.time()-t, " with ", void_files, " void files" | it | 0.925485 | #!/usr/bin/env python Legge il file con id e centri degli aloni con sottostrutture, per ogni alone (id) apre il file Sub.3.<id>.053.gv, legge le coordinate alle colonne 8, 9, 10 (in kpc!!!) e le corregge (tenendo conto delle condizioni periodiche) con le coordinate del centro prese dalla lista degli aloni iniziale. Le coordinate vengono aggiunte ad un vettore che alla fine viene salvato in un file hdf5. #print "sh min ", np.amin(sub_coord, 0) #print "sh min ", np.amax(sub_coord, 0) # condizioni periodiche # condizioni periodiche # condizioni periodiche | 2.382922 | 2 |
example/main.py | adolfogc/pyxtensor | 0 | 6615865 | import example
import numpy as np
a = np.arange(5)
b = example.timesTwo(a)
assert np.allclose(2.0 * a, b)
| import example
import numpy as np
a = np.arange(5)
b = example.timesTwo(a)
assert np.allclose(2.0 * a, b)
| none | 1 | 2.070872 | 2 | |
const.py | tmr232/gopy | 0 | 6615866 | import itertools
import operator
from typing import Any
class Iota:
    """Lazy expression tree emulating Go's ``iota``.

    :meth:`make_iota` wraps an :class:`itertools.count`; arithmetic and
    bitwise operators build an expression tree instead of evaluating
    eagerly.  Every call to :meth:`__calculate__` draws the next counter
    value and evaluates the tree, which is how ``ConstNamespace`` yields a
    new constant per class-body line.
    """

    lhs: Any  # left operand: Iota, itertools.count (the iota source) or a plain value
    op: Any   # binary callable, e.g. operator.add
    rhs: Any  # right operand: Iota or a plain value

    def __init__(self, lhs, op, rhs):
        self.lhs = lhs
        self.op = op
        self.rhs = rhs

    @classmethod
    def make_iota(cls):
        """Return a fresh iota that evaluates to 0, 1, 2, ... per use."""
        return cls(lhs=itertools.count(), op=operator.add, rhs=0)

    # -- arithmetic operators: each returns a new tree node, never a value --

    def __add__(self, other):
        return Iota(self, operator.add, other)

    def __radd__(self, other):
        return Iota(other, operator.add, self)

    def __sub__(self, other):
        return Iota(self, operator.sub, other)

    def __rsub__(self, other):
        return Iota(other, operator.sub, self)

    def __mul__(self, other):
        return Iota(self, operator.mul, other)

    def __rmul__(self, other):
        return Iota(other, operator.mul, self)

    def __truediv__(self, other):
        return Iota(self, operator.truediv, other)

    def __rtruediv__(self, other):
        return Iota(other, operator.truediv, self)

    def __floordiv__(self, other):
        return Iota(self, operator.floordiv, other)

    def __rfloordiv__(self, other):
        return Iota(other, operator.floordiv, self)

    def __mod__(self, other):
        # New: '%' support, completing the arithmetic operator set.
        return Iota(self, operator.mod, other)

    def __rmod__(self, other):
        return Iota(other, operator.mod, self)

    def __pow__(self, other, modulo=None):
        # 'modulo' is accepted for protocol compatibility but ignored;
        # three-argument pow() is not supported by this expression tree.
        return Iota(self, operator.pow, other)

    def __rpow__(self, other):
        return Iota(other, operator.pow, self)

    def __lshift__(self, other):
        return Iota(self, operator.lshift, other)

    def __rlshift__(self, other):
        return Iota(other, operator.lshift, self)

    def __rshift__(self, other):
        return Iota(self, operator.rshift, other)

    def __rrshift__(self, other):
        return Iota(other, operator.rshift, self)

    # -- new: bitwise operators, common in Go-style flag/mask constants --

    def __and__(self, other):
        return Iota(self, operator.and_, other)

    def __rand__(self, other):
        return Iota(other, operator.and_, self)

    def __or__(self, other):
        return Iota(self, operator.or_, other)

    def __ror__(self, other):
        return Iota(other, operator.or_, self)

    def __xor__(self, other):
        return Iota(self, operator.xor, other)

    def __rxor__(self, other):
        return Iota(other, operator.xor, self)

    def __calculate__(self):
        """Evaluate the tree once, advancing any embedded counter."""
        lhs = self.lhs
        if isinstance(lhs, type(self)):
            lhs = lhs.__calculate__()
        elif isinstance(lhs, itertools.count):
            # The iota source itself: draw the next value (0, 1, 2, ...).
            lhs = next(lhs)
        rhs = self.rhs
        if isinstance(rhs, type(self)):
            rhs = rhs.__calculate__()
        return self.op(lhs, rhs)
class ConstNamespace(dict):
    """Class namespace for const generation & iota.

    A bare name in the class body (``B`` on its own line) reaches
    ``__getitem__`` with a missing key; the most recent ``Iota`` formula is
    then re-evaluated to produce the next constant, mimicking Go's implicit
    repetition of the previous const expression.

    See https://docs.python.org/3/reference/datamodel.html#preparing-the-class-namespace
    and https://snarky.ca/unravelling-pythons-classes/ for more info.
    """

    def __init__(self):
        super().__init__()
        # Most recent Iota expression assigned in the class body; replayed
        # for every subsequent bare name.
        self.formula = None

    def __setitem__(self, key, value):
        if isinstance(value, Iota):
            # Remember the formula so bare names can repeat it, but store
            # the evaluated integer in the namespace.
            self.formula = value
            super().__setitem__(key, value.__calculate__())
        else:
            super().__setitem__(key, value)

    def __getitem__(self, item):
        try:
            # Bug fix: the original discarded this lookup's result and fell
            # through, so every *existing* key read in the class body
            # returned None instead of its stored value.
            return super().__getitem__(item)
        except KeyError:
            if item == "__name__":
                raise
            if item == "iota":
                return Iota.make_iota()
            if self.formula is None:
                # No formula seen yet: re-raise KeyError so class-body name
                # resolution falls back to globals/builtins instead of
                # crashing on None.__calculate__().
                raise
            value = self.formula.__calculate__()
            self[item] = value
            return value
class ConstMeta(type):
    """Metaclass wiring :class:`ConstNamespace` into class creation."""

    @classmethod
    def __prepare__(metacls, name, bases):
        # Hand the class body our iota-aware mapping instead of a plain dict.
        return ConstNamespace()

    def __new__(cls, name, bases, classdict):
        # Snapshot into a regular dict so the special lookup behaviour does
        # not leak into the finished class.
        return super().__new__(cls, name, bases, dict(classdict))
# Convenience base class: subclass Const to write Go-style const blocks,
# whose bodies execute inside a ConstNamespace via the ConstMeta metaclass.
class Const(metaclass=ConstMeta): pass
class Flags(Const):
    # Inside a Const body, 'iota' resolves (via ConstNamespace) to a fresh
    # counter, and each *bare* name replays the previous formula with the
    # next counter value: A = 1 << 0 = 1, B = 1 << 1 = 2, C = 1 << 2 = 4.
    A = 1 << iota
    B
    C
def main():
    """Sanity-check the constants generated by the Flags const block."""
    expected = {"A": 1, "B": 2, "C": 4}
    for name, value in expected.items():
        assert getattr(Flags, name) == value


if __name__ == '__main__':
    main()
| import itertools
import operator
from typing import Any
class Iota:
"""Go's iota"""
lhs: Any
op: Any
rhs: Any
def __init__(self, lhs, op, rhs):
self.lhs = lhs
self.op = op
self.rhs = rhs
@classmethod
def make_iota(cls):
return cls(lhs=itertools.count(), op=operator.add, rhs=0)
def __add__(self, other):
return Iota(self, operator.add, other)
def __radd__(self, other):
return Iota(other, operator.add, self)
def __sub__(self, other):
return Iota(self, operator.sub, other)
def __rsub__(self, other):
return Iota(other, operator.sub, self)
def __mul__(self, other):
return Iota(self, operator.mul, other)
def __rmul__(self, other):
return Iota(other, operator.mul, self)
def __truediv__(self, other):
return Iota(self, operator.truediv, other)
def __rtruediv__(self, other):
return Iota(other, operator.truediv, self)
def __floordiv__(self, other):
return Iota(self, operator.floordiv, other)
def __rfloordiv__(self, other):
return Iota(other, operator.floordiv, self)
def __pow__(self, other, modulo=None):
return Iota(self, operator.pow, other)
def __rpow__(self, other):
return Iota(other, operator.pow, self)
def __lshift__(self, other):
return Iota(self, operator.lshift, other)
def __rlshift__(self, other):
return Iota(other, operator.lshift, self)
def __rshift__(self, other):
return Iota(self, operator.rshift, other)
def __rrshift__(self, other):
return Iota(other, operator.rshift, self)
def __calculate__(self):
lhs = self.lhs
if isinstance(lhs, type(self)):
lhs = lhs.__calculate__()
elif isinstance(lhs, itertools.count):
lhs = next(lhs)
rhs = self.rhs
if isinstance(rhs, type(self)):
rhs = rhs.__calculate__()
return self.op(lhs, rhs)
class ConstNamespace(dict):
"""Class namespace for const generation & iota
See https://docs.python.org/3/reference/datamodel.html#preparing-the-class-namespace
and https://snarky.ca/unravelling-pythons-classes/ for more info.
"""
def __init__(self):
super().__init__()
self.formula = None
def __setitem__(self, key, value):
if isinstance(value, Iota):
self.formula = value
super().__setitem__(key, value.__calculate__())
else:
super().__setitem__(key, value)
def __getitem__(self, item):
try:
super().__getitem__(item)
except KeyError:
if item == "__name__":
raise
if item == "iota":
return Iota.make_iota()
value = self.formula.__calculate__()
self[item] = value
return value
class ConstMeta(type):
@classmethod
def __prepare__(metacls, name, bases):
# Return our custom namespace object
return ConstNamespace()
def __new__(cls, name, bases, classdict):
# Convert the custom object to a regular dict, to avoid unwanted shenanigans.
return type.__new__(cls, name, bases, dict(classdict))
class Const(metaclass=ConstMeta): pass
class Flags(Const):
A = 1 << iota
B
C
def main():
assert Flags.A == 1
assert Flags.B == 2
assert Flags.C == 4
if __name__ == '__main__':
main()
| en | 0.550066 | Go's iota Class namespace for const generation & iota
See https://docs.python.org/3/reference/datamodel.html#preparing-the-class-namespace
and https://snarky.ca/unravelling-pythons-classes/ for more info. # Return our custom namespace object # Convert the custom object to a regular dict, to avoid unwanted shenanigans. | 3.286913 | 3 |
settings.py | ktaroabobon/SSD_portfolio_kit | 0 | 6615867 | from pathlib import Path
import configparser
def get_save_filename(filename):
    """Return *filename* with ``_predict`` inserted before its last extension.

    ``"photo.png"`` -> ``"photo_predict.png"``; only the final dot is split,
    so ``"a.tar.gz"`` -> ``"a.tar_predict.gz"``.
    """
    if "." in filename:
        stem, ext = filename.rsplit(".", 1)
        return f"{stem}_predict.{ext}"
    # Bug fix: the original returned None here, which made the module-level
    # ``PREDICT_FOLDER / get_save_filename(file_name)`` join below crash for
    # extension-less names.  Still mark the file as a prediction output.
    return f"{filename}_predict"
# Project root: the directory containing this settings module.
BASE_DIR = Path(__file__).resolve().parent
# Upload target and prediction-output directories under media/.
UPLOAD_FOLDER = BASE_DIR / "media" / "upload"
PREDICT_FOLDER = BASE_DIR / "media" / "predict"
# Accepted upload extensions (lowercase, without the leading dot).
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg'}
# settings.ini is read once at import time; a missing [api]/[image] section
# raises KeyError immediately, failing fast on misconfiguration.
config = configparser.ConfigParser()
config.read(BASE_DIR / "settings.ini", encoding="utf-8")
# Inference API endpoint.
url = config["api"]["url"]
# Image to process, and whether to display results interactively.
file_name = config["image"]["file_name"]
image_show = config.getboolean("image", "show")
# Derived absolute paths: input image, prediction output, and log file.
file_path = UPLOAD_FOLDER / file_name
save_path = PREDICT_FOLDER / get_save_filename(file_name)
log_path = BASE_DIR / "SSD_portfolio.log"
| from pathlib import Path
import configparser
def get_save_filename(filename):
if "." in filename:
return filename.rsplit(".", 1)[0] + "_predict." + filename.rsplit(".", 1)[1]
return
BASE_DIR = Path(__file__).resolve().parent
UPLOAD_FOLDER = BASE_DIR / "media" / "upload"
PREDICT_FOLDER = BASE_DIR / "media" / "predict"
ALLOWED_EXTENSIONS = {'png', 'jpg', 'jpeg'}
config = configparser.ConfigParser()
config.read(BASE_DIR / "settings.ini", encoding="utf-8")
url = config["api"]["url"]
file_name = config["image"]["file_name"]
image_show = config.getboolean("image", "show")
file_path = UPLOAD_FOLDER / file_name
save_path = PREDICT_FOLDER / get_save_filename(file_name)
log_path = BASE_DIR / "SSD_portfolio.log"
| none | 1 | 2.458293 | 2 | |
viou_tracker.py | heretic1993/iou_tracker | 2 | 6615868 | # ---------------------------------------------------------
# IOU Tracker
# Copyright (c) 2019 TU Berlin, Communication Systems Group
# Licensed under The MIT License [see LICENSE for details]
# Written by <NAME>
# ---------------------------------------------------------
import cv2
import numpy as np
from lapsolver import solve_dense
from tqdm import tqdm
from time import time
from util import iou, load_mot
from vis_tracker import VisTracker
def track_viou(frames_path, detections, sigma_l, sigma_h, sigma_iou, t_min, ttl, tracker_type, keep_upper_height_ratio):
    """ V-IOU Tracker.
    See "Extending IOU Based Multi-Object Tracking by Visual Information by <NAME>, <NAME>, <NAME>" for
    more information.
    Args:
         frames_path (str): path to ALL frames.
                            string must contain a placeholder like {:07d} to be replaced with the frame numbers.
         detections (list): list of detections per frame, usually generated by util.load_mot
         sigma_l (float): low detection threshold.
         sigma_h (float): high detection threshold.
         sigma_iou (float): IOU threshold.
         t_min (float): minimum track length in frames.
         ttl (float): maximum number of frames to perform visual tracking.
                      this can fill 'gaps' of up to 2*ttl frames (ttl times forward and backward).
         tracker_type (str): name of the visual tracker to use. see VisTracker for more details.
         keep_upper_height_ratio (float): float between 0.0 and 1.0 that determines the ratio of height of the object
                                          to track to the total height of the object used for visual tracking.
    Returns:
        list: list of tracks.
    """
    # if tracker_type == 'NONE':
    #     assert ttl == 1, "ttl should not be larger than 1 if no visual tracker is selected"
    tracks_active = []
    # NOTE(review): 'tracks_extendable' is commented out here but is still
    # referenced when finishing tracks below -> NameError at runtime.
    # Presumably lost when this file was trimmed; TODO restore or remove.
    # tracks_extendable = []
    tracks_finished = []
    frame_buffer = []
    for frame_num, detections_frame in enumerate(tqdm(detections), start=1):
        # load frame and put into buffer
        frame_path = frames_path.format(frame_num)
        frame = cv2.imread(frame_path)
        assert frame is not None, "could not read '{}'".format(frame_path)
        # Sliding window of the last ttl+1 frames; presumably consumed by the
        # (absent) visual-tracking code — currently it is only maintained.
        frame_buffer.append(frame)
        if len(frame_buffer) > ttl + 1:
            frame_buffer.pop(0)
        # apply low threshold to detections
        dets = [det for det in detections_frame if det['score'] >= sigma_l]
        # Optimal IoU assignment between active tracks and this frame's detections.
        track_ids, det_ids = associate(tracks_active, dets, sigma_iou)
        updated_tracks = []
        for track_id, det_id in zip(track_ids, det_ids):
            # Extend the matched track with its associated detection.
            tracks_active[track_id]['bboxes'].append(dets[det_id]['bbox'])
            tracks_active[track_id]['max_score'] = max(tracks_active[track_id]['max_score'], dets[det_id]['score'])
            tracks_active[track_id]['classes'].append(dets[det_id]['class'])
            tracks_active[track_id]['det_counter'] += 1
            if tracks_active[track_id]['ttl'] != ttl:
                # reset visual tracker if active
                tracks_active[track_id]['ttl'] = ttl
                # tracks_active[track_id]['visual_tracker'] = None
            updated_tracks.append(tracks_active[track_id])
        tracks_not_updated = [tracks_active[idx] for idx in set(range(len(tracks_active))).difference(set(track_ids))]
        # create new tracks
        # NOTE(review): 'dets_for_new' is never defined in this excerpt —
        # presumably the detections left unmatched by associate() (indices
        # not in det_ids); as written this line raises NameError.  TODO
        # confirm against the upstream V-IOU implementation.
        new_tracks = [{'bboxes': [det['bbox']], 'max_score': det['score'], 'start_frame': frame_num, 'ttl': ttl,
                       'classes': [det['class']], 'det_counter': 1, 'visual_tracker': None} for det in dets_for_new]
        tracks_active = []
        for track in updated_tracks + new_tracks:
            if track['ttl'] == 0:
                # tracks_extendable.append(track)
                pass
            else:
                tracks_active.append(track)
    # finish all remaining active and extendable tracks
    tracks_finished = tracks_finished + \
                      [track for track in tracks_active + tracks_extendable
                       if track['max_score'] >= sigma_h and track['det_counter'] >= t_min]
    # remove last visually tracked frames and compute the track classes
    for track in tracks_finished:
        if ttl != track['ttl']:
            track['bboxes'] = track['bboxes'][:-(ttl - track['ttl'])]
        # Majority vote over the per-detection class labels.
        track['class'] = max(set(track['classes']), key=track['classes'].count)
        del track['visual_tracker']
    return tracks_finished
def associate(tracks, detections, sigma_iou):
    """ perform association between tracks and detections in a frame.
    Args:
        tracks (list): input tracks
        detections (list): input detections
        sigma_iou (float): minimum intersection-over-union of a valid association
    Returns:
        (tuple): tuple containing:
        track_ids (numpy.array): 1D array with indexes of the tracks
        det_ids (numpy.array): 1D array of the associated indexes of the detections
    """
    # Cost matrix: cost = 1 - IoU(last box of the track, detection box).
    costs = np.empty(shape=(len(tracks), len(detections)), dtype=np.float32)
    for row, track in enumerate(tracks):
        for col, detection in enumerate(detections):
            costs[row, col] = 1 - iou(track['bboxes'][-1], detection['bbox'])

    # Bug fix: np.nan_to_num returns a new array by default; the original
    # discarded that result, so NaN IoU costs survived into the solver
    # input.  Sanitize the matrix in place instead.
    np.nan_to_num(costs, copy=False)
    # Pairs below the IoU threshold are marked invalid for the solver.
    costs[costs > 1 - sigma_iou] = np.nan
    track_ids, det_ids = solve_dense(costs)
    return track_ids, det_ids
def track_viou_matlab_wrapper(frames_path, detections, sigma_l, sigma_h, sigma_iou, t_min, ttl, tracker_type,
                              keep_upper_height_ratio=1.):
    """Matlab-facing wrapper around :func:`track_viou` for the DETRAC toolkit.

    Args:
        frames_path (str): directory containing the ``imgNNNNN.jpg`` frames.
        detections (numpy.array): flat detection array (7 values per row),
            usually supplied by run_tracker.m.
        sigma_l (float): low detection threshold.
        sigma_h (float): high detection threshold.
        sigma_iou (float): IOU threshold.
        t_min (float): minimum track length in frames.
        ttl (float): maximum number of visually tracked frames.
        tracker_type (str): visual tracker name (see VisTracker).
        keep_upper_height_ratio (float): fraction of the object height used
            for visual tracking.

    Returns:
        float: processing speed in frames per second.
        list: flat ``[x, y, w, h, frame, id, ...]`` track values.
    """
    # The flat Matlab array is rebuilt into an (N, 7) detection matrix.
    dets = load_mot(detections.reshape((7, -1)).transpose(), with_classes=False)
    t_start = time()
    tracks = track_viou(frames_path + "img{:05d}.jpg", dets, sigma_l, sigma_h, sigma_iou,
                        int(t_min), int(ttl), tracker_type, keep_upper_height_ratio)
    elapsed = time() - t_start
    out = []
    # Tracks are numbered from 1; boxes are emitted as x/y/width/height.
    for track_no, track in enumerate(tracks, start=1):
        for offset, bbox in enumerate(track['bboxes']):
            out += [float(bbox[0]), float(bbox[1]),
                    float(bbox[2] - bbox[0]), float(bbox[3] - bbox[1]),
                    float(track['start_frame'] + offset), float(track_no)]
    speed = len(dets) / elapsed
    return speed, out
| # ---------------------------------------------------------
# IOU Tracker
# Copyright (c) 2019 TU Berlin, Communication Systems Group
# Licensed under The MIT License [see LICENSE for details]
# Written by <NAME>
# ---------------------------------------------------------
import cv2
import numpy as np
from lapsolver import solve_dense
from tqdm import tqdm
from time import time
from util import iou, load_mot
from vis_tracker import VisTracker
def track_viou(frames_path, detections, sigma_l, sigma_h, sigma_iou, t_min, ttl, tracker_type, keep_upper_height_ratio):
""" V-IOU Tracker.
See "Extending IOU Based Multi-Object Tracking by Visual Information by <NAME>, <NAME>, <NAME>" for
more information.
Args:
frames_path (str): path to ALL frames.
string must contain a placeholder like {:07d} to be replaced with the frame numbers.
detections (list): list of detections per frame, usually generated by util.load_mot
sigma_l (float): low detection threshold.
sigma_h (float): high detection threshold.
sigma_iou (float): IOU threshold.
t_min (float): minimum track length in frames.
ttl (float): maximum number of frames to perform visual tracking.
this can fill 'gaps' of up to 2*ttl frames (ttl times forward and backward).
tracker_type (str): name of the visual tracker to use. see VisTracker for more details.
keep_upper_height_ratio (float): float between 0.0 and 1.0 that determines the ratio of height of the object
to track to the total height of the object used for visual tracking.
Returns:
list: list of tracks.
"""
# if tracker_type == 'NONE':
# assert ttl == 1, "ttl should not be larger than 1 if no visual tracker is selected"
tracks_active = []
# tracks_extendable = []
tracks_finished = []
frame_buffer = []
for frame_num, detections_frame in enumerate(tqdm(detections), start=1):
# load frame and put into buffer
frame_path = frames_path.format(frame_num)
frame = cv2.imread(frame_path)
assert frame is not None, "could not read '{}'".format(frame_path)
frame_buffer.append(frame)
if len(frame_buffer) > ttl + 1:
frame_buffer.pop(0)
# apply low threshold to detections
dets = [det for det in detections_frame if det['score'] >= sigma_l]
track_ids, det_ids = associate(tracks_active, dets, sigma_iou)
updated_tracks = []
for track_id, det_id in zip(track_ids, det_ids):
tracks_active[track_id]['bboxes'].append(dets[det_id]['bbox'])
tracks_active[track_id]['max_score'] = max(tracks_active[track_id]['max_score'], dets[det_id]['score'])
tracks_active[track_id]['classes'].append(dets[det_id]['class'])
tracks_active[track_id]['det_counter'] += 1
if tracks_active[track_id]['ttl'] != ttl:
# reset visual tracker if active
tracks_active[track_id]['ttl'] = ttl
# tracks_active[track_id]['visual_tracker'] = None
updated_tracks.append(tracks_active[track_id])
tracks_not_updated = [tracks_active[idx] for idx in set(range(len(tracks_active))).difference(set(track_ids))]
# create new tracks
new_tracks = [{'bboxes': [det['bbox']], 'max_score': det['score'], 'start_frame': frame_num, 'ttl': ttl,
'classes': [det['class']], 'det_counter': 1, 'visual_tracker': None} for det in dets_for_new]
tracks_active = []
for track in updated_tracks + new_tracks:
if track['ttl'] == 0:
# tracks_extendable.append(track)
pass
else:
tracks_active.append(track)
# finish all remaining active and extendable tracks
tracks_finished = tracks_finished + \
[track for track in tracks_active + tracks_extendable
if track['max_score'] >= sigma_h and track['det_counter'] >= t_min]
# remove last visually tracked frames and compute the track classes
for track in tracks_finished:
if ttl != track['ttl']:
track['bboxes'] = track['bboxes'][:-(ttl - track['ttl'])]
track['class'] = max(set(track['classes']), key=track['classes'].count)
del track['visual_tracker']
return tracks_finished
def associate(tracks, detections, sigma_iou):
""" perform association between tracks and detections in a frame.
Args:
tracks (list): input tracks
detections (list): input detections
sigma_iou (float): minimum intersection-over-union of a valid association
Returns:
(tuple): tuple containing:
track_ids (numpy.array): 1D array with indexes of the tracks
det_ids (numpy.array): 1D array of the associated indexes of the detections
"""
costs = np.empty(shape=(len(tracks), len(detections)), dtype=np.float32)
for row, track in enumerate(tracks):
for col, detection in enumerate(detections):
costs[row, col] = 1 - iou(track['bboxes'][-1], detection['bbox'])
np.nan_to_num(costs)
costs[costs > 1 - sigma_iou] = np.nan
track_ids, det_ids = solve_dense(costs)
return track_ids, det_ids
def track_viou_matlab_wrapper(frames_path, detections, sigma_l, sigma_h, sigma_iou, t_min, ttl, tracker_type,
keep_upper_height_ratio=1.):
"""
Matlab wrapper of the v-iou tracker for the detrac evaluation toolkit.
Args:
detections (numpy.array): numpy array of detections, usually supplied by run_tracker.m
sigma_l (float): low detection threshold.
sigma_h (float): high detection threshold.
sigma_iou (float): IOU threshold.
t_min (float): minimum track length in frames.
Returns:
float: speed in frames per second.
list: list of tracks.
"""
detections = detections.reshape((7, -1)).transpose()
dets = load_mot(detections, with_classes=False)
start = time()
tracks = track_viou(frames_path+"img{:05d}.jpg", dets, sigma_l, sigma_h, sigma_iou, int(t_min), int(ttl), tracker_type, keep_upper_height_ratio)
end = time()
id_ = 1
out = []
for track in tracks:
for i, bbox in enumerate(track['bboxes']):
out += [float(bbox[0]), float(bbox[1]), float(bbox[2] - bbox[0]), float(bbox[3] - bbox[1]),
float(track['start_frame'] + i), float(id_)]
id_ += 1
num_frames = len(dets)
speed = num_frames / (end - start)
return speed, out
| en | 0.685353 | # --------------------------------------------------------- # IOU Tracker # Copyright (c) 2019 TU Berlin, Communication Systems Group # Licensed under The MIT License [see LICENSE for details] # Written by <NAME> # --------------------------------------------------------- V-IOU Tracker. See "Extending IOU Based Multi-Object Tracking by Visual Information by <NAME>, <NAME>, <NAME>" for more information. Args: frames_path (str): path to ALL frames. string must contain a placeholder like {:07d} to be replaced with the frame numbers. detections (list): list of detections per frame, usually generated by util.load_mot sigma_l (float): low detection threshold. sigma_h (float): high detection threshold. sigma_iou (float): IOU threshold. t_min (float): minimum track length in frames. ttl (float): maximum number of frames to perform visual tracking. this can fill 'gaps' of up to 2*ttl frames (ttl times forward and backward). tracker_type (str): name of the visual tracker to use. see VisTracker for more details. keep_upper_height_ratio (float): float between 0.0 and 1.0 that determines the ratio of height of the object to track to the total height of the object used for visual tracking. Returns: list: list of tracks. # if tracker_type == 'NONE': # assert ttl == 1, "ttl should not be larger than 1 if no visual tracker is selected" # tracks_extendable = [] # load frame and put into buffer # apply low threshold to detections # reset visual tracker if active # tracks_active[track_id]['visual_tracker'] = None # create new tracks # tracks_extendable.append(track) # finish all remaining active and extendable tracks # remove last visually tracked frames and compute the track classes perform association between tracks and detections in a frame. 
Args: tracks (list): input tracks detections (list): input detections sigma_iou (float): minimum intersection-over-union of a valid association Returns: (tuple): tuple containing: track_ids (numpy.array): 1D array with indexes of the tracks det_ids (numpy.array): 1D array of the associated indexes of the detections Matlab wrapper of the v-iou tracker for the detrac evaluation toolkit. Args: detections (numpy.array): numpy array of detections, usually supplied by run_tracker.m sigma_l (float): low detection threshold. sigma_h (float): high detection threshold. sigma_iou (float): IOU threshold. t_min (float): minimum track length in frames. Returns: float: speed in frames per second. list: list of tracks. | 2.429442 | 2 |
pipelines/0037-cell_cell_interaction/bin/0015-h5ad_to_cellphonedb.py | ckrilow/dev-ckrilow | 0 | 6615869 | #!/usr/bin/env python
__author__ = '<NAME>'
__date__ = '2020-03-13'
__version__ = '0.0.1'
import argparse
import os
import scanpy as sc
import pandas as pd
import csv
def main():
    """Run CLI.

    Parses command-line options, derives the CellphoneDB output basename and
    loads the AnnData (.h5ad) file.
    NOTE(review): the export below is still commented out, so the script
    currently stops after loading — presumably work in progress.
    """
    parser = argparse.ArgumentParser(
        description="""
            Makes callphonedb input from h5ad file.
            """
    )
    parser.add_argument(
        '-v', '--version',
        action='version',
        version='%(prog)s {version}'.format(version=__version__)
    )
    parser.add_argument(
        '-h5', '--h5_anndata',
        action='store',
        dest='h5',
        required=True,
        help='H5 AnnData file.'
    )
    parser.add_argument(
        '-of', '--output_file',
        action='store',
        dest='of',
        default='',
        help='Basename of output files, assuming output in current working \
            directory.\
            (default: <adata>-cellphonedb)'
    )
    options = parser.parse_args()
    # Get the out file base.
    out_file_base = options.of
    if out_file_base == '':
        # Bug fix: the original used options.h5.rstrip('h5ad').rstrip('.'),
        # but str.rstrip strips *any* of those characters, not the suffix —
        # e.g. 'counts_h5ad' (no dot) became 'counts_'.  Strip the literal
        # '.h5ad' extension instead.
        h5_name = os.path.basename(options.h5)
        if h5_name.endswith('.h5ad'):
            h5_name = h5_name[:-len('.h5ad')]
        out_file_base = '{}-cellphonedb'.format(h5_name)
    # Load the AnnData file
    adata = sc.read_h5ad(filename=options.h5)
    # If the out file is adata one can save like this.
    # df_pca.to_csv(
    #     '{}.tsv.gz'.format(out_file_base),
    #     sep='\t',
    #     index=True,
    #     quoting=csv.QUOTE_NONNUMERIC,
    #     index_label='cell_barcode',
    #     na_rep='',
    #     compression='gzip'
    # )


if __name__ == '__main__':
    main()
| #!/usr/bin/env python
__author__ = '<NAME>'
__date__ = '2020-03-13'
__version__ = '0.0.1'
import argparse
import os
import scanpy as sc
import pandas as pd
import csv
def main():
"""Run CLI."""
parser = argparse.ArgumentParser(
description="""
Makes callphonedb input from h5ad file.
"""
)
parser.add_argument(
'-v', '--version',
action='version',
version='%(prog)s {version}'.format(version=__version__)
)
parser.add_argument(
'-h5', '--h5_anndata',
action='store',
dest='h5',
required=True,
help='H5 AnnData file.'
)
parser.add_argument(
'-of', '--output_file',
action='store',
dest='of',
default='',
help='Basename of output files, assuming output in current working \
directory.\
(default: <adata>-cellphonedb)'
)
options = parser.parse_args()
# Fixed settings.
verbose = True
# Get the out file base.
out_file_base = options.of
if out_file_base == '':
out_file_base = '{}-cellphonedb'.format(
os.path.basename(options.h5.rstrip('h5ad').rstrip('.'))
)
# Load the AnnData file
adata = sc.read_h5ad(filename=options.h5)
# If the out file is adata one can save like this.
# df_pca.to_csv(
# '{}.tsv.gz'.format(out_file_base),
# sep='\t',
# index=True,
# quoting=csv.QUOTE_NONNUMERIC,
# index_label='cell_barcode',
# na_rep='',
# compression='gzip'
# )
if __name__ == '__main__':
main()
| en | 0.374773 | #!/usr/bin/env python Run CLI. Makes callphonedb input from h5ad file. # Fixed settings. # Get the out file base. # Load the AnnData file # If the out file is adata one can save like this. # df_pca.to_csv( # '{}.tsv.gz'.format(out_file_base), # sep='\t', # index=True, # quoting=csv.QUOTE_NONNUMERIC, # index_label='cell_barcode', # na_rep='', # compression='gzip' # ) | 2.75756 | 3 |
cgi/chatbot.py | redienhcs/estrelabot | 0 | 6615870 | <filename>cgi/chatbot.py
#!/usr/bin/env python3
from flask import Flask, request
from flask_cors import CORS
from chatterbot import ChatBot
from chatterbot.trainers import ChatterBotCorpusTrainer
import unicodedata
app = Flask(__name__)
cors = CORS(app)
# Corpus files whose contents decide whether a chatbot answer was "useful":
# an answer found verbatim in one of these files gets a feedback prompt.
# NOTE: despite the name ("lista"), this is a *set* literal — iteration
# order is arbitrary, which is fine for the concatenate-and-search usage.
lista_de_arquivos = {
    "portuguese/eventos.yml",
    "portuguese/historia.yml",
    "portuguese/meio_ambiente.yml",
    "portuguese/perguntas_frequentes.yml"
}
def ler_conteudo_dos_arquivos( ):
    """Return the concatenated contents of all corpus files.

    Files are read with ``utf_8_sig`` so a leading BOM, if present, is
    stripped.  Concatenation order follows set iteration order (arbitrary),
    exactly as before — callers only substring-search the result.
    """
    partes = []
    for arquivo in lista_de_arquivos:
        # Bug fix: the original never closed its file handles; the context
        # manager guarantees each one is released even on read errors.
        with open(arquivo, 'r', encoding="utf_8_sig") as file:
            partes.append(file.read())
    # join() avoids the quadratic cost of repeated string concatenation.
    return "".join(partes)
def verificar_presenca_frase( frase):
    """Return the index of *frase* in the combined corpus, or -1 if absent."""
    corpus = ler_conteudo_dos_arquivos()
    return corpus.find(frase)
def gravar_log_chatbot( str_log):
    """Append *str_log* to the chatbot log file (chatbot_log.txt)."""
    # 'with' closes the handle even on write errors, and an explicit utf-8
    # encoding keeps accented Portuguese text from crashing on platforms
    # whose locale default encoding cannot represent it.
    with open("chatbot_log.txt", "a", encoding="utf-8") as file:
        file.write(str_log)
@app.route("/", methods=['GET','POST'])
def send_form():
    """Single chatbot endpoint.

    GET: echoes the 'msg' query parameter (connectivity check).
    POST with op == 'chat_response': logs the question, asks ChatterBot,
    post-processes the answer (COVID-19 redirect, feedback prompt) and
    returns an HTML fragment.  Any other POST just echoes 'mensagem'.
    """
    if request.method == 'GET' :
        dados_enviados = request.args.get('msg')
        return "Atingiu o servidor:{}".format(dados_enviados)
    # If the method used is POST
    if request.method == 'POST':
        # Kind of request sent by the client
        op = request.form.get('op')
        # Check whether this request is meant for the ChatterBot
        if op == 'chat_response':
            # User message sent via POST
            msg_post = request.form.get('mensagem')
            # Log the user's question ('#' prefix marks questions in the log)
            gravar_log_chatbot( "#{}".format(msg_post)+"\n")
            # NOTE(review): building a ChatBot per request re-initializes its
            # storage every time — consider a module-level instance.
            chatterbot = ChatBot('Prefeitura Estrela')
            # Run the chatterbot
            chatterbot_response = str(chatterbot.get_response( msg_post))
            if not chatterbot_response :
                chatterbot_response = "Desculpe, não entendi... poderia repetir?"
            # Log the bot's answer (']' prefix marks answers in the log)
            gravar_log_chatbot( "]{}".format(chatterbot_response)+"\n")
            # Check whether the question mentions the coronavirus: lowercase
            # and ASCII-fold the text before the substring search.
            lower_mensagem_usuario = msg_post.lower()
            # NOTE(review): str(bytes)[2:] keeps the repr's trailing quote
            # from b'...'; .decode('ascii') would be the clean form — the
            # stray "'" is harmless for the find() calls below.
            lower_mensagem_usuario = str(unicodedata.normalize('NFKD', lower_mensagem_usuario).encode('ASCII', 'ignore'))[2:]
            if lower_mensagem_usuario.find("coronavirus") > -1 or lower_mensagem_usuario.find("covid") > -1:
                chatterbot_response = "Parece que você está buscando por questões relacionadas ao COVID-19. Para mais informações sobre o assunto, por favor siga esse link da Prefeitura Municipal de estrela com as notívias mais recentes <a href=\"https://estrela.atende.net/#!/tipo/noticia\">Notícias prefeitura de Estrela </a>"
                chatterbot_response += "<div class=\"useful_info\"><br />Esta resposta foi útil? Gostaria de responder a um questionário para ajudar a melhorar este bot?"
                chatterbot_response += "<br />resposta as perguntas neste formulário: <a href=\"https://forms.gle/e2J1jDvtYdHyvD8R8\">Link para o formulário</a></div> "
            # Answer found verbatim in the curated corpora: append the
            # feedback prompt asking whether it was useful.
            if verificar_presenca_frase( chatterbot_response) > -1 :
                mensagem_de_retorno = '{}'.format(chatterbot_response)
                mensagem_de_retorno += "<div class=\"useful_info\"><br />Esta resposta foi útil? Gostaria de responder a um questionário para ajudar a melhorar este bot?"
                mensagem_de_retorno += "<br />resposta as perguntas neste formulário: <a href=\"https://forms.gle/e2J1jDvtYdHyvD8R8\">Link para o formulário</a></div> "
                return mensagem_de_retorno
            else:
                return '{}'.format( chatterbot_response)
        else :
            # Unknown op: just echo the message back
            msg_post = request.form.get('mensagem')
            return "recebi:{}".format(msg_post)
#if __name__ == "__main__":
# app.run()
| <filename>cgi/chatbot.py
#!/usr/bin/env python3
from flask import Flask, request
from flask_cors import CORS
from chatterbot import ChatBot
from chatterbot.trainers import ChatterBotCorpusTrainer
import unicodedata
app = Flask(__name__)
cors = CORS(app)
#Ler o conteúdo de arquivos e descobrir se a resposta do chatbot foi útil
lista_de_arquivos = {
"portuguese/eventos.yml",
"portuguese/historia.yml",
"portuguese/meio_ambiente.yml",
"portuguese/perguntas_frequentes.yml"
}
def ler_conteudo_dos_arquivos( ):
conteudo_arquivos = ""
for arquivo in lista_de_arquivos:
file = open(arquivo,'r', encoding="utf_8_sig")
conteudo_arquivos += file.read()
return conteudo_arquivos
#Utilizado para
def verificar_presenca_frase( frase):
conteudo_arquivos = ler_conteudo_dos_arquivos()
return conteudo_arquivos.find( frase)
def gravar_log_chatbot( str_log):
file = open("chatbot_log.txt","a")
file.write( str_log)
file.close()
@app.route("/", methods=['GET','POST'])
def send_form():
if request.method == 'GET' :
dados_enviados = request.args.get('msg')
return "Atingiu o servidor:{}".format(dados_enviados)
#Se o tipo de methodo utilizado é igual a POST
if request.method == 'POST':
#Pega o tipo de requisição enviada
op = request.form.get('op')
#Verifica se o tipo de requisição é para o ChatterBot
if op == 'chat_response':
#Pega a mensagem enviada pelo usuário via POST
msg_post = request.form.get('mensagem')
#grava a pergunta do usuário no log do chatbot
gravar_log_chatbot( "#{}".format(msg_post)+"\n")
#Inicializa o chatterbot
chatterbot = ChatBot('Prefeitura Estrela')
#executar chatterbot
chatterbot_response = str(chatterbot.get_response( msg_post))
if not chatterbot_response :
chatterbot_response = "Desculpe, não entendi... poderia repetir?"
#Grava a resposta do chatterbot no log do chatbot
gravar_log_chatbot( "]{}".format(chatterbot_response)+"\n")
#verificar se a pergunta contêm alguma coisa sobre o coronavírus
lower_mensagem_usuario = msg_post.lower()
lower_mensagem_usuario = str(unicodedata.normalize('NFKD', lower_mensagem_usuario).encode('ASCII', 'ignore'))[2:]
if lower_mensagem_usuario.find("coronavirus") > -1 or lower_mensagem_usuario.find("covid") > -1:
chatterbot_response = "Parece que você está buscando por questões relacionadas ao COVID-19. Para mais informações sobre o assunto, por favor siga esse link da Prefeitura Municipal de estrela com as notívias mais recentes <a href=\"https://estrela.atende.net/#!/tipo/noticia\">Notícias prefeitura de Estrela </a>"
chatterbot_response += "<div class=\"useful_info\"><br />Esta resposta foi útil? Gostaria de responder a um questionário para ajudar a melhorar este bot?"
chatterbot_response += "<br />resposta as perguntas neste formulário: <a href=\"https://forms.gle/e2J1jDvtYdHyvD8R8\">Link para o formulário</a></div> "
if verificar_presenca_frase( chatterbot_response) > -1 :
mensagem_de_retorno = '{}'.format(chatterbot_response)
mensagem_de_retorno += "<div class=\"useful_info\"><br />Esta resposta foi útil? Gostaria de responder a um questionário para ajudar a melhorar este bot?"
mensagem_de_retorno += "<br />resposta as perguntas neste formulário: <a href=\"https://forms.gle/e2J1jDvtYdHyvD8R8\">Link para o formulário</a></div> "
return mensagem_de_retorno
else:
return '{}'.format( chatterbot_response)
else :
msg_post = request.form.get('mensagem')
return "recebi:{}".format(msg_post)
#if __name__ == "__main__":
# app.run()
| pt | 0.914518 | #!/usr/bin/env python3 #Ler o conteúdo de arquivos e descobrir se a resposta do chatbot foi útil #Utilizado para #Se o tipo de methodo utilizado é igual a POST #Pega o tipo de requisição enviada #Verifica se o tipo de requisição é para o ChatterBot #Pega a mensagem enviada pelo usuário via POST #grava a pergunta do usuário no log do chatbot #Inicializa o chatterbot #executar chatterbot #Grava a resposta do chatterbot no log do chatbot #verificar se a pergunta contêm alguma coisa sobre o coronavírus #!/tipo/noticia\">Notícias prefeitura de Estrela </a>" #if __name__ == "__main__": # app.run() | 2.712909 | 3 |
src/plugins/senec/senec_db.py | smashnet/solar-wallbox | 2 | 6615871 | <gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Persistency layer for data from senec.py
"""
import os
import sqlite3
import logging
from datetime import datetime, timedelta, timezone, date
import pytz
__author__ = "<NAME>"
__copyright__ = "Copyright 2021, <NAME>"
__credits__ = ["<NAME>"]
__license__ = "Apache-2.0 License"
__version__ = "1.1.0"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Alpha"
log = logging.getLogger("SenecDB")
log.setLevel(logging.INFO)


class SenecDB():
    """SQLite persistence layer for SENEC measurement snapshots.

    One row per measurement is stored in the ``senec`` table; helper
    methods provide MIN/MAX/AVG aggregates and first-vs-last differences
    over a timestamp range, plus "today" convenience wrappers.
    """

    def __init__(self, db_file):
        """Open *db_file*, creating directories/tables or migrating as needed.

        :param db_file: path of the SQLite database file.
        """
        self.db_path = os.path.dirname(db_file)
        self.db_filename = os.path.basename(db_file)
        self.db_full_path = db_file
        self.db_version = "0.0.1"
        # Local timezone used to compute the "start of today" boundary.
        self.timezone = pytz.timezone("Europe/Berlin")
        # Ensure directories exist
        try:
            os.makedirs(self.db_path)
        except FileExistsError:
            log.debug("DB path already exists.")
        # Establish connection
        self.connection = sqlite3.connect(self.db_full_path)
        self.cursor = self.connection.cursor()
        # Check if DB exists and is correct version
        try:
            version = self.cursor.execute("SELECT version FROM db_info").fetchone()[0]
            if version == self.db_version:
                log.debug(f"DB found and has correct version {version}. No migration needed :)")
            else:
                log.debug(f"Found DB, has version {version}. Target version is {self.db_version} ... migrating...")
                self.__migrate(version)
        except sqlite3.OperationalError:
            # db_info does not exist -> wrong or empty db_file
            log.debug("No not a valid DB file. Creating...")
            self.__init_tables_v0_0_1()

    def __init_tables_v0_0_1(self):
        """Create the initial (v0.0.1) schema and record the schema version."""
        self.cursor.execute("CREATE TABLE IF NOT EXISTS db_info (version TEXT)")
        self.cursor.execute("""CREATE TABLE IF NOT EXISTS senec (
            ts TIMESTAMP,
            stats_current_state TEXT,
            stats_battery_charged_energy FLOAT,
            stats_battery_discharged_energy FLOAT,
            stats_grid_export FLOAT,
            stats_grid_import FLOAT,
            stats_house_consumption FLOAT,
            stats_pv_production FLOAT,
            live_house_power FLOAT,
            live_pv_production FLOAT,
            live_grid_power FLOAT,
            live_battery_charge_power FLOAT,
            live_battery_charge_current FLOAT,
            live_battery_voltage FLOAT,
            live_battery_percentage FLOAT)""")
        self.cursor.execute(f"INSERT INTO db_info VALUES ('{self.db_version}')")
        self.connection.commit()

    def __migrate(self, from_version):
        # TODO: schema migration is not implemented yet.
        log.error(f"Migration from DB version {from_version} to DB version {self.db_version} not yet implemented.")

    def close(self):
        """Close the cursor and the underlying connection."""
        self.cursor.close()
        self.connection.close()

    def insert_measurement(self, json):
        """Insert one measurement dict with the current DB timestamp."""
        self.cursor.execute("INSERT INTO senec VALUES (CURRENT_TIMESTAMP, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (json['general']['current_state'],
            json['statistics']['battery_charged_energy'],
            json['statistics']['battery_discharged_energy'],
            json['statistics']['grid_export'],
            json['statistics']['grid_import'],
            json['statistics']['house_consumption'],
            json['statistics']['pv_production'],
            json['live_data']['house_power'],
            json['live_data']['pv_production'],
            json['live_data']['grid_power'],
            json['live_data']['battery_charge_power'],
            json['live_data']['battery_charge_current'],
            json['live_data']['battery_voltage'],
            json['live_data']['battery_percentage']))
        self.connection.commit()

    def insert_measurement_with_custom_ts(self, json, datetime_ts):
        """Insert one measurement dict using the caller-supplied timestamp."""
        self.cursor.execute("INSERT INTO senec VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (datetime_ts,
            json['general']['current_state'],
            json['statistics']['battery_charged_energy'],
            json['statistics']['battery_discharged_energy'],
            json['statistics']['grid_export'],
            json['statistics']['grid_import'],
            json['statistics']['house_consumption'],
            json['statistics']['pv_production'],
            json['live_data']['house_power'],
            json['live_data']['pv_production'],
            json['live_data']['grid_power'],
            json['live_data']['battery_charge_power'],
            json['live_data']['battery_charge_current'],
            json['live_data']['battery_voltage'],
            json['live_data']['battery_percentage']))
        self.connection.commit()

    # NOTE: *column* is interpolated into the SQL text because identifiers
    # cannot be bound as parameters; callers must pass trusted column names.

    def get_max_val_between_tss(self, column, ts1, ts2):
        """Return MAX(*column*) over rows with ts in [ts1, ts2]."""
        log.debug(f"SELECT MAX({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'")
        return self.cursor.execute(f"SELECT MAX({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'").fetchone()[0]

    def get_min_val_between_tss(self, column, ts1, ts2):
        """Return MIN(*column*) over rows with ts in [ts1, ts2]."""
        log.debug(f"SELECT MIN({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'")
        return self.cursor.execute(f"SELECT MIN({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'").fetchone()[0]

    def get_avg_val_between_tss(self, column, ts1, ts2):
        """Return AVG(*column*) over rows with ts in [ts1, ts2].

        Bug fix: SQLite's aggregate is spelled ``AVG``, not ``AVERAGE``;
        the original query raised ``sqlite3.OperationalError`` ("no such
        function: AVERAGE") on every call.
        """
        log.debug(f"SELECT AVG({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'")
        return self.cursor.execute(f"SELECT AVG({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'").fetchone()[0]

    def get_diff_val_between_tss(self, column, ts1, ts2):
        """Return last-minus-first value of *column* within [ts1, ts2]."""
        log.debug(f"{column}, {ts1}, {ts2}")
        val1 = self.cursor.execute(f"SELECT {column} FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}' ORDER BY ts ASC LIMIT 1").fetchone()[0]
        val2 = self.cursor.execute(f"SELECT {column} FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}' ORDER BY ts DESC LIMIT 1").fetchone()[0]
        return val2 - val1

    def _today_range(self):
        """Return (start_of_today_in_UTC, now_in_UTC) for the configured timezone."""
        start = datetime.now(tz=self.timezone).replace(hour=0, minute=0, second=0, microsecond=0).astimezone(tz=timezone.utc)
        return start, datetime.utcnow()

    def get_todays(self, metric):
        """Return today's last-minus-first difference of *metric*."""
        today_zero, today_now = self._today_range()
        return self.get_diff_val_between_tss(metric, today_zero, today_now)

    def get_todays_max(self, metric):
        """Return today's maximum of *metric*."""
        today_zero, today_now = self._today_range()
        return self.get_max_val_between_tss(metric, today_zero, today_now)

    def get_todays_min(self, metric):
        """Return today's minimum of *metric*."""
        today_zero, today_now = self._today_range()
        return self.get_min_val_between_tss(metric, today_zero, today_now)

    def get_todays_avg(self, metric):
        """Return today's average of *metric*."""
        today_zero, today_now = self._today_range()
        return self.get_avg_val_between_tss(metric, today_zero, today_now)
# -*- coding: utf-8 -*-
"""
Persistency layer for data from senec.py
"""
import os
import sqlite3
import logging
from datetime import datetime, timedelta, timezone, date
import pytz
__author__ = "<NAME>"
__copyright__ = "Copyright 2021, <NAME>"
__credits__ = ["<NAME>"]
__license__ = "Apache-2.0 License"
__version__ = "1.1.0"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Alpha"
log = logging.getLogger("SenecDB")
log.setLevel(logging.INFO)
class SenecDB():
def __init__(self, db_file):
self.db_path = os.path.dirname(db_file)
self.db_filename = os.path.basename(db_file)
self.db_full_path = db_file
self.db_version = "0.0.1"
self.timezone = pytz.timezone("Europe/Berlin")
# Ensure directories exist
try:
os.makedirs(self.db_path)
except FileExistsError:
log.debug("DB path already exists.")
# Establish connection
self.connection = sqlite3.connect(self.db_full_path)
self.cursor = self.connection.cursor()
# Check if DB exists and is correct version
try:
version = self.cursor.execute("SELECT version FROM db_info").fetchone()[0]
if version == self.db_version:
log.debug(f"DB found and has correct version {version}. No migration needed :)")
else:
log.debug(f"Found DB, has version {version}. Target version is {self.db_version} ... migrating...")
self.__migrate(version)
except sqlite3.OperationalError:
# db_info does not exist -> wrong or empty db_file
log.debug("No not a valid DB file. Creating...")
self.__init_tables_v0_0_1()
def __init_tables_v0_0_1(self):
self.cursor.execute("CREATE TABLE IF NOT EXISTS db_info (version TEXT)")
self.cursor.execute("""CREATE TABLE IF NOT EXISTS senec (
ts TIMESTAMP,
stats_current_state TEXT,
stats_battery_charged_energy FLOAT,
stats_battery_discharged_energy FLOAT,
stats_grid_export FLOAT,
stats_grid_import FLOAT,
stats_house_consumption FLOAT,
stats_pv_production FLOAT,
live_house_power FLOAT,
live_pv_production FLOAT,
live_grid_power FLOAT,
live_battery_charge_power FLOAT,
live_battery_charge_current FLOAT,
live_battery_voltage FLOAT,
live_battery_percentage FLOAT)""")
self.cursor.execute(f"INSERT INTO db_info VALUES ('{self.db_version}')")
self.connection.commit()
def __migrate(self, from_version):
# TODO
log.error(f"Migration from DB version {from_version} to DB version {self.db_version} not yet implemented.")
def close(self):
self.cursor.close()
self.connection.close()
def insert_measurement(self, json):
self.cursor.execute("INSERT INTO senec VALUES (CURRENT_TIMESTAMP, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(json['general']['current_state'],
json['statistics']['battery_charged_energy'],
json['statistics']['battery_discharged_energy'],
json['statistics']['grid_export'],
json['statistics']['grid_import'],
json['statistics']['house_consumption'],
json['statistics']['pv_production'],
json['live_data']['house_power'],
json['live_data']['pv_production'],
json['live_data']['grid_power'],
json['live_data']['battery_charge_power'],
json['live_data']['battery_charge_current'],
json['live_data']['battery_voltage'],
json['live_data']['battery_percentage']))
self.connection.commit()
def insert_measurement_with_custom_ts(self, json, datetime_ts):
self.cursor.execute("INSERT INTO senec VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
(datetime_ts,
json['general']['current_state'],
json['statistics']['battery_charged_energy'],
json['statistics']['battery_discharged_energy'],
json['statistics']['grid_export'],
json['statistics']['grid_import'],
json['statistics']['house_consumption'],
json['statistics']['pv_production'],
json['live_data']['house_power'],
json['live_data']['pv_production'],
json['live_data']['grid_power'],
json['live_data']['battery_charge_power'],
json['live_data']['battery_charge_current'],
json['live_data']['battery_voltage'],
json['live_data']['battery_percentage']))
self.connection.commit()
def get_max_val_between_tss(self, column, ts1, ts2):
log.debug(f"SELECT MAX({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'")
return self.cursor.execute(f"SELECT MAX({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'").fetchone()[0]
def get_min_val_between_tss(self, column, ts1, ts2):
log.debug(f"SELECT MIN({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'")
return self.cursor.execute(f"SELECT MIN({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'").fetchone()[0]
def get_avg_val_between_tss(self, column, ts1, ts2):
log.debug(f"SELECT AVERAGE({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'")
return self.cursor.execute(f"SELECT AVERAGE({column}) FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}'").fetchone()[0]
def get_diff_val_between_tss(self, column, ts1, ts2):
log.debug(f"{column}, {ts1}, {ts2}")
val1 = self.cursor.execute(f"SELECT {column} FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}' ORDER BY ts ASC LIMIT 1").fetchone()[0]
val2 = self.cursor.execute(f"SELECT {column} FROM senec WHERE ts BETWEEN '{ts1.isoformat(sep=' ')}' AND '{ts2.isoformat(sep=' ')}' ORDER BY ts DESC LIMIT 1").fetchone()[0]
return val2-val1
def get_todays(self, metric):
today_zero = datetime.now(tz=self.timezone).replace(hour=0, minute=0, second=0, microsecond=0).astimezone(tz=timezone.utc)
today_now = datetime.utcnow()
return self.get_diff_val_between_tss(metric, today_zero, today_now)
def get_todays_max(self, metric):
today_zero = datetime.now(tz=self.timezone).replace(hour=0, minute=0, second=0, microsecond=0).astimezone(tz=timezone.utc)
today_now = datetime.utcnow()
return self.get_max_val_between_tss(metric, today_zero, today_now)
def get_todays_min(self, metric):
today_zero = datetime.now(tz=self.timezone).replace(hour=0, minute=0, second=0, microsecond=0).astimezone(tz=timezone.utc)
today_now = datetime.utcnow()
return self.get_min_val_between_tss(metric, today_zero, today_now)
def get_todays_avg(self, metric):
today_zero = datetime.now(tz=self.timezone).replace(hour=0, minute=0, second=0, microsecond=0).astimezone(tz=timezone.utc)
today_now = datetime.utcnow()
return self.get_avg_val_between_tss(metric, today_zero, today_now) | en | 0.56358 | #!/usr/bin/env python3 # -*- coding: utf-8 -*- Persistency layer for data from senec.py # Ensure directories exist # Establish connection # Check if DB exists and is correct version # db_info does not exist -> wrong or empty db_file CREATE TABLE IF NOT EXISTS senec ( ts TIMESTAMP, stats_current_state TEXT, stats_battery_charged_energy FLOAT, stats_battery_discharged_energy FLOAT, stats_grid_export FLOAT, stats_grid_import FLOAT, stats_house_consumption FLOAT, stats_pv_production FLOAT, live_house_power FLOAT, live_pv_production FLOAT, live_grid_power FLOAT, live_battery_charge_power FLOAT, live_battery_charge_current FLOAT, live_battery_voltage FLOAT, live_battery_percentage FLOAT) # TODO | 2.735182 | 3 |
Part-03-Understanding-Software-Crafting-Your-Own-Tools/models/edx-platform/common/lib/xmodule/xmodule/tests/test_utils_escape_html_characters.py | osoco/better-ways-of-thinking-about-software | 3 | 6615872 | """Tests for methods defined in util/misc.py"""
from unittest import TestCase
from xmodule.util.misc import escape_html_characters
class UtilHtmlEscapeTests(TestCase):
    """
    Tests for methods exposed in util/misc
    """
    # Expected result shared by every case: the markup (HTML comment or
    # CDATA section) is stripped and only the plain paragraph text,
    # with its surrounding whitespace, survives.
    final_content = " This is a paragraph. "
    def test_escape_html_comments(self):
        # An HTML comment must be removed entirely from the output.
        html_content = """
        <!--This is a comment. Comments are not displayed in the browser-->
        This is a paragraph.
        """
        assert escape_html_characters(html_content) == self.final_content
    def test_escape_cdata_comments(self):
        # A CDATA section (including the embedded script text) must be
        # removed entirely from the output.
        html_content = """
        <![CDATA[
        function matchwo(a,b)
        {
        if (a < b && a < 0) then
        {
        return 1;
        }
        else
        {
        return 0;
        }
        }
        ]]>
        This is a paragraph.
        """
        assert escape_html_characters(html_content) == self.final_content
    def test_escape_non_breaking_space(self):
        # NOTE(review): this input is identical to the CDATA case above and
        # contains no non-breaking space (&nbsp;) despite the test name —
        # presumably it was meant to exercise &nbsp; handling; TODO confirm
        # and extend the fixture.
        html_content = """
        <![CDATA[
        function matchwo(a,b)
        {
        if (a < b && a < 0) then
        {
        return 1;
        }
        else
        {
        return 0;
        }
        }
        ]]>
        This is a paragraph.
        """
        assert escape_html_characters(html_content) == self.final_content
| """Tests for methods defined in util/misc.py"""
from unittest import TestCase
from xmodule.util.misc import escape_html_characters
class UtilHtmlEscapeTests(TestCase):
"""
Tests for methods exposed in util/misc
"""
final_content = " This is a paragraph. "
def test_escape_html_comments(self):
html_content = """
<!--This is a comment. Comments are not displayed in the browser-->
This is a paragraph.
"""
assert escape_html_characters(html_content) == self.final_content
def test_escape_cdata_comments(self):
html_content = """
<![CDATA[
function matchwo(a,b)
{
if (a < b && a < 0) then
{
return 1;
}
else
{
return 0;
}
}
]]>
This is a paragraph.
"""
assert escape_html_characters(html_content) == self.final_content
def test_escape_non_breaking_space(self):
html_content = """
<![CDATA[
function matchwo(a,b)
{
if (a < b && a < 0) then
{
return 1;
}
else
{
return 0;
}
}
]]>
This is a paragraph.
"""
assert escape_html_characters(html_content) == self.final_content
| en | 0.478753 | Tests for methods defined in util/misc.py Tests for methods exposed in util/misc <!--This is a comment. Comments are not displayed in the browser--> This is a paragraph. <![CDATA[ function matchwo(a,b) { if (a < b && a < 0) then { return 1; } else { return 0; } } ]]> This is a paragraph. <![CDATA[ function matchwo(a,b) { if (a < b && a < 0) then { return 1; } else { return 0; } } ]]> This is a paragraph. | 3.060583 | 3 |
extractor.py | Myugeli/Palette-Extractor | 0 | 6615873 | '''
Created on Nov 10, 2020
@author: myuey
'''
from PIL import Image, ImageTk
from numpy import zeros, reshape, array, vectorize, asarray, linalg
from tkinter import Tk, Button, Entry, Label, messagebox, Toplevel, LEFT, W
def gui_initiate():
    """Build and run the Tkinter GUI for the palette extractor.

    Creates all widgets and their callbacks, then blocks in
    ``root.mainloop()`` until the window is closed.
    """
    root = Tk()
    root.title("Palette Extractor by Myui")
    root.geometry('900x400')
    root.resizable(width=False, height=False)
    # Accepted ranges for the three user parameters.
    min_val = 1
    dim_max_val = 100
    rnd_max_val = 255
    tun_max_val = 300
    # Defaults pre-filled into the entry widgets.
    dim_val = 50
    rnd_val = 35
    tun_val = 110
    # Base edge length (px) used for the preview images.
    img_prev_size = 200
    # --- target image row: path entry, load button, preview ---
    tar_lbl = Label(root, text="Target Image File: ")
    tar_lbl.grid(column = 0, row = 0)
    tar_txt = Entry(root, width = 20, justify='center')
    tar_txt.grid(column = 1, row = 0)
    # Black placeholder shown until a target image is loaded.
    tar_img = zeros((img_prev_size, img_prev_size * 2, 3))
    tar_img_disp = ImageTk.PhotoImage(Image.fromarray(tar_img, mode = "RGB"))
    tar_prev_img = Label(root, image = tar_img_disp)
    tar_prev_img.grid(column = 4, row = 0)
    def tar_btn_click():
        # Load the file named in tar_txt and show it in the preview label.
        tar_file_path = tar_txt.get()
        if not tar_file_path:
            messagebox.showerror(title="Error", message="No filepath entered.")
        else:
            try:
                tar_img = Image.open(tar_file_path)
                # NOTE(review): 'global' (rather than 'nonlocal') stores the
                # PhotoImage at module level; it still serves its purpose of
                # keeping a live reference so Tk's image is not
                # garbage-collected.
                global tar_img_disp
                tar_img = tar_img.resize((img_prev_size * 2, img_prev_size), Image.ANTIALIAS)
                tar_img_disp = ImageTk.PhotoImage(tar_img)
                tar_prev_img.configure(image = tar_img_disp)
            except FileNotFoundError:
                messagebox.showerror(title="Error", message="File '" + tar_file_path + "' not found.")
    tar_btn = Button(root, text="Set Target", command = tar_btn_click)
    tar_btn.grid(column = 2, row = 0)
    tar_prev_lbl = Label(root, text = "Target Preview: ")
    tar_prev_lbl.grid(column = 3, row = 0)
    # --- parameter rows: resize dimension, colour rounding, tuning distance ---
    dim_lbl = Label(root, text = "Resize To: ")
    dim_lbl.grid(column = 0, row = 1)
    dim_txt = Entry(root, width = 10, justify='center')
    dim_txt.insert(0, str(dim_val))
    dim_txt.grid(column = 1, row = 1)
    dim_lim_lbl = Label(root, text = "(" + str(min_val) + "-" + str(dim_max_val) + ")")
    dim_lim_lbl.grid(column = 2, row = 1)
    rnd_lbl = Label(root, text = "Round Colours To: ")
    rnd_lbl.grid(column = 0, row = 2)
    rnd_txt = Entry(root, width = 10, justify='center')
    rnd_txt.insert(0, str(rnd_val))
    rnd_txt.grid(column = 1, row = 2)
    rnd_lim_lbl = Label(root, text = "(" + str(min_val) + "-" + str(rnd_max_val) + ")")
    rnd_lim_lbl.grid(column = 2, row = 2)
    tun_lbl = Label(root, text = "Tuning Distance: ")
    tun_lbl.grid(column = 0, row = 3)
    tun_txt = Entry(root, width = 10, justify='center')
    tun_txt.insert(0, str(tun_val))
    tun_txt.grid(column = 1, row = 3)
    tun_lim_lbl = Label(root, text ="(" + str(min_val) + "-" + str(tun_max_val) + ")")
    tun_lim_lbl.grid(column = 2, row = 3)
    # --- palette output widgets: filename, colour count, preview strip ---
    pal_file_lbl = Label(root, text = "Palette File Name:")
    pal_file_lbl.grid(column = 3, row = 5)
    pal_file_txt = Entry(root, width = 30, justify='center')
    pal_file_txt.insert(0, "palette.png")
    pal_file_txt.grid(column = 4, row = 5)
    pal_amt_lbl = Label(root, text = "Number of Colours in Palette:")
    pal_amt_lbl.grid(column = 3, row = 4)
    pal_num_lbl = Label(root, text = "0")
    pal_num_lbl.grid(column = 4, row = 4)
    # Black placeholder strip until a palette preview is generated.
    pal_img = zeros((int(img_prev_size / 4), img_prev_size * 2, 3))
    pal_img_disp = ImageTk.PhotoImage(Image.fromarray(pal_img, mode = "RGB"))
    pal_prev_img = Label(root, image = pal_img_disp)
    pal_prev_img.grid(column = 4, row = 2)
    def inst_window():
        # Pop up a secondary window with step-by-step usage instructions.
        inst_disp = Toplevel(root)
        inst_disp.title("Instructions")
        inst_disp.geometry('900x150')
        inst_disp.focus_force()
        inst_disp.resizable(width=False, height=False)
        welc_txt = "This application will make a palette image from an input image."
        step1_txt = "Step 1: Enter target image filepath. Click 'Set Target' to make sure you have the right one. Please make sure the application and the input image are in the same folder."
        step2_txt = "Step 2: Enter resize value. The larger this is, the more colours there will be in the palette, but it will take more time to process."
        step3_txt = "Step 3: Enter rounding value. This controls colour variation. The larger this is, the less colours there will be in the palette."
        step4_txt = "Step 4: Enter tuning value. This controls how close together the palette colours will be. The larger this is, the less colours there will be in the palette."
        step5_txt = "Step 5: Click 'Preview' to preview to palette. The number of colours in it will also be displayed."
        step6_txt = "Step 6: Enter palette filename and click 'Save Palette'. The palette will be saved to the folder the application is in."
        welc_lbl = Label(inst_disp, text = welc_txt, justify=LEFT)
        welc_lbl.grid(sticky = W, column=0,row=0)
        step1_lbl = Label(inst_disp, text = step1_txt, justify=LEFT)
        step1_lbl.grid(sticky = W, column=0,row=1)
        step2_lbl = Label(inst_disp, text = step2_txt, justify=LEFT)
        step2_lbl.grid(sticky = W, column=0,row=2)
        step3_lbl = Label(inst_disp, text = step3_txt, justify=LEFT)
        step3_lbl.grid(sticky = W, column=0,row=3)
        step4_lbl = Label(inst_disp, text = step4_txt, justify=LEFT)
        step4_lbl.grid(sticky = W, column=0,row=4)
        step5_lbl = Label(inst_disp, text = step5_txt, justify=LEFT)
        step5_lbl.grid(sticky = W, column=0,row=5)
        step6_lbl = Label(inst_disp, text = step6_txt, justify=LEFT)
        step6_lbl.grid(sticky = W, column=0,row=6)
    inst_btn = Button(root, text = "Instructions", command = inst_window)
    inst_btn.grid(column = 0, row = 7)
    def pal_prev_btn_click():
        # Validate the three parameters, build the palette, and display it
        # in the preview strip together with its colour count.
        img_path = tar_txt.get()
        dim = dim_txt.get()
        rnd = rnd_txt.get()
        tun = tun_txt.get()
        # All three parameters must be plain non-negative integers.
        num_chk_flag = dim.isnumeric() and rnd.isnumeric() and tun.isnumeric()
        if not num_chk_flag:
            messagebox.showerror(title="Error", message="Invalid parameter values.")
        else:
            # Each value must fall inside its accepted range.
            dim_rng_flag = int(dim) <= dim_max_val and int(dim) >= min_val
            rnd_rng_flag = int(rnd) <= rnd_max_val and int(rnd) >= min_val
            tun_rng_flag = int(tun) <= tun_max_val and int(tun) >= min_val
            if not (dim_rng_flag and rnd_rng_flag and tun_rng_flag):
                messagebox.showerror(title="Error", message="Parameter values out of range.")
            elif not img_path:
                messagebox.showerror(title="Error", message="No image filepath entered.")
            else:
                try:
                    palette, num_col = create_palette(img_path, int(dim), int(rnd), int(tun))
                    # Keep a module-level reference so the PhotoImage is not
                    # garbage-collected (see note in tar_btn_click).
                    global pal_img_disp
                    pal_img = palette.resize((img_prev_size * 2, int(img_prev_size / 4)))
                    pal_img_disp = ImageTk.PhotoImage(pal_img)
                    pal_prev_img.configure(image = pal_img_disp)
                    pal_num_lbl.configure(text = str(num_col))
                except FileNotFoundError:
                    messagebox.showerror(title="Error", message="File '" + img_path + "' not found.")
    pal_prev_btn = Button(root, text = "Preview", command = pal_prev_btn_click)
    pal_prev_btn.grid(column = 1, row = 6)
    pal_prev_lbl = Label(root, text = "Palette Preview:")
    pal_prev_lbl.grid(column = 3, row = 2)
    def pal_save_btn_click():
        # Same validation as the preview callback, then write the palette
        # image to the filename given in pal_file_txt.
        img_path = tar_txt.get()
        dim = dim_txt.get()
        rnd = rnd_txt.get()
        tun = tun_txt.get()
        pal_file_path = pal_file_txt.get()
        num_chk_flag = dim.isnumeric() and rnd.isnumeric() and tun.isnumeric()
        if not num_chk_flag:
            messagebox.showerror(title="Error", message="Invalid parameter values.")
        else:
            dim_rng_flag = int(dim) <= dim_max_val and int(dim) >= min_val
            rnd_rng_flag = int(rnd) <= rnd_max_val and int(rnd) >= min_val
            tun_rng_flag = int(tun) <= tun_max_val and int(tun) >= min_val
            if not (dim_rng_flag and rnd_rng_flag and tun_rng_flag):
                messagebox.showerror(title="Error", message="Parameter values out of range.")
            elif not img_path:
                messagebox.showerror(title="Error", message="No image filepath entered.")
            elif not pal_file_path:
                messagebox.showerror(title="Error", message="No palette filepath entered.")
            else:
                try:
                    palette, num_col = create_palette(img_path, int(dim), int(rnd), int(tun))
                    save_palette(palette, pal_file_path)
                except ValueError:
                    # PIL raises ValueError for an unrecognized output format.
                    messagebox.showerror(title="Error", message="Invalid palette filepath.")
    pal_save_btn = Button(root, text = "Save Palette", command = pal_save_btn_click)
    pal_save_btn.grid(column = 4, row = 6)
    root.mainloop()
def get_image(image_path, dim = 50):
    """Load *image_path* as RGB, downscale it to at most dim x dim, and
    return the pixels as a numpy array of shape (height, width, 3).

    :param image_path: path of the image file to open.
    :param dim: maximum edge length after resizing (never upscales).
    """
    img = Image.open(image_path, "r")
    img = img.convert("RGB")
    dim_w, dim_h = img.size
    # Never upscale: keep the original extent on any axis smaller than dim.
    width = min(dim_w, dim)
    height = min(dim_h, dim)
    img = img.resize((width, height), Image.ANTIALIAS)
    # getdata() yields pixels row by row, i.e. *height* rows of *width*
    # pixels, so the row count must come first in the reshape.  The
    # original (width, height, 3) ordering scrambled non-square images.
    pix_values = array(list(img.getdata())).reshape((height, width, 3))
    return pix_values
def round_image(img, round_factor = 35):
    """Quantize every channel value of *img* down to the nearest multiple
    of *round_factor*.

    Uses vectorized integer arithmetic instead of ``numpy.vectorize``
    (which makes one Python-level call per element).  Pixel values are
    assumed non-negative — they come from an RGB image via get_image —
    so floor division matches the original ``int(t / round_factor)``
    truncation exactly.
    """
    return (img // round_factor) * round_factor
def get_unique_colours(img):
    """Return the distinct (r, g, b) tuples occurring in *img*.

    *img* is an array of shape (height, width, 3).  The result order is
    unspecified (it comes from a set), as in the original implementation.
    Replaces the nested index loops with a single pass over a flattened
    (n_pixels, 3) view.
    """
    return list({tuple(pixel) for pixel in img.reshape(-1, 3)})
def tune_colours(colour_list, tune_amt = 110):
    """Greedily filter *colour_list* so the kept colours are pairwise at
    least *tune_amt* apart in RGB space; the result is returned sorted.

    :param colour_list: iterable of (r, g, b) tuples.
    :param tune_amt: minimum Euclidean RGB distance between kept colours
        (defaults to 110).

    Bug fix: the original recomputed ``entry_flag`` on every comparison,
    so only the distance to the *last* already-kept colour mattered; a
    candidate close to an earlier kept colour could still be added.  A
    candidate is now rejected if it is too close to ANY kept colour.
    """
    res = []
    for candidate in colour_list:
        cand = asarray(candidate)
        # Euclidean distance, identical to dist_calc(); inlined here so the
        # rejection test reads as a single all() condition.
        if all(linalg.norm(cand - asarray(kept)) >= tune_amt for kept in res):
            res.append(candidate)
    res.sort()
    return res
def dist_calc(tup1, tup2):
    """Return the Euclidean distance between two equal-length tuples."""
    difference = asarray(tup1) - asarray(tup2)
    return linalg.norm(difference)
def create_palette_img(colour_list, sq_dim = 500):
    """Render *colour_list* as a horizontal strip image with one
    sq_dim x sq_dim square per colour, in list order.

    :param colour_list: list of (r, g, b) tuples.
    :param sq_dim: edge length in pixels of each colour square.
    :returns: a PIL RGB Image of size (len(colour_list) * sq_dim, sq_dim).
    """
    strip = zeros((sq_dim, len(colour_list) * sq_dim, 3), dtype = 'uint8')
    for i, colour in enumerate(colour_list):
        # Broadcasting fills the whole square in one assignment; no
        # per-colour temporary array is needed as in the original.
        strip[:, i * sq_dim:(i + 1) * sq_dim, :] = asarray(colour)
    return Image.fromarray(strip, mode = "RGB")
def hex_to_rgb(value):
    """Return (red, green, blue) for the color given as #rrggbb."""
    digits = value.lstrip('#')
    step = len(digits) // 3
    channels = []
    for start in range(0, len(digits), step):
        channels.append(int(digits[start:start + step], 16))
    return tuple(channels)
def rgb_to_hex(red, green, blue):
    """Return color as #rrggbb for the given color values."""
    return '#' + format(red, '02x') + format(green, '02x') + format(blue, '02x')
def save_palette(palette, palette_file = "palette.png"):
    """Write the PIL *palette* image to *palette_file*; the image format
    is inferred by PIL from the file extension."""
    palette.save(palette_file)
def create_palette(img_file, img_dim = 50, round_val = 35, tune_val = 110):
    """Run the full extraction pipeline for *img_file* and return the
    tuple (palette_image, number_of_colours)."""
    pixels = round_image(get_image(img_file, img_dim), round_val)
    kept_colours = tune_colours(get_unique_colours(pixels), tune_val)
    return create_palette_img(kept_colours), len(kept_colours)
gui_initiate()
| '''
Created on Nov 10, 2020
@author: myuey
'''
from PIL import Image, ImageTk
from numpy import zeros, reshape, array, vectorize, asarray, linalg
from tkinter import Tk, Button, Entry, Label, messagebox, Toplevel, LEFT, W
def gui_initiate():
root = Tk()
root.title("Palette Extractor by Myui")
root.geometry('900x400')
root.resizable(width=False, height=False)
min_val = 1
dim_max_val = 100
rnd_max_val = 255
tun_max_val = 300
dim_val = 50
rnd_val = 35
tun_val = 110
img_prev_size = 200
tar_lbl = Label(root, text="Target Image File: ")
tar_lbl.grid(column = 0, row = 0)
tar_txt = Entry(root, width = 20, justify='center')
tar_txt.grid(column = 1, row = 0)
tar_img = zeros((img_prev_size, img_prev_size * 2, 3))
tar_img_disp = ImageTk.PhotoImage(Image.fromarray(tar_img, mode = "RGB"))
tar_prev_img = Label(root, image = tar_img_disp)
tar_prev_img.grid(column = 4, row = 0)
def tar_btn_click():
tar_file_path = tar_txt.get()
if not tar_file_path:
messagebox.showerror(title="Error", message="No filepath entered.")
else:
try:
tar_img = Image.open(tar_file_path)
global tar_img_disp
tar_img = tar_img.resize((img_prev_size * 2, img_prev_size), Image.ANTIALIAS)
tar_img_disp = ImageTk.PhotoImage(tar_img)
tar_prev_img.configure(image = tar_img_disp)
except FileNotFoundError:
messagebox.showerror(title="Error", message="File '" + tar_file_path + "' not found.")
tar_btn = Button(root, text="Set Target", command = tar_btn_click)
tar_btn.grid(column = 2, row = 0)
tar_prev_lbl = Label(root, text = "Target Preview: ")
tar_prev_lbl.grid(column = 3, row = 0)
dim_lbl = Label(root, text = "Resize To: ")
dim_lbl.grid(column = 0, row = 1)
dim_txt = Entry(root, width = 10, justify='center')
dim_txt.insert(0, str(dim_val))
dim_txt.grid(column = 1, row = 1)
dim_lim_lbl = Label(root, text = "(" + str(min_val) + "-" + str(dim_max_val) + ")")
dim_lim_lbl.grid(column = 2, row = 1)
rnd_lbl = Label(root, text = "Round Colours To: ")
rnd_lbl.grid(column = 0, row = 2)
rnd_txt = Entry(root, width = 10, justify='center')
rnd_txt.insert(0, str(rnd_val))
rnd_txt.grid(column = 1, row = 2)
rnd_lim_lbl = Label(root, text = "(" + str(min_val) + "-" + str(rnd_max_val) + ")")
rnd_lim_lbl.grid(column = 2, row = 2)
tun_lbl = Label(root, text = "Tuning Distance: ")
tun_lbl.grid(column = 0, row = 3)
tun_txt = Entry(root, width = 10, justify='center')
tun_txt.insert(0, str(tun_val))
tun_txt.grid(column = 1, row = 3)
tun_lim_lbl = Label(root, text ="(" + str(min_val) + "-" + str(tun_max_val) + ")")
tun_lim_lbl.grid(column = 2, row = 3)
pal_file_lbl = Label(root, text = "Palette File Name:")
pal_file_lbl.grid(column = 3, row = 5)
pal_file_txt = Entry(root, width = 30, justify='center')
pal_file_txt.insert(0, "palette.png")
pal_file_txt.grid(column = 4, row = 5)
pal_amt_lbl = Label(root, text = "Number of Colours in Palette:")
pal_amt_lbl.grid(column = 3, row = 4)
pal_num_lbl = Label(root, text = "0")
pal_num_lbl.grid(column = 4, row = 4)
pal_img = zeros((int(img_prev_size / 4), img_prev_size * 2, 3))
pal_img_disp = ImageTk.PhotoImage(Image.fromarray(pal_img, mode = "RGB"))
pal_prev_img = Label(root, image = pal_img_disp)
pal_prev_img.grid(column = 4, row = 2)
def inst_window():
inst_disp = Toplevel(root)
inst_disp.title("Instructions")
inst_disp.geometry('900x150')
inst_disp.focus_force()
inst_disp.resizable(width=False, height=False)
welc_txt = "This application will make a palette image from an input image."
step1_txt = "Step 1: Enter target image filepath. Click 'Set Target' to make sure you have the right one. Please make sure the application and the input image are in the same folder."
step2_txt = "Step 2: Enter resize value. The larger this is, the more colours there will be in the palette, but it will take more time to process."
step3_txt = "Step 3: Enter rounding value. This controls colour variation. The larger this is, the less colours there will be in the palette."
step4_txt = "Step 4: Enter tuning value. This controls how close together the palette colours will be. The larger this is, the less colours there will be in the palette."
step5_txt = "Step 5: Click 'Preview' to preview to palette. The number of colours in it will also be displayed."
step6_txt = "Step 6: Enter palette filename and click 'Save Palette'. The palette will be saved to the folder the application is in."
welc_lbl = Label(inst_disp, text = welc_txt, justify=LEFT)
welc_lbl.grid(sticky = W, column=0,row=0)
step1_lbl = Label(inst_disp, text = step1_txt, justify=LEFT)
step1_lbl.grid(sticky = W, column=0,row=1)
step2_lbl = Label(inst_disp, text = step2_txt, justify=LEFT)
step2_lbl.grid(sticky = W, column=0,row=2)
step3_lbl = Label(inst_disp, text = step3_txt, justify=LEFT)
step3_lbl.grid(sticky = W, column=0,row=3)
step4_lbl = Label(inst_disp, text = step4_txt, justify=LEFT)
step4_lbl.grid(sticky = W, column=0,row=4)
step5_lbl = Label(inst_disp, text = step5_txt, justify=LEFT)
step5_lbl.grid(sticky = W, column=0,row=5)
step6_lbl = Label(inst_disp, text = step6_txt, justify=LEFT)
step6_lbl.grid(sticky = W, column=0,row=6)
inst_btn = Button(root, text = "Instructions", command = inst_window)
inst_btn.grid(column = 0, row = 7)
    def pal_prev_btn_click():
        """Validate the form inputs, build the palette, and show it in the preview slot."""
        img_path = tar_txt.get()
        dim = dim_txt.get()
        rnd = rnd_txt.get()
        tun = tun_txt.get()
        # All three tuning parameters must be non-negative integers.
        num_chk_flag = dim.isnumeric() and rnd.isnumeric() and tun.isnumeric()
        if not num_chk_flag:
            messagebox.showerror(title="Error", message="Invalid parameter values.")
        else:
            # Range limits (min_val, dim_max_val, ...) are defined earlier in the GUI setup.
            dim_rng_flag = int(dim) <= dim_max_val and int(dim) >= min_val
            rnd_rng_flag = int(rnd) <= rnd_max_val and int(rnd) >= min_val
            tun_rng_flag = int(tun) <= tun_max_val and int(tun) >= min_val
            if not (dim_rng_flag and rnd_rng_flag and tun_rng_flag):
                messagebox.showerror(title="Error", message="Parameter values out of range.")
            elif not img_path:
                messagebox.showerror(title="Error", message="No image filepath entered.")
            else:
                try:
                    palette, num_col = create_palette(img_path, int(dim), int(rnd), int(tun))
                    # Keep a long-lived reference so Tk does not garbage-collect the image.
                    global pal_img_disp
                    pal_img = palette.resize((img_prev_size * 2, int(img_prev_size / 4)))
                    pal_img_disp = ImageTk.PhotoImage(pal_img)
                    pal_prev_img.configure(image = pal_img_disp)
                    pal_num_lbl.configure(text = str(num_col))
                except FileNotFoundError:
                    messagebox.showerror(title="Error", message="File '" + img_path + "' not found.")
pal_prev_btn = Button(root, text = "Preview", command = pal_prev_btn_click)
pal_prev_btn.grid(column = 1, row = 6)
pal_prev_lbl = Label(root, text = "Palette Preview:")
pal_prev_lbl.grid(column = 3, row = 2)
    def pal_save_btn_click():
        """Validate the form inputs, rebuild the palette, and write it to disk."""
        img_path = tar_txt.get()
        dim = dim_txt.get()
        rnd = rnd_txt.get()
        tun = tun_txt.get()
        pal_file_path = pal_file_txt.get()
        # Same validation chain as the Preview handler, plus the output filename.
        num_chk_flag = dim.isnumeric() and rnd.isnumeric() and tun.isnumeric()
        if not num_chk_flag:
            messagebox.showerror(title="Error", message="Invalid parameter values.")
        else:
            dim_rng_flag = int(dim) <= dim_max_val and int(dim) >= min_val
            rnd_rng_flag = int(rnd) <= rnd_max_val and int(rnd) >= min_val
            tun_rng_flag = int(tun) <= tun_max_val and int(tun) >= min_val
            if not (dim_rng_flag and rnd_rng_flag and tun_rng_flag):
                messagebox.showerror(title="Error", message="Parameter values out of range.")
            elif not img_path:
                messagebox.showerror(title="Error", message="No image filepath entered.")
            elif not pal_file_path:
                messagebox.showerror(title="Error", message="No palette filepath entered.")
            else:
                try:
                    # The palette is recomputed rather than reusing the preview.
                    palette, num_col = create_palette(img_path, int(dim), int(rnd), int(tun))
                    save_palette(palette, pal_file_path)
                except ValueError:
                    messagebox.showerror(title="Error", message="Invalid palette filepath.")
pal_save_btn = Button(root, text = "Save Palette", command = pal_save_btn_click)
pal_save_btn.grid(column = 4, row = 6)
root.mainloop()
def get_image(image_path, dim = 50):
    """Load an image, shrink it to at most dim x dim, and return its pixels.

    Returns a numpy array of shape (height, width, 3) holding RGB values.
    """
    img = Image.open(image_path, "r")
    img = img.convert("RGB")
    dim_w, dim_h = img.size
    # Never upscale: clamp the target size to the source dimensions.
    width = min(dim, dim_w)
    height = min(dim, dim_h)
    # NOTE(review): Image.ANTIALIAS was removed in Pillow 10; switch to
    # Image.LANCZOS (same filter) if the environment's Pillow is upgraded.
    img = img.resize((width, height), Image.ANTIALIAS)
    # getdata() yields pixels row by row (top-to-bottom), so the first axis is
    # height.  The previous reshape((width, height, 3)) silently garbled the
    # spatial layout of non-square images (same multiset of colours, wrong grid).
    pix_values = array(list(img.getdata())).reshape((height, width, 3))
    return pix_values
def round_image(img, round_factor = 35):
    """Quantize every channel value down to the nearest multiple of round_factor."""
    quantize = vectorize(lambda value: int(value / round_factor) * round_factor)
    return quantize(img)
def get_unique_colours(img):
    """Return the distinct RGB triples present in img as a list of tuples."""
    rows, cols = img.shape[0], img.shape[1]
    # Collect every pixel first, then deduplicate exactly like the original
    # (list -> set -> list) so the resulting ordering is unchanged.
    pixels = [tuple(img[r][c]) for r in range(rows) for c in range(cols)]
    return list(set(pixels))
def tune_colours(colour_list, tune_amt = 110):
    """Thin colour_list so no two kept colours are closer than tune_amt.

    tune_amt is a distance in RGB space (default 110).

    Bug fix: the original overwrote entry_flag on every comparison, so only
    the distance to the *last* already-kept colour was ever checked; a
    candidate close to an earlier kept colour could slip through.  A candidate
    is now rejected if it lies within tune_amt of *any* kept colour.
    """
    res = []
    for candidate in colour_list:
        if not any(dist_calc(candidate, kept) < tune_amt for kept in res):
            res.append(candidate)
    res.sort()
    return res
def dist_calc(tup1, tup2):
    """Return the Euclidean distance between two equal-length tuples."""
    return linalg.norm(asarray(tup1) - asarray(tup2))
def create_palette_img(colour_list, sq_dim = 500):
    """Render colour_list as one horizontal strip of sq_dim x sq_dim squares."""
    # Strip is sq_dim tall and one square wide per colour.
    strip = zeros((sq_dim, len(colour_list) * sq_dim, 3), dtype = 'uint8')
    for idx, colour in enumerate(colour_list):
        # Broadcast-fill the idx-th square with this colour's RGB triple.
        strip[:, idx * sq_dim:(idx + 1) * sq_dim, :] = asarray(colour)
    return Image.fromarray(strip, mode = "RGB")
def hex_to_rgb(value):
    """Return (red, green, blue) for the color given as #rrggbb."""
    digits = value.lstrip('#')
    step = len(digits) // 3
    return tuple(int(digits[pos:pos + step], 16)
                 for pos in range(0, len(digits), step))
def rgb_to_hex(red, green, blue):
    """Return color as #rrggbb for the given color values."""
    # Each channel renders as two lowercase hex digits; values are assumed 0-255.
    return '#%02x%02x%02x' % (red, green, blue)
def save_palette(palette, palette_file = "palette.png"):
    """Write the palette PIL image to palette_file (format inferred from extension)."""
    palette.save(palette_file)
def create_palette(img_file, img_dim = 50, round_val = 35, tune_val = 110):
    """Run the full pipeline: load -> quantize -> dedupe -> thin -> render.

    Returns (palette PIL image, number of colours in the palette).
    """
    img = get_image(img_file, img_dim)
    img = round_image(img, round_val)
    uni_colour = get_unique_colours(img)
    tune_colour = tune_colours(uni_colour, tune_val)
    palette = create_palette_img(tune_colour)
    return palette, len(tune_colour)
gui_initiate()
| en | 0.765049 | Created on Nov 10, 2020 @author: myuey #returns np array of pixel values #returns list of different rgb values in the img given #Trims colour list to remove colours that are too close, defined by tune_amt value #tune_amt is a distance in RGB space. Defaults to 20 #loop through colour_list, pull an entry, and compare to entries in res list #calculate distance between the colours #returns space distance between two tuples with the same length #creates a palette image containing each colour in the list as a square of size sq_dim, dimH squares tall #create numpy array #loop through colour list and populate the appropriate numpy array region #create array replacement #place array replacement #calculate boundaries #write the numpy array to an image Return (red, green, blue) for the color given as #rrggbb. Return color as #rrggbb for the given color values. | 2.553109 | 3 |
lcm_number.py | tushartrip1010/100_days_code_py | 0 | 6615874 | <gh_stars>0
def lc(a,b):
    """Return the least common multiple of two integers.

    Uses lcm(a, b) = |a*b| // gcd(a, b) instead of the original count-up
    search, which was O(lcm) and raised ZeroDivisionError on zero input.
    Results for nonzero integers are unchanged (always non-negative).
    """
    from math import gcd  # local import keeps the module surface unchanged
    if a == 0 or b == 0:
        # lcm(0, n) is 0 by convention; the old code divided by zero here.
        return 0
    return abs(a * b) // gcd(a, b)
# Read the two integers and report their least common multiple.
a = int(input("Enter 1st Number: "))
b = int(input("Enter 2nd Number: "))
print(f"LCM of {a} and {b}: {lc(a,b)}")
| def lc(a,b):
if a>b:
greater=a
else:
greater=b
while(True):
if(greater%a==0)and(greater%b==0):
result=greater
break
else:
greater+=1
return result
a = int(input("Enter 1st Number: "))
b = int(input("Enter 2nd Number: "))
print(f"LCM of {a} and {b}: {lc(a,b)}") | none | 1 | 4.030804 | 4 | |
challenges/challenge014_Decorators_for_Fun_and_Profit/test_challenge014.py | alex-vegan/100daysofcode-with-python-course | 2 | 6615875 | from challenge014 import (timeit, gen_files, fun_files, gen_lines, fun_lines,
gen_grep, fun_grep, gen_count, fun_count)
def test_timeit():
    """Placeholder: the timeit decorator is not yet covered."""
    pass

def test_gen_files():
    """Placeholder: gen_files is not yet covered."""
    pass

def test_fun_files():
    """Placeholder: fun_files is not yet covered."""
    pass

def test_gen_lines():
    """Placeholder: gen_lines is not yet covered."""
    pass

def test_fun_lines():
    """Placeholder: fun_lines is not yet covered."""
    pass

def test_gen_grep():
    """Placeholder: gen_grep is not yet covered."""
    pass

def test_fun_grep():
    """Placeholder: fun_grep is not yet covered."""
    pass

def test_gen_count():
    """Placeholder: gen_count is not yet covered."""
    pass

def test_fun_count():
    """Placeholder: fun_count is not yet covered."""
    pass
| from challenge014 import (timeit, gen_files, fun_files, gen_lines, fun_lines,
gen_grep, fun_grep, gen_count, fun_count)
def test_timeit():
pass
def test_gen_files():
pass
def test_fun_files():
pass
def test_gen_lines():
pass
def test_fun_lines():
pass
def test_gen_grep():
pass
def test_fun_grep():
pass
def test_gen_count():
pass
def test_fun_count():
pass
| none | 1 | 1.403133 | 1 | |
tests/__init__.py | khchine5/xl | 1 | 6615876 | import sys
from unipath import Path
from lino.utils.pythontest import TestCase
from lino_xl import SETUP_INFO
from lino import PYAFTER26
class LinoTestCase(TestCase):
    """Shared base: generic test settings, paths resolved from the repo root."""
    django_settings_module = "lino.projects.std.settings_test"
    project_root = Path(__file__).parent.parent
class PackagesTests(LinoTestCase):
    """Check that every package declared in SETUP_INFO is importable."""
    def test_01(self):
        self.run_packages_test(SETUP_INFO['packages'])
class LibTests(LinoTestCase):
    """Run the doctests embedded in individual library modules."""
    # def test_users(self):
    #     self.run_simple_doctests("docs/dev/users.rst")
    def test_cal_utils(self):
        self.run_simple_doctests('lino_xl/lib/cal/utils.py')
    def test_vat_utils(self):
        self.run_simple_doctests('lino_xl/lib/vat/utils.py')
class UtilsTests(LinoTestCase):
    """Doctests for the contacts utility module."""
    def test_contacts_utils(self):
        self.run_simple_doctests('lino_xl/lib/contacts/utils.py')
from . import test_appy_pod
| import sys
from unipath import Path
from lino.utils.pythontest import TestCase
from lino_xl import SETUP_INFO
from lino import PYAFTER26
class LinoTestCase(TestCase):
django_settings_module = "lino.projects.std.settings_test"
project_root = Path(__file__).parent.parent
class PackagesTests(LinoTestCase):
def test_01(self):
self.run_packages_test(SETUP_INFO['packages'])
class LibTests(LinoTestCase):
# def test_users(self):
# self.run_simple_doctests("docs/dev/users.rst")
def test_cal_utils(self):
self.run_simple_doctests('lino_xl/lib/cal/utils.py')
def test_vat_utils(self):
self.run_simple_doctests('lino_xl/lib/vat/utils.py')
class UtilsTests(LinoTestCase):
def test_contacts_utils(self):
self.run_simple_doctests('lino_xl/lib/contacts/utils.py')
from . import test_appy_pod
| ru | 0.230784 | # def test_users(self): # self.run_simple_doctests("docs/dev/users.rst") | 1.863224 | 2 |
Ejercicios progra.usm.cl/Parte 1/4- Patrones Comunes/productos-especiales_2.py | csaldias/python-usm | 7 | 6615877 | #Productos especiales: Potencia factorial creciente
#Pedimos los numeros
n = int(raw_input("Ingrese n: "))
m = int(raw_input("Ingrese m: "))
#Calculamos la potencia
potencia = n
for i in range(1,m):
potencia *= (n + i)
print potencia
# Special products: rising factorial power (comments translated from Spanish)
# Read the two numbers
n = int(raw_input("Ingrese n: "))
m = int(raw_input("Ingrese m: "))
# Compute the product n * (n+1) * ... * (n+m-1)
potencia = n
for i in range(1,m):
    potencia *= (n + i)
print potencia
| es | 0.864934 | #Productos especiales: Potencia factorial creciente #Pedimos los numeros #Calculamos la potencia | 4.010885 | 4 |
elasticbackup/restore.py | nickstenning/elasticbackup | 6 | 6615878 | <gh_stars>1-10
#!/usr/bin/env python
from __future__ import print_function
import argparse
import logging
import json
import elasticsearch
# Shared log format for both our logger and the elasticsearch client's.
logging.basicConfig(format='%(asctime)s [%(name)s] [%(levelname)s] '
                           '%(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S')
# Index 0/1/2 maps the -v count to WARN/INFO/DEBUG (see main()).
log_levels = [logging.WARN, logging.INFO, logging.DEBUG]
log = logging.getLogger('elasticbackup')
log_es = logging.getLogger('elasticsearch')

parser = argparse.ArgumentParser(
    'elasticrestore',
    description='Restore data and mappings to an ElasticSearch index')
parser.add_argument('host',
                    help='elasticsearch host')
parser.add_argument('index',
                    help='elasticsearch index name')
parser.add_argument('-m', '--mappings-file',
                    help='mappings output filename',
                    required=True)
parser.add_argument('-d', '--documents-file',
                    help='documents output filename',
                    required=True)
parser.add_argument('-b', '--batch-size',
                    help='document upload batch size',
                    type=int,
                    default=1000)
parser.add_argument('-v', '--verbose',
                    help='increase output verbosity',
                    action='count',
                    default=0)
parser.add_argument('-u', '--user',
                    help='HTTP auth (in format user:pass)')
def create_index(es, index, f):
    """Create `index` on `es` using the JSON mappings read from file object `f`."""
    es.indices.create(index=index, body=json.load(f))
def create_documents(es, index, f, batch_size=1000):
    """Bulk-upload documents read from file object `f` in batches, logging progress."""
    uploaded = 0
    for count, actions in document_batches(f, batch_size):
        es.bulk(index=index, body=actions)
        uploaded += count
        log.info("uploaded %s (total: %s)", count, uploaded)
def document_batches(fp, batch_size):
    """Yield (size, batch) pairs of bulk-API action lines read from `fp`.

    Each input line is one exported document; it becomes two lines in the
    batch: a "create" action carrying the metadata, then the _source body.
    """
    pending = []
    count = 0
    for raw in fp:
        doc = json.loads(raw)
        source = doc.pop('_source')
        pending.append(json.dumps({"create": doc}))
        pending.append(json.dumps(source))
        count += 1
        if count >= batch_size:
            yield count, pending
            count, pending = 0, []
    # Flush whatever is left over after the last full batch.
    if pending:
        yield count, pending
def main():
    """CLI entry point: parse args, connect, create the index, upload documents."""
    args = parser.parse_args()
    # Clamp -v so flags beyond -vv don't index past log_levels.
    verbose = min(args.verbose, 2)
    log.setLevel(log_levels[verbose])
    log_es.setLevel(log_levels[verbose])
    conn_kwargs = {}
    if args.user:
        # Expected "user:pass"; passed straight through to the client.
        conn_kwargs['http_auth'] = args.user
    es = elasticsearch.Elasticsearch([args.host], **conn_kwargs)
    with open(args.mappings_file) as f:
        create_index(es, args.index, f)
    with open(args.documents_file) as f:
        create_documents(es, args.index, f, batch_size=args.batch_size)
# Script entry point.
if __name__ == '__main__':
    main()
| #!/usr/bin/env python
from __future__ import print_function
import argparse
import logging
import json
import elasticsearch
logging.basicConfig(format='%(asctime)s [%(name)s] [%(levelname)s] '
'%(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
log_levels = [logging.WARN, logging.INFO, logging.DEBUG]
log = logging.getLogger('elasticbackup')
log_es = logging.getLogger('elasticsearch')
parser = argparse.ArgumentParser(
'elasticrestore',
description='Restore data and mappings to an ElasticSearch index')
parser.add_argument('host',
help='elasticsearch host')
parser.add_argument('index',
help='elasticsearch index name')
parser.add_argument('-m', '--mappings-file',
help='mappings output filename',
required=True)
parser.add_argument('-d', '--documents-file',
help='documents output filename',
required=True)
parser.add_argument('-b', '--batch-size',
help='document upload batch size',
type=int,
default=1000)
parser.add_argument('-v', '--verbose',
help='increase output verbosity',
action='count',
default=0)
parser.add_argument('-u', '--user',
help='HTTP auth (in format user:pass)')
def create_index(es, index, f):
mappings = json.load(f)
es.indices.create(index=index, body=mappings)
def create_documents(es, index, f, batch_size=1000):
total = 0
for size, batch in document_batches(f, batch_size):
es.bulk(index=index, body=batch)
total += size
log.info("uploaded %s (total: %s)", size, total)
def document_batches(fp, batch_size):
i = 0
batch = []
for line in fp:
obj = json.loads(line)
src = obj.pop('_source')
batch.append(json.dumps({"create": obj}))
batch.append(json.dumps(src))
i += 1
if i >= batch_size:
yield i, batch
i = 0
batch = []
if batch:
yield i, batch
def main():
args = parser.parse_args()
verbose = min(args.verbose, 2)
log.setLevel(log_levels[verbose])
log_es.setLevel(log_levels[verbose])
conn_kwargs = {}
if args.user:
conn_kwargs['http_auth'] = args.user
es = elasticsearch.Elasticsearch([args.host], **conn_kwargs)
with open(args.mappings_file) as f:
create_index(es, args.index, f)
with open(args.documents_file) as f:
create_documents(es, args.index, f, batch_size=args.batch_size)
if __name__ == '__main__':
main() | ru | 0.26433 | #!/usr/bin/env python | 2.447741 | 2 |
cnltk/test/corpus_fixt.py | gouthambs/cnltk | 0 | 6615879 | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from cnltk.corpus import teardown_module | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from cnltk.corpus import teardown_module | en | 0.769321 | # -*- coding: utf-8 -*- | 0.90411 | 1 |
tests/util_test.py | yoloseem/sqlalchemy-imageattach | 83 | 6615880 | from __future__ import absolute_import
from sqlalchemy_imageattach.util import append_docstring, get_minimum_indent
def test_minimum_indent():
assert get_minimum_indent('Hello') == ''
assert get_minimum_indent('''Hello
world
''') == ' '
assert get_minimum_indent('''Hello
world::
code
asdf
''') == ' '
assert get_minimum_indent('''\
Hello
world::
code
asdf
''', 0) == ' '
def test_append_docstring():
def test_func():
"""Hello.
:returns: any integer
:rtype: :class:`numbers.Integral`
-"""
return 123
assert append_docstring(
test_func.__doc__,
'.. note::',
'',
' Appended docstring!'
) == '''Hello.
:returns: any integer
:rtype: :class:`numbers.Integral`
-
.. note::
Appended docstring!
'''.rstrip()
| from __future__ import absolute_import
from sqlalchemy_imageattach.util import append_docstring, get_minimum_indent
def test_minimum_indent():
assert get_minimum_indent('Hello') == ''
assert get_minimum_indent('''Hello
world
''') == ' '
assert get_minimum_indent('''Hello
world::
code
asdf
''') == ' '
assert get_minimum_indent('''\
Hello
world::
code
asdf
''', 0) == ' '
def test_append_docstring():
def test_func():
"""Hello.
:returns: any integer
:rtype: :class:`numbers.Integral`
-"""
return 123
assert append_docstring(
test_func.__doc__,
'.. note::',
'',
' Appended docstring!'
) == '''Hello.
:returns: any integer
:rtype: :class:`numbers.Integral`
-
.. note::
Appended docstring!
'''.rstrip()
| en | 0.25775 | Hello world Hello world:: code asdf \ Hello world:: code asdf Hello. :returns: any integer :rtype: :class:`numbers.Integral` - Hello. :returns: any integer :rtype: :class:`numbers.Integral` - .. note:: Appended docstring! | 2.429755 | 2 |
NotYetCurated/PlottingSurfaceValuesWithPysurfer/pysurfer_plot_VISAN_surface_values.py | MatildeVaghi/BrainsForPublication | 19 | 6615881 | <reponame>MatildeVaghi/BrainsForPublication
#!/usr/bin/env python
#=============================================================================
# Created by <NAME>
# September 2014
# Contact: <EMAIL>
#=============================================================================
#=============================================================================
# IMPORTS
#=============================================================================
import os
import sys
import argparse
import numpy as np
from surfer import Brain, io
import itertools as it
import matplotlib.pylab as plt
import matplotlib.image as mpimg
import matplotlib.gridspec as gridspec
#=============================================================================
# FUNCTIONS
#=============================================================================
def setup_argparser():
    '''
    Code to read in arguments from the command line
    Also allows you to change some settings
    Returns (arguments, parser) so callers can print usage on bad input.
    '''
    # Build a basic parser.
    help_text = ('Plot the NORA VISAN data on a freesurfer surface')
    sign_off = 'Author: <NAME> <<EMAIL>>'
    parser = argparse.ArgumentParser(description=help_text, epilog=sign_off)
    # Now add the arguments
    parser.add_argument(dest='output_dir',
                        type=str,
                        metavar='output_dir',
                        help='output directory')
    # The three cope inputs are optional; presence is checked later at module level.
    parser.add_argument('-cope1', '--cope1_file',
                        type=str,
                        metavar='cope1_file',
                        help='cope1 nifti file in MNI space',
                        default=None)
    parser.add_argument('-cope2', '--cope2_file',
                        type=str,
                        metavar='cope2_file',
                        help='cope2 nifti file in MNI space',
                        default=None)
    parser.add_argument('-cope4', '--cope4_file',
                        type=str,
                        metavar='cope4_file',
                        help='cope4 nifti file in MNI space',
                        default=None)
    parser.add_argument('--subject_id',
                        type=str,
                        metavar='subject id',
                        help='freesurfer subject id',
                        default='fsaverage')
    parser.add_argument('-sd', '--subjects_dir',
                        type=str,
                        metavar='subjects_dir',
                        help='freesurfer subjects dir',
                        default=os.environ["SUBJECTS_DIR"])
    parser.add_argument('-c', '--cmap',
                        type=str,
                        metavar='cmap',
                        help='colormap',
                        default='autumn')
    parser.add_argument('-cf', '--color_file',
                        type=str,
                        metavar='color_file',
                        help='file containing list of custom colors',
                        default=None)
    parser.add_argument('--center',
                        action='store_true',
                        help='center the color bar around 0')
    parser.add_argument('-t', '--thresh',
                        type=float,
                        metavar='thresh',
                        help='mask values below this value',
                        default=-98)
    parser.add_argument('-m', '--mask',
                        type=float,
                        metavar='mask',
                        help='mask values that are exactly this value',
                        default=0)
    parser.add_argument('-l', '--lower',
                        type=float,
                        metavar='lowerthr',
                        help='lower limit for colorbar',
                        default=None)
    parser.add_argument('-u', '--upper',
                        type=float,
                        metavar='upperthr',
                        help='upper limit for colorbar',
                        default=None)
    parser.add_argument('-s', '--surface',
                        type=str,
                        metavar='surface',
                        help='surface - one of "pial", "inflated" or "both"',
                        default='both')
    arguments = parser.parse_args()
    return arguments, parser
#------------------------------------------------------------------------------
def read_in_data(cope1_file, cope2_file, cope4_file, subject_id, subjects_dir):
    '''
    Read in the three nifti files for each hemisphere
    and combine into one surface (for each hemisphere)

    Returns {'lh': data, 'rh': data}; each vertex holds the sum of the
    binarised cope maps (1 + 2 + 4), so values 0-7 encode which copes overlap.
    '''
    vtx_data_dict = {}
    for hemi in [ 'lh', 'rh' ]:
        cope1 = io.project_volume_data(cope1_file,
                                       hemi,
                                       subject_id=subject_id)
        cope2 = io.project_volume_data(cope2_file,
                                       hemi,
                                       subject_id=subject_id)
        cope4 = io.project_volume_data(cope4_file,
                                       hemi,
                                       subject_id=subject_id)
        # Binarize the maps and threshold to get rid of vertices that are only
        # created from the smoothing kernel
        cope1_bin = np.copy(cope1)
        cope1_bin[cope1>0] = 1
        cope2_bin = np.copy(cope2)
        cope2_bin[cope2>0] = 2
        cope4_bin = np.copy(cope4)
        cope4_bin[cope4>0] = 4
        cope124_bin = cope1_bin + cope2_bin + cope4_bin
        vtx_data_dict[hemi] = cope124_bin
        # Mask the data so you are only visualising cortex
        cortex_fname = os.path.join(subjects_dir, subject_id, 'label', hemi + '.cortex.label')
        # Read the data in and mask it so that non-cortex is -99
        # NOTE(review): `thresh` here is the module-level global parsed from the
        # command line, not a parameter -- confirm before reusing this function.
        vtx_data_dict[hemi] = mask_vtx_data(vtx_data_dict[hemi], cortex_fname, thresh)
    return vtx_data_dict
#------------------------------------------------------------------------------
def mask_vtx_data(vtx_data, cortex_fname, thresh):
    """Set every non-cortex vertex in vtx_data to thresh - 1 (below display range)."""
    cortex_data = io.nib.freesurfer.read_label(cortex_fname)
    # Create a mask of 1s where there is cortex and 0s on the medial wall
    mask = np.zeros_like(vtx_data)
    mask[cortex_data] = 1
    # Set all values that are not in cortex to thresh-1
    vtx_data[mask == 0] = thresh-1
    # NOTE(review): mutates the input array in place as well as returning it.
    return vtx_data
#------------------------------------------------------------------------------
def calc_range(vtx_data_left, vtx_data_right, thresh, l, u):
    '''
    Calculate colorbar limits shared by both hemispheres.

    This is an important step to ensure that the colorbar is exactly
    the same for the right and left hemispheres.  Vertices below `thresh`
    (masked / non-cortex) are ignored.  `l` and `u` are only computed when
    passed as None; explicit limits are returned untouched.
    '''
    if l is None:  # was `l == None`; identity comparison is the correct idiom
        # Figure out the min for each hemisphere and take the smaller,
        # rounded down to a multiple of 0.05.
        l_l = vtx_data_left[vtx_data_left >= thresh].min()
        l_r = vtx_data_right[vtx_data_right >= thresh].min()
        l = np.floor(np.min([l_l, l_r]) * 20) / 20.0
    if u is None:
        # Figure out the max for each hemisphere and take the larger,
        # rounded up to a multiple of 0.05.
        u_l = vtx_data_left[vtx_data_left >= thresh].max()
        u_r = vtx_data_right[vtx_data_right >= thresh].max()
        u = np.ceil(np.max([u_l, u_r]) * 20) / 20.0
    # Return the lower and upper bounds
    return l, u
#------------------------------------------------------------------------------
def plot_surface(vtx_data, subject_id, subjects_dir, hemi, surface, output_dir, prefix, l, u, cmap, center, thresh):
    """Render vtx_data on one hemisphere/surface and save medial+lateral PNGs."""
    # Open up a brain in pysurfer
    brain = Brain(subject_id, hemi, surface,
                  subjects_dir = subjects_dir,
                  config_opts=dict(background="white",
                                   height=665,
                                   width=800))
    if center:
        # Make sure the colorbar is centered around zero by widening the
        # smaller-magnitude bound to mirror the larger one.
        if l**2 < u **2:
            l = u*-1
        else:
            u = l*-1
    # Create an empty brain if the values are all below threshold
    if np.max(vtx_data) < thresh:
        # Add your data to the brain (fully transparent placeholder)
        brain.add_data(vtx_data*0,
                       l,
                       u,
                       thresh = thresh,
                       colormap=cmap,
                       alpha=0.0)
    # Otherwise, add the data appropriately!
    else:
        # Add your data to the brain
        brain.add_data(vtx_data,
                       l,
                       u,
                       thresh = thresh,
                       colormap=cmap,
                       alpha=.8)
    # Save the images for medial and lateral
    # putting a color bar on all of them
    # NOTE(review): views_list is a module-level global defined near the
    # bottom of the script -- confirm before importing this function elsewhere.
    brain.save_imageset(prefix = os.path.join(output_dir, prefix),
                        views = views_list,
                        colorbar = range(len(views_list)) )
#-----------------------------------------------------------------------------
def combine_pngs(surface, output_dir):
    '''
    Find four images and combine them into one nice picture
    (lh/rh x lateral/medial in a 2x2 grid, colorbar strip underneath).
    '''
    figsize = (5,4)
    fig = plt.figure(figsize = figsize, facecolor='white')
    grid = gridspec.GridSpec(2, 2)
    grid.update(left=0, right=1, top=1, bottom = 0.08, wspace=0, hspace=0)
    f_list = [ os.path.join(output_dir, '_'.join(['lh', surface, 'lateral.png'])),
               os.path.join(output_dir, '_'.join(['rh', surface, 'lateral.png'])),
               os.path.join(output_dir, '_'.join(['lh', surface, 'medial.png'])),
               os.path.join(output_dir, '_'.join(['rh', surface, 'medial.png'])) ]
    # Plot each figure in turn
    for g_loc, f in zip(grid, f_list):
        ax = plt.Subplot(fig, g_loc)
        fig.add_subplot(ax)
        img = mpimg.imread(f)
        # Crop the figures appropriately
        # NOTE: this can change depending on which system you've made the
        # images on originally - it's a bug that needs to be sorted out!
        if 'lateral' in f:
            img_cropped = img[75:589,55:(-50),:]
        else:
            img_cropped = img[45:600,25:(-25),:]
        ax.imshow(img_cropped, interpolation='none')
        ax.set_axis_off()
    # Add the bottom of one of the images as the color bar
    # at the bottom of the combo figure
    # NOTE(review): relies on `f` still holding the last filename from the
    # loop above -- works, but fragile.
    grid_cbar = gridspec.GridSpec(1,1)
    grid_cbar.update(left=0, right=1, top=0.08, bottom=0, wspace=0, hspace=0)
    ax = plt.Subplot(fig, grid_cbar[0])
    fig.add_subplot(ax)
    img = mpimg.imread(f)
    img_cbar = img[600:,:]
    ax.imshow(img_cbar, interpolation='none')
    ax.set_axis_off()
    # Save the figure
    filename = os.path.join(output_dir, '{}_combined.png'.format(surface))
    print filename
    fig.savefig(filename, bbox_inches=0, dpi=300)
#=============================================================================
# SET SOME VARIABLES
#=============================================================================
# Read in the arguments from argparse
arguments, parser = setup_argparser()
cope1_file = arguments.cope1_file
cope2_file = arguments.cope2_file
cope4_file = arguments.cope4_file
output_dir = arguments.output_dir
subject_id = arguments.subject_id
subjects_dir = arguments.subjects_dir
l = arguments.lower
u = arguments.upper
cmap = arguments.cmap
color_file = arguments.color_file
center = arguments.center
surface = arguments.surface
thresh = arguments.thresh
mask = arguments.mask
# Decide which surfaces to render; bail out with usage on anything unknown.
if surface == 'both':
    surface_list = [ "inflated", "pial" ]
elif surface == 'inflated':
    surface_list = [ "inflated" ]
elif surface == 'pial':
    surface_list = [ "pial" ]
else:
    print "Do not recognise surface. Check {}".format(surface)
    parser.print_help()
    sys.exit()
hemi_list = [ "lh", "rh" ]
views_list = [ 'medial', 'lateral' ]
# Check how many of the three cope inputs exist:
cope_dict = {}
if cope1_file is not None:
    cope_dict[1] = cope1_file
if cope2_file is not None:
    cope_dict[2] = cope2_file
if cope4_file is not None:
    cope_dict[4] = cope4_file
if len(cope_dict.keys()) == 0:
    print "No cope files provided! Exiting."
    sys.exit()
# Now check that the files exist
for cope, cope_file in cope_dict.items():
    if not os.path.isfile(cope_file):
        print "{} file doesn't exist, check {}".format(cope, cope_file)
        sys.exit()
# Make the output directory if it doesn't already exist
if not os.path.isdir(output_dir):
    os.makedirs(output_dir)
# Set the subjects dir
os.environ['SUBJECTS_DIR'] = subjects_dir
#=============================================================================
# READ IN THE VERTEX DATA
#=============================================================================
vtx_data_dict = read_in_data(cope1_file,
                             cope2_file,
                             cope4_file,
                             subject_id,
                             subjects_dir)
#=============================================================================
# CALCULATE THE COLOR BAR RANGE
#=============================================================================
# Calculate the lower and upper values if they haven't been defined:
l, u = calc_range(vtx_data_dict['lh'], vtx_data_dict['rh'], thresh, l, u)
# Unless there's a given color file
if color_file:
    cmap = [line.strip() for line in open(color_file)]
    l = 1
    u = len(cmap)
    # If you've passed rgb values you need to convert
    # these to tuples
    # NOTE(review): np.float was removed in NumPy 1.24; fine for the old
    # python2/numpy environment this script targets.
    if len(cmap[0].split()) == 3:
        cmap = [ (np.float(x.split()[0]),
                  np.float(x.split()[1]),
                  np.float(x.split()[2])) for x in cmap ]
#=============================================================================
# MAKE THE INDIVIDUAL PICTURES
#=============================================================================
for hemi, surface in it.product(hemi_list, surface_list):
    prefix = '_'.join([hemi, surface])
    # Show this data on a brain.  `cmap` already holds either the named
    # colormap or the custom color list parsed from color_file, so one call
    # covers both cases.  (The previous code branched on an undefined name
    # `colors`, which always raised NameError at this point.)
    plot_surface(vtx_data_dict[hemi], subject_id, subjects_dir,
                 hemi, surface,
                 output_dir, prefix,
                 l, u, cmap, center,
                 thresh)
#=============================================================================
# COMBINE THE IMAGES
#=============================================================================
# Stitch the four per-view PNGs into one combined figure per surface.
for surface in surface_list:
    combine_pngs(surface, output_dir)
| #!/usr/bin/env python
#=============================================================================
# Created by <NAME>
# September 2014
# Contact: <EMAIL>
#=============================================================================
#=============================================================================
# IMPORTS
#=============================================================================
import os
import sys
import argparse
import numpy as np
from surfer import Brain, io
import itertools as it
import matplotlib.pylab as plt
import matplotlib.image as mpimg
import matplotlib.gridspec as gridspec
#=============================================================================
# FUNCTIONS
#=============================================================================
def setup_argparser():
'''
Code to read in arguments from the command line
Aso allows you to change some settings
'''
# Build a basic parser.
help_text = ('Plot the NORA VISAN data on a freesurfer surface')
sign_off = 'Author: <NAME> <<EMAIL>>'
parser = argparse.ArgumentParser(description=help_text, epilog=sign_off)
# Now add the arguments
parser.add_argument(dest='output_dir',
type=str,
metavar='output_dir',
help='output directory')
parser.add_argument('-cope1', '--cope1_file',
type=str,
metavar='cope1_file',
help='cope1 nifti file in MNI space',
default=None)
parser.add_argument('-cope2', '--cope2_file',
type=str,
metavar='cope2_file',
help='cope2 nifti file in MNI space',
default=None)
parser.add_argument('-cope4', '--cope4_file',
type=str,
metavar='cope4_file',
help='cope4 nifti file in MNI space',
default=None)
parser.add_argument('--subject_id',
type=str,
metavar='subject id',
help='freesurfer subject id',
default='fsaverage')
parser.add_argument('-sd', '--subjects_dir',
type=str,
metavar='subjects_dir',
help='freesurfer subjects dir',
default=os.environ["SUBJECTS_DIR"])
parser.add_argument('-c', '--cmap',
type=str,
metavar='cmap',
help='colormap',
default='autumn')
parser.add_argument('-cf', '--color_file',
type=str,
metavar='color_file',
help='file containing list of custom colors',
default=None)
parser.add_argument('--center',
action='store_true',
help='center the color bar around 0')
parser.add_argument('-t', '--thresh',
type=float,
metavar='thresh',
help='mask values below this value',
default=-98)
parser.add_argument('-m', '--mask',
type=float,
metavar='mask',
help='mask values that are exactly this value',
default=0)
parser.add_argument('-l', '--lower',
type=float,
metavar='lowerthr',
help='lower limit for colorbar',
default=None)
parser.add_argument('-u', '--upper',
type=float,
metavar='upperthr',
help='upper limit for colorbar',
default=None)
parser.add_argument('-s', '--surface',
type=str,
metavar='surface',
help='surface - one of "pial", "inflated" or "both"',
default='both')
arguments = parser.parse_args()
return arguments, parser
#------------------------------------------------------------------------------
def read_in_data(cope1_file, cope2_file, cope4_file, subject_id, subjects_dir):
'''
Read in the three nifti files for each hemisphere
and combine into one surface (for each hemisphere)
'''
vtx_data_dict = {}
for hemi in [ 'lh', 'rh' ]:
cope1 = io.project_volume_data(cope1_file,
hemi,
subject_id=subject_id)
cope2 = io.project_volume_data(cope2_file,
hemi,
subject_id=subject_id)
cope4 = io.project_volume_data(cope4_file,
hemi,
subject_id=subject_id)
# Binarize the maps and threshold to get rid of vertices that are only
# created from the smoothing kernel
cope1_bin = np.copy(cope1)
cope1_bin[cope1>0] = 1
cope2_bin = np.copy(cope2)
cope2_bin[cope2>0] = 2
cope4_bin = np.copy(cope4)
cope4_bin[cope4>0] = 4
cope124_bin = cope1_bin + cope2_bin + cope4_bin
vtx_data_dict[hemi] = cope124_bin
# Mask the data so you are only visualising cortex
cortex_fname = os.path.join(subjects_dir, subject_id, 'label', hemi + '.cortex.label')
# Read the data in and mask it so that non-cortex is -99
vtx_data_dict[hemi] = mask_vtx_data(vtx_data_dict[hemi], cortex_fname, thresh)
return vtx_data_dict
#------------------------------------------------------------------------------
def mask_vtx_data(vtx_data, cortex_fname, thresh):
    """Mask non-cortex vertices in place.

    Vertices that are NOT listed in the FreeSurfer cortex label (i.e. the
    medial wall) are set to thresh - 1 so they fall below the display
    threshold. Returns the (mutated) vtx_data array.
    """
    cortex_vertices = io.nib.freesurfer.read_label(cortex_fname)
    # Boolean medial-wall mask: True everywhere except the cortex vertices
    medial_wall = np.ones_like(vtx_data, dtype=bool)
    medial_wall[cortex_vertices] = False
    # Push the medial wall just below the threshold so it is not shown
    vtx_data[medial_wall] = thresh - 1
    return vtx_data
#------------------------------------------------------------------------------
def calc_range(vtx_data_left, vtx_data_right, thresh, l, u):
    '''
    Calculate colorbar limits shared by both hemispheres.

    This is an important step to ensure that the colorbar is exactly
    the same for the right and left hemispheres.

    Only vertices >= thresh (real data, not the masked medial wall) are
    considered. Bounds that the caller already supplied are kept as-is;
    computed bounds are rounded outwards to a multiple of 0.05.

    Returns (lower, upper).
    '''
    # NOTE: use `is None` (identity), not `== None` — with numpy arrays in
    # scope, `==` comparisons against None are also semantically fragile.
    if l is None:
        # Smallest supra-threshold value across both hemispheres,
        # rounded down to a multiple of 0.05
        l = min(vtx_data_left[vtx_data_left >= thresh].min(),
                vtx_data_right[vtx_data_right >= thresh].min())
        l = np.floor(l * 20) / 20.0
    if u is None:
        # Largest supra-threshold value across both hemispheres,
        # rounded up to a multiple of 0.05
        u = max(vtx_data_left[vtx_data_left >= thresh].max(),
                vtx_data_right[vtx_data_right >= thresh].max())
        u = np.ceil(u * 20) / 20.0
    return l, u
#------------------------------------------------------------------------------
def plot_surface(vtx_data, subject_id, subjects_dir, hemi, surface, output_dir, prefix, l, u, cmap, center, thresh):
    """Render vtx_data on one hemisphere with PySurfer and save the
    medial/lateral views (each with a colorbar) to output_dir/<prefix>_*.

    If `center` is True the colorbar is made symmetric around zero by
    widening whichever bound has the smaller magnitude. If every value is
    below `thresh`, a fully transparent dummy overlay is drawn instead so
    an (empty) image set is still produced.
    """
    # Open up a brain window in pysurfer
    brain = Brain(subject_id, hemi, surface,
                  subjects_dir=subjects_dir,
                  config_opts=dict(background="white",
                                   height=665,
                                   width=800))
    if center:
        # Make the colorbar symmetric around 0 using the larger magnitude
        if abs(l) < abs(u):
            l = -u
        else:
            u = -l
    # Choose what to draw: an invisible zero overlay when nothing survives
    # the threshold, otherwise the real data at 80% opacity.
    if np.max(vtx_data) < thresh:
        overlay, opacity = vtx_data * 0, 0.0
    else:
        overlay, opacity = vtx_data, 0.8
    brain.add_data(overlay,
                   l,
                   u,
                   thresh=thresh,
                   colormap=cmap,
                   alpha=opacity)
    # Save the medial and lateral views, putting a colorbar on all of them
    # (views_list is a module-level global)
    brain.save_imageset(prefix=os.path.join(output_dir, prefix),
                        views=views_list,
                        colorbar=range(len(views_list)))
#-----------------------------------------------------------------------------
def combine_pngs(surface, output_dir):
    '''
    Find the four per-hemisphere/per-view images for `surface` and combine
    them into one 2x2 picture, re-using the bottom strip of the last image
    as a shared colorbar. Writes <surface>_combined.png into output_dir.
    '''
    figsize = (5,4)
    fig = plt.figure(figsize = figsize, facecolor='white')
    # Top 92% of the figure: a 2x2 grid of brain views
    grid = gridspec.GridSpec(2, 2)
    grid.update(left=0, right=1, top=1, bottom = 0.08, wspace=0, hspace=0)
    f_list = [ os.path.join(output_dir, '_'.join(['lh', surface, 'lateral.png'])),
               os.path.join(output_dir, '_'.join(['rh', surface, 'lateral.png'])),
               os.path.join(output_dir, '_'.join(['lh', surface, 'medial.png'])),
               os.path.join(output_dir, '_'.join(['rh', surface, 'medial.png'])) ]
    # Plot each figure in turn
    for g_loc, f in zip(grid, f_list):
        ax = plt.Subplot(fig, g_loc)
        fig.add_subplot(ax)
        img = mpimg.imread(f)
        # Crop the figures appropriately
        # NOTE: this can change depending on which system you've made the
        # images on originally - it's a bug that needs to be sorted out!
        if 'lateral' in f:
            img_cropped = img[75:589,55:(-50),:]
        else:
            img_cropped = img[45:600,25:(-25),:]
        ax.imshow(img_cropped, interpolation='none')
        ax.set_axis_off()
    # Add the bottom of one of the images as the color bar
    # at the bottom of the combo figure
    grid_cbar = gridspec.GridSpec(1,1)
    grid_cbar.update(left=0, right=1, top=0.08, bottom=0, wspace=0, hspace=0)
    ax = plt.Subplot(fig, grid_cbar[0])
    fig.add_subplot(ax)
    # NOTE(review): `f` here is the loop variable leaking out of the loop
    # above, i.e. the rh medial image - intentional but fragile.
    img = mpimg.imread(f)
    img_cbar = img[600:,:]
    ax.imshow(img_cbar, interpolation='none')
    ax.set_axis_off()
    # Save the figure (Python 2 print statement - this file is Python 2)
    filename = os.path.join(output_dir, '{}_combined.png'.format(surface))
    print filename
    fig.savefig(filename, bbox_inches=0, dpi=300)
#=============================================================================
# SET SOME VARIABLES
#=============================================================================
# Read in the arguments from argparse
arguments, parser = setup_argparser()
# Unpack the command line options into plain module-level names
cope1_file = arguments.cope1_file
cope2_file = arguments.cope2_file
cope4_file = arguments.cope4_file
output_dir = arguments.output_dir
subject_id = arguments.subject_id
subjects_dir = arguments.subjects_dir
l = arguments.lower      # lower colorbar limit (None -> computed later)
u = arguments.upper      # upper colorbar limit (None -> computed later)
cmap = arguments.cmap
color_file = arguments.color_file
center = arguments.center
surface = arguments.surface
thresh = arguments.thresh
mask = arguments.mask    # NOTE(review): parsed but apparently unused below - confirm
# Translate the --surface choice into the list of surfaces to render
if surface == 'both':
    surface_list = [ "inflated", "pial" ]
elif surface == 'inflated':
    surface_list = [ "inflated" ]
elif surface == 'pial':
    surface_list = [ "pial" ]
else:
    # Unknown surface name: show usage and bail out (Python 2 print)
    print "Do not recognise surface. Check {}".format(surface)
    parser.print_help()
    sys.exit()
# Both hemispheres and both views are always rendered
hemi_list = [ "lh", "rh" ]
views_list = [ 'medial', 'lateral' ]
# Check how many of the three cope inputs exist:
# (each cope is optional on the command line; remember which were given)
cope_dict = {}
if cope1_file is not None:
    cope_dict[1] = cope1_file
if cope2_file is not None:
    cope_dict[2] = cope2_file
if cope4_file is not None:
    cope_dict[4] = cope4_file
if len(cope_dict.keys()) == 0:
    print "No cope files provided! Exiting."
    sys.exit()
# Now check that the files exist
for cope, cope_file in cope_dict.items():
    if not os.path.isfile(cope_file):
        print "{} file doesn't exist, check {}".format(cope, cope_file)
        sys.exit()
# Make the output directory if it doesn't already exist
if not os.path.isdir(output_dir):
    os.makedirs(output_dir)
# Set the subjects dir so FreeSurfer/PySurfer can locate the subject
os.environ['SUBJECTS_DIR'] = subjects_dir
#=============================================================================
# READ IN THE VERTEX DATA
#=============================================================================
vtx_data_dict = read_in_data(cope1_file,
                             cope2_file,
                             cope4_file,
                             subject_id,
                             subjects_dir)
#=============================================================================
# CALCULATE THE COLOR BAR RANGE
#=============================================================================
# Calculate the lower and upper values if they haven't been defined:
l, u = calc_range(vtx_data_dict['lh'], vtx_data_dict['rh'], thresh, l, u)
# Unless there's a given color file
# (a color file overrides the colormap: one color per line, and the
# colorbar then spans the color indices 1..len(cmap))
if color_file:
    cmap = [line.strip() for line in open(color_file)]
    l = 1
    u = len(cmap)
    # If you've passed rgb values you need to convert
    # these to tuples
    # NOTE(review): np.float is removed in numpy >= 1.24; plain float()
    # would be a safe replacement here
    if len(cmap[0].split()) == 3:
        cmap = [ (np.float(x.split()[0]),
                  np.float(x.split()[1]),
                  np.float(x.split()[2])) for x in cmap ]
#=============================================================================
# MAKE THE INDIVIDUAL PICTURES
#=============================================================================
for hemi, surface in it.product(hemi_list, surface_list):
    prefix = '_'.join([hemi, surface])
    # Show this data on a brain.
    # BUGFIX: the original branched on an undefined name `colors`, which
    # raised NameError at runtime; `cmap` already holds either the named
    # colormap or the list parsed from --colorfile, so pass it directly.
    plot_surface(vtx_data_dict[hemi], subject_id, subjects_dir,
                 hemi, surface,
                 output_dir, prefix,
                 l, u, cmap, center,
                 thresh)
#=============================================================================
# COMBINE THE IMAGES
#=============================================================================
# Stitch the four saved views for each surface into one combined png
for surface in surface_list:
    combine_pngs(surface, output_dir)
# Figure out the min and max for each hemisphere # Take the smallest of these two # And round to a nice number # Figure out the min and max for each hemisphere # Take the largest of these two # And round to a nice number # Return the lower and upper bounds #------------------------------------------------------------------------------ # Open up a brain in pysurfer # Make sure the colorbar is centered # Create an empty brain if the values are all below threshold # Add your data to the brain # Otherwise, add the data appropriately! # Add your data to the brain # Save the images for medial and lateral # putting a color bar on all of them #----------------------------------------------------------------------------- Find four images and combine them into one nice picture # Plot each figure in turn # Crop the figures appropriately # NOTE: this can change depending on which system you've made the # images on originally - it's a bug that needs to be sorted out! # Add the bottom of one of the images as the color bar # at the bottom of the combo figure # Save the figure #============================================================================= # SET SOME VARIABLES #============================================================================= # Read in the arguments from argparse # Check how many of the three cope inputs exist: # Now check that the files exist # Make the output directory if it doesn't already exist # Set the subjects dir #============================================================================= # READ IN THE VERTEX DATA #============================================================================= #============================================================================= # CALCULATE THE COLOR BAR RANGE #============================================================================= # Calculate the lower and upper values if they haven't been defined: # Unless there's a given color file # If you've passed rgb values you need to convert # these to 
tuples #============================================================================= # MAKE THE INDIVIDUAL PICTURES #============================================================================= # Show this data on a brain #============================================================================= # COMBINE THE IMAGES #============================================================================= | 1.796541 | 2 |
webkit/build/generate_devtools_html.py | rwatson/chromium-capsicum | 11 | 6615882 | #!/usr/bin/env python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os.path
import sys
def GenerateIncludeTag(resource_path):
  """Return the HTML tag that includes the given resource file.

  .js resources get a <script> tag and .css resources a <link> tag.
  Anything else is a programming error and raises ValueError.
  (BUGFIX: the original `assert false` was itself a NameError —
  Python's constant is `False` — so the error path always crashed with
  the wrong exception.)
  """
  file_name = os.path.basename(resource_path)
  if file_name.endswith('.js'):
    return ' <script type="text/javascript" src="%s"></script>\n' % file_name
  elif file_name.endswith('.css'):
    return ' <link rel="stylesheet" type="text/css" href="%s">\n' % file_name
  else:
    raise ValueError('Unsupported resource type: %s' % resource_path)
def main(argv):
  """Copy inspector_html to devtools_html, injecting include tags for every
  extra resource just before the </head> line.

  argv layout: [prog, ignored, inspector_html, devtools_html, resource...]
  Returns 1 on bad usage; falls through (None) on success.
  """
  if len(argv) < 4:
    print('usage: %s ignored inspector_html devtools_html'
          ' css_and_js_files_list' % argv[0])
    return 1
  # The first argument is ignored. We put 'webkit.gyp' in the inputs list
  # for this script, so every time the list of script gets changed, our html
  # file is rebuilt.
  inspector_html_name = argv[2]
  devtools_html_name = argv[3]
  # Use context managers so both files are closed even if writing fails
  # (the original leaked both handles on any exception).
  with open(inspector_html_name, 'r') as inspector_html, \
       open(devtools_html_name, 'w') as devtools_html:
    for line in inspector_html:
      if '</head>' in line:
        devtools_html.write('\n <!-- The following lines are added to include DevTools resources -->\n')
        for resource in argv[4:]:
          devtools_html.write(GenerateIncludeTag(resource))
        devtools_html.write(' <!-- End of auto-added files list -->\n')
      devtools_html.write(line)
  # Touch output file directory to make sure that Xcode will copy
  # modified resource files.
  if sys.platform == 'darwin':
    output_dir_name = os.path.dirname(devtools_html_name)
    os.utime(output_dir_name, None)
if __name__ == '__main__':
  # Propagate main()'s return code as the process exit status so build
  # systems see usage errors as failures.
  sys.exit(main(sys.argv))
| #!/usr/bin/env python
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os.path
import sys
def GenerateIncludeTag(resource_path):
  """Return the HTML tag that includes the given resource file.

  .js resources get a <script> tag and .css resources a <link> tag.
  Anything else is a programming error and raises ValueError.
  (BUGFIX: the original `assert false` was itself a NameError —
  Python's constant is `False` — so the error path always crashed with
  the wrong exception.)
  """
  file_name = os.path.basename(resource_path)
  if file_name.endswith('.js'):
    return ' <script type="text/javascript" src="%s"></script>\n' % file_name
  elif file_name.endswith('.css'):
    return ' <link rel="stylesheet" type="text/css" href="%s">\n' % file_name
  else:
    raise ValueError('Unsupported resource type: %s' % resource_path)
def main(argv):
  """Copy inspector_html to devtools_html, injecting include tags for every
  extra resource just before the </head> line.

  argv layout: [prog, ignored, inspector_html, devtools_html, resource...]
  Returns 1 on bad usage; falls through (None) on success.
  """
  if len(argv) < 4:
    print('usage: %s ignored inspector_html devtools_html'
          ' css_and_js_files_list' % argv[0])
    return 1
  # The first argument is ignored. We put 'webkit.gyp' in the inputs list
  # for this script, so every time the list of script gets changed, our html
  # file is rebuilt.
  inspector_html_name = argv[2]
  devtools_html_name = argv[3]
  # Use context managers so both files are closed even if writing fails
  # (the original leaked both handles on any exception).
  with open(inspector_html_name, 'r') as inspector_html, \
       open(devtools_html_name, 'w') as devtools_html:
    for line in inspector_html:
      if '</head>' in line:
        devtools_html.write('\n <!-- The following lines are added to include DevTools resources -->\n')
        for resource in argv[4:]:
          devtools_html.write(GenerateIncludeTag(resource))
        devtools_html.write(' <!-- End of auto-added files list -->\n')
      devtools_html.write(line)
  # Touch output file directory to make sure that Xcode will copy
  # modified resource files.
  if sys.platform == 'darwin':
    output_dir_name = os.path.dirname(devtools_html_name)
    os.utime(output_dir_name, None)
if __name__ == '__main__':
  # Propagate main()'s return code as the process exit status so build
  # systems see usage errors as failures.
  sys.exit(main(sys.argv))
| en | 0.839478 | #!/usr/bin/env python # Copyright (c) 2009 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. # The first argument is ignored. We put 'webkit.gyp' in the inputs list # for this script, so every time the list of script gets changed, our html # file is rebuilt. # Touch output file directory to make sure that Xcode will copy # modified resource files. | 2.286416 | 2 |
algorithm_learning/figure/generate_figure.py | SigmaQuan/NTM-Keras | 33 | 6615883 | <filename>algorithm_learning/figure/generate_figure.py
import dataset
import visualization
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
def show_repeat_copy_data(
        output_sequence_copy,
        input_sequence_copy,
        repeat_times_copy,
        output_sequence_recall,
        input_sequence_recall,
        output_sequence_sort,
        input_sequence_sort,
        input_name,
        output_name,
        image_file):
    """Plot the priority-sort input/output sequences and save them as a PDF.

    Only the priority-sort pair is currently drawn (the bottom two rows of
    a 9-row grid); the repeat-copy and associative-recall arguments, as
    well as input_name/output_name, are accepted but unused so existing
    callers keep working. (The original carried ~50 lines of commented-out
    code for the other two panels; removed here for readability.)

    The sequences are 2-D arrays shown with imshow; values are assumed to
    lie in [0, 1] — the colorbar is ticked at 0, 0.5 and 1.
    """
    # set figure size
    fig = plt.figure(figsize=(12, 5))
    # Output sequence on the 8th row of a 9x1 grid
    axes_output = plt.subplot2grid((9, 1), (7, 0), colspan=1)
    axes_output.set_aspect('equal')
    plt.imshow(output_sequence_sort, interpolation='none')
    axes_output.set_xticks([])
    axes_output.set_yticks([])
    # Input sequence on the bottom (9th) row
    axes_input = plt.subplot2grid((9, 1), (8, 0), colspan=1)
    plt.imshow(input_sequence_sort, interpolation='none')
    axes_input.set_xticks([])
    axes_input.set_yticks([])
    # Thin color bar on the right; rect = [left, bottom, width, height]
    cax = plt.axes([0.84, 0.1, 0.005, 0.165])
    cbar = plt.colorbar(cax=cax)
    cbar.set_ticks(np.linspace(0, 1, 3))
    cbar.set_ticklabels(('0', '0.5', '1'))
    # Show the figure interactively, then save and close
    plt.show()
    fig.savefig(image_file, dpi=75, format='pdf')
    plt.close()
def show_algorithm_learning_example():
    """Generate one sample from each algorithmic task (repeat copy,
    associative recall, priority sort), fold the priority scalars into the
    sort sequences, and plot/save everything via show_repeat_copy_data().

    Relies on the project-local `dataset` module for sample generation.
    (Cleanup: the original allocated two zero arrays for the sort
    sequences that were immediately overwritten — dead code removed.)
    """
    # --- Repeat-copy sample -------------------------------------------------
    input_size_copy = 8
    sequence_length_copy = 10
    repeat_times = 15
    input_sequence_copy, output_sequence_copy = \
        dataset.generate_repeat_copy_sample(
            input_size_copy, sequence_length_copy, repeat_times)
    print(input_sequence_copy.shape)
    print(output_sequence_copy.shape)
    # --- Associative-recall sample, padded to the copy sample's shape -------
    input_size_recall = 6
    item_size = 3
    episode_size = 38
    max_episode_size = 38
    input_sequence_recall = np.zeros(input_sequence_copy.shape)
    output_sequence_recall = np.zeros(output_sequence_copy.shape)
    input_sequence_recall_, output_sequence_recall_ = \
        dataset.generate_associative_recall_sample(
            input_size_recall, item_size, episode_size, max_episode_size)
    # The recall sample is one time step shorter; the last row stays zero.
    input_sequence_recall[:-1] = input_sequence_recall_
    output_sequence_recall[:-1] = output_sequence_recall_
    print(input_sequence_recall.shape)
    print(output_sequence_recall.shape)
    # --- Priority-sort sample ----------------------------------------------
    input_size_sort = 6
    input_sequence_length = 80
    output_sequence_length = 80
    priority_lower_bound = 0
    priority_upper_bound = 1
    input_sequence_sort, input_priority_, output_sequence_sort, output_priority_ = \
        dataset.generate_priority_sort_sample(
            input_size_sort,
            input_sequence_length,
            output_sequence_length,
            priority_lower_bound,
            priority_upper_bound)
    # Fold the priority scalar into the second-to-last channel so it is
    # visible in the plotted sequence.
    input_sequence_sort[:, -2] = input_priority_.transpose()[0]
    output_sequence_sort[:, -2] = output_priority_.transpose()[0]
    print(input_sequence_sort.shape)
    print(output_sequence_sort.shape)
    print(input_sequence_sort[:, -2])
    print(input_priority_.transpose()[0])
    show_repeat_copy_data(
        output_sequence_copy.transpose(),
        input_sequence_copy.transpose(),
        repeat_times,
        output_sequence_recall.transpose(),
        input_sequence_recall.transpose(),
        output_sequence_sort.transpose(),
        input_sequence_sort.transpose(),
        "$y^{(t)}$",
        "$x^{(t)}$",
        "../experiment/algorithm_learning_data.pdf")
    print("end..")
if __name__ == "__main__":
    # Run the demo plot when executed as a script.
    show_algorithm_learning_example()
| <filename>algorithm_learning/figure/generate_figure.py
import dataset
import visualization
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_pdf import PdfPages
def show_repeat_copy_data(
        output_sequence_copy,
        input_sequence_copy,
        repeat_times_copy,
        output_sequence_recall,
        input_sequence_recall,
        output_sequence_sort,
        input_sequence_sort,
        input_name,
        output_name,
        image_file):
    """Plot the priority-sort input/output sequences and save them as a PDF.

    Only the priority-sort pair is currently drawn (the bottom two rows of
    a 9-row grid); the repeat-copy and associative-recall arguments, as
    well as input_name/output_name, are accepted but unused so existing
    callers keep working. (The original carried ~50 lines of commented-out
    code for the other two panels; removed here for readability.)

    The sequences are 2-D arrays shown with imshow; values are assumed to
    lie in [0, 1] — the colorbar is ticked at 0, 0.5 and 1.
    """
    # set figure size
    fig = plt.figure(figsize=(12, 5))
    # Output sequence on the 8th row of a 9x1 grid
    axes_output = plt.subplot2grid((9, 1), (7, 0), colspan=1)
    axes_output.set_aspect('equal')
    plt.imshow(output_sequence_sort, interpolation='none')
    axes_output.set_xticks([])
    axes_output.set_yticks([])
    # Input sequence on the bottom (9th) row
    axes_input = plt.subplot2grid((9, 1), (8, 0), colspan=1)
    plt.imshow(input_sequence_sort, interpolation='none')
    axes_input.set_xticks([])
    axes_input.set_yticks([])
    # Thin color bar on the right; rect = [left, bottom, width, height]
    cax = plt.axes([0.84, 0.1, 0.005, 0.165])
    cbar = plt.colorbar(cax=cax)
    cbar.set_ticks(np.linspace(0, 1, 3))
    cbar.set_ticklabels(('0', '0.5', '1'))
    # Show the figure interactively, then save and close
    plt.show()
    fig.savefig(image_file, dpi=75, format='pdf')
    plt.close()
def show_algorithm_learning_example():
    """Generate one sample from each algorithmic task (repeat copy,
    associative recall, priority sort), fold the priority scalars into the
    sort sequences, and plot/save everything via show_repeat_copy_data().

    Relies on the project-local `dataset` module for sample generation.
    (Cleanup: the original allocated two zero arrays for the sort
    sequences that were immediately overwritten — dead code removed.)
    """
    # --- Repeat-copy sample -------------------------------------------------
    input_size_copy = 8
    sequence_length_copy = 10
    repeat_times = 15
    input_sequence_copy, output_sequence_copy = \
        dataset.generate_repeat_copy_sample(
            input_size_copy, sequence_length_copy, repeat_times)
    print(input_sequence_copy.shape)
    print(output_sequence_copy.shape)
    # --- Associative-recall sample, padded to the copy sample's shape -------
    input_size_recall = 6
    item_size = 3
    episode_size = 38
    max_episode_size = 38
    input_sequence_recall = np.zeros(input_sequence_copy.shape)
    output_sequence_recall = np.zeros(output_sequence_copy.shape)
    input_sequence_recall_, output_sequence_recall_ = \
        dataset.generate_associative_recall_sample(
            input_size_recall, item_size, episode_size, max_episode_size)
    # The recall sample is one time step shorter; the last row stays zero.
    input_sequence_recall[:-1] = input_sequence_recall_
    output_sequence_recall[:-1] = output_sequence_recall_
    print(input_sequence_recall.shape)
    print(output_sequence_recall.shape)
    # --- Priority-sort sample ----------------------------------------------
    input_size_sort = 6
    input_sequence_length = 80
    output_sequence_length = 80
    priority_lower_bound = 0
    priority_upper_bound = 1
    input_sequence_sort, input_priority_, output_sequence_sort, output_priority_ = \
        dataset.generate_priority_sort_sample(
            input_size_sort,
            input_sequence_length,
            output_sequence_length,
            priority_lower_bound,
            priority_upper_bound)
    # Fold the priority scalar into the second-to-last channel so it is
    # visible in the plotted sequence.
    input_sequence_sort[:, -2] = input_priority_.transpose()[0]
    output_sequence_sort[:, -2] = output_priority_.transpose()[0]
    print(input_sequence_sort.shape)
    print(output_sequence_sort.shape)
    print(input_sequence_sort[:, -2])
    print(input_priority_.transpose()[0])
    show_repeat_copy_data(
        output_sequence_copy.transpose(),
        input_sequence_copy.transpose(),
        repeat_times,
        output_sequence_recall.transpose(),
        input_sequence_recall.transpose(),
        output_sequence_sort.transpose(),
        input_sequence_sort.transpose(),
        "$y^{(t)}$",
        "$x^{(t)}$",
        "../experiment/algorithm_learning_data.pdf")
    print("end..")
if __name__ == "__main__":
    # Run the demo plot when executed as a script.
    show_algorithm_learning_example()
| en | 0.342816 | # set figure size # # # draw first line # axes_input_10 = plt.subplot2grid((9, 1), (1, 0), colspan=1) # axes_input_10.set_aspect('equal') # plt.imshow(output_sequence_copy, interpolation='none') # axes_input_10.set_xticks([]) # axes_input_10.set_yticks([]) # # draw second line # axes_output_10 = plt.subplot2grid((9, 1), (2, 0), colspan=1) # plt.imshow(input_sequence_copy, interpolation='none') # axes_output_10.set_xticks([]) # axes_output_10.set_yticks([]) # # draw third line # # plt.text(80, 12, "(a) repeat copy", ha='center') # # title = "Repeat times = %d" % repeat_times_copy # # plt.text(80, -12, title, ha='center') # # plt.text(-2, 5, output_name, ha='right') # # plt.text(-2, -5, input_name, ha='right') # # # plt.text(18, 12, 'Time $t$ $\longrightarrow$', ha='right') # # plt.text(9, 12, '$t$ $\longrightarrow$', ha='right') # # # draw first line # axes_input_10 = plt.subplot2grid((9, 1), (4, 0), colspan=1) # axes_input_10.set_aspect('equal') # plt.imshow(output_sequence_recall, interpolation='none') # axes_input_10.set_xticks([]) # axes_input_10.set_yticks([]) # # draw second line # axes_output_10 = plt.subplot2grid((9, 1), (5, 0), colspan=1) # plt.imshow(input_sequence_recall, interpolation='none') # axes_output_10.set_xticks([]) # axes_output_10.set_yticks([]) # # draw third line # # plt.text(80, 12, "(b) associative recall", ha='center') # # plt.text(-2, 5, output_name, ha='right') # # plt.text(-2, -5, input_name, ha='right') # # plt.text(9, 12, '$t$ $\longrightarrow$', ha='right') # draw first line # draw second line # draw third line # plt.text(80, 12, "(c) priority sort", ha='center') # plt.text(-2, 5, output_name, ha='right') # plt.text(-2, -5, input_name, ha='right') # plt.text(9, 12, '$t$ $\longrightarrow$', ha='right') # add color bars # # *rect* = [left, bottom, width, height] # cax = plt.axes([0.84, 0.1, 0.005, 0.71]) # show colorbar # cbar = plt.colorbar(gci) # cbar.set_label('$T_B(K)$', fontdict=font) # show figure # save image # 
close plot GUI # item_size = 4 # input_sequence_sort = np.zeros(input_sequence_copy.shape) # output_sequence_sort = np.zeros(output_sequence_copy.shape) # print(input_sequence_sort[1:50, :]) # file_priority_input_sequence = "../experiment/file_priority_input_sequence.txt" # file_priority_output_sequence = "../experiment/file_priority_output_sequence.txt" # # priority_input_sequence = open(file_priority_input_sequence, 'w') # (row, column) = input_sequence_sort.shape # for i in range(row): # for j in range(column): # one_point = "%d %d %f\n"%(i, j, input_sequence_sort[i][j]) # priority_input_sequence.write(one_point) # priority_input_sequence.close() | 3.09306 | 3 |
python/QuickSort.py | yandong2023/The-sword-pointing-to-offer-code | 2 | 6615884 | # coding: utf-8
def quickSort(alist):
    """Sort `alist` in place with quicksort (delegates to the recursive helper)."""
    quickSortHelper(alist, 0, len(alist)-1)
def quickSortHelper(alist, first, last):
    """Recursively quicksort the slice alist[first..last] (inclusive bounds)."""
    if first < last:
        # Partition around alist[first]; splitPoint is the pivot's final index.
        splitPoint = partition(alist, first, last)
        # Sort the two halves either side of the pivot.
        quickSortHelper(alist, first, splitPoint-1)
        quickSortHelper(alist, splitPoint+1, last)
def partition(alist, first, last):
    """Partition alist[first..last] in place around pivot alist[first].

    Afterwards every element left of the returned index is <= the pivot
    and every element right of it is >= the pivot; the pivot itself is
    moved to that index, which is returned.
    """
    pivot_value = alist[first]
    lo = first + 1
    hi = last
    while True:
        # Advance lo past values that belong on the pivot's left.
        # (Check the index bound first so we never read past `hi`.)
        while lo <= hi and alist[lo] <= pivot_value:
            lo += 1
        # Retreat hi past values that belong on the pivot's right.
        while hi >= lo and alist[hi] >= pivot_value:
            hi -= 1
        if lo > hi:
            break
        # Markers crossed an out-of-place pair: swap it and keep scanning.
        alist[lo], alist[hi] = alist[hi], alist[lo]
    # Drop the pivot into its final position.
    alist[hi], alist[first] = alist[first], alist[hi]
    return hi
# Module-level demo: quicksorting a one-element list is a no-op.
alist = [54, 26, 93, 17, 77, 31, 44, 55, 20]
alist2 = [1]
quickSort(alist2)
print(alist2)

if __name__ == "__main__":
    # Cross-check quickSort against the built-in sort on data that
    # includes duplicates, zeros and a negative value.
    test_data = [3, 2, 111, 3, -1, 0, 0, 1, 0, 2, 4]
    res_stable = sorted(test_data)
    quickSort(test_data)
    print(test_data)
    print(res_stable)
    assert test_data == res_stable
def quickSort(alist):
    """Sort `alist` in place with quicksort (delegates to the recursive helper)."""
    quickSortHelper(alist, 0, len(alist)-1)
def quickSortHelper(alist, first, last):
    """Recursively quicksort the slice alist[first..last] (inclusive bounds)."""
    if first < last:
        # Partition around alist[first]; splitPoint is the pivot's final index.
        splitPoint = partition(alist, first, last)
        # Sort the two halves either side of the pivot.
        quickSortHelper(alist, first, splitPoint-1)
        quickSortHelper(alist, splitPoint+1, last)
def partition(alist, first, last):
    """Partition alist[first..last] in place around pivot alist[first].

    Afterwards every element left of the returned index is <= the pivot
    and every element right of it is >= the pivot; the pivot itself is
    moved to that index, which is returned.
    """
    pivot_value = alist[first]
    lo = first + 1
    hi = last
    while True:
        # Advance lo past values that belong on the pivot's left.
        # (Check the index bound first so we never read past `hi`.)
        while lo <= hi and alist[lo] <= pivot_value:
            lo += 1
        # Retreat hi past values that belong on the pivot's right.
        while hi >= lo and alist[hi] >= pivot_value:
            hi -= 1
        if lo > hi:
            break
        # Markers crossed an out-of-place pair: swap it and keep scanning.
        alist[lo], alist[hi] = alist[hi], alist[lo]
    # Drop the pivot into its final position.
    alist[hi], alist[first] = alist[first], alist[hi]
    return hi
# Module-level demo: quicksorting a one-element list is a no-op.
alist = [54, 26, 93, 17, 77, 31, 44, 55, 20]
alist2 = [1]
quickSort(alist2)
print(alist2)

if __name__ == "__main__":
    # Cross-check quickSort against the built-in sort on data that
    # includes duplicates, zeros and a negative value.
    test_data = [3, 2, 111, 3, -1, 0, 0, 1, 0, 2, 4]
    res_stable = sorted(test_data)
    quickSort(test_data)
    print(test_data)
    print(res_stable)
    assert test_data == res_stable
tools/rollout.py | fredericgo/rl_morph_pytorch | 0 | 6615885 | import argparse
import datetime
import numpy as np
import itertools
import torch
import imageio
import envs
import gym
from rl.sac import SAC
from rl.replay_memory import ReplayMemory
from rl.model import GaussianPolicy, QNetwork, DeterministicPolicy
# Command-line configuration for rolling out a trained SAC actor and
# recording the episodes to a video file.
# NOTE(review): several help strings look copied from the training script
# and do not match the option they describe (e.g. --num_episodes,
# --actor_path) -- confirm before relying on --help output.
parser = argparse.ArgumentParser(description='PyTorch Soft Actor-Critic Args')
parser.add_argument('--env-name', default="ant_s1-v0",
                    help='Mujoco Gym environment (default: HalfCheetah-v2)')
parser.add_argument('--policy', default="Gaussian",
                    help='Policy Type: Gaussian | Deterministic (default: Gaussian)')
parser.add_argument('--seed', type=int, default=123456, metavar='N',
                    help='random seed (default: 123456)')
parser.add_argument('--num_episodes', type=int, default=3, metavar='N',
                    help='maximum number of steps (default: 1000000)')
parser.add_argument('--hidden_size', type=int, default=256, metavar='N',
                    help='hidden size (default: 256)')
parser.add_argument('--cuda', action="store_true",
                    help='run on CUDA (default: False)')
parser.add_argument('--actor_path',
                    help='checkpoint training model every # steps')
parser.add_argument('--video_file_name',
                    help='output file name')
args = parser.parse_args()
# Environment
# env = NormalizedActions(gym.make(args.env_name))
env = gym.make(args.env_name)
# Seed the environment and all RNGs so rollouts are reproducible.
env.seed(args.seed)
env.action_space.seed(args.seed)
torch.manual_seed(args.seed)
np.random.seed(args.seed)
# Agent
device = torch.device("cuda" if args.cuda else "cpu")
# Rebuild the actor network and load trained weights from --actor_path.
policy = GaussianPolicy(env.observation_space.shape[0], env.action_space.shape[0],
                        args.hidden_size, env.action_space).to(device)
policy.load_state_dict(torch.load(args.actor_path))
def select_action(state):
    """Return the policy's deterministic (mean) action for *state* as a numpy vector."""
    obs = torch.FloatTensor(state).to(device).unsqueeze(0)
    _, _, mean_action = policy.sample(obs)
    return mean_action.detach().cpu().numpy()[0]
# Evaluation loop
total_numsteps = 0
avg_reward = 0.
with imageio.get_writer(args.video_file_name, fps=30) as video:
    # Run exactly args.num_episodes evaluation episodes, recording every
    # rendered frame.  BUGFIX: the original loop used itertools.count(1)
    # with a post-episode break (``if i_episode > args.num_episodes``), so
    # it actually ran -- and accumulated reward for -- num_episodes + 1
    # episodes while still dividing by num_episodes.  A second redundant
    # ``done = False`` was also removed.
    for i_episode in range(1, args.num_episodes + 1):
        episode_reward = 0
        episode_steps = 0
        done = False
        state = env.reset()
        video.append_data(env.render('rgb_array'))
        while not done:
            action = select_action(state)
            next_state, reward, done, _ = env.step(action)
            video.append_data(env.render('rgb_array'))
            episode_reward += reward
            state = next_state
        avg_reward += episode_reward
avg_reward /= args.num_episodes
print("----------------------------------------")
print("Test Episodes: {}, Avg. Reward: {}".format(args.num_episodes, round(avg_reward, 2)))
print("----------------------------------------")
env.close()
| import argparse
import datetime
import numpy as np
import itertools
import torch
import imageio
import envs
import gym
from rl.sac import SAC
from rl.replay_memory import ReplayMemory
from rl.model import GaussianPolicy, QNetwork, DeterministicPolicy
parser = argparse.ArgumentParser(description='PyTorch Soft Actor-Critic Args')
parser.add_argument('--env-name', default="ant_s1-v0",
help='Mujoco Gym environment (default: HalfCheetah-v2)')
parser.add_argument('--policy', default="Gaussian",
help='Policy Type: Gaussian | Deterministic (default: Gaussian)')
parser.add_argument('--seed', type=int, default=123456, metavar='N',
help='random seed (default: 123456)')
parser.add_argument('--num_episodes', type=int, default=3, metavar='N',
help='maximum number of steps (default: 1000000)')
parser.add_argument('--hidden_size', type=int, default=256, metavar='N',
help='hidden size (default: 256)')
parser.add_argument('--cuda', action="store_true",
help='run on CUDA (default: False)')
parser.add_argument('--actor_path',
help='checkpoint training model every # steps')
parser.add_argument('--video_file_name',
help='output file name')
args = parser.parse_args()
# Environment
# env = NormalizedActions(gym.make(args.env_name))
env = gym.make(args.env_name)
env.seed(args.seed)
env.action_space.seed(args.seed)
torch.manual_seed(args.seed)
np.random.seed(args.seed)
# Agent
device = torch.device("cuda" if args.cuda else "cpu")
policy = GaussianPolicy(env.observation_space.shape[0], env.action_space.shape[0],
args.hidden_size, env.action_space).to(device)
policy.load_state_dict(torch.load(args.actor_path))
def select_action(state):
state = torch.FloatTensor(state).to(device).unsqueeze(0)
_, _, action = policy.sample(state)
return action.detach().cpu().numpy()[0]
# Evaluation loop
total_numsteps = 0
avg_reward = 0.
with imageio.get_writer(args.video_file_name, fps=30) as video:
for i_episode in itertools.count(1):
episode_reward = 0
episode_steps = 0
done = False
state = env.reset()
video.append_data(env.render('rgb_array'))
done = False
while not done:
action = select_action(state)
next_state, reward, done, _ = env.step(action)
video.append_data(env.render('rgb_array'))
episode_reward += reward
state = next_state
avg_reward += episode_reward
if i_episode > args.num_episodes:
break
avg_reward /= args.num_episodes
print("----------------------------------------")
print("Test Episodes: {}, Avg. Reward: {}".format(args.num_episodes, round(avg_reward, 2)))
print("----------------------------------------")
env.close()
| en | 0.474423 | # steps') # Environment # env = NormalizedActions(gym.make(args.env_name)) # Agent # Evaluation loop | 1.989962 | 2 |
twilioquest/python/codepath/fizzbuzz.py | greysondn/gamesolutions | 0 | 6615886 | # TwilioQuest version 3.1.26
# Works in:
# 3.1.26
import argparse
# standard main function
def main():
    """CLI entry point: read integers from argv and print fizzbuzz for each."""
    parser = argparse.ArgumentParser(
        description="Twilioquest: Python: Fizzbuzz Challenge"
    )
    parser.add_argument(
        "nums",
        metavar="number",
        nargs="+",
        type=int,
        help="A number or numbers to fizzbuzz"
    )
    args = parser.parse_args()
    for value in args.nums:
        # Decide by divisibility; the combined case is tested first so
        # multiples of 15 are not misreported as plain "fizz" or "buzz".
        div3 = (value % 3) == 0
        div5 = (value % 5) == 0
        if div3 and div5:
            print("fizzbuzz")
        elif div3:
            print("fizz")
        elif div5:
            print("buzz")
        else:
            print(f"{value}")
# standard main guard
if ("__main__" == __name__):
    # Invoke the CLI only when executed as a script, not on import.
    main()
# Works in:
# 3.1.26
import argparse
# standard main function
def main():
# Note that this uses code from a previous lesson as boilerplate.
parser = argparse.ArgumentParser(
description="Twilioquest: Python: Fizzbuzz Challenge"
)
parser.add_argument(
"nums",
metavar="number",
nargs="+",
type=int,
help="A number or numbers to fizzbuzz"
)
args = parser.parse_args()
for num in args.nums:
# there's a few ways to do this, and I'm just taking one that's semantically
# clear because I have tutoring students.
three = False
five = False
# check if it's divisible by either number
if ((num % 3) == 0):
three = True
if ((num % 5) == 0):
five = True
# stack conditions in a specific order.
if (three and five):
print("fizzbuzz")
elif(three):
print("fizz")
elif(five):
print("buzz")
else:
# note the use of an fstring here!
print(f"{num}")
# standard main guard
if ("__main__" == __name__):
main() | en | 0.898897 | # TwilioQuest version 3.1.26 # Works in: # 3.1.26 # standard main function # Note that this uses code from a previous lesson as boilerplate. # there's a few ways to do this, and I'm just taking one that's semantically # clear because I have tutoring students. # check if it's divisible by either number # stack conditions in a specific order. # note the use of an fstring here! # standard main guard | 3.70721 | 4 |
src/utils_date.py | philip-papasavvas/PythonSkills | 2 | 6615887 | """
Created 17 June 2020
Generic utility methods for handling dates
"""
import datetime
import numpy as np
def np_dt_to_str(d: np.datetime64) -> str:
    """Render a numpy datetime64 as its ISO text with the hyphens stripped (YYYYMMDD)."""
    iso_text = d.astype(str)
    return iso_text.replace("-", "")
def excel_date_to_np(xl_date):
    """Excel date serial (as int) to numpy datetime (1-element datetime64[D] array)."""
    excel_epoch = np.array(['1899-12-30'], dtype='datetime64[D]')
    return excel_epoch + xl_date
def date_to_excel(pdate):
    """Convert a datetime-like value to an Excel date serial number.

    Accepts a ``datetime.datetime`` or a pandas ``Timestamp`` (converted via
    ``to_pydatetime``) and returns the integer serial relative to Excel's
    1899-12-30 epoch, truncating partial days.

    BUGFIX: the original called ``pdate.to_datetime()`` and
    ``timedelta.days.astype(float)``; neither attribute exists on modern
    pandas Timestamps / stdlib timedeltas, so the function raised
    AttributeError for every input.
    """
    if hasattr(pdate, "to_pydatetime"):
        pdate = pdate.to_pydatetime()
    delta = pdate - datetime.datetime(1899, 12, 30)
    return int(delta.days + delta.seconds / 86400)
def time_delta_to_days(td):
    """Returns the day difference of a pandas series of timedelta64[ns] as an int array."""
    one_day = np.timedelta64(1, 'D')
    return (td.values / one_day).astype(int)
def datetime_to_str(input_date: datetime.datetime) -> str:
    """Format a datetime object as a compact YYYYMMDD string."""
    return input_date.strftime("%Y%m%d")
if __name__ == "__main__":
    # Import-only utility module: nothing to run as a script.
    pass
| """
Created 17 June 2020
Generic utility methods for handling dates
"""
import datetime
import numpy as np
def np_dt_to_str(d: np.datetime64) -> str:
"""Convert from np.datetime64 to str without hyphens"""
return d.astype(str).replace("-", "")
def excel_date_to_np(xl_date):
"""Excel date serial (as int) to numpy datetime"""
return np.array(['1899-12-30'], dtype='datetime64[D]') + xl_date
def date_to_excel(pdate):
"""converts datetime to Excel date serial"""
delta = pdate.to_datetime() - datetime.datetime(1899, 12, 30)
return (delta.days.astype(float) + delta.seconds.astype(float) / 86400).astype(int)
def time_delta_to_days(td):
"""Returns the day difference of a pandas series of timedelta64[ns]"""
return (td.values / np.timedelta64(1, 'D')).astype(int)
def datetime_to_str(input_date: datetime.datetime):
"""Method to extract date in YYYYMMDD from datetime object"""
return datetime.datetime.strftime(input_date, format="%Y%m%d")
if __name__ == "__main__":
pass
| en | 0.750128 | Created 17 June 2020 Generic utility methods for handling dates Convert from np.datetime64 to str without hyphens Excel date serial (as int) to numpy datetime converts datetime to Excel date serial Returns the day difference of a pandas series of timedelta64[ns] Method to extract date in YYYYMMDD from datetime object | 3.396386 | 3 |
hash_maps/ransom_note.py | franloza/hackerrank | 0 | 6615888 | #!/bin/python3
import os
from collections import Counter
# Complete the checkMagazine function below.
def checkMagazine(magazine, note):
    """Return "Yes" if every word of *note* can be taken (with multiplicity)
    from *magazine*'s words, otherwise "No"."""
    available = Counter(magazine.split(' '))
    for token in note.split(' '):
        # Counter yields 0 for unseen words, so a missing or exhausted
        # word immediately fails the note.
        if available[token] <= 0:
            return "No"
        available[token] -= 1
    return "Yes"
# Read from input
if __name__ == '__main__':
    # HackerRank harness: the verdict is written to the file named by OUTPUT_PATH.
    fptr = open(os.environ['OUTPUT_PATH'], 'w')
    mn = input().split()
    m = int(mn[0])  # declared word count of the magazine (unused by checkMagazine)
    n = int(mn[1])  # declared word count of the note (unused by checkMagazine)
    magazine = input().rstrip()
    note = input().rstrip()
    res = checkMagazine(magazine, note)
    fptr.write(str(res) + '\n')
    fptr.close()
# Toy case
if __name__ == '__main__':
    # Quick manual check against the sample input; expected output: "Yes".
    magazine = "give me one grand today night"
    note = "give one grand today"
    print(checkMagazine(magazine, note)) # Yes
import os
from collections import Counter
# Complete the checkMagazine function below.
def checkMagazine(magazine, note):
c = Counter(magazine.split(' '))
for word in note.split(' '):
count = c.get(word, 0)
if count == 0:
return "No"
else:
c[word] -= 1
return "Yes"
# Read from input
if __name__ == '__main__':
fptr = open(os.environ['OUTPUT_PATH'], 'w')
mn = input().split()
m = int(mn[0])
n = int(mn[1])
magazine = input().rstrip()
note = input().rstrip()
res = checkMagazine(magazine, note)
fptr.write(str(res) + '\n')
fptr.close()
# Toy case
if __name__ == '__main__':
magazine = "give me one grand today night"
note = "give one grand today"
print(checkMagazine(magazine, note)) # Yes | en | 0.541868 | #!/bin/python3 # Complete the checkMagazine function below. # Read from input # Toy case # Yes | 3.566288 | 4 |
student.py | Pranay221/Library-Management-System | 1 | 6615889 | from tkinter import *
from tkinter import ttk, messagebox
import backend
#This file creates a GUI for student user, which will be used in loginBackend file
class student:
    """Tkinter frame implementing the student view of the library system.

    A student can list all books, search by title/year/author/ISBN, issue
    the book selected in the list, or request a new one.  All persistence
    is delegated to the ``backend`` module.
    """

    def __init__(self, window):
        self.window = window
        self.frame = Frame(self.window, bg="orange", width='700', height='400')
        self.label = Label(self.frame, text='Student User', bg='Orange', font=('Georgia', 30, 'bold'))
        self.label.place(x=20, y=20, width=400, height=50)
        # Field captions.
        self.label_title = Label(self.frame, text='TITLE', bg='orange', font=('Georgia', 14, 'bold'))
        self.label_title.place(x=20, y=100, width=100, height=50)
        self.label_year = Label(self.frame, text='YEAR', bg='orange', font=('Georgia', 14, 'bold'))
        self.label_year.place(x=20, y=150, width=100, height=30)
        self.label_author = Label(self.frame, text='AUTHOR', bg='orange', font=('Georgia', 14, 'bold'))
        self.label_author.place(x=350, y=100, width=100, height=30)
        self.label_isbn = Label(self.frame, text='ISBN', bg='orange', font=('Georgia', 14, 'bold'))
        self.label_isbn.place(x=350, y=150, width=100, height=30)
        # Entry widgets bound to StringVars so values can be read programmatically.
        self.title_text = StringVar()
        self.entry_title = Entry(self.frame, fg='gray', textvariable=self.title_text, width=25, font=('Arial', 12, 'bold'))
        self.entry_title.place(x=120, y=100, width=150, height=30)
        self.year_text = StringVar()
        self.entry_year = Entry(self.frame, fg='gray', textvariable=self.year_text, width=25, font=('Arial', 12, 'bold'))
        self.entry_year.place(x=120, y=150, width=150, height=30)
        self.author_text = StringVar()
        self.entry_author = Entry(self.frame, fg='gray', textvariable=self.author_text, width=25, font=('Arial', 12, 'bold'))
        self.entry_author.place(x=470, y=100, width=150, height=30)
        self.isbn_text = StringVar()
        self.entry_isbn = Entry(self.frame, fg='gray', textvariable=self.isbn_text, width=25, font=('Arial', 12, 'bold'))
        self.entry_isbn.place(x=470, y=150, width=150, height=30)
        # Result list for view/search output; its selection feeds issue_command.
        self.listbox = Listbox(self.frame)
        self.listbox.place(x=100, y=200, width=500, height=100)
        self.button_view = Button(self.frame, text='View All', command=self.view_command)
        self.button_view.place(x=100, y=320, width=100, height=40)
        self.button_search = Button(self.frame, text='Search ', command=self.search_command)
        self.button_search.place(x=200, y=320, width=100, height=40)
        self.button_issue = Button(self.frame, text='Issue', command=self.issue_command)
        self.button_issue.place(x=300, y=320, width=100, height=40)
        self.button_request = Button(self.frame, text='Request', command=self.request_command)
        self.button_request.place(x=400, y=320, width=100, height=40)
        # BUGFIX: this button was previously assigned to ``self.button_issue``
        # as well, silently overwriting the reference to the Issue button above.
        self.button_clear = Button(self.frame, text='Clear Fields', command=self.clear_command)
        self.button_clear.place(x=500, y=320, width=100, height=40)
        self.frame.pack()

    def clear_command(self):
        """Blank out all four entry fields."""
        self.entry_title.delete(0, END)
        self.entry_year.delete(0, END)
        self.entry_author.delete(0, END)
        self.entry_isbn.delete(0, END)

    def request_command(self):
        """Submit the entered book details as a request and echo them in the list."""
        backend.request_insert(self.title_text.get(), self.author_text.get(), self.year_text.get(), self.isbn_text.get())
        self.listbox.delete(0, END)
        self.listbox.insert(END, (self.title_text.get(), self.author_text.get(), self.year_text.get(), self.isbn_text.get()))

    def issue_command(self):
        """Issue the book currently selected in the listbox.

        Copies the selected row into the entry fields and records the issue
        via the backend using the row's first element as the book id.
        (Assumes rows are (id, title, year, author, isbn) -- matches the
        indexing below; verify against backend.view().)
        """
        selected_tuple = self.listbox.curselection()
        value = self.listbox.get(selected_tuple)
        self.entry_title.delete(0, END)
        self.entry_title.insert(END, value[1])
        self.entry_year.delete(0, END)
        self.entry_year.insert(END, value[2])
        self.entry_author.delete(0, END)
        self.entry_author.insert(END, value[3])
        self.entry_isbn.delete(0, END)
        self.entry_isbn.insert(END, value[4])
        backend.issue_insert(value[0])

    def view_command(self):
        """Show every book known to the backend."""
        self.listbox.delete(0, END)
        for row in backend.view():
            self.listbox.insert(END, row)

    def search_command(self):
        """Show books matching the entered title/author/year/ISBN fields."""
        self.listbox.delete(0, END)
        for row in backend.search(self.title_text.get(), self.author_text.get(), self.year_text.get(), self.isbn_text.get()):
            self.listbox.insert(END, row)
'''window = Tk()
window.title('Student_User')
window.geometry('700x400')
obj = student(window)
window.mainloop()'''
| from tkinter import *
from tkinter import ttk, messagebox
import backend
#This file creates a GUI for student user, which will be used in loginBackend file
class student:
def __init__(self, window):
self.window = window
self.frame = Frame(self.window, bg="orange", width='700', height='400')
self.label = Label(self.frame, text='Student User', bg='Orange', font=('Georgia', 30, 'bold'))
self.label.place(x=20, y=20, width=400, height=50)
self.label_title = Label(self.frame, text='TITLE', bg='orange', font=('Georgia', 14, 'bold'))
self.label_title.place(x=20, y=100, width=100, height=50)
self.label_year = Label(self.frame, text='YEAR', bg='orange', font=('Georgia', 14, 'bold'))
self.label_year.place(x=20, y=150, width=100, height=30)
self.label_author = Label(self.frame, text='AUTHOR', bg='orange', font=('Georgia', 14, 'bold'))
self.label_author.place(x=350, y=100, width=100, height=30)
self.label_isbn = Label(self.frame, text='ISBN', bg='orange', font=('Georgia', 14, 'bold'))
self.label_isbn.place(x=350, y=150, width=100, height=30)
self.title_text = StringVar()
self.entry_title = Entry(self.frame, fg='gray', textvariable=self.title_text, width=25, font=('Arial', 12, 'bold'))
self.entry_title.place(x=120, y=100, width=150, height=30)
self.year_text = StringVar()
self.entry_year = Entry(self.frame, fg='gray', textvariable=self.year_text, width=25, font=('Arial', 12, 'bold'))
self.entry_year.place(x=120, y=150, width=150, height=30)
self.author_text = StringVar()
self.entry_author = Entry(self.frame, fg='gray', textvariable=self.author_text, width=25, font=('Arial', 12, 'bold'))
self.entry_author.place(x=470, y=100, width=150, height=30)
self.isbn_text = StringVar()
self.entry_isbn = Entry(self.frame, fg='gray', textvariable=self.isbn_text, width=25, font=('Arial', 12, 'bold'))
self.entry_isbn.place(x=470, y=150, width=150, height=30)
self.listbox = Listbox(self.frame)
self.listbox.place(x=100, y=200, width=500, height=100)
self.button_view = Button(self.frame, text='View All', command=self.view_command)
self.button_view.place(x=100, y=320, width=100, height=40)
self.button_search = Button(self.frame, text='Search ', command=self.search_command)
self.button_search.place(x=200, y=320, width=100, height=40)
self.button_issue = Button(self.frame, text='Issue', command=self.issue_command)
self.button_issue.place(x=300, y=320, width=100, height=40)
self.button_request = Button(self.frame, text='Request', command=self.request_command)
self.button_request.place(x=400, y=320, width=100, height=40)
self.button_issue = Button(self.frame, text='Clear Fields', command=self.clear_command)
self.button_issue.place(x=500, y=320, width=100, height=40)
self.frame.pack()
def clear_command(self):
self.entry_title.delete(0, END)
self.entry_year.delete(0, END)
self.entry_author.delete(0, END)
self.entry_isbn.delete(0, END)
def request_command(self):
backend.request_insert(self.title_text.get(), self.author_text.get(), self.year_text.get(),self.isbn_text.get())
self.listbox.delete(0, END)
self.listbox.insert(END,(self.title_text.get(), self.author_text.get(), self.year_text.get(), self.isbn_text.get()))
def issue_command(self):
selected_tuple = self.listbox.curselection()
value = self.listbox.get(selected_tuple)
self.entry_title.delete(0, END)
self.entry_title.insert(END, value[1])
self.entry_year.delete(0, END)
self.entry_year.insert(END, value[2])
self.entry_author.delete(0, END)
self.entry_author.insert(END, value[3])
self.entry_isbn.delete(0, END)
self.entry_isbn.insert(END, value[4])
backend.issue_insert(value[0])
def view_command(self):
self.listbox.delete(0, END)
for row in backend.view():
self.listbox.insert(END, row)
def search_command(self):
self.listbox.delete(0, END)
for row in backend.search(self.title_text.get(), self.author_text.get(), self.year_text.get(),self.isbn_text.get()):
self.listbox.insert(END, row)
'''window = Tk()
window.title('Student_User')
window.geometry('700x400')
obj = student(window)
window.mainloop()'''
| en | 0.622881 | #This file creates a GUI for student user, which will be used in loginBackend file window = Tk() window.title('Student_User') window.geometry('700x400') obj = student(window) window.mainloop() | 3.551688 | 4 |
app/api/helpers/auth.py | akashtalole/python-flask-restful-api | 3 | 6615890 | import datetime
import pytz
import flask_login as login
from flask_login import current_user
from app.models import db
from app.models.user import User
from app.models.user_token_blacklist import UserTokenBlackListTime
class AuthManager:
    """Static helpers for Flask-Login wiring and credential checks."""

    def __init__(self):
        pass

    @staticmethod
    def init_login(app):
        """Attach a Flask-Login manager to *app* with user loading and an
        unauthorized-redirect handler."""
        from flask import request, url_for, redirect
        login_manager = login.LoginManager()
        login_manager.init_app(app)
        # Create user loader function
        @login_manager.user_loader
        def load_user(user_id):
            return db.session.query(User).get(user_id)

        @login_manager.unauthorized_handler
        def unauthorized():
            # Bounce unauthenticated users to the admin login page,
            # preserving the originally requested URL in ``next``.
            return redirect(url_for('admin.login_view', next=request.url))

    @staticmethod
    def is_verified_user():
        # True when the currently logged-in user has a verified account.
        return current_user.is_verified

    @staticmethod
    def is_accessible():
        # True when any user is logged in.
        return current_user.is_authenticated

    @staticmethod
    def check_auth_admin(username, password):
        # This function is called to check for proper authentication & admin rights
        if username and password:
            user = User.query.filter_by(_email=username).first()
            if user and user.is_correct_password(password) and user.is_admin:
                return True
        return False
def blacklist_token(user):
    """Record "now" (UTC) as the token-blacklist cut-off for *user*.

    Tokens issued before this moment are treated as revoked by
    ``is_token_blacklisted``.
    """
    entry = UserTokenBlackListTime.query.filter_by(user_id=user.id).first()
    if entry is None:
        # First revocation for this user: create the tracking row.
        entry = UserTokenBlackListTime(user.id)
        db.session.add(entry)
    else:
        entry.blacklisted_at = datetime.datetime.now(pytz.utc)
    db.session.commit()
def is_token_blacklisted(token):
    """Return True when *token* was issued before the user's blacklist cut-off."""
    entry = UserTokenBlackListTime.query.filter_by(user_id=token['identity']).first()
    # No blacklist row means the user never revoked anything.
    return bool(entry) and token['iat'] < entry.blacklisted_at.timestamp()
| import datetime
import pytz
import flask_login as login
from flask_login import current_user
from app.models import db
from app.models.user import User
from app.models.user_token_blacklist import UserTokenBlackListTime
class AuthManager:
def __init__(self):
pass
@staticmethod
def init_login(app):
from flask import request, url_for, redirect
login_manager = login.LoginManager()
login_manager.init_app(app)
# Create user loader function
@login_manager.user_loader
def load_user(user_id):
return db.session.query(User).get(user_id)
@login_manager.unauthorized_handler
def unauthorized():
return redirect(url_for('admin.login_view', next=request.url))
@staticmethod
def is_verified_user():
return current_user.is_verified
@staticmethod
def is_accessible():
return current_user.is_authenticated
@staticmethod
def check_auth_admin(username, password):
# This function is called to check for proper authentication & admin rights
if username and password:
user = User.query.filter_by(_email=username).first()
if user and user.is_correct_password(password) and user.is_admin:
return True
return False
def blacklist_token(user):
blacklist_time = UserTokenBlackListTime.query.filter_by(user_id=user.id).first()
if blacklist_time:
blacklist_time.blacklisted_at = datetime.datetime.now(pytz.utc)
else:
blacklist_time = UserTokenBlackListTime(user.id)
db.session.add(blacklist_time)
db.session.commit()
def is_token_blacklisted(token):
blacklist_time = UserTokenBlackListTime.query.filter_by(user_id=token['identity']).first()
if not blacklist_time:
return False
return token['iat'] < blacklist_time.blacklisted_at.timestamp()
| en | 0.835522 | # Create user loader function # This function is called to check for proper authentication & admin rights | 2.484887 | 2 |
app/messageHandler.py | MordorDie/vk-schedule-bot | 1 | 6615891 | from app import vkapi
import os
import importlib
from app.command_system import command_list
from app.scheduledb import ScheduleDB
import difflib
def load_modules():
    """Import every command module found in app/commands.

    The directory path is relative to the working directory (configurable
    in the application settings).
    """
    for entry in os.listdir("app/commands"):
        if entry.endswith('.py'):
            importlib.import_module("app.commands." + entry[:-3])
def get_answer(uid, body):
    """Fuzzy-match *body* against the registered commands and return the reply.

    Every command keyword is compared (via difflib) both to the first word
    of the message and to the whole message.  A near-exact match
    (ratio >= 0.95) is executed immediately; otherwise the best match above
    0.5 is executed with a "recognized as" prefix.
    """
    first_word, _, remainder = body.partition(' ')
    # Default reply when the request cannot be recognized.
    message = "Не удалось распознать запрос. Напишите 'помощь', чтобы узнать доступные команды"
    best_ratio = 0
    best_command = None
    best_key = ''
    for cmd in command_list:
        for keyword in cmd.keys:
            ratio = max(
                difflib.SequenceMatcher(None, keyword, first_word).ratio(),
                difflib.SequenceMatcher(None, keyword, body).ratio(),
            )
            if ratio > best_ratio:
                best_ratio = ratio
                best_command = cmd
                best_key = keyword
                if ratio >= 0.95:
                    # Near-exact match: run the command right away.
                    return cmd.process(uid, keyword, remainder)
    if best_ratio > 0.5:
        reply = best_command.process(uid, best_key, remainder)
        return 'Ваш запрос распознан как: {}\n\n{}'.format(best_key, reply)
    return message
def create_answer(data, token):
    """Handle one incoming VK message event and send the bot's reply."""
    load_modules()
    sender = data['user_id']
    reply = get_answer(sender, data['body'].lower())
    vkapi.send_message(sender, token, reply)
| from app import vkapi
import os
import importlib
from app.command_system import command_list
from app.scheduledb import ScheduleDB
import difflib
def load_modules():
# путь от рабочей директории, ее можно изменить в настройках приложения
files = os.listdir("app/commands")
modules = filter(lambda x: x.endswith('.py'), files)
for m in modules:
importlib.import_module("app.commands." + m[0:-3])
def get_answer(uid, body):
data = body.split(' ', maxsplit=1)
user_command = data[0]
arg = ""
if len(data) == 2:
arg = data[1]
# Сообщение по умолчанию если распознать не удастся
message = "Не удалось распознать запрос. Напишите 'помощь', чтобы узнать доступные команды"
max_ratio = 0
command = None
key = ''
for c in command_list:
for k in c.keys:
ratio1 = difflib.SequenceMatcher(None, k, user_command).ratio()
ratio2 = difflib.SequenceMatcher(None, k, body).ratio()
ratio = max(ratio1, ratio2)
if ratio > max_ratio:
max_ratio = ratio
command = c
key = k
if ratio >= 0.95:
message = c.process(uid, key, arg)
return message
if max_ratio > 0.5:
message = command.process(uid, key, arg)
message = 'Ваш запрос распознан как: {}\n\n{}'.format(key, message)
return message
return message
def create_answer(data, token):
load_modules()
user_id = data['user_id']
message = get_answer(user_id, data['body'].lower())
vkapi.send_message(user_id, token, message)
| ru | 0.998012 | # путь от рабочей директории, ее можно изменить в настройках приложения # Сообщение по умолчанию если распознать не удастся | 1.9783 | 2 |
tests/test_fortimail.py | fortinet-solutions-cse/fortimailapi | 0 | 6615892 | #!/usr/bin/python
import pytest
import logging
import fortimailapi
from packaging.version import Version
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("fortimailapi")
@pytest.fixture
def session(request):
    """Logged-in FortiMail API session (plain HTTP, debug on) shared by the tests.

    NOTE(review): host and credentials are hard-coded; these tests require a
    reachable appliance at 192.168.122.12.
    """
    session = fortimailapi.FortiMailAPI()
    session.debug("on")
    session.https("off")
    res = session.login("192.168.122.12","admin","")
    log.info(res)
    return session
def test_license(session):
    """Smoke test: the license endpoint responds without raising."""
    res = session.license()
    log.info(res)
def test_version(session):
    """Assert the appliance firmware version is at least 5.3.

    BUGFIX: the original compared with ``>``, so a device running exactly
    5.3 fell into the failure branch labelled "older than 5.3"; ``>=``
    matches the stated intent.
    """
    ver = session.get_version()
    if Version(ver) >= Version('5.3'):
        log.info("Version is 5.3 or later")
        pass
    else:
        log.error("Version is older than 5.3")
        assert False, "Version is older than 5.3: " + ver
def test_get_empty_domain(session):
    """After deleting dsa.com, fetching it must report 'not found' (errorType 3)."""
    #Remove, just in case it comes from older execution
    res = session.delete("","dsa.com")
    log.info(res)
    res = session.get("", "dsa.com")
    if not (res["errorType"] == 3):
        log.info(res)
        assert False, "Domain dsa.com is not empty, even after deleting"
def test_create_new_domain(session):
    """Create the dsa.com domain via POST and check the echoed identifiers.

    NOTE(review): the payload is a Python-literal-style string (single
    quotes, True/False), not strict JSON; the fortimailapi client is
    presumably responsible for interpreting it -- confirm against its docs.
    """
    data = """{
        'fallbackport':25,
        'disclaimer_outgoing_header_status':False,
        'hide':False,
        'usessl':False,
        'disclaimer_outgoing_body_status':False,
        'recipient_verification_smtp_cmd':0,
        'other_recipient':False,
        'max_user_quota':1000,
        'is_service_domain':False,
        'migration_status':False,
        'max_mailbox':10,
        'mail_access':7,
        'sender_addr_rate_ctrl_max_spam_state':False,
        'recipient_verification_smtp_accept_reply_string_pattern':'',
        'disclaimer_incoming_header_insertion_name':'',
        'relay_auth_status':False,
        'domain_interval':False,
        'system_domain':1,
        'reqAction':1,
        'sender_addr_rate_notification_state':False,
        'disclaimer_incoming_body_location':0,
        'ldap_group_recipient':False,
        'sender_addr_rate_control_max_megabytes':100,
        'ldap_asav_state':False,
        'disclaimer_outgoing_body_location':0,
        'is_subdomain':False,
        'disclaimer_outgoing_header_insertion_name':'',
        'original':False,
        'sender_addr_rate_ctrl_max_spam':5,
        'disclaimer_outgoing_header_insertion_value':'',
        'relay_ip_pool_port':25,
        'relay_ip_pool_ssl':False,
        'sender_addr_rate_ctrl_max_recipients':60,
        'recipient_verification':0,
        'relay_auth_username':'',
        'other_address':'',
        'disclaimer_incoming_header_insertion_value':'',
        'disclaimer_incoming_body_content':'',
        'disclaimer_incoming_body_content_html':'',
        'fallbackhost':'fhtrtt.vom',
        'rcptvrfy_try_mhost':False,
        'relay_auth_type':0,
        'sender_addr_rate_ctrl_exempt':[
        ],
        'sender_addr_rate_ctrl_max_recipients_state':False,
        'ldap_generic_routing_ssl':False,
        'hours':0,
        'port':25,
        'objectID':'DomainSetting:{D:dsa.com}',
        'sender_addr_rate_control_max_megabytes_state':False,
        'recipient_verification_background':0,
        'relay_auth_password':'******',
        'group_recipient_only':False,
        'max_msg_size':204800,
        'alt_smtp_ena':False,
        'ip':'mysmtp',
        'bypass_bounce_verification':False,
        'ldap_routing_state':False,
        'sender_addr_rate_ctrl_action':512,
        'alternative_domain_name':'',
        'maindomain':'dsa.com',
        'ldap_generic_routing_port':25,
        'addressbook_add_option':2,
        'disclaimer_outgoing_body_content':'',
        'alt_smtp_ssl':False,
        'disclaimer_incoming_header_status':False,
        'remove_outgoing_header':False,
        'sender_addr_rate_control_state':False,
        'mdomain':'dsa.com',
        'sender_addr_rate_control_max_messages_state':True,
        'sender_addr_rate_control_max_messages':30,
        'alt_smtp_port':25,
        'report_template_name':'default',
        'global_bayesian':True,
        'fallbackusessl':False,
        'days':0,
        'disclaimer_status':1,
        'disclaimer_incoming_body_status':False,
        'mxflag':0,
        'ip_pool_direction':1,
        'default_language':'',
        'other_greeting':'',
        'alt_smtp_host':'',
        'domain_recipient':True,
        'ldap_service_status':True,
        'default_theme':8,
        'webmail_service_type':0,
        'disclaimer_outgoing_body_content_html':'',
        'group_exclude_individual':False,
        'domain_report':False
        }"""
    res = session.post("", "dsa.com", data)
    if not (res["mdomain"] == "dsa.com" and
            res["objectID"] == "DomainSetting:{D:dsa.com}"):
        log.info(res)
        assert 0, "Domain dsa.com was not created correctly"
def test_get_domain(session):
    """Fetch dsa.com (created by the previous test) and verify its identifiers."""
    res = session.get("", "dsa.com")
    if not (res["mdomain"] == "dsa.com" and
            res["objectID"] == "DomainSetting:{D:dsa.com}"):
        log.info(res)
        assert 0, "Domain dsa.com was not created correctly"
def test_change_attribute_in_domain(session):
    """PUT a new max_msg_size on dsa.com and verify it persists via GET.

    BUGFIX: the failure branches concatenated the assertion message with
    ``res["max_msg_size"]`` directly; the API returns that field as an int,
    so ``str + int`` raised TypeError instead of the intended assertion.
    """
    payload = """{
    "objectID": "DomainSetting:{D:dsa.com}",
    "mdomain": "dsa.com",
    "max_msg_size": 10800}"""
    res = session.put("", "dsa.com", payload)
    if res["max_msg_size"] != 10800:
        log.info(res)
        assert 0, "Max_msg_size was not changed after put operation:" + \
            str(res["max_msg_size"])
    res = session.get("", "dsa.com")
    if res["max_msg_size"] != 10800:
        log.info(res)
        assert 0, "Max_msg_size was not changed after put & get operation:" + \
            str(res["max_msg_size"])
def test_delete_domain(session):
    """Delete dsa.com and verify a subsequent GET no longer succeeds (errorType != 0)."""
    res = session.delete("","dsa.com")
    if res["errorType"] != 0:
        log.info(res)
        assert False, "Domain dsa.com can not be removed"
    res = session.get("", "dsa.com")
    if res["errorType"] == 0:
        log.info(res)
        assert False, "Domain dsa.com can not be removed"
| #!/usr/bin/python
import pytest
import logging
import fortimailapi
from packaging.version import Version
logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger("fortimailapi")
@pytest.fixture
def session(request):
session = fortimailapi.FortiMailAPI()
session.debug("on")
session.https("off")
res = session.login("192.168.122.12","admin","")
log.info(res)
return session
def test_license(session):
res = session.license()
log.info(res)
def test_version(session):
ver = session.get_version()
if Version(ver) > Version('5.3'):
log.info("Version is later than 5.3")
pass
else:
log.error ("Version is older than 5.3")
assert False, "Version is older than 5.3: "+ver
def test_get_empty_domain(session):
#Remove, just in case it comes from older execution
res = session.delete("","dsa.com")
log.info(res)
res = session.get("", "dsa.com")
if not (res["errorType"] == 3):
log.info(res)
assert False, "Domain dsa.com is not empty, even after deleting"
def test_create_new_domain(session):
data = """{
'fallbackport':25,
'disclaimer_outgoing_header_status':False,
'hide':False,
'usessl':False,
'disclaimer_outgoing_body_status':False,
'recipient_verification_smtp_cmd':0,
'other_recipient':False,
'max_user_quota':1000,
'is_service_domain':False,
'migration_status':False,
'max_mailbox':10,
'mail_access':7,
'sender_addr_rate_ctrl_max_spam_state':False,
'recipient_verification_smtp_accept_reply_string_pattern':'',
'disclaimer_incoming_header_insertion_name':'',
'relay_auth_status':False,
'domain_interval':False,
'system_domain':1,
'reqAction':1,
'sender_addr_rate_notification_state':False,
'disclaimer_incoming_body_location':0,
'ldap_group_recipient':False,
'sender_addr_rate_control_max_megabytes':100,
'ldap_asav_state':False,
'disclaimer_outgoing_body_location':0,
'is_subdomain':False,
'disclaimer_outgoing_header_insertion_name':'',
'original':False,
'sender_addr_rate_ctrl_max_spam':5,
'disclaimer_outgoing_header_insertion_value':'',
'relay_ip_pool_port':25,
'relay_ip_pool_ssl':False,
'sender_addr_rate_ctrl_max_recipients':60,
'recipient_verification':0,
'relay_auth_username':'',
'other_address':'',
'disclaimer_incoming_header_insertion_value':'',
'disclaimer_incoming_body_content':'',
'disclaimer_incoming_body_content_html':'',
'fallbackhost':'fhtrtt.vom',
'rcptvrfy_try_mhost':False,
'relay_auth_type':0,
'sender_addr_rate_ctrl_exempt':[
],
'sender_addr_rate_ctrl_max_recipients_state':False,
'ldap_generic_routing_ssl':False,
'hours':0,
'port':25,
'objectID':'DomainSetting:{D:dsa.com}',
'sender_addr_rate_control_max_megabytes_state':False,
'recipient_verification_background':0,
'relay_auth_password':'******',
'group_recipient_only':False,
'max_msg_size':204800,
'alt_smtp_ena':False,
'ip':'mysmtp',
'bypass_bounce_verification':False,
'ldap_routing_state':False,
'sender_addr_rate_ctrl_action':512,
'alternative_domain_name':'',
'maindomain':'dsa.com',
'ldap_generic_routing_port':25,
'addressbook_add_option':2,
'disclaimer_outgoing_body_content':'',
'alt_smtp_ssl':False,
'disclaimer_incoming_header_status':False,
'remove_outgoing_header':False,
'sender_addr_rate_control_state':False,
'mdomain':'dsa.com',
'sender_addr_rate_control_max_messages_state':True,
'sender_addr_rate_control_max_messages':30,
'alt_smtp_port':25,
'report_template_name':'default',
'global_bayesian':True,
'fallbackusessl':False,
'days':0,
'disclaimer_status':1,
'disclaimer_incoming_body_status':False,
'mxflag':0,
'ip_pool_direction':1,
'default_language':'',
'other_greeting':'',
'alt_smtp_host':'',
'domain_recipient':True,
'ldap_service_status':True,
'default_theme':8,
'webmail_service_type':0,
'disclaimer_outgoing_body_content_html':'',
'group_exclude_individual':False,
'domain_report':False
}"""
res = session.post("", "dsa.com", data)
if not (res["mdomain"] == "dsa.com" and
res["objectID"] == "DomainSetting:{D:dsa.com}"):
log.info(res)
assert 0, "Domain dsa.com was not created correctly"
def test_get_domain(session):
res = session.get("", "dsa.com")
if not (res["mdomain"] == "dsa.com" and
res["objectID"] == "DomainSetting:{D:dsa.com}"):
log.info(res)
assert 0, "Domain dsa.com was not created correctly"
def test_change_attribute_in_domain(session):
payload = """{
"objectID": "DomainSetting:{D:dsa.com}",
"mdomain": "dsa.com",
"max_msg_size": 10800}"""
res = session.put("", "dsa.com", payload)
if res["max_msg_size"] != 10800:
log.info(res)
assert 0, "Max_msg_size was not changed after put operation:" + \
res["max_msg_size"]
res = session.get("", "dsa.com")
if res["max_msg_size"] != 10800:
log.info(res)
assert 0, "Max_msg_size was not changed after put & get operation:" + \
res["max_msg_size"]
def test_delete_domain(session):
res = session.delete("","dsa.com")
if res["errorType"] != 0:
log.info(res)
assert False, "Domain dsa.com can not be removed"
res = session.get("", "dsa.com")
if res["errorType"] == 0:
log.info(res)
assert False, "Domain dsa.com can not be removed"
| en | 0.196495 | #!/usr/bin/python #Remove, just in case it comes from older execution { 'fallbackport':25, 'disclaimer_outgoing_header_status':False, 'hide':False, 'usessl':False, 'disclaimer_outgoing_body_status':False, 'recipient_verification_smtp_cmd':0, 'other_recipient':False, 'max_user_quota':1000, 'is_service_domain':False, 'migration_status':False, 'max_mailbox':10, 'mail_access':7, 'sender_addr_rate_ctrl_max_spam_state':False, 'recipient_verification_smtp_accept_reply_string_pattern':'', 'disclaimer_incoming_header_insertion_name':'', 'relay_auth_status':False, 'domain_interval':False, 'system_domain':1, 'reqAction':1, 'sender_addr_rate_notification_state':False, 'disclaimer_incoming_body_location':0, 'ldap_group_recipient':False, 'sender_addr_rate_control_max_megabytes':100, 'ldap_asav_state':False, 'disclaimer_outgoing_body_location':0, 'is_subdomain':False, 'disclaimer_outgoing_header_insertion_name':'', 'original':False, 'sender_addr_rate_ctrl_max_spam':5, 'disclaimer_outgoing_header_insertion_value':'', 'relay_ip_pool_port':25, 'relay_ip_pool_ssl':False, 'sender_addr_rate_ctrl_max_recipients':60, 'recipient_verification':0, 'relay_auth_username':'', 'other_address':'', 'disclaimer_incoming_header_insertion_value':'', 'disclaimer_incoming_body_content':'', 'disclaimer_incoming_body_content_html':'', 'fallbackhost':'fhtrtt.vom', 'rcptvrfy_try_mhost':False, 'relay_auth_type':0, 'sender_addr_rate_ctrl_exempt':[ ], 'sender_addr_rate_ctrl_max_recipients_state':False, 'ldap_generic_routing_ssl':False, 'hours':0, 'port':25, 'objectID':'DomainSetting:{D:dsa.com}', 'sender_addr_rate_control_max_megabytes_state':False, 'recipient_verification_background':0, 'relay_auth_password':'******', 'group_recipient_only':False, 'max_msg_size':204800, 'alt_smtp_ena':False, 'ip':'mysmtp', 'bypass_bounce_verification':False, 'ldap_routing_state':False, 'sender_addr_rate_ctrl_action':512, 'alternative_domain_name':'', 'maindomain':'dsa.com', 'ldap_generic_routing_port':25, 
'addressbook_add_option':2, 'disclaimer_outgoing_body_content':'', 'alt_smtp_ssl':False, 'disclaimer_incoming_header_status':False, 'remove_outgoing_header':False, 'sender_addr_rate_control_state':False, 'mdomain':'dsa.com', 'sender_addr_rate_control_max_messages_state':True, 'sender_addr_rate_control_max_messages':30, 'alt_smtp_port':25, 'report_template_name':'default', 'global_bayesian':True, 'fallbackusessl':False, 'days':0, 'disclaimer_status':1, 'disclaimer_incoming_body_status':False, 'mxflag':0, 'ip_pool_direction':1, 'default_language':'', 'other_greeting':'', 'alt_smtp_host':'', 'domain_recipient':True, 'ldap_service_status':True, 'default_theme':8, 'webmail_service_type':0, 'disclaimer_outgoing_body_content_html':'', 'group_exclude_individual':False, 'domain_report':False } { "objectID": "DomainSetting:{D:dsa.com}", "mdomain": "dsa.com", "max_msg_size": 10800} | 2.018002 | 2 |
learn_django/youtube_toturial/website/music/views.py | challow0/learn_python | 0 | 6615893 | # # from django.http import Http404
# # from django.http import HttpResponse
# # from django.template import loader
# from django.shortcuts import render,get_object_or_404
# from .models import Album,Song
#
#
#
# # def index(request):
# # all_albums = Album.objects.all()
# # html = ''
# # for album in all_albums:
# # url = '/music/' + str(album.id) + '/'
# # html += '<a href="' + url + '">' + album.album_title + '</a><br>'
# # return HttpResponse(html)
#
# # return HttpResponse("<h1>There will be a list of albums</h1>")
#
# # def index(request):
# # all_albums = Album.objects.all()
# # template = loader.get_template('music/index.html')
# # context = {
# # 'all_albums': all_albums,
# # }
# # return HttpResponse(template.render(context, request))
#
# def index(request):
# all_albums = Album.objects.all()
# context = {'all_albums': all_albums}
# return render(request,'music/index.html',context)
#
# # def detail(request, album_id):
# # return HttpResponse("<h2>Details for album id:" + str(album_id) + "</h2>")
#
# def detail(request, album_id):
# # try:
# # album = Album.objects.get(pk=album_id)
# # except Album.DoesNotExist:
# # raise Http404("Album does not exist")
# album = get_object_or_404(Album,pk=album_id)
# return render(request, 'music/detail.html', {'album': album})
#
# def favorite(request, album_id):
# album = get_object_or_404(Albummpk=album_id)
# try:
# selected_song = album.song_set.get(pk=request.POST['post'])
# except (KeyError,Song.DoesNotExist):
# return render(request, 'music/detail.html',{
# 'album':album,
# 'error_message':"You did not select a valid song",
# })
# else:
# selected_song.is_favorite = True
# selected_song.save()
# return render(request, 'music/detail.html', {'album': album})
from django.urls import reverse, reverse_lazy
from django.views import generic
from django.views.generic.edit import CreateView, UpdateView, DeleteView

from .models import Album
class IndexView(generic.ListView):
    # Lists all albums; ListView exposes the queryset to the template
    # under its default context names.
    template_name = 'music/index.html'
    def get_queryset(self):
        # Re-evaluated on every request so newly added albums appear.
        return Album.objects.all()
class DetailView(generic.DetailView):
    """Read-only detail page for a single Album.

    Bug fix: the original subclassed generic.DeleteView (a copy/paste
    slip), which turns GET into a delete-confirmation flow and makes
    POST delete the album; a detail page must use generic.DetailView.
    """
    model = Album
    template_name = 'music/detail.html'
class AlbumCreate(CreateView):
    # Model-form view for adding an Album; with no success_url Django
    # redirects to the new object's get_absolute_url().
    # NOTE(review): 'alnum_logo' looks like a typo for 'album_logo' —
    # confirm against the Album model's actual field names.
    model = Album
    fields = ('artist', 'album_title','genre','alnum_logo')
class AlbumUpdate(UpdateView):
    # Edit view for an existing Album; same field set as AlbumCreate
    # (declared as a list here vs a tuple there — functionally equivalent).
    # NOTE(review): 'alnum_logo' — see the typo note on AlbumCreate.
    model = Album
    fields = ['artist', 'album_title','genre','alnum_logo']
class AlbumDelete(DeleteView):
    """Confirm-and-delete view for an Album.

    Bug fix: success_url used reverse(), which executes at import time,
    before the URLconf is loaded, and raises ImproperlyConfigured;
    reverse_lazy defers URL resolution until the redirect is issued.
    """
    model = Album
    success_url = reverse_lazy('music:index')
| # # from django.http import Http404
# # from django.http import HttpResponse
# # from django.template import loader
# from django.shortcuts import render,get_object_or_404
# from .models import Album,Song
#
#
#
# # def index(request):
# # all_albums = Album.objects.all()
# # html = ''
# # for album in all_albums:
# # url = '/music/' + str(album.id) + '/'
# # html += '<a href="' + url + '">' + album.album_title + '</a><br>'
# # return HttpResponse(html)
#
# # return HttpResponse("<h1>There will be a list of albums</h1>")
#
# # def index(request):
# # all_albums = Album.objects.all()
# # template = loader.get_template('music/index.html')
# # context = {
# # 'all_albums': all_albums,
# # }
# # return HttpResponse(template.render(context, request))
#
# def index(request):
# all_albums = Album.objects.all()
# context = {'all_albums': all_albums}
# return render(request,'music/index.html',context)
#
# # def detail(request, album_id):
# # return HttpResponse("<h2>Details for album id:" + str(album_id) + "</h2>")
#
# def detail(request, album_id):
# # try:
# # album = Album.objects.get(pk=album_id)
# # except Album.DoesNotExist:
# # raise Http404("Album does not exist")
# album = get_object_or_404(Album,pk=album_id)
# return render(request, 'music/detail.html', {'album': album})
#
# def favorite(request, album_id):
# album = get_object_or_404(Albummpk=album_id)
# try:
# selected_song = album.song_set.get(pk=request.POST['post'])
# except (KeyError,Song.DoesNotExist):
# return render(request, 'music/detail.html',{
# 'album':album,
# 'error_message':"You did not select a valid song",
# })
# else:
# selected_song.is_favorite = True
# selected_song.save()
# return render(request, 'music/detail.html', {'album': album})
from django.views import generic
from django.views.generic.edit import CreateView,UpdateView,DeleteView
from django.urls import reverse
from .models import Album
class IndexView(generic.ListView):
template_name = 'music/index.html'
def get_queryset(self):
return Album.objects.all()
class DetailView(generic.DeleteView):
model = Album
template_name = 'music/detail.html'
class AlbumCreate(CreateView):
model = Album
fields = ('artist', 'album_title','genre','alnum_logo')
class AlbumUpdate(UpdateView):
model = Album
fields = ['artist', 'album_title','genre','alnum_logo']
class AlbumDelete(DeleteView):
model = Album
success_url = reverse('music:index')
| en | 0.315882 | # # from django.http import Http404 # # from django.http import HttpResponse # # from django.template import loader # from django.shortcuts import render,get_object_or_404 # from .models import Album,Song # # # # # def index(request): # # all_albums = Album.objects.all() # # html = '' # # for album in all_albums: # # url = '/music/' + str(album.id) + '/' # # html += '<a href="' + url + '">' + album.album_title + '</a><br>' # # return HttpResponse(html) # # # return HttpResponse("<h1>There will be a list of albums</h1>") # # # def index(request): # # all_albums = Album.objects.all() # # template = loader.get_template('music/index.html') # # context = { # # 'all_albums': all_albums, # # } # # return HttpResponse(template.render(context, request)) # # def index(request): # all_albums = Album.objects.all() # context = {'all_albums': all_albums} # return render(request,'music/index.html',context) # # # def detail(request, album_id): # # return HttpResponse("<h2>Details for album id:" + str(album_id) + "</h2>") # # def detail(request, album_id): # # try: # # album = Album.objects.get(pk=album_id) # # except Album.DoesNotExist: # # raise Http404("Album does not exist") # album = get_object_or_404(Album,pk=album_id) # return render(request, 'music/detail.html', {'album': album}) # # def favorite(request, album_id): # album = get_object_or_404(Albummpk=album_id) # try: # selected_song = album.song_set.get(pk=request.POST['post']) # except (KeyError,Song.DoesNotExist): # return render(request, 'music/detail.html',{ # 'album':album, # 'error_message':"You did not select a valid song", # }) # else: # selected_song.is_favorite = True # selected_song.save() # return render(request, 'music/detail.html', {'album': album}) | 2.250743 | 2 |
quicksandpy/preprocessing.py | kiankd/quicksand | 0 | 6615894 | import wordninja
import preprocessor
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import TweetTokenizer
from autocorrect import spell
from string import punctuation
"""
Global objects.
"""
LEMMATIZER = WordNetLemmatizer()
TOKENIZER = TweetTokenizer(preserve_case=False, reduce_len=True)
PUNCTUATION = set(punctuation)
"""
Utility functions.
"""
def should_be_spell_corrected(string):
    """Return True when *string* contains no punctuation characters."""
    # Tokens carrying punctuation (URLs, emoticons, hashtags) are skipped
    # by the spell-checker; set.intersection accepts the string directly.
    return not PUNCTUATION.intersection(string)
"""
Preprocessing functions in the pipeline order.
"""
def tokenize(text):
    # Delegates to the module-level TweetTokenizer (configured above with
    # preserve_case=False and reduce_len=True).
    return TOKENIZER.tokenize(text)
def remove_mentions(tokens):
    """Drop @-mention tokens, preserving the order of the rest."""
    return [tok for tok in tokens if not tok.startswith('@')]
def filter_tokens(tokens, filter_list=frozenset()):
    """Return *tokens* with every member of *filter_list* removed.

    The default argument was a mutable set(); an immutable frozenset()
    avoids the shared-mutable-default pitfall while behaving identically
    for every existing call site.
    """
    return [tok for tok in tokens if tok not in filter_list]
def split_hashtags(tokens):
    """
    Applies hashtag splitting on list of tokens into most likely words.
    E.g., '#trumpisbad' -> ['#', 'trump', 'is', 'bad']
    :param tokens: list of strings
    :return: list of strings
    """
    expanded = []
    for tok in tokens:
        if not tok.startswith('#'):
            expanded.append(tok)
            continue
        # Keep the '#' marker, then the most-likely word segmentation
        # of the remainder as produced by wordninja.
        expanded.append('#')
        expanded.extend(wordninja.split(tok[1:]))
    return expanded
def autocorrect(tokens, skipwords=set()):
    """
    Applies autocorrect on list of strings (only if they don't have punctuation
    in them. E.g., 'dancin' -> 'dancing'.
    :param tokens: list of strings
    :param skipwords: set of words NOT to spellcheck.
    :return: list of strings
    """
    # preprocessor.tokenize maps e.g. [steve, is, :), happy]
    # -> [steve, is, $EMOTICON$, happy]; labels drive the skip decision.
    labels = map(preprocessor.tokenize, tokens)
    fixed = []
    for tok, label in zip(tokens, labels):
        skip = (label in skipwords) or not should_be_spell_corrected(label)
        fixed.append(tok if skip else spell(tok))
    return fixed
def lemmatize(tokens):
    # WordNet lemmatization via the shared module-level lemmatizer;
    # uses the default part-of-speech (noun) for every token.
    return list(map(LEMMATIZER.lemmatize, tokens))
"""
Primary application functions.
"""
def preprocess_tweets(tweets, verbose=True):
    """Tokenize each tweet in place, storing raw and normalized token lists."""
    # Each stage is (callable, kwargs). The last two stages (autocorrect,
    # lemmatize) are split off so the uncorrected token stream is kept too.
    stages = [
        (tokenize, {}),
        (filter_tokens, {'filter_list': {'<br/>', '<br>'}}),
        (remove_mentions, {}),
        (split_hashtags, {}),
        (autocorrect, {'skipwords': {'lol', 'xbox'}}),
        (lemmatize, {}),
    ]
    raw_stages, norm_stages = stages[:-2], stages[-2:]
    for tweet in tweets:
        tweet.uncorrected_tokens = apply_pipeline(raw_stages, tweet.orig_text,
                                                  verbose=verbose)
        tweet.corrected_tokens = apply_pipeline(norm_stages,
                                                tweet.uncorrected_tokens,
                                                verbose=verbose)
def apply_pipeline(pipeline, data, verbose=True):
    """Thread *data* through each (function, kwargs) stage and return the result."""
    current = data
    if verbose:
        print(f'Current tweet text: {current}')
    for stage_fn, stage_kwargs in pipeline:
        previous = current
        current = stage_fn(previous, **stage_kwargs)
        if not verbose:
            continue
        print(f'Applying function \"{stage_fn.__name__}\" with args {stage_kwargs} onto:')
        # The initial input is a raw string; every later stage yields a list.
        if type(previous) is not str:
            print('\tOrigin: ' + ', '.join(previous))
        print(f'\tResult: {", ".join(current)}')
    if verbose:
        print(f'FINAL OUTPUT: {current}\n\n')
    return current
| import wordninja
import preprocessor
from nltk.stem import WordNetLemmatizer
from nltk.tokenize import TweetTokenizer
from autocorrect import spell
from string import punctuation
"""
Global objects.
"""
LEMMATIZER = WordNetLemmatizer()
TOKENIZER = TweetTokenizer(preserve_case=False, reduce_len=True)
PUNCTUATION = set(punctuation)
"""
Utility functions.
"""
def should_be_spell_corrected(string):
return len(set(string).intersection(PUNCTUATION)) == 0
"""
Preprocessing functions in the pipeline order.
"""
def tokenize(text):
return TOKENIZER.tokenize(text)
def remove_mentions(tokens):
return [t for t in tokens if not t.startswith('@')]
def filter_tokens(tokens, filter_list=set()):
return [t for t in tokens if t not in filter_list]
def split_hashtags(tokens):
"""
Applies hashtag splitting on list of tokens into most likely words.
E.g., '#trumpisbad' -> ['#', 'trump', 'is', 'bad']
:param tokens: list of strings
:return: list of strings
"""
new_toks = []
for token in tokens:
if token.startswith('#'):
splits = wordninja.split(token[1:])
new_toks.append('#')
for w in splits:
new_toks.append(w)
else:
new_toks.append(token)
return new_toks
def autocorrect(tokens, skipwords=set()):
"""
Applies autocorrect on list of strings (only if they don't have punctuation
in them. E.g., 'dancin' -> 'dancing'.
:param tokens: list of strings
:param skipwords: set of words NOT to spellcheck.
:return: list of strings
"""
corrected = []
# labelled looks like this: [steve, is, :), happy] -> [steve, is, $EMOTICON$, happy]
labelled = map(preprocessor.tokenize, tokens)
for token, label in zip(tokens, labelled):
if should_be_spell_corrected(label) and not label in skipwords:
corrected.append(spell(token))
else:
corrected.append(token)
return corrected
def lemmatize(tokens):
return list(map(LEMMATIZER.lemmatize, tokens))
"""
Primary application functions.
"""
def preprocess_tweets(tweets, verbose=True):
# Pipeline is a list of tuples of functions and optional arguments.
pipeline = [
(tokenize, {}),
(filter_tokens, {'filter_list': {'<br/>', '<br>'}}),
(remove_mentions, {}),
(split_hashtags, {}),
(autocorrect, {'skipwords': {'lol', 'xbox'}}),
(lemmatize, {}),
]
for tweet in tweets:
uncorrected = apply_pipeline(pipeline[:-2], tweet.orig_text, verbose=verbose)
tweet.uncorrected_tokens = uncorrected
corrected = apply_pipeline(pipeline[-2:], uncorrected, verbose=verbose)
tweet.corrected_tokens = corrected
def apply_pipeline(pipeline, data, verbose=True):
output = data
if verbose:
print(f'Current tweet text: {output}')
for fun, args in pipeline:
orig = output
output = fun(output, **args)
if verbose:
print(f'Applying function \"{fun.__name__}\" with args {args} onto:')
if type(orig) is not str:
print('\tOrigin: ' + ', '.join(orig))
print(f'\tResult: {", ".join(output)}')
if verbose:
print(f'FINAL OUTPUT: {output}\n\n')
return output
| en | 0.813621 | Global objects. Utility functions. Preprocessing functions in the pipeline order. Applies hashtag splitting on list of tokens into most likely words. E.g., '#trumpisbad' -> ['#', 'trump', 'is', 'bad'] :param tokens: list of strings :return: list of strings Applies autocorrect on list of strings (only if they don't have punctuation in them. E.g., 'dancin' -> 'dancing'. :param tokens: list of strings :param skipwords: set of words NOT to spellcheck. :return: list of strings # labelled looks like this: [steve, is, :), happy] -> [steve, is, $EMOTICON$, happy] Primary application functions. # Pipeline is a list of tuples of functions and optional arguments. | 3.281337 | 3 |
src/storage/challenge.py | Cassianokunsch/unfollow-app-graphene | 0 | 6615895 | from graphql import GraphQLError
from shared.messages import UNAUTHORIZED_CHALLENGE_ERROR
users_challenge = dict()
def set_challenge(api, link):
    # Remember the API session awaiting the challenge identified by *link*
    # in the module-level users_challenge mapping.
    users_challenge[link] = api
def get_challenge(link):
    """Return the API session registered for *link*, or raise GraphQLError."""
    # Membership test + index behaves identically to the original
    # try/except KeyError for the module-level dict.
    if link not in users_challenge:
        raise GraphQLError(UNAUTHORIZED_CHALLENGE_ERROR)
    return users_challenge[link]
def remove_challenge(link):
    """Forget the challenge for *link*; unknown links raise GraphQLError."""
    if link not in users_challenge:
        raise GraphQLError(UNAUTHORIZED_CHALLENGE_ERROR)
    del users_challenge[link]
| from graphql import GraphQLError
from shared.messages import UNAUTHORIZED_CHALLENGE_ERROR
users_challenge = dict()
def set_challenge(api, link):
users_challenge[link] = api
def get_challenge(link):
try:
return users_challenge[link]
except KeyError:
raise GraphQLError(UNAUTHORIZED_CHALLENGE_ERROR)
def remove_challenge(link):
try:
del users_challenge[link]
except KeyError:
raise GraphQLError(UNAUTHORIZED_CHALLENGE_ERROR)
| none | 1 | 2.280672 | 2 | |
app/ml/internalCoherence.py | darrenvong/local-lead-finder | 2 | 6615896 | '''
INTERNAL TOPIC SCORER
FUNC: - User internal topics determined by presence of keyword term
- PER TWEET = 1 (not multi score within tweet)
- DENSITY = Multiple tweets per tweet history for user
SEND TO: Final Output
CRUNCH POINTS: 1. Number of keywords - potentially expansive??? 2. Text matching!!!
### VERSION 1: This version inputs only users that have matches with keywords
### VERSION 2: This version will give scores for every keyword for each user
### VERSION 3 (FINAL): Matches score only if alongside organising/campaigning terms
'''
from ml.localityScorer import localUsersDict
from ml.wordNetExpander import keywordsExpanded
from db import db
from models import Tweet, User, KeywordScore, KeywordTweet
def commit_keyword_score(keyword, userId, score):
    # Persist one (user, keyword, score) row and commit immediately.
    keyword_score = KeywordScore(userID=userId, keyword=keyword, score=score)
    db.session.add(keyword_score)
    db.session.commit()
def commit_tweet_keyword(keyword, tweetid):
    # Record that a given tweet matched a keyword; committed per call.
    keyword_tweet = KeywordTweet(keyword=keyword, tweetID=tweetid)
    db.session.add(keyword_tweet)
    db.session.commit()
def topicScore(keywordsExpanded):
    '''
    INPUT:
        - Keywords with WordNet expanded terms - keywordsExpanded
        - usersDict: {userID: [statusesCount, [tweets]], userID2: [statusesCount, [tweets]]}
    OUTPUT:
        - Topic ranking: per user, per topic

    Bug fixes vs the original:
      * Tweet.query.filter(userID=...) used keyword args with .filter(),
        which SQLAlchemy rejects; .filter_by() is the keyword form.
      * .lower() results were discarded (str.lower returns a new string),
        so matching was effectively case-sensitive.
      * "for val in key" iterated the CHARACTERS of the keyword string;
        the intent (per the module docstring) is to scan the WordNet
        expansions, i.e. keywordsExpanded[key].
      * A tweet now contributes at most 1 to the score ("PER TWEET = 1").
      * The commit helpers take ids (userId/tweetid), so .id is passed —
        NOTE(review): confirm the helpers' expected types against the db
        layer.
    '''
    users = User.query.all()
    for user in users:
        tweets = Tweet.query.filter_by(userID=user.id).all()
        ### This section is for searching for all keyword
        for key, expansions in keywordsExpanded.items():
            keywordScore = 0
            terms = [term.lower() for term in expansions]
            for tweet in tweets:
                content = tweet.content.lower()
                if any(term in content for term in terms):
                    keywordScore += 1
                    commit_tweet_keyword(key, tweet.id)
            commit_keyword_score(key, user.id, keywordScore)
INTERNAL TOPIC SCORER
FUNC: - User internal topics determined by presence of keyword term
- PER TWEET = 1 (not multi score within tweet)
- DENSITY = Multiple tweets per tweet history for user
SEND TO: Final Output
CRUNCH POINTS: 1. Number of keywords - potentially expansive??? 2. Text matching!!!
### VERSION 1: This version inputs only users that have matches with keywords
### VERSION 2: This version will give scores for every keyword for each user
### VERSION 3 (FINAL): Matches score only if alongside organising/campaigning terms
'''
from ml.localityScorer import localUsersDict
from ml.wordNetExpander import keywordsExpanded
from db import db
from models import Tweet, User, KeywordScore, KeywordTweet
def commit_keyword_score(keyword, userId, score):
keyword_score = KeywordScore(userID=userId, keyword=keyword, score=score)
db.session.add(keyword_score)
db.session.commit()
def commit_tweet_keyword(keyword, tweetid):
keyword_tweet = KeywordTweet(keyword=keyword, tweetID=tweetid)
db.session.add(keyword_tweet)
db.session.commit()
def topicScore(keywordsExpanded):
'''
INPUT:
- Keywords with WordNet expanded terms - keywordsExpanded
- usersDict: {userID: [statusesCount, [tweets]], userID2: [statusesCount, [tweets]]}
OUTPUT:
- Topic ranking: per user, per topic
'''
users = User.query.all()
for user in users:
tweets = Tweet.query.filter(userID=user.id)
### This section is for searching for all keyword
for key in keywordsExpanded.keys():
keywordScore = 0
for tweet in tweets:
tweet.lower()
for val in key:
val.lower()
if val in tweet.content:
keywordScore += 1
commit_tweet_keyword(key, tweet)
commit_keyword_score(key, user, keywordScore) | en | 0.69259 | INTERNAL TOPIC SCORER FUNC: - User internal topics determined by presence of keyword term - PER TWEET = 1 (not multi score within tweet) - DENSITY = Multiple tweets per tweet history for user SEND TO: Final Output CRUNCH POINTS: 1. Number of keywords - potentially expansive??? 2. Text matching!!! ### VERSION 1: This version inputs only users that have matches with keywords ### VERSION 2: This version will give scores for every keyword for each user ### VERSION 3 (FINAL): Matches score only if alongside organising/campaigning terms INPUT: - Keywords with WordNet expanded terms - keywordsExpanded - usersDict: {userID: [statusesCount, [tweets]], userID2: [statusesCount, [tweets]]} OUTPUT: - Topic ranking: per user, per topic ### This section is for searching for all keyword | 3.094786 | 3 |
routes/ui/tools.py | timb-machine-mirrors/pcf | 0 | 6615897 | from routes.ui import routes
from app import check_session, db, redirect, render_template, request, \
send_log_data, requires_authorization, csrf, config
from .project import check_project_access, check_project_archived
from urllib.parse import urlparse
from system.forms import *
from libnmap.parser import NmapParser
from libnessus.parser import NessusParser
import email_validator
import json
import codecs
import re
import io
from flask import Response, send_file
from bs4 import BeautifulSoup
import urllib.parse
from IPy import IP
import socket
import csv
import dicttoxml
import time
from xml.dom.minidom import parseString
import ipwhois
import shodan
from shutil import copyfile
import ipaddress
import whois
from os import path, remove
from system.crypto_functions import *
from system.security_functions import htmlspecialchars
from routes.ui.tools_addons import nmap_scripts
@routes.route('/project/<uuid:project_id>/tools/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def project_tools(project_id, current_project, current_user):
    # Landing page listing the import/scan tools available for a project.
    # The decorator chain enforces auth, session, project access and
    # not-archived checks, and supplies current_project/current_user.
    return render_template('project/tools/list.html',
                           current_project=current_project,
                           tab_name='Tools')
@routes.route('/project/<uuid:project_id>/tools/nmap/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nmap_page(project_id, current_project, current_user):
    # GET form for uploading Nmap XML reports; the parsing itself happens
    # in the POST handler registered on the same route (nmap_page_form).
    return render_template('project/tools/import/nmap.html',
                           current_project=current_project,
                           tab_name='Nmap')
@routes.route('/project/<uuid:project_id>/tools/nmap/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nmap_page_form(project_id, current_project, current_user):
form = NmapForm()
form.validate()
errors = []
if form.errors:
for field in form.errors:
for error in form.errors[field]:
errors.append(error)
if not errors:
add_empty_hosts = form.add_no_open.data
# parse ports
ignore_ports = form.ignore_ports.data.replace(' ', '')
ignore_port_arr1 = ignore_ports.split(',') if ignore_ports else []
ignore_port_array = []
for port_str in ignore_port_arr1:
protocol = 'tcp'
port_num = port_str
if '/' in port_str:
if port_str.split('/')[1].lower() == 'udp':
protocol = 'udp'
port_num = port_str.split('/')[0]
port_num = int(port_num)
ignore_port_array.append([port_num, protocol])
ignore_services_array = [service.lower() for service in form.ignore_services.data.replace(' ', '').split(',')]
for file in form.files.data:
try:
xml_report_data = file.read().decode('charmap')
nmap_report = NmapParser.parse_fromstring(xml_report_data)
except:
return render_template('project/tools/import/nmap.html',
current_project=current_project,
errors=['Оne of uploaded files was incorrect!'],
success=1,
tab_name='Nmap')
try:
command_str = nmap_report.commandline
except:
command_str = ''
for host in nmap_report.hosts:
# check if we will add host
found = 0
os = ''
if host.os and host.os.osmatches:
os = host.os.osmatches[0].name
for service in host.services:
protocol = service.protocol.lower()
port_num = int(service.port)
service_name = service.service.lower()
if [port_num, protocol] not in ignore_port_array and service_name not in ignore_services_array:
if service.state == 'open':
found = 1
elif service.state == 'filtered' and \
form.rule.data in ['filtered', 'closed']:
found = 1
elif service.state == 'closed' and \
form.rule.data == 'closed':
found = 1
if found or add_empty_hosts:
host_id = db.select_project_host_by_ip(
current_project['id'], host.address)
if not host_id:
host_info = form.hosts_description.data
host_id = db.insert_host(current_project['id'],
host.address,
current_user['id'],
host_info)
else:
host_id = host_id[0]['id']
if os:
db.update_host_os(host_id, os)
for hostname in host.hostnames:
if hostname and hostname != host.address:
hostname_id = db.select_ip_hostname(host_id,
hostname)
if not hostname_id:
hostname_id = db.insert_hostname(host_id,
hostname,
form.hostnames_description.data,
current_user[
'id'])
else:
hostname_id = hostname_id[0]['id']
for service in host.services:
is_tcp = service.protocol == 'tcp'
protocol_str = service.protocol.lower()
port_num = int(service.port)
service_name = service.service
service_banner = service.banner
add = 0
if [port_num,
protocol_str] not in ignore_port_array and service_name.lower() not in ignore_services_array:
if service.state == 'open':
add = 1
elif service.state == 'filtered' and \
form.rule.data in ['filtered', 'closed']:
add = 1
service_banner += '\nstate: filtered'
elif service.state == 'closed' and \
form.rule.data == 'closed':
add = 1
service_banner += '\nstate: closed'
if add == 1:
port_id = db.select_ip_port(host_id, service.port,
is_tcp)
if not port_id:
port_id = db.insert_host_port(host_id,
service.port,
is_tcp,
service_name,
service_banner,
current_user[
'id'],
current_project[
'id'])
else:
port_id = port_id[0]['id']
db.update_port_proto_description(port_id,
service_name,
service_banner)
for script_xml in service.scripts_results:
for script in nmap_scripts.modules:
script_class = script.nmap_plugin
if script_class.script_id == script_xml['id'] and \
script_class.script_source == 'service':
script_obj = script_class(script_xml)
if 'port_info' in script_obj.script_types:
result = script_obj.port_info()
update = False
if 'protocol' in result and result['protocol'] and \
result['protocol'].lower() not in service_name.lower():
service_name = result['protocol']
update = True
if 'info' in result and result['info'] and \
result['info'].lower() not in service_banner.lower():
service_banner += '\n' + result['info']
update = True
if update:
db.update_port_proto_description(port_id,
service_name,
service_banner)
if 'issue' in script_obj.script_types:
issues = script_obj.issues()
for issue in issues:
db.insert_new_issue_no_dublicate(issue['name'],
issue[
'description'] if 'description' in issue else '',
issue['path'] if 'path' in issue else '',
issue['cvss'] if 'cvss' in issue else 0.0,
current_user['id'],
{port_id: ['0']},
'need to recheck',
current_project['id'],
cve=issue['cve'] if 'cve' in issue else '',
cwe=issue['cwe'] if 'cwe' in issue else 0,
issue_type='service',
fix=issue['fix'] if 'fix' in issue else '',
param=issue[
'params'] if 'params' in issue else '')
if 'credentials' in script_obj.script_types:
credentials = script_obj.credentials()
for cred in credentials:
login = cred['login'] if 'login' in cred else ''
cleartext = cred['cleartext'] if 'cleartext' in cred else ''
hash_str = cred['hash'] if 'hash' in cred else ''
description = cred['description'] if 'description' in cred else ''
source = cred['source'] if 'source' in cred else ''
dublicates_creds = db.select_creds_dublicates(current_project['id'],
login,
hash_str, cleartext,
description,
source,
'')
if dublicates_creds:
dublicates_creds = dublicates_creds[0]
services = json.loads(dublicates_creds['services'])
if port_id not in services:
services[port_id] = ["0"]
else:
services[port_id].append("0")
db.update_creds(dublicates_creds['id'],
login,
hash_str,
dublicates_creds['hash_type'],
cleartext,
description,
source,
services)
else:
db.insert_new_cred(login,
hash_str,
'other',
cleartext,
description,
source,
{port_id: ["0"]},
current_user['id'],
current_project['id'])
current_host = db.select_host(host_id)[0]
host_zero_port = db.select_host_port(current_host['id'])[0]
for script_xml in host.scripts_results:
for script in nmap_scripts.modules:
script_class = script.nmap_plugin
if script_class.script_id == script_xml['id'] and \
script_class.script_source == 'host':
script_obj = script_class(script_xml)
if 'server_info' in script_obj.script_types:
result = script_obj.host_info()
update = False
if 'os' in result and result['os'] and \
result['os'].lower() not in current_host['os'].lower():
current_host['os'] = result['os']
update = True
if 'info' in result and result['info'] and \
result['info'].lower() not in current_host['comment'].lower():
current_host['comment'] += '\n' + result['info']
update = True
if update:
db.update_host_comment_threats(current_host['id'],
current_host['comment'],
current_host['threats'],
current_host['os'])
if 'hostnames' in result:
for hostname in result['hostnames']:
hostnames_found = db.select_ip_hostname(current_host['id'], hostname)
if not hostnames_found:
db.insert_hostname(current_host['id'], hostname,
form.hostnames_description.data, current_user['id'])
if 'issue' in script_obj.script_types:
issues = script_obj.issues()
for issue in issues:
db.insert_new_issue_no_dublicate(issue['name'],
issue[
'description'] if 'description' in issue else '',
issue['path'] if 'path' in issue else '',
issue['cvss'] if 'cvss' in issue else 0.0,
current_user['id'],
{host_zero_port['id']: ['0']},
'need to recheck',
current_project['id'],
cve=issue['cve'] if 'cve' in issue else '',
cwe=issue['cwe'] if 'cwe' in issue else 0,
issue_type='service',
fix=issue['fix'] if 'fix' in issue else '',
param=issue[
'params'] if 'params' in issue else '')
if 'credentials' in script_obj.script_types:
credentials = script_obj.credentials()
for cred in credentials:
login = cred['login'] if 'login' in cred else ''
cleartext = cred['cleartext'] if 'cleartext' in cred else ''
hash_str = cred['hash'] if 'hash' in cred else ''
description = cred['description'] if 'description' in cred else ''
source = cred['source'] if 'source' in cred else ''
dublicates_creds = db.select_creds_dublicates(current_project['id'],
login,
hash_str, cleartext,
description,
source,
'')
if dublicates_creds:
dublicates_creds = dublicates_creds[0]
services = json.loads(dublicates_creds['services'])
if host_zero_port['id'] not in services:
services[host_zero_port['id']] = ["0"]
else:
services[host_zero_port['id']].append("0")
db.update_creds(dublicates_creds['id'],
login,
hash_str,
dublicates_creds['hash_type'],
cleartext,
description,
source,
services)
else:
db.insert_new_cred(login,
hash_str,
'other',
cleartext,
description,
source,
{host_zero_port['id']: ["0"]},
current_user['id'],
current_project['id'])
return render_template('project/tools/import/nmap.html',
current_project=current_project,
errors=errors,
success=1,
tab_name='Nmap')
@routes.route('/project/<uuid:project_id>/tools/nessus/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nessus_page(project_id, current_project, current_user):
    """Render the Nessus report import tab for the current project."""
    template = 'project/tools/import/nessus.html'
    return render_template(template,
                           tab_name='Nessus',
                           current_project=current_project)
@routes.route('/project/<uuid:project_id>/tools/nessus/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nessus_page_form(project_id, current_project, current_user):
    """Import uploaded Nessus XML reports into the current project.

    For every host in every uploaded report this creates (or reuses) a
    project host, an optional hostname, a port per report item, and an
    issue with status 'need to check'. Informational findings
    (severity 0) are imported only when the form's add_info_issues flag
    is set. Re-renders the Nessus tab with success=1 or the collected
    form errors.
    """
    form = NessusForm()
    form.validate()
    errors = []
    # flatten WTForms per-field errors into one flat list for the template
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        add_info_issues = form.add_info_issues.data
        # xml files
        for file in form.xml_files.data:
            if file.filename:
                xml_report_data = file.read().decode('charmap')
                scan_result = NessusParser.parse_fromstring(xml_report_data)
                for host in scan_result.hosts:
                    # reuse the project host when the IP already exists
                    host_id = db.select_project_host_by_ip(
                        current_project['id'], host.ip)
                    if not host_id:
                        host_id = db.insert_host(current_project['id'],
                                                 host.ip,
                                                 current_user['id'],
                                                 form.hosts_description.data)
                    else:
                        host_id = host_id[0]['id']
                    # add hostname
                    hostname_id = ''
                    hostname = host.name if host.name != host.ip else ''
                    # IP() raising ValueError means host.address is not an
                    # IP literal, so it can be treated as a DNS hostname
                    try:
                        test_hostname = IP(host.address)
                    except ValueError:
                        test_hostname = ''
                    if not hostname and not test_hostname and host.address:
                        hostname = host.address
                    if hostname:
                        hostname_id = db.select_ip_hostname(host_id, hostname)
                        if not hostname_id:
                            hostname_id = db.insert_hostname(host_id,
                                                             hostname,
                                                             form.hostnames_description.data,
                                                             current_user['id'])
                        else:
                            hostname_id = hostname_id[0]['id']
                    for issue in host.get_report_items:
                        # create port
                        is_tcp = issue.protocol == 'tcp'
                        port_id = db.select_ip_port(host_id, int(issue.port),
                                                    is_tcp)
                        if not port_id:
                            port_id = db.insert_host_port(host_id,
                                                          issue.port,
                                                          is_tcp,
                                                          issue.service,
                                                          form.ports_description.data,
                                                          current_user['id'],
                                                          current_project['id'])
                        else:
                            port_id = port_id[0]['id']
                            # refresh the detected service on an existing port
                            db.update_port_service(port_id,
                                                   issue.service)
                        # add issue to created port
                        name = 'Nessus: {}'.format(issue.plugin_name)
                        try:
                            issue_info = issue.synopsis
                        except KeyError:
                            issue_info = ''
                        description = 'Plugin name: {}\r\n\r\nInfo: \r\n{} \r\n\r\nOutput: \r\n {}'.format(
                            issue.plugin_name,
                            issue_info,
                            issue.description.strip('\n'))
                        # add host OS
                        if issue.get_vuln_plugin["pluginName"] == 'OS Identification':
                            os = issue.get_vuln_plugin["plugin_output"].split('\n')[1].split(' : ')[1]
                            db.update_host_os(host_id, os)
                        cve = issue.cve.replace('[', '').replace(']', '').replace("'", '').replace(",", ', ') if issue.cve else ''
                        cvss = 0
                        severity = float(issue.severity)
                        # prefer the CVSSv3 base score, fall back to CVSSv2
                        if severity == 0 and issue.get_vuln_info['risk_factor'] == 'None':
                            cvss = 0
                        elif 'cvss3_base_score' in issue.get_vuln_info:
                            cvss = float(issue.get_vuln_info['cvss3_base_score'])
                        elif 'cvss_base_score' in issue.get_vuln_info:
                            cvss = float(issue.get_vuln_info['cvss_base_score'])
                        else:
                            pass
                        # services maps port_id -> hostname ids; '0' is the bare IP
                        if hostname_id:
                            services = {port_id: ['0', hostname_id]}
                        else:
                            services = {port_id: ['0']}
                        if severity > 0 or (severity == 0 and add_info_issues):
                            db.insert_new_issue_no_dublicate(name, description, '', cvss,
                                                             current_user['id'], services,
                                                             'need to check',
                                                             current_project['id'],
                                                             cve, cwe=0, issue_type='custom', fix=issue.solution)
    return render_template('project/tools/import/nessus.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='Nessus')
@routes.route('/project/<uuid:project_id>/tools/nikto/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nikto_page(project_id, current_project, current_user):
    """Render the Nikto report import tab for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'Nikto',
    }
    return render_template('project/tools/import/nikto.html', **context)
@routes.route('/project/<uuid:project_id>/tools/nikto/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nikto_page_form(project_id, current_project, current_user):
    """Import Nikto scan results into the current project.

    Accepts three upload formats (JSON, CSV, XML); each finding becomes
    an issue named 'Nikto scan' attached to the scanned host/port, with
    status 'need to check'. Hosts, hostnames and ports are created on
    demand or reused when they already exist in the project.
    """
    form = NiktoForm()
    form.validate()
    errors = []
    # flatten WTForms per-field errors into one flat list for the template
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # json files
        for file in form.json_files.data:
            if file.filename:
                # Nikto JSON output may contain trailing commas; strip them
                # so json.loads accepts the document
                json_report_data = file.read().decode('charmap').replace(',]', ']').replace(',}', '}')
                scan_result = json.loads(json_report_data)
                host = scan_result['ip']
                hostname = scan_result['host'] if scan_result['ip'] != scan_result['host'] else ''
                issues = scan_result['vulnerabilities']
                port = int(scan_result['port'])
                # heuristic: any port containing "443" is assumed HTTPS
                protocol = 'https' if '443' in str(port) else 'http'
                is_tcp = 1
                port_description = 'Added by Nikto scan'
                if scan_result['banner']:
                    port_description = 'Nikto banner: {}'.format(
                        scan_result['banner'])
                # add host
                host_id = db.select_project_host_by_ip(current_project['id'],
                                                       host)
                if not host_id:
                    host_id = db.insert_host(current_project['id'],
                                             host,
                                             current_user['id'],
                                             form.hosts_description.data)
                else:
                    host_id = host_id[0]['id']
                # add hostname
                hostname_id = ''
                if hostname and hostname != host:
                    hostname_id = db.select_ip_hostname(host_id, hostname)
                    if not hostname_id:
                        hostname_id = db.insert_hostname(host_id,
                                                         hostname,
                                                         form.hostnames_description.data,
                                                         current_user['id'])
                    else:
                        hostname_id = hostname_id[0]['id']
                # add port
                port_id = db.select_ip_port(host_id, port, is_tcp)
                if not port_id:
                    port_id = db.insert_host_port(host_id,
                                                  port,
                                                  is_tcp,
                                                  protocol,
                                                  port_description,
                                                  current_user['id'],
                                                  current_project['id'])
                else:
                    port_id = port_id[0]['id']
                for issue in issues:
                    method = issue['method']
                    url = issue['url']
                    full_url = '{} {}'.format(method, url)
                    osvdb = int(issue['OSVDB'])
                    info = issue['msg']
                    full_info = 'OSVDB: {}\n\n{}'.format(osvdb, info)
                    # services maps port_id -> hostname ids; '0' is the bare IP
                    services = {port_id: ['0']}
                    if hostname_id:
                        services = {port_id: ['0', hostname_id]}
                    db.insert_new_issue('Nikto scan', full_info, full_url, 0,
                                        current_user['id'], services,
                                        'need to check',
                                        current_project['id'],
                                        cve=0,
                                        cwe=0,
                                        )
        # csv load
        for file in form.csv_files.data:
            if file.filename:
                scan_result = csv.reader(codecs.iterdecode(file, 'charmap'),
                                         delimiter=',')
                for issue in scan_result:
                    # expected row: hostname, ip, port, OSVDB, method, url, msg
                    if len(issue) == 7:
                        hostname = issue[0]
                        host = issue[1]
                        port = int(issue[2])
                        protocol = 'https' if '443' in str(port) else 'http'
                        is_tcp = 1
                        osvdb = issue[3]
                        full_url = '{} {}'.format(issue[4], issue[5])
                        full_info = 'OSVDB: {}\n{}'.format(osvdb, issue[6])
                        # add host
                        host_id = db.select_project_host_by_ip(
                            current_project['id'],
                            host)
                        if not host_id:
                            host_id = db.insert_host(current_project['id'],
                                                     host,
                                                     current_user['id'],
                                                     form.hosts_description.data)
                        else:
                            host_id = host_id[0]['id']
                        # add hostname
                        hostname_id = ''
                        if hostname and hostname != host:
                            hostname_id = db.select_ip_hostname(host_id,
                                                                hostname)
                            if not hostname_id:
                                hostname_id = db.insert_hostname(host_id,
                                                                 hostname,
                                                                 form.hostnames_description.data,
                                                                 current_user[
                                                                     'id'])
                            else:
                                hostname_id = hostname_id[0]['id']
                        # add port
                        port_id = db.select_ip_port(host_id, port, is_tcp)
                        if not port_id:
                            port_id = db.insert_host_port(host_id,
                                                          port,
                                                          is_tcp,
                                                          protocol,
                                                          form.ports_description.data,
                                                          current_user['id'],
                                                          current_project['id'])
                        else:
                            port_id = port_id[0]['id']
                        # add issue
                        services = {port_id: ['0']}
                        if hostname_id:
                            services = {port_id: ['0', hostname_id]}
                        db.insert_new_issue('Nikto scan', full_info, full_url,
                                            0,
                                            current_user['id'], services,
                                            'need to check',
                                            current_project['id'],
                                            cve=0,
                                            cwe=0,
                                            )
        # xml files
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser").niktoscan.scandetails
                host = scan_result['targetip']
                port = int(scan_result['targetport'])
                is_tcp = 1
                port_banner = scan_result['targetbanner']
                hostname = scan_result['targethostname']
                issues = scan_result.findAll("item")
                protocol = 'https' if '443' in str(port) else 'http'
                port_description = ''
                if port_banner:
                    port_description = 'Nikto banner: {}'.format(
                        scan_result['targetbanner'])
                # add host
                host_id = db.select_project_host_by_ip(
                    current_project['id'],
                    host)
                if not host_id:
                    host_id = db.insert_host(current_project['id'],
                                             host,
                                             current_user['id'],
                                             form.hosts_description.data)
                else:
                    host_id = host_id[0]['id']
                # add hostname
                hostname_id = ''
                if hostname and hostname != host:
                    hostname_id = db.select_ip_hostname(host_id,
                                                        hostname)
                    if not hostname_id:
                        hostname_id = db.insert_hostname(host_id,
                                                         hostname,
                                                         form.hostnames_description.data,
                                                         current_user['id'])
                    else:
                        hostname_id = hostname_id[0]['id']
                # add port
                port_id = db.select_ip_port(host_id, port, is_tcp)
                if not port_id:
                    port_id = db.insert_host_port(host_id,
                                                  port,
                                                  is_tcp,
                                                  protocol,
                                                  port_description,
                                                  current_user['id'],
                                                  current_project['id'])
                else:
                    port_id = port_id[0]['id']
                for issue in issues:
                    method = issue['method']
                    url = issue.uri.contents[0]
                    full_url = '{} {}'.format(method, url)
                    osvdb = int(issue['osvdbid'])
                    info = issue.description.contents[0]
                    full_info = 'OSVDB: {}\n\n{}'.format(osvdb, info)
                    services = {port_id: ['0']}
                    if hostname_id:
                        services = {port_id: ['0', hostname_id]}
                    db.insert_new_issue('Nikto scan', full_info, full_url, 0,
                                        current_user['id'], services,
                                        'need to check',
                                        current_project['id'],
                                        cve=0,
                                        cwe=0,
                                        )
    return render_template('project/tools/import/nikto.html',
                           current_project=current_project,
                           tab_name='Nikto',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/acunetix/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def acunetix_page(project_id, current_project, current_user):
    """Render the Acunetix report import tab for the current project."""
    template = 'project/tools/import/acunetix.html'
    return render_template(template,
                           tab_name='Acunetix',
                           current_project=current_project)
@routes.route('/project/<uuid:project_id>/tools/acunetix/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def acunetix_page_form(project_id, current_project, current_user):
    """Import uploaded Acunetix XML reports into the current project.

    Derives host/port/protocol from the scan's start URL, optionally
    resolving a DNS hostname to an IP (manual override via form.host or
    gethostbyname when auto_resolve is set), then stores one issue per
    report item with its CVSS score and CWE id.
    """
    form = AcunetixForm()
    form.validate()
    errors = []
    # flatten WTForms per-field errors into one flat list for the template
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # NOTE(review): auto_resolve is computed here but the branch below
        # re-reads form.auto_resolve.data directly; this local is unused
        auto_resolve = form.auto_resolve.data == 1
        # xml files
        for file in form.files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser").scangroup.scan
                start_url = scan_result.starturl.contents[0]
                parsed_url = urllib.parse.urlparse(start_url)
                protocol = parsed_url.scheme
                hostname = parsed_url.hostname
                # URLs without a scheme parse the host into .path
                if hostname is None:
                    hostname = parsed_url.path
                port = parsed_url.port
                os_descr = scan_result.os.contents[0]
                port_banner = scan_result.banner.contents[0]
                web_banner = scan_result.webserver.contents[0]
                port_description = 'Banner: {} Web: {}'.format(port_banner,
                                                               web_banner)
                host_description = 'OS: {}'.format(os_descr)
                is_tcp = 1
                if not port:
                    port = 80
                # NOTE(review): this also overrides an explicit non-443 port
                # for https start URLs (e.g. :8443 becomes 443) — confirm
                if protocol == 'https':
                    port = 443
                # if hostname is actually an IP literal, use it directly;
                # otherwise (bare except: IP() raised) resolve it
                try:
                    IP(hostname)
                    host = hostname
                    hostname = ''
                except:
                    if form.host.data:
                        IP(form.host.data)
                        host = form.host.data
                    elif form.auto_resolve.data == 1:
                        host = socket.gethostbyname(hostname)
                    else:
                        errors.append('ip not resolved!')
                if not errors:
                    # add host
                    host_id = db.select_project_host_by_ip(current_project['id'], host)
                    if not host_id:
                        host_id = db.insert_host(current_project['id'],
                                                 host,
                                                 current_user['id'],
                                                 host_description)
                    else:
                        host_id = host_id[0]['id']
                        db.update_host_description(host_id, host_description)
                    # add hostname
                    hostname_id = ''
                    if hostname and hostname != host:
                        hostname_id = db.select_ip_hostname(host_id,
                                                            hostname)
                        if not hostname_id:
                            hostname_id = db.insert_hostname(host_id,
                                                             hostname,
                                                             'Added from Acunetix scan',
                                                             current_user['id'])
                        else:
                            hostname_id = hostname_id[0]['id']
                    # add port
                    port_id = db.select_ip_port(host_id, port, is_tcp)
                    if not port_id:
                        port_id = db.insert_host_port(host_id,
                                                      port,
                                                      is_tcp,
                                                      protocol,
                                                      port_description,
                                                      current_user['id'],
                                                      current_project['id'])
                    else:
                        port_id = port_id[0]['id']
                        db.update_port_proto_description(port_id, protocol,
                                                         port_description)
                    issues = scan_result.reportitems.findAll("reportitem")
                    for issue in issues:
                        issue_name = issue.contents[1].contents[0]
                        module_name = issue.modulename.contents[0]
                        uri = issue.affects.contents[0]
                        request_params = issue.parameter.contents[0]
                        full_uri = '{} params:{}'.format(uri, request_params)
                        impact = issue.impact.contents[0]
                        issue_description = issue.description.contents[0]
                        recomendations = issue.recommendation.contents[0]
                        issue_request = issue.technicaldetails.request.contents[
                            0]
                        cwe = 0
                        if issue.cwe:
                            cwe = int(issue.cwe['id'].replace('CWE-', ''))
                        cvss = float(issue.cvss.score.contents[0])
                        # TODO: check CVE field
                        full_info = '''Module: \n{}\n\nDescription: \n{}\n\nImpact: \n{}\n\nRecomendations: \n{}\n\nRequest: \n{}'''.format(
                            module_name, issue_description, impact,
                            recomendations, issue_request)
                        # services maps port_id -> hostname ids; '0' is the bare IP
                        services = {port_id: ['0']}
                        if hostname_id:
                            services = {port_id: ['0', hostname_id]}
                        db.insert_new_issue(issue_name,
                                            full_info, full_uri,
                                            cvss,
                                            current_user['id'], services,
                                            'need to check',
                                            current_project['id'],
                                            cve=0,
                                            cwe=cwe
                                            )
    return render_template('project/tools/import/acunetix.html',
                           current_project=current_project,
                           tab_name='Acunetix',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/exporter/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def exporter_page(project_id, current_project, current_user):
    """Render the host exporter tab for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'Exporter',
    }
    return render_template('project/tools/export/exporter.html', **context)
@routes.route('/project/<uuid:project_id>/tools/exporter/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def exporter_page_form(project_id, current_project, current_user):
    """Export filtered project hosts as txt, csv, json or xml.

    Hosts are selected by the form filters (network, ip/hostname, issue
    name, port, service, comment, threats). The 'hosts_export' mode
    controls whether IPs, hostnames, both, or hostnames-falling-back-to-IP
    are emitted. The result is either rendered inline
    (open_in_browser) or sent as a downloadable attachment.
    """
    form = ExportHosts()
    form.validate()
    errors = []
    # flatten WTForms per-field errors into one flat list for the template
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        result_hosts = db.search_hostlist(project_id=current_project['id'],
                                          network=form.network.data,
                                          ip_hostname=form.ip_hostname.data,
                                          issue_name=form.issue_name.data,
                                          port=form.port.data,
                                          service=form.service.data,
                                          comment=form.comment.data,
                                          threats=form.threats.data)
    else:
        return render_template(
            'project/tools/export/exporter.html',
            current_project=current_project,
            tab_name='Exporter',
            errors=errors)
    result = ''
    # '[newline]' is the UI placeholder for a literal '\n' separator
    separator = '\n' if form.separator.data == '[newline]' \
        else form.separator.data
    host_export = form.hosts_export.data
    # ports_array entries are [port_number, is_tcp] pairs parsed from
    # comma-separated "num/proto" tokens, e.g. "80/tcp,53/udp"
    ports_array = []
    if form.port.data:
        ports_array = [[int(port.split('/')[0]), port.split('/')[1] == 'tcp']
                       for port in form.port.data.split(',')]
    prefix = form.prefix.data
    postfix = form.postfix.data
    # NOTE(review): response_type is only assigned inside the known
    # filetype branches; presumably form validation restricts filetype
    # to txt/csv/json/xml — confirm
    if form.filetype.data == 'txt':
        # txt worker
        response_type = 'text/plain'
        if not form.add_ports.data:
            # no ports
            ips = [host['ip'] for host in result_hosts]
            ips_hostnames = {}
            hostnames = []
            for host in result_hosts:
                host_hostname = db.select_ip_hostnames(host['id'])
                hostnames += [hostname['hostname'] for hostname in
                              host_hostname]
                ips_hostnames[host['ip']] = host_hostname
            hostnames = list(set(hostnames))
            if host_export == 'ip':
                result = separator.join([prefix + x + postfix for x in ips])
            elif host_export == 'hostname':
                result = separator.join([prefix + x + postfix for x in hostnames])
            elif host_export == 'ip&hostname':
                result = separator.join([prefix + x + postfix for x in ips + hostnames])
            elif host_export == 'ip&hostname_unique':
                # prefer hostnames; fall back to the IP only for hosts
                # that have no hostname at all
                host_hostnames_arr = []
                for ip in ips_hostnames:
                    if not ips_hostnames[ip]:
                        host_hostnames_arr.append(ip)
                    else:
                        host_hostnames_arr += [hostname['hostname'] for
                                               hostname in ips_hostnames[ip]]
                result = separator.join([prefix + x + postfix for x in host_hostnames_arr])
        else:
            # with ports
            # preparation: issues
            if form.issue_name.data:
                port_ids = db.search_issues_port_ids(current_project['id'],
                                                     form.issue_name.data)
            for host in result_hosts:
                ports = db.select_host_ports(host['id'])
                hostnames = db.select_ip_hostnames(host['id'])
                for port in ports:
                    if (not form.port.data) or (
                            [port['port'], port['is_tcp']] in ports_array):
                        if form.service.data in port['service']:
                            if (not form.issue_name.data) or (
                                    port['id'] in port_ids):
                                if host_export == 'ip&hostname':
                                    result += '{}{}{}:{}{}'.format(separator,
                                                                   prefix,
                                                                   host['ip'],
                                                                   port['port'],
                                                                   postfix)
                                    for hostname in hostnames:
                                        result += '{}{}{}:{}{}'.format(separator,
                                                                       prefix,
                                                                       hostname[
                                                                           'hostname'],
                                                                       port['port'],
                                                                       postfix)
                                elif host_export == 'ip':
                                    result += '{}{}{}:{}{}'.format(separator,
                                                                   prefix,
                                                                   host['ip'],
                                                                   port['port'],
                                                                   postfix)
                                elif host_export == 'hostname':
                                    for hostname in hostnames:
                                        result += '{}{}{}:{}{}'.format(separator,
                                                                       prefix,
                                                                       hostname[
                                                                           'hostname'],
                                                                       port['port'],
                                                                       postfix)
                                elif host_export == 'ip&hostname_unique':
                                    if hostnames:
                                        for hostname in hostnames:
                                            result += '{}{}{}:{}{}'.format(
                                                separator,
                                                prefix,
                                                hostname[
                                                    'hostname'],
                                                port['port'],
                                                postfix)
                                    else:
                                        result += '{}{}{}:{}{}'.format(
                                            separator,
                                            prefix,
                                            host['ip'],
                                            port['port'],
                                            postfix)
            # every entry is prefixed with the separator; drop the leading one
            if result:
                result = result[len(separator):]
    elif form.filetype.data == 'csv':
        response_type = 'text/plain'
        # 'host/hostname','port', 'type', 'service', 'description'
        # always with ports
        csvfile = io.StringIO()
        csv_writer = csv.writer(csvfile, dialect='excel', delimiter=';')
        columns = ['host', 'port', 'type', 'service', 'description']
        csv_writer.writerow(columns)
        # preparation: issues
        if form.issue_name.data:
            port_ids = db.search_issues_port_ids(current_project['id'],
                                                 form.issue_name.data)
        for host in result_hosts:
            ports = db.select_host_ports(host['id'])
            hostnames = db.select_ip_hostnames(host['id'])
            for port in ports:
                if (not form.port.data) or ([port['port'], port['is_tcp']]
                                            in ports_array):
                    if form.service.data in port['service']:
                        if (not form.issue_name.data) or (
                                port['id'] in port_ids):
                            if host_export == 'ip&hostname':
                                csv_writer.writerow([host['ip'],
                                                     port['port'],
                                                     'tcp' if port[
                                                         'is_tcp'] else 'udp',
                                                     port['service'],
                                                     port['description']])
                                for hostname in hostnames:
                                    csv_writer.writerow([hostname['hostname'],
                                                         port['port'],
                                                         'tcp' if port[
                                                             'is_tcp'] else 'udp',
                                                         port['service'],
                                                         port['description']])
                            elif host_export == 'ip':
                                csv_writer.writerow([host['ip'],
                                                     port['port'],
                                                     'tcp' if port[
                                                         'is_tcp'] else 'udp',
                                                     port['service'],
                                                     port['description']])
                            elif host_export == 'hostname':
                                for hostname in hostnames:
                                    csv_writer.writerow([hostname['hostname'],
                                                         port['port'],
                                                         'tcp' if port[
                                                             'is_tcp'] else 'udp',
                                                         port['service'],
                                                         port['description']])
                            elif host_export == 'ip&hostname_unique':
                                if hostnames:
                                    for hostname in hostnames:
                                        csv_writer.writerow(
                                            [hostname['hostname'],
                                             port['port'],
                                             'tcp' if port[
                                                 'is_tcp'] else 'udp',
                                             port['service'],
                                             port['description']])
                                else:
                                    csv_writer.writerow([host['ip'],
                                                         port['port'],
                                                         'tcp' if port[
                                                             'is_tcp'] else 'udp',
                                                         port['service'],
                                                         port['description']])
        result = csvfile.getvalue()
    elif form.filetype.data == 'json' or form.filetype.data == 'xml':
        if form.filetype.data == 'xml':
            response_type = 'text/xml'
        else:
            response_type = 'application/json'
        # first generates json
        # [{"<ip>":"","hostnames":["<hostname_1",..],
        # "ports":[ {"num":"<num>", "type":"tcp", "service":"<service>",
        # "description": "<comment>"},...],},...]
        json_object = []
        # preparation: issues
        if form.issue_name.data:
            port_ids = db.search_issues_port_ids(current_project['id'],
                                                 form.issue_name.data)
        for host in result_hosts:
            ports = db.select_host_ports(host['id'])
            hostnames = db.select_ip_hostnames(host['id'])
            host_object = {}
            host_object['ip'] = host['ip']
            host_object['hostnames'] = [hostname['hostname'] for hostname in
                                        hostnames]
            host_object['ports'] = []
            for port in ports:
                if (not form.port.data) or ([port['port'], port['is_tcp']]
                                            in ports_array):
                    if form.service.data in port['service']:
                        port_object = {}
                        port_object['num'] = port['port']
                        port_object['type'] = 'tcp' if port['is_tcp'] else 'udp'
                        port_object['service'] = port['service']
                        port_object['description'] = port['description']
                        if (not form.issue_name.data) or (
                                port['id'] in port_ids):
                            host_object['ports'].append(port_object)
            # drop hosts with no matching ports when any port/service/issue
            # filter is active
            if not ((not host_object['ports']) and (form.port.data or
                                                    form.service.data or
                                                    form.issue_name.data)):
                json_object.append(host_object)
        if form.filetype.data == 'xml':
            s = dicttoxml.dicttoxml(json_object)
            dom = parseString(s)
            result = dom.toprettyxml()
        else:
            result = json.dumps(json_object, sort_keys=True, indent=4)
    if form.open_in_browser.data:
        return Response(result, content_type=response_type)
    else:
        return send_file(io.BytesIO(result.encode()),
                         attachment_filename='{}.{}'.format(form.filename.data,
                                                            form.filetype.data),
                         mimetype=response_type,
                         as_attachment=True)
@routes.route('/project/<uuid:project_id>/tools/http-sniffer/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def http_sniffer(project_id, current_project, current_user):
    """Render the HTTP sniffer management tab for the current project."""
    template = 'project/tools/sniffers/http.html'
    return render_template(template,
                           tab_name='HTTP-Sniffer',
                           current_project=current_project)
@routes.route('/project/<uuid:project_id>/tools/http-sniffer/add',
              methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def http_sniffer_add_form(project_id, current_project, current_user):
    """Create a new HTTP sniffer and jump to its tab on success."""
    form = NewHTTPSniffer()
    form.validate()
    # flatten WTForms per-field errors into one flat list
    errors = [err for field_errors in form.errors.values()
              for err in field_errors]
    if errors:
        return redirect(
            '/project/{}/tools/http-sniffer/'.format(current_project['id']))
    sniffer_id = db.insert_new_http_sniffer(form.name.data,
                                            current_project['id'])
    return redirect(
        '/project/{}/tools/http-sniffer/#/sniffer_{}'.format(
            current_project['id'], sniffer_id))
@routes.route(
    '/project/<uuid:project_id>/tools/http-sniffer/<uuid:sniffer_id>/edit',
    methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def http_sniffer_edit_form(project_id, current_project, current_user,
                           sniffer_id):
    """Update an HTTP sniffer's settings or clear its captured requests."""
    # check if sniffer in project
    found = db.select_http_sniffer_by_id(str(sniffer_id))
    if not found or found[0]['project_id'] != current_project['id']:
        return redirect(
            '/project/{}/tools/http-sniffer/'.format(current_project['id']))
    sniffer = found[0]
    form = EditHTTPSniffer()
    form.validate()
    # flatten WTForms per-field errors into one flat list
    errors = [err for field_errors in form.errors.values()
              for err in field_errors]
    if not errors:
        # the submit button's value selects the action
        if form.submit.data == 'Clear':
            db.delete_http_sniffer_requests(sniffer['id'])
        elif form.submit.data == 'Update':
            db.update_http_sniffer(sniffer['id'],
                                   form.status.data,
                                   form.location.data,
                                   form.body.data,
                                   form.save_credentials.data)
    return redirect(
        '/project/{}/tools/http-sniffer/#/sniffer_{}'.format(
            current_project['id'], sniffer['id']))
@routes.route('/http_sniff/<uuid:sniffer_id>/', defaults={"route_path": ""},
              methods=['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'CONNECT',
                       'OPTIONS', 'TRACE', 'PATCH'])
@csrf.exempt
@routes.route('/http_sniff/<uuid:sniffer_id>/<path:route_path>',
              methods=['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'CONNECT',
                       'OPTIONS', 'TRACE', 'PATCH'])
@csrf.exempt
def http_sniffer_capture_page(sniffer_id, route_path):
    """Catch-all endpoint of an HTTP sniffer.

    Stores every incoming request (start line, headers, body, source IP)
    for the sniffer identified by *sniffer_id*. When the sniffer's
    'save_credentials' flag is set, it additionally mines the request
    for secrets: a 'Token' header, any 'Authorization' scheme
    (Bearer/Token, Basic, Digest, NTLM, unknown) and common
    login/password POST parameters. Responds with the body/status (and
    optional Location) configured for the sniffer.
    """
    current_sniffer = db.select_http_sniffer_by_id(str(sniffer_id))
    if not current_sniffer:
        return redirect('/')
    current_sniffer = current_sniffer[0]
    http_start_header = '''{} {} {}'''.format(request.method,
                                              request.environ['RAW_URI'],
                                              request.environ[
                                                  'SERVER_PROTOCOL'])
    http_headers = str(request.headers)
    data = request.get_data().decode('charmap')
    ip = request.remote_addr
    if current_sniffer['save_credentials']:
        data_headers = http_headers.replace('\r', '')
        # worker of headers: only the part before the first blank line.
        # (The previous dead computation of a post_data variable tested
        # '\n\n' in an empty string and could never run; POST parameters
        # are taken from request.form below instead.)
        for header_str in data_headers.split('\n\n')[0].split('\n'):
            header_name = header_str.split(':')[0].strip().lower()
            header_data = ''
            if ':' in header_str:
                # split only once so header values containing ':' survive
                header_data = header_str.split(':', 1)[1].strip()
            if header_data:
                # token header
                if header_name == 'token':
                    db.insert_new_cred('',
                                       '',
                                       'other',
                                       header_data,
                                       '"Token" header',
                                       "HTTP sniffer, source ip: {}".format(ip),
                                       {},
                                       '',
                                       current_sniffer['project_id']
                                       )
                if header_name == 'authorization':
                    auth_type = header_data.split(' ')[0].lower()
                    auth_data = ''
                    if ' ' in header_data:
                        auth_data = ' '.join(header_data.split(' ')[1:]).strip()
                    if auth_data:
                        if auth_type in ['bearer', 'token']:
                            db.insert_new_cred('',
                                               '',
                                               'other',
                                               auth_data,
                                               '"Authorization" bearer token',
                                               "HTTP sniffer, source ip: {}".format(ip),
                                               {},
                                               '',
                                               current_sniffer['project_id']
                                               )
                        elif auth_type == 'basic':
                            # best-effort decode; malformed base64 is ignored
                            try:
                                decoded = base64.b64decode(auth_data.encode('charmap')).decode('charmap')
                                login = decoded.split(':')[0]
                                password = ''
                                if ':' in decoded:
                                    password = ':'.join(decoded.split(':')[1:])
                                if login or password:
                                    db.insert_new_cred(login,
                                                       '',
                                                       'other',
                                                       password,
                                                       '"Authorization" basic header',
                                                       "HTTP sniffer, source ip: {}".format(ip),
                                                       {},
                                                       '',
                                                       current_sniffer['project_id']
                                                       )
                            except Exception:
                                pass
                        elif auth_type == 'digest':
                            username = ""
                            if 'username="' in auth_data:
                                username = auth_data.split('username="')[1].split('"')[0]
                            if "username='" in auth_data:
                                username = auth_data.split("username='")[1].split("'")[0]
                            db.insert_new_cred(username,
                                               '',
                                               'other',
                                               auth_data,
                                               '"Authorization" digest header',
                                               "HTTP sniffer, source ip: {}".format(ip),
                                               {},
                                               '',
                                               current_sniffer['project_id']
                                               )
                        elif auth_type == 'ntlm':
                            # BUGFIX: the NTLM and unknown-scheme descriptions
                            # were swapped in the original implementation
                            db.insert_new_cred('',
                                               '',
                                               'other',
                                               auth_data,
                                               '"Authorization" NTLM header',
                                               "HTTP sniffer, source ip: {}".format(ip),
                                               {},
                                               '',
                                               current_sniffer['project_id']
                                               )
                        else:
                            db.insert_new_cred('',
                                               '',
                                               'other',
                                               auth_data,
                                               '"Authorization" unknown header',
                                               "HTTP sniffer, source ip: {}".format(ip),
                                               {},
                                               '',
                                               current_sniffer['project_id']
                                               )
        # worker of post data: detect typical login/password parameters
        login = ''
        password = ''
        password_name = ''
        for param_name in list(request.form):
            param_value = request.form[param_name]
            if param_name.lower() in ['pwd', 'pass', 'password', 'key', 'keyphrase', 'secret', 'token']:
                password = param_value
                password_name = param_name
            elif param_name.lower() in ['login', 'user', 'username', 'email', 'id']:
                login = param_value
        if password_name:
            db.insert_new_cred(login,
                               '',
                               'other',
                               password,
                               'POST data "{}" parameter'.format(password_name),
                               "HTTP sniffer, source ip: {}".format(ip),
                               {},
                               '',
                               current_sniffer['project_id']
                               )
    # store the raw request with a millisecond timestamp
    current_time = int(time.time() * 1000)
    full_request_str = '''{}\n{}{}'''.format(http_start_header, http_headers,
                                             data)
    db.insert_new_http_sniffer_package(current_sniffer['id'], current_time,
                                       ip, full_request_str)
    if current_sniffer['location']:
        return current_sniffer['body'], current_sniffer['status'], {
            'Content-Location': current_sniffer['location'],
            'Location': current_sniffer['location'],
            'Content-Type': 'text/plain'}
    else:
        return current_sniffer['body'], current_sniffer['status'], \
               {'Content-Type': 'text/plain'}
@routes.route(
    '/project/<uuid:project_id>/tools/http-sniffer/<uuid:sniffer_id>/delete',
    methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def http_sniffer_delete_form(project_id, current_project, current_user,
                             sniffer_id):
    """Delete an HTTP sniffer that belongs to the current project."""
    overview_url = '/project/{}/tools/http-sniffer/'.format(
        current_project['id'])
    found = db.select_http_sniffer_by_id(str(sniffer_id))
    # only delete sniffers that exist and belong to this project
    if found and found[0]['project_id'] == current_project['id']:
        db.safe_delete_http_sniffer(found[0]['id'])
    return redirect(overview_url)
@routes.route('/project/<uuid:project_id>/tools/ipwhois/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def ipwhois_page(project_id, current_project, current_user):
    """Render the IP-whois scanner tab for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'IPWhois',
    }
    return render_template('project/tools/scanners/ipwhois.html', **context)
def _ipwhois_save_asn_network(target, current_project, current_user, errors,
                              bad_ip_message):
    """RDAP-whois *target* and insert/update its ASN network row.

    Looks up the ASN CIDR for *target* via ipwhois, then either inserts
    a new project network or updates the existing one (preserving its
    internal_ip/cmd/access_from/name fields). On failure a human-readable
    message is appended to *errors*: 'ASN does not exist!' for targets
    without an ASN, 'IP was defined in standards' for reserved addresses,
    and *bad_ip_message* for ValueError (malformed input).

    Returns True only when a network row was inserted or updated.
    """
    try:
        ip_data = ipwhois.IPWhois(target).lookup_rdap()
        asn_num = ip_data["asn"]
        if asn_num == 'NA':
            errors.append('ASN does not exist!')
            return False
        asn_cidr = ip_data["asn_cidr"]
        gateway = asn_cidr.split('/')[0]
        mask = int(asn_cidr.split('/')[1])
        full_description = "Country: {}\nDate: {}\nDescription: {}".format(
            ip_data["asn_country_code"],
            ip_data['asn_date'],
            ip_data["asn_description"])
        is_ipv6 = ip_data["network"]["ip_version"] == 'v6'
        # check if exist
        existing = db.select_network_by_ip(current_project['id'],
                                           gateway,
                                           mask,
                                           ipv6=is_ipv6)
        if not existing:
            db.insert_new_network(gateway, mask, asn_num,
                                  full_description,
                                  current_project['id'],
                                  current_user['id'],
                                  is_ipv6)
        else:
            db.update_network(existing[0]['id'], current_project['id'],
                              gateway, mask, asn_num,
                              full_description, is_ipv6,
                              existing[0]['internal_ip'],
                              existing[0]['cmd'],
                              json.loads(existing[0]['access_from']),
                              existing[0]['name'])
        return True
    except ipwhois.IPDefinedError:
        errors.append('IP was defined in standards')
    except ValueError:
        errors.append(bad_ip_message)
    return False


@routes.route('/project/<uuid:project_id>/tools/ipwhois/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def ipwhois_page_form(project_id, current_project, current_user):
    """Whois-scan submitted targets and record their ASN networks.

    Three inputs are processed: a single IP (on success, redirects
    straight to the networks page), a list of hosts, and a list of
    networks. The previously triplicated lookup/insert logic is shared
    via _ipwhois_save_asn_network; the branches differ only in their
    ValueError message and the single-IP success redirect.
    """
    form = IPWhoisForm()
    form.validate()
    errors = []
    # flatten WTForms per-field errors into one flat list for the template
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if form.ip.data:
        if _ipwhois_save_asn_network(form.ip.data, current_project,
                                     current_user, errors,
                                     'IP was defined in standards'):
            return redirect(
                '/project/{}/networks/'.format(current_project['id']))
    if form.hosts.data:
        for host in form.hosts.data:
            _ipwhois_save_asn_network(host, current_project, current_user,
                                      errors, 'IP was defined in standards')
    if form.networks.data:
        for network_target in form.networks.data:
            _ipwhois_save_asn_network(network_target, current_project,
                                      current_user, errors,
                                      'Wrong ip format')
    return render_template('project/tools/scanners/ipwhois.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='IPWhois')
@routes.route('/project/<uuid:project_id>/tools/shodan/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def shodan_page(project_id, current_project, current_user):
    """Render the Shodan scanner tool page for the current project."""
    template_context = {
        'current_project': current_project,
        'tab_name': 'Shodan',
    }
    return render_template('project/tools/scanners/shodan.html',
                           **template_context)
@routes.route('/project/<uuid:project_id>/tools/shodan/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def shodan_page_form(project_id, current_project, current_user):
    """Import Shodan lookup results into the current project.

    Input modes (first non-empty field wins):
      * form.ip       - single IP lookup;
      * form.hosts    - comma-separated IPs, one lookup each (1.1 s delay);
      * form.networks - comma-separated project network UUIDs; each
                        network's ASN is searched on Shodan (up to 1000
                        matches).
    For every result the function upserts (optionally) the announcing
    network, then the host, its hostnames, open ports and CVE-based issues.
    """
    form = ShodanForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    # api_key: taken from the form, or from a stored personal/team config
    # entry named 'shodan' when an api_id is supplied.
    shodan_api_key = form.api_key.data
    if form.api_id.data and is_valid_uuid(form.api_id.data):
        users_configs = db.select_configs(team_id='0',
                                          user_id=current_user['id'],
                                          name='shodan')
        for team in db.select_user_teams(current_user['id']):
            users_configs += db.select_configs(team_id=team['id'],
                                               user_id='0',
                                               name='shodan')
        for config in users_configs:
            if config['id'] == form.api_id.data:
                shodan_api_key = config['data']
        if not shodan_api_key:
            errors.append('Key not found!')
    shodan_api = shodan.Shodan(shodan_api_key)
    # checker: validate the API key with a throwaway lookup
    try:
        shodan_api.host('8.8.8.8')
    except shodan.exception.APIError:
        errors.append('Wrong API Shodan key!')
    if not errors:
        if form.ip.data:
            try:
                shodan_json = shodan_api.host(form.ip.data)
                asn = int(shodan_json['asn'].replace('AS', ''))
                os_info = shodan_json['os']
                ip = shodan_json['ip_str']
                ip_version = IP(ip).version()
                asn_info = shodan_json['isp']
                coords = ''
                if 'latitude' in shodan_json:
                    coords = "lat {} long {}".format(shodan_json['latitude'],
                                                     shodan_json['longitude'])
                country = ''
                if 'country_name' in shodan_json:
                    country = shodan_json['country_name']
                city = ''
                if 'city' in shodan_json:
                    city = shodan_json['city']
                organization = shodan_json['org']
                if form.need_network.data:
                    # create network: find the ASN's announced CIDR that
                    # contains this IP and upsert it as a project network.
                    # (ipwhois.asn.ASNOrigin requires a Net object; 8.8.8.8
                    # is only a placeholder.)
                    net_tmp = ipwhois.net.Net('8.8.8.8')
                    asn_tmp = ipwhois.asn.ASNOrigin(net_tmp)
                    asn_full_data = asn_tmp.lookup(asn='AS{}'.format(asn))
                    for network in asn_full_data['nets']:
                        if ipaddress.ip_address(ip) in \
                                ipaddress.ip_network(network['cidr'], False):
                            cidr = network['cidr']
                            net_ip = cidr.split('/')[0]
                            net_mask = int(cidr.split('/')[1])
                            net_descr = network['description']
                            net_maintain = network['maintainer']
                            full_network_description = 'ASN info: {}\nCountry: {}\nCity: {}\nCoords: {}\nDescription: {}\nMaintainer: {}'.format(
                                asn_info, country, city,
                                coords, net_descr, net_maintain)
                            network_id = db.select_network_by_ip(
                                current_project['id'], net_ip, net_mask,
                                ip_version == 6)
                            if not network_id:
                                network_id = db.insert_new_network(net_ip,
                                                                   net_mask,
                                                                   asn,
                                                                   full_network_description,
                                                                   current_project['id'],
                                                                   current_user['id'],
                                                                   ip_version == 6)
                            else:
                                # fix: keep the selected row before
                                # overwriting network_id - the original code
                                # re-read network_id[0] after the
                                # reassignment, raising TypeError on every
                                # update of an existing network.
                                network_row = network_id[0]
                                network_id = network_row['id']
                                db.update_network(network_id, current_project['id'], net_ip, net_mask,
                                                  asn, full_network_description, ip_version == 6,
                                                  network_row['internal_ip'], network_row['cmd'],
                                                  json.loads(network_row['access_from']), network_row['name'])
                # create host
                full_host_description = "Country: {}\nCity: {}\nOrganization: {}".format(
                    country, city, organization)
                # hostnames = shodan_json["hostnames"]
                host_id = db.select_project_host_by_ip(
                    current_project['id'],
                    ip)
                if host_id:
                    host_id = host_id[0]['id']
                    db.update_host_description(host_id,
                                               full_host_description)
                else:
                    host_id = db.insert_host(current_project['id'],
                                             ip,
                                             current_user['id'],
                                             full_host_description)
                # add hostnames
                for hostname in shodan_json["hostnames"]:
                    hostname_obj = db.select_ip_hostname(host_id, hostname)
                    if not hostname_obj:
                        hostname_id = db.insert_hostname(host_id,
                                                         hostname,
                                                         'Added from Shodan',
                                                         current_user['id'])
                # add ports with cve
                for port in shodan_json['data']:
                    product = ''
                    if 'product' in port:
                        product = port['product']
                    is_tcp = (port['transport'] == 'tcp')
                    port_num = int(port['port'])
                    port_info = ''
                    protocol = port['_shodan']["module"]
                    if 'info' in port:
                        port_info = port['info']
                    full_port_info = "Product: {}\nInfo: {}".format(
                        product,
                        port_info
                    )
                    port_id = db.select_ip_port(host_id, port_num,
                                                is_tcp=is_tcp)
                    if port_id:
                        port_id = port_id[0]['id']
                        db.update_port_proto_description(port_id,
                                                         protocol,
                                                         full_port_info)
                    else:
                        port_id = db.insert_host_port(host_id, port_num,
                                                      is_tcp,
                                                      protocol,
                                                      full_port_info,
                                                      current_user['id'],
                                                      current_project['id'])
                    # add vulnerabilities
                    if "vulns" in port:
                        vulns = port['vulns']
                        for cve in vulns:
                            cvss = vulns[cve]['cvss']
                            summary = vulns[cve]['summary']
                            services = {port_id: ["0"]}
                            issue_id = db.insert_new_issue(cve, summary, '',
                                                           cvss,
                                                           current_user['id'],
                                                           services,
                                                           'need to check',
                                                           current_project['id'],
                                                           cve=cve)
            except shodan.exception.APIError as e:
                errors.append(e)
            except ValueError:
                errors.append('Wrong ip!')
        elif form.hosts.data:
            for host in form.hosts.data.split(','):
                try:
                    shodan_json = shodan_api.host(host)
                    asn = int(shodan_json['asn'].replace('AS', ''))
                    os_info = shodan_json['os']
                    ip = shodan_json['ip_str']
                    ip_version = IP(ip).version()
                    asn_info = shodan_json['isp']
                    coords = ''
                    if 'latitude' in shodan_json:
                        coords = "lat {} long {}".format(
                            shodan_json['latitude'],
                            shodan_json['longitude'])
                    country = ''
                    if 'country_name' in shodan_json:
                        country = shodan_json['country_name']
                    city = ''
                    if 'city' in shodan_json:
                        city = shodan_json['city']
                    organization = shodan_json['org']
                    if form.need_network.data:
                        # create network (same logic as the single-IP branch)
                        net_tmp = ipwhois.net.Net('8.8.8.8')
                        asn_tmp = ipwhois.asn.ASNOrigin(net_tmp)
                        asn_full_data = asn_tmp.lookup(asn='AS{}'.format(asn))
                        for network in asn_full_data['nets']:
                            if ipaddress.ip_address(ip) in \
                                    ipaddress.ip_network(network['cidr'],
                                                         False):
                                cidr = network['cidr']
                                net_ip = cidr.split('/')[0]
                                net_mask = int(cidr.split('/')[1])
                                net_descr = network['description']
                                net_maintain = network['maintainer']
                                full_network_description = 'ASN info: {}\nCountry: {}\nCity: {}\nCoords: {}\nDescription: {}\nMaintainer: {}'.format(
                                    asn_info, country, city,
                                    coords, net_descr, net_maintain)
                                network_id = db.select_network_by_ip(
                                    current_project['id'], net_ip, net_mask,
                                    ip_version == 6)
                                if not network_id:
                                    network_id = db.insert_new_network(net_ip,
                                                                       net_mask,
                                                                       asn,
                                                                       full_network_description,
                                                                       current_project['id'],
                                                                       current_user['id'],
                                                                       ip_version == 6)
                                else:
                                    # fix: see the single-IP branch - keep the
                                    # row before overwriting network_id.
                                    network_row = network_id[0]
                                    network_id = network_row['id']
                                    db.update_network(network_id, current_project['id'], net_ip, net_mask,
                                                      asn, full_network_description, ip_version == 6,
                                                      network_row['internal_ip'], network_row['cmd'],
                                                      json.loads(network_row['access_from']), network_row['name'])
                    # create host
                    # fix: the format string has four placeholders but
                    # os_info was missing from the arguments, so str.format
                    # raised IndexError (cf. the networks branch below).
                    full_host_description = "Country: {}\nCity: {}\nOS: {}\nOrganization: {}".format(
                        country, city, os_info, organization)
                    # hostnames = shodan_json["hostnames"]
                    host_id = db.select_project_host_by_ip(
                        current_project['id'],
                        ip)
                    if host_id:
                        host_id = host_id[0]['id']
                        db.update_host_description(host_id,
                                                   full_host_description)
                    else:
                        host_id = db.insert_host(current_project['id'],
                                                 ip,
                                                 current_user['id'],
                                                 full_host_description)
                    if os_info:
                        db.update_host_os(host_id, os_info)
                    # add hostnames
                    for hostname in shodan_json["hostnames"]:
                        hostname_obj = db.select_ip_hostname(host_id, hostname)
                        if not hostname_obj:
                            hostname_id = db.insert_hostname(host_id,
                                                             hostname,
                                                             'Added from Shodan',
                                                             current_user['id'])
                    # add ports with cve
                    for port in shodan_json['data']:
                        product = ''
                        if 'product' in port:
                            product = port['product']
                        is_tcp = (port['transport'] == 'tcp')
                        port_num = int(port['port'])
                        port_info = ''
                        protocol = port['_shodan']["module"]
                        if 'info' in port:
                            port_info = port['info']
                        full_port_info = "Product: {}\nInfo: {}".format(
                            product,
                            port_info
                        )
                        port_id = db.select_ip_port(host_id, port_num,
                                                    is_tcp=is_tcp)
                        if port_id:
                            port_id = port_id[0]['id']
                            db.update_port_proto_description(port_id,
                                                             protocol,
                                                             full_port_info)
                        else:
                            port_id = db.insert_host_port(host_id, port_num,
                                                          is_tcp,
                                                          protocol,
                                                          full_port_info,
                                                          current_user['id'],
                                                          current_project['id'])
                        # add vulnerabilities
                        if "vulns" in port:
                            vulns = port['vulns']
                            for cve in vulns:
                                cvss = vulns[cve]['cvss']
                                summary = vulns[cve]['summary']
                                services = {port_id: ["0"]}
                                issue_id = db.insert_new_issue(cve, summary, '',
                                                               cvss,
                                                               current_user['id'],
                                                               services,
                                                               'need to check',
                                                               current_project['id'],
                                                               cve=cve)
                except shodan.exception.APIError as e:
                    errors.append(e)
                except ValueError:
                    errors.append('Wrong ip!')
                time.sleep(1.1)  # shodan delay
        elif form.networks.data:
            for network_id in form.networks.data.split(','):
                if is_valid_uuid(network_id):
                    current_network = db.select_network(network_id)
                    if current_network and current_network[0]['asn'] and \
                            current_network[0]['asn'] > 0:
                        asn = int(current_network[0]['asn'])
                        result = shodan_api.search('asn:AS{}'.format(asn),
                                                   limit=1000)
                        for shodan_json in result['matches']:
                            try:
                                os_info = shodan_json['os']
                                ip = shodan_json['ip_str']
                                ip_version = IP(ip).version()
                                asn_info = shodan_json['isp']
                                coords = ''
                                if 'latitude' in shodan_json:
                                    coords = "lat {} long {}".format(
                                        shodan_json['latitude'],
                                        shodan_json['longitude'])
                                country = ''
                                if 'country_name' in shodan_json:
                                    country = shodan_json['country_name']
                                city = ''
                                if 'city' in shodan_json:
                                    city = shodan_json['city']
                                organization = shodan_json['org']
                                if form.need_network.data:
                                    # create network (same logic as above)
                                    net_tmp = ipwhois.net.Net('8.8.8.8')
                                    asn_tmp = ipwhois.asn.ASNOrigin(net_tmp)
                                    asn_full_data = asn_tmp.lookup(
                                        asn='AS{}'.format(asn))
                                    for network in asn_full_data['nets']:
                                        if ipaddress.ip_address(ip) in \
                                                ipaddress.ip_network(
                                                    network['cidr'],
                                                    False):
                                            cidr = network['cidr']
                                            net_ip = cidr.split('/')[0]
                                            net_mask = int(cidr.split('/')[1])
                                            net_descr = network['description']
                                            net_maintain = network['maintainer']
                                            full_network_description = 'ASN info: {}\nCountry: {}\nCity: {}\nCoords: {}\nDescription: {}\nMaintainer: {}'.format(
                                                asn_info, country, city,
                                                coords, net_descr, net_maintain)
                                            network_id = db.select_network_by_ip(
                                                current_project['id'], net_ip,
                                                net_mask,
                                                ip_version == 6)
                                            if not network_id:
                                                network_id = db.insert_new_network(
                                                    net_ip,
                                                    net_mask,
                                                    asn,
                                                    full_network_description,
                                                    current_project['id'],
                                                    current_user['id'],
                                                    ip_version == 6)
                                            else:
                                                # fix: keep the row before
                                                # overwriting network_id.
                                                network_row = network_id[0]
                                                network_id = network_row['id']
                                                db.update_network(network_id,
                                                                  current_project['id'],
                                                                  net_ip,
                                                                  net_mask,
                                                                  asn,
                                                                  full_network_description,
                                                                  ip_version == 6,
                                                                  network_row['internal_ip'],
                                                                  network_row['cmd'],
                                                                  json.loads(network_row['access_from']),
                                                                  network_row['name'])
                                # create host
                                full_host_description = "Country: {}\nCity: {}\nOS: {}\nOrganization: {}".format(
                                    country, city, os_info, organization)
                                # hostnames = shodan_json["hostnames"]
                                host_id = db.select_project_host_by_ip(
                                    current_project['id'],
                                    ip)
                                if host_id:
                                    host_id = host_id[0]['id']
                                    db.update_host_description(host_id,
                                                               full_host_description)
                                else:
                                    host_id = db.insert_host(
                                        current_project['id'],
                                        ip,
                                        current_user['id'],
                                        full_host_description)
                                # add hostnames
                                for hostname in shodan_json["hostnames"]:
                                    hostname_obj = db.select_ip_hostname(
                                        host_id, hostname)
                                    if not hostname_obj:
                                        hostname_id = db.insert_hostname(host_id,
                                                                         hostname,
                                                                         'Added from Shodan',
                                                                         current_user['id'])
                                # add ports with cve (each search match
                                # carries a single port at the top level)
                                port_num = int(shodan_json['port'])
                                product = ''
                                if 'product' in shodan_json:
                                    product = shodan_json['product']
                                is_tcp = int(shodan_json['transport'] == 'tcp')
                                port_info = ''
                                protocol = shodan_json['_shodan']["module"]
                                if 'info' in shodan_json:
                                    port_info = shodan_json['info']
                                full_port_info = "Product: {}\nInfo: {}".format(
                                    product,
                                    port_info
                                )
                                port_id = db.select_ip_port(host_id,
                                                            port_num,
                                                            is_tcp=is_tcp)
                                if port_id:
                                    port_id = port_id[0]['id']
                                    db.update_port_proto_description(
                                        port_id,
                                        protocol,
                                        full_port_info)
                                else:
                                    port_id = db.insert_host_port(host_id,
                                                                  port_num,
                                                                  is_tcp,
                                                                  protocol,
                                                                  full_port_info,
                                                                  current_user['id'],
                                                                  current_project['id'])
                                # add vulnerabilities
                                if "vulns" in shodan_json:
                                    vulns = shodan_json['vulns']
                                    for cve in vulns:
                                        cvss = vulns[cve]['cvss']
                                        summary = vulns[cve]['summary']
                                        services = {port_id: ["0"]}
                                        issue_id = db.insert_new_issue(cve,
                                                                       summary,
                                                                       '',
                                                                       cvss,
                                                                       current_user['id'],
                                                                       services,
                                                                       'need to check',
                                                                       current_project['id'],
                                                                       cve=cve)
                            except shodan.exception.APIError as e:
                                pass  # a lot of errors
                            except ValueError:
                                pass  # a lot of errors
                        time.sleep(1.1)  # shodan delay
    return render_template('project/tools/scanners/shodan.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='Shodan')
@routes.route('/project/<uuid:project_id>/tools/checkmarx/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def checkmarx_page(project_id, current_project, current_user):
    """Render the Checkmarx report import page."""
    template_context = {
        'current_project': current_project,
        'tab_name': 'Checkmarx',
    }
    return render_template('project/tools/import/checkmarx.html',
                           **template_context)
@routes.route('/project/<uuid:project_id>/tools/checkmarx/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def checkmarx_page_form(project_id, current_project, current_user):
    """Import Checkmarx XML scan report(s) as project issues.

    One issue is created per <Result> of every <Query>, with the taint
    paths rendered into the description and the severity mapped onto a
    fixed CVSS score (High=9.5, Medium=8.0, Low=2.0, else 0).
    """
    form = CheckmaxForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # xml files
        for file in form.xml_files.data:
            if file.filename:
                # html.parser lower-cases tag names, hence "query"/"result"
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser")
                query_list = scan_result.find_all("query")
                for query in query_list:
                    # strip the " Version:NN" suffix from the query path leaf
                    vulnerability_name = re.sub(' Version:[0-9]+', '', query.attrs['querypath'].split('\\')[-1])
                    language = query.attrs['language']
                    cwe = query.attrs['cweid']
                    vuln_array = query.find_all("result")
                    for vuln_example in vuln_array:
                        criticality = vuln_example.attrs['severity']  # High
                        filename = vuln_example.attrs['filename']
                        path_find = vuln_example.find_all("path")
                        paths_str_arrays = []
                        # render every taint path into a readable text block
                        for path_obj in path_find:
                            paths_str = ''
                            # NOTE(review): this collects the pathnodes of the
                            # whole <result>, not of path_obj - with several
                            # <path> elements every path would repeat all
                            # nodes; likely should be
                            # path_obj.find_all("pathnode"). Confirm with a
                            # multi-path report before changing.
                            path_nodes = vuln_example.find_all("pathnode")
                            if path_nodes:
                                paths_str = '########## Path {} ###########\n'.format(path_find.index(path_obj) + 1)
                            for path_node in path_nodes:
                                filename = path_node.find_all("filename")[0].text
                                line_num = int(path_node.find_all("line")[0].text)
                                colum_num = int(path_node.find_all("column")[0].text)
                                code_arr = path_node.find_all("code")
                                node_str = 'Filename: {}\nLine: {} Column: {}'.format(filename, line_num, colum_num)
                                for code in code_arr:
                                    node_str += '\n' + code.text.strip(' \t')
                                paths_str += node_str + '\n\n'
                            if paths_str:
                                paths_str_arrays.append(paths_str + '\n\n')
                        all_paths_str = '\n'.join(paths_str_arrays)
                        # project convention: fixed CVSS per severity bucket
                        if criticality == 'High':
                            cvss = 9.5
                        elif criticality == 'Medium':
                            cvss = 8.0
                        elif criticality == 'Low':
                            cvss = 2.0
                        else:
                            cvss = 0
                        issue_id = db.insert_new_issue(vulnerability_name,
                                                       'Language: {}\n'.format(language) + all_paths_str, filename,
                                                       cvss, current_user['id'],
                                                       {}, 'need to check', current_project['id'], cwe=cwe,
                                                       issue_type='custom')
    return render_template('project/tools/import/checkmarx.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='Checkmarx')
@routes.route('/project/<uuid:project_id>/tools/depcheck/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def depcheck_page(project_id, current_project, current_user):
    """Render the OWASP Dependency-Check report import page."""
    template_context = {
        'current_project': current_project,
        'tab_name': 'DepCheck',
    }
    return render_template('project/tools/import/depcheck.html',
                           **template_context)
@routes.route('/project/<uuid:project_id>/tools/depcheck/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def depcheck_page_form(project_id, current_project, current_user):
    """Import OWASP Dependency-Check XML report(s) as project issues.

    For every <dependency> one issue is created per <vulnerability>,
    carrying CVE/CWE ids, the best available CVSS score (v3 base, then the
    legacy "cvssScore", then v2) and the list of vulnerable software
    version ranges.
    """
    form = Depcheck()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        for file in form.xml_files.data:
            if file.filename:
                # html.parser lower-cases all tag names
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser")
                query_list = scan_result.find_all("dependency")
                for query in query_list:
                    filename = query.find("filename").text
                    filepath = query.find("filepath").text
                    vuln_array = query.find_all("vulnerability")
                    for vuln_example in vuln_array:
                        name = vuln_example.find('name').text
                        cve = ''
                        if name.startswith('CVE'):
                            cve = name
                        # prefer CVSSv3 base, then legacy score, then CVSSv2
                        cvss_obj = vuln_example.find('cvssv3')
                        if cvss_obj:
                            cvss = float(cvss_obj.find('basescore').text)
                        elif vuln_example.find('cvssscore'):
                            cvss = float(vuln_example.find('cvssscore').text)
                        elif vuln_example.find('cvssv2'):
                            cvss = float(vuln_example.find('cvssv2').find('score').text)
                        else:
                            cvss = 0
                        cwes = vuln_example.find_all("cwe")
                        cwe = 0
                        if cwes:
                            cwe = int(cwes[0].text.replace('CWE-', '').split(' ')[0])
                        description = vuln_example.find('description').text
                        soft_search = vuln_example.find_all("software")
                        software_arr = []
                        # build "product versions (range)" strings from the
                        # versionStart/End(In|Ex)cluding attributes
                        for path_obj in soft_search:
                            s = str(path_obj.text)
                            versions = ''
                            if 'versionstartincluding' in path_obj.attrs:
                                versions += str(path_obj.attrs['versionstartincluding']) + '<=x'
                            if 'versionstartexcluding' in path_obj.attrs:
                                # fix: read the same key that was tested (the
                                # original read 'versionendexcluding' here - a
                                # KeyError / wrong-value copy-paste bug).
                                versions += str(path_obj.attrs['versionstartexcluding']) + '<x'
                            if 'versionendincluding' in path_obj.attrs:
                                versions += '<=' + str(path_obj.attrs['versionendincluding'])
                            if 'versionendexcluding' in path_obj.attrs:
                                versions += '<' + str(path_obj.attrs['versionendexcluding'])
                            if versions:
                                s += ' versions ({})'.format(versions)
                            software_arr.append(s)
                        all_software_str = '\n\n'.join(software_arr)
                        full_description = 'File: ' + filepath + '\n\n' + description \
                                           + '\n\nVulnerable versions: \n' + all_software_str
                        # NOTE(review): services is passed as the string '{}'
                        # here while sibling importers pass a dict - confirm
                        # against db.insert_new_issue before changing.
                        issue_id = db.insert_new_issue(name, full_description, filepath, cvss, current_user['id'],
                                                       '{}', 'need to recheck', current_project['id'], cve, cwe,
                                                       'custom', '', filename)
    return render_template('project/tools/import/depcheck.html',
                           current_project=current_project,
                           tab_name='DepCheck',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/openvas/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def openvas_page(project_id, current_project, current_user):
    """Render the OpenVAS report import page."""
    template_context = {
        'current_project': current_project,
        'tab_name': 'OpenVAS',
    }
    return render_template('project/tools/import/openvas.html',
                           **template_context)
@routes.route('/project/<uuid:project_id>/tools/openvas/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def openvas_page_form(project_id, current_project, current_user):
    """Import OpenVAS/GVM XML report(s) as project issues.

    For every <result> that has a <host> child, the referenced host, port
    and hostname are upserted, and one deduplicated issue is created with
    CVE references, links and the NVT solution (unless the solution is
    typed WillNotFix).
    """
    form = Openvas()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        for file in form.xml_files.data:
            if file.filename:
                # html.parser lower-cases all tag names
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser")
                query_list = scan_result.find_all("result")
                for query in query_list:
                    if query.find('host'):  # disables result tags inside issue description
                        issue_host = query.find('host').text.split('\n')[0]
                        issue_hostname = query.find('host').find('hostname').text
                        # "general/tcp"-style results map to port 0
                        port_str = query.find('port').text.split('/')[0]
                        if port_str == 'general':
                            issue_port = 0
                        else:
                            issue_port = int(port_str)
                        issue_is_tcp = int(query.find('port').text.split('/')[1] == 'tcp')
                        nvt_obj = query.find('nvt')
                        issue_name = nvt_obj.find('name').text
                        issue_type = nvt_obj.find('family').text
                        issue_cvss = float(nvt_obj.find('cvss_base').text)
                        issue_long_description = nvt_obj.find('tags').text
                        # NOTE(review): assumes <solution> is always present;
                        # a missing tag would raise AttributeError on .get -
                        # confirm against real reports.
                        solution_obj = nvt_obj.find('solution')
                        issue_solution = ''
                        if solution_obj.get('type') != 'WillNotFix':
                            issue_solution = solution_obj.text
                        # collect URL and CVE references from <refs>
                        cve_list = []
                        links_list = []
                        refs_objects = nvt_obj.find('refs')
                        if refs_objects:
                            refs_objects = refs_objects.findAll('ref')
                            for ref_obj in refs_objects:
                                if ref_obj.get('type') == 'url':
                                    links_list.append(ref_obj.get('id'))
                                if ref_obj.get('type') == 'cve':
                                    cve_list.append(ref_obj.get('id'))
                        issue_short_description = ''
                        if query.find('description'):
                            issue_short_description = query.find('description').text
                        # check if host exists
                        host_id = db.select_project_host_by_ip(current_project['id'], issue_host)
                        if not host_id:
                            host_id = db.insert_host(current_project['id'], issue_host,
                                                     current_user['id'], form.hosts_description.data)
                        else:
                            host_id = host_id[0]['id']
                        # check if port exists
                        port_id = db.select_host_port(host_id, issue_port, issue_is_tcp)
                        if not port_id:
                            port_id = db.insert_host_port(host_id, issue_port, issue_is_tcp, 'unknown', form.ports_description.data,
                                                          current_user['id'], current_project['id'])
                        else:
                            port_id = port_id[0]['id']
                        # check if hostname exists
                        hostname_id = ''
                        if issue_hostname != '':
                            hostname_id = db.select_ip_hostname(host_id, issue_hostname)
                            if not hostname_id:
                                hostname_id = db.insert_hostname(host_id, issue_hostname,
                                                                 form.hostnames_description.data, current_user['id'])
                            else:
                                hostname_id = hostname_id[0]['id']
                        full_description = 'Short description: \n{}\n\nFull description:\n{}'.format(
                            issue_short_description,
                            issue_long_description)
                        cve_str = ','.join(cve_list)
                        if links_list:
                            full_description += '\n\nLinks:\n' + '\n'.join(links_list)
                        # bind the issue to the port (and hostname, if any)
                        services = {
                            port_id: [hostname_id] if hostname_id else ['0']
                        }
                        db.insert_new_issue_no_dublicate(issue_name, full_description, '', issue_cvss, current_user['id'],
                                                         services, 'need to recheck', current_project['id'], cve_str,
                                                         0, 'custom', issue_solution, '')
    return render_template('project/tools/import/openvas.html',
                           current_project=current_project,
                           tab_name='OpenVAS',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/netsparker/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def netsparker_page(project_id, current_project, current_user):
    """Render the Netsparker report import page."""
    template_context = {
        'current_project': current_project,
        'tab_name': 'NetSparker',
    }
    return render_template('project/tools/import/netsparker.html',
                           **template_context)
@routes.route('/project/<uuid:project_id>/tools/netsparker/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def netsparker_page_form(project_id, current_project, current_user):
    """Import Netsparker XML report(s) as project issues.

    Each <vulnerability> is turned into a host/port/hostname (derived from
    its URL), a 'web' issue, and a text PoC built from the raw HTTP
    request/response.
    """
    def beautify_output(xml_str):
        # strip the subset of HTML markup Netsparker embeds in text fields
        if xml_str == ' ': xml_str = ''
        xml_str = xml_str.replace('<p>', '\t').replace('</p>', '\n')
        xml_str = xml_str.replace('<li>', '* ').replace('</li>', '\n')
        xml_str = xml_str.replace('<ol>', '\n').replace('</ol>', '\n')
        xml_str = xml_str.replace('<div>', '').replace('</div>', '\n')
        xml_str = xml_str.replace("<a target='_blank' href='", '').replace("'><i class='icon-external-link'></i>",
                                                                           ' - ')
        xml_str = xml_str.replace('<ul>', '').replace('</ul>', '')
        xml_str = xml_str.replace('</a>', '\n')
        return xml_str

    form = Netsparker()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser")
                query_list = scan_result.find_all("vulnerability")
                for vuln in query_list:
                    is_confirmed = vuln.get('confirmed') == 'True'
                    # fix: read the checkbox value via .data - the raw field
                    # object is always truthy, so "not form.only_confirmed"
                    # was always False and unconfirmed findings were silently
                    # skipped even with the filter disabled.
                    if is_confirmed or (not form.only_confirmed.data):
                        vuln_url = vuln.find('url').text
                        vuln_name = 'Netsparker: ' + vuln.find('type').text
                        vuln_severity = vuln.find('severity').text  # High, Medium, Low, Information, BestPractice
                        vuln_description = beautify_output(vuln.find('description').text)
                        vuln_impact = beautify_output(vuln.find('impact').text)
                        vuln_fix = beautify_output(vuln.find('actionstotake').text)
                        vuln_fix1 = beautify_output(vuln.find('remedy').text)
                        vuln_skills = beautify_output(vuln.find('requiredskillsforexploitation').text)
                        vuln_links = beautify_output(vuln.find('externalreferences').text)
                        vuln_fix1_links = beautify_output(vuln.find('remedyreferences').text)
                        vuln_request = beautify_output(vuln.find('rawrequest').text)
                        vuln_response = beautify_output(vuln.find('rawresponse').text)
                        vuln_poc = beautify_output(vuln.find('proofofconcept').text)
                        vuln_path = ''
                        vuln_args = ''
                        # parse info
                        # NOTE(review): every branch below appends info_name
                        # (the attribute name) rather than the tag's value
                        # (presumably info_obj.text); this looks wrong but is
                        # kept as-is pending a check against a sample report.
                        info_list = vuln.find('extrainformation').findAll('info')
                        for info_obj in info_list:
                            info_name = info_obj.get('name')
                            if info_name == 'ParserAnalyzerEngine_InputName':
                                vuln_args += ', (Input) ' + info_name
                            elif info_name == 'ParserAnalyzerEngine_FormTargetAction':
                                vuln_path = info_name
                            elif info_name == 'ParserAnalyzerEngine_IdentifiedFieldName':
                                vuln_args += ', (Input) ' + info_name
                            elif info_name == 'CookieAnalyzerEngine_IdentifiedCookies':
                                vuln_args += ', (Cookie) ' + info_name
                            elif info_name == 'ExtractedVersion':
                                vuln_description += '\n\nExtracted version: ' + info_name
                            elif info_name == 'IdentifiedErrorMessage':
                                vuln_description += '\n\nError message: \n' + info_name
                            elif info_name == 'ExtractedIPAddresses':
                                vuln_description += '\n\nExtracted IP: ' + info_name
                            elif info_name == 'CustomField_FormAction':
                                vuln_path = info_name
                            elif info_name == 'ParserAnalyzerEngine_ExternalLinks':
                                vuln_description += '\n\nExternal links: \n' + info_name
                            elif info_name == 'ParserAnalyzerEngine_FormName':
                                vuln_args += ', (Form) ' + info_name
                            elif info_name == 'EmailDisclosure_EmailAddresses':
                                vuln_description += '\n\nFound email: ' + info_name
                            elif info_name == 'Options_Allowed_Methods':
                                vuln_description += '\n\nAllowed methods: ' + info_name
                            elif info_name == 'ParserAnalyzerEngine_FormTargetAction':
                                # unreachable: duplicated condition handled above
                                vuln_description = '\n\nInternal path: ' + info_name
                        vuln_cwe = vuln.find('classification').find('cwe').text
                        if not vuln_cwe: vuln_cwe = 0
                        vuln_cvss = 0
                        classification_obj = vuln.find('classification')
                        if classification_obj.find('cvss'):
                            for cvss_obj in classification_obj.find('cvss').findAll('score'):
                                if cvss_obj.find('type').text == 'Base':
                                    vuln_cvss = float(cvss_obj.find('value').text)
                        # parse url
                        splitted_url = urllib.parse.urlsplit(vuln_url)
                        vuln_scheme = splitted_url.scheme
                        if not vuln_scheme:
                            vuln_scheme = 'http'
                        vuln_host_unverified = splitted_url.hostname
                        vuln_path_unverified = splitted_url.path
                        vuln_port = splitted_url.port
                        if not vuln_port:
                            # default port by scheme
                            if vuln_scheme == 'https':
                                vuln_port = 443
                            elif vuln_scheme == 'ftp':
                                vuln_port = 21
                            else:
                                vuln_port = 80
                        vuln_port = int(vuln_port)
                        if not vuln_path:
                            vuln_path = vuln_path_unverified
                        is_ip = False
                        vuln_host = ''
                        vuln_hostname = ''
                        # decide whether the URL host is an IP or a hostname
                        try:
                            vuln_host = str(ipaddress.ip_address(vuln_host_unverified))
                        except ValueError:
                            vuln_hostname = vuln_host_unverified
                        if not vuln_host and vuln_hostname:
                            # best-effort DNS resolution of the hostname
                            try:
                                vuln_host = str(socket.gethostbyname(vuln_host_unverified))
                            except:
                                pass
                        hostname_id = ''
                        port_id = ''
                        host_id = ''
                        if vuln_host:
                            dublicate_host = db.select_project_host_by_ip(current_project['id'], vuln_host)
                            if not dublicate_host:
                                host_id = db.insert_host(current_project['id'],
                                                         vuln_host,
                                                         current_user['id'],
                                                         form.hosts_description.data)
                            else:
                                host_id = dublicate_host[0]['id']
                            # add port
                            dublicate_port = db.select_host_port(host_id, vuln_port, True)
                            if not dublicate_port:
                                port_id = db.insert_host_port(host_id, vuln_port, True,
                                                              vuln_scheme, form.ports_description.data,
                                                              current_user['id'], current_project['id'])
                            else:
                                port_id = dublicate_port[0]['id']
                            # add hostname
                            if vuln_hostname:
                                dublicate_hostname = db.select_ip_hostname(host_id, vuln_hostname)
                                if not dublicate_hostname:
                                    hostname_id = db.insert_hostname(host_id, vuln_hostname,
                                                                     form.hostnames_description.data,
                                                                     current_user['id'])
                                else:
                                    hostname_id = dublicate_hostname[0]['id']
                        # add issue
                        full_description = 'URL: {}\n\nDescription: \n{}\n\n'.format(vuln_url, vuln_description)
                        if vuln_impact:
                            full_description += 'Impact: ' + vuln_impact + '\n\n'
                        if vuln_skills:
                            full_description += 'Skills: ' + vuln_skills + '\n\n'
                        if vuln_poc:
                            full_description += 'PoC: ' + vuln_poc + '\n\n'
                        if vuln_links:
                            full_description += 'Links: \n' + vuln_links + '\n\n'
                        full_fix = 'Actions: ' + vuln_fix + '\n Fix:' + vuln_fix1 + '\n Links: ' + vuln_fix1_links
                        services = {}
                        if hostname_id:
                            services[port_id] = [hostname_id]
                        elif port_id:
                            services[port_id] = ["0"]
                        issue_id = db.insert_new_issue_no_dublicate(vuln_name, full_description,
                                                                    vuln_path, vuln_cvss,
                                                                    current_user['id'],
                                                                    services,
                                                                    'need to recheck',
                                                                    current_project['id'],
                                                                    '', vuln_cwe, 'web', full_fix, vuln_args)
                        # create PoC from the raw HTTP request + response
                        poc_text = vuln_request + vuln_response
                        poc_text = poc_text.replace('\r', '')
                        file_data = b''
                        if config['files']['poc_storage'] == 'database':
                            file_data = poc_text.encode('charmap')
                        poc_id = db.insert_new_poc(port_id if port_id else "0",
                                                   'Added from Netsparker',
                                                   'text',
                                                   'HTTP.txt',
                                                   issue_id,
                                                   current_user['id'],
                                                   hostname_id if hostname_id else '0',
                                                   storage=config['files']['poc_storage'],
                                                   data=file_data)
                        if config['files']['poc_storage'] == 'filesystem':
                            file_path = './static/files/poc/{}'.format(poc_id)
                            file_object = open(file_path, 'w')
                            file_object.write(poc_text)
                            file_object.close()
    return render_template('project/tools/import/netsparker.html',
                           current_project=current_project,
                           tab_name='NetSparker',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/qualys/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def qualys_page(project_id, current_project, current_user):
    """Render the Qualys report import page."""
    template_context = {
        'current_project': current_project,
        'tab_name': 'Qualys',
    }
    return render_template('project/tools/import/qualys.html',
                           **template_context)
@routes.route('/project/<uuid:project_id>/tools/qualys/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def qualys_form(project_id, current_project, current_user):
    """Import Qualys XML report(s): hosts, service ports, and issues from
    both the <VULNS> and <PRACTICES> sections of every <IP> element.
    """
    def beautify_output(xml_str):
        # strip the HTML markup Qualys embeds in text fields
        xml_str = xml_str.replace('<p>', '\t').replace('<P>', '\t')
        xml_str = xml_str.replace('<BR>', '\n').replace('</p>', '\n')
        return xml_str

    form = QualysForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # xml files
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(), "html.parser")
                hosts_list = scan_result.find_all("ip")
                for host in hosts_list:
                    host_id = ''
                    hostname = ''
                    ip = host.attrs['value']
                    tmp_host = db.select_project_host_by_ip(current_project['id'], ip)
                    if tmp_host:
                        host_id = tmp_host[0]['id']
                    if 'name' in host.attrs and ip != host.attrs['name']:
                        hostname = host.attrs['name']
                        # TODO: dont forget to add hostname
                    # fix: read the checkbox value via .data - the raw field
                    # object is always truthy, so hosts were always created
                    # regardless of the form setting.
                    if form.add_empty_host.data and not host_id:
                        host_id = db.insert_host(current_project['id'], ip, current_user['id'], form.hosts_description.data)
                    ports_list = host.find('services')
                    if ports_list:
                        for port_obj in ports_list.findAll('cat'):
                            if 'port' in port_obj.attrs and 'protocol' in port_obj.attrs:
                                if not host_id:
                                    host_id = db.insert_host(current_project['id'], ip, current_user['id'], form.hosts_description.data)
                                port = int(port_obj.attrs['port'])
                                is_tcp = int(port_obj.attrs['protocol'] == 'tcp')
                                service = port_obj.attrs['value']
                                port_id = db.select_host_port(host_id, port, is_tcp)
                                if port_id:
                                    port_id = port_id[0]['id']
                                    db.update_port_service(port_id, service)
                                else:
                                    port_id = db.insert_host_port(host_id, port, is_tcp, service, form.ports_description.data,
                                                                  current_user['id'], current_project['id'])
                    issues_list = host.find('vulns')
                    if issues_list:
                        for issue_obj in issues_list.findAll('cat'):
                            if not host_id:
                                host_id = db.insert_host(current_project['id'], ip, current_user['id'], form.hosts_description.data)
                            port_num = 0
                            is_tcp = 1
                            if 'port' in issue_obj.attrs and 'protocol' in issue_obj.attrs:
                                port_num = int(issue_obj.attrs['port'])
                                is_tcp = int(issue_obj.attrs['protocol'] == 'tcp')
                            port_id = db.select_host_port(host_id, port_num, is_tcp)
                            if not port_id:
                                port_id = db.insert_host_port(host_id, port_num, is_tcp, 'unknown', form.ports_description.data,
                                                              current_user['id'], current_project['id'])
                            else:
                                port_id = port_id[0]['id']
                            # best available CVSS: v3 base > v3 temporal > v2 temporal
                            cvss = 0
                            cvss_tmp1 = issue_obj.find('cvss3_base')
                            cvss_tmp2 = issue_obj.find('cvss3_temporal')
                            cvss_tmp3 = issue_obj.find('cvss_temporal')
                            if cvss_tmp1 and cvss_tmp1.text != '-':
                                cvss = float(cvss_tmp1.text)
                            elif cvss_tmp2 and cvss_tmp2.text != '-':
                                cvss = float(cvss_tmp2.text)
                            elif cvss_tmp3 and cvss_tmp3.text != '-':
                                cvss = float(cvss_tmp3.text)
                            issue_name = issue_obj.find('title').text
                            issue_diagnostic = issue_obj.find('diagnosis').text
                            issue_description = issue_obj.find('consequence').text
                            issue_solution = beautify_output(issue_obj.find('solution').text)
                            # TODO: add PoC
                            issue_output = issue_obj.find('result')
                            try:
                                issue_output = issue_obj.find('result').text
                            except AttributeError:
                                issue_output = ''
                            issue_full_description = 'Diagnosis: \n{} \n\nConsequence: \n{}'.format(issue_diagnostic, issue_description)
                            issue_full_description = beautify_output(issue_full_description)
                            services = {port_id: ['0']}
                            issue_id = db.insert_new_issue_no_dublicate(issue_name, issue_full_description, '', cvss, current_user['id'], services, 'need to recheck',
                                                                        current_project['id'], '', 0, 'custom', issue_solution, '')
                    issues_list = host.find('practices')
                    if issues_list:
                        for issue_obj in issues_list.findAll('practice'):
                            if not host_id:
                                host_id = db.insert_host(current_project['id'], ip, current_user['id'], form.hosts_description.data)
                            cve = ''
                            if 'cveid' in issue_obj.attrs:
                                cve = issue_obj.attrs['cveid']
                            issue_name = issue_obj.find('title').text
                            issue_diagnostic = issue_obj.find('diagnosis').text
                            issue_description = issue_obj.find('consequence').text
                            issue_solution = beautify_output(issue_obj.find('solution').text)
                            # TODO: add PoC
                            issue_output = issue_obj.find('result')
                            try:
                                issue_output = issue_obj.find('result').text
                            except AttributeError:
                                issue_output = ''
                            issue_full_description = 'Diagnosis: \n{} \n\nConsequence: \n{}'.format(issue_diagnostic, issue_description)
                            issue_full_description = beautify_output(issue_full_description)
                            issue_links = []
                            for url in issue_obj.findAll('url'):
                                issue_links.append(url.text)
                            for url in issue_obj.findAll('link'):
                                issue_links.append(url.text)
                            if issue_links:
                                issue_full_description += '\n\nLinks:\n' + '\n'.join(['- ' + url for url in issue_links])
                            # fix: dropped a duplicated 'cvss = 0' statement
                            cvss = 0
                            cvss_tmp1 = issue_obj.find('cvss3_base')
                            cvss_tmp2 = issue_obj.find('cvss3_temporal')
                            cvss_tmp3 = issue_obj.find('cvss_temporal')
                            if cvss_tmp1 and cvss_tmp1.text != '-':
                                cvss = float(cvss_tmp1.text)
                            elif cvss_tmp2 and cvss_tmp2.text != '-':
                                cvss = float(cvss_tmp2.text)
                            elif cvss_tmp3 and cvss_tmp3.text != '-':
                                cvss = float(cvss_tmp3.text)
                            # try to detect port
                            port = 0
                            is_tcp = 1
                            info_str = issue_output.split('\n')[0]
                            if ' detected on port ' in info_str:
                                port = int(info_str.split(' detected on port ')[1].split(' ')[0])
                                if ' over ' in info_str.split(' detected on port ')[1]:
                                    is_tcp = int(info_str.split(' detected on port ')[1].split(' over ')[1].split(' ')[0] == 'TCP')
                            port_id = db.select_host_port(host_id, port, is_tcp)
                            if not port_id:
                                port_id = db.insert_host_port(host_id, port, is_tcp, 'unknown', form.ports_description.data,
                                                              current_user['id'], current_project['id'])
                            else:
                                port_id = port_id[0]['id']
                            services = {port_id: ['0']}
                            # fix: pass '' as the path argument and cve in the
                            # cve slot, matching the <vulns> call above and
                            # the openvas importer.
                            issue_id = db.insert_new_issue_no_dublicate(issue_name, issue_full_description, '', cvss, current_user['id'], services, 'need to recheck',
                                                                        current_project['id'], cve, 0, 'custom', issue_solution, '')
    return render_template('project/tools/import/qualys.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='Qualys')
@routes.route('/project/<uuid:project_id>/tools/whois/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def whois_page(project_id, current_project, current_user):
    """Render the Whois tool page for the current project."""
    template_context = {
        'current_project': current_project,
        'tab_name': 'Whois',
    }
    return render_template('project/tools/scanners/whois.html', **template_context)
def _format_whois_info(whois_obj):
    """Render a python-whois response into a human-readable multi-line summary.

    Only fields that are present and non-empty are included. Multi-valued
    fields (name servers, emails) are de-duplicated via set() — order is
    therefore not guaranteed, matching the original behaviour.
    Returns an empty string when no known field is present.
    """
    result_str = ''
    if 'registrar' in whois_obj and whois_obj['registrar']:
        result_str += 'Registrar: {}\n'.format(whois_obj['registrar'])
    if 'whois_server' in whois_obj and whois_obj['whois_server']:
        result_str += 'Whois server: {}\n'.format(whois_obj['whois_server'])
    if 'referral_url' in whois_obj and whois_obj['referral_url']:
        result_str += 'Referral URL: {}\n'.format(whois_obj['referral_url'])
    if 'name_servers' in whois_obj and whois_obj['name_servers']:
        result_str += 'Name servers: \n{}\n'.format('\n'.join([' ' + x.lower() for x in set(whois_obj['name_servers'])]))
    if 'emails' in whois_obj and whois_obj['emails']:
        result_str += 'Emails: \n{}\n'.format('\n'.join([' ' + x for x in set(whois_obj['emails'])]))
    if 'dnssec' in whois_obj and whois_obj['dnssec']:
        result_str += 'DNSSec: {}\n'.format(whois_obj['dnssec'])
    if 'name' in whois_obj and whois_obj['name']:
        result_str += 'Name: {}\n'.format(whois_obj['name'])
    if 'org' in whois_obj and whois_obj['org']:
        result_str += 'Organization: {}\n'.format(whois_obj['org'])
    if 'address' in whois_obj and whois_obj['address']:
        result_str += 'Address: {}\n'.format(whois_obj['address'])
    if 'city' in whois_obj and whois_obj['city']:
        # BUGFIX: this line was mislabelled "DNSSec:" (copy-paste error).
        result_str += 'City: {}\n'.format(whois_obj['city'])
    if 'state' in whois_obj and whois_obj['state']:
        result_str += 'State: {}\n'.format(whois_obj['state'])
    if 'zipcode' in whois_obj and whois_obj['zipcode']:
        result_str += 'Zipcode: {}\n'.format(whois_obj['zipcode'])
    if 'country' in whois_obj and whois_obj['country']:
        result_str += 'Country: {}\n'.format(whois_obj['country'])
    return result_str


def _whois_register_hostname(project_id, user_id, hostname):
    """Resolve *hostname* and ensure both the IP host and the hostname record
    exist in the project.

    Best-effort: DNS resolution or DB failures are swallowed silently, which
    matches the original behaviour ("add even when whois returned nothing").
    """
    try:
        ip = socket.gethostbyname(hostname)
        hosts = db.select_ip_from_project(project_id, ip)
        if not hosts:
            host_id = db.insert_host(project_id,
                                     ip,
                                     user_id,
                                     'Added from Whois information')
        else:
            host_id = hosts[0]['id']
        if not db.select_ip_hostname(host_id, hostname):
            db.insert_hostname(host_id, hostname, '', user_id)
    except Exception:
        # resolution failed — nothing to register
        pass


@routes.route('/project/<uuid:project_id>/tools/whois/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def whois_page_form(project_id, current_project, current_user):
    """Run whois lookups for one hostname (optionally an existing host's
    hostname) or a list of hostnames, storing the result as the hostname's
    description and auto-creating missing host/hostname records.
    """
    form = WhoisForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)

    if not errors:
        # When a host id is supplied, the hostname must already be attached
        # to that host inside the current project.
        if form.host_id.data and is_valid_uuid(form.host_id.data):
            host = db.select_project_host(current_project['id'], form.host_id.data)
            if not host:
                errors.append('Host not found!')
            else:
                host_id = host[0]['id']
                hostname = db.select_ip_hostname(host_id, form.hostname.data)
                if not hostname:
                    errors.append('Hostname not found!')
        if not errors:
            if form.host_id.data:
                # Existing hostname: refresh its description and bounce back
                # to the referring page (only when whois returned anything).
                result_str = _format_whois_info(whois.whois(form.hostname.data))
                if result_str:
                    db.update_hostnames_description(current_project['id'],
                                                    form.hostname.data,
                                                    result_str)
                    referer = request.headers.get("Referer")
                    referer += '#/hostnames'
                    return redirect(referer)
            if form.hostname.data:
                result_str = _format_whois_info(whois.whois(form.hostname.data))
                # host/hostname records are created even when result_str is empty
                _whois_register_hostname(current_project['id'],
                                         current_user['id'],
                                         form.hostname.data)
                db.update_hostnames_description(current_project['id'],
                                                form.hostname.data,
                                                result_str)
            if form.hostnames.data:
                for hostname in form.hostnames.data:
                    result_str = _format_whois_info(whois.whois(hostname))
                    _whois_register_hostname(current_project['id'],
                                             current_user['id'],
                                             hostname)
                    db.update_hostnames_description(current_project['id'],
                                                    hostname,
                                                    result_str)
    return render_template('project/tools/scanners/whois.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='Whois')
@routes.route('/project/<uuid:project_id>/tools/duplicator/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def duplicator_page(project_id, current_project, current_user):
    """Render the project Duplicator (export) tool page."""
    template_context = {
        'current_project': current_project,
        'tab_name': 'Duplicator',
    }
    return render_template('project/tools/export/duplicator.html', **template_context)
def _duplicator_map_services(src_project_id, dst_project_id, src_ports_dict):
    """Translate a ``{port_id: [hostname_id, ...]}`` services mapping from the
    source project into the equivalent mapping of the destination project.

    For each source port, the destination port is located via the host's IP
    plus port number/protocol. Hostnames are matched by name on the
    destination host. When the destination port is missing, the entry falls
    back to the destination host's default port record (``db.select_host_port``
    with no port argument) with an empty hostname id, mirroring the original
    inline logic that was repeated for issues, files, creds and networks.
    """
    dst_ports_dict = {}
    for src_port_id in src_ports_dict:
        src_port = db.select_project_port(src_project_id, src_port_id)
        if not src_port:
            continue
        src_port = src_port[0]
        src_host = db.select_project_host(src_project_id, src_port['host_id'])
        if not src_host:
            continue
        src_host = src_host[0]
        dst_host = db.select_project_host_by_ip(dst_project_id, src_host['ip'])
        if not dst_host:
            continue
        dst_host = dst_host[0]
        dst_port = db.select_host_port(dst_host['id'],
                                       int(src_port['port']),
                                       bool(src_port['is_tcp']))
        if dst_port:
            dst_port = dst_port[0]
            for src_hostname_id in src_ports_dict[src_port_id]:
                if src_hostname_id == "0":
                    # "0" means "the port itself, no hostname"
                    dst_ports_dict.setdefault(dst_port['id'], []).append(src_hostname_id)
                else:
                    src_hostname = db.select_hostname(src_hostname_id)
                    if src_hostname and src_hostname[0]['host_id'] == src_port['host_id']:
                        src_hostname = src_hostname[0]
                        dst_hostname = db.select_ip_hostname(dst_port['host_id'],
                                                             src_hostname['hostname'])
                        if dst_hostname:
                            dst_hostname = dst_hostname[0]
                            dst_ports_dict.setdefault(dst_port['id'], []).append(dst_hostname['id'])
        else:
            # destination port was not copied: attach to the host's 0-port
            dst_zero_port_id = db.select_host_port(dst_host['id'])[0]['id']
            if dst_zero_port_id not in dst_ports_dict:
                dst_ports_dict[dst_zero_port_id] = [""]
            elif "" not in dst_ports_dict[dst_zero_port_id]:
                dst_ports_dict[dst_zero_port_id].append("")
    return dst_ports_dict


def _duplicator_merge_services(existing, extra):
    """Merge *extra* ``{port_id: [hostname_id]}`` entries into *existing*
    in place (no duplicate hostname ids per port) and return it."""
    for port_id in extra:
        if port_id not in existing:
            existing[port_id] = []
        for hostname_id in extra[port_id]:
            if hostname_id not in existing[port_id]:
                existing[port_id].append(hostname_id)
    return existing


def _duplicator_copy_poc(src_poc, dst_issue_id, user_id, dst_port_id, dst_hostname_id):
    """Clone one PoC record (and its on-disk file) onto *dst_issue_id*.

    Copies the PoC file to a freshly generated id; when PoC storage is
    'database', the copy is slurped into memory and the temp file removed so
    only the DB holds the data.
    """
    src_path = path.join('./static/files/poc/', src_poc['id'])
    dst_poc_id = gen_uuid()
    dst_path = path.join('./static/files/poc/', dst_poc_id)
    copyfile(src_path, dst_path)
    file_data = b''
    if config['files']['poc_storage'] == 'database':
        with open(dst_path, 'rb') as f:
            file_data = f.read()
        remove(dst_path)
    db.insert_new_poc(
        dst_port_id,
        src_poc['description'],
        src_poc['type'],
        src_poc['filename'],
        dst_issue_id,
        user_id,
        dst_hostname_id,
        dst_poc_id,
        storage=config['files']['poc_storage'],
        data=file_data
    )


@routes.route('/project/<uuid:project_id>/tools/duplicator/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def duplicator_page_form(project_id, current_project, current_user):
    """Copy selected objects (settings, hosts, issues+PoCs, files, creds,
    networks, notes, network paths) from the current project into another
    project the user has access to. Existing destination objects are matched
    by IP/port/hostname and merged rather than duplicated.
    """
    form = DuplicatorForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)

    destination_project = db.check_user_project_access(form.destination_project.data,
                                                       current_user['id'])
    if not destination_project:
        errors.append("Destination project not found!")

    if not errors:
        # refuse to copy into archived (or auto-archived, past-deadline) projects
        if not (destination_project['status'] and
                not (destination_project['auto_archive'] and
                     destination_project['end_date'] < time.time())):
            errors.append("Destination project is in archive!")

    if not errors:
        # ---- 1. project card settings -----------------------------------
        if form.copy_info.data:
            destination_project['description'] = current_project['description']
        if form.copy_scope.data:
            destination_project['scope'] = current_project['scope']
        if form.copy_deadline.data:
            destination_project['end_date'] = current_project['end_date']
            destination_project['auto_archive'] = 0
            destination_project['status'] = 1
            if int(destination_project['end_date']) < int(destination_project['start_date']):
                destination_project['start_date'] = current_project['start_date']
        if form.copy_users.data:
            old_users = json.loads(destination_project['testers'])
            new_users = list(set(old_users + json.loads(current_project['testers'])))
            destination_project['testers'] = json.dumps(new_users)
        if form.copy_teams.data:
            old_teams = json.loads(destination_project['teams'])
            new_teams = list(set(old_teams + json.loads(current_project['teams'])))
            destination_project['teams'] = json.dumps(new_teams)
        db.update_project_settings(destination_project['id'],
                                   destination_project['name'],
                                   destination_project['description'],
                                   destination_project['type'],
                                   destination_project['scope'],
                                   destination_project['start_date'],
                                   destination_project['end_date'],
                                   destination_project['auto_archive'],
                                   json.loads(destination_project['testers']),
                                   json.loads(destination_project['teams']))

        # ---- 2. expand selection: paths pull in their endpoint objects --
        paths_ids_list = list(set(form.paths.data))
        hosts_ids_list = list(set(form.hosts.data))
        networks_ids_list = list(set(form.networks.data))
        for path_id in paths_ids_list:
            current_path = db.select_path(path_id=path_id,
                                          project_id=current_project['id'])
            if current_path:
                current_path = current_path[0]
                if current_path['host_out'] and current_path['host_out'] not in hosts_ids_list:
                    hosts_ids_list.append(current_path['host_out'])
                if current_path['host_in'] and current_path['host_in'] not in hosts_ids_list:
                    hosts_ids_list.append(current_path['host_in'])
                if current_path['network_in'] and current_path['network_in'] not in networks_ids_list:
                    networks_ids_list.append(current_path['network_in'])
                if current_path['network_out'] and current_path['network_out'] not in networks_ids_list:
                    networks_ids_list.append(current_path['network_out'])

        # ---- 3. hosts (with their ports and hostnames) ------------------
        for host_id in hosts_ids_list:
            current_host = db.select_project_host(current_project['id'], host_id)
            if current_host:
                current_host = current_host[0]
                # reuse the destination host with the same IP, or create it
                destination_host = db.select_project_host_by_ip(destination_project['id'],
                                                                current_host['ip'])
                if destination_host:
                    destination_host_id = destination_host[0]['id']
                else:
                    destination_host_id = db.insert_host(destination_project['id'],
                                                         current_host['ip'],
                                                         current_user['id'])
                db.update_host_comment_threats(destination_host_id,
                                               current_host['comment'],
                                               json.loads(current_host['threats']),
                                               current_host['os'])
                # ports: match by number+protocol, create if missing
                for current_port in db.select_host_ports(current_host['id']):
                    destination_port = db.select_host_port(destination_host_id,
                                                           int(current_port['port']),
                                                           bool(current_port['is_tcp']))
                    if destination_port:
                        destination_port_id = destination_port[0]['id']
                    else:
                        destination_port_id = db.insert_host_port(destination_host_id,
                                                                  int(current_port['port']),
                                                                  bool(current_port['is_tcp']),
                                                                  '', '', current_user['id'],
                                                                  destination_project['id'])
                    db.update_port_proto_description(destination_port_id,
                                                     current_port['service'],
                                                     current_port['description'])
                # hostnames: match by name, create if missing
                for current_hostname in db.select_ip_hostnames(current_host['id']):
                    destination_hostname = db.select_ip_hostname(destination_host_id,
                                                                 current_hostname['hostname'])
                    if destination_hostname:
                        db.update_hostname(destination_hostname[0]['id'],
                                           current_hostname['description'])
                    else:
                        db.insert_hostname(destination_host_id,
                                           current_hostname['hostname'],
                                           current_hostname['description'],
                                           current_user['id'])

        # ---- 4. issues (with PoCs) --------------------------------------
        for issue_id in form.issues.data:
            current_issue = db.select_issue(issue_id)
            if current_issue and current_issue[0]['project_id'] == current_project['id']:
                current_issue = current_issue[0]
                destination_ports_dict = _duplicator_map_services(
                    current_project['id'],
                    destination_project['id'],
                    json.loads(current_issue['services']))
                destination_issue_id = db.insert_new_issue_no_dublicate(
                    current_issue['name'], current_issue['description'],
                    current_issue['url_path'], current_issue['cvss'],
                    current_user['id'], destination_ports_dict, current_issue['status'],
                    destination_project['id'], current_issue['cve'],
                    current_issue['cwe'], current_issue['type'],
                    current_issue['fix'], current_issue['param']
                )
                for current_poc in db.select_issue_pocs(current_issue['id']):
                    if current_poc['port_id'] == "0":
                        # PoC not bound to a port
                        _duplicator_copy_poc(current_poc, destination_issue_id,
                                             current_user['id'], "0", "0")
                        continue
                    current_port = db.select_project_port(current_project['id'],
                                                          current_poc['port_id'])
                    if not current_port:
                        continue
                    current_port = current_port[0]
                    current_host = db.select_project_host(current_project['id'],
                                                          current_port['host_id'])
                    if not current_host:
                        continue
                    current_host = current_host[0]
                    destination_host = db.select_project_host_by_ip(destination_project['id'],
                                                                    current_host['ip'])
                    if not destination_host:
                        continue
                    destination_host = destination_host[0]
                    destination_port = db.select_host_port(destination_host['id'],
                                                           current_port['port'],
                                                           current_port['is_tcp'])
                    if not destination_port:
                        continue
                    destination_port = destination_port[0]
                    if current_poc['hostname_id'] == "0":
                        _duplicator_copy_poc(current_poc, destination_issue_id,
                                             current_user['id'],
                                             destination_port['id'], "0")
                    else:
                        current_hostname = db.select_project_hostname(current_project['id'],
                                                                      current_poc['hostname_id'])
                        if current_hostname:
                            destination_hostname = db.select_ip_hostname(
                                destination_host['id'],
                                current_hostname[0]['hostname'])
                            if destination_hostname:
                                _duplicator_copy_poc(current_poc, destination_issue_id,
                                                     current_user['id'],
                                                     destination_port['id'],
                                                     destination_hostname[0]['id'])
                            else:
                                # hostname was not copied: attach PoC to port only
                                _duplicator_copy_poc(current_poc, destination_issue_id,
                                                     current_user['id'],
                                                     destination_port['id'], "0")

        # ---- 5. files ---------------------------------------------------
        for current_file_id in form.files.data:
            current_file = db.select_files(current_file_id)
            if current_file and current_file[0]['project_id'] == current_project['id']:
                current_file = current_file[0]
                current_file_path = path.join('./static/files/code/', current_file['id'])
                destination_file_id = gen_uuid()
                destination_file_path = path.join('./static/files/code/', destination_file_id)
                destination_ports_dict = _duplicator_map_services(
                    current_project['id'],
                    destination_project['id'],
                    json.loads(current_file['services']))
                # BUGFIX: copy the file *before* reading it. The original code
                # opened destination_file_path before copyfile() created it,
                # which raised FileNotFoundError when files_storage=='database';
                # this now mirrors the PoC copy logic above.
                copyfile(current_file_path, destination_file_path)
                file_data = b''
                if config["files"]["files_storage"] == 'database':
                    with open(destination_file_path, 'rb') as f:
                        file_data = f.read()
                    remove(destination_file_path)
                db.insert_new_file(destination_file_id,
                                   destination_project['id'],
                                   current_file['filename'],
                                   current_file['description'],
                                   destination_ports_dict,
                                   current_file['type'],
                                   current_user['id'],
                                   storage=config["files"]["files_storage"],
                                   data=file_data
                                   )

        # ---- 6. credentials ---------------------------------------------
        for cred_id in form.creds.data:
            current_cred = db.select_creds(cred_id)
            if current_cred and current_cred[0]['project_id'] == current_project['id']:
                current_cred = current_cred[0]
                destination_ports_dict = _duplicator_map_services(
                    current_project['id'],
                    destination_project['id'],
                    json.loads(current_cred['services']))
                dublicate_creds = db.select_creds_dublicates(
                    destination_project['id'],
                    current_cred['login'],
                    current_cred['hash'],
                    current_cred['cleartext'],
                    current_cred['description'],
                    current_cred['source'],
                    current_cred['hash_type']
                )
                if dublicate_creds:
                    # identical cred already exists: just merge the services
                    dublicate_creds = dublicate_creds[0]
                    joined_services = _duplicator_merge_services(
                        json.loads(dublicate_creds['services']),
                        destination_ports_dict)
                    db.update_creds(
                        dublicate_creds['id'],
                        dublicate_creds['login'],
                        dublicate_creds['hash'],
                        dublicate_creds['hash_type'],
                        dublicate_creds['cleartext'],
                        dublicate_creds['description'],
                        dublicate_creds['source'],
                        joined_services
                    )
                else:
                    db.insert_new_cred(
                        current_cred['login'],
                        current_cred['hash'],
                        current_cred['hash_type'],
                        current_cred['cleartext'],
                        current_cred['description'],
                        current_cred['source'],
                        destination_ports_dict,
                        current_user['id'],
                        destination_project['id']
                    )

        # ---- 7. networks ------------------------------------------------
        for network_id in networks_ids_list:
            current_network = db.select_project_networks_by_id(current_project['id'],
                                                               network_id)
            if current_network:
                current_network = current_network[0]
                destination_ports_dict = _duplicator_map_services(
                    current_project['id'],
                    destination_project['id'],
                    json.loads(current_network['access_from']))
                duplicate_network = db.select_network_by_ip(destination_project['id'],
                                                            current_network['ip'],
                                                            current_network['mask'],
                                                            current_network['is_ipv6'])
                if duplicate_network:
                    # same network already exists: merge its access_from map
                    duplicate_network = duplicate_network[0]
                    joined_services = _duplicator_merge_services(
                        json.loads(duplicate_network['access_from']),
                        destination_ports_dict)
                    db.update_network(duplicate_network['id'],
                                      destination_project['id'],
                                      current_network['ip'],
                                      current_network['mask'],
                                      current_network['asn'],
                                      current_network['comment'],
                                      current_network['is_ipv6'],
                                      current_network['internal_ip'],
                                      current_network['cmd'],
                                      joined_services,
                                      current_network['name'])
                else:
                    db.insert_new_network(
                        current_network['ip'],
                        current_network['mask'],
                        current_network['asn'],
                        current_network['comment'],
                        destination_project['id'],
                        current_user['id'],
                        current_network['is_ipv6'],
                        current_network['internal_ip'],
                        current_network['cmd'],
                        destination_ports_dict,
                        current_network['name']
                    )

        # ---- 8. notes ---------------------------------------------------
        for note_id in form.notes.data:
            current_note = db.select_note(note_id)
            if current_note and current_note[0]['project_id'] == current_project['id']:
                current_note = current_note[0]
                db.insert_new_note(
                    destination_project['id'],
                    current_note['name'],
                    current_user['id'],
                    '',
                    current_note['text']
                )
        # host-bound notes: host is re-created in destination when missing
        for host_id in form.note_hosts.data:
            for current_note in db.select_host_notes(host_id, current_project['id']):
                current_host = db.select_project_host(current_project['id'],
                                                      current_note['host_id'])
                if current_host:
                    current_host = current_host[0]
                    destination_host = db.select_project_host_by_ip(destination_project['id'],
                                                                    current_host['ip'])
                    if destination_host:
                        destination_host_id = destination_host[0]['id']
                    else:
                        destination_host_id = db.insert_host(destination_project['id'],
                                                             current_host['ip'],
                                                             current_user['id'])
                    db.insert_new_note(
                        destination_project['id'],
                        current_note['name'],
                        current_user['id'],
                        destination_host_id,
                        current_note['text']
                    )

        # ---- 9. network paths -------------------------------------------
        for path_id in paths_ids_list:
            current_path = db.select_path(path_id=path_id,
                                          project_id=current_project['id'])
            if current_path:
                current_path = current_path[0]
                host_in = ''
                network_in = ''
                host_out = ''
                network_out = ''
                # NOTE(review): endpoint hosts/networks were copied in steps
                # 3/7 (the selection was expanded in step 2), so these [0]
                # lookups are expected to succeed — confirm for edge cases.
                if current_path['host_out']:
                    source_host = db.select_host(current_path['host_out'])[0]
                    host_out = db.select_project_host_by_ip(destination_project['id'],
                                                            source_host['ip'])[0]['id']
                if current_path['host_in']:
                    source_host = db.select_host(current_path['host_in'])[0]
                    host_in = db.select_project_host_by_ip(destination_project['id'],
                                                           source_host['ip'])[0]['id']
                if current_path['network_out']:
                    source_network = db.select_network(current_path['network_out'])[0]
                    network_out = db.select_network_by_ip(destination_project['id'],
                                                          source_network['ip'],
                                                          source_network['mask'],
                                                          source_network['is_ipv6'])[0]['id']
                if current_path['network_in']:
                    source_network = db.select_network(current_path['network_in'])[0]
                    network_in = db.select_network_by_ip(destination_project['id'],
                                                         source_network['ip'],
                                                         source_network['mask'],
                                                         source_network['is_ipv6'])[0]['id']
                # skip if the same path already exists in the destination
                dublicate_paths = db.search_path(project_id=destination_project['id'],
                                                 out_host=host_out,
                                                 out_network=network_out,
                                                 in_host=host_in,
                                                 in_network=network_in)
                if not dublicate_paths:
                    db.insert_path(project_id=destination_project['id'],
                                   out_host=host_out,
                                   out_network=network_out,
                                   in_host=host_in,
                                   in_network=network_in,
                                   description=current_path['description'],
                                   path_type=current_path['type'],
                                   direction=current_path['direction'])

    return render_template('project/tools/export/duplicator.html',
                           current_project=current_project,
                           tab_name='Duplicator',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/wpscan/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def wpscan_page(project_id, current_project, current_user):
    """Render the WPScan report import page."""
    template_context = {
        'current_project': current_project,
        'tab_name': 'WPScan',
    }
    return render_template('project/tools/import/wpscan.html', **template_context)
@routes.route('/project/<uuid:project_id>/tools/wpscan/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def wpscan_page_form(project_id, current_project, current_user):
    # Import WPScan JSON reports into the project: for each uploaded file this
    # creates/reuses the target host, port and hostname, inserts one issue per
    # reported vulnerability (WordPress core, main theme, every plugin) and
    # attaches one HTML note summarizing the scan to the host.
    form = WPScanForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # json files
        for file in form.json_files.data:
            if file.filename:
                file_content = file.read().decode('charmap')
                try:
                    file_dict = json.loads(file_content)
                    current_ip = file_dict['target_ip']
                    # validate ip (raises ValueError -> caught below as a corrupted file)
                    ipaddress.ip_address(current_ip)
                    current_host = db.select_project_host_by_ip(current_project['id'], current_ip)
                    if current_host:
                        current_host_id = current_host[0]['id']
                    else:
                        current_host_id = db.insert_host(current_project['id'],
                                                         current_ip,
                                                         current_user['id'],
                                                         "Added from WPScan")
                    # get protocol
                    current_url = file_dict['target_url']
                    current_url_obj = urllib.parse.urlparse(current_url)
                    current_scheme = current_url_obj.scheme.lower()
                    note_output = "<h1>Scan of {} </h1></br></br>".format(current_url)
                    if current_url_obj.port:
                        current_port_num = int(current_url_obj.port)
                    else:
                        # NOTE(review): if the scheme is neither http nor https and the
                        # URL carries no explicit port, current_port_num is never bound
                        # and the range check below raises UnboundLocalError, which the
                        # `except ValueError` handler does NOT catch -> HTTP 500.
                        if current_scheme == 'http':
                            current_port_num = 80
                        elif current_scheme == 'https':
                            current_port_num = 443
                    current_wordpress_path = current_url_obj.path
                    # NOTE(review): this bare `raise Exception` is also not caught by
                    # the ValueError handler below and would surface as HTTP 500.
                    if current_port_num < 1 or current_port_num > 65535:
                        raise Exception
                    # create port
                    current_port_obj = db.select_host_port(current_host_id,
                                                           current_port_num,
                                                           True)
                    if current_port_obj:
                        current_port_id = current_port_obj[0]['id']
                    else:
                        current_port_id = db.insert_host_port(current_host_id,
                                                              current_port_num,
                                                              True,
                                                              current_scheme,
                                                              'WordPress',
                                                              current_user['id'],
                                                              current_project['id'])
                    # create hostname ("0" is the sentinel for "issue bound to the bare IP")
                    hostname = current_url_obj.hostname
                    if hostname == current_ip:
                        current_hostname_id = "0"
                    else:
                        current_hostname = db.select_ip_hostname(current_host_id,
                                                                 hostname)
                        if current_hostname:
                            current_hostname_id = current_hostname[0]['id']
                        else:
                            current_hostname_id = db.insert_hostname(
                                current_host_id,
                                hostname,
                                "Added from WPScan",
                                current_user['id']
                            )
                    # Interesting findings -> appended to the HTML note only
                    interest_obj = file_dict['interesting_findings']
                    if interest_obj:
                        note_output += "<h1>Interesting findings </h1></br>"
                        for find_obj in interest_obj:
                            note_output += "<h2><b>URL:</b> " + find_obj["url"] + "</h2></br>"
                            note_output += "<b>Type:</b> " + find_obj["type"] + "</br>"
                            note_output += "<b>Description:</b> " + find_obj["to_s"] + "</br>"
                            note_output += "<b>Found by:</b> " + find_obj["found_by"] + "</br>"
                            note_output += "<b>Interesting entries:</b> <ol>"
                            for entry in find_obj["interesting_entries"]:
                                note_output += "<li>" + htmlspecialchars(entry) + "</li>"
                            note_output += "</ol></br>"
                            if "url" in find_obj["references"]:
                                note_output += "<b>Reference urls:</b> <ol>"
                                for url in find_obj["references"]["url"]:
                                    note_output += "<li>" + htmlspecialchars(url) + "</li>"
                                note_output += "</ol></br>"
                            if "metasploit" in find_obj["references"]:
                                note_output += "<b>Reference metasploit:</b> <ol>"
                                for url in find_obj["references"]["metasploit"]:
                                    note_output += "<li>" + htmlspecialchars(url) + "</li>"
                                note_output += "</ol></br>"
                    # Versions issues detection: one project issue per core vulnerability
                    version_obj = file_dict['version']
                    if version_obj:
                        note_output += "<h1>Version detection </h1></br>"
                        note_output += "<b>Version:</b> " + version_obj["number"] + "</br>"
                        note_output += "<b>Found by:</b> " + version_obj["found_by"] + "</br>"
                        note_output += "<b>Interesting entries:</b> <ol>"
                        for entry in version_obj["interesting_entries"]:
                            note_output += "<li>" + htmlspecialchars(entry) + "</li>"
                        note_output += "</ol></br>"
                        for current_issue in version_obj["vulnerabilities"]:
                            issue_name = current_issue["title"]
                            issue_fix = "Upgrade WordPress to version >= " + current_issue["fixed_in"]
                            issue_cve = ",".join(current_issue["references"]["cve"])
                            issue_description = "{}\n\nURLs:\n{}\n\nwpvulndb: {}".format(issue_name,
                                                                                        "\n".join([" - " + x for x in current_issue["references"]["url"]]),
                                                                                        ", ".join(current_issue["references"]["wpvulndb"]))
                            if "exploitdb" in current_issue:
                                issue_description += "\n\nExploitDB: {}".format(current_issue["exploitdb"])
                            if "youtube" in current_issue:
                                issue_description += "\n\nYoutube: {}".format(current_issue["youtube"])
                            issue_id = db.insert_new_issue_no_dublicate(
                                issue_name,
                                issue_description,
                                current_wordpress_path,
                                0,
                                current_user['id'],
                                {current_port_id: [current_hostname_id]},
                                "Need to recheck",
                                current_project['id'],
                                issue_cve,
                                0,
                                "web",
                                issue_fix,
                                ""
                            )
                    # Theme: same treatment for the active theme's vulnerabilities
                    main_theme_obj = file_dict['main_theme']
                    if main_theme_obj:
                        note_output += "<h1>Main theme </h1></br>"
                        note_output += "<b>Name:</b> " + main_theme_obj["slug"] + "</br>"
                        note_output += "<b>Location:</b> " + main_theme_obj["location"] + "</br>"
                        if "readme_url" in main_theme_obj:
                            note_output += "<b>Readme URL:</b> " + main_theme_obj["readme_url"] + "</br>"
                        if "style_uri" in main_theme_obj:
                            note_output += "<b>Official URL:</b> " + main_theme_obj["style_uri"] + "</br>"
                        if "version" in main_theme_obj and main_theme_obj["version"]:
                            note_output += "<b>Version:</b> " + main_theme_obj["version"]["number"] + "</br>"
                            note_output += "<b>Interesting entries:</b> <ol>"
                            for entry in main_theme_obj["version"]["interesting_entries"]:
                                note_output += "<li>" + htmlspecialchars(entry) + "</li>"
                            note_output += "</ol></br>"
                        for current_issue in main_theme_obj["vulnerabilities"]:
                            issue_name = current_issue["title"]
                            issue_fix = "Upgrade main theme {} to version >= {}".format(main_theme_obj["slug"], current_issue["fixed_in"])
                            issue_cve = ",".join(current_issue["references"]["cve"])
                            issue_description = "{}\n\nURLs:\n{}\n\nwpvulndb: {}".format(issue_name,
                                                                                        "\n".join([" - " + x for x in current_issue["references"]["url"]]),
                                                                                        ", ".join(current_issue["references"]["wpvulndb"]))
                            if "exploitdb" in current_issue:
                                issue_description += "\n\nExploitDB: {}".format(current_issue["exploitdb"])
                            if "youtube" in current_issue:
                                issue_description += "\n\nYoutube: {}".format(current_issue["youtube"])
                            issue_id = db.insert_new_issue_no_dublicate(
                                issue_name,
                                issue_description,
                                current_wordpress_path,
                                0,
                                current_user['id'],
                                {current_port_id: [current_hostname_id]},
                                "Need to recheck",
                                current_project['id'],
                                issue_cve,
                                0,
                                "web",
                                issue_fix,
                                ""
                            )
                    # Plugins: one issue per vulnerability of every detected plugin
                    plugins_obj = file_dict['plugins']
                    if plugins_obj:
                        note_output += "<h1>Plugins</h1></br>"
                        for plugin_name in plugins_obj:
                            plugin_obj = plugins_obj[plugin_name]
                            note_output += "<h2>" + plugin_name + "</h2></br>"
                            note_output += "<b>Location:</b> " + plugin_obj["location"] + "</br>"
                            note_output += "<b>Found by:</b> " + plugin_obj["found_by"] + "</br>"
                            if "error_log_url" in plugins_obj and plugin_obj["error_log_url"]:
                                note_output += "<b>Error log URL:</b> " + plugin_obj["error_log_url"] + "</br>"
                            if "directory_listing" in plugin_obj and plugin_obj["directory_listing"]:
                                note_output += "<b>Dir listing URL:</b> " + plugin_obj["directory_listing"] + "</br>"
                            if "changelog_url" in plugin_obj and plugin_obj["changelog_url"]:
                                note_output += "<b>Changelog URL:</b> " + plugin_obj["changelog_url"] + "</br>"
                            if "readme_url" in plugin_obj and plugin_obj["readme_url"]:
                                note_output += "<b>Readme URL:</b> " + plugin_obj["readme_url"] + "</br>"
                            note_output += "<b>Interesting entries:</b> <ol>"
                            for entry in plugin_obj["interesting_entries"]:
                                note_output += "<li>" + htmlspecialchars(entry) + "</li>"
                            note_output += "</ol></br>"
                            if "version" in plugin_obj and plugin_obj["version"]:
                                note_output += "<b>Version:</b> " + plugin_obj["version"]["number"] + "</br>"
                                note_output += "<b>Version entries:</b> <ol>"
                                for entry in plugin_obj["version"]["interesting_entries"]:
                                    note_output += "<li>" + htmlspecialchars(entry) + "</li>"
                                note_output += "</ol></br>"
                            for current_issue in plugin_obj["vulnerabilities"]:
                                issue_name = current_issue["title"]
                                issue_fix = "Upgrade plugin {} to version >= {}".format(plugin_name, current_issue["fixed_in"])
                                issue_cve = ",".join(current_issue["references"]["cve"])
                                issue_description = "{}\n\nURLs:\n{}\n\nwpvulndb: {}".format(issue_name,
                                                                                            "\n".join([" - " + x for x in current_issue["references"]["url"]]),
                                                                                            ", ".join(current_issue["references"]["wpvulndb"]))
                                if "exploitdb" in current_issue:
                                    issue_description += "\n\nExploitDB: {}".format(current_issue["exploitdb"])
                                if "youtube" in current_issue:
                                    issue_description += "\n\nYoutube: {}".format(current_issue["youtube"])
                                issue_id = db.insert_new_issue_no_dublicate(
                                    issue_name,
                                    issue_description,
                                    current_wordpress_path,
                                    0,
                                    current_user['id'],
                                    {current_port_id: [current_hostname_id]},
                                    "Need to recheck",
                                    current_project['id'],
                                    issue_cve,
                                    0,
                                    "web",
                                    issue_fix,
                                    ""
                                )
                    # Add note with the assembled HTML summary to the host
                    note_id = db.insert_new_note(current_project['id'],
                                                 "WPScan: {}".format(current_port_num),
                                                 current_user['id'],
                                                 current_host_id,
                                                 note_output)
                except ValueError as e:
                    # json.loads / ipaddress.ip_address / int() failures land here;
                    # the file is skipped and the user sees the parse error.
                    errors.append('One of files was corrupted: {}'.format(e))
    return render_template('project/tools/import/wpscan.html',
                           current_project=current_project,
                           tab_name='WPScan',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/kube-hunter/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def kubehunter_page(project_id, current_project, current_user):
    # Show the kube-hunter report-upload form; the POST handler below does the import.
    context = {
        'current_project': current_project,
        'tab_name': 'kube-hunter',
    }
    return render_template('project/tools/import/kubehunter.html', **context)
@routes.route('/project/<uuid:project_id>/tools/kube-hunter/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def kubehunter_form(project_id, current_project, current_user):
    # Import kube-hunter JSON reports: creates/updates hosts from 'nodes',
    # ports from 'services', and project issues from 'vulnerabilities'.
    # Each entry is processed best-effort: a blanket `except Exception: pass`
    # intentionally skips malformed entries so one bad record cannot abort
    # the whole import (but it also hides genuine bugs — keep that in mind).
    form = KuberHunter()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        for file in form.json_files.data:
            if file.filename:
                json_report_data = file.read().decode('charmap')
                scan_result = json.loads(json_report_data)
                # add node description (host objects); 'location' is the node IP
                for node_obj in scan_result['nodes']:
                    try:
                        node_type = form.hosts_description.data
                        if 'type' in node_obj:
                            node_type = "Kubernetes " + node_obj['type']
                        node_ip = node_obj['location']
                        # check if valid ip (raises for hostnames -> entry skipped)
                        ipaddress.ip_address(node_ip)
                        current_host = db.select_ip_from_project(current_project['id'], node_ip)
                        if current_host:
                            current_host = current_host[0]
                            db.update_host_description(current_host['id'], node_type)
                        else:
                            current_host = db.insert_host(current_project['id'],
                                                          node_ip,
                                                          current_user['id'],
                                                          node_type)
                    except Exception as e:
                        # next Node
                        pass
                # services: 'location' is "ip:port"
                for service_obj in scan_result['services']:
                    try:
                        service_info = service_obj['service']
                        service_ip = service_obj['location'].split(':')[0]
                        service_port = int(service_obj['location'].split(':')[1])
                        # check ip
                        ipaddress.ip_address(service_ip)
                        # add host
                        current_host = db.select_ip_from_project(current_project['id'], service_ip)
                        if current_host:
                            current_host = current_host[0]
                        else:
                            current_host = db.insert_host(current_project['id'],
                                                          service_ip,
                                                          current_user['id'],
                                                          form.hosts_description.data)
                        # add port (kube-hunter only reports TCP services here)
                        current_port = db.select_ip_port(current_host['id'], service_port, is_tcp=True)
                        if current_port:
                            current_port = current_port[0]
                            db.update_port_service(current_port['id'],
                                                   service_info)
                        else:
                            current_port = db.insert_host_port(current_host['id'],
                                                               service_port,
                                                               True,
                                                               service_info,
                                                               form.ports_description.data,
                                                               current_user['id'],
                                                               current_project['id'])
                    except Exception as e:
                        # next service
                        pass
                # add issues; 'location' may be bare "ip" or "ip:port" (port 0 = host-level)
                for issue_obj in scan_result['vulnerabilities']:
                    try:
                        issue_ip = issue_obj['location'].split(':')[0]
                        issue_port = 0
                        if ':' in issue_obj['location']:
                            issue_port = int(issue_obj['location'].split(':')[1])
                        # check ip
                        ipaddress.ip_address(issue_ip)
                        issue_cvss = 0
                        issue_severity = issue_obj['severity']
                        issue_name = issue_obj['vulnerability']
                        issue_category = issue_obj['category']
                        issue_num = issue_obj['vid']
                        issue_poc_str = issue_obj['evidence']
                        issue_link = issue_obj['avd_reference']
                        issue_script = issue_obj['hunter']
                        issue_description = issue_obj['description']
                        issue_full_description = 'Category: {}\nEvidence: {}\nModule: {}\nLink: {}\nNumber: {}\n\n{}'.format(
                            issue_category,
                            issue_poc_str,
                            issue_script,
                            issue_link,
                            issue_num,
                            issue_description
                        )
                        # map kube-hunter severity labels onto coarse CVSS buckets
                        if issue_severity == 'low':
                            issue_cvss = 2.0
                        elif issue_severity == 'medium':
                            issue_cvss = 5.0
                        elif issue_severity == 'high':
                            issue_cvss = 8.0
                        elif issue_severity == 'critical':
                            issue_cvss = 10.0
                        # add host
                        current_host = db.select_ip_from_project(current_project['id'], issue_ip)
                        if current_host:
                            current_host = current_host[0]
                        else:
                            current_host = db.insert_host(current_project['id'],
                                                          issue_ip,
                                                          current_user['id'],
                                                          form.hosts_description.data)
                        # add port
                        current_port = db.select_ip_port(current_host['id'], issue_port, is_tcp=True)
                        if current_port:
                            current_port = current_port[0]
                            db.update_port_service(current_port['id'],
                                                   form.ports_description.data)
                        else:
                            current_port = db.insert_host_port(current_host['id'],
                                                               issue_port,
                                                               True,
                                                               'kubernetes',
                                                               form.ports_description.data,
                                                               current_user['id'],
                                                               current_project['id'])
                        # add issue; '0' hostname id binds the issue to the bare IP
                        services = {current_port['id']: ['0']}
                        current_issue = db.insert_new_issue_no_dublicate(issue_name,
                                                                         issue_full_description,
                                                                         '',
                                                                         issue_cvss,
                                                                         current_user['id'],
                                                                         services,
                                                                         'need to recheck',
                                                                         current_project['id'],
                                                                         '',
                                                                         0,
                                                                         'custom',
                                                                         '',
                                                                         '')
                    except Exception as e:
                        # NOTE(review): leftover debug print — errors go to stdout,
                        # not to the user or the application log.
                        print(e)
                        pass
        # success path returns here; the return below only serves form-validation errors
        return render_template('project/tools/import/kubehunter.html',
                               current_project=current_project,
                               tab_name='kube-hunter',
                               errors=errors)
    return render_template('project/tools/import/kubehunter.html',
                           current_project=current_project,
                           tab_name='kube-hunter',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/burp_enterprise/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def burp_enterprise_page(project_id, current_project, current_user):
    # Show the Burp Suite Enterprise report-upload form (import happens on POST).
    context = {
        'current_project': current_project,
        'tab_name': 'Burp Suite Enterprise Edition',
    }
    return render_template('project/tools/import/burp_enterprise.html', **context)
@routes.route('/project/<uuid:project_id>/tools/burp_enterprise/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def burp_enterprise_form(project_id, current_project, current_user):
    # Import Burp Suite Enterprise HTML reports: scrapes each "Issues found on"
    # section, resolves the site to an IP (user-supplied hostname->IP map or DNS
    # when auto_resolve is set), creates host/port/hostname records and one
    # issue per report row, attaching HTTP request/response PoCs when present.
    form = BurpEnterpriseForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if errors:
        return render_template('project/tools/import/burp_enterprise.html',
                               current_project=current_project,
                               tab_name='Burp Suite Enterprise Edition',
                               errors=errors)
    # hostnames dict: the two form arrays are parallel (hostname[i] -> ip[i])
    if len(form.hostnames.data) != len(form.ips.data):
        return render_template('project/tools/import/burp_enterprise.html',
                               current_project=current_project,
                               tab_name='Burp Suite Enterprise Edition',
                               errors=['Error with hostnames'])
    i = 0
    hostname_dict = {}
    for i in range(len(form.hostnames.data)):
        hostname_dict[form.hostnames.data[i]] = form.ips.data[i]
    auto_resolve = form.auto_resolve.data == 1
    # xml files (actually HTML reports, parsed with BeautifulSoup)
    for file in form.html_files.data:
        if file.filename:
            html_data = file.read()
            scan_result = BeautifulSoup(html_data, "html.parser")
            # find list of issues
            # NOTE(review): the ':contains' CSS pseudo-class is deprecated in newer
            # soupsieve releases in favor of ':-soup-contains' — confirm the pinned
            # dependency versions before upgrading.
            site_array = scan_result.select('h1:contains("Issues found on")')
            for site_obj in site_array:
                url = site_obj.string.split('Issues found on ')[1].strip()
                parsed_url = urllib.parse.urlparse(url)
                protocol = parsed_url.scheme
                hostname = parsed_url.netloc
                port = 80
                ip = ''
                if not parsed_url.port:
                    if protocol == 'https':
                        port = 443
                    else:
                        port = 80
                else:
                    port = int(parsed_url.port)
                    pass
                # check ip: if netloc is already an IP, no hostname record is needed
                try:
                    ipaddress.ip_address(hostname)
                    ip = hostname
                    hostname = ''
                except Exception as e:
                    pass
                if hostname:
                    try:
                        email_validator.validate_email_domain_part(hostname)
                    except email_validator.EmailNotValidError:
                        errors.append('Hostname not valid!')
                        hostname = ''
                # check hostname: map via the user-supplied table, else resolve via DNS
                # NOTE(review): socket.gethostbyname raises socket.gaierror on
                # resolution failure, which nothing here catches -> HTTP 500.
                if ip == '':
                    if hostname in hostname_dict:
                        ip = hostname_dict[hostname]
                    elif auto_resolve:
                        ip = socket.gethostbyname(hostname)
                # NOTE(review): `not errors` means one bad hostname stops the import
                # of every subsequent site in every file — presumably intentional,
                # but worth confirming.
                if ip and not errors:
                    # add host
                    current_host = db.select_ip_from_project(current_project['id'], ip)
                    if current_host:
                        current_host = current_host[0]
                    else:
                        current_host = db.insert_host(current_project['id'],
                                                      ip,
                                                      current_user['id'],
                                                      form.hosts_description.data)
                    # add port
                    current_port = db.select_ip_port(current_host['id'], port, is_tcp=True)
                    if current_port:
                        current_port = current_port[0]
                        db.update_port_service(current_port['id'],
                                               protocol)
                    else:
                        current_port = db.insert_host_port(current_host['id'],
                                                           port,
                                                           True,
                                                           protocol,
                                                           form.ports_description.data,
                                                           current_user['id'],
                                                           current_project['id'])
                    # add hostname
                    current_hostname = None
                    if hostname:
                        current_hostname = db.select_ip_hostname(current_host['id'],
                                                                 hostname)
                        if current_hostname:
                            current_hostname = current_hostname[0]
                        else:
                            hostname_id = db.insert_hostname(current_host['id'], hostname,
                                                             form.hostnames_description.data,
                                                             current_user['id'])
                            current_hostname = db.select_hostname(hostname_id)
                    # issues loop: header rows carry the issue type, detail rows
                    # link (via the anchor href) to the full issue container.
                    rows_array = site_obj.parent.find_all('tr')[1:]
                    issue_name = ''
                    i = 0
                    for issue_header_obj in rows_array:
                        i += 1
                        if 'class' in issue_header_obj.attrs and 'issue-type-row' in issue_header_obj.attrs['class']:
                            issue_name = issue_header_obj.find('td').string.split(' [')[0]
                        else:
                            td_arr = issue_header_obj.find_all('td')
                            issue_path = issue_header_obj.find('td', {"class": "issue-path"}).string.strip()
                            dom_id = issue_header_obj.find('a').attrs['href'].replace('#', '')
                            severity = td_arr[1].string
                            issue_cvss = 0.0
                            # map Burp severity labels onto coarse CVSS buckets
                            if severity == 'Low':
                                issue_cvss = 2.0
                            elif severity == 'Medium':
                                issue_cvss = 5.0
                            elif severity == 'High':
                                issue_cvss = 8.0
                            elif severity == 'Critical':
                                issue_cvss = 10.0
                            # goto issue container
                            issue_container = scan_result.find('a', {"name": dom_id}).parent
                            issue_name = issue_container.find('h2').string
                            issue_description_container = issue_container.find('div')
                            issue_description_text = str(issue_description_container.getText())
                            # collapse runs of spaces / blank lines from the HTML text
                            while '  ' in issue_description_text:
                                issue_description_text = issue_description_text.replace('  ', ' ')
                            while '\n\n\n' in issue_description_text:
                                issue_description_text = issue_description_text.replace('\n\n\n', '\n\n')
                            # NOTE(review): leftover debug print
                            print(1)
                            # ignoring Remediation detail
                            # Remidiation == fix
                            issue_fix_short_header = issue_container.select('h3:contains("Remediation detail")')
                            issue_fix_short1_header = issue_container.select('h3:contains("Issue remediation")')
                            issue_fix = ''
                            if issue_fix_short_header:
                                next_elem = issue_fix_short_header[0].find_next()
                                issue_fix += str(next_elem.getText()) + '\n\n'
                            if issue_fix_short1_header:
                                next_elem = issue_fix_short1_header[0].find_next()
                                issue_fix += str(next_elem.getText())
                            # issue_fix = issue_fix.replace('<ul>', '\n').replace('<li>', ' - ').replace('</li>', '\n').replace('</ul>', '').replace('\t', '').replace('<div>', '').replace('</div>', '').replace('<b>', '').replace('</b>', '')
                            while '  ' in issue_fix:
                                issue_fix = issue_fix.replace('  ', ' ')
                            while '\n\n\n' in issue_fix:
                                issue_fix = issue_fix.replace('\n\n\n', '\n\n')
                            # References
                            issue_ref_header = issue_container.select('h3:contains("References")')
                            issue_ref = ''
                            if issue_ref_header:
                                issue_ref_header = issue_ref_header[0].find_next()
                                issue_ref = '\n\nReferences:\n'
                                links = issue_ref_header.find_all('a')
                                for link_obj in links:
                                    issue_ref += ' - ' + link_obj.string + ': ' + link_obj.attrs['href'] + '\n'
                            # Vulnerability classifications
                            issue_class_header = issue_container.select('h3:contains("Vulnerability classifications")')
                            issue_class = ''
                            if issue_class_header:
                                issue_class_header = issue_class_header[0].find_next()
                                issue_class = '\n\nClassification:\n'
                                links = issue_class_header.find_all('a')
                                for link_obj in links:
                                    issue_class += link_obj.string + ': ' + link_obj.attrs['href'] + '\n'
                            # add issue
                            issue_full_description = issue_description_text + issue_ref + issue_class
                            while '  ' in issue_full_description:
                                issue_full_description = issue_full_description.replace('  ', ' ')
                            while '\n\n\n' in issue_full_description:
                                issue_full_description = issue_full_description.replace('\n\n\n', '\n\n')
                            try:
                                services = {current_port['id']: ['0']}
                                if current_hostname:
                                    services = {current_port['id']: [current_hostname['id']]}
                            except Exception as e:
                                pass
                            current_issue_id = db.insert_new_issue_no_dublicate(
                                name='Burp: ' + issue_name,
                                description=str(issue_full_description),
                                url_path=str(issue_path),
                                cvss=float(issue_cvss),
                                user_id=current_user['id'],
                                services=services,
                                status='Need to recheck',
                                project_id=current_project['id'],
                                cve='',
                                cwe=0,
                                issue_type='web',
                                fix=str(issue_fix),
                                param=''
                            )
                            # PoC Request: stored in DB or on disk per config
                            issue_request_header = issue_container.select('h3:contains("Request:")')
                            if issue_request_header:
                                next_elem = issue_request_header[0].find_next()
                                poc_text = str(next_elem.getText()).replace('\r', '')
                                # add poc
                                file_data = b''
                                if config['files']['poc_storage'] == 'database':
                                    file_data = poc_text.encode('charmap')
                                poc_id = db.insert_new_poc(current_port['id'],
                                                           'HTTP request',
                                                           'text',
                                                           'request.txt',
                                                           current_issue_id,
                                                           current_user['id'],
                                                           current_hostname['id'] if current_hostname else '0',
                                                           storage=config['files']['poc_storage'],
                                                           data=file_data)
                                if config['files']['poc_storage'] == 'filesystem':
                                    file_path = './static/files/poc/{}'.format(poc_id)
                                    file_object = open(file_path, 'w')
                                    file_object.write(poc_text)
                                    file_object.close()
                            # PoC Response
                            issue_response_header = issue_container.select('h3:contains("Response:")')
                            if issue_response_header:
                                next_elem = issue_response_header[0].find_next()
                                poc_text = str(next_elem.getText()).replace('\r', '')
                                # add poc
                                file_data = b''
                                if config['files']['poc_storage'] == 'database':
                                    file_data = poc_text.encode('charmap')
                                poc_id = db.insert_new_poc(current_port['id'],
                                                           'HTTP response',
                                                           'text',
                                                           'response.txt',
                                                           current_issue_id,
                                                           current_user['id'],
                                                           current_hostname['id'] if current_hostname else '0',
                                                           storage=config['files']['poc_storage'],
                                                           data=file_data)
                                if config['files']['poc_storage'] == 'filesystem':
                                    file_path = './static/files/poc/{}'.format(poc_id)
                                    file_object = open(file_path, 'w')
                                    file_object.write(poc_text)
                                    file_object.close()
    return render_template('project/tools/import/burp_enterprise.html',
                           current_project=current_project,
                           tab_name='Burp Suite Enterprise Edition',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/dnsrecon/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def dnsrecon_page(project_id, current_project, current_user):
    # Show the DNSrecon report-upload form; the POST handler below does the import.
    context = {
        'current_project': current_project,
        'tab_name': 'DNSrecon',
    }
    return render_template('project/tools/import/dnsrecon.html', **context)
@routes.route('/project/<uuid:project_id>/tools/dnsrecon/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def dnsrecon_page_form(project_id, current_project, current_user):
    # Import DNSrecon results (JSON, CSV or XML — three near-identical parse
    # loops): records are accumulated into hostnames_dict / ports_dict first,
    # then written to the project in the two loops at the bottom.
    form = DNSreconForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # hostnames_dict: {'hostname': {'ip': [ips], 'description': '...'}}
        # ports_dict:     {'ip': [port, ...]}
        hostnames_dict = {}
        ports_dict = {}
        # json files
        for file in form.json_files.data:
            if file.filename:
                json_report_data = file.read().decode('charmap')
                scan_result = json.loads(json_report_data)
                for hostname_row in scan_result:
                    hostname = hostname_row['target'] if 'target' in hostname_row else ''
                    hostname_name = hostname_row['mname'] if 'mname' in hostname_row else ''
                    host_ip = hostname_row['address'] if 'address' in hostname_row else ''
                    host_port = hostname_row['port'] if 'port' in hostname_row else ''
                    hostname_info = hostname_row['strings'] if 'strings' in hostname_row else ''
                    hostname_type = hostname_row['type'] if 'type' in hostname_row else ''
                    '''
                    1. Name <--> Address
                    2. Target <--> Address
                    3. Name <--> String
                    (Port, Type)
                    '''
                    if host_ip:
                        # check if host_ip domain or IP
                        try:
                            ipaddress.ip_address(host_ip)
                        except Exception as e:
                            # its domain, need ip
                            host_ip_old = host_ip
                            host_ip = ''
                            # NOTE(review): this lookup reads `hostname_row` instead of
                            # `hostname_row_tmp`, so it re-checks the same (current) row
                            # on every iteration and never finds the matching record —
                            # compare with the XML branch below, which does this
                            # correctly. Likely a copy-paste bug; confirm and fix.
                            for hostname_row_tmp in scan_result:
                                host_ip_tmp = hostname_row['address'] if 'address' in hostname_row else ''
                                domain_tmp = hostname_row['mname'] if 'mname' in hostname_row else ''
                                if host_ip_old == domain_tmp:
                                    try:
                                        ipaddress.ip_address(host_ip_tmp)
                                        host_ip = host_ip_tmp
                                    except Exception as e1:
                                        pass
                    if hostname_name != '' and host_ip != '':
                        # 1. Name <--> Address
                        if hostname == '':
                            if hostname_name not in hostnames_dict:
                                hostnames_dict[hostname_name] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}'.format(hostname_type)
                                }
                            else:
                                if host_ip not in hostnames_dict[hostname_name]['ip']:
                                    hostnames_dict[hostname_name]['ip'].append(host_ip)
                        # 1. Name <--> Address <--> Target
                        else:
                            if hostname not in hostnames_dict:
                                hostnames_dict[hostname] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}\nName: {}'.format(hostname_type, hostname_name)
                                }
                    elif hostname_name != '' and host_ip == '' and hostname_info != '':
                        # Name <--> String
                        if hostname_name not in hostnames_dict:
                            hostnames_dict[hostname_name] = {
                                'ip': [],
                                'description': 'Type: {}\nInfo: {}'.format(hostname_type, hostname_info)
                            }
                        else:
                            hostnames_dict[hostname_name]['description'] += '\nType: {}\nInfo: {}'.format(hostname_type, hostname_info)
                    elif hostname != '' and host_ip != '' and hostname_name == '':
                        # Target <--> Address
                        if hostname not in hostnames_dict:
                            hostnames_dict[hostname] = {
                                'ip': [host_ip],
                                'description': 'Type: {}'.format(hostname_type),
                            }
                    # add ports
                    if host_port != '' and host_ip != '':
                        if host_ip not in ports_dict:
                            ports_dict[host_ip] = [host_port]
                        else:
                            if host_port not in ports_dict[host_ip]:
                                ports_dict[host_ip].append(host_port)
        # csv load
        for file in form.csv_files.data:
            if file.filename:
                scan_result = csv.DictReader(codecs.iterdecode(file, 'charmap'), delimiter=',')
                for hostname_row in scan_result:
                    hostname = hostname_row['Target']
                    hostname_name = hostname_row['Name']
                    host_ip = hostname_row['Address']
                    host_port = hostname_row['Port']
                    hostname_info = hostname_row['String']
                    hostname_type = hostname_row['Type']
                    '''
                    1. Name <--> Address
                    2. Target <--> Address
                    3. Name <--> String
                    (Port, Type)
                    '''
                    if host_ip:
                        # check if host_ip domain or IP
                        try:
                            ipaddress.ip_address(host_ip)
                        except Exception as e:
                            # its domain, need ip
                            host_ip_old = host_ip
                            host_ip = ''
                            # NOTE(review): csv.DictReader is a one-shot iterator — this
                            # inner loop consumes the remaining rows of the SAME reader
                            # the outer loop is iterating, so the outer loop silently
                            # skips the rest of the file. The JSON/XML branches iterate
                            # a list and don't have this problem. Materialize the rows
                            # (list(scan_result)) before looping to fix.
                            for hostname_row_tmp in scan_result:
                                host_ip_tmp = hostname_row_tmp['Address']
                                domain_tmp = hostname_row_tmp['Name']
                                if host_ip_old == domain_tmp:
                                    try:
                                        ipaddress.ip_address(host_ip_tmp)
                                        host_ip = host_ip_tmp
                                    except Exception as e1:
                                        pass
                    if hostname_name != '' and host_ip != '':
                        # 1. Name <--> Address
                        if hostname == '':
                            if hostname_name not in hostnames_dict:
                                hostnames_dict[hostname_name] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}'.format(hostname_type)
                                }
                            else:
                                if host_ip not in hostnames_dict[hostname_name]['ip']:
                                    hostnames_dict[hostname_name]['ip'].append(host_ip)
                        # 1. Name <--> Address <--> Target
                        else:
                            if hostname not in hostnames_dict:
                                hostnames_dict[hostname] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}\nName: {}'.format(hostname_type, hostname_name)
                                }
                    elif hostname_name != '' and host_ip == '' and hostname_info != '':
                        # Name <--> String
                        if hostname_name not in hostnames_dict:
                            hostnames_dict[hostname_name] = {
                                'ip': [],
                                'description': 'Type: {}\nInfo: {}'.format(hostname_type, hostname_info)
                            }
                        else:
                            hostnames_dict[hostname_name]['description'] += '\nType: {}\nInfo: {}'.format(hostname_type, hostname_info)
                    elif hostname != '' and host_ip != '' and hostname_name == '':
                        # Target <--> Address
                        if hostname not in hostnames_dict:
                            hostnames_dict[hostname] = {
                                'ip': [host_ip],
                                'description': 'Type: {}'.format(hostname_type),
                            }
                    # add ports
                    if host_port != '' and host_ip != '':
                        if host_ip not in ports_dict:
                            ports_dict[host_ip] = [host_port]
                        else:
                            if host_port not in ports_dict[host_ip]:
                                ports_dict[host_ip].append(host_port)
        for file in form.xml_files.data:
            if file.filename:
                soup = BeautifulSoup(file.read(), "html.parser")
                scan_result = soup.findAll('record')
                for hostname_row in scan_result:
                    hostname = hostname_row.get('target') if hostname_row.get('target') else ''
                    hostname_name = hostname_row.get('name') if hostname_row.get('name') else ''
                    host_ip = hostname_row.get('address') if hostname_row.get('address') else ''
                    host_port = hostname_row.get('port') if hostname_row.get('port') else ''
                    hostname_info = hostname_row.get('strings') if hostname_row.get('strings') else ''
                    hostname_type = hostname_row.get('type') if hostname_row.get('type') else ''
                    '''
                    1. Name <--> Address
                    2. Target <--> Address
                    3. Name <--> String
                    (Port, Type)
                    '''
                    if host_ip:
                        # check if host_ip domain or IP
                        try:
                            ipaddress.ip_address(host_ip)
                        except Exception as e:
                            # its domain, need ip
                            host_ip_old = host_ip
                            host_ip = ''
                            for hostname_row_tmp in scan_result:
                                host_ip_tmp = hostname_row_tmp.get('address') if hostname_row_tmp.get('address') else ''
                                domain_tmp = hostname_row_tmp.get('name') if hostname_row_tmp.get('name') else ''
                                if host_ip_old == domain_tmp:
                                    try:
                                        ipaddress.ip_address(host_ip_tmp)
                                        host_ip = host_ip_tmp
                                    except Exception as e1:
                                        pass
                    if hostname_name != '' and host_ip != '':
                        # 1. Name <--> Address
                        if hostname == '':
                            if hostname_name not in hostnames_dict:
                                hostnames_dict[hostname_name] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}'.format(hostname_type)
                                }
                            else:
                                if host_ip not in hostnames_dict[hostname_name]['ip']:
                                    hostnames_dict[hostname_name]['ip'].append(host_ip)
                        # 1. Name <--> Address <--> Target
                        else:
                            if hostname not in hostnames_dict:
                                hostnames_dict[hostname] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}\nName: {}'.format(hostname_type, hostname_name)
                                }
                    elif hostname_name != '' and host_ip == '' and hostname_info != '':
                        # Name <--> String
                        if hostname_name not in hostnames_dict:
                            hostnames_dict[hostname_name] = {
                                'ip': [],
                                'description': 'Type: {}\nInfo: {}'.format(hostname_type, hostname_info)
                            }
                        else:
                            hostnames_dict[hostname_name]['description'] += '\nType: {}\nInfo: {}'.format(hostname_type, hostname_info)
                    elif hostname != '' and host_ip != '' and hostname_name == '':
                        # Target <--> Address
                        if hostname not in hostnames_dict:
                            hostnames_dict[hostname] = {
                                'ip': [host_ip],
                                'description': 'Type: {}'.format(hostname_type),
                            }
                    # add ports
                    if host_port != '' and host_ip != '':
                        if host_ip not in ports_dict:
                            ports_dict[host_ip] = [host_port]
                        else:
                            if host_port not in ports_dict[host_ip]:
                                ports_dict[host_ip].append(host_port)
        # hostnames_dict = {'google.com':{'ip':[8.8.8.8], 'description': '...' }}
        for hostname in hostnames_dict:
            ip_array = hostnames_dict[hostname]['ip']
            description = hostnames_dict[hostname]['description']
            for ip_address in ip_array:
                # check if valid ip (raises ValueError -> HTTP 500 on bad data)
                ip_obj = ipaddress.ip_address(ip_address)
                # IPv6 records are skipped when the "ignore IPv6" option is set
                if (':' not in ip_address) or (':' in ip_address and not form.ignore_ipv6.data):
                    current_host = db.select_project_host_by_ip(current_project['id'], ip_address)
                    if not current_host:
                        host_id = db.insert_host(current_project['id'], ip_address, current_user['id'], form.hosts_description.data)
                    else:
                        host_id = current_host[0]['id']
                    current_hostname = db.select_ip_hostname(host_id, hostname)
                    if not current_hostname:
                        hostname_id = db.insert_hostname(host_id, hostname, description, current_user['id'])
                    else:
                        hostname_id = current_hostname[0]['id']
                        db.update_hostname(hostname_id, description)
        # ports_dict = {'ip':['8888']}
        for ip_address in ports_dict:
            # check if valid ip
            ports_arr = list(set(ports_dict[ip_address]))
            ip_obj = ipaddress.ip_address(ip_address)
            if (':' not in ip_address) or (':' in ip_address and not form.ignore_ipv6.data):
                current_host = db.select_project_host_by_ip(current_project['id'], ip_address)
                if not current_host:
                    host_id = db.insert_host(current_project['id'], ip_address, current_user['id'], form.hosts_description.data)
                else:
                    host_id = current_host[0]['id']
                for port_num in ports_arr:
                    port_num_int = int(port_num)
                    if port_num_int > 0 and port_num_int < 65536:
                        current_port = db.select_host_port(host_id, int(port_num), is_tcp=True)
                        if not current_port:
                            port_id = db.insert_host_port(host_id, port_num_int, True, 'unknown', form.ports_description.data, current_user['id'], current_project['id'])
    return render_template('project/tools/import/dnsrecon.html',
                           current_project=current_project,
                           tab_name='DNSrecon',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/theharvester/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def theharvester_page(project_id, current_project, current_user):
    # Show the theHarvester report-upload form; the POST handler below does the import.
    context = {
        'current_project': current_project,
        'tab_name': 'theHarvester',
    }
    return render_template('project/tools/import/theharvester.html', **context)
@routes.route('/project/<uuid:project_id>/tools/theharvester/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
@check_project_archived
def theharvester_page_form(project_id, current_project, current_user):
    # Import theHarvester XML reports: every <host> entry binds one hostname to
    # each of its comma-separated IP addresses; hosts/hostnames are created only
    # when they do not already exist in the project.
    #
    # Fix vs. previous revision: malformed entries (missing <ip>/<hostname> tags
    # or non-IP address strings) used to raise an unhandled AttributeError /
    # ValueError and crash the request with HTTP 500; they are now skipped and
    # reported back to the user via the errors list.
    form = theHarvesterForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        for file in form.xml_files.data:
            if file.filename:
                soup = BeautifulSoup(file.read(), "html.parser")
                scan_result = soup.findAll('host')
                for hostname_row in scan_result:
                    ip_tag = hostname_row.find('ip')
                    hostname_tag = hostname_row.find('hostname')
                    if ip_tag is None or hostname_tag is None:
                        # entry lacks the expected child tags - skip, keep importing
                        errors.append('Malformed <host> entry in file {}'.format(file.filename))
                        continue
                    hostname = hostname_tag.text
                    ip_array = ip_tag.text.split(', ')
                    for ip_address in ip_array:
                        # check valid ip; skip (and report) anything that is not one
                        try:
                            ipaddress.ip_address(ip_address)
                        except ValueError:
                            errors.append('Invalid IP "{}" in file {}'.format(ip_address, file.filename))
                            continue
                        current_host = db.select_project_host_by_ip(current_project['id'], ip_address)
                        if current_host:
                            host_id = current_host[0]['id']
                        else:
                            host_id = db.insert_host(current_project['id'], ip_address, current_user['id'],
                                                     form.hosts_description.data)
                        current_hostname = db.select_ip_hostname(host_id, hostname)
                        if not current_hostname:
                            hostname_id = db.insert_hostname(host_id, hostname, form.hostnames_description.data, current_user['id'])
    return render_template('project/tools/import/theharvester.html',
                           current_project=current_project,
                           tab_name='theHarvester',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/metasploit/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def metasploit_page(project_id, current_project, current_user):
    """Render the Metasploit XML import form."""
    return render_template(
        'project/tools/import/metasploit.html',
        current_project=current_project,
        tab_name='Metasploit',
    )
@routes.route('/project/<uuid:project_id>/tools/metasploit/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
@check_project_archived
def metasploit_page_form(project_id, current_project, current_user):
    """Import a Metasploit (v5) XML workspace export into the project.

    Pipeline: parse <hosts> (hosts + their <services> ports + optional nmap
    script <notes>), write hosts/ports to the DB, then — unless "only nmap"
    was requested — map <web_sites> to the created ports and import
    <web_vulns> as project issues.
    """
    form = MetasploitForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    '''
    <MetasploitV5>
    1. <hosts> - hosts info (domain/ip) - ignore <vulns>
    2. <events> - ignoring
    3. <web_sites> 
    4. <web_pages> - ignoring 
    5. <web_forms> - ignoring
    6. <web_vuln>
    Steps:
    1. Add hosts
    2. Add sites
    3. Add site vulns
    '''
    if not errors:
        for file in form.xml_files.data:
            if file.filename:
                soup = BeautifulSoup(file.read(), "html.parser")
                # Add hosts & ports
                hosts_obj = soup.find('hosts')
                scan_result = hosts_obj.findAll('host')
                # both dicts are keyed by the report-internal numeric ids so that
                # <notes>/<web_sites>/<web_vulns> can be cross-referenced later
                hosts_dict = {}
                ports_dict = {}
                for host_row in scan_result:
                    host_report_id = host_row.find('id').text
                    host_ip = host_row.find('address').text
                    host_mac = host_row.find('mac').text
                    host_state = host_row.find('state').text
                    host_os = host_row.find('os-name').text  # Linux
                    host_os_flavor = host_row.find('os-flavor').text  # ???
                    host_os_version = host_row.find('os-sp').text  # 2.6.X
                    host_os_lang = host_row.find('os-lang').text  # ???
                    host_os_arch = host_row.find('arch').text  # x86_64
                    host_os_detected_arch = host_row.find('detected-arch').text  # x86_64
                    host_os_family = host_row.find('os-family').text  # Linux
                    host_type = host_row.find('purpose').text  # device
                    host_info = host_row.find('info').text
                    host_comments = host_row.find('comments').text
                    # create Host OS string
                    host_os_full = ''
                    if host_os:
                        host_os_full += host_os
                        if host_os_family and host_os_family != host_os:
                            host_os_full += '({})'.format(host_os_family)
                    if host_os_flavor:
                        host_os_full += ' ' + host_os_flavor
                    if host_os_version:
                        host_os_full += ' ' + host_os_version
                    if host_os_lang:
                        host_os_full += ' Lang:{}'.format(host_os_lang)
                    if host_os_arch:
                        host_os_full += ' Arch:{}'.format(host_os_arch)
                    if host_os_detected_arch and host_os_detected_arch != host_os_arch:
                        host_os_full += ' Arch detected:{}'.format(host_os_detected_arch)
                    # create host description string
                    host_description_full = ''
                    if host_mac:
                        host_description_full += '\nMAC: {}'.format(host_mac)
                    if host_state:
                        host_description_full += '\nState: {}'.format(host_state)
                    if host_type:
                        host_description_full += '\nType: {}'.format(host_type)
                    if host_info:
                        host_description_full += '\nInfo: {}'.format(host_info)
                    if host_comments:
                        host_description_full += '\nComments: {}'.format(host_comments)
                    # check if ip correct (raises ValueError on a malformed report)
                    ipaddress.ip_address(host_ip)
                    hosts_dict[host_report_id] = {
                        'ip': host_ip,
                        'description': host_description_full.strip(' \t\n\r'),
                        'os': host_os_full
                    }
                    # add ports
                    services_object = host_row.find('services')
                    services_arr = services_object.findAll('service')
                    # add all ports to ports_dict
                    for port_row in services_arr:
                        port_report_id = port_row.find('id').text
                        port_num = int(port_row.find('port').text)  # 80
                        port_is_tcp = port_row.find('proto').text == 'tcp'
                        port_state = port_row.find('state').text  # open closed filtered TODO: add option which port to add
                        port_service = port_row.find('name').text  # ftp
                        port_info = port_row.find('info').text  # vsftpd 2.3.4
                        if port_num > 0 and port_num < 65536:
                            ports_dict[port_report_id] = {
                                'port': port_num,
                                'is_tcp': port_is_tcp,
                                'state': port_state,
                                'service': port_service,
                                'info': port_info,
                                'host_report_id': host_report_id
                            }
                    # add notes to port objects - nmap scripts
                    if form.add_nmap_scripts.data:
                        notes_object = host_row.find('notes')
                        notes_arr = notes_object.findAll('note')
                        for note_row in notes_arr:
                            script_name = note_row.find('ntype').text  # nmap.nse.smb-os-discovery.host
                            if script_name not in ['host.comments', 'host.info', 'host.os.nmap_fingerprint', 'host.name']:
                                host_report_id = note_row.find('host-id').text
                                script_critical = note_row.find('critical').text  # ???
                                service_report_id = note_row.find('service-id').text
                                # note data is usually base64 with a 16-byte serialization
                                # header; fall back to the raw text if decoding fails
                                try:
                                    script_data = base64.b64decode(note_row.find('data').text)[16:].decode('charmap').strip(' \n\t\r')
                                except Exception as e:
                                    script_data = note_row.find('data').text.strip(' \n\t\r')
                                # collapse runs of spaces
                                while '  ' in script_data:
                                    script_data = script_data.replace('  ', ' ')
                                note_full = 'Script: {}'.format(script_name)
                                if script_critical:
                                    note_full += '\nCritical: {}'.format(script_critical)
                                if script_data:
                                    note_full += '\nOutput:\n\n{}\n\n'.format(script_data)
                                note_full = note_full.strip(' \t\n\r')
                                # attach to the service when referenced, else to the host
                                if service_report_id:
                                    ports_dict[service_report_id]['info'] += '\n' + note_full
                                elif host_report_id:
                                    hosts_dict[host_report_id]['description'] += '\n' + note_full
                # add hosts (creates missing ones, stores PCF id for later lookups)
                for host_obj in hosts_dict:
                    current_host = db.select_project_host_by_ip(current_project['id'], hosts_dict[host_obj]['ip'])
                    if current_host:
                        host_id = current_host[0]['id']
                        if hosts_dict[host_obj]['description']:
                            db.update_host_description(host_id, hosts_dict[host_obj]['description'])
                        if hosts_dict[host_obj]['os']:
                            db.update_host_os(host_id, hosts_dict[host_obj]['os'])
                    else:
                        host_id = db.insert_host(current_project['id'], hosts_dict[host_obj]['ip'], current_user['id'],
                                                 hosts_dict[host_obj]['description'], os=hosts_dict[host_obj]['os'])
                    hosts_dict[host_obj]['pcf_id'] = host_id
                # add ports
                for port_obj in ports_dict:
                    current_port = db.select_host_port(hosts_dict[ports_dict[port_obj]['host_report_id']]['pcf_id'],
                                                       ports_dict[port_obj]['port'],
                                                       ports_dict[port_obj]['is_tcp'])
                    if current_port:
                        port_id = current_port[0]['id']
                        db.update_port_proto_description(port_id, ports_dict[port_obj]['service'], ports_dict[port_obj]['info'])
                    else:
                        port_id = db.insert_host_port(hosts_dict[ports_dict[port_obj]['host_report_id']]['pcf_id'],
                                                      ports_dict[port_obj]['port'], ports_dict[port_obj]['is_tcp'], ports_dict[port_obj]['service'],
                                                      ports_dict[port_obj]['info'], current_user['id'], current_project['id'])
                    ports_dict[port_obj]['pcf_id'] = port_id
                # ignoring websites due to it is connected with services which were added earlier
                if not form.only_nmap.data:
                    # create websites_dict: report web_site id -> PCF port/host/hostname ids
                    web_dict = {}
                    websites_obj = soup.find('web_sites')
                    website_row = websites_obj.findAll('web_site')
                    for website_obj in website_row:
                        web_id = website_obj.find('id').text
                        service_id = website_obj.find('service-id').text
                        vhost = website_obj.find('vhost').text
                        pcf_port_id = ports_dict[service_id]['pcf_id']
                        pcf_host_id = hosts_dict[ports_dict[service_id]['host_report_id']]['pcf_id']
                        pcf_hostname_id = 0
                        if vhost:
                            current_hostname = db.select_ip_hostname(pcf_host_id, vhost)
                            if current_hostname:
                                hostname_id = current_hostname[0]['id']
                            else:
                                hostname_id = db.insert_hostname(pcf_host_id, vhost, form.hostnames_description.data, current_user['id'])
                            pcf_hostname_id = hostname_id
                        web_dict[web_id] = {
                            'pcf_port_id': pcf_port_id,
                            'pcf_host_id': pcf_host_id,
                            'pcf_hostname_id': pcf_hostname_id
                        }
                    # Add web vulns
                    vulns_obj = soup.find('web_vulns')
                    vuln_row = vulns_obj.findAll('web_vuln')
                    for vuln_obj in vuln_row:
                        vuln_url = vuln_obj.find('path').text
                        vuln_method = vuln_obj.find('method').text
                        vuln_param = vuln_obj.find('pname').text
                        # params are base64 with a 4-byte serialization prefix; crude but workable
                        vuln_params = base64.b64decode(vuln_obj.find('params').text).decode('charmap')[4:]
                        vuln_description = vuln_obj.find('description').text
                        vuln_payload = vuln_obj.find('payload').text
                        vuln_website_id = vuln_obj.find('web-site-id').text
                        vuln_cvss = float(vuln_obj.find('risk').text)
                        vuln_name = 'Metasploit: {}'.format(vuln_obj.find('name').text)
                        vuln_poc_str = vuln_obj.find('proof').text
                        vuln_query = vuln_obj.find('query').text
                        vuln_description_full = vuln_description
                        if vuln_poc_str:
                            vuln_description_full += '\nPoC: {}'.format(vuln_poc_str)
                        if vuln_query:
                            vuln_description_full += '\nQuery: {}'.format(vuln_query)
                        if vuln_params:
                            vuln_description_full += '\nParams: {}'.format(vuln_params)
                        if vuln_payload:
                            vuln_description_full += '\nPayload: {}'.format(vuln_payload)
                        vuln_param_full = '({}) {}'.format(vuln_method, vuln_param)
                        # clamp out-of-range risk values to 0
                        if vuln_cvss < 0 or vuln_cvss > 10:
                            vuln_cvss = 0
                        services = {web_dict[vuln_website_id]['pcf_port_id']: [web_dict[vuln_website_id]['pcf_hostname_id']]}
                        issue_id = db.insert_new_issue_no_dublicate(vuln_name,
                                                                    vuln_description_full,
                                                                    vuln_url,
                                                                    vuln_cvss,
                                                                    current_user['id'],
                                                                    services,
                                                                    'Need to recheck',
                                                                    current_project['id'],
                                                                    cve='',
                                                                    cwe='',
                                                                    issue_type='web',
                                                                    fix='',
                                                                    param=vuln_param_full
                                                                    )
    return render_template('project/tools/import/metasploit.html',
                           current_project=current_project,
                           tab_name='Metasploit',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/nuclei/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def nuclei_page(project_id, current_project, current_user):
    """Render the Nuclei JSON import form."""
    return render_template(
        'project/tools/import/nuclei.html',
        current_project=current_project,
        tab_name='Nuclei',
    )
@routes.route('/project/<uuid:project_id>/tools/nuclei/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nuclei_page_form(project_id, current_project, current_user):
    """Import Nuclei JSON(L) scan output into the project.

    Each uploaded file is treated as JSON-lines; every finding is turned
    into a project issue. Hosts/ports/hostnames referenced by a finding
    are created on demand. Hostname->IP mapping can come from the form or
    (optionally) from a live DNS resolution.

    Bugfixes vs. previous revision:
    * severity was hard-coded to "info", so every imported issue got
      CVSS 0.0 — it is now read from ``info.severity`` of the report;
    * ``issue_path`` was assigned where ``issue_url`` was intended, so
      the default path '/' was silently dropped;
    * missing ``info.tags`` / ``info.reference`` / ``type`` keys no
      longer raise KeyError (guarded like the sibling fields).
    """
    form = NucleiForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if errors:
        return render_template('project/tools/import/nuclei.html',
                               current_project=current_project,
                               tab_name='Nuclei',
                               errors=errors)
    # hostnames dict: user-supplied hostname -> IP pairs must match up
    if len(form.hostnames.data) != len(form.ips.data):
        return render_template('project/tools/import/nuclei.html',
                               current_project=current_project,
                               tab_name='Nuclei',
                               errors=['Error with hostnames'])
    hostname_dict = dict(zip(form.hostnames.data, form.ips.data))
    auto_resolve = form.auto_resolve.data == 1
    # json files
    for file in form.json_files.data:
        if file.filename:
            # JSONL -> single JSON array
            json_data = json.loads('[{}]'.format(file.read().decode('charmap').strip(' \t\r\n').replace('\r', '').replace('\n', ',')))
            for issue_obj in json_data:
                # important fields
                issue_name = 'Nuclei: {}'.format(issue_obj['info']['name'])
                issue_tags_raw = issue_obj['info'].get('tags')
                issue_tags = 'Tags: {}'.format(', '.join(issue_tags_raw)) if issue_tags_raw else ""
                issue_description = issue_obj['info'].get('description', '')
                issue_references_raw = issue_obj['info'].get('reference')
                issue_references = "Links:\n{}".format('\n'.join([' - {}'.format(x) for x in issue_references_raw])) if issue_references_raw else ""
                # read severity from the report (was hard-coded to "info")
                issue_severity = str(issue_obj['info'].get('severity', 'info')).lower()
                issue_matcher_name = 'Matched: {}'.format(issue_obj['matcher-name']) if 'matcher-name' in issue_obj else ""
                issue_cvss = 0.0
                if issue_severity == 'low':
                    issue_cvss = 2.0
                elif issue_severity == 'medium':
                    issue_cvss = 5.0
                elif issue_severity == 'high':
                    issue_cvss = 8.0
                elif issue_severity == 'critical':
                    issue_cvss = 10.0
                issue_type = 'Script type: {}'.format(issue_obj['type']) if issue_obj.get('type') else ""
                issue_curl_cmd = 'Curl: {}'.format(issue_obj["curl-command"]) if "curl-command" in issue_obj else ''
                issue_ip = issue_obj["ip"] if "ip" in issue_obj else ""  # 192.168.3.11
                issue_host = issue_obj["host"] if "host" in issue_obj else ''  # https://google.com
                issue_url = ''
                issue_protocol = issue_obj["protocol"] if "protocol" in issue_obj else ''
                issue_port = 0
                issue_hostname = ''
                issue_cve = issue_obj["cve"] if "cve" in issue_obj else ''
                issue_cwe = issue_obj["cwe"] if "cwe" in issue_obj else ''
                # validate ip
                if issue_ip:
                    try:
                        ipaddress.ip_address(issue_ip)
                    except Exception as e:
                        issue_ip = ''
                if issue_host:
                    # check if url
                    url_obj = None
                    try:
                        url_obj = urlparse(issue_host)
                    except Exception as e:
                        # wrong url
                        pass
                    if url_obj:
                        # its web!
                        # check protocol
                        issue_protocol = 'http'
                        if url_obj.scheme:
                            issue_protocol = url_obj.scheme
                        # check port: scheme default, then explicit port
                        if issue_protocol == 'http':
                            issue_port = 80
                        elif issue_protocol == 'https':
                            issue_port = 443
                        if url_obj.port:
                            issue_port = url_obj.port
                        # check url path
                        if issue_obj["matched-at"].startswith(issue_host):
                            issue_url = issue_obj["matched-at"][len(issue_host):]
                        if not issue_url:
                            issue_url = '/'  # bugfix: was assigned to unused 'issue_path'
                        # ip or hostname
                        if not issue_ip and url_obj.hostname:
                            try:
                                ip_obj = ipaddress.ip_address(url_obj.hostname)
                                issue_ip = url_obj.hostname
                            except Exception as e:
                                issue_hostname = url_obj.hostname
                                pass
                        elif url_obj.hostname:
                            issue_hostname = url_obj.hostname
                if 'port' in issue_obj:
                    issue_port = int(issue_obj['port'])
                # everything not explicitly handled above goes into the description
                blacklist_tags = ["template-id", "info", "host", "matched-at",
                                  "timestamp", "curl-command", "type", "port",
                                  "matcher-name", "matcher-status", "template",
                                  "template-url", "protocol", "cve", "cwe", "ip"]
                issue_other_fields = ''
                for key_name in issue_obj:
                    if key_name not in blacklist_tags:
                        issue_other_fields += '{}: {}\n'.format(key_name, str(issue_obj[key_name]))
                if issue_port < 0 or issue_port > 65535:
                    issue_port = 0
                # resolve ip: form mapping first, optional live DNS second
                if not issue_ip and issue_hostname:
                    if issue_hostname in hostname_dict:
                        issue_ip = hostname_dict[issue_hostname]
                    elif auto_resolve:
                        try:
                            issue_ip = socket.gethostbyname(issue_hostname)
                        except Exception as e:
                            pass
                # if ip, port (, hostname)
                # create them in db
                services = {}
                if issue_ip:
                    # create host
                    current_host = db.select_project_host_by_ip(current_project['id'], issue_ip)
                    if current_host:
                        host_id = current_host[0]['id']
                    else:
                        host_id = db.insert_host(current_project['id'], issue_ip, current_user['id'],
                                                 comment=form.hosts_description.data)
                    # create port
                    current_port = db.select_host_port(host_id, issue_port, True)
                    if current_port:
                        port_id = current_port[0]['id']
                    else:
                        port_id = db.insert_host_port(host_id, issue_port, True, issue_protocol,
                                                      form.ports_description.data, current_user['id'],
                                                      current_project['id'])
                    # create hostname
                    hostname_id = 0
                    if issue_hostname:
                        current_hostname = db.select_ip_hostname(host_id, issue_hostname)
                        if current_hostname:
                            hostname_id = current_hostname[0]['id']
                        else:
                            hostname_id = db.insert_hostname(host_id, issue_hostname, form.hostnames_description.data,
                                                             current_user['id'])
                    services = {port_id: [hostname_id]}
                # create description
                issue_full_description = issue_description + '\n'
                if issue_matcher_name:
                    issue_full_description += '\n' + issue_matcher_name
                if issue_tags:
                    issue_full_description += '\n' + issue_tags
                if issue_type:
                    issue_full_description += '\n' + issue_type
                if issue_curl_cmd:
                    issue_full_description += '\n' + issue_curl_cmd
                if issue_references:
                    issue_full_description += '\n' + issue_references
                if issue_other_fields:
                    issue_full_description += '\n' + issue_other_fields
                # create issue
                issue_id = db.insert_new_issue_no_dublicate(issue_name,
                                                            issue_full_description,
                                                            issue_url,
                                                            issue_cvss,
                                                            current_user['id'],
                                                            services,
                                                            'Need to recheck',
                                                            current_project['id'],
                                                            issue_cve,
                                                            issue_cwe,
                                                            'web' if issue_protocol.startswith('http') else 'custom',
                                                            fix='',
                                                            param=''
                                                            )
    return render_template('project/tools/import/nuclei.html',
                           current_project=current_project,
                           tab_name='Nuclei')
@routes.route('/project/<uuid:project_id>/tools/nmap-helper/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def nmap_helper_page(project_id, current_project, current_user):
    """Render the Nmap command-builder helper page."""
    page_context = dict(current_project=current_project, tab_name='Nmap Helper')
    return render_template('project/tools/helpers/nmap-helper.html', **page_context)
@routes.route('/project/<uuid:project_id>/tools/pingcastle/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def pingcastle_page(project_id, current_project, current_user):
    """Render the PingCastle XML import form."""
    return render_template(
        'project/tools/import/pingcastle.html',
        current_project=current_project,
        tab_name='PingCastle',
    )
@routes.route('/project/<uuid:project_id>/tools/pingcastle/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def pingcastle_page_form(project_id, current_project, current_user):
    """Import a PingCastle healthcheck XML report into the project.

    Domain controllers become hosts (with port 88/kerberos holding the DC
    details); every <healthcheckriskrule> becomes an issue attached to the
    DC ports, with name/description/fix resolved from the bundled
    PingCastleDescription.resx resource file.

    Bugfixes vs. previous revision:
    * the .resx file is now opened via a context manager (was left open on
      a read error);
    * ``hasnullsession`` / ``sysvoloverwrite`` were compared as bs4 Tags
      against the string 'true' (always False) — now compared via ``.text``
      like the sibling checks.
    """
    form = PingCastleForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # prepare issues database: risk-id -> human-readable text snippets
        with open('./routes/ui/tools_files/PingCastle/PingCastleDescription.resx') as f:
            s = f.read()
        issues_database = {}
        issues_database_xml = BeautifulSoup(s, 'html.parser')
        for issue_obj in issues_database_xml.findAll('data'):
            issues_database[issue_obj.attrs['name']] = issue_obj.findAll('value')[0].text
        # xml files
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(), "html.parser")
                scan_obj = scan_result.healthcheckdata
                # add DCs
                domain_controllers = scan_obj.domaincontrollers
                dc_ports_dict = {}
                if domain_controllers:
                    for domain_obj in domain_controllers.findAll('healthcheckdomaincontroller'):
                        host_description = ''
                        host_os = '' if not domain_obj.operatingsystem else domain_obj.operatingsystem.text
                        if domain_obj.dcname: host_description += 'DC name: {}\n'.format(domain_obj.dcname.text)
                        if domain_obj.lastcomputerlogondate: host_description += 'Last Logon: {}\n'.format(domain_obj.lastcomputerlogondate.text)
                        if domain_obj.distinguishedname: host_description += 'Distinguished Name: {}\n'.format(domain_obj.distinguishedname.text)
                        if domain_obj.ownersid: host_description += 'Owner SID: {}\n'.format(domain_obj.ownersid.text)
                        if domain_obj.ownername: host_description += 'Owner Name: {}\n'.format(domain_obj.ownername.text)
                        if domain_obj.hasnullsession and domain_obj.hasnullsession.text == 'true': host_description += 'Has null session!\n'
                        if domain_obj.supportsmb1 and domain_obj.supportsmb1.text == 'true':
                            host_description += 'Supports SMB1!\n'
                        if domain_obj.smb1securitymode and domain_obj.smb1securitymode.text == 'NotTested':
                            host_description += 'SMB1SecurityMode: {}\n'.format(domain_obj.smb1securitymode.text)
                        if domain_obj.supportsmb2orsmb3 and domain_obj.supportsmb2orsmb3.text == 'true': host_description += 'Supports SMBv2 or SMBv3.\n'
                        if domain_obj.smb2securitymode: host_description += 'SMB2 security mode: {}\n'.format(domain_obj.smb2securitymode.text)
                        if domain_obj.remotespoolerdetected and domain_obj.remotespoolerdetected.text == 'true': host_description += 'Detected remote spooler.\n'
                        if domain_obj.pwdlastset: host_description += 'Last pwd set: {}.\n'.format(domain_obj.pwdlastset.text)
                        if domain_obj.rodc and domain_obj.rodc.text == 'true': host_description += 'Read-Only DC\n'
                        if domain_obj.sysvoloverwrite and domain_obj.sysvoloverwrite.text == 'true': host_description += 'SYSVOL overwrite\n'
                        if domain_obj.fsmo:
                            fsmo_result = ', '.join([x.text for x in domain_obj.fsmo.findAll("string")])
                            if fsmo_result:
                                host_description += 'FSMO: {}\n'.format(fsmo_result)
                        host_description = host_description.strip(' \n\t\r')
                        # TODO: fields LDAPSProtocols
                        try:
                            ip_obj = domain_obj.ip
                            for host_ip_obj in ip_obj.findAll('string'):
                                host_ip = host_ip_obj.text
                                # check for valid ip
                                ipaddress.ip_address(host_ip)
                                current_host = db.select_project_host_by_ip(current_project['id'], host_ip)
                                if current_host:
                                    current_host_id = current_host[0]['id']
                                    if host_os:
                                        db.update_host_os(current_host_id, host_os)
                                else:
                                    current_host_id = db.insert_host(current_project['id'], host_ip, current_user['id'], 'Added from PingCastle', os=host_os)
                                # add 88 port (kerberos) carrying the DC description
                                current_port = db.select_host_port(current_host_id, port_num=88, is_tcp=True)
                                if current_port:
                                    current_port_id = current_port[0]['id']
                                    if host_description:
                                        db.update_port_proto_description(current_port_id, 'kerberos', host_description)
                                else:
                                    current_port_id = db.insert_host_port(current_host_id, 88, True, 'kerberos',
                                                                          host_description, current_user['id'], current_project['id'])
                                dc_ports_dict[current_port_id] = ['0']
                        except Exception as e:
                            pass
                # Issues - RiskRules
                risk_rules = scan_obj.riskrules
                for risk_obj in risk_rules.findAll('healthcheckriskrule'):
                    issue_points = int(risk_obj.points.text)
                    issue_category = risk_obj.category.text  # PrivilegedAccounts
                    issue_model = risk_obj.model.text  # AccountTakeOver
                    issue_riskid = risk_obj.riskid.text.replace('-', '_')  # A_AdminSDHolder
                    issue_briefly = risk_obj.rationale.text
                    issue_links = issues_database[issue_riskid + '_Documentation'].replace(' ', '') if (issue_riskid + '_Documentation') in issues_database else ''
                    issue_purpose = issues_database[issue_riskid + '_Description'] if (issue_riskid + '_Description') in issues_database else ''
                    issue_fix = issues_database[issue_riskid + '_Solution'] if (issue_riskid + '_Solution') in issues_database else ''
                    issue_technical_description = issues_database[issue_riskid + '_TechnicalExplanation'] if (issue_riskid + '_TechnicalExplanation') in issues_database else ''
                    issue_name = 'PingCastle: {}'.format(issues_database[issue_riskid + '_Title'])
                    issue_full_description = 'Brief: {}\n\nTechnical information: {}\n\nTest purpose: {}\n\nLinks: \n{}'.format(
                        issue_briefly,
                        issue_technical_description,
                        issue_purpose,
                        issue_links
                    )
                    # map PingCastle points onto a rough CVSS bucket
                    if issue_points < 1:
                        issue_cvss = 0
                    elif issue_points < 10:
                        issue_cvss = 3
                    elif issue_points < 30:
                        issue_cvss = 6
                    else:
                        issue_cvss = 9.5
                    issue_id = db.insert_new_issue_no_dublicate(issue_name, issue_full_description, '', issue_cvss,
                                                                current_user['id'], dc_ports_dict, 'need to recheck',
                                                                current_project['id'], fix=issue_fix)
    return render_template('project/tools/import/pingcastle.html',
                           current_project=current_project,
                           tab_name='PingCastle',
                           errors=errors)
| from routes.ui import routes
from app import check_session, db, redirect, render_template, request, \
send_log_data, requires_authorization, csrf, config
from .project import check_project_access, check_project_archived
from urllib.parse import urlparse
from system.forms import *
from libnmap.parser import NmapParser
from libnessus.parser import NessusParser
import email_validator
import json
import codecs
import re
import io
from flask import Response, send_file
from bs4 import BeautifulSoup
import urllib.parse
from IPy import IP
import socket
import csv
import dicttoxml
import time
from xml.dom.minidom import parseString
import ipwhois
import shodan
from shutil import copyfile
import ipaddress
import whois
from os import path, remove
from system.crypto_functions import *
from system.security_functions import htmlspecialchars
from routes.ui.tools_addons import nmap_scripts
@routes.route('/project/<uuid:project_id>/tools/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def project_tools(project_id, current_project, current_user):
    """Show the list of import/helper tools available for the project."""
    template_ctx = {
        'current_project': current_project,
        'tab_name': 'Tools',
    }
    return render_template('project/tools/list.html', **template_ctx)
@routes.route('/project/<uuid:project_id>/tools/nmap/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nmap_page(project_id, current_project, current_user):
    """Render the Nmap XML import form."""
    return render_template(
        'project/tools/import/nmap.html',
        current_project=current_project,
        tab_name='Nmap',
    )
@routes.route('/project/<uuid:project_id>/tools/nmap/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nmap_page_form(project_id, current_project, current_user):
    """Import Nmap XML scan results into the project.

    Creates hosts/hostnames/ports according to the form's state rule
    (open / filtered / closed) and ignore lists, then runs the registered
    nmap-script plugins (service- and host-scoped) to enrich port/host
    info and create issues and credentials.

    Bugfixes vs. previous revision:
    * the 'One of uploaded files was incorrect!' error message started
      with a Cyrillic 'О' (U+041E) instead of a Latin 'O';
    * an empty "ignore services" field produced [''] which silently
      ignored services with an empty name.
    """
    form = NmapForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        add_empty_hosts = form.add_no_open.data
        # parse ignore-ports list: "80,53/udp" -> [[80,'tcp'],[53,'udp']]
        ignore_ports = form.ignore_ports.data.replace(' ', '')
        ignore_port_arr1 = ignore_ports.split(',') if ignore_ports else []
        ignore_port_array = []
        for port_str in ignore_port_arr1:
            protocol = 'tcp'
            port_num = port_str
            if '/' in port_str:
                if port_str.split('/')[1].lower() == 'udp':
                    protocol = 'udp'
                port_num = port_str.split('/')[0]
            port_num = int(port_num)
            ignore_port_array.append([port_num, protocol])
        # filter out empty entries so an empty field ignores nothing
        ignore_services_array = [service.lower() for service in
                                 form.ignore_services.data.replace(' ', '').split(',') if service]
        for file in form.files.data:
            try:
                xml_report_data = file.read().decode('charmap')
                nmap_report = NmapParser.parse_fromstring(xml_report_data)
            except:
                return render_template('project/tools/import/nmap.html',
                                       current_project=current_project,
                                       errors=['One of uploaded files was incorrect!'],
                                       success=1,
                                       tab_name='Nmap')
            try:
                command_str = nmap_report.commandline
            except:
                command_str = ''
            for host in nmap_report.hosts:
                # check if we will add host: any service matching the state rule?
                found = 0
                os = ''
                if host.os and host.os.osmatches:
                    os = host.os.osmatches[0].name
                for service in host.services:
                    protocol = service.protocol.lower()
                    port_num = int(service.port)
                    service_name = service.service.lower()
                    if [port_num, protocol] not in ignore_port_array and service_name not in ignore_services_array:
                        if service.state == 'open':
                            found = 1
                        elif service.state == 'filtered' and \
                                form.rule.data in ['filtered', 'closed']:
                            found = 1
                        elif service.state == 'closed' and \
                                form.rule.data == 'closed':
                            found = 1
                if found or add_empty_hosts:
                    host_id = db.select_project_host_by_ip(
                        current_project['id'], host.address)
                    if not host_id:
                        host_info = form.hosts_description.data
                        host_id = db.insert_host(current_project['id'],
                                                 host.address,
                                                 current_user['id'],
                                                 host_info)
                    else:
                        host_id = host_id[0]['id']
                    if os:
                        db.update_host_os(host_id, os)
                    for hostname in host.hostnames:
                        if hostname and hostname != host.address:
                            hostname_id = db.select_ip_hostname(host_id, hostname)
                            if not hostname_id:
                                hostname_id = db.insert_hostname(host_id,
                                                                 hostname,
                                                                 form.hostnames_description.data,
                                                                 current_user['id'])
                            else:
                                hostname_id = hostname_id[0]['id']
                    for service in host.services:
                        is_tcp = service.protocol == 'tcp'
                        protocol_str = service.protocol.lower()
                        port_num = int(service.port)
                        service_name = service.service
                        service_banner = service.banner
                        add = 0
                        if [port_num,
                            protocol_str] not in ignore_port_array and service_name.lower() not in ignore_services_array:
                            if service.state == 'open':
                                add = 1
                            elif service.state == 'filtered' and \
                                    form.rule.data in ['filtered', 'closed']:
                                add = 1
                                service_banner += '\nstate: filtered'
                            elif service.state == 'closed' and \
                                    form.rule.data == 'closed':
                                add = 1
                                service_banner += '\nstate: closed'
                        if add == 1:
                            port_id = db.select_ip_port(host_id, service.port, is_tcp)
                            if not port_id:
                                port_id = db.insert_host_port(host_id,
                                                              service.port,
                                                              is_tcp,
                                                              service_name,
                                                              service_banner,
                                                              current_user['id'],
                                                              current_project['id'])
                            else:
                                port_id = port_id[0]['id']
                                db.update_port_proto_description(port_id,
                                                                 service_name,
                                                                 service_banner)
                            # run service-scoped nmap-script plugins
                            for script_xml in service.scripts_results:
                                for script in nmap_scripts.modules:
                                    script_class = script.nmap_plugin
                                    if script_class.script_id == script_xml['id'] and \
                                            script_class.script_source == 'service':
                                        script_obj = script_class(script_xml)
                                        if 'port_info' in script_obj.script_types:
                                            result = script_obj.port_info()
                                            update = False
                                            if 'protocol' in result and result['protocol'] and \
                                                    result['protocol'].lower() not in service_name.lower():
                                                service_name = result['protocol']
                                                update = True
                                            if 'info' in result and result['info'] and \
                                                    result['info'].lower() not in service_banner.lower():
                                                service_banner += '\n' + result['info']
                                                update = True
                                            if update:
                                                db.update_port_proto_description(port_id,
                                                                                 service_name,
                                                                                 service_banner)
                                        if 'issue' in script_obj.script_types:
                                            issues = script_obj.issues()
                                            for issue in issues:
                                                db.insert_new_issue_no_dublicate(issue['name'],
                                                                                 issue['description'] if 'description' in issue else '',
                                                                                 issue['path'] if 'path' in issue else '',
                                                                                 issue['cvss'] if 'cvss' in issue else 0.0,
                                                                                 current_user['id'],
                                                                                 {port_id: ['0']},
                                                                                 'need to recheck',
                                                                                 current_project['id'],
                                                                                 cve=issue['cve'] if 'cve' in issue else '',
                                                                                 cwe=issue['cwe'] if 'cwe' in issue else 0,
                                                                                 issue_type='service',
                                                                                 fix=issue['fix'] if 'fix' in issue else '',
                                                                                 param=issue['params'] if 'params' in issue else '')
                                        if 'credentials' in script_obj.script_types:
                                            credentials = script_obj.credentials()
                                            for cred in credentials:
                                                login = cred['login'] if 'login' in cred else ''
                                                cleartext = cred['cleartext'] if 'cleartext' in cred else ''
                                                hash_str = cred['hash'] if 'hash' in cred else ''
                                                description = cred['description'] if 'description' in cred else ''
                                                source = cred['source'] if 'source' in cred else ''
                                                dublicates_creds = db.select_creds_dublicates(current_project['id'],
                                                                                              login,
                                                                                              hash_str, cleartext,
                                                                                              description,
                                                                                              source,
                                                                                              '')
                                                if dublicates_creds:
                                                    dublicates_creds = dublicates_creds[0]
                                                    services = json.loads(dublicates_creds['services'])
                                                    if port_id not in services:
                                                        services[port_id] = ["0"]
                                                    else:
                                                        services[port_id].append("0")
                                                    db.update_creds(dublicates_creds['id'],
                                                                    login,
                                                                    hash_str,
                                                                    dublicates_creds['hash_type'],
                                                                    cleartext,
                                                                    description,
                                                                    source,
                                                                    services)
                                                else:
                                                    db.insert_new_cred(login,
                                                                       hash_str,
                                                                       'other',
                                                                       cleartext,
                                                                       description,
                                                                       source,
                                                                       {port_id: ["0"]},
                                                                       current_user['id'],
                                                                       current_project['id'])
                    # host-scoped plugin output attaches to the host's 0-port
                    current_host = db.select_host(host_id)[0]
                    host_zero_port = db.select_host_port(current_host['id'])[0]
                    for script_xml in host.scripts_results:
                        for script in nmap_scripts.modules:
                            script_class = script.nmap_plugin
                            if script_class.script_id == script_xml['id'] and \
                                    script_class.script_source == 'host':
                                script_obj = script_class(script_xml)
                                if 'server_info' in script_obj.script_types:
                                    result = script_obj.host_info()
                                    update = False
                                    if 'os' in result and result['os'] and \
                                            result['os'].lower() not in current_host['os'].lower():
                                        current_host['os'] = result['os']
                                        update = True
                                    if 'info' in result and result['info'] and \
                                            result['info'].lower() not in current_host['comment'].lower():
                                        current_host['comment'] += '\n' + result['info']
                                        update = True
                                    if update:
                                        db.update_host_comment_threats(current_host['id'],
                                                                       current_host['comment'],
                                                                       current_host['threats'],
                                                                       current_host['os'])
                                    if 'hostnames' in result:
                                        for hostname in result['hostnames']:
                                            hostnames_found = db.select_ip_hostname(current_host['id'], hostname)
                                            if not hostnames_found:
                                                db.insert_hostname(current_host['id'], hostname,
                                                                   form.hostnames_description.data, current_user['id'])
                                if 'issue' in script_obj.script_types:
                                    issues = script_obj.issues()
                                    for issue in issues:
                                        db.insert_new_issue_no_dublicate(issue['name'],
                                                                         issue['description'] if 'description' in issue else '',
                                                                         issue['path'] if 'path' in issue else '',
                                                                         issue['cvss'] if 'cvss' in issue else 0.0,
                                                                         current_user['id'],
                                                                         {host_zero_port['id']: ['0']},
                                                                         'need to recheck',
                                                                         current_project['id'],
                                                                         cve=issue['cve'] if 'cve' in issue else '',
                                                                         cwe=issue['cwe'] if 'cwe' in issue else 0,
                                                                         issue_type='service',
                                                                         fix=issue['fix'] if 'fix' in issue else '',
                                                                         param=issue['params'] if 'params' in issue else '')
                                if 'credentials' in script_obj.script_types:
                                    credentials = script_obj.credentials()
                                    for cred in credentials:
                                        login = cred['login'] if 'login' in cred else ''
                                        cleartext = cred['cleartext'] if 'cleartext' in cred else ''
                                        hash_str = cred['hash'] if 'hash' in cred else ''
                                        description = cred['description'] if 'description' in cred else ''
                                        source = cred['source'] if 'source' in cred else ''
                                        dublicates_creds = db.select_creds_dublicates(current_project['id'],
                                                                                      login,
                                                                                      hash_str, cleartext,
                                                                                      description,
                                                                                      source,
                                                                                      '')
                                        if dublicates_creds:
                                            dublicates_creds = dublicates_creds[0]
                                            services = json.loads(dublicates_creds['services'])
                                            if host_zero_port['id'] not in services:
                                                services[host_zero_port['id']] = ["0"]
                                            else:
                                                services[host_zero_port['id']].append("0")
                                            db.update_creds(dublicates_creds['id'],
                                                            login,
                                                            hash_str,
                                                            dublicates_creds['hash_type'],
                                                            cleartext,
                                                            description,
                                                            source,
                                                            services)
                                        else:
                                            db.insert_new_cred(login,
                                                               hash_str,
                                                               'other',
                                                               cleartext,
                                                               description,
                                                               source,
                                                               {host_zero_port['id']: ["0"]},
                                                               current_user['id'],
                                                               current_project['id'])
    return render_template('project/tools/import/nmap.html',
                           current_project=current_project,
                           errors=errors,
                           success=1,
                           tab_name='Nmap')
@routes.route('/project/<uuid:project_id>/tools/nessus/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nessus_page(project_id, current_project, current_user):
    """Render the Nessus report import form for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'Nessus',
    }
    return render_template('project/tools/import/nessus.html', **context)
@routes.route('/project/<uuid:project_id>/tools/nessus/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nessus_page_form(project_id, current_project, current_user):
    """Import Nessus XML report files into the current project.

    For every host in each uploaded report, a project host (and, when a
    non-IP name is available, a hostname) is created or reused.  Every
    report item creates/updates the port it was found on and is stored
    as a 'custom' issue.  Severity-0 (informational) findings are only
    imported when the "add info issues" checkbox is set.
    """
    form = NessusForm()
    form.validate()
    errors = []
    # flatten WTForms field errors into a single list for the template
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # when truthy, severity-0 findings are imported as issues too
        add_info_issues = form.add_info_issues.data
        # xml files
        for file in form.xml_files.data:
            if file.filename:
                xml_report_data = file.read().decode('charmap')
                scan_result = NessusParser.parse_fromstring(xml_report_data)
                for host in scan_result.hosts:
                    # reuse an existing project host or create a new one
                    host_id = db.select_project_host_by_ip(
                        current_project['id'], host.ip)
                    if not host_id:
                        host_id = db.insert_host(current_project['id'],
                                                 host.ip,
                                                 current_user['id'],
                                                 form.hosts_description.data)
                    else:
                        host_id = host_id[0]['id']
                    # add hostname
                    hostname_id = ''
                    hostname = host.name if host.name != host.ip else ''
                    # host.address is used as a hostname only when it is
                    # NOT parseable as an IP address
                    try:
                        test_hostname = IP(host.address)
                    except ValueError:
                        test_hostname = ''
                    if not hostname and not test_hostname and host.address:
                        hostname = host.address
                    if hostname:
                        hostname_id = db.select_ip_hostname(host_id, hostname)
                        if not hostname_id:
                            hostname_id = db.insert_hostname(host_id, hostname,
                                                             form.hostnames_description.data, current_user['id'])
                        else:
                            hostname_id = hostname_id[0]['id']
                    for issue in host.get_report_items:
                        # create port
                        is_tcp = issue.protocol == 'tcp'
                        port_id = db.select_ip_port(host_id, int(issue.port),
                                                    is_tcp)
                        if not port_id:
                            port_id = db.insert_host_port(host_id,
                                                          issue.port,
                                                          is_tcp,
                                                          issue.service,
                                                          form.ports_description.data,
                                                          current_user['id'],
                                                          current_project['id'])
                        else:
                            port_id = port_id[0]['id']
                            # existing port: refresh its service name only
                            db.update_port_service(port_id,
                                                   issue.service)

                        # add issue to created port
                        name = 'Nessus: {}'.format(issue.plugin_name)
                        try:
                            issue_info = issue.synopsis
                        except KeyError:
                            issue_info = ''
                        description = 'Plugin name: {}\r\n\r\nInfo: \r\n{} \r\n\r\nOutput: \r\n {}'.format(
                            issue.plugin_name,
                            issue_info,
                            issue.description.strip('\n'))

                        # add host OS
                        # NOTE(review): assumes the 'OS Identification'
                        # plugin output always has the OS on its second
                        # line after ' : ' -- an IndexError here would
                        # abort the whole import; TODO confirm format.
                        if issue.get_vuln_plugin["pluginName"] == 'OS Identification':
                            os = issue.get_vuln_plugin["plugin_output"].split('\n')[1].split(' : ')[1]
                            db.update_host_os(host_id, os)

                        # normalize the CVE list repr into "CVE-..., CVE-..."
                        cve = issue.cve.replace('[', '').replace(']', '').replace("'", '').replace(",", ', ') if issue.cve else ''

                        # prefer CVSSv3 over CVSSv2 base score when present
                        cvss = 0
                        severity = float(issue.severity)
                        if severity == 0 and issue.get_vuln_info['risk_factor'] == 'None':
                            cvss = 0
                        elif 'cvss3_base_score' in issue.get_vuln_info:
                            cvss = float(issue.get_vuln_info['cvss3_base_score'])
                        elif 'cvss_base_score' in issue.get_vuln_info:
                            cvss = float(issue.get_vuln_info['cvss_base_score'])
                        else:
                            pass
                        # services maps port_id -> hostname ids ('0' = the bare IP)
                        if hostname_id:
                            services = {port_id: ['0', hostname_id]}
                        else:
                            services = {port_id: ['0']}

                        if severity > 0 or (severity == 0 and add_info_issues):
                            db.insert_new_issue_no_dublicate(name, description, '', cvss,
                                                             current_user['id'], services,
                                                             'need to check',
                                                             current_project['id'],
                                                             cve, cwe=0, issue_type='custom', fix=issue.solution)

    return render_template('project/tools/import/nessus.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='Nessus')
@routes.route('/project/<uuid:project_id>/tools/nikto/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nikto_page(project_id, current_project, current_user):
    """Render the Nikto report import form for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'Nikto',
    }
    return render_template('project/tools/import/nikto.html', **context)
@routes.route('/project/<uuid:project_id>/tools/nikto/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nikto_page_form(project_id, current_project, current_user):
    """Import Nikto scan results (JSON, CSV or XML) into the project.

    Each uploaded file yields one host + one port; every finding in the
    file is stored as a 'Nikto scan' issue attached to that port.  The
    protocol is guessed as https when '443' occurs in the port number,
    http otherwise.
    """
    form = NiktoForm()
    form.validate()
    errors = []
    # flatten WTForms field errors into a single list for the template
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)

    if not errors:
        # json files
        for file in form.json_files.data:
            if file.filename:
                # strip trailing commas that Nikto emits in its JSON output
                json_report_data = file.read().decode('charmap').replace(',]', ']').replace(',}', '}')
                scan_result = json.loads(json_report_data)
                host = scan_result['ip']
                hostname = scan_result['host'] if scan_result['ip'] != scan_result['host'] else ''
                issues = scan_result['vulnerabilities']
                port = int(scan_result['port'])
                protocol = 'https' if '443' in str(port) else 'http'
                is_tcp = 1
                port_description = 'Added by Nikto scan'
                if scan_result['banner']:
                    port_description = 'Nikto banner: {}'.format(
                        scan_result['banner'])
                # add host
                host_id = db.select_project_host_by_ip(current_project['id'],
                                                       host)
                if not host_id:
                    host_id = db.insert_host(current_project['id'],
                                             host,
                                             current_user['id'],
                                             form.hosts_description.data)
                else:
                    host_id = host_id[0]['id']
                # add hostname
                hostname_id = ''
                if hostname and hostname != host:
                    hostname_id = db.select_ip_hostname(host_id, hostname)
                    if not hostname_id:
                        hostname_id = db.insert_hostname(host_id,
                                                         hostname,
                                                         form.hostnames_description.data,
                                                         current_user['id'])
                    else:
                        hostname_id = hostname_id[0]['id']
                # add port
                port_id = db.select_ip_port(host_id, port, is_tcp)
                if not port_id:
                    port_id = db.insert_host_port(host_id,
                                                  port,
                                                  is_tcp,
                                                  protocol,
                                                  port_description,
                                                  current_user['id'],
                                                  current_project['id'])
                else:
                    port_id = port_id[0]['id']
                # one issue per reported vulnerability
                for issue in issues:
                    method = issue['method']
                    url = issue['url']
                    full_url = '{} {}'.format(method, url)
                    osvdb = int(issue['OSVDB'])
                    info = issue['msg']
                    full_info = 'OSVDB: {}\n\n{}'.format(osvdb, info)
                    services = {port_id: ['0']}
                    if hostname_id:
                        services = {port_id: ['0', hostname_id]}
                    db.insert_new_issue('Nikto scan', full_info, full_url, 0,
                                        current_user['id'], services,
                                        'need to check',
                                        current_project['id'],
                                        cve=0,
                                        cwe=0,
                                        )
        # csv load
        for file in form.csv_files.data:
            if file.filename:
                scan_result = csv.reader(codecs.iterdecode(file, 'charmap'),
                                         delimiter=',')
                # expected columns: hostname, ip, port, OSVDB, method, url, msg
                for issue in scan_result:
                    if len(issue) == 7:
                        hostname = issue[0]
                        host = issue[1]
                        port = int(issue[2])
                        protocol = 'https' if '443' in str(port) else 'http'
                        is_tcp = 1
                        osvdb = issue[3]
                        full_url = '{} {}'.format(issue[4], issue[5])
                        full_info = 'OSVDB: {}\n{}'.format(osvdb, issue[6])
                        # add host
                        host_id = db.select_project_host_by_ip(
                            current_project['id'],
                            host)
                        if not host_id:
                            host_id = db.insert_host(current_project['id'],
                                                     host,
                                                     current_user['id'],
                                                     form.hosts_description.data)
                        else:
                            host_id = host_id[0]['id']
                        # add hostname
                        hostname_id = ''
                        if hostname and hostname != host:
                            hostname_id = db.select_ip_hostname(host_id,
                                                                hostname)
                            if not hostname_id:
                                hostname_id = db.insert_hostname(host_id,
                                                                 hostname,
                                                                 form.hostnames_description.data,
                                                                 current_user[
                                                                     'id'])
                            else:
                                hostname_id = hostname_id[0]['id']
                        # add port
                        port_id = db.select_ip_port(host_id, port, is_tcp)
                        if not port_id:
                            port_id = db.insert_host_port(host_id,
                                                          port,
                                                          is_tcp,
                                                          protocol,
                                                          form.ports_description.data,
                                                          current_user['id'],
                                                          current_project['id'])
                        else:
                            port_id = port_id[0]['id']
                        # add issue
                        services = {port_id: ['0']}
                        if hostname_id:
                            services = {port_id: ['0', hostname_id]}
                        db.insert_new_issue('Nikto scan', full_info, full_url,
                                            0,
                                            current_user['id'], services,
                                            'need to check',
                                            current_project['id'],
                                            cve=0,
                                            cwe=0,
                                            )
        # xml files
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser").niktoscan.scandetails
                host = scan_result['targetip']
                port = int(scan_result['targetport'])
                is_tcp = 1
                port_banner = scan_result['targetbanner']
                hostname = scan_result['targethostname']
                issues = scan_result.findAll("item")
                protocol = 'https' if '443' in str(port) else 'http'
                port_description = ''
                if port_banner:
                    port_description = 'Nikto banner: {}'.format(
                        scan_result['targetbanner'])
                # add host
                host_id = db.select_project_host_by_ip(
                    current_project['id'],
                    host)
                if not host_id:
                    host_id = db.insert_host(current_project['id'],
                                             host,
                                             current_user['id'],
                                             form.hosts_description.data)
                else:
                    host_id = host_id[0]['id']
                # add hostname
                hostname_id = ''
                if hostname and hostname != host:
                    hostname_id = db.select_ip_hostname(host_id,
                                                        hostname)
                    if not hostname_id:
                        hostname_id = db.insert_hostname(host_id,
                                                         hostname,
                                                         form.hostnames_description.data,
                                                         current_user['id'])
                    else:
                        hostname_id = hostname_id[0]['id']
                # add port
                port_id = db.select_ip_port(host_id, port, is_tcp)
                if not port_id:
                    port_id = db.insert_host_port(host_id,
                                                  port,
                                                  is_tcp,
                                                  protocol,
                                                  port_description,
                                                  current_user['id'],
                                                  current_project['id'])
                else:
                    port_id = port_id[0]['id']
                # one issue per <item> element
                for issue in issues:
                    method = issue['method']
                    url = issue.uri.contents[0]
                    full_url = '{} {}'.format(method, url)
                    osvdb = int(issue['osvdbid'])
                    info = issue.description.contents[0]
                    full_info = 'OSVDB: {}\n\n{}'.format(osvdb, info)
                    services = {port_id: ['0']}
                    if hostname_id:
                        services = {port_id: ['0', hostname_id]}
                    db.insert_new_issue('Nikto scan', full_info, full_url, 0,
                                        current_user['id'], services,
                                        'need to check',
                                        current_project['id'],
                                        cve=0,
                                        cwe=0,
                                        )

    return render_template('project/tools/import/nikto.html',
                           current_project=current_project,
                           tab_name='Nikto',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/acunetix/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def acunetix_page(project_id, current_project, current_user):
    """Render the Acunetix report import form for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'Acunetix',
    }
    return render_template('project/tools/import/acunetix.html', **context)
@routes.route('/project/<uuid:project_id>/tools/acunetix/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def acunetix_page_form(project_id, current_project, current_user):
    """Import Acunetix XML scan reports into the project.

    The scan's start URL determines the host/hostname/port; the target
    IP is taken from the URL if it is one, otherwise from the form's
    explicit host field, otherwise by DNS resolution when auto-resolve
    is enabled.  Every <reportitem> becomes an issue on that port.
    """
    form = AcunetixForm()
    form.validate()
    errors = []
    # flatten WTForms field errors into a single list for the template
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)

    if not errors:
        # NOTE(review): this local is never read below; the raw
        # form.auto_resolve.data is compared directly instead.
        auto_resolve = form.auto_resolve.data == 1
        # xml files
        for file in form.files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser").scangroup.scan
                start_url = scan_result.starturl.contents[0]
                parsed_url = urllib.parse.urlparse(start_url)
                protocol = parsed_url.scheme
                hostname = parsed_url.hostname
                # a schemeless start URL parses entirely into .path
                if hostname is None:
                    hostname = parsed_url.path
                port = parsed_url.port
                os_descr = scan_result.os.contents[0]
                port_banner = scan_result.banner.contents[0]
                web_banner = scan_result.webserver.contents[0]
                port_description = 'Banner: {} Web: {}'.format(port_banner,
                                                               web_banner)
                host_description = 'OS: {}'.format(os_descr)
                is_tcp = 1
                # default port from scheme when the URL has no explicit one
                if not port:
                    port = 80
                    if protocol == 'https':
                        port = 443
                # decide the IP: URL host if it is an IP, else form value,
                # else DNS resolution (when enabled)
                try:
                    IP(hostname)
                    host = hostname
                    hostname = ''
                except:
                    if form.host.data:
                        IP(form.host.data)
                        host = form.host.data
                    elif form.auto_resolve.data == 1:
                        host = socket.gethostbyname(hostname)
                    else:
                        errors.append('ip not resolved!')
                if not errors:
                    # add host
                    host_id = db.select_project_host_by_ip(current_project['id'], host)
                    if not host_id:
                        host_id = db.insert_host(current_project['id'],
                                                 host,
                                                 current_user['id'],
                                                 host_description)
                    else:
                        host_id = host_id[0]['id']
                        db.update_host_description(host_id, host_description)
                    # add hostname
                    hostname_id = ''
                    if hostname and hostname != host:
                        hostname_id = db.select_ip_hostname(host_id,
                                                            hostname)
                        if not hostname_id:
                            hostname_id = db.insert_hostname(host_id,
                                                             hostname,
                                                             'Added from Acunetix scan',
                                                             current_user['id'])
                        else:
                            hostname_id = hostname_id[0]['id']
                    # add port
                    port_id = db.select_ip_port(host_id, port, is_tcp)
                    if not port_id:
                        port_id = db.insert_host_port(host_id,
                                                      port,
                                                      is_tcp,
                                                      protocol,
                                                      port_description,
                                                      current_user['id'],
                                                      current_project['id'])
                    else:
                        port_id = port_id[0]['id']
                        db.update_port_proto_description(port_id, protocol,
                                                         port_description)
                    # one issue per <reportitem>
                    issues = scan_result.reportitems.findAll("reportitem")
                    for issue in issues:
                        issue_name = issue.contents[1].contents[0]
                        module_name = issue.modulename.contents[0]
                        uri = issue.affects.contents[0]
                        request_params = issue.parameter.contents[0]
                        full_uri = '{} params:{}'.format(uri, request_params)
                        impact = issue.impact.contents[0]
                        issue_description = issue.description.contents[0]
                        recomendations = issue.recommendation.contents[0]
                        issue_request = issue.technicaldetails.request.contents[
                            0]
                        cwe = 0
                        if issue.cwe:
                            cwe = int(issue.cwe['id'].replace('CWE-', ''))
                        cvss = float(issue.cvss.score.contents[0])
                        # TODO: check CVE field
                        full_info = '''Module: \n{}\n\nDescription: \n{}\n\nImpact: \n{}\n\nRecomendations: \n{}\n\nRequest: \n{}'''.format(
                            module_name, issue_description, impact,
                            recomendations, issue_request)
                        services = {port_id: ['0']}
                        if hostname_id:
                            services = {port_id: ['0', hostname_id]}
                        db.insert_new_issue(issue_name,
                                            full_info, full_uri,
                                            cvss,
                                            current_user['id'], services,
                                            'need to check',
                                            current_project['id'],
                                            cve=0,
                                            cwe=cwe
                                            )
    return render_template('project/tools/import/acunetix.html',
                           current_project=current_project,
                           tab_name='Acunetix',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/exporter/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def exporter_page(project_id, current_project, current_user):
    """Render the host/port exporter form for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'Exporter',
    }
    return render_template('project/tools/export/exporter.html', **context)
@routes.route('/project/<uuid:project_id>/tools/exporter/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def exporter_page_form(project_id, current_project, current_user):
    """Export filtered project hosts/ports as txt, csv, json or xml.

    Hosts are filtered by network/ip/hostname/issue/port/service/
    comment/threats; the result is rendered inline or sent as a file
    download depending on the "open in browser" checkbox.  The
    ``hosts_export`` option selects whether IPs, hostnames, both, or
    hostname-with-ip-fallback ("ip&hostname_unique") are emitted.
    """
    form = ExportHosts()
    form.validate()
    errors = []
    # flatten WTForms field errors into a single list for the template
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        result_hosts = db.search_hostlist(project_id=current_project['id'],
                                          network=form.network.data,
                                          ip_hostname=form.ip_hostname.data,
                                          issue_name=form.issue_name.data,
                                          port=form.port.data,
                                          service=form.service.data,
                                          comment=form.comment.data,
                                          threats=form.threats.data)
    else:
        return render_template(
            'project/tools/export/exporter.html',
            current_project=current_project,
            tab_name='Exporter',
            errors=errors)

    result = ''
    # '[newline]' is the UI's way of selecting a literal newline separator
    separator = '\n' if form.separator.data == '[newline]' \
        else form.separator.data
    host_export = form.hosts_export.data
    # parse "80/tcp,53/udp"-style filter into [[port, is_tcp], ...]
    ports_array = []
    if form.port.data:
        ports_array = [[int(port.split('/')[0]), port.split('/')[1] == 'tcp']
                       for port in form.port.data.split(',')]
    prefix = form.prefix.data
    postfix = form.postfix.data
    if form.filetype.data == 'txt':
        # txt worker
        response_type = 'text/plain'
        if not form.add_ports.data:
            # no ports
            ips = [host['ip'] for host in result_hosts]
            ips_hostnames = {}
            hostnames = []
            for host in result_hosts:
                host_hostname = db.select_ip_hostnames(host['id'])
                hostnames += [hostname['hostname'] for hostname in
                              host_hostname]
                ips_hostnames[host['ip']] = host_hostname
            hostnames = list(set(hostnames))
            if host_export == 'ip':
                result = separator.join([prefix + x + postfix for x in ips])
            elif host_export == 'hostname':
                result = separator.join([prefix + x + postfix for x in hostnames])
            elif host_export == 'ip&hostname':
                result = separator.join([prefix + x + postfix for x in ips + hostnames])
            elif host_export == 'ip&hostname_unique':
                # hostnames when a host has any, else fall back to its IP
                host_hostnames_arr = []
                for ip in ips_hostnames:
                    if not ips_hostnames[ip]:
                        host_hostnames_arr.append(ip)
                    else:
                        host_hostnames_arr += [hostname['hostname'] for
                                               hostname in ips_hostnames[ip]]
                result = separator.join([prefix + x + postfix for x in host_hostnames_arr])
        else:
            # with ports
            # preparation: issues
            if form.issue_name.data:
                port_ids = db.search_issues_port_ids(current_project['id'],
                                                     form.issue_name.data)
            for host in result_hosts:
                ports = db.select_host_ports(host['id'])
                hostnames = db.select_ip_hostnames(host['id'])
                for port in ports:
                    if (not form.port.data) or (
                            [port['port'], port['is_tcp']] in ports_array):
                        if form.service.data in port['service']:
                            if (not form.issue_name.data) or (
                                    port['id'] in port_ids):
                                if host_export == 'ip&hostname':
                                    result += '{}{}{}:{}{}'.format(separator,
                                                                   prefix,
                                                                   host['ip'],
                                                                   port['port'],
                                                                   postfix)
                                    for hostname in hostnames:
                                        result += '{}{}{}:{}{}'.format(separator,
                                                                       prefix,
                                                                       hostname[
                                                                           'hostname'],
                                                                       port['port'],
                                                                       postfix)
                                elif host_export == 'ip':
                                    result += '{}{}{}:{}{}'.format(separator,
                                                                   prefix,
                                                                   host['ip'],
                                                                   port['port'],
                                                                   postfix)
                                elif host_export == 'hostname':
                                    for hostname in hostnames:
                                        result += '{}{}{}:{}{}'.format(separator,
                                                                       prefix,
                                                                       hostname[
                                                                           'hostname'],
                                                                       port['port'],
                                                                       postfix)
                                elif host_export == 'ip&hostname_unique':
                                    if hostnames:
                                        for hostname in hostnames:
                                            result += '{}{}{}:{}{}'.format(
                                                separator,
                                                prefix,
                                                hostname[
                                                    'hostname'],
                                                port['port'],
                                                postfix)
                                    else:
                                        result += '{}{}{}:{}{}'.format(
                                            separator,
                                            prefix,
                                            host['ip'],
                                            port['port'],
                                            postfix)
            # strip the leading separator added by the first append
            if result:
                result = result[len(separator):]
    elif form.filetype.data == 'csv':
        response_type = 'text/plain'
        # 'host/hostname','port', 'type', 'service', 'description'
        # always with ports
        csvfile = io.StringIO()
        csv_writer = csv.writer(csvfile, dialect='excel', delimiter=';')
        columns = ['host', 'port', 'type', 'service', 'description']
        csv_writer.writerow(columns)
        # preparation: issues
        if form.issue_name.data:
            port_ids = db.search_issues_port_ids(current_project['id'],
                                                 form.issue_name.data)
        for host in result_hosts:
            ports = db.select_host_ports(host['id'])
            hostnames = db.select_ip_hostnames(host['id'])
            for port in ports:
                if (not form.port.data) or ([port['port'], port['is_tcp']]
                                            in ports_array):
                    if form.service.data in port['service']:
                        if (not form.issue_name.data) or (
                                port['id'] in port_ids):
                            if host_export == 'ip&hostname':
                                csv_writer.writerow([host['ip'],
                                                     port['port'],
                                                     'tcp' if port[
                                                         'is_tcp'] else 'udp',
                                                     port['service'],
                                                     port['description']])
                                for hostname in hostnames:
                                    csv_writer.writerow([hostname['hostname'],
                                                         port['port'],
                                                         'tcp' if port[
                                                             'is_tcp'] else 'udp',
                                                         port['service'],
                                                         port['description']])
                            elif host_export == 'ip':
                                csv_writer.writerow([host['ip'],
                                                     port['port'],
                                                     'tcp' if port[
                                                         'is_tcp'] else 'udp',
                                                     port['service'],
                                                     port['description']])
                            elif host_export == 'hostname':
                                for hostname in hostnames:
                                    csv_writer.writerow([hostname['hostname'],
                                                         port['port'],
                                                         'tcp' if port[
                                                             'is_tcp'] else 'udp',
                                                         port['service'],
                                                         port['description']])
                            elif host_export == 'ip&hostname_unique':
                                if hostnames:
                                    for hostname in hostnames:
                                        csv_writer.writerow(
                                            [hostname['hostname'],
                                             port['port'],
                                             'tcp' if port[
                                                 'is_tcp'] else 'udp',
                                             port['service'],
                                             port['description']])
                                else:
                                    csv_writer.writerow([host['ip'],
                                                         port['port'],
                                                         'tcp' if port[
                                                             'is_tcp'] else 'udp',
                                                         port['service'],
                                                         port['description']])
        result = csvfile.getvalue()
    elif form.filetype.data == 'json' or form.filetype.data == 'xml':
        if form.filetype.data == 'xml':
            response_type = 'text/xml'
        else:
            response_type = 'application/json'
        # first generates json
        # [{"<ip>":"","hostnames":["<hostname_1",..],
        # "ports":[ {"num":"<num>", "type":"tcp", "service":"<service>",
        # "description": "<comment>"},...],},...]
        json_object = []
        # preparation: issues
        if form.issue_name.data:
            port_ids = db.search_issues_port_ids(current_project['id'],
                                                 form.issue_name.data)
        for host in result_hosts:
            ports = db.select_host_ports(host['id'])
            hostnames = db.select_ip_hostnames(host['id'])
            host_object = {}
            host_object['ip'] = host['ip']
            host_object['hostnames'] = [hostname['hostname'] for hostname in
                                        hostnames]
            host_object['ports'] = []
            for port in ports:
                if (not form.port.data) or ([port['port'], port['is_tcp']]
                                            in ports_array):
                    if form.service.data in port['service']:
                        port_object = {}
                        port_object['num'] = port['port']
                        port_object['type'] = 'tcp' if port['is_tcp'] else 'udp'
                        port_object['service'] = port['service']
                        port_object['description'] = port['description']
                        if (not form.issue_name.data) or (
                                port['id'] in port_ids):
                            host_object['ports'].append(port_object)
            # drop hosts with no matching ports when a port/service/issue
            # filter was supplied
            if not ((not host_object['ports']) and (form.port.data or
                                                    form.service.data or
                                                    form.issue_name.data)):
                json_object.append(host_object)
        if form.filetype.data == 'xml':
            s = dicttoxml.dicttoxml(json_object)
            dom = parseString(s)
            result = dom.toprettyxml()
        else:
            result = json.dumps(json_object, sort_keys=True, indent=4)
    if form.open_in_browser.data:
        return Response(result, content_type=response_type)
    else:
        return send_file(io.BytesIO(result.encode()),
                         attachment_filename='{}.{}'.format(form.filename.data,
                                                            form.filetype.data),
                         mimetype=response_type,
                         as_attachment=True)
@routes.route('/project/<uuid:project_id>/tools/http-sniffer/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def http_sniffer(project_id, current_project, current_user):
    """Render the HTTP sniffer management page for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'HTTP-Sniffer',
    }
    return render_template('project/tools/sniffers/http.html', **context)
@routes.route('/project/<uuid:project_id>/tools/http-sniffer/add',
              methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def http_sniffer_add_form(project_id, current_project, current_user):
    """Create a new HTTP sniffer and redirect to its tab on success."""
    form = NewHTTPSniffer()
    form.validate()
    # flatten WTForms field errors into one list
    errors = [err for field_errors in form.errors.values()
              for err in field_errors]
    if not errors:
        sniffer_id = db.insert_new_http_sniffer(form.name.data,
                                                current_project['id'])
        return redirect(
            '/project/{}/tools/http-sniffer/#/sniffer_{}'.format(
                current_project['id'], sniffer_id))
    return redirect(
        '/project/{}/tools/http-sniffer/'.format(current_project['id']))
@routes.route(
    '/project/<uuid:project_id>/tools/http-sniffer/<uuid:sniffer_id>/edit',
    methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def http_sniffer_edit_form(project_id, current_project, current_user,
                           sniffer_id):
    """Clear a sniffer's captured requests or update its response config."""
    # the sniffer must exist and belong to the current project
    found = db.select_http_sniffer_by_id(str(sniffer_id))
    if not found or found[0]['project_id'] != current_project['id']:
        return redirect(
            '/project/{}/tools/http-sniffer/'.format(current_project['id']))
    current_sniffer = found[0]
    form = EditHTTPSniffer()
    form.validate()
    # flatten WTForms field errors into one list
    errors = [err for field_errors in form.errors.values()
              for err in field_errors]
    if not errors:
        action = form.submit.data
        if action == 'Clear':
            # drop all captured requests for this sniffer
            db.delete_http_sniffer_requests(current_sniffer['id'])
        elif action == 'Update':
            db.update_http_sniffer(current_sniffer['id'],
                                   form.status.data,
                                   form.location.data,
                                   form.body.data,
                                   form.save_credentials.data)
    return redirect(
        '/project/{}/tools/http-sniffer/#/sniffer_{}'.format(
            current_project['id'], current_sniffer['id']))
@routes.route('/http_sniff/<uuid:sniffer_id>/', defaults={"route_path": ""},
              methods=['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'CONNECT',
                       'OPTIONS', 'TRACE', 'PATCH'])
@csrf.exempt
@routes.route('/http_sniff/<uuid:sniffer_id>/<path:route_path>',
              methods=['GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'CONNECT',
                       'OPTIONS', 'TRACE', 'PATCH'])
@csrf.exempt
def http_sniffer_capture_page(sniffer_id, route_path):
    """Catch-all endpoint that records any HTTP request sent to a sniffer.

    Stores the raw request (start line, headers, body) together with the
    source IP and a millisecond timestamp.  When the sniffer has
    ``save_credentials`` enabled, Token/Authorization headers and
    credential-looking POST parameters are additionally parsed and saved
    as project credentials.  Responds with the sniffer's configured
    status, body and optional Location header.
    """
    current_sniffer = db.select_http_sniffer_by_id(str(sniffer_id))
    if not current_sniffer:
        return redirect('/')
    current_sniffer = current_sniffer[0]
    http_start_header = '''{} {} {}'''.format(request.method,
                                              request.environ['RAW_URI'],
                                              request.environ[
                                                  'SERVER_PROTOCOL'])
    http_headers = str(request.headers)
    data = request.get_data().decode('charmap')
    ip = request.remote_addr
    if current_sniffer['save_credentials']:
        data_headers = http_headers.replace('\r', '')
        # BUGFIX: the original guarded a body split with
        # `if '\n\n' in post_data:` where post_data had just been set to
        # '' (always false), and the result was never used anyway --
        # POST parameters are read from request.form below, so the dead
        # code was removed.
        # worker of headers
        for header_str in data_headers.split('\n\n')[0].split('\n'):
            header_name = header_str.split(':')[0].strip().lower()
            # NOTE(review): splitting on ':' keeps only the part before a
            # second colon (e.g. in URLs inside header values) -- kept
            # as in the original; confirm if full values are needed.
            header_data = ''
            if ':' in header_str:
                header_data = header_str.split(':')[1].strip()
            if header_data:
                # token header
                if header_name == 'token':
                    db.insert_new_cred('',
                                       '',
                                       'other',
                                       header_data,
                                       '"Token" header',
                                       "HTTP sniffer, source ip: {}".format(ip),
                                       {},
                                       '',
                                       current_sniffer['project_id']
                                       )
                if header_name == 'authorization':
                    auth_type = header_data.split(' ')[0].lower()
                    auth_data = ''
                    if ' ' in header_data:
                        auth_data = ' '.join(header_data.split(' ')[1:]).strip()
                    if auth_data:
                        if auth_type in ['bearer', 'token']:
                            db.insert_new_cred('',
                                               '',
                                               'other',
                                               auth_data,
                                               '"Authorization" bearer token',
                                               "HTTP sniffer, source ip: {}".format(ip),
                                               {},
                                               '',
                                               current_sniffer['project_id']
                                               )
                        elif auth_type == 'basic':
                            # best effort: malformed base64 is ignored
                            try:
                                decoded = base64.b64decode(auth_data.encode('charmap')).decode('charmap')
                                login = decoded.split(':')[0]
                                password = ''
                                if ':' in decoded:
                                    password = ':'.join(decoded.split(':')[1:])
                                if login or password:
                                    db.insert_new_cred(login,
                                                       '',
                                                       'other',
                                                       password,
                                                       '"Authorization" basic header',
                                                       "HTTP sniffer, source ip: {}".format(ip),
                                                       {},
                                                       '',
                                                       current_sniffer['project_id']
                                                       )
                            except Exception:
                                pass
                        elif auth_type == 'digest':
                            username = ""
                            if 'username="' in auth_data:
                                username = auth_data.split('username="')[1].split('"')[0]
                            if "username='" in auth_data:
                                username = auth_data.split("username='")[1].split("'")[0]
                            db.insert_new_cred(username,
                                               '',
                                               'other',
                                               auth_data,
                                               '"Authorization" digest header',
                                               "HTTP sniffer, source ip: {}".format(ip),
                                               {},
                                               '',
                                               current_sniffer['project_id']
                                               )
                        elif auth_type == 'ntlm':
                            # BUGFIX: the NTLM and fallback descriptions
                            # were swapped in the original code.
                            db.insert_new_cred('',
                                               '',
                                               'other',
                                               auth_data,
                                               '"Authorization" NTLM header',
                                               "HTTP sniffer, source ip: {}".format(ip),
                                               {},
                                               '',
                                               current_sniffer['project_id']
                                               )
                        else:
                            db.insert_new_cred('',
                                               '',
                                               'other',
                                               auth_data,
                                               '"Authorization" unknown header',
                                               "HTTP sniffer, source ip: {}".format(ip),
                                               {},
                                               '',
                                               current_sniffer['project_id']
                                               )
        # worker of post data: save the last login-like/password-like pair
        post_params = list(request.form)
        login = ''
        password = ''
        password_name = ''
        for param_name in post_params:
            param_value = request.form[param_name]
            if param_name.lower() in ['pwd', 'pass', 'password', 'key', 'keyphrase', 'secret', 'token']:
                password = param_value
                password_name = param_name
            elif param_name.lower() in ['login', 'user', 'username', 'email', 'id']:
                login = param_value
        if password_name:
            db.insert_new_cred(login,
                               '',
                               'other',
                               password,
                               'POST data "{}" parameter'.format(password_name),
                               "HTTP sniffer, source ip: {}".format(ip),
                               {},
                               '',
                               current_sniffer['project_id']
                               )
    # store the raw request (millisecond timestamp)
    current_time = int(time.time() * 1000)
    full_request_str = '''{}\n{}{}'''.format(http_start_header, http_headers,
                                             data)
    db.insert_new_http_sniffer_package(current_sniffer['id'], current_time,
                                       ip, full_request_str)
    # respond with the sniffer's configured status/body/location
    if current_sniffer['location']:
        return current_sniffer['body'], current_sniffer['status'], {
            'Content-Location': current_sniffer['location'],
            'Location': current_sniffer['location'],
            'Content-Type': 'text/plain'}
    else:
        return current_sniffer['body'], current_sniffer['status'], \
               {'Content-Type': 'text/plain'}
@routes.route(
    '/project/<uuid:project_id>/tools/http-sniffer/<uuid:sniffer_id>/delete',
    methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def http_sniffer_delete_form(project_id, current_project, current_user,
                             sniffer_id):
    """Soft-delete an HTTP sniffer after verifying project ownership."""
    list_url = '/project/{}/tools/http-sniffer/'.format(current_project['id'])
    # only delete when the sniffer exists and belongs to this project
    found = db.select_http_sniffer_by_id(str(sniffer_id))
    if found and found[0]['project_id'] == current_project['id']:
        db.safe_delete_http_sniffer(found[0]['id'])
    return redirect(list_url)
@routes.route('/project/<uuid:project_id>/tools/ipwhois/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def ipwhois_page(project_id, current_project, current_user):
    """Render the IP-whois lookup form for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'IPWhois',
    }
    return render_template('project/tools/scanners/ipwhois.html', **context)
@routes.route('/project/<uuid:project_id>/tools/ipwhois/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def ipwhois_page_form(project_id, current_project, current_user):
    """Run RDAP whois lookups and store the resulting ASN networks.

    The original body repeated the identical lookup/store sequence three
    times (single IP, host list, network list); it is factored into one
    nested helper.  A single-IP lookup redirects to the networks page on
    success; host/network lists are processed without redirecting.

    BUGFIX: two of the three copies appended 'IP was defined in
    standards' on ValueError (a copy-paste of the IPDefinedError
    message); the accurate 'Wrong ip format' message is now used
    everywhere.
    """
    form = IPWhoisForm()
    form.validate()
    errors = []
    # flatten WTForms field errors into a single list for the template
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)

    def _lookup_and_store(ip_str):
        """Whois-lookup one IP and create/update its ASN network.

        Returns True when the network was stored; otherwise appends a
        message to ``errors`` and returns False.
        """
        try:
            ip_data = ipwhois.IPWhois(ip_str).lookup_rdap()
            asn_num = ip_data["asn"]
            if asn_num == 'NA':
                errors.append('ASN does not exist!')
                return False
            network = ip_data["asn_cidr"]
            gateway = network.split('/')[0]
            mask = int(network.split('/')[1])
            ip_version = ip_data["network"]["ip_version"]
            full_description = "Country: {}\nDate: {}\nDescription: {}".format(
                ip_data["asn_country_code"],
                ip_data['asn_date'],
                ip_data["asn_description"])
            # create the network if unknown, otherwise refresh its data
            found = db.select_network_by_ip(current_project['id'],
                                            gateway,
                                            mask,
                                            ipv6=(ip_version == 'v6'))
            if not found:
                db.insert_new_network(gateway, mask, asn_num,
                                      full_description,
                                      current_project['id'],
                                      current_user['id'],
                                      ip_version == 'v6')
            else:
                network_id = found[0]['id']
                db.update_network(network_id, current_project['id'], gateway,
                                  mask, asn_num, full_description,
                                  ip_version == 'v6',
                                  found[0]['internal_ip'],
                                  found[0]['cmd'],
                                  json.loads(found[0]['access_from']),
                                  found[0]['name'])
            return True
        except ipwhois.IPDefinedError:
            # reserved/private addresses (RFC 1918 etc.) have no whois data
            errors.append('IP was defined in standards')
        except ValueError:
            errors.append('Wrong ip format')
        return False

    if form.ip.data:
        # single-IP mode redirects straight to the networks page on success
        if _lookup_and_store(form.ip.data):
            return redirect(
                '/project/{}/networks/'.format(current_project['id']))
    if form.hosts.data:
        for host in form.hosts.data:
            _lookup_and_store(host)
    if form.networks.data:
        for host in form.networks.data:
            _lookup_and_store(host)
    return render_template('project/tools/scanners/ipwhois.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='IPWhois')
@routes.route('/project/<uuid:project_id>/tools/shodan/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def shodan_page(project_id, current_project, current_user):
    """Render the Shodan import tool page for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'Shodan',
    }
    return render_template('project/tools/scanners/shodan.html', **context)
@routes.route('/project/<uuid:project_id>/tools/shodan/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def shodan_page_form(project_id, current_project, current_user):
    """Import scan data from Shodan into the current project.

    Three mutually exclusive input modes are checked in order:
      * form.ip       - a single IP address;
      * form.hosts    - a comma-separated list of IP addresses;
      * form.networks - a comma-separated list of project network UUIDs
                        whose ASNs are searched via the Shodan API.

    For each Shodan result the handler (optionally) creates or updates the
    enclosing network, then the host, its hostnames, open ports and the
    CVE issues reported by Shodan.  Returns the rendered tool page with
    any collected errors.
    """
    form = ShodanForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)

    # Resolve the API key: taken verbatim from the form, or looked up in
    # the user's / team's stored "shodan" configs by the given UUID.
    shodan_api_key = form.api_key.data
    if form.api_id.data and is_valid_uuid(form.api_id.data):
        users_configs = db.select_configs(team_id='0',
                                          user_id=current_user['id'],
                                          name='shodan')
        for team in db.select_user_teams(current_user['id']):
            users_configs += db.select_configs(team_id=team['id'],
                                               user_id='0',
                                               name='shodan')
        for config in users_configs:
            if config['id'] == form.api_id.data:
                shodan_api_key = config['data']
        if not shodan_api_key:
            errors.append('Key not found!')

    # Validate the key with a cheap, well-known lookup before doing real work.
    shodan_api = shodan.Shodan(shodan_api_key)
    try:
        shodan_api.host('8.8.8.8')
    except shodan.exception.APIError:
        errors.append('Wrong API Shodan key!')

    if not errors:
        if form.ip.data:
            try:
                shodan_json = shodan_api.host(form.ip.data)
                asn = int(shodan_json['asn'].replace('AS', ''))
                os_info = shodan_json['os']
                ip = shodan_json['ip_str']
                ip_version = IP(ip).version()
                asn_info = shodan_json['isp']
                coords = ''
                if 'latitude' in shodan_json:
                    coords = "lat {} long {}".format(shodan_json['latitude'],
                                                     shodan_json['longitude'])
                country = ''
                if 'country_name' in shodan_json:
                    country = shodan_json['country_name']
                city = ''
                if 'city' in shodan_json:
                    city = shodan_json['city']
                organization = shodan_json['org']
                if form.need_network.data:
                    # Create/refresh the network containing this IP, using
                    # the ASN's advertised prefixes.
                    net_tmp = ipwhois.net.Net('8.8.8.8')
                    asn_tmp = ipwhois.asn.ASNOrigin(net_tmp)
                    asn_full_data = asn_tmp.lookup(asn='AS{}'.format(asn))
                    for network in asn_full_data['nets']:
                        if ipaddress.ip_address(ip) in \
                                ipaddress.ip_network(network['cidr'], False):
                            cidr = network['cidr']
                            net_ip = cidr.split('/')[0]
                            net_mask = int(cidr.split('/')[1])
                            net_descr = network['description']
                            net_maintain = network['maintainer']
                            full_network_description = 'ASN info: {}\nCountry: {}\nCity: {}\nCoords: {}\nDescription: {}\nMaintainer: {}'.format(
                                asn_info, country, city,
                                coords, net_descr, net_maintain)
                            network_row = db.select_network_by_ip(
                                current_project['id'], net_ip, net_mask,
                                ip_version == 6)
                            if not network_row:
                                network_id = db.insert_new_network(
                                    net_ip, net_mask, asn,
                                    full_network_description,
                                    current_project['id'],
                                    current_user['id'],
                                    ip_version == 6)
                            else:
                                # BUGFIX: keep the selected row in its own
                                # variable; the old code reassigned
                                # network_id to the id string and then
                                # indexed it as a row, raising TypeError on
                                # every update.
                                network_id = network_row[0]['id']
                                db.update_network(
                                    network_id, current_project['id'],
                                    net_ip, net_mask, asn,
                                    full_network_description,
                                    ip_version == 6,
                                    network_row[0]['internal_ip'],
                                    network_row[0]['cmd'],
                                    json.loads(network_row[0]['access_from']),
                                    network_row[0]['name'])
                # Create or update the host itself.
                full_host_description = "Country: {}\nCity: {}\nOrganization: {}".format(
                    country, city, organization)
                host_id = db.select_project_host_by_ip(
                    current_project['id'], ip)
                if host_id:
                    host_id = host_id[0]['id']
                    db.update_host_description(host_id,
                                               full_host_description)
                else:
                    host_id = db.insert_host(current_project['id'], ip,
                                             current_user['id'],
                                             full_host_description)
                # Attach hostnames reported by Shodan.
                for hostname in shodan_json["hostnames"]:
                    hostname_obj = db.select_ip_hostname(host_id, hostname)
                    if not hostname_obj:
                        hostname_id = db.insert_hostname(host_id,
                                                         hostname,
                                                         'Added from Shodan',
                                                         current_user['id'])
                # Add open ports and their CVE issues.
                for port in shodan_json['data']:
                    product = ''
                    if 'product' in port:
                        product = port['product']
                    is_tcp = (port['transport'] == 'tcp')
                    port_num = int(port['port'])
                    port_info = ''
                    protocol = port['_shodan']["module"]
                    if 'info' in port:
                        port_info = port['info']
                    full_port_info = "Product: {}\nInfo: {}".format(
                        product,
                        port_info
                    )
                    port_id = db.select_ip_port(host_id, port_num,
                                                is_tcp=is_tcp)
                    if port_id:
                        port_id = port_id[0]['id']
                        db.update_port_proto_description(port_id,
                                                         protocol,
                                                         full_port_info)
                    else:
                        port_id = db.insert_host_port(host_id, port_num,
                                                      is_tcp, protocol,
                                                      full_port_info,
                                                      current_user['id'],
                                                      current_project['id'])
                    if "vulns" in port:
                        vulns = port['vulns']
                        for cve in vulns:
                            cvss = vulns[cve]['cvss']
                            summary = vulns[cve]['summary']
                            services = {port_id: ["0"]}
                            issue_id = db.insert_new_issue(
                                cve, summary, '', cvss,
                                current_user['id'], services,
                                'need to check', current_project['id'],
                                cve=cve)
            except shodan.exception.APIError as e:
                errors.append(e)
            except ValueError:
                errors.append('Wrong ip!')
        elif form.hosts.data:
            for host in form.hosts.data.split(','):
                try:
                    shodan_json = shodan_api.host(host)
                    asn = int(shodan_json['asn'].replace('AS', ''))
                    os_info = shodan_json['os']
                    ip = shodan_json['ip_str']
                    ip_version = IP(ip).version()
                    asn_info = shodan_json['isp']
                    coords = ''
                    if 'latitude' in shodan_json:
                        coords = "lat {} long {}".format(
                            shodan_json['latitude'],
                            shodan_json['longitude'])
                    country = ''
                    if 'country_name' in shodan_json:
                        country = shodan_json['country_name']
                    city = ''
                    if 'city' in shodan_json:
                        city = shodan_json['city']
                    organization = shodan_json['org']
                    if form.need_network.data:
                        # Create/refresh the network containing this IP.
                        net_tmp = ipwhois.net.Net('8.8.8.8')
                        asn_tmp = ipwhois.asn.ASNOrigin(net_tmp)
                        asn_full_data = asn_tmp.lookup(asn='AS{}'.format(asn))
                        for network in asn_full_data['nets']:
                            if ipaddress.ip_address(ip) in \
                                    ipaddress.ip_network(network['cidr'],
                                                         False):
                                cidr = network['cidr']
                                net_ip = cidr.split('/')[0]
                                net_mask = int(cidr.split('/')[1])
                                net_descr = network['description']
                                net_maintain = network['maintainer']
                                full_network_description = 'ASN info: {}\nCountry: {}\nCity: {}\nCoords: {}\nDescription: {}\nMaintainer: {}'.format(
                                    asn_info, country, city,
                                    coords, net_descr, net_maintain)
                                network_row = db.select_network_by_ip(
                                    current_project['id'], net_ip, net_mask,
                                    ip_version == 6)
                                if not network_row:
                                    network_id = db.insert_new_network(
                                        net_ip, net_mask, asn,
                                        full_network_description,
                                        current_project['id'],
                                        current_user['id'],
                                        ip_version == 6)
                                else:
                                    # BUGFIX: same row/id mix-up as in the
                                    # single-ip branch.
                                    network_id = network_row[0]['id']
                                    db.update_network(
                                        network_id, current_project['id'],
                                        net_ip, net_mask, asn,
                                        full_network_description,
                                        ip_version == 6,
                                        network_row[0]['internal_ip'],
                                        network_row[0]['cmd'],
                                        json.loads(network_row[0]['access_from']),
                                        network_row[0]['name'])
                    # Create or update the host.
                    # BUGFIX: supply os_info for the 'OS: {}' placeholder -
                    # the old call passed 3 args for 4 placeholders and
                    # raised IndexError on every imported host.
                    full_host_description = "Country: {}\nCity: {}\nOS: {}\nOrganization: {}".format(
                        country, city, os_info, organization)
                    host_id = db.select_project_host_by_ip(
                        current_project['id'], ip)
                    if host_id:
                        host_id = host_id[0]['id']
                        db.update_host_description(host_id,
                                                   full_host_description)
                    else:
                        host_id = db.insert_host(current_project['id'], ip,
                                                 current_user['id'],
                                                 full_host_description)
                    if os_info:
                        db.update_host_os(host_id, os_info)
                    # Attach hostnames reported by Shodan.
                    for hostname in shodan_json["hostnames"]:
                        hostname_obj = db.select_ip_hostname(host_id,
                                                             hostname)
                        if not hostname_obj:
                            hostname_id = db.insert_hostname(
                                host_id, hostname, 'Added from Shodan',
                                current_user['id'])
                    # Add open ports and their CVE issues.
                    for port in shodan_json['data']:
                        product = ''
                        if 'product' in port:
                            product = port['product']
                        is_tcp = (port['transport'] == 'tcp')
                        port_num = int(port['port'])
                        port_info = ''
                        protocol = port['_shodan']["module"]
                        if 'info' in port:
                            port_info = port['info']
                        full_port_info = "Product: {}\nInfo: {}".format(
                            product,
                            port_info
                        )
                        port_id = db.select_ip_port(host_id, port_num,
                                                    is_tcp=is_tcp)
                        if port_id:
                            port_id = port_id[0]['id']
                            db.update_port_proto_description(port_id,
                                                             protocol,
                                                             full_port_info)
                        else:
                            port_id = db.insert_host_port(
                                host_id, port_num, is_tcp, protocol,
                                full_port_info, current_user['id'],
                                current_project['id'])
                        if "vulns" in port:
                            vulns = port['vulns']
                            for cve in vulns:
                                cvss = vulns[cve]['cvss']
                                summary = vulns[cve]['summary']
                                services = {port_id: ["0"]}
                                issue_id = db.insert_new_issue(
                                    cve, summary, '', cvss,
                                    current_user['id'], services,
                                    'need to check',
                                    current_project['id'],
                                    cve=cve)
                except shodan.exception.APIError as e:
                    errors.append(e)
                except ValueError:
                    errors.append('Wrong ip!')
                time.sleep(1.1)  # shodan rate-limit delay
        elif form.networks.data:
            for network_id in form.networks.data.split(','):
                if is_valid_uuid(network_id):
                    current_network = db.select_network(network_id)
                    if current_network and current_network[0]['asn'] and \
                            current_network[0]['asn'] > 0:
                        asn = int(current_network[0]['asn'])
                        result = shodan_api.search('asn:AS{}'.format(asn),
                                                   limit=1000)
                        for shodan_json in result['matches']:
                            try:
                                os_info = shodan_json['os']
                                ip = shodan_json['ip_str']
                                ip_version = IP(ip).version()
                                asn_info = shodan_json['isp']
                                coords = ''
                                if 'latitude' in shodan_json:
                                    coords = "lat {} long {}".format(
                                        shodan_json['latitude'],
                                        shodan_json['longitude'])
                                country = ''
                                if 'country_name' in shodan_json:
                                    country = shodan_json['country_name']
                                city = ''
                                if 'city' in shodan_json:
                                    city = shodan_json['city']
                                organization = shodan_json['org']
                                if form.need_network.data:
                                    # Create/refresh the enclosing network.
                                    net_tmp = ipwhois.net.Net('8.8.8.8')
                                    asn_tmp = ipwhois.asn.ASNOrigin(net_tmp)
                                    asn_full_data = asn_tmp.lookup(
                                        asn='AS{}'.format(asn))
                                    for network in asn_full_data['nets']:
                                        if ipaddress.ip_address(ip) in \
                                                ipaddress.ip_network(
                                                    network['cidr'],
                                                    False):
                                            cidr = network['cidr']
                                            net_ip = cidr.split('/')[0]
                                            net_mask = int(cidr.split('/')[1])
                                            net_descr = network['description']
                                            net_maintain = network['maintainer']
                                            full_network_description = 'ASN info: {}\nCountry: {}\nCity: {}\nCoords: {}\nDescription: {}\nMaintainer: {}'.format(
                                                asn_info, country, city,
                                                coords, net_descr, net_maintain)
                                            network_row = db.select_network_by_ip(
                                                current_project['id'],
                                                net_ip, net_mask,
                                                ip_version == 6)
                                            if not network_row:
                                                network_id = db.insert_new_network(
                                                    net_ip, net_mask, asn,
                                                    full_network_description,
                                                    current_project['id'],
                                                    current_user['id'],
                                                    ip_version == 6)
                                            else:
                                                # BUGFIX: same row/id
                                                # mix-up as above.
                                                network_id = network_row[0]['id']
                                                db.update_network(
                                                    network_id,
                                                    current_project['id'],
                                                    net_ip, net_mask, asn,
                                                    full_network_description,
                                                    ip_version == 6,
                                                    network_row[0]['internal_ip'],
                                                    network_row[0]['cmd'],
                                                    json.loads(network_row[0]['access_from']),
                                                    network_row[0]['name'])
                                # Create or update the host.
                                full_host_description = "Country: {}\nCity: {}\nOS: {}\nOrganization: {}".format(
                                    country, city, os_info, organization)
                                host_id = db.select_project_host_by_ip(
                                    current_project['id'],
                                    ip)
                                if host_id:
                                    host_id = host_id[0]['id']
                                    db.update_host_description(
                                        host_id, full_host_description)
                                else:
                                    host_id = db.insert_host(
                                        current_project['id'], ip,
                                        current_user['id'],
                                        full_host_description)
                                # Attach hostnames.
                                for hostname in shodan_json["hostnames"]:
                                    hostname_obj = db.select_ip_hostname(
                                        host_id, hostname)
                                    if not hostname_obj:
                                        hostname_id = db.insert_hostname(
                                            host_id, hostname,
                                            'Added from Shodan',
                                            current_user['id'])
                                # A search match is a single service banner,
                                # so exactly one port per match.
                                port_num = int(shodan_json['port'])
                                product = ''
                                if 'product' in shodan_json:
                                    product = shodan_json['product']
                                is_tcp = int(shodan_json['transport'] == 'tcp')
                                port_info = ''
                                protocol = shodan_json['_shodan']["module"]
                                if 'info' in shodan_json:
                                    port_info = shodan_json['info']
                                full_port_info = "Product: {}\nInfo: {}".format(
                                    product,
                                    port_info
                                )
                                port_id = db.select_ip_port(host_id,
                                                            port_num,
                                                            is_tcp=is_tcp)
                                if port_id:
                                    port_id = port_id[0]['id']
                                    db.update_port_proto_description(
                                        port_id,
                                        protocol,
                                        full_port_info)
                                else:
                                    port_id = db.insert_host_port(
                                        host_id, port_num, is_tcp,
                                        protocol, full_port_info,
                                        current_user['id'],
                                        current_project['id'])
                                if "vulns" in shodan_json:
                                    vulns = shodan_json['vulns']
                                    for cve in vulns:
                                        cvss = vulns[cve]['cvss']
                                        summary = vulns[cve]['summary']
                                        services = {port_id: ["0"]}
                                        issue_id = db.insert_new_issue(
                                            cve, summary, '', cvss,
                                            current_user['id'],
                                            services,
                                            'need to check',
                                            current_project['id'],
                                            cve=cve)
                            except shodan.exception.APIError as e:
                                pass  # a lot of errors
                            except ValueError:
                                pass  # a lot of errors
                        time.sleep(1.1)  # shodan rate-limit delay
    return render_template('project/tools/scanners/shodan.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='Shodan')
@routes.route('/project/<uuid:project_id>/tools/checkmarx/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def checkmarx_page(project_id, current_project, current_user):
    """Render the Checkmarx report import page for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'Checkmarx',
    }
    return render_template('project/tools/import/checkmarx.html', **context)
@routes.route('/project/<uuid:project_id>/tools/checkmarx/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def checkmarx_page_form(project_id, current_project, current_user):
    """Import issues from Checkmarx XML report files.

    Every <query> element is one vulnerability class; every <result>
    under it is a concrete finding whose <path>/<pathnode> chain is
    rendered into a readable trace.  Checkmarx severity is mapped to a
    fixed CVSS score.  Returns the rendered tool page with form errors.
    """
    form = CheckmaxForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # xml files
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser")
                query_list = scan_result.find_all("query")
                for query in query_list:
                    # Query path looks like 'Lang\\Group\\Name Version:N'.
                    vulnerability_name = re.sub(' Version:[0-9]+', '', query.attrs['querypath'].split('\\')[-1])
                    language = query.attrs['language']
                    cwe = query.attrs['cweid']
                    vuln_array = query.find_all("result")
                    for vuln_example in vuln_array:
                        criticality = vuln_example.attrs['severity']  # High
                        filename = vuln_example.attrs['filename']
                        path_find = vuln_example.find_all("path")
                        paths_str_arrays = []
                        for path_obj in path_find:
                            paths_str = ''
                            # BUGFIX: collect the nodes of THIS path only;
                            # vuln_example.find_all duplicated every node
                            # of the result into every path.
                            path_nodes = path_obj.find_all("pathnode")
                            if path_nodes:
                                paths_str = '########## Path {} ###########\n'.format(path_find.index(path_obj) + 1)
                                for path_node in path_nodes:
                                    filename = path_node.find_all("filename")[0].text
                                    line_num = int(path_node.find_all("line")[0].text)
                                    colum_num = int(path_node.find_all("column")[0].text)
                                    code_arr = path_node.find_all("code")
                                    node_str = 'Filename: {}\nLine: {} Column: {}'.format(filename, line_num, colum_num)
                                    for code in code_arr:
                                        node_str += '\n' + code.text.strip(' \t')
                                    paths_str += node_str + '\n\n'
                            if paths_str:
                                paths_str_arrays.append(paths_str + '\n\n')
                        all_paths_str = '\n'.join(paths_str_arrays)
                        # Map Checkmarx severity onto a fixed CVSS score.
                        if criticality == 'High':
                            cvss = 9.5
                        elif criticality == 'Medium':
                            cvss = 8.0
                        elif criticality == 'Low':
                            cvss = 2.0
                        else:
                            cvss = 0
                        issue_id = db.insert_new_issue(vulnerability_name,
                                                       'Language: {}\n'.format(language) + all_paths_str, filename,
                                                       cvss, current_user['id'],
                                                       {}, 'need to check', current_project['id'], cwe=cwe,
                                                       issue_type='custom')
    return render_template('project/tools/import/checkmarx.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='Checkmarx')
@routes.route('/project/<uuid:project_id>/tools/depcheck/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def depcheck_page(project_id, current_project, current_user):
    """Render the OWASP Dependency-Check import page for the project."""
    context = {
        'current_project': current_project,
        'tab_name': 'DepCheck',
    }
    return render_template('project/tools/import/depcheck.html', **context)
@routes.route('/project/<uuid:project_id>/tools/depcheck/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def depcheck_page_form(project_id, current_project, current_user):
    """Import issues from OWASP Dependency-Check XML report files.

    Each <dependency> element describes one scanned artifact; each
    <vulnerability> under it becomes a project issue.  CVSS is taken
    from cvssv3 / cvssscore / cvssv2, in that order of preference.
    Returns the rendered tool page with any form errors.
    """
    form = Depcheck()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser")
                query_list = scan_result.find_all("dependency")
                for query in query_list:
                    filename = query.find("filename").text
                    filepath = query.find("filepath").text
                    vuln_array = query.find_all("vulnerability")
                    for vuln_example in vuln_array:
                        name = vuln_example.find('name').text
                        cve = ''
                        if name.startswith('CVE'):
                            cve = name
                        # Prefer CVSSv3 base score, then the legacy
                        # cvssscore field, then CVSSv2.
                        cvss_obj = vuln_example.find('cvssv3')
                        if cvss_obj:
                            cvss = float(cvss_obj.find('basescore').text)
                        elif vuln_example.find('cvssscore'):
                            cvss = float(vuln_example.find('cvssscore').text)
                        elif vuln_example.find('cvssv2'):
                            cvss = float(vuln_example.find('cvssv2').find('score').text)
                        else:
                            cvss = 0
                        cwes = vuln_example.find_all("cwe")
                        cwe = 0
                        if cwes:
                            cwe = int(cwes[0].text.replace('CWE-', '').split(' ')[0])
                        description = vuln_example.find('description').text
                        soft_search = vuln_example.find_all("software")
                        software_arr = []
                        for path_obj in soft_search:
                            s = str(path_obj.text)
                            versions = ''
                            if 'versionstartincluding' in path_obj.attrs:
                                versions += str(path_obj.attrs['versionstartincluding']) + '<=x'
                            if 'versionstartexcluding' in path_obj.attrs:
                                # BUGFIX: read the *start*-excluding bound
                                # (previously read 'versionendexcluding',
                                # raising KeyError when only the start bound
                                # was present and printing the wrong bound
                                # otherwise).
                                versions += str(path_obj.attrs['versionstartexcluding']) + '<x'
                            if 'versionendincluding' in path_obj.attrs:
                                versions += '<=' + str(path_obj.attrs['versionendincluding'])
                            if 'versionendexcluding' in path_obj.attrs:
                                versions += '<' + str(path_obj.attrs['versionendexcluding'])
                            if versions:
                                s += ' versions ({})'.format(versions)
                            software_arr.append(s)
                        all_software_str = '\n\n'.join(software_arr)
                        full_description = 'File: ' + filepath + '\n\n' + description \
                                           + '\n\nVulnerable versions: \n' + all_software_str
                        # NOTE(review): services is passed as the string '{}'
                        # while other importers pass a dict ({}) - confirm the
                        # db layer accepts both before changing it.
                        issue_id = db.insert_new_issue(name, full_description, filepath, cvss, current_user['id'],
                                                       '{}', 'need to recheck', current_project['id'], cve, cwe,
                                                       'custom', '', filename)
    return render_template('project/tools/import/depcheck.html',
                           current_project=current_project,
                           tab_name='DepCheck',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/openvas/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def openvas_page(project_id, current_project, current_user):
    """Render the OpenVAS report import page for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'OpenVAS',
    }
    return render_template('project/tools/import/openvas.html', **context)
@routes.route('/project/<uuid:project_id>/tools/openvas/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def openvas_page_form(project_id, current_project, current_user):
    """Import hosts, ports, hostnames and issues from OpenVAS XML reports.

    Each <result> element is one finding: its host/port pair is created
    in the project if missing, and an issue is inserted (via the
    no-duplicate db helper) with CVE references, links and solution text
    taken from the <nvt> block.  Returns the rendered tool page.
    """
    form = Openvas()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser")
                query_list = scan_result.find_all("result")
                for query in query_list:
                    if query.find('host'):  # disables result tags inside issue description
                        issue_host = query.find('host').text.split('\n')[0]
                        issue_hostname = query.find('host').find('hostname').text
                        # Port text looks like "443/tcp" or "general/tcp";
                        # 'general' findings are stored on port 0.
                        port_str = query.find('port').text.split('/')[0]
                        if port_str == 'general':
                            issue_port = 0
                        else:
                            issue_port = int(port_str)
                        issue_is_tcp = int(query.find('port').text.split('/')[1] == 'tcp')
                        nvt_obj = query.find('nvt')
                        issue_name = nvt_obj.find('name').text
                        issue_type = nvt_obj.find('family').text
                        issue_cvss = float(nvt_obj.find('cvss_base').text)
                        issue_long_description = nvt_obj.find('tags').text
                        solution_obj = nvt_obj.find('solution')
                        issue_solution = ''
                        # 'WillNotFix' solutions are deliberately dropped.
                        if solution_obj.get('type') != 'WillNotFix':
                            issue_solution = solution_obj.text
                        # Collect CVE ids and reference URLs from <refs>.
                        cve_list = []
                        links_list = []
                        refs_objects = nvt_obj.find('refs')
                        if refs_objects:
                            refs_objects = refs_objects.findAll('ref')
                            for ref_obj in refs_objects:
                                if ref_obj.get('type') == 'url':
                                    links_list.append(ref_obj.get('id'))
                                if ref_obj.get('type') == 'cve':
                                    cve_list.append(ref_obj.get('id'))
                        issue_short_description = ''
                        if query.find('description'):
                            issue_short_description = query.find('description').text
                        # check if host exists
                        host_id = db.select_project_host_by_ip(current_project['id'], issue_host)
                        if not host_id:
                            host_id = db.insert_host(current_project['id'], issue_host,
                                                     current_user['id'], form.hosts_description.data)
                        else:
                            host_id = host_id[0]['id']
                        # check if port exists
                        port_id = db.select_host_port(host_id, issue_port, issue_is_tcp)
                        if not port_id:
                            port_id = db.insert_host_port(host_id, issue_port, issue_is_tcp, 'unknown', form.ports_description.data,
                                                          current_user['id'], current_project['id'])
                        else:
                            port_id = port_id[0]['id']
                        # check if hostname exists
                        hostname_id = ''
                        if issue_hostname != '':
                            hostname_id = db.select_ip_hostname(host_id, issue_hostname)
                            if not hostname_id:
                                hostname_id = db.insert_hostname(host_id, issue_hostname,
                                                                 form.hostnames_description.data, current_user['id'])
                            else:
                                hostname_id = hostname_id[0]['id']
                        full_description = 'Short description: \n{}\n\nFull description:\n{}'.format(
                            issue_short_description,
                            issue_long_description)
                        cve_str = ','.join(cve_list)
                        if links_list:
                            full_description += '\n\nLinks:\n' + '\n'.join(links_list)
                        services = {
                            port_id: [hostname_id] if hostname_id else ['0']
                        }
                        db.insert_new_issue_no_dublicate(issue_name, full_description, '', issue_cvss, current_user['id'],
                                                         services, 'need to recheck', current_project['id'], cve_str,
                                                         0, 'custom', issue_solution, '')
    return render_template('project/tools/import/openvas.html',
                           current_project=current_project,
                           tab_name='OpenVAS',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/netsparker/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def netsparker_page(project_id, current_project, current_user):
    """Render the Netsparker report import page for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'NetSparker',
    }
    return render_template('project/tools/import/netsparker.html', **context)
@routes.route('/project/<uuid:project_id>/tools/netsparker/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def netsparker_page_form(project_id, current_project, current_user):
    """Import web findings from Netsparker XML report files.

    For every <vulnerability> the handler resolves the affected URL to a
    host / port / hostname (creating them if needed), inserts the issue
    and attaches the raw HTTP request/response pair as a PoC file.
    Returns the rendered tool page with any form errors.
    """
    def beautify_output(xml_str):
        # Strip the subset of HTML markup Netsparker embeds in its text
        # fields and turn it into plain text.
        if xml_str == ' ': xml_str = ''
        xml_str = xml_str.replace('<p>', '\t').replace('</p>', '\n')
        xml_str = xml_str.replace('<li>', '* ').replace('</li>', '\n')
        xml_str = xml_str.replace('<ol>', '\n').replace('</ol>', '\n')
        xml_str = xml_str.replace('<div>', '').replace('</div>', '\n')
        xml_str = xml_str.replace("<a target='_blank' href='", '').replace("'><i class='icon-external-link'></i>",
                                                                           ' - ')
        xml_str = xml_str.replace('<ul>', '').replace('</ul>', '')
        xml_str = xml_str.replace('</a>', '\n')
        return xml_str
    form = Netsparker()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(),
                                            "html.parser")
                query_list = scan_result.find_all("vulnerability")
                for vuln in query_list:
                    is_confirmed = vuln.get('confirmed') == 'True'
                    # BUGFIX: read the submitted checkbox value (.data); the
                    # bare field object is always truthy, so unconfirmed
                    # findings were silently skipped even when the user did
                    # not ask for confirmed-only import.
                    if is_confirmed or (not form.only_confirmed.data):
                        vuln_url = vuln.find('url').text
                        vuln_name = 'Netsparker: ' + vuln.find('type').text
                        vuln_severity = vuln.find('severity').text  # High, Medium, Low, Information, BestPractice
                        vuln_description = beautify_output(vuln.find('description').text)
                        vuln_impact = beautify_output(vuln.find('impact').text)
                        vuln_fix = beautify_output(vuln.find('actionstotake').text)
                        vuln_fix1 = beautify_output(vuln.find('remedy').text)
                        vuln_skills = beautify_output(vuln.find('requiredskillsforexploitation').text)
                        vuln_links = beautify_output(vuln.find('externalreferences').text)
                        vuln_fix1_links = beautify_output(vuln.find('remedyreferences').text)
                        vuln_request = beautify_output(vuln.find('rawrequest').text)
                        vuln_response = beautify_output(vuln.find('rawresponse').text)
                        vuln_poc = beautify_output(vuln.find('proofofconcept').text)
                        vuln_path = ''
                        vuln_args = ''
                        # parse extra info
                        # NOTE(review): every branch below appends info_name
                        # (the attribute-name constant) rather than the
                        # element's value - presumably info_obj.text was
                        # intended; confirm against a real report before
                        # changing.
                        info_list = vuln.find('extrainformation').findAll('info')
                        for info_obj in info_list:
                            info_name = info_obj.get('name')
                            if info_name == 'ParserAnalyzerEngine_InputName':
                                vuln_args += ', (Input) ' + info_name
                            elif info_name == 'ParserAnalyzerEngine_FormTargetAction':
                                vuln_path = info_name
                            elif info_name == 'ParserAnalyzerEngine_IdentifiedFieldName':
                                vuln_args += ', (Input) ' + info_name
                            elif info_name == 'CookieAnalyzerEngine_IdentifiedCookies':
                                vuln_args += ', (Cookie) ' + info_name
                            elif info_name == 'ExtractedVersion':
                                vuln_description += '\n\nExtracted version: ' + info_name
                            elif info_name == 'IdentifiedErrorMessage':
                                vuln_description += '\n\nError message: \n' + info_name
                            elif info_name == 'ExtractedIPAddresses':
                                vuln_description += '\n\nExtracted IP: ' + info_name
                            elif info_name == 'CustomField_FormAction':
                                vuln_path = info_name
                            elif info_name == 'ParserAnalyzerEngine_ExternalLinks':
                                vuln_description += '\n\nExternal links: \n' + info_name
                            elif info_name == 'ParserAnalyzerEngine_FormName':
                                vuln_args += ', (Form) ' + info_name
                            elif info_name == 'EmailDisclosure_EmailAddresses':
                                vuln_description += '\n\nFound email: ' + info_name
                            elif info_name == 'Options_Allowed_Methods':
                                vuln_description += '\n\nAllowed methods: ' + info_name
                            elif info_name == 'ParserAnalyzerEngine_FormTargetAction':
                                # NOTE(review): duplicate of an earlier
                                # branch - unreachable as written.
                                vuln_description = '\n\nInternal path: ' + info_name
                        vuln_cwe = vuln.find('classification').find('cwe').text
                        if not vuln_cwe: vuln_cwe = 0
                        vuln_cvss = 0
                        classification_obj = vuln.find('classification')
                        if classification_obj.find('cvss'):
                            for cvss_obj in classification_obj.find('cvss').findAll('score'):
                                if cvss_obj.find('type').text == 'Base':
                                    vuln_cvss = float(cvss_obj.find('value').text)
                        # parse url: fall back to http / default ports when
                        # the scheme or port is missing.
                        splitted_url = urllib.parse.urlsplit(vuln_url)
                        vuln_scheme = splitted_url.scheme
                        if not vuln_scheme:
                            vuln_scheme = 'http'
                        vuln_host_unverified = splitted_url.hostname
                        vuln_path_unverified = splitted_url.path
                        vuln_port = splitted_url.port
                        if not vuln_port:
                            if vuln_scheme == 'https':
                                vuln_port = 443
                            elif vuln_scheme == 'ftp':
                                vuln_port = 21
                            else:
                                vuln_port = 80
                        vuln_port = int(vuln_port)
                        if not vuln_path:
                            vuln_path = vuln_path_unverified
                        is_ip = False
                        vuln_host = ''
                        vuln_hostname = ''
                        try:
                            vuln_host = str(ipaddress.ip_address(vuln_host_unverified))
                        except ValueError:
                            vuln_hostname = vuln_host_unverified
                        if not vuln_host and vuln_hostname:
                            try:
                                vuln_host = str(socket.gethostbyname(vuln_host_unverified))
                            except:
                                pass
                        hostname_id = ''
                        port_id = ''
                        host_id = ''
                        if vuln_host:
                            dublicate_host = db.select_project_host_by_ip(current_project['id'], vuln_host)
                            if not dublicate_host:
                                host_id = db.insert_host(current_project['id'],
                                                         vuln_host,
                                                         current_user['id'],
                                                         form.hosts_description.data)
                            else:
                                host_id = dublicate_host[0]['id']
                            # add port
                            dublicate_port = db.select_host_port(host_id, vuln_port, True)
                            if not dublicate_port:
                                port_id = db.insert_host_port(host_id, vuln_port, True,
                                                              vuln_scheme, form.ports_description.data,
                                                              current_user['id'], current_project['id'])
                            else:
                                port_id = dublicate_port[0]['id']
                            # add hostname
                            if vuln_hostname:
                                dublicate_hostname = db.select_ip_hostname(host_id, vuln_hostname)
                                if not dublicate_hostname:
                                    hostname_id = db.insert_hostname(host_id, vuln_hostname,
                                                                     form.hostnames_description.data,
                                                                     current_user['id'])
                                else:
                                    hostname_id = dublicate_hostname[0]['id']
                        # add issue
                        full_description = 'URL: {}\n\nDescription: \n{}\n\n'.format(vuln_url, vuln_description)
                        if vuln_impact:
                            full_description += 'Impact: ' + vuln_impact + '\n\n'
                        if vuln_skills:
                            full_description += 'Skills: ' + vuln_skills + '\n\n'
                        if vuln_poc:
                            full_description += 'PoC: ' + vuln_poc + '\n\n'
                        if vuln_links:
                            full_description += 'Links: \n' + vuln_links + '\n\n'
                        full_fix = 'Actions: ' + vuln_fix + '\n Fix:' + vuln_fix1 + '\n Links: ' + vuln_fix1_links
                        services = {}
                        if hostname_id:
                            services[port_id] = [hostname_id]
                        elif port_id:
                            services[port_id] = ["0"]
                        issue_id = db.insert_new_issue_no_dublicate(vuln_name, full_description,
                                                                    vuln_path, vuln_cvss,
                                                                    current_user['id'],
                                                                    services,
                                                                    'need to recheck',
                                                                    current_project['id'],
                                                                    '', vuln_cwe, 'web', full_fix, vuln_args)
                        # create PoC from the raw request/response pair
                        poc_text = vuln_request + vuln_response
                        poc_text = poc_text.replace('\r', '')
                        file_data = b''
                        if config['files']['poc_storage'] == 'database':
                            # NOTE(review): 'charmap' raises on characters
                            # outside latin-1 - confirm report encoding
                            # before changing.
                            file_data = poc_text.encode('charmap')
                        poc_id = db.insert_new_poc(port_id if port_id else "0",
                                                   'Added from Netsparker',
                                                   'text',
                                                   'HTTP.txt',
                                                   issue_id,
                                                   current_user['id'],
                                                   hostname_id if hostname_id else '0',
                                                   storage=config['files']['poc_storage'],
                                                   data=file_data)
                        if config['files']['poc_storage'] == 'filesystem':
                            file_path = './static/files/poc/{}'.format(poc_id)
                            file_object = open(file_path, 'w')
                            file_object.write(poc_text)
                            file_object.close()
    return render_template('project/tools/import/netsparker.html',
                           current_project=current_project,
                           tab_name='NetSparker',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/qualys/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def qualys_page(project_id, current_project, current_user):
    """Render the Qualys report import page for the current project."""
    context = {
        'current_project': current_project,
        'tab_name': 'Qualys',
    }
    return render_template('project/tools/import/qualys.html', **context)
@routes.route('/project/<uuid:project_id>/tools/qualys/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def qualys_form(project_id, current_project, current_user):
    """Import hosts, services and issues from Qualys XML scan reports.

    For every <ip> element the handler optionally creates the host, adds
    the detected services, then imports both <vulns> and <practices>
    findings as project issues.  Returns the rendered tool page.
    """
    def beautify_output(xml_str):
        # Convert the minimal HTML markup Qualys embeds into plain text.
        xml_str = xml_str.replace('<p>', '\t').replace('<P>', '\t')
        xml_str = xml_str.replace('<BR>', '\n').replace('</p>', '\n')
        return xml_str
    form = QualysForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # xml files
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(), "html.parser")
                hosts_list = scan_result.find_all("ip")
                for host in hosts_list:
                    host_id = ''
                    hostname = ''
                    ip = host.attrs['value']
                    tmp_host = db.select_project_host_by_ip(current_project['id'], ip)
                    if tmp_host:
                        host_id = tmp_host[0]['id']
                    if 'name' in host.attrs and ip != host.attrs['name']:
                        hostname = host.attrs['name']
                        # TODO: dont forget to add hostname
                    # BUGFIX: read the submitted checkbox value (.data);
                    # the bare field object is always truthy, so hosts were
                    # created even when the option was unchecked.
                    if form.add_empty_host.data and not host_id:
                        host_id = db.insert_host(current_project['id'], ip, current_user['id'], form.hosts_description.data)
                    ports_list = host.find('services')
                    if ports_list:
                        for port_obj in ports_list.findAll('cat'):
                            if 'port' in port_obj.attrs and 'protocol' in port_obj.attrs:
                                if not host_id:
                                    host_id = db.insert_host(current_project['id'], ip, current_user['id'], form.hosts_description.data)
                                port = int(port_obj.attrs['port'])
                                is_tcp = int(port_obj.attrs['protocol'] == 'tcp')
                                service = port_obj.attrs['value']
                                port_id = db.select_host_port(host_id, port, is_tcp)
                                if port_id:
                                    port_id = port_id[0]['id']
                                    db.update_port_service(port_id, service)
                                else:
                                    port_id = db.insert_host_port(host_id, port, is_tcp, service, form.ports_description.data,
                                                                  current_user['id'], current_project['id'])
                    issues_list = host.find('vulns')
                    if issues_list:
                        for issue_obj in issues_list.findAll('cat'):
                            if not host_id:
                                host_id = db.insert_host(current_project['id'], ip, current_user['id'], form.hosts_description.data)
                            port_num = 0
                            is_tcp = 1
                            if 'port' in issue_obj.attrs and 'protocol' in issue_obj.attrs:
                                port_num = int(issue_obj.attrs['port'])
                                is_tcp = int(issue_obj.attrs['protocol'] == 'tcp')
                            port_id = db.select_host_port(host_id, port_num, is_tcp)
                            if not port_id:
                                port_id = db.insert_host_port(host_id, port_num, is_tcp, 'unknown', form.ports_description.data,
                                                              current_user['id'], current_project['id'])
                            else:
                                port_id = port_id[0]['id']
                            # Prefer CVSSv3 base, then CVSSv3 temporal, then
                            # CVSSv2 temporal; '-' means "no score".
                            cvss = 0
                            cvss_tmp1 = issue_obj.find('cvss3_base')
                            cvss_tmp2 = issue_obj.find('cvss3_temporal')
                            cvss_tmp3 = issue_obj.find('cvss_temporal')
                            if cvss_tmp1 and cvss_tmp1.text != '-':
                                cvss = float(cvss_tmp1.text)
                            elif cvss_tmp2 and cvss_tmp2.text != '-':
                                cvss = float(cvss_tmp2.text)
                            elif cvss_tmp3 and cvss_tmp3.text != '-':
                                cvss = float(cvss_tmp3.text)
                            issue_name = issue_obj.find('title').text
                            issue_diagnostic = issue_obj.find('diagnosis').text
                            issue_description = issue_obj.find('consequence').text
                            issue_solution = beautify_output(issue_obj.find('solution').text)
                            # TODO: add PoC
                            issue_output = issue_obj.find('result')
                            try:
                                issue_output = issue_obj.find('result').text
                            except AttributeError:
                                issue_output = ''
                            issue_full_description = 'Diagnosis: \n{} \n\nConsequence: \n{}'.format(issue_diagnostic, issue_description)
                            issue_full_description = beautify_output(issue_full_description)
                            services = {port_id: ['0']}
                            issue_id = db.insert_new_issue_no_dublicate(issue_name, issue_full_description, '', cvss, current_user['id'], services, 'need to recheck',
                                                                        current_project['id'], '', 0, 'custom', issue_solution, '')
                    issues_list = host.find('practices')
                    if issues_list:
                        for issue_obj in issues_list.findAll('practice'):
                            if not host_id:
                                host_id = db.insert_host(current_project['id'], ip, current_user['id'], form.hosts_description.data)
                            cve = ''
                            if 'cveid' in issue_obj.attrs:
                                cve = issue_obj.attrs['cveid']
                            issue_name = issue_obj.find('title').text
                            issue_diagnostic = issue_obj.find('diagnosis').text
                            issue_description = issue_obj.find('consequence').text
                            issue_solution = beautify_output(issue_obj.find('solution').text)
                            # TODO: add PoC
                            issue_output = issue_obj.find('result')
                            try:
                                issue_output = issue_obj.find('result').text
                            except AttributeError:
                                issue_output = ''
                            issue_full_description = 'Diagnosis: \n{} \n\nConsequence: \n{}'.format(issue_diagnostic, issue_description)
                            issue_full_description = beautify_output(issue_full_description)
                            issue_links = []
                            for url in issue_obj.findAll('url'):
                                issue_links.append(url.text)
                            for url in issue_obj.findAll('link'):
                                issue_links.append(url.text)
                            if issue_links:
                                issue_full_description += '\n\nLinks:\n' + '\n'.join(['- ' + url for url in issue_links])
                            cvss = 0
                            cvss_tmp1 = issue_obj.find('cvss3_base')
                            cvss_tmp2 = issue_obj.find('cvss3_temporal')
                            cvss_tmp3 = issue_obj.find('cvss_temporal')
                            if cvss_tmp1 and cvss_tmp1.text != '-':
                                cvss = float(cvss_tmp1.text)
                            elif cvss_tmp2 and cvss_tmp2.text != '-':
                                cvss = float(cvss_tmp2.text)
                            elif cvss_tmp3 and cvss_tmp3.text != '-':
                                cvss = float(cvss_tmp3.text)
                            # Try to recover the affected port from the first
                            # line of the scanner output.
                            port = 0
                            is_tcp = 1
                            info_str = issue_output.split('\n')[0]
                            if ' detected on port ' in info_str:
                                port = int(info_str.split(' detected on port ')[1].split(' ')[0])
                                if ' over ' in info_str.split(' detected on port ')[1]:
                                    is_tcp = int(info_str.split(' detected on port ')[1].split(' over ')[1].split(' ')[0] == 'TCP')
                            port_id = db.select_host_port(host_id, port, is_tcp)
                            if not port_id:
                                port_id = db.insert_host_port(host_id, port, is_tcp, 'unknown', form.ports_description.data,
                                                              current_user['id'], current_project['id'])
                            else:
                                port_id = port_id[0]['id']
                            services = {port_id: ['0']}
                            # BUGFIX: pass the CVE id in the cve slot (9th
                            # positional) instead of the url/path slot (3rd),
                            # matching every other insert_new_issue* call.
                            issue_id = db.insert_new_issue_no_dublicate(issue_name, issue_full_description, '', cvss, current_user['id'], services, 'need to recheck',
                                                                        current_project['id'], cve, 0, 'custom', issue_solution, '')
    return render_template('project/tools/import/qualys.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='Qualys')
@routes.route('/project/<uuid:project_id>/tools/whois/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def whois_page(project_id, current_project, current_user):
    """Render the Whois lookup tool page (GET, no side effects)."""
    template_kwargs = {
        'current_project': current_project,
        'tab_name': 'Whois',
    }
    return render_template('project/tools/scanners/whois.html', **template_kwargs)
@routes.route('/project/<uuid:project_id>/tools/whois/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def whois_page_form(project_id, current_project, current_user):
    """Handle the Whois tool form (POST).

    Three input modes, checked in order:
      * ``host_id`` + ``hostname``: whois-lookup a hostname that is already
        attached to a project host; store the summary as the hostname
        description and redirect back to the referring page.
      * ``hostname``: whois-lookup a single hostname; best-effort resolve it
        and auto-create the matching host/hostname records.
      * ``hostnames``: same as above for a list of hostnames.
    """

    def _whois_summary(whois_obj):
        # Build a human-readable multiline summary from the python-whois
        # response; only fields that are present and non-empty are included.
        result_str = ''
        if 'registrar' in whois_obj and whois_obj['registrar']:
            result_str += 'Registrar: {}\n'.format(whois_obj['registrar'])
        if 'whois_server' in whois_obj and whois_obj['whois_server']:
            result_str += 'Whois server: {}\n'.format(whois_obj['whois_server'])
        if 'referral_url' in whois_obj and whois_obj['referral_url']:
            result_str += 'Referral URL: {}\n'.format(whois_obj['referral_url'])
        if 'name_servers' in whois_obj and whois_obj['name_servers']:
            result_str += 'Name servers: \n{}\n'.format('\n'.join([' ' + x.lower() for x in set(whois_obj['name_servers'])]))
        if 'emails' in whois_obj and whois_obj['emails']:
            result_str += 'Emails: \n{}\n'.format('\n'.join([' ' + x for x in set(whois_obj['emails'])]))
        if 'dnssec' in whois_obj and whois_obj['dnssec']:
            result_str += 'DNSSec: {}\n'.format(whois_obj['dnssec'])
        if 'name' in whois_obj and whois_obj['name']:
            result_str += 'Name: {}\n'.format(whois_obj['name'])
        if 'org' in whois_obj and whois_obj['org']:
            result_str += 'Organization: {}\n'.format(whois_obj['org'])
        if 'address' in whois_obj and whois_obj['address']:
            result_str += 'Address: {}\n'.format(whois_obj['address'])
        if 'city' in whois_obj and whois_obj['city']:
            # bugfix: this field was mislabeled 'DNSSec' in the old copies
            result_str += 'City: {}\n'.format(whois_obj['city'])
        if 'state' in whois_obj and whois_obj['state']:
            result_str += 'State: {}\n'.format(whois_obj['state'])
        if 'zipcode' in whois_obj and whois_obj['zipcode']:
            result_str += 'Zipcode: {}\n'.format(whois_obj['zipcode'])
        if 'country' in whois_obj and whois_obj['country']:
            result_str += 'Country: {}\n'.format(whois_obj['country'])
        return result_str

    def _register_host(hostname):
        # Best effort: resolve the hostname and make sure a host + hostname
        # pair exists in the project; DNS/db failures are deliberately
        # ignored (the whois description is still stored by the caller).
        try:
            ip = socket.gethostbyname(hostname)
            hosts = db.select_ip_from_project(current_project['id'], ip)
            if not hosts:
                host_id = db.insert_host(current_project['id'],
                                         ip,
                                         current_user['id'],
                                         'Added from Whois information')
            else:
                host_id = hosts[0]['id']
            hostname_obj = db.select_ip_hostname(host_id, hostname)
            if not hostname_obj:
                db.insert_hostname(host_id, hostname, '', current_user['id'])
        except Exception:
            pass

    form = WhoisForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)

    if not errors:
        # extra validation for the "existing host" mode
        if form.host_id.data and is_valid_uuid(form.host_id.data):
            host = db.select_project_host(current_project['id'], form.host_id.data)
            if not host:
                errors.append('Host not found!')
            else:
                host_id = host[0]['id']
                hostname = db.select_ip_hostname(host_id, form.hostname.data)
                if not hostname:
                    errors.append('Hostname not found!')

    if not errors:
        if form.host_id.data:
            # whois of a hostname already bound to an existing host
            result_str = _whois_summary(whois.whois(form.hostname.data))
            if result_str:
                db.update_hostnames_description(current_project['id'], form.hostname.data, result_str)
            # fall back to the current path if the client sent no Referer
            # header (the old code raised TypeError on None)
            referer = request.headers.get("Referer") or request.path
            referer += '#/hostnames'
            return redirect(referer)
        if form.hostname.data:
            result_str = _whois_summary(whois.whois(form.hostname.data))
            # store the description even when result_str is empty
            _register_host(form.hostname.data)
            db.update_hostnames_description(current_project['id'], form.hostname.data, result_str)
        if form.hostnames.data:
            for hostname in form.hostnames.data:
                result_str = _whois_summary(whois.whois(hostname))
                # store the description even when result_str is empty
                _register_host(hostname)
                db.update_hostnames_description(current_project['id'], hostname, result_str)
    return render_template('project/tools/scanners/whois.html',
                           current_project=current_project,
                           errors=errors,
                           tab_name='Whois')
@routes.route('/project/<uuid:project_id>/tools/duplicator/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def duplicator_page(project_id, current_project, current_user):
    """Render the project Duplicator export page (GET, no side effects)."""
    template_kwargs = {
        'current_project': current_project,
        'tab_name': 'Duplicator',
    }
    return render_template('project/tools/export/duplicator.html', **template_kwargs)
@routes.route('/project/<uuid:project_id>/tools/duplicator/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def duplicator_page_form(project_id, current_project, current_user):
    """Copy selected objects from the current project into another project.

    Copies (depending on form checkboxes / selections): project info, scope,
    deadline, users/teams, hosts (with ports and hostnames), issues (with
    PoC files), evidence files, credentials, networks, notes, per-host notes
    and network paths. The destination project must be accessible to the
    current user and must not be archived.

    NOTE: no @check_project_archived here — duplicating FROM an archived
    project is allowed; only the destination is checked.
    """

    def _map_services(src_services):
        # Translate a {port_id: [hostname_id, ...]} mapping of the current
        # project into the matching port/hostname ids of the destination
        # project (matched by host ip, port number/proto and hostname).
        # Entries whose host cannot be found in the destination are dropped;
        # a port missing in the destination falls back to the host's
        # pseudo-port 0.
        destination_ports_dict = {}
        for current_port_id in src_services:
            current_port = db.select_project_port(current_project['id'],
                                                  current_port_id)
            if current_port:
                current_port = current_port[0]
                current_host = db.select_project_host(current_project['id'],
                                                      current_port['host_id'])
                if current_host:
                    current_host = current_host[0]
                    destination_host = db.select_project_host_by_ip(destination_project['id'],
                                                                    current_host['ip'])
                    if destination_host:
                        destination_host = destination_host[0]
                        destination_port = db.select_host_port(destination_host['id'],
                                                               int(current_port['port']),
                                                               bool(current_port['is_tcp']))
                        if destination_port:
                            destination_port = destination_port[0]
                            # hostname search
                            for current_hostname_id in src_services[current_port_id]:
                                if current_hostname_id == "0":
                                    # "0" means "bound to the ip, no hostname"
                                    destination_ports_dict.setdefault(destination_port['id'], []).append(current_hostname_id)
                                else:
                                    current_hostname = db.select_hostname(current_hostname_id)
                                    if current_hostname and current_hostname[0]['host_id'] == current_port['host_id']:
                                        current_hostname = current_hostname[0]
                                        destination_hostname = db.select_ip_hostname(destination_port['host_id'],
                                                                                     current_hostname['hostname'])
                                        if destination_hostname:
                                            destination_ports_dict.setdefault(destination_port['id'], []).append(destination_hostname[0]['id'])
                        else:
                            # port not found: fall back to the pseudo-port 0
                            destination_host_port_id = db.select_host_port(destination_host['id'])[0]['id']
                            if destination_host_port_id not in destination_ports_dict:
                                destination_ports_dict[destination_host_port_id] = [""]
                            elif "" not in destination_ports_dict[destination_host_port_id]:
                                destination_ports_dict[destination_host_port_id].append("")
        return destination_ports_dict

    def _copy_poc(current_poc, destination_issue_id, dst_port_id, dst_hostname_id):
        # Physically duplicate a PoC file and attach it to the new issue.
        # For 'database' storage the copy is read back into the row and the
        # temporary file removed again.
        current_poc_path = path.join('./static/files/poc/', current_poc['id'])
        destination_poc_id = gen_uuid()
        destination_poc_path = path.join('./static/files/poc/', destination_poc_id)
        copyfile(current_poc_path, destination_poc_path)
        file_data = b''
        if config['files']['poc_storage'] == 'database':
            f = open(destination_poc_path, 'rb')
            file_data = f.read()
            f.close()
            remove(destination_poc_path)
        db.insert_new_poc(
            dst_port_id,
            current_poc['description'],
            current_poc['type'],
            current_poc['filename'],
            destination_issue_id,
            current_user['id'],
            dst_hostname_id,
            destination_poc_id,
            storage=config['files']['poc_storage'],
            data=file_data
        )

    form = DuplicatorForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)

    destination_project = db.check_user_project_access(form.destination_project.data, current_user['id'])
    if not destination_project:
        errors.append("Destination project not found!")
    if not errors:
        # destination must be active (or at least not auto-archived past its deadline)
        if not (destination_project['status'] and not (destination_project['auto_archive'] and destination_project['end_date'] < time.time())):
            errors.append("Destination project is in archive!")
    if not errors:
        # project-level settings
        if form.copy_info.data:
            destination_project['description'] = current_project['description']
        if form.copy_scope.data:
            destination_project['scope'] = current_project['scope']
        if form.copy_deadline.data:
            destination_project['end_date'] = current_project['end_date']
            destination_project['auto_archive'] = 0
            destination_project['status'] = 1
            if int(destination_project['end_date']) < int(destination_project['start_date']):
                destination_project['start_date'] = current_project['start_date']
        if form.copy_users.data:
            old_users = json.loads(destination_project['testers'])
            new_users = old_users + json.loads(current_project['testers'])
            new_users = list(set(new_users))
            destination_project['testers'] = json.dumps(new_users)
        if form.copy_teams.data:
            old_teams = json.loads(destination_project['teams'])
            new_teams = old_teams + json.loads(current_project['teams'])
            new_teams = list(set(new_teams))
            destination_project['teams'] = json.dumps(new_teams)
        db.update_project_settings(destination_project['id'],
                                   destination_project['name'],
                                   destination_project['description'],
                                   destination_project['type'],
                                   destination_project['scope'],
                                   destination_project['start_date'],
                                   destination_project['end_date'],
                                   destination_project['auto_archive'],
                                   json.loads(destination_project['testers']),
                                   json.loads(destination_project['teams']))
        # check paths: selected paths implicitly pull in their endpoints
        paths_ids_list = list(set(form.paths.data))
        hosts_ids_list = list(set(form.hosts.data))
        networks_ids_list = list(set(form.networks.data))
        for path_id in paths_ids_list:
            current_path = db.select_path(path_id=path_id,
                                          project_id=current_project['id'])
            if current_path:
                current_path = current_path[0]
                if current_path['host_out'] and current_path['host_out'] not in hosts_ids_list:
                    hosts_ids_list.append(current_path['host_out'])
                if current_path['host_in'] and current_path['host_in'] not in hosts_ids_list:
                    hosts_ids_list.append(current_path['host_in'])
                if current_path['network_in'] and current_path['network_in'] not in networks_ids_list:
                    networks_ids_list.append(current_path['network_in'])
                if current_path['network_out'] and current_path['network_out'] not in networks_ids_list:
                    networks_ids_list.append(current_path['network_out'])
        # hosts
        for host_id in hosts_ids_list:
            current_host = db.select_project_host(current_project['id'], host_id)
            if current_host:
                current_host = current_host[0]
                # reuse destination host with the same ip if it exists
                destination_host = db.select_project_host_by_ip(destination_project['id'],
                                                                current_host['ip'])
                if destination_host:
                    destination_host = destination_host[0]
                    destination_host_id = destination_host['id']
                else:
                    destination_host_id = db.insert_host(destination_project['id'],
                                                         current_host['ip'],
                                                         current_user['id'])
                db.update_host_comment_threats(destination_host_id,
                                               current_host['comment'],
                                               json.loads(current_host['threats']),
                                               current_host['os'])
                # insert ports
                current_ports = db.select_host_ports(current_host['id'])
                for current_port in current_ports:
                    # check if port exists
                    destination_port = db.select_host_port(destination_host_id,
                                                           int(current_port['port']),
                                                           bool(current_port['is_tcp']))
                    if destination_port:
                        destination_port_id = destination_port[0]['id']
                    else:
                        destination_port_id = db.insert_host_port(destination_host_id,
                                                                  int(current_port['port']),
                                                                  bool(current_port['is_tcp']),
                                                                  '', '', current_user['id'],
                                                                  destination_project['id'])
                    db.update_port_proto_description(destination_port_id,
                                                     current_port['service'],
                                                     current_port['description'])
                # insert hostnames
                current_hostnames = db.select_ip_hostnames(current_host['id'])
                for current_hostname in current_hostnames:
                    # check if exists
                    destination_hostname = db.select_ip_hostname(destination_host_id, current_hostname['hostname'])
                    if destination_hostname:
                        db.update_hostname(destination_hostname[0]['id'], current_hostname['description'])
                    else:
                        db.insert_hostname(destination_host_id, current_hostname['hostname'],
                                           current_hostname['description'],
                                           current_user['id'])
        # issues
        for issue_id in form.issues.data:
            current_issue = db.select_issue(issue_id)
            if current_issue and current_issue[0]['project_id'] == current_project['id']:
                current_issue = current_issue[0]
                # translate the issue's port/hostname bindings
                destination_ports_dict = _map_services(json.loads(current_issue['services']))
                destination_issue_id = db.insert_new_issue_no_dublicate(
                    current_issue['name'], current_issue['description'],
                    current_issue['url_path'], current_issue['cvss'],
                    current_user['id'], destination_ports_dict, current_issue['status'],
                    destination_project['id'], current_issue['cve'],
                    current_issue['cwe'], current_issue['type'],
                    current_issue['fix'], current_issue['param']
                )
                # add PoCs
                current_pocs = db.select_issue_pocs(current_issue['id'])
                for current_poc in current_pocs:
                    if current_poc['port_id'] == "0":
                        # PoC not bound to any port
                        _copy_poc(current_poc, destination_issue_id, "0", "0")
                    else:
                        current_port = db.select_project_port(current_project['id'], current_poc['port_id'])
                        if current_port:
                            current_port = current_port[0]
                            current_host = db.select_project_host(current_project['id'], current_port['host_id'])
                            if current_host:
                                current_host = current_host[0]
                                destination_host = db.select_project_host_by_ip(destination_project['id'], current_host['ip'])
                                if destination_host:
                                    destination_host = destination_host[0]
                                    destination_port = db.select_host_port(destination_host['id'],
                                                                           current_port['port'],
                                                                           current_port['is_tcp'])
                                    if destination_port:
                                        destination_port = destination_port[0]
                                        if current_poc['hostname_id'] == "0":
                                            # PoC bound to a port only
                                            _copy_poc(current_poc, destination_issue_id,
                                                      destination_port['id'], "0")
                                        else:
                                            current_hostname = db.select_project_hostname(current_project['id'], current_poc['hostname_id'])
                                            if current_hostname:
                                                current_hostname = current_hostname[0]
                                                destination_hostname = db.select_ip_hostname(destination_host['id'], current_hostname['hostname'])
                                                if destination_hostname:
                                                    # PoC bound to port + hostname
                                                    _copy_poc(current_poc, destination_issue_id,
                                                              destination_port['id'], destination_hostname[0]['id'])
                                                else:
                                                    # hostname missing in destination: bind to port only
                                                    _copy_poc(current_poc, destination_issue_id,
                                                              destination_port['id'], "0")
        # files
        for current_file_id in form.files.data:
            current_file = db.select_files(current_file_id)
            if current_file and current_file[0]['project_id'] == current_project['id']:
                current_file = current_file[0]
                current_file_path = path.join('./static/files/code/', current_file['id'])
                destination_file_id = gen_uuid()
                destination_file_path = path.join('./static/files/code/', destination_file_id)
                # services
                destination_ports_dict = _map_services(json.loads(current_file['services']))
                # Bugfix: the destination file must exist before it can be
                # read back in 'database' storage mode; the old code opened
                # it before copying and crashed with FileNotFoundError.
                copyfile(current_file_path, destination_file_path)
                file_data = b''
                if config["files"]["files_storage"] == 'database':
                    f = open(destination_file_path, 'rb')
                    file_data = f.read()
                    f.close()
                    remove(destination_file_path)
                db.insert_new_file(destination_file_id,
                                   destination_project['id'],
                                   current_file['filename'],
                                   current_file['description'],
                                   destination_ports_dict,
                                   current_file['type'],
                                   current_user['id'],
                                   storage=config["files"]["files_storage"],
                                   data=file_data
                                   )
        # creds
        for cred_id in form.creds.data:
            current_cred = db.select_creds(cred_id)
            if current_cred and current_cred[0]['project_id'] == current_project['id']:
                current_cred = current_cred[0]
                # services
                destination_ports_dict = _map_services(json.loads(current_cred['services']))
                dublicate_creds = db.select_creds_dublicates(
                    destination_project['id'],
                    current_cred['login'],
                    current_cred['hash'],
                    current_cred['cleartext'],
                    current_cred['description'],
                    current_cred['source'],
                    current_cred['hash_type']
                )
                if dublicate_creds:
                    # merge services into the existing identical credential
                    dublicate_creds = dublicate_creds[0]
                    joined_services = json.loads(dublicate_creds['services'])
                    for port_id in destination_ports_dict:
                        if port_id not in joined_services:
                            joined_services[port_id] = []
                        for hostname_id in destination_ports_dict[port_id]:
                            if hostname_id not in joined_services[port_id]:
                                joined_services[port_id].append(hostname_id)
                    db.update_creds(
                        dublicate_creds['id'],
                        dublicate_creds['login'],
                        dublicate_creds['hash'],
                        dublicate_creds['hash_type'],
                        dublicate_creds['cleartext'],
                        dublicate_creds['description'],
                        dublicate_creds['source'],
                        joined_services
                    )
                else:
                    db.insert_new_cred(
                        current_cred['login'],
                        current_cred['hash'],
                        current_cred['hash_type'],
                        current_cred['cleartext'],
                        current_cred['description'],
                        current_cred['source'],
                        destination_ports_dict,
                        current_user['id'],
                        destination_project['id']
                    )
        # networks
        for network_id in networks_ids_list:
            current_network = db.select_project_networks_by_id(
                current_project['id'],
                network_id)
            if current_network:
                current_network = current_network[0]
                # services
                destination_ports_dict = _map_services(json.loads(current_network['access_from']))
                # check duplicates
                duplicate_network = db.select_network_by_ip(destination_project['id'],
                                                            current_network['ip'],
                                                            current_network['mask'],
                                                            current_network['is_ipv6'])
                if duplicate_network:
                    # merge 'access_from' services into the existing network
                    duplicate_network = duplicate_network[0]
                    joined_services = json.loads(duplicate_network['access_from'])
                    for port_id in destination_ports_dict:
                        if port_id not in joined_services:
                            joined_services[port_id] = []
                        for hostname_id in destination_ports_dict[port_id]:
                            if hostname_id not in joined_services[port_id]:
                                joined_services[port_id].append(hostname_id)
                    db.update_network(duplicate_network['id'],
                                      destination_project['id'],
                                      current_network['ip'],
                                      current_network['mask'],
                                      current_network['asn'],
                                      current_network['comment'],
                                      current_network['is_ipv6'],
                                      current_network['internal_ip'],
                                      current_network['cmd'],
                                      joined_services,
                                      current_network['name'])
                else:
                    db.insert_new_network(
                        current_network['ip'],
                        current_network['mask'],
                        current_network['asn'],
                        current_network['comment'],
                        destination_project['id'],
                        current_user['id'],
                        current_network['is_ipv6'],
                        current_network['internal_ip'],
                        current_network['cmd'],
                        destination_ports_dict,
                        current_network['name']
                    )
        # notes
        for note_id in form.notes.data:
            current_note = db.select_note(note_id)
            if current_note and current_note[0]['project_id'] == current_project['id']:
                current_note = current_note[0]
                db.insert_new_note(
                    destination_project['id'],
                    current_note['name'],
                    current_user['id'],
                    '',
                    current_note['text']
                )
        # host notes
        for host_id in form.note_hosts.data:
            current_host_notes = db.select_host_notes(host_id, current_project['id'])
            for current_note in current_host_notes:
                current_host = db.select_project_host(current_project['id'], current_note['host_id'])
                if current_host:
                    current_host = current_host[0]
                    destination_host = db.select_project_host_by_ip(destination_project['id'],
                                                                    current_host['ip'])
                    if destination_host:
                        destination_host_id = destination_host[0]['id']
                    else:
                        destination_host_id = db.insert_host(destination_project['id'],
                                                             current_host['ip'],
                                                             current_user['id'])
                    db.insert_new_note(
                        destination_project['id'],
                        current_note['name'],
                        current_user['id'],
                        destination_host_id,
                        current_note['text']
                    )
        # network paths
        for path_id in paths_ids_list:
            current_path = db.select_path(path_id=path_id,
                                          project_id=current_project['id'])
            if current_path:
                host_in = ''
                network_in = ''
                host_out = ''
                network_out = ''
                current_path = current_path[0]
                # endpoints were force-copied above, so lookups must succeed
                if current_path['host_out']:
                    source_host = db.select_host(current_path['host_out'])[0]
                    host_out = db.select_project_host_by_ip(destination_project['id'], source_host['ip'])[0]['id']
                if current_path['host_in']:
                    source_host = db.select_host(current_path['host_in'])[0]
                    host_in = db.select_project_host_by_ip(destination_project['id'], source_host['ip'])[0]['id']
                if current_path['network_out']:
                    source_network = db.select_network(current_path['network_out'])[0]
                    network_out = db.select_network_by_ip(destination_project['id'],
                                                          source_network['ip'],
                                                          source_network['mask'],
                                                          source_network['is_ipv6'])[0]['id']
                if current_path['network_in']:
                    source_network = db.select_network(current_path['network_in'])[0]
                    network_in = db.select_network_by_ip(destination_project['id'],
                                                         source_network['ip'],
                                                         source_network['mask'],
                                                         source_network['is_ipv6'])[0]['id']
                # skip if an identical path already exists
                dublicate_paths = db.search_path(project_id=destination_project['id'],
                                                 out_host=host_out,
                                                 out_network=network_out,
                                                 in_host=host_in,
                                                 in_network=network_in)
                if not dublicate_paths:
                    db.insert_path(project_id=destination_project['id'],
                                   out_host=host_out,
                                   out_network=network_out,
                                   in_host=host_in,
                                   in_network=network_in,
                                   description=current_path['description'],
                                   path_type=current_path['type'],
                                   direction=current_path['direction'])
    return render_template('project/tools/export/duplicator.html',
                           current_project=current_project,
                           tab_name='Duplicator',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/wpscan/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def wpscan_page(project_id, current_project, current_user):
    """Render the WPScan import page (GET, no side effects)."""
    template_kwargs = {
        'current_project': current_project,
        'tab_name': 'WPScan',
    }
    return render_template('project/tools/import/wpscan.html', **template_kwargs)
@routes.route('/project/<uuid:project_id>/tools/wpscan/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def wpscan_page_form(project_id, current_project, current_user):
    """Import one or more WPScan JSON reports into the current project.

    For every uploaded JSON file: creates/reuses the host, port and hostname
    derived from ``target_ip``/``target_url``, converts WordPress core /
    main-theme / plugin vulnerabilities into project issues, and stores the
    remaining findings as an HTML note attached to the host.
    """
    form = WPScanForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # json files
        for file in form.json_files.data:
            if file.filename:
                file_content = file.read().decode('charmap')
                try:
                    file_dict = json.loads(file_content)
                    current_ip = file_dict['target_ip']
                    # validate ip (raises ValueError on garbage -> caught below)
                    ipaddress.ip_address(current_ip)
                    current_host = db.select_project_host_by_ip(current_project['id'], current_ip)
                    if current_host:
                        current_host_id = current_host[0]['id']
                    else:
                        current_host_id = db.insert_host(current_project['id'],
                                                         current_ip,
                                                         current_user['id'],
                                                         "Added from WPScan")
                    # get protocol
                    current_url = file_dict['target_url']
                    current_url_obj = urllib.parse.urlparse(current_url)
                    current_scheme = current_url_obj.scheme.lower()
                    note_output = "<h1>Scan of {} </h1></br></br>".format(current_url)
                    if current_url_obj.port:
                        current_port_num = int(current_url_obj.port)
                    else:
                        # NOTE(review): if the URL has no explicit port and the
                        # scheme is neither http nor https, current_port_num is
                        # never assigned and the comparison below raises
                        # NameError (caught only if it is a ValueError subclass,
                        # which it is not) — confirm scheme is always http(s).
                        if current_scheme == 'http':
                            current_port_num = 80
                        elif current_scheme == 'https':
                            current_port_num = 443
                    current_wordpress_path = current_url_obj.path
                    if current_port_num < 1 or current_port_num > 65535:
                        raise Exception
                    # create port
                    current_port_obj = db.select_host_port(current_host_id,
                                                           current_port_num,
                                                           True)
                    if current_port_obj:
                        current_port_id = current_port_obj[0]['id']
                    else:
                        current_port_id = db.insert_host_port(current_host_id,
                                                              current_port_num,
                                                              True,
                                                              current_scheme,
                                                              'WordPress',
                                                              current_user['id'],
                                                              current_project['id'])
                    # create hostname ("0" means the issue binds to the bare IP)
                    hostname = current_url_obj.hostname
                    if hostname == current_ip:
                        current_hostname_id = "0"
                    else:
                        current_hostname = db.select_ip_hostname(current_host_id,
                                                                 hostname)
                        if current_hostname:
                            current_hostname_id = current_hostname[0]['id']
                        else:
                            current_hostname_id = db.insert_hostname(
                                current_host_id,
                                hostname,
                                "Added from WPScan",
                                current_user['id']
                            )
                    # Interesting findings -> appended to the host note only
                    interest_obj = file_dict['interesting_findings']
                    if interest_obj:
                        note_output += "<h1>Interesting findings </h1></br>"
                        for find_obj in interest_obj:
                            note_output += "<h2><b>URL:</b> " + find_obj["url"] + "</h2></br>"
                            note_output += "<b>Type:</b> " + find_obj["type"] + "</br>"
                            note_output += "<b>Description:</b> " + find_obj["to_s"] + "</br>"
                            note_output += "<b>Found by:</b> " + find_obj["found_by"] + "</br>"
                            note_output += "<b>Interesting entries:</b> <ol>"
                            for entry in find_obj["interesting_entries"]:
                                note_output += "<li>" + htmlspecialchars(entry) + "</li>"
                            note_output += "</ol></br>"
                            if "url" in find_obj["references"]:
                                note_output += "<b>Reference urls:</b> <ol>"
                                for url in find_obj["references"]["url"]:
                                    note_output += "<li>" + htmlspecialchars(url) + "</li>"
                                note_output += "</ol></br>"
                            if "metasploit" in find_obj["references"]:
                                note_output += "<b>Reference metasploit:</b> <ol>"
                                for url in find_obj["references"]["metasploit"]:
                                    note_output += "<li>" + htmlspecialchars(url) + "</li>"
                                note_output += "</ol></br>"
                    # Versions issues detection: WordPress core vulnerabilities
                    # become project issues (fix = upgrade core).
                    version_obj = file_dict['version']
                    if version_obj:
                        note_output += "<h1>Version detection </h1></br>"
                        note_output += "<b>Version:</b> " + version_obj["number"] + "</br>"
                        note_output += "<b>Found by:</b> " + version_obj["found_by"] + "</br>"
                        note_output += "<b>Interesting entries:</b> <ol>"
                        for entry in version_obj["interesting_entries"]:
                            note_output += "<li>" + htmlspecialchars(entry) + "</li>"
                        note_output += "</ol></br>"
                        for current_issue in version_obj["vulnerabilities"]:
                            issue_name = current_issue["title"]
                            issue_fix = "Upgrade WordPress to version >= " + current_issue["fixed_in"]
                            issue_cve = ",".join(current_issue["references"]["cve"])
                            issue_description = "{}\n\nURLs:\n{}\n\nwpvulndb: {}".format(issue_name,
                                                                                        "\n".join([" - " + x for x in current_issue["references"]["url"]]),
                                                                                        ", ".join(current_issue["references"]["wpvulndb"]))
                            if "exploitdb" in current_issue:
                                issue_description += "\n\nExploitDB: {}".format(current_issue["exploitdb"])
                            if "youtube" in current_issue:
                                issue_description += "\n\nYoutube: {}".format(current_issue["youtube"])
                            issue_id = db.insert_new_issue_no_dublicate(
                                issue_name,
                                issue_description,
                                current_wordpress_path,
                                0,
                                current_user['id'],
                                {current_port_id: [current_hostname_id]},
                                "Need to recheck",
                                current_project['id'],
                                issue_cve,
                                0,
                                "web",
                                issue_fix,
                                ""
                            )
                    # Theme: main-theme vulnerabilities become issues too.
                    main_theme_obj = file_dict['main_theme']
                    if main_theme_obj:
                        note_output += "<h1>Main theme </h1></br>"
                        note_output += "<b>Name:</b> " + main_theme_obj["slug"] + "</br>"
                        note_output += "<b>Location:</b> " + main_theme_obj["location"] + "</br>"
                        if "readme_url" in main_theme_obj:
                            note_output += "<b>Readme URL:</b> " + main_theme_obj["readme_url"] + "</br>"
                        if "style_uri" in main_theme_obj:
                            note_output += "<b>Official URL:</b> " + main_theme_obj["style_uri"] + "</br>"
                        if "version" in main_theme_obj and main_theme_obj["version"]:
                            note_output += "<b>Version:</b> " + main_theme_obj["version"]["number"] + "</br>"
                            note_output += "<b>Interesting entries:</b> <ol>"
                            for entry in main_theme_obj["version"]["interesting_entries"]:
                                note_output += "<li>" + htmlspecialchars(entry) + "</li>"
                            note_output += "</ol></br>"
                        for current_issue in main_theme_obj["vulnerabilities"]:
                            issue_name = current_issue["title"]
                            issue_fix = "Upgrade main theme {} to version >= {}".format(main_theme_obj["slug"], current_issue["fixed_in"])
                            issue_cve = ",".join(current_issue["references"]["cve"])
                            issue_description = "{}\n\nURLs:\n{}\n\nwpvulndb: {}".format(issue_name,
                                                                                        "\n".join([" - " + x for x in current_issue["references"]["url"]]),
                                                                                        ", ".join(current_issue["references"]["wpvulndb"]))
                            if "exploitdb" in current_issue:
                                issue_description += "\n\nExploitDB: {}".format(current_issue["exploitdb"])
                            if "youtube" in current_issue:
                                issue_description += "\n\nYoutube: {}".format(current_issue["youtube"])
                            issue_id = db.insert_new_issue_no_dublicate(
                                issue_name,
                                issue_description,
                                current_wordpress_path,
                                0,
                                current_user['id'],
                                {current_port_id: [current_hostname_id]},
                                "Need to recheck",
                                current_project['id'],
                                issue_cve,
                                0,
                                "web",
                                issue_fix,
                                ""
                            )
                    # Plugins: per-plugin metadata goes to the note,
                    # per-plugin vulnerabilities become issues.
                    plugins_obj = file_dict['plugins']
                    if plugins_obj:
                        note_output += "<h1>Plugins</h1></br>"
                        for plugin_name in plugins_obj:
                            plugin_obj = plugins_obj[plugin_name]
                            note_output += "<h2>" + plugin_name + "</h2></br>"
                            note_output += "<b>Location:</b> " + plugin_obj["location"] + "</br>"
                            note_output += "<b>Found by:</b> " + plugin_obj["found_by"] + "</br>"
                            if "error_log_url" in plugins_obj and plugin_obj["error_log_url"]:
                                note_output += "<b>Error log URL:</b> " + plugin_obj["error_log_url"] + "</br>"
                            if "directory_listing" in plugin_obj and plugin_obj["directory_listing"]:
                                note_output += "<b>Dir listing URL:</b> " + plugin_obj["directory_listing"] + "</br>"
                            if "changelog_url" in plugin_obj and plugin_obj["changelog_url"]:
                                note_output += "<b>Changelog URL:</b> " + plugin_obj["changelog_url"] + "</br>"
                            if "readme_url" in plugin_obj and plugin_obj["readme_url"]:
                                note_output += "<b>Readme URL:</b> " + plugin_obj["readme_url"] + "</br>"
                            note_output += "<b>Interesting entries:</b> <ol>"
                            for entry in plugin_obj["interesting_entries"]:
                                note_output += "<li>" + htmlspecialchars(entry) + "</li>"
                            note_output += "</ol></br>"
                            if "version" in plugin_obj and plugin_obj["version"]:
                                note_output += "<b>Version:</b> " + plugin_obj["version"]["number"] + "</br>"
                                note_output += "<b>Version entries:</b> <ol>"
                                for entry in plugin_obj["version"]["interesting_entries"]:
                                    note_output += "<li>" + htmlspecialchars(entry) + "</li>"
                                note_output += "</ol></br>"
                            for current_issue in plugin_obj["vulnerabilities"]:
                                issue_name = current_issue["title"]
                                issue_fix = "Upgrade plugin {} to version >= {}".format(plugin_name, current_issue["fixed_in"])
                                issue_cve = ",".join(current_issue["references"]["cve"])
                                issue_description = "{}\n\nURLs:\n{}\n\nwpvulndb: {}".format(issue_name,
                                                                                            "\n".join([" - " + x for x in current_issue["references"]["url"]]),
                                                                                            ", ".join(current_issue["references"]["wpvulndb"]))
                                if "exploitdb" in current_issue:
                                    issue_description += "\n\nExploitDB: {}".format(current_issue["exploitdb"])
                                if "youtube" in current_issue:
                                    issue_description += "\n\nYoutube: {}".format(current_issue["youtube"])
                                issue_id = db.insert_new_issue_no_dublicate(
                                    issue_name,
                                    issue_description,
                                    current_wordpress_path,
                                    0,
                                    current_user['id'],
                                    {current_port_id: [current_hostname_id]},
                                    "Need to recheck",
                                    current_project['id'],
                                    issue_cve,
                                    0,
                                    "web",
                                    issue_fix,
                                    ""
                                )
                    # Add note with everything that is not an issue
                    note_id = db.insert_new_note(current_project['id'],
                                                 "WPScan: {}".format(current_port_num),
                                                 current_user['id'],
                                                 current_host_id,
                                                 note_output)
                except ValueError as e:
                    # bad JSON / bad IP / bad int conversion for this file only
                    errors.append('One of files was corrupted: {}'.format(e))
    return render_template('project/tools/import/wpscan.html',
                           current_project=current_project,
                           tab_name='WPScan',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/kube-hunter/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def kubehunter_page(project_id, current_project, current_user):
    """Render the kube-hunter report-import page (GET, no form handling)."""
    context = {
        'current_project': current_project,
        'tab_name': 'kube-hunter',
    }
    return render_template('project/tools/import/kubehunter.html', **context)
@routes.route('/project/<uuid:project_id>/tools/kube-hunter/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def kubehunter_form(project_id, current_project, current_user):
    """Import kube-hunter JSON reports into the current project.

    Processes three sections of each report: ``nodes`` (hosts),
    ``services`` (host+port pairs) and ``vulnerabilities`` (issues with a
    severity-derived pseudo-CVSS score). Each entry is processed
    best-effort: a failure on one record skips only that record.
    """
    form = KuberHunter()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        for file in form.json_files.data:
            if file.filename:
                json_report_data = file.read().decode('charmap')
                scan_result = json.loads(json_report_data)
                # add node description
                for node_obj in scan_result['nodes']:
                    try:
                        node_type = form.hosts_description.data
                        if 'type' in node_obj:
                            node_type = "Kubernetes " + node_obj['type']
                        node_ip = node_obj['location']
                        # check if valid ip
                        ipaddress.ip_address(node_ip)
                        current_host = db.select_ip_from_project(current_project['id'], node_ip)
                        if current_host:
                            current_host = current_host[0]
                            db.update_host_description(current_host['id'], node_type)
                        else:
                            current_host = db.insert_host(current_project['id'],
                                                          node_ip,
                                                          current_user['id'],
                                                          node_type)
                    except Exception as e:
                        # best-effort: skip malformed node, go to next Node
                        pass
                # services: "location" is "ip:port"
                for service_obj in scan_result['services']:
                    try:
                        service_info = service_obj['service']
                        service_ip = service_obj['location'].split(':')[0]
                        service_port = int(service_obj['location'].split(':')[1])
                        # check ip
                        ipaddress.ip_address(service_ip)
                        # add host
                        current_host = db.select_ip_from_project(current_project['id'], service_ip)
                        if current_host:
                            current_host = current_host[0]
                        else:
                            current_host = db.insert_host(current_project['id'],
                                                          service_ip,
                                                          current_user['id'],
                                                          form.hosts_description.data)
                        # add port (always assumed TCP)
                        current_port = db.select_ip_port(current_host['id'], service_port, is_tcp=True)
                        if current_port:
                            current_port = current_port[0]
                            db.update_port_service(current_port['id'],
                                                   service_info)
                        else:
                            current_port = db.insert_host_port(current_host['id'],
                                                               service_port,
                                                               True,
                                                               service_info,
                                                               form.ports_description.data,
                                                               current_user['id'],
                                                               current_project['id'])
                    except Exception as e:
                        # best-effort: skip malformed record, next service
                        pass
                # add issues
                for issue_obj in scan_result['vulnerabilities']:
                    try:
                        issue_ip = issue_obj['location'].split(':')[0]
                        issue_port = 0
                        if ':' in issue_obj['location']:
                            issue_port = int(issue_obj['location'].split(':')[1])
                        # check ip
                        ipaddress.ip_address(issue_ip)
                        issue_cvss = 0
                        issue_severity = issue_obj['severity']
                        issue_name = issue_obj['vulnerability']
                        issue_category = issue_obj['category']
                        issue_num = issue_obj['vid']
                        issue_poc_str = issue_obj['evidence']
                        issue_link = issue_obj['avd_reference']
                        issue_script = issue_obj['hunter']
                        issue_description = issue_obj['description']
                        issue_full_description = 'Category: {}\nEvidence: {}\nModule: {}\nLink: {}\nNumber: {}\n\n{}'.format(
                            issue_category,
                            issue_poc_str,
                            issue_script,
                            issue_link,
                            issue_num,
                            issue_description
                        )
                        # map severity word to a coarse pseudo-CVSS value
                        if issue_severity == 'low':
                            issue_cvss = 2.0
                        elif issue_severity == 'medium':
                            issue_cvss = 5.0
                        elif issue_severity == 'high':
                            issue_cvss = 8.0
                        elif issue_severity == 'critical':
                            issue_cvss = 10.0
                        # add host
                        current_host = db.select_ip_from_project(current_project['id'], issue_ip)
                        if current_host:
                            current_host = current_host[0]
                        else:
                            current_host = db.insert_host(current_project['id'],
                                                          issue_ip,
                                                          current_user['id'],
                                                          form.hosts_description.data)
                        # add port
                        current_port = db.select_ip_port(current_host['id'], issue_port, is_tcp=True)
                        if current_port:
                            current_port = current_port[0]
                            db.update_port_service(current_port['id'],
                                                   form.ports_description.data)
                        else:
                            current_port = db.insert_host_port(current_host['id'],
                                                               issue_port,
                                                               True,
                                                               'kubernetes',
                                                               form.ports_description.data,
                                                               current_user['id'],
                                                               current_project['id'])
                        # add issue ('0' hostname id = bind to bare IP)
                        services = {current_port['id']: ['0']}
                        current_issue = db.insert_new_issue_no_dublicate(issue_name,
                                                                         issue_full_description,
                                                                         '',
                                                                         issue_cvss,
                                                                         current_user['id'],
                                                                         services,
                                                                         'need to recheck',
                                                                         current_project['id'],
                                                                         '',
                                                                         0,
                                                                         'custom',
                                                                         '',
                                                                         '')
                    except Exception as e:
                        # NOTE(review): debug print leaks to stdout — prefer a
                        # logger or appending to `errors`.
                        print(e)
                        pass
        return render_template('project/tools/import/kubehunter.html',
                               current_project=current_project,
                               tab_name='kube-hunter',
                               errors=errors)
    # validation failed: re-render with collected form errors
    return render_template('project/tools/import/kubehunter.html',
                           current_project=current_project,
                           tab_name='kube-hunter',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/burp_enterprise/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def burp_enterprise_page(project_id, current_project, current_user):
    """Render the Burp Suite Enterprise report-import page (GET)."""
    context = {
        'current_project': current_project,
        'tab_name': 'Burp Suite Enterprise Edition',
    }
    return render_template('project/tools/import/burp_enterprise.html', **context)
@routes.route('/project/<uuid:project_id>/tools/burp_enterprise/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def burp_enterprise_form(project_id, current_project, current_user):
    """Import Burp Suite Enterprise HTML reports into the current project.

    Scrapes the report with BeautifulSoup: each "Issues found on <url>"
    section yields a host/port/hostname, and each issue row is resolved to
    its detail container to extract description, remediation, references,
    classification and HTTP request/response PoCs. Hostnames are mapped to
    IPs via the user-supplied hostname/IP pairs or (optionally) DNS.
    """
    form = BurpEnterpriseForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if errors:
        return render_template('project/tools/import/burp_enterprise.html',
                               current_project=current_project,
                               tab_name='Burp Suite Enterprise Edition',
                               errors=errors)
    # hostnames dict: user must supply one IP per hostname
    if len(form.hostnames.data) != len(form.ips.data):
        return render_template('project/tools/import/burp_enterprise.html',
                               current_project=current_project,
                               tab_name='Burp Suite Enterprise Edition',
                               errors=['Error with hostnames'])
    # NOTE(review): `i = 0` is redundant — the for-range below rebinds it.
    i = 0
    hostname_dict = {}
    for i in range(len(form.hostnames.data)):
        hostname_dict[form.hostnames.data[i]] = form.ips.data[i]
    auto_resolve = form.auto_resolve.data == 1
    # xml files
    for file in form.html_files.data:
        if file.filename:
            html_data = file.read()
            scan_result = BeautifulSoup(html_data, "html.parser")
            # find list of issues
            site_array = scan_result.select('h1:contains("Issues found on")')
            for site_obj in site_array:
                url = site_obj.string.split('Issues found on ')[1].strip()
                parsed_url = urllib.parse.urlparse(url)
                protocol = parsed_url.scheme
                hostname = parsed_url.netloc
                port = 80
                ip = ''
                # default port from scheme when the URL has none
                if not parsed_url.port:
                    if protocol == 'https':
                        port = 443
                    else:
                        port = 80
                else:
                    port = int(parsed_url.port)
                    pass
                # check ip: netloc may already be a literal IP
                try:
                    ipaddress.ip_address(hostname)
                    ip = hostname
                    hostname = ''
                except Exception as e:
                    pass
                if hostname:
                    try:
                        email_validator.validate_email_domain_part(hostname)
                    except email_validator.EmailNotValidError:
                        errors.append('Hostname not valid!')
                        hostname = ''
                # check hostname: user mapping first, then optional DNS
                if ip == '':
                    if hostname in hostname_dict:
                        ip = hostname_dict[hostname]
                    elif auto_resolve:
                        # NOTE(review): gethostbyname may raise socket.gaierror
                        # on resolution failure — unhandled here; confirm
                        # intended.
                        ip = socket.gethostbyname(hostname)
                if ip and not errors:
                    # add host
                    current_host = db.select_ip_from_project(current_project['id'], ip)
                    if current_host:
                        current_host = current_host[0]
                    else:
                        current_host = db.insert_host(current_project['id'],
                                                      ip,
                                                      current_user['id'],
                                                      form.hosts_description.data)
                    # add port
                    current_port = db.select_ip_port(current_host['id'], port, is_tcp=True)
                    if current_port:
                        current_port = current_port[0]
                        db.update_port_service(current_port['id'],
                                               protocol)
                    else:
                        current_port = db.insert_host_port(current_host['id'],
                                                           port,
                                                           True,
                                                           protocol,
                                                           form.ports_description.data,
                                                           current_user['id'],
                                                           current_project['id'])
                    # add hostname
                    current_hostname = None
                    if hostname:
                        current_hostname = db.select_ip_hostname(current_host['id'],
                                                                 hostname)
                        if current_hostname:
                            current_hostname = current_hostname[0]
                        else:
                            hostname_id = db.insert_hostname(current_host['id'], hostname,
                                                             form.hostnames_description.data,
                                                             current_user['id'])
                            current_hostname = db.select_hostname(hostname_id)
                    # issues loop: header rows carry the issue-type name,
                    # detail rows link (via anchor) to the full description
                    rows_array = site_obj.parent.find_all('tr')[1:]
                    issue_name = ''
                    i = 0
                    for issue_header_obj in rows_array:
                        i += 1
                        if 'class' in issue_header_obj.attrs and 'issue-type-row' in issue_header_obj.attrs['class']:
                            issue_name = issue_header_obj.find('td').string.split(' [')[0]
                        else:
                            td_arr = issue_header_obj.find_all('td')
                            issue_path = issue_header_obj.find('td', {"class": "issue-path"}).string.strip()
                            dom_id = issue_header_obj.find('a').attrs['href'].replace('#', '')
                            severity = td_arr[1].string
                            # map severity word to a coarse pseudo-CVSS value
                            issue_cvss = 0.0
                            if severity == 'Low':
                                issue_cvss = 2.0
                            elif severity == 'Medium':
                                issue_cvss = 5.0
                            elif severity == 'High':
                                issue_cvss = 8.0
                            elif severity == 'Critical':
                                issue_cvss = 10.0
                            # goto issue container
                            issue_container = scan_result.find('a', {"name": dom_id}).parent
                            issue_name = issue_container.find('h2').string
                            issue_description_container = issue_container.find('div')
                            issue_description_text = str(issue_description_container.getText())
                            # collapse runs of spaces / blank lines
                            while '  ' in issue_description_text:
                                issue_description_text = issue_description_text.replace('  ', ' ')
                            while '\n\n\n' in issue_description_text:
                                issue_description_text = issue_description_text.replace('\n\n\n', '\n\n')
                            # NOTE(review): debug leftover — remove.
                            print(1)
                            # ignoring Remediation detail
                            # Remidiation == fix
                            issue_fix_short_header = issue_container.select('h3:contains("Remediation detail")')
                            issue_fix_short1_header = issue_container.select('h3:contains("Issue remediation")')
                            issue_fix = ''
                            if issue_fix_short_header:
                                next_elem = issue_fix_short_header[0].find_next()
                                issue_fix += str(next_elem.getText()) + '\n\n'
                            if issue_fix_short1_header:
                                next_elem = issue_fix_short1_header[0].find_next()
                                issue_fix += str(next_elem.getText())
                            # issue_fix = issue_fix.replace('<ul>', '\n').replace('<li>', ' - ').replace('</li>', '\n').replace('</ul>', '').replace('\t', '').replace('<div>', '').replace('</div>', '').replace('<b>', '').replace('</b>', '')
                            while '  ' in issue_fix:
                                issue_fix = issue_fix.replace('  ', ' ')
                            while '\n\n\n' in issue_fix:
                                issue_fix = issue_fix.replace('\n\n\n', '\n\n')
                            # References
                            issue_ref_header = issue_container.select('h3:contains("References")')
                            issue_ref = ''
                            if issue_ref_header:
                                issue_ref_header = issue_ref_header[0].find_next()
                                issue_ref = '\n\nReferences:\n'
                                links = issue_ref_header.find_all('a')
                                for link_obj in links:
                                    issue_ref += ' - ' + link_obj.string + ': ' + link_obj.attrs['href'] + '\n'
                            # Vulnerability classifications
                            issue_class_header = issue_container.select('h3:contains("Vulnerability classifications")')
                            issue_class = ''
                            if issue_class_header:
                                issue_class_header = issue_class_header[0].find_next()
                                issue_class = '\n\nClassification:\n'
                                links = issue_class_header.find_all('a')
                                for link_obj in links:
                                    issue_class += link_obj.string + ': ' + link_obj.attrs['href'] + '\n'
                            # add issue
                            issue_full_description = issue_description_text + issue_ref + issue_class
                            while '  ' in issue_full_description:
                                issue_full_description = issue_full_description.replace('  ', ' ')
                            while '\n\n\n' in issue_full_description:
                                issue_full_description = issue_full_description.replace('\n\n\n', '\n\n')
                            try:
                                services = {current_port['id']: ['0']}
                                if current_hostname:
                                    services = {current_port['id']: [current_hostname['id']]}
                            except Exception as e:
                                pass
                            current_issue_id = db.insert_new_issue_no_dublicate(
                                name='Burp: ' + issue_name,
                                description=str(issue_full_description),
                                url_path=str(issue_path),
                                cvss=float(issue_cvss),
                                user_id=current_user['id'],
                                services=services,
                                status='Need to recheck',
                                project_id=current_project['id'],
                                cve='',
                                cwe=0,
                                issue_type='web',
                                fix=str(issue_fix),
                                param=''
                            )
                            # PoC Request: stored in DB or on disk per config
                            issue_request_header = issue_container.select('h3:contains("Request:")')
                            if issue_request_header:
                                next_elem = issue_request_header[0].find_next()
                                poc_text = str(next_elem.getText()).replace('\r', '')
                                # add poc
                                file_data = b''
                                if config['files']['poc_storage'] == 'database':
                                    file_data = poc_text.encode('charmap')
                                poc_id = db.insert_new_poc(current_port['id'],
                                                           'HTTP request',
                                                           'text',
                                                           'request.txt',
                                                           current_issue_id,
                                                           current_user['id'],
                                                           current_hostname['id'] if current_hostname else '0',
                                                           storage=config['files']['poc_storage'],
                                                           data=file_data)
                                if config['files']['poc_storage'] == 'filesystem':
                                    file_path = './static/files/poc/{}'.format(poc_id)
                                    file_object = open(file_path, 'w')
                                    file_object.write(poc_text)
                                    file_object.close()
                            # PoC Response: same storage handling as request
                            issue_response_header = issue_container.select('h3:contains("Response:")')
                            if issue_response_header:
                                next_elem = issue_response_header[0].find_next()
                                poc_text = str(next_elem.getText()).replace('\r', '')
                                # add poc
                                file_data = b''
                                if config['files']['poc_storage'] == 'database':
                                    file_data = poc_text.encode('charmap')
                                poc_id = db.insert_new_poc(current_port['id'],
                                                           'HTTP response',
                                                           'text',
                                                           'response.txt',
                                                           current_issue_id,
                                                           current_user['id'],
                                                           current_hostname['id'] if current_hostname else '0',
                                                           storage=config['files']['poc_storage'],
                                                           data=file_data)
                                if config['files']['poc_storage'] == 'filesystem':
                                    file_path = './static/files/poc/{}'.format(poc_id)
                                    file_object = open(file_path, 'w')
                                    file_object.write(poc_text)
                                    file_object.close()
    return render_template('project/tools/import/burp_enterprise.html',
                           current_project=current_project,
                           tab_name='Burp Suite Enterprise Edition',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/dnsrecon/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def dnsrecon_page(project_id, current_project, current_user):
    """Render the DNSrecon report-import page (GET, no form handling)."""
    context = {
        'current_project': current_project,
        'tab_name': 'DNSrecon',
    }
    return render_template('project/tools/import/dnsrecon.html', **context)
@routes.route('/project/<uuid:project_id>/tools/dnsrecon/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def dnsrecon_page_form(project_id, current_project, current_user):
    """Import DNSrecon results (JSON / CSV / XML) into the current project.

    All three format branches fill the same two accumulators:
    ``hostnames_dict`` maps hostname -> {'ip': [...], 'description': str}
    and ``ports_dict`` maps ip -> [port, ...]. After parsing, hosts,
    hostnames and TCP ports are created/updated in the project, optionally
    skipping IPv6 addresses.
    """
    form = DNSreconForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        hostnames_dict = {}
        ports_dict = {}
        # json files
        for file in form.json_files.data:
            if file.filename:
                json_report_data = file.read().decode('charmap')
                scan_result = json.loads(json_report_data)
                for hostname_row in scan_result:
                    hostname = hostname_row['target'] if 'target' in hostname_row else ''
                    hostname_name = hostname_row['mname'] if 'mname' in hostname_row else ''
                    host_ip = hostname_row['address'] if 'address' in hostname_row else ''
                    host_port = hostname_row['port'] if 'port' in hostname_row else ''
                    hostname_info = hostname_row['strings'] if 'strings' in hostname_row else ''
                    hostname_type = hostname_row['type'] if 'type' in hostname_row else ''
                    '''
                    1. Name <--> Address
                    2. Target <--> Address
                    3. Name <--> String
                    (Port, Type)
                    '''
                    if host_ip:
                        # check if host_ip domain or IP
                        try:
                            ipaddress.ip_address(host_ip)
                        except Exception as e:
                            # its domain, need ip: find another record whose
                            # name matches and take its address instead
                            host_ip_old = host_ip
                            host_ip = ''
                            # NOTE(review): the loop below reads `hostname_row`
                            # (the outer-loop variable), not `hostname_row_tmp`
                            # — the CSV/XML branches use the _tmp row here, so
                            # this looks like a copy-paste bug; resolution can
                            # never find a different record. Confirm and fix.
                            for hostname_row_tmp in scan_result:
                                host_ip_tmp = hostname_row['address'] if 'address' in hostname_row else ''
                                domain_tmp = hostname_row['mname'] if 'mname' in hostname_row else ''
                                if host_ip_old == domain_tmp:
                                    try:
                                        ipaddress.ip_address(host_ip_tmp)
                                        host_ip = host_ip_tmp
                                    except Exception as e1:
                                        pass
                    if hostname_name != '' and host_ip != '':
                        # 1. Name <--> Address
                        if hostname == '':
                            if hostname_name not in hostnames_dict:
                                hostnames_dict[hostname_name] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}'.format(hostname_type)
                                }
                            else:
                                if host_ip not in hostnames_dict[hostname_name]['ip']:
                                    hostnames_dict[hostname_name]['ip'].append(host_ip)
                        # 1. Name <--> Address <--> Target
                        else:
                            if hostname not in hostnames_dict:
                                hostnames_dict[hostname] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}\nName: {}'.format(hostname_type, hostname_name)
                                }
                    elif hostname_name != '' and host_ip == '' and hostname_info != '':
                        # Name <--> String
                        if hostname_name not in hostnames_dict:
                            hostnames_dict[hostname_name] = {
                                'ip': [],
                                'description': 'Type: {}\nInfo: {}'.format(hostname_type, hostname_info)
                            }
                        else:
                            hostnames_dict[hostname_name]['description'] += '\nType: {}\nInfo: {}'.format(hostname_type, hostname_info)
                    elif hostname != '' and host_ip != '' and hostname_name == '':
                        # Target <--> Address
                        if hostname not in hostnames_dict:
                            hostnames_dict[hostname] = {
                                'ip': [host_ip],
                                'description': 'Type: {}'.format(hostname_type),
                            }
                    # add ports
                    if host_port != '' and host_ip != '':
                        if host_ip not in ports_dict:
                            ports_dict[host_ip] = [host_port]
                        else:
                            if host_port not in ports_dict[host_ip]:
                                ports_dict[host_ip].append(host_port)
        # csv load
        for file in form.csv_files.data:
            if file.filename:
                scan_result = csv.DictReader(codecs.iterdecode(file, 'charmap'), delimiter=',')
                for hostname_row in scan_result:
                    hostname = hostname_row['Target']
                    hostname_name = hostname_row['Name']
                    host_ip = hostname_row['Address']
                    host_port = hostname_row['Port']
                    hostname_info = hostname_row['String']
                    hostname_type = hostname_row['Type']
                    '''
                    1. Name <--> Address
                    2. Target <--> Address
                    3. Name <--> String
                    (Port, Type)
                    '''
                    if host_ip:
                        # check if host_ip domain or IP
                        try:
                            ipaddress.ip_address(host_ip)
                        except Exception as e:
                            # its domain, need ip
                            host_ip_old = host_ip
                            host_ip = ''
                            # NOTE(review): `scan_result` is a csv.DictReader
                            # already being consumed by the outer loop — this
                            # inner iteration only sees the *remaining* rows
                            # and exhausts the reader, ending the outer loop
                            # early. Materializing the rows into a list first
                            # would fix both problems; confirm intent.
                            for hostname_row_tmp in scan_result:
                                host_ip_tmp = hostname_row_tmp['Address']
                                domain_tmp = hostname_row_tmp['Name']
                                if host_ip_old == domain_tmp:
                                    try:
                                        ipaddress.ip_address(host_ip_tmp)
                                        host_ip = host_ip_tmp
                                    except Exception as e1:
                                        pass
                    if hostname_name != '' and host_ip != '':
                        # 1. Name <--> Address
                        if hostname == '':
                            if hostname_name not in hostnames_dict:
                                hostnames_dict[hostname_name] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}'.format(hostname_type)
                                }
                            else:
                                if host_ip not in hostnames_dict[hostname_name]['ip']:
                                    hostnames_dict[hostname_name]['ip'].append(host_ip)
                        # 1. Name <--> Address <--> Target
                        else:
                            if hostname not in hostnames_dict:
                                hostnames_dict[hostname] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}\nName: {}'.format(hostname_type, hostname_name)
                                }
                    elif hostname_name != '' and host_ip == '' and hostname_info != '':
                        # Name <--> String
                        if hostname_name not in hostnames_dict:
                            hostnames_dict[hostname_name] = {
                                'ip': [],
                                'description': 'Type: {}\nInfo: {}'.format(hostname_type, hostname_info)
                            }
                        else:
                            hostnames_dict[hostname_name]['description'] += '\nType: {}\nInfo: {}'.format(hostname_type, hostname_info)
                    elif hostname != '' and host_ip != '' and hostname_name == '':
                        # Target <--> Address
                        if hostname not in hostnames_dict:
                            hostnames_dict[hostname] = {
                                'ip': [host_ip],
                                'description': 'Type: {}'.format(hostname_type),
                            }
                    # add ports
                    if host_port != '' and host_ip != '':
                        if host_ip not in ports_dict:
                            ports_dict[host_ip] = [host_port]
                        else:
                            if host_port not in ports_dict[host_ip]:
                                ports_dict[host_ip].append(host_port)
        # xml files: same logic over <record> attributes
        for file in form.xml_files.data:
            if file.filename:
                soup = BeautifulSoup(file.read(), "html.parser")
                scan_result = soup.findAll('record')
                for hostname_row in scan_result:
                    hostname = hostname_row.get('target') if hostname_row.get('target') else ''
                    hostname_name = hostname_row.get('name') if hostname_row.get('name') else ''
                    host_ip = hostname_row.get('address') if hostname_row.get('address') else ''
                    host_port = hostname_row.get('port') if hostname_row.get('port') else ''
                    hostname_info = hostname_row.get('strings') if hostname_row.get('strings') else ''
                    hostname_type = hostname_row.get('type') if hostname_row.get('type') else ''
                    '''
                    1. Name <--> Address
                    2. Target <--> Address
                    3. Name <--> String
                    (Port, Type)
                    '''
                    if host_ip:
                        # check if host_ip domain or IP
                        try:
                            ipaddress.ip_address(host_ip)
                        except Exception as e:
                            # its domain, need ip
                            host_ip_old = host_ip
                            host_ip = ''
                            for hostname_row_tmp in scan_result:
                                host_ip_tmp = hostname_row_tmp.get('address') if hostname_row_tmp.get('address') else ''
                                domain_tmp = hostname_row_tmp.get('name') if hostname_row_tmp.get('name') else ''
                                if host_ip_old == domain_tmp:
                                    try:
                                        ipaddress.ip_address(host_ip_tmp)
                                        host_ip = host_ip_tmp
                                    except Exception as e1:
                                        pass
                    if hostname_name != '' and host_ip != '':
                        # 1. Name <--> Address
                        if hostname == '':
                            if hostname_name not in hostnames_dict:
                                hostnames_dict[hostname_name] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}'.format(hostname_type)
                                }
                            else:
                                if host_ip not in hostnames_dict[hostname_name]['ip']:
                                    hostnames_dict[hostname_name]['ip'].append(host_ip)
                        # 1. Name <--> Address <--> Target
                        else:
                            if hostname not in hostnames_dict:
                                hostnames_dict[hostname] = {
                                    'ip': [host_ip],
                                    'description': 'Type: {}\nName: {}'.format(hostname_type, hostname_name)
                                }
                    elif hostname_name != '' and host_ip == '' and hostname_info != '':
                        # Name <--> String
                        if hostname_name not in hostnames_dict:
                            hostnames_dict[hostname_name] = {
                                'ip': [],
                                'description': 'Type: {}\nInfo: {}'.format(hostname_type, hostname_info)
                            }
                        else:
                            hostnames_dict[hostname_name]['description'] += '\nType: {}\nInfo: {}'.format(hostname_type, hostname_info)
                    elif hostname != '' and host_ip != '' and hostname_name == '':
                        # Target <--> Address
                        if hostname not in hostnames_dict:
                            hostnames_dict[hostname] = {
                                'ip': [host_ip],
                                'description': 'Type: {}'.format(hostname_type),
                            }
                    # add ports
                    if host_port != '' and host_ip != '':
                        if host_ip not in ports_dict:
                            ports_dict[host_ip] = [host_port]
                        else:
                            if host_port not in ports_dict[host_ip]:
                                ports_dict[host_ip].append(host_port)
        # hostnames_dict = {'google.com':{'ip':[8.8.8.8], 'description': '...' }}
        for hostname in hostnames_dict:
            ip_array = hostnames_dict[hostname]['ip']
            description = hostnames_dict[hostname]['description']
            for ip_address in ip_array:
                # check if valid ip
                ip_obj = ipaddress.ip_address(ip_address)
                # ':' in the string means IPv6 — skipped when the form says so
                if (':' not in ip_address) or (':' in ip_address and not form.ignore_ipv6.data):
                    current_host = db.select_project_host_by_ip(current_project['id'], ip_address)
                    if not current_host:
                        host_id = db.insert_host(current_project['id'], ip_address, current_user['id'], form.hosts_description.data)
                    else:
                        host_id = current_host[0]['id']
                    current_hostname = db.select_ip_hostname(host_id, hostname)
                    if not current_hostname:
                        hostname_id = db.insert_hostname(host_id, hostname, description, current_user['id'])
                    else:
                        hostname_id = current_hostname[0]['id']
                        db.update_hostname(hostname_id, description)
        # ports_dict = {'ip':['8888']}
        for ip_address in ports_dict:
            # check if valid ip
            ports_arr = list(set(ports_dict[ip_address]))
            ip_obj = ipaddress.ip_address(ip_address)
            if (':' not in ip_address) or (':' in ip_address and not form.ignore_ipv6.data):
                current_host = db.select_project_host_by_ip(current_project['id'], ip_address)
                if not current_host:
                    host_id = db.insert_host(current_project['id'], ip_address, current_user['id'], form.hosts_description.data)
                else:
                    host_id = current_host[0]['id']
                for port_num in ports_arr:
                    port_num_int = int(port_num)
                    if port_num_int > 0 and port_num_int < 65536:
                        current_port = db.select_host_port(host_id, int(port_num), is_tcp=True)
                        if not current_port:
                            port_id = db.insert_host_port(host_id, port_num_int, True, 'unknown', form.ports_description.data, current_user['id'], current_project['id'])
    return render_template('project/tools/import/dnsrecon.html',
                           current_project=current_project,
                           tab_name='DNSrecon',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/theharvester/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def theharvester_page(project_id, current_project, current_user):
    """Render the theHarvester report-import page (GET, no form handling)."""
    context = {
        'current_project': current_project,
        'tab_name': 'theHarvester',
    }
    return render_template('project/tools/import/theharvester.html', **context)
@routes.route('/project/<uuid:project_id>/tools/theharvester/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
@check_project_archived
def theharvester_page_form(project_id, current_project, current_user):
    """Import theHarvester XML reports into the current project.

    For each <host> element, splits the comma-separated <ip> list and binds
    the <hostname> to every valid IP, creating hosts/hostnames as needed.
    """
    form = theHarvesterForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        for file in form.xml_files.data:
            if file.filename:
                soup = BeautifulSoup(file.read(), "html.parser")
                scan_result = soup.findAll('host')
                for hostname_row in scan_result:
                    ips_str = hostname_row.find('ip').text
                    hostname = hostname_row.find('hostname').text
                    ip_array = ips_str.split(', ')
                    for ip_address in ip_array:
                        # check valid ip
                        # NOTE(review): ip_address() raises ValueError on a bad
                        # entry and there is no try/except here — one malformed
                        # IP aborts the whole import with a 500; confirm.
                        ipaddress.ip_address(ip_address)
                        current_host = db.select_project_host_by_ip(current_project['id'], ip_address)
                        if current_host:
                            host_id = current_host[0]['id']
                        else:
                            host_id = db.insert_host(current_project['id'], ip_address, current_user['id'],
                                                     form.hosts_description.data)
                        current_hostname = db.select_ip_hostname(host_id, hostname)
                        if not current_hostname:
                            hostname_id = db.insert_hostname(host_id, hostname, form.hostnames_description.data, current_user['id'])
    return render_template('project/tools/import/theharvester.html',
                           current_project=current_project,
                           tab_name='theHarvester',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/metasploit/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def metasploit_page(project_id, current_project, current_user):
    """Render the Metasploit report-import tab for the current project."""
    template_ctx = {
        'current_project': current_project,
        'tab_name': 'Metasploit',
    }
    return render_template('project/tools/import/metasploit.html', **template_ctx)
@routes.route('/project/<uuid:project_id>/tools/metasploit/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
@check_project_archived
def metasploit_page_form(project_id, current_project, current_user):
    """Import Metasploit (v5) XML exports into the current project.

    Pass 1 parses <hosts> into hosts_dict/ports_dict keyed by the report's
    own ids, optionally folding nmap script notes into the matching host or
    service description. Pass 2 writes hosts then ports to the db, recording
    the created db ids ('pcf_id') back into the dicts. Pass 3 (skipped when
    only_nmap is set) maps <web_sites> to those db ids and inserts one issue
    per <web_vuln>.
    """
    form = MetasploitForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    '''
    <MetasploitV5>
    1. <hosts> - hosts info (domain/ip) - ignore <vulns>
    2. <events> - ignoring
    3. <web_sites>
    4. <web_pages> - ignoring
    5. <web_forms> - ignoring
    6. <web_vuln>
    Steps:
    1. Add hosts
    2. Add sites
    3. Add site vulns
    '''
    if not errors:
        for file in form.xml_files.data:
            if file.filename:
                soup = BeautifulSoup(file.read(), "html.parser")
                # Add hosts & ports
                hosts_obj = soup.find('hosts')
                scan_result = hosts_obj.findAll('host')
                hosts_dict = {}   # report host id -> {'ip', 'description', 'os', later 'pcf_id'}
                ports_dict = {}   # report service id -> port fields + 'host_report_id', later 'pcf_id'
                for host_row in scan_result:
                    host_report_id = host_row.find('id').text
                    host_ip = host_row.find('address').text
                    host_mac = host_row.find('mac').text
                    host_state = host_row.find('state').text
                    host_os = host_row.find('os-name').text  # Linux
                    host_os_flavor = host_row.find('os-flavor').text  # ???
                    host_os_version = host_row.find('os-sp').text  # 2.6.X
                    host_os_lang = host_row.find('os-lang').text  # ???
                    host_os_arch = host_row.find('arch').text  # x86_64
                    host_os_detected_arch = host_row.find('detected-arch').text  # x86_64
                    host_os_family = host_row.find('os-family').text  # Linux
                    host_type = host_row.find('purpose').text  # device
                    host_info = host_row.find('info').text
                    host_comments = host_row.find('comments').text
                    # create Host OS string from whichever fields are non-empty
                    host_os_full = ''
                    if host_os:
                        host_os_full += host_os
                    if host_os_family and host_os_family != host_os:
                        host_os_full += '({})'.format(host_os_family)
                    if host_os_flavor:
                        host_os_full += ' ' + host_os_flavor
                    if host_os_version:
                        host_os_full += ' ' + host_os_version
                    if host_os_lang:
                        host_os_full += ' Lang:{}'.format(host_os_lang)
                    if host_os_arch:
                        host_os_full += ' Arch:{}'.format(host_os_arch)
                    if host_os_detected_arch and host_os_detected_arch != host_os_arch:
                        host_os_full += ' Arch detected:{}'.format(host_os_detected_arch)
                    # create host description string
                    host_description_full = ''
                    if host_mac:
                        host_description_full += '\nMAC: {}'.format(host_mac)
                    if host_state:
                        host_description_full += '\nState: {}'.format(host_state)
                    if host_type:
                        host_description_full += '\nType: {}'.format(host_type)
                    if host_info:
                        host_description_full += '\nInfo: {}'.format(host_info)
                    if host_comments:
                        host_description_full += '\nComments: {}'.format(host_comments)
                    # check if ip correct (raises ValueError on a bad address)
                    ipaddress.ip_address(host_ip)
                    hosts_dict[host_report_id] = {
                        'ip': host_ip,
                        'description': host_description_full.strip(' \t\n\r'),
                        'os': host_os_full
                    }
                    # add ports
                    services_object = host_row.find('services')
                    services_arr = services_object.findAll('service')
                    # add all ports to ports_dict
                    for port_row in services_arr:
                        port_report_id = port_row.find('id').text
                        port_num = int(port_row.find('port').text)  # 80
                        port_is_tcp = port_row.find('proto').text == 'tcp'
                        port_state = port_row.find('state').text  # open closed filtered TODO: add option which port to add
                        port_service = port_row.find('name').text  # ftp
                        port_info = port_row.find('info').text  # vsftpd 2.3.4
                        if port_num > 0 and port_num < 65536:
                            ports_dict[port_report_id] = {
                                'port': port_num,
                                'is_tcp': port_is_tcp,
                                'state': port_state,
                                'service': port_service,
                                'info': port_info,
                                'host_report_id': host_report_id
                            }
                    # add notes to port objects - nmap scripts
                    if form.add_nmap_scripts.data:
                        notes_object = host_row.find('notes')
                        notes_arr = notes_object.findAll('note')
                        for note_row in notes_arr:
                            script_name = note_row.find('ntype').text  # nmap.nse.smb-os-discovery.host
                            if script_name not in ['host.comments', 'host.info', 'host.os.nmap_fingerprint', 'host.name']:
                                host_report_id = note_row.find('host-id').text
                                script_critical = note_row.find('critical').text  # ???
                                service_report_id = note_row.find('service-id').text
                                # Note data is usually base64; the first 16 bytes look
                                # like a serialization header and are dropped — TODO
                                # confirm against real msf exports. Falls back to the
                                # raw text when decoding fails.
                                try:
                                    script_data = base64.b64decode(note_row.find('data').text)[16:].decode('charmap').strip(' \n\t\r')
                                except Exception as e:
                                    script_data = note_row.find('data').text.strip(' \n\t\r')
                                # collapse runs of spaces to a single space
                                while '  ' in script_data:
                                    script_data = script_data.replace('  ', ' ')
                                note_full = 'Script: {}'.format(script_name)
                                if script_critical:
                                    note_full += '\nCritical: {}'.format(script_critical)
                                if script_data:
                                    note_full += '\nOutput:\n\n{}\n\n'.format(script_data)
                                note_full = note_full.strip(' \t\n\r')
                                # attach the note to its service if it has one,
                                # otherwise to the host
                                if service_report_id:
                                    ports_dict[service_report_id]['info'] += '\n' + note_full
                                elif host_report_id:
                                    hosts_dict[host_report_id]['description'] += '\n' + note_full
                # add hosts (existing hosts get description/os refreshed)
                for host_obj in hosts_dict:
                    current_host = db.select_project_host_by_ip(current_project['id'], hosts_dict[host_obj]['ip'])
                    if current_host:
                        host_id = current_host[0]['id']
                        if hosts_dict[host_obj]['description']:
                            db.update_host_description(host_id, hosts_dict[host_obj]['description'])
                        if hosts_dict[host_obj]['os']:
                            db.update_host_os(host_id, hosts_dict[host_obj]['os'])
                    else:
                        host_id = db.insert_host(current_project['id'], hosts_dict[host_obj]['ip'], current_user['id'],
                                                 hosts_dict[host_obj]['description'], os=hosts_dict[host_obj]['os'])
                    hosts_dict[host_obj]['pcf_id'] = host_id
                # add ports (existing ports get service/description refreshed)
                for port_obj in ports_dict:
                    current_port = db.select_host_port(hosts_dict[ports_dict[port_obj]['host_report_id']]['pcf_id'],
                                                       ports_dict[port_obj]['port'],
                                                       ports_dict[port_obj]['is_tcp'])
                    if current_port:
                        port_id = current_port[0]['id']
                        db.update_port_proto_description(port_id, ports_dict[port_obj]['service'], ports_dict[port_obj]['info'])
                    else:
                        port_id = db.insert_host_port(hosts_dict[ports_dict[port_obj]['host_report_id']]['pcf_id'],
                                                      ports_dict[port_obj]['port'], ports_dict[port_obj]['is_tcp'], ports_dict[port_obj]['service'],
                                                      ports_dict[port_obj]['info'], current_user['id'], current_project['id'])
                    ports_dict[port_obj]['pcf_id'] = port_id
                # ignoring websites due to it is connected with services which were added earlier
                if not form.only_nmap.data:
                    # create websites_dict: report web_site id -> db ids of its port/host/hostname
                    web_dict = {}
                    websites_obj = soup.find('web_sites')
                    website_row = websites_obj.findAll('web_site')
                    for website_obj in website_row:
                        web_id = website_obj.find('id').text
                        service_id = website_obj.find('service-id').text
                        vhost = website_obj.find('vhost').text
                        pcf_port_id = ports_dict[service_id]['pcf_id']
                        pcf_host_id = hosts_dict[ports_dict[service_id]['host_report_id']]['pcf_id']
                        pcf_hostname_id = 0
                        if vhost:
                            current_hostname = db.select_ip_hostname(pcf_host_id, vhost)
                            if current_hostname:
                                hostname_id = current_hostname[0]['id']
                            else:
                                hostname_id = db.insert_hostname(pcf_host_id, vhost, form.hostnames_description.data, current_user['id'])
                            pcf_hostname_id = hostname_id
                        web_dict[web_id] = {
                            'pcf_port_id': pcf_port_id,
                            'pcf_host_id': pcf_host_id,
                            'pcf_hostname_id': pcf_hostname_id
                        }
                    # Add web vulns (one issue per <web_vuln>)
                    vulns_obj = soup.find('web_vulns')
                    vuln_row = vulns_obj.findAll('web_vuln')
                    for vuln_obj in vuln_row:
                        vuln_url = vuln_obj.find('path').text
                        vuln_method = vuln_obj.find('method').text
                        vuln_param = vuln_obj.find('pname').text
                        vuln_params = base64.b64decode(vuln_obj.find('params').text).decode('charmap')[4:]  # i dont know how to parse better
                        vuln_description = vuln_obj.find('description').text
                        vuln_payload = vuln_obj.find('payload').text
                        vuln_website_id = vuln_obj.find('web-site-id').text
                        vuln_cvss = float(vuln_obj.find('risk').text)
                        vuln_name = 'Metasploit: {}'.format(vuln_obj.find('name').text)
                        vuln_poc_str = vuln_obj.find('proof').text
                        vuln_query = vuln_obj.find('query').text
                        vuln_description_full = vuln_description
                        if vuln_poc_str:
                            vuln_description_full += '\nPoC: {}'.format(vuln_poc_str)
                        if vuln_query:
                            vuln_description_full += '\nQuery: {}'.format(vuln_query)
                        if vuln_params:
                            vuln_description_full += '\nParams: {}'.format(vuln_params)
                        if vuln_payload:
                            vuln_description_full += '\nPayload: {}'.format(vuln_payload)
                        vuln_param_full = '({}) {}'.format(vuln_method, vuln_param)
                        # clamp out-of-range risk scores to 0
                        if vuln_cvss < 0 or vuln_cvss > 10:
                            vuln_cvss = 0
                        services = {web_dict[vuln_website_id]['pcf_port_id']: [web_dict[vuln_website_id]['pcf_hostname_id']]}
                        issue_id = db.insert_new_issue_no_dublicate(vuln_name,
                                                                    vuln_description_full,
                                                                    vuln_url,
                                                                    vuln_cvss,
                                                                    current_user['id'],
                                                                    services,
                                                                    'Need to recheck',
                                                                    current_project['id'],
                                                                    cve='',
                                                                    cwe='',
                                                                    issue_type='web',
                                                                    fix='',
                                                                    param=vuln_param_full
                                                                    )
    return render_template('project/tools/import/metasploit.html',
                           current_project=current_project,
                           tab_name='Metasploit',
                           errors=errors)
@routes.route('/project/<uuid:project_id>/tools/nuclei/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def nuclei_page(project_id, current_project, current_user):
    """Render the nuclei report-import tab for the current project."""
    template_ctx = {
        'current_project': current_project,
        'tab_name': 'Nuclei',
    }
    return render_template('project/tools/import/nuclei.html', **template_ctx)
@routes.route('/project/<uuid:project_id>/tools/nuclei/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def nuclei_page_form(project_id, current_project, current_user):
    """Import nuclei JSON-lines reports into the current project.

    Each uploaded file is treated as newline-delimited JSON findings. For
    every finding the handler extracts host/ip/port/hostname (resolving
    hostnames via the submitted hostname->ip map or, optionally, DNS),
    creates the matching host/port/hostname rows, and inserts one issue
    with a CVSS score derived from the finding's severity.
    """
    form = NucleiForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if errors:
        return render_template('project/tools/import/nuclei.html',
                               current_project=current_project,
                               tab_name='Nuclei',
                               errors=errors)
    # hostnames dict: user-supplied hostname -> ip pairs must line up
    if len(form.hostnames.data) != len(form.ips.data):
        return render_template('project/tools/import/nuclei.html',
                               current_project=current_project,
                               tab_name='Nuclei',
                               errors=['Error with hostnames'])
    hostname_dict = {}
    for i in range(len(form.hostnames.data)):
        hostname_dict[form.hostnames.data[i]] = form.ips.data[i]
    auto_resolve = form.auto_resolve.data == 1
    # json files: each line is one finding; wrap in [] to parse as one array
    for file in form.json_files.data:
        if file.filename:
            json_data = json.loads('[{}]'.format(file.read().decode('charmap').strip(' \t\r\n').replace('\r', '').replace('\n', ',')))
            for issue_obj in json_data:
                # important fields
                issue_name = 'Nuclei: {}'.format(issue_obj['info']['name'])
                issue_tags = 'Tags: {}'.format(', '.join(issue_obj['info']['tags'])) if issue_obj['info']['tags'] else ""
                issue_description = issue_obj['info']['description'] if 'description' in issue_obj['info'] else ''
                issue_references = "Links:\n{}".format('\n'.join([' - {}'.format(x) for x in issue_obj['info']['reference']])) if issue_obj['info']['reference'] else ""
                # Fix: read the severity reported by nuclei (info.severity).
                # Previously this was hard-coded to "info", which made the
                # mapping below dead code and imported every issue with CVSS 0.
                issue_severity = issue_obj['info']['severity'] if 'severity' in issue_obj['info'] else "info"
                issue_matcher_name = 'Matched: {}'.format(issue_obj['matcher-name']) if 'matcher-name' in issue_obj else ""
                issue_cvss = 0.0
                if issue_severity == 'low':
                    issue_cvss = 2.0
                elif issue_severity == 'medium':
                    issue_cvss = 5.0
                elif issue_severity == 'high':
                    issue_cvss = 8.0
                elif issue_severity == 'critical':
                    issue_cvss = 10.0
                issue_type = 'Script type: {}'.format(issue_obj['type']) if issue_obj['type'] else ""
                issue_curl_cmd = 'Curl: {}'.format(issue_obj["curl-command"]) if "curl-command" in issue_obj else ''
                issue_ip = issue_obj["ip"] if "ip" in issue_obj else ""  # 192.168.3.11
                issue_host = issue_obj["host"] if "host" in issue_obj else ''  # https://google.com
                issue_url = ''
                issue_protocol = issue_obj["protocol"] if "protocol" in issue_obj else ''  # i dont know key "protocol
                issue_port = 0
                issue_hostname = ''
                issue_cve = issue_obj["cve"] if "cve" in issue_obj else ''
                issue_cwe = issue_obj["cwe"] if "cwe" in issue_obj else ''
                # validate ip; blank it out when malformed
                if issue_ip:
                    try:
                        ipaddress.ip_address(issue_ip)
                    except Exception as e:
                        issue_ip = ''
                if issue_host:
                    # check if url
                    url_obj = None
                    try:
                        url_obj = urlparse(issue_host)
                    except Exception as e:
                        # wrong url
                        pass
                    if url_obj:
                        # its web!
                        # check protocol
                        issue_protocol = 'http'
                        if url_obj.scheme:
                            issue_protocol = url_obj.scheme
                        # check port: scheme default, overridden by an explicit port
                        if issue_protocol == 'http':
                            issue_port = 80
                        elif issue_protocol == 'https':
                            issue_port = 443
                        if url_obj.port:
                            issue_port = url_obj.port
                        # check url path
                        if issue_obj["matched-at"].startswith(issue_host):
                            issue_url = issue_obj["matched-at"][len(issue_host):]
                        if not issue_url:
                            issue_path = '/'  # NOTE(review): dead assignment — probably meant issue_url; verify intent
                        # ip or hostname
                        if not issue_ip and url_obj.hostname:
                            try:
                                ip_obj = ipaddress.ip_address(url_obj.hostname)
                                issue_ip = url_obj.hostname
                            except Exception as e:
                                issue_hostname = url_obj.hostname
                                pass
                        elif url_obj.hostname:
                            issue_hostname = url_obj.hostname
                if 'port' in issue_obj:
                    issue_port = int(issue_obj['port'])
                # everything not in this list goes verbatim into the description
                blacklist_tags = ["template-id", "info", "host", "matched-at",
                                  "timestamp", "curl-command", "type", "port",
                                  "matcher-name", "matcher-status", "template",
                                  "template-url", "protocol", "cve", "cwe", "ip"]
                issue_other_fields = ''
                for key_name in issue_obj:
                    if key_name not in blacklist_tags:
                        issue_other_fields += '{}: {}\n'.format(key_name, str(issue_obj[key_name]))
                if issue_port < 0 or issue_port > 65535:
                    issue_port = 0
                # resolve ip from the user map first, then DNS when allowed
                if not issue_ip and issue_hostname:
                    if issue_hostname in hostname_dict:
                        issue_ip = hostname_dict[issue_hostname]
                    elif auto_resolve:
                        try:
                            issue_ip = socket.gethostbyname(issue_hostname)
                        except Exception as e:
                            pass
                # if ip, port (, hostname)
                # create them in db
                services = {}
                if issue_ip:
                    # create host
                    current_host = db.select_project_host_by_ip(current_project['id'], issue_ip)
                    if current_host:
                        host_id = current_host[0]['id']
                    else:
                        host_id = db.insert_host(current_project['id'], issue_ip, current_user['id'],
                                                 comment=form.hosts_description.data)
                    # create port
                    current_port = db.select_host_port(host_id, issue_port, True)
                    if current_port:
                        port_id = current_port[0]['id']
                    else:
                        port_id = db.insert_host_port(host_id, issue_port, True, issue_protocol,
                                                      form.ports_description.data, current_user['id'],
                                                      current_project['id'])
                    # create hostname
                    hostname_id = 0
                    if issue_hostname:
                        current_hostname = db.select_ip_hostname(host_id, issue_hostname)
                        if current_hostname:
                            hostname_id = current_hostname[0]['id']
                        else:
                            hostname_id = db.insert_hostname(host_id, issue_hostname, form.hostnames_description.data,
                                                             current_user['id'])
                    services = {port_id: [hostname_id]}
                # create description
                issue_full_description = issue_description + '\n'
                if issue_matcher_name:
                    issue_full_description += '\n' + issue_matcher_name
                if issue_tags:
                    issue_full_description += '\n' + issue_tags
                if issue_type:
                    issue_full_description += '\n' + issue_type
                if issue_curl_cmd:
                    issue_full_description += '\n' + issue_curl_cmd
                if issue_references:
                    issue_full_description += '\n' + issue_references
                if issue_other_fields:
                    issue_full_description += '\n' + issue_other_fields
                # create issue
                issue_id = db.insert_new_issue_no_dublicate(issue_name,
                                                            issue_full_description,
                                                            issue_url,
                                                            issue_cvss,
                                                            current_user['id'],
                                                            services,
                                                            'Need to recheck',
                                                            current_project['id'],
                                                            issue_cve,
                                                            issue_cwe,
                                                            'web' if issue_protocol.startswith('http') else 'custom',
                                                            fix='',
                                                            param=''
                                                            )
    return render_template('project/tools/import/nuclei.html',
                           current_project=current_project,
                           tab_name='Nuclei')
@routes.route('/project/<uuid:project_id>/tools/nmap-helper/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@send_log_data
def nmap_helper_page(project_id, current_project, current_user):
    """Render the Nmap command-builder helper tab for the current project."""
    template_ctx = {
        'current_project': current_project,
        'tab_name': 'Nmap Helper',
    }
    return render_template('project/tools/helpers/nmap-helper.html', **template_ctx)
@routes.route('/project/<uuid:project_id>/tools/pingcastle/', methods=['GET'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def pingcastle_page(project_id, current_project, current_user):
    """Render the PingCastle report-import tab for the current project."""
    template_ctx = {
        'current_project': current_project,
        'tab_name': 'PingCastle',
    }
    return render_template('project/tools/import/pingcastle.html', **template_ctx)
@routes.route('/project/<uuid:project_id>/tools/pingcastle/', methods=['POST'])
@requires_authorization
@check_session
@check_project_access
@check_project_archived
@send_log_data
def pingcastle_page_form(project_id, current_project, current_user):
    """Import PingCastle healthcheck XML reports into the current project.

    Loads the bundled PingCastleDescription.resx to translate rule ids into
    human-readable titles/descriptions/fixes, then for each uploaded report:
    adds every domain controller as a host with a kerberos (88/tcp) port,
    and inserts one issue per <healthcheckriskrule>, scoring CVSS from the
    rule's point value. Renders the import tab with any form errors.
    """
    form = PingCastleForm()
    form.validate()
    errors = []
    if form.errors:
        for field in form.errors:
            for error in form.errors[field]:
                errors.append(error)
    if not errors:
        # prepare issues database: rule-id suffixed keys -> localized strings
        with open('./routes/ui/tools_files/PingCastle/PingCastleDescription.resx') as f:
            s = f.read()
        issues_database = {}
        issues_database_xml = BeautifulSoup(s, 'html.parser')
        for issue_obj in issues_database_xml.findAll('data'):
            issues_database[issue_obj.attrs['name']] = issue_obj.findAll('value')[0].text
        # xml files
        for file in form.xml_files.data:
            if file.filename:
                scan_result = BeautifulSoup(file.read(), "html.parser")
                scan_obj = scan_result.healthcheckdata
                # add DCs
                domain_controllers = scan_obj.domaincontrollers
                dc_ports_dict = {}
                if domain_controllers:
                    for domain_obj in domain_controllers.findAll('healthcheckdomaincontroller'):
                        host_description = ''
                        host_os = '' if not domain_obj.operatingsystem else domain_obj.operatingsystem.text
                        if domain_obj.dcname: host_description += 'DC name: {}\n'.format(domain_obj.dcname.text)
                        if domain_obj.lastcomputerlogondate: host_description += 'Last Logon: {}\n'.format(domain_obj.lastcomputerlogondate.text)
                        if domain_obj.distinguishedname: host_description += 'Distinguished Name: {}\n'.format(domain_obj.distinguishedname.text)
                        if domain_obj.ownersid: host_description += 'Owner SID: {}\n'.format(domain_obj.ownersid.text)
                        if domain_obj.ownername: host_description += 'Owner Name: {}\n'.format(domain_obj.ownername.text)
                        # Fix: compare the tag's text, not the Tag object itself —
                        # `Tag == 'true'` is always False, so this flag was never recorded.
                        if domain_obj.hasnullsession and domain_obj.hasnullsession.text == 'true': host_description += 'Has null session!\n'
                        if domain_obj.supportsmb1 and domain_obj.supportsmb1.text == 'true':
                            host_description += 'Supports SMB1!\n'
                        if domain_obj.smb1securitymode and domain_obj.smb1securitymode.text == 'NotTested':
                            host_description += 'SMB1SecurityMode: {}\n'.format(domain_obj.smb1securitymode.text)
                        if domain_obj.supportsmb2orsmb3 and domain_obj.supportsmb2orsmb3.text == 'true': host_description += 'Supports SMBv2 or SMBv3.\n'
                        if domain_obj.smb2securitymode: host_description += 'SMB2 security mode: {}\n'.format(domain_obj.smb2securitymode.text)
                        if domain_obj.remotespoolerdetected and domain_obj.remotespoolerdetected.text == 'true': host_description += 'Detected remote spooler.\n'
                        if domain_obj.pwdlastset: host_description += 'Last pwd set: {}.\n'.format(domain_obj.pwdlastset.text)
                        if domain_obj.rodc and domain_obj.rodc.text == 'true': host_description += 'Read-Only DC\n'
                        # Fix: same Tag-vs-text comparison bug as hasnullsession above.
                        if domain_obj.sysvoloverwrite and domain_obj.sysvoloverwrite.text == 'true': host_description += 'SYSVOL overwrite\n'
                        if domain_obj.fsmo:
                            fsmo_result = ', '.join([x.text for x in domain_obj.fsmo.findAll("string")])
                            if fsmo_result:
                                host_description += 'FSMO: {}\n'.format(fsmo_result)
                        host_description = host_description.strip(' \n\t\r')
                        # TODO: fields LDAPSProtocols
                        try:
                            ip_obj = domain_obj.ip
                            for host_ip_obj in ip_obj.findAll('string'):
                                host_ip = host_ip_obj.text
                                # check for valid ip (raises -> caught below, DC skipped)
                                ipaddress.ip_address(host_ip)
                                current_host = db.select_project_host_by_ip(current_project['id'], host_ip)
                                if current_host:
                                    current_host_id = current_host[0]['id']
                                    if host_os:
                                        db.update_host_os(current_host_id, host_os)
                                else:
                                    current_host_id = db.insert_host(current_project['id'], host_ip, current_user['id'], 'Added from PingCastle', os=host_os)
                                # add 88 port (kerberos) to mark the host as a DC
                                current_port = db.select_host_port(current_host_id, port_num=88, is_tcp=True)
                                if current_port:
                                    current_port_id = current_port[0]['id']
                                    if host_description:
                                        db.update_port_proto_description(current_port_id, 'kerberos', host_description)
                                else:
                                    current_port_id = db.insert_host_port(current_host_id, 88, True, 'kerberos',
                                                                          host_description, current_user['id'], current_project['id'])
                                dc_ports_dict[current_port_id] = ['0']
                        except Exception as e:
                            pass
                # Issues - RiskRules
                risk_rules = scan_obj.riskrules
                for risk_obj in risk_rules.findAll('healthcheckriskrule'):
                    issue_points = int(risk_obj.points.text)
                    issue_category = risk_obj.category.text  # PrivilegedAccounts
                    issue_model = risk_obj.model.text  # AccountTakeOver
                    issue_riskid = risk_obj.riskid.text.replace('-', '_')  # A_AdminSDHolder
                    issue_briefly = risk_obj.rationale.text
                    issue_links = issues_database[issue_riskid + '_Documentation'].replace(' ', '') if (issue_riskid + '_Documentation') in issues_database else ''
                    issue_purpose = issues_database[issue_riskid + '_Description'] if (issue_riskid + '_Description') in issues_database else ''
                    issue_fix = issues_database[issue_riskid + '_Solution'] if (issue_riskid + '_Solution') in issues_database else ''
                    issue_technical_description = issues_database[issue_riskid + '_TechnicalExplanation'] if (issue_riskid + '_TechnicalExplanation') in issues_database else ''
                    # NOTE(review): unlike the lookups above, _Title has no fallback —
                    # an unknown rule id raises KeyError; confirm this is acceptable.
                    issue_name = 'PingCastle: {}'.format(issues_database[issue_riskid + '_Title'])
                    issue_full_description = 'Brief: {}\n\nTechnical information: {}\n\nTest purpose: {}\n\nLinks: \n{}'.format(
                        issue_briefly,
                        issue_technical_description,
                        issue_purpose,
                        issue_links
                    )
                    # map PingCastle points onto a coarse CVSS bucket
                    if issue_points < 1:
                        issue_cvss = 0
                    elif issue_points < 10:
                        issue_cvss = 3
                    elif issue_points < 30:
                        issue_cvss = 6
                    else:
                        issue_cvss = 9.5
                    issue_id = db.insert_new_issue_no_dublicate(issue_name, issue_full_description, '', issue_cvss,
                                                                current_user['id'], dc_ports_dict, 'need to recheck',
                                                                current_project['id'], fix=issue_fix)
    return render_template('project/tools/import/pingcastle.html',
                           current_project=current_project,
                           tab_name='PingCastle',
                           errors=errors)
| en | 0.512527 | # parse ports # check if we will add host # xml files # add hostname # create port # add issue to created port # add host OS # json files # add host # add hostname # add port # csv load # add host # add hostname # add port # add issue # add host # add hostname # add port # xml files # add host # add hostname # add port # TODO: check CVE field Module: \n{}\n\nDescription: \n{}\n\nImpact: \n{}\n\nRecomendations: \n{}\n\nRequest: \n{} # txt worker # no ports # with ports # preparation: issues # 'host/hostname','port', 'type', 'service', 'description' # always with ports # preparation: issues # first generates json # [{"<ip>":"","hostnames":["<hostname_1",..], # "ports":[ {"num":"<num>", "type":"tcp", "service":"<service>", # "description": "<comment>"},...],},...] # preparation: issues #/sniffer_{}'.format(current_project['id'], sniffer_id)) # check if sniffer in project #/sniffer_{}'.format(current_project['id'], current_sniffer['id'])) {} {} {} # worker of headers # token header # worker of post data {}\n{}{} # check if sniffer in project # insert_new_network(self, ip, mask, asn, comment, # project_id, user_id,is_ipv6): # check if exist # insert_new_network(self, ip, mask, asn, comment, # project_id, user_id,is_ipv6): # check if exist # insert_new_network(self, ip, mask, asn, comment, # project_id, user_id,is_ipv6): # check if exist # api_key # checker # create network # create host # hostnames = shodan_json["hostnames"] # add hostnames # add ports with cve # add vulnerabilities # create network # create host # hostnames = shodan_json["hostnames"] # add hostnames # add ports with cve # add vulnerabilities # shodan delay # create network # create host # hostnames = shodan_json["hostnames"] # add hostnames # add ports with cve # add vulnerabilities # a lot of errors # a lot of errors # shodan delay # xml files # High ######### Path {} ###########\n'.format(path_find.index(path_obj) + 1) # disables result tags inside issue description # check if host 
exists # check if port exists # check if hostname exists # High, Medium, Low, Information, BestPractice # parse info # parse url # add port # add hostname # add issue # create PoC # xml files # TODO: dont forget to add hostname # TODO: add PoC # TODO: add PoC # try to detect port # add even with result_str is empty # add even with result_str is empty # check paths # hosts # if same host exists # insert ports # check if port exists # insert hostnames # check if exists # issues # fullfill issue hosts # hostname search # add hostname to issue # get 0 port if port not found # port was already added # add PoCs # add poc with port # add poc with hostname # add poc without hostname # files # services # hostname search # add hostname to issue # get 0 port if port not found # port was already added # creds # services # hostname search # add hostname to issue # get 0 port if port not found # port was already added # networks # services # hostname search # add hostname to issue # get 0 port if port not found # port was already added # check duplicates # notes # host notes # network paths # search dublicates # json files # validate ip # get protocol # create port # create hostname # Interesting findings # Versions issues detection # Theme # Plugins # Add note # add node description # check if valid ip # next Node # services # check ip # add host # add port # next service # add issues # check ip # add host # add port # add issue # hostnames dict # xml files # find list of issues # check ip # check hostname # add host # add port # add hostname # issues loop # goto issue container # ignoring Remediation detail # Remidiation == fix # issue_fix = issue_fix.replace('<ul>', '\n').replace('<li>', ' - ').replace('</li>', '\n').replace('</ul>', '').replace('\t', '').replace('<div>', '').replace('</div>', '').replace('<b>', '').replace('</b>', '') # References # Vulnerability classifications # add issue # PoC Request # add poc # PoC Response # add poc # json files 1. Name <--> Address 2. 
Target <--> Address 3. Name <--> String (Port, Type) # check if host_ip domain or IP # its domain, need ip # 1. Name <--> Address # 1. Name <--> Address <--> Target # Name <--> String # Target <--> Address # add ports # csv load 1. Name <--> Address 2. Target <--> Address 3. Name <--> String (Port, Type) # check if host_ip domain or IP # its domain, need ip # 1. Name <--> Address # 1. Name <--> Address <--> Target # Name <--> String # Target <--> Address # add ports 1. Name <--> Address 2. Target <--> Address 3. Name <--> String (Port, Type) # check if host_ip domain or IP # its domain, need ip # 1. Name <--> Address # 1. Name <--> Address <--> Target # Name <--> String # Target <--> Address # add ports # hostnames_dict = {'google.com':{'ip':[8.8.8.8], 'description': '...' }} # check if valid ip # ports_dict = {'ip':['8888']} # check if valid ip # check valid ip <MetasploitV5> 1. <hosts> - hosts info (domain/ip) - ignore <vulns> 2. <events> - ignoring 3. <web_sites> 4. <web_pages> - ignoring 5. <web_forms> - ignoring 6. <web_vuln> Steps: 1. Add hosts 2. Add sites 3. Add site vulns # Add hosts & ports # Linux # ??? # 2.6.X # ??? # x86_64 # x86_64 # Linux # device # create Host OS string # create host description string # check if ip correct # add ports # add all ports to ports_dict # 80 # open closed filtered TODO: add option which port to add # ftp # vsftpd 2.3.4 # add notes to port objects - nmap scripts # nmap.nse.smb-os-discovery.host # ??? # add hosts # add ports # ignoring websites due to it is connected with services which were added earlier # create websites_dict # Add web vulns # i dont know how to parse better # hostnames dict # json files # important fields # 192.168.3.11 # https://google.com # i dont know key "protocol # validate ip # check if url # wrong url # its web! 
# check protocol # check port # check url path # ip or hostname # resolve ip # if ip, port (, hostname) # create them in db # create host # create port # create hostname # create description # create issue # prepare issues database # xml files # add DCs # TODO: fields LDAPSProtocols # check for valid ip # add 88 port # Issues - RiskRules # PrivilegedAccounts # AccountTakeOver # A_AdminSDHolder | 1.746576 | 2 |
# Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test for fault injection.
"""
import pytest
import numpy as np
from mindspore import Model
import mindspore.dataset as ds
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindarmour.utils.logger import LogUtil
from mindarmour.reliability.model_fault_injection.fault_injection import FaultInjector
from tests.ut.python.utils.mock_net import Net
LOGGER = LogUtil.get_instance()
TAG = 'Fault injection test'
LOGGER.set_level('INFO')
def dataset_generator():
    """Yield mock (image, label) training batches.

    Produces 128 batches of 32 samples: images shaped (32, 1, 32, 32) as
    float32 and integer labels drawn from [0, 10) as int32.
    """
    batch_size = 32
    batches = 128
    total = batches * batch_size
    images = np.random.random((total, 1, 32, 32)).astype(np.float32)
    labels = np.random.randint(0, 10, total).astype(np.int32)
    for start in range(0, total, batch_size):
        stop = start + batch_size
        yield images[start:stop], labels[start:stop]
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_fault_injector():
    """
    Feature: Fault injector
    Description: Test fault injector
    Expectation: Run kick_off and metrics successfully
    """
    # Restore the trained LeNet checkpoint into a fresh network.
    ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
    net = Net()
    load_param_into_net(net, load_checkpoint(ckpt_path))
    model = Model(net)
    # Materialize the generator dataset into two contiguous arrays.
    ds_eval = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
    collected = [(batch[0].astype(np.float32), batch[1])
                 for batch in ds_eval.create_tuple_iterator(output_numpy=True)]
    ds_data = np.concatenate([images for images, _ in collected], axis=0)
    ds_label = np.concatenate([labels for _, labels in collected], axis=0)
    fi_type = ['bitflips_random', 'bitflips_designated', 'random', 'zeros',
               'nan', 'inf', 'anti_activation', 'precision_loss']
    fi_mode = ['single_layer', 'all_layer']
    fi_size = [1]
    # Run every fault type/mode combination and collect the report.
    fi = FaultInjector(model, fi_type, fi_mode, fi_size)
    _ = fi.kick_off(ds_data, ds_label, iter_times=100)
    _ = fi.metrics()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_wrong_model():
    """
    Feature: Fault injector
    Description: Test fault injector
    Expectation: Throw TypeError exception
    """
    # Restore the trained LeNet checkpoint into a fresh network.
    ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
    net = Net()
    load_param_into_net(net, load_checkpoint(ckpt_path))
    # Materialize the generator dataset into two contiguous arrays.
    ds_eval = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
    collected = [(batch[0].astype(np.float32), batch[1])
                 for batch in ds_eval.create_tuple_iterator(output_numpy=True)]
    ds_data = np.concatenate([images for images, _ in collected], axis=0)
    ds_label = np.concatenate([labels for _, labels in collected], axis=0)
    fi_type = ['bitflips_random', 'bitflips_designated', 'random', 'zeros',
               'nan', 'inf', 'anti_activation', 'precision_loss']
    fi_mode = ['single_layer', 'all_layer']
    fi_size = [1]
    # Passing the raw network instead of a mindspore Model must be rejected.
    with pytest.raises(TypeError) as exc_info:
        fi = FaultInjector(net, fi_type, fi_mode, fi_size)
        _ = fi.kick_off(ds_data, ds_label, iter_times=100)
        _ = fi.metrics()
    assert exc_info.type is TypeError
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_wrong_data():
    """
    Feature: Fault injector
    Description: Test fault injector
    Expectation: Throw TypeError exception
    """
    # Restore the checkpoint into a wrapped model.
    ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
    net = Net()
    load_param_into_net(net, load_checkpoint(ckpt_path))
    model = Model(net)

    # Deliberately hand raw GeneratorDataset objects (not numpy arrays)
    # to the injector; this must be rejected with a TypeError.
    ds_data = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
    ds_label = ds.GeneratorDataset(dataset_generator, ['image', 'label'])

    fi_type = ['bitflips_random', 'bitflips_designated', 'random', 'zeros',
               'nan', 'inf', 'anti_activation', 'precision_loss']
    fi_mode = ['single_layer', 'all_layer']
    fi_size = [1]

    with pytest.raises(TypeError) as exc_info:
        injector = FaultInjector(model, fi_type, fi_mode, fi_size)
        _ = injector.kick_off(ds_data, ds_label, iter_times=100)
        _ = injector.metrics()
    assert exc_info.type is TypeError
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_wrong_fi_type():
    """
    Feature: Fault injector
    Description: Test fault injector with an unsupported fault type name
    Expectation: Throw ValueError exception
    """
    # load model
    ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
    net = Net()
    param_dict = load_checkpoint(ckpt_path)
    load_param_into_net(net, param_dict)
    model = Model(net)
    # Materialize the generator dataset into numpy arrays.
    ds_eval = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
    test_images = []
    test_labels = []
    for data in ds_eval.create_tuple_iterator(output_numpy=True):
        images = data[0].astype(np.float32)
        labels = data[1]
        test_images.append(images)
        test_labels.append(labels)
    ds_data = np.concatenate(test_images, axis=0)
    ds_label = np.concatenate(test_labels, axis=0)
    # 'bitflips_random_haha' is not a supported fault type, so the
    # injector is expected to fail with ValueError (asserted below).
    fi_type = ['bitflips_random_haha', 'bitflips_designated', 'random', 'zeros',
               'nan', 'inf', 'anti_activation', 'precision_loss']
    fi_mode = ['single_layer', 'all_layer']
    fi_size = [1]
    # Fault injection
    with pytest.raises(ValueError) as exc_info:
        fi = FaultInjector(model, fi_type, fi_mode, fi_size)
        _ = fi.kick_off(ds_data, ds_label, iter_times=100)
        _ = fi.metrics()
    assert exc_info.type is ValueError
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_wrong_fi_mode():
    """
    Feature: Fault injector
    Description: Test fault injector
    Expectation: Throw ValueError exception
    """
    # Restore the checkpoint into a wrapped model.
    ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
    net = Net()
    load_param_into_net(net, load_checkpoint(ckpt_path))
    model = Model(net)

    # Materialize the whole evaluation set into two numpy arrays.
    eval_set = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
    image_chunks, label_chunks = [], []
    for image_batch, label_batch in eval_set.create_tuple_iterator(output_numpy=True):
        image_chunks.append(image_batch.astype(np.float32))
        label_chunks.append(label_batch)
    ds_data = np.concatenate(image_chunks, axis=0)
    ds_label = np.concatenate(label_chunks, axis=0)

    fi_type = ['bitflips_random', 'bitflips_designated', 'random', 'zeros',
               'nan', 'inf', 'anti_activation', 'precision_loss']
    # 'single_layer_tail' is not a supported mode and must be rejected.
    fi_mode = ['single_layer_tail', 'all_layer']
    fi_size = [1]

    with pytest.raises(ValueError) as exc_info:
        injector = FaultInjector(model, fi_type, fi_mode, fi_size)
        _ = injector.kick_off(ds_data, ds_label, iter_times=100)
        _ = injector.metrics()
    assert exc_info.type is ValueError
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_wrong_fi_size():
    """
    Feature: Fault injector
    Description: Test fault injector
    Expectation: Throw ValueError exception
    """
    # Restore the checkpoint into a wrapped model.
    ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
    net = Net()
    load_param_into_net(net, load_checkpoint(ckpt_path))
    model = Model(net)

    # Materialize the whole evaluation set into two numpy arrays.
    eval_set = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
    image_chunks, label_chunks = [], []
    for image_batch, label_batch in eval_set.create_tuple_iterator(output_numpy=True):
        image_chunks.append(image_batch.astype(np.float32))
        label_chunks.append(label_batch)
    ds_data = np.concatenate(image_chunks, axis=0)
    ds_label = np.concatenate(label_chunks, axis=0)

    fi_type = ['bitflips_random', 'bitflips_designated', 'random', 'zeros',
               'nan', 'inf', 'anti_activation', 'precision_loss']
    fi_mode = ['single_layer', 'all_layer']
    # A negative injection size is invalid and must be rejected.
    fi_size = [-1]

    with pytest.raises(ValueError) as exc_info:
        injector = FaultInjector(model, fi_type, fi_mode, fi_size)
        _ = injector.kick_off(ds_data, ds_label, iter_times=100)
        _ = injector.metrics()
    assert exc_info.type is ValueError
| # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test for fault injection.
"""
import pytest
import numpy as np
from mindspore import Model
import mindspore.dataset as ds
from mindspore.train.serialization import load_checkpoint, load_param_into_net
from mindarmour.utils.logger import LogUtil
from mindarmour.reliability.model_fault_injection.fault_injection import FaultInjector
from tests.ut.python.utils.mock_net import Net
LOGGER = LogUtil.get_instance()
TAG = 'Fault injection test'
LOGGER.set_level('INFO')
def dataset_generator():
"""mock training data."""
batch_size = 32
batches = 128
data = np.random.random((batches*batch_size, 1, 32, 32)).astype(
np.float32)
label = np.random.randint(0, 10, batches*batch_size).astype(np.int32)
for i in range(batches):
yield data[i*batch_size:(i + 1)*batch_size],\
label[i*batch_size:(i + 1)*batch_size]
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_fault_injector():
"""
Feature: Fault injector
Description: Test fault injector
Expectation: Run kick_off and metrics successfully
"""
# load model
ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
net = Net()
param_dict = load_checkpoint(ckpt_path)
load_param_into_net(net, param_dict)
model = Model(net)
ds_eval = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
test_images = []
test_labels = []
for data in ds_eval.create_tuple_iterator(output_numpy=True):
images = data[0].astype(np.float32)
labels = data[1]
test_images.append(images)
test_labels.append(labels)
ds_data = np.concatenate(test_images, axis=0)
ds_label = np.concatenate(test_labels, axis=0)
fi_type = ['bitflips_random', 'bitflips_designated', 'random', 'zeros',
'nan', 'inf', 'anti_activation', 'precision_loss']
fi_mode = ['single_layer', 'all_layer']
fi_size = [1]
# Fault injection
fi = FaultInjector(model, fi_type, fi_mode, fi_size)
_ = fi.kick_off(ds_data, ds_label, iter_times=100)
_ = fi.metrics()
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_wrong_model():
"""
Feature: Fault injector
Description: Test fault injector
Expectation: Throw TypeError exception
"""
# load model
ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
net = Net()
param_dict = load_checkpoint(ckpt_path)
load_param_into_net(net, param_dict)
ds_eval = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
test_images = []
test_labels = []
for data in ds_eval.create_tuple_iterator(output_numpy=True):
images = data[0].astype(np.float32)
labels = data[1]
test_images.append(images)
test_labels.append(labels)
ds_data = np.concatenate(test_images, axis=0)
ds_label = np.concatenate(test_labels, axis=0)
fi_type = ['bitflips_random', 'bitflips_designated', 'random', 'zeros',
'nan', 'inf', 'anti_activation', 'precision_loss']
fi_mode = ['single_layer', 'all_layer']
fi_size = [1]
# Fault injection
with pytest.raises(TypeError) as exc_info:
fi = FaultInjector(net, fi_type, fi_mode, fi_size)
_ = fi.kick_off(ds_data, ds_label, iter_times=100)
_ = fi.metrics()
assert exc_info.type is TypeError
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_wrong_data():
"""
Feature: Fault injector
Description: Test fault injector
Expectation: Throw TypeError exception
"""
# load model
ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
net = Net()
param_dict = load_checkpoint(ckpt_path)
load_param_into_net(net, param_dict)
model = Model(net)
ds_data = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
ds_label = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
fi_type = ['bitflips_random', 'bitflips_designated', 'random', 'zeros',
'nan', 'inf', 'anti_activation', 'precision_loss']
fi_mode = ['single_layer', 'all_layer']
fi_size = [1]
# Fault injection
with pytest.raises(TypeError) as exc_info:
fi = FaultInjector(model, fi_type, fi_mode, fi_size)
_ = fi.kick_off(ds_data, ds_label, iter_times=100)
_ = fi.metrics()
assert exc_info.type is TypeError
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_wrong_fi_type():
"""
Feature: Fault injector
Description: Test fault injector
Expectation: Throw AttributeError exception
"""
# load model
ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
net = Net()
param_dict = load_checkpoint(ckpt_path)
load_param_into_net(net, param_dict)
model = Model(net)
ds_eval = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
test_images = []
test_labels = []
for data in ds_eval.create_tuple_iterator(output_numpy=True):
images = data[0].astype(np.float32)
labels = data[1]
test_images.append(images)
test_labels.append(labels)
ds_data = np.concatenate(test_images, axis=0)
ds_label = np.concatenate(test_labels, axis=0)
fi_type = ['bitflips_random_haha', 'bitflips_designated', 'random', 'zeros',
'nan', 'inf', 'anti_activation', 'precision_loss']
fi_mode = ['single_layer', 'all_layer']
fi_size = [1]
# Fault injection
with pytest.raises(ValueError) as exc_info:
fi = FaultInjector(model, fi_type, fi_mode, fi_size)
_ = fi.kick_off(ds_data, ds_label, iter_times=100)
_ = fi.metrics()
assert exc_info.type is ValueError
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_wrong_fi_mode():
"""
Feature: Fault injector
Description: Test fault injector
Expectation: Throw ValueError exception
"""
# load model
ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
net = Net()
param_dict = load_checkpoint(ckpt_path)
load_param_into_net(net, param_dict)
model = Model(net)
ds_eval = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
test_images = []
test_labels = []
for data in ds_eval.create_tuple_iterator(output_numpy=True):
images = data[0].astype(np.float32)
labels = data[1]
test_images.append(images)
test_labels.append(labels)
ds_data = np.concatenate(test_images, axis=0)
ds_label = np.concatenate(test_labels, axis=0)
fi_type = ['bitflips_random', 'bitflips_designated', 'random', 'zeros',
'nan', 'inf', 'anti_activation', 'precision_loss']
fi_mode = ['single_layer_tail', 'all_layer']
fi_size = [1]
# Fault injection
with pytest.raises(ValueError) as exc_info:
fi = FaultInjector(model, fi_type, fi_mode, fi_size)
_ = fi.kick_off(ds_data, ds_label, iter_times=100)
_ = fi.metrics()
assert exc_info.type is ValueError
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.platform_x86_ascend_training
@pytest.mark.platform_arm_ascend_training
@pytest.mark.env_onecard
@pytest.mark.component_mindarmour
def test_wrong_fi_size():
"""
Feature: Fault injector
Description: Test fault injector
Expectation: Throw ValueError exception
"""
# load model
ckpt_path = '../../dataset/trained_ckpt_file/checkpoint_lenet-10_1875.ckpt'
net = Net()
param_dict = load_checkpoint(ckpt_path)
load_param_into_net(net, param_dict)
model = Model(net)
ds_eval = ds.GeneratorDataset(dataset_generator, ['image', 'label'])
test_images = []
test_labels = []
for data in ds_eval.create_tuple_iterator(output_numpy=True):
images = data[0].astype(np.float32)
labels = data[1]
test_images.append(images)
test_labels.append(labels)
ds_data = np.concatenate(test_images, axis=0)
ds_label = np.concatenate(test_labels, axis=0)
fi_type = ['bitflips_random', 'bitflips_designated', 'random', 'zeros',
'nan', 'inf', 'anti_activation', 'precision_loss']
fi_mode = ['single_layer', 'all_layer']
fi_size = [-1]
# Fault injection
with pytest.raises(ValueError) as exc_info:
fi = FaultInjector(model, fi_type, fi_mode, fi_size)
_ = fi.kick_off(ds_data, ds_label, iter_times=100)
_ = fi.metrics()
assert exc_info.type is ValueError
| en | 0.74259 | # Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Test for fault injection. mock training data. Feature: Fault injector
Description: Test fault injector
Expectation: Run kick_off and metrics successfully # load model # Fault injection Feature: Fault injector
Description: Test fault injector
Expectation: Throw TypeError exception # load model # Fault injection Feature: Fault injector
Description: Test fault injector
Expectation: Throw TypeError exception # load model # Fault injection Feature: Fault injector
Description: Test fault injector
Expectation: Throw AttributeError exception # load model # Fault injection Feature: Fault injector
Description: Test fault injector
Expectation: Throw ValueError exception # load model # Fault injection Feature: Fault injector
Description: Test fault injector
Expectation: Throw ValueError exception # load model # Fault injection | 2.187953 | 2 |
scripts/multiview/older_scripts/depth_generator.py | FLW-TUDO/bop_toolkit | 0 | 6615899 | <reponame>FLW-TUDO/bop_toolkit
import numpy as np
import cv2
from bop_toolkit_lib import renderer
from bop_toolkit_lib import inout
from bop_toolkit_lib import config
from bop_toolkit_lib import dataset_params
from bop_toolkit_lib import misc
import os
import matplotlib.pyplot as plt
from tqdm import tqdm

# Script parameters (see dataset_params.py for the available options).
p = {
    # See dataset_params.py for options.
    'dataset': 'mv',
    # Dataset split. Options: 'train', 'val', 'test'.
    'dataset_split': 'test',
    # Dataset split type. None = default. See dataset_params.py for options.
    'dataset_split_type': None,
    # Type of the renderer. Options: 'vispy', 'cpp', 'python'.
    'renderer_type': 'vispy',
    # Folder containing the BOP datasets.
    'datasets_path': config.datasets_path,
}

model_type = None
dp_model = dataset_params.get_model_params(
    p['datasets_path'], p['dataset'], model_type)

# Load dataset parameters.
dp_split = dataset_params.get_split_params(
    p['datasets_path'], p['dataset'], p['dataset_split'], p['dataset_split_type'])

# The renderer has a larger canvas for generation of masks of truncated
# objects: 3x the image size, with the real image window centered in it.
im_width, im_height = dp_split['im_size']
ren_width, ren_height = 3 * im_width, 3 * im_height
ren_cx_offset, ren_cy_offset = im_width, im_height
ren = renderer.create_renderer(
    ren_width, ren_height, p['renderer_type'], mode='depth')
for obj_id in dp_model['obj_ids']:
    ren.add_object(obj_id, dp_model['model_tpath'].format(obj_id=obj_id))

scene_ids = dataset_params.get_present_scene_ids(dp_split)
for scene_id in tqdm(scene_ids):
    # Make sure the per-scene output folder exists.
    depth_dir_path = os.path.join(
        dp_split['split_path'], f'{scene_id:06d}', 'depth')
    if not os.path.exists(depth_dir_path):
        os.makedirs(depth_dir_path)

    # Load scene info and ground-truth poses.
    scene_camera = inout.load_scene_camera(
        dp_split['scene_camera_tpath'].format(scene_id=scene_id))
    scene_gt = inout.load_scene_gt(
        dp_split['scene_gt_tpath'].format(scene_id=scene_id))

    for im_id in sorted(scene_gt.keys()):
        K = scene_camera[im_id]['cam_K']
        fx, fy, cx, cy = K[0, 0], K[1, 1], K[0, 2], K[1, 2]

        # Render one depth map per GT object on the enlarged canvas, then
        # crop the centered window matching the real image size.
        depth_imgs = []
        for gt in scene_gt[im_id]:
            depth_gt_large = ren.render_object(
                gt['obj_id'], gt['cam_R_m2c'], gt['cam_t_m2c'],
                fx, fy, cx + ren_cx_offset, cy + ren_cy_offset)['depth']
            depth_gt = depth_gt_large[
                ren_cy_offset:(ren_cy_offset + im_height),
                ren_cx_offset:(ren_cx_offset + im_width)]
            # Skip objects that are (nearly) entirely outside the image.
            if np.sum(depth_gt) < 100 or np.sum(depth_gt) < 0.9 * np.sum(depth_gt_large):
                print(gt['obj_id'], 'not in image')
                continue
            depth_imgs.append(depth_gt)

        # Merge the per-object depth maps: each pixel keeps the first
        # non-zero value in GT-annotation order. Vectorized replacement of
        # the former per-pixel Python loop - identical result, much faster.
        # NOTE(review): for overlapping objects a physically correct merge
        # would keep the minimum (closest) depth per pixel - confirm intent.
        combined_depth_gt = np.zeros((im_height, im_width))
        for depth_img in depth_imgs:
            unset = (combined_depth_gt == 0) & (depth_img > 0)
            combined_depth_gt[unset] = depth_img[unset]

        # Flip the merged map; the renderer output appears mirrored w.r.t.
        # the dataset convention (TODO confirm against real captures).
        combined_depth_gt = np.flipud(combined_depth_gt)
        combined_depth_gt = np.fliplr(combined_depth_gt)
        inout.save_depth(
            dp_split['depth_tpath'].format(scene_id=scene_id, im_id=im_id),
            combined_depth_gt)
| import numpy as np
import cv2
from bop_toolkit_lib import renderer
from bop_toolkit_lib import inout
from bop_toolkit_lib import config
from bop_toolkit_lib import dataset_params
from bop_toolkit_lib import misc
import os
import matplotlib.pyplot as plt
from tqdm import tqdm
p = {
# See dataset_params.py for options.
'dataset': 'mv',
# Dataset split. Options: 'train', 'val', 'test'.
'dataset_split': 'test',
# Dataset split type. None = default. See dataset_params.py for options.
'dataset_split_type': None,
# Type of the renderer.
'renderer_type': 'vispy', # Options: 'vispy', 'cpp', 'python'.
# Folder containing the BOP datasets.
'datasets_path': config.datasets_path,
}
model_type = None
dp_model = dataset_params.get_model_params(
p['datasets_path'], p['dataset'], model_type)
# Load dataset parameters.
dp_split = dataset_params.get_split_params(
p['datasets_path'], p['dataset'], p['dataset_split'], p['dataset_split_type'])
# The renderer has a larger canvas for generation of masks of truncated objects.
im_width, im_height = dp_split['im_size']
ren_width, ren_height = 3 * im_width, 3 * im_height
ren_cx_offset, ren_cy_offset = im_width, im_height
ren = renderer.create_renderer(
ren_width, ren_height, p['renderer_type'], mode='depth')
for obj_id in dp_model['obj_ids']:
model_fpath = dp_model['model_tpath'].format(obj_id=obj_id)
ren.add_object(obj_id, model_fpath)
scene_ids = dataset_params.get_present_scene_ids(dp_split)
#for scene_id in tqdm(scene_ids):
for scene_id in tqdm(scene_ids):
scene_id_full = f'{scene_id:06d}'
depth_dir_path = os.path.join(dp_split['split_path'], scene_id_full, 'depth')
if not os.path.exists(depth_dir_path):
os.makedirs(depth_dir_path)
# Load scene info and ground-truth poses.
scene_camera = inout.load_scene_camera(
dp_split['scene_camera_tpath'].format(scene_id=scene_id))
scene_gt = inout.load_scene_gt(
dp_split['scene_gt_tpath'].format(scene_id=scene_id))
im_ids = sorted(scene_gt.keys())
for im_counter, im_id in enumerate(im_ids):
# if im_counter % 100 == 0:
# misc.log(
# 'Calculating GT info - dataset: {} ({}, {}), scene: {}, im: {}'.format(
# p['dataset'], p['dataset_split'], p['dataset_split_type'], scene_id,
# im_id))
K = scene_camera[im_id]['cam_K']
fx, fy, cx, cy = K[0, 0], K[1, 1], K[0, 2], K[1, 2]
# im_size = (depth.shape[1], depth.shape[0])
depth_imgs = []
for gt_id, gt in enumerate(scene_gt[im_id]):
# Render depth image of the object model in the ground-truth pose.
depth_gt_large = ren.render_object(
gt['obj_id'], gt['cam_R_m2c'], gt['cam_t_m2c'],
fx, fy, cx + ren_cx_offset, cy + ren_cy_offset)['depth']
depth_gt = depth_gt_large[
ren_cy_offset:(ren_cy_offset + im_height),
ren_cx_offset:(ren_cx_offset + im_width)]
if np.sum(depth_gt) < 100 or np.sum(depth_gt) < 0.9 * np.sum(depth_gt_large):
print(gt['obj_id'], 'not in image')
continue
depth_imgs.append(depth_gt)
im_shape = np.shape(depth_gt)
combined_depth_gt = np.zeros((im_shape[0], im_shape[1]))
#print('Combining depth images..')
for depth_img in depth_imgs:
rows, columns = np.where(depth_img > 0)
for i, j in zip(rows, columns):
if (combined_depth_gt[i, j] == 0): # updated by first non-zero value in any depth image
combined_depth_gt[i, j] = depth_img[i, j]
combined_depth_gt = np.flipud(combined_depth_gt)
combined_depth_gt = np.fliplr(combined_depth_gt)
inout.save_depth(dp_split['depth_tpath'].format(scene_id=scene_id, im_id=im_id), combined_depth_gt)
#combined_depth_gt = np.asarray(combined_depth_gt, dtype=np.uint16)
#cv2.imwrite(dp_split['depth_tpath'].format(scene_id=scene_id, im_id=im_id), combined_depth_gt)
# cv2.imwrite('/home/hazem/projects/depth_test.png', depth_gt)
# img = cv2.imread(dp_split['depth_tpath'].format(scene_id=scene_id, im_id=im_id), -1) # read image as is
# print(np.shape(img))
# normed_img = cv2.normalize(img, None, 0, 255, cv2.NORM_MINMAX, dtype=cv2.CV_8U)
# color = cv2.applyColorMap(normed_img, cv2.COLORMAP_JET)
# cv2.imshow('depth img', normed_img)
# key = cv2.waitKey(0)
# if key == 'q':
# cv2.destroyAllWindows()
#
# print(img.dtype)
# vals = []
# for row in normed_img:
# for elem in row:
# if elem != 0:
# print(f'Pixel value is: {elem}')
# vals.append(elem)
#
# num_bins = 20
# n, bins, patches = plt.hist(vals, num_bins, facecolor='blue', alpha=0.5)
# plt.show() | en | 0.36806 | # See dataset_params.py for options. # Dataset split. Options: 'train', 'val', 'test'. # Dataset split type. None = default. See dataset_params.py for options. # Type of the renderer. # Options: 'vispy', 'cpp', 'python'. # Folder containing the BOP datasets. # Load dataset parameters. # The renderer has a larger canvas for generation of masks of truncated objects. #for scene_id in tqdm(scene_ids): # Load scene info and ground-truth poses. # if im_counter % 100 == 0: # misc.log( # 'Calculating GT info - dataset: {} ({}, {}), scene: {}, im: {}'.format( # p['dataset'], p['dataset_split'], p['dataset_split_type'], scene_id, # im_id)) # im_size = (depth.shape[1], depth.shape[0]) # Render depth image of the object model in the ground-truth pose. #print('Combining depth images..') # updated by first non-zero value in any depth image #combined_depth_gt = np.asarray(combined_depth_gt, dtype=np.uint16) #cv2.imwrite(dp_split['depth_tpath'].format(scene_id=scene_id, im_id=im_id), combined_depth_gt) # cv2.imwrite('/home/hazem/projects/depth_test.png', depth_gt) # img = cv2.imread(dp_split['depth_tpath'].format(scene_id=scene_id, im_id=im_id), -1) # read image as is # print(np.shape(img)) # normed_img = cv2.normalize(img, None, 0, 255, cv2.NORM_MINMAX, dtype=cv2.CV_8U) # color = cv2.applyColorMap(normed_img, cv2.COLORMAP_JET) # cv2.imshow('depth img', normed_img) # key = cv2.waitKey(0) # if key == 'q': # cv2.destroyAllWindows() # # print(img.dtype) # vals = [] # for row in normed_img: # for elem in row: # if elem != 0: # print(f'Pixel value is: {elem}') # vals.append(elem) # # num_bins = 20 # n, bins, patches = plt.hist(vals, num_bins, facecolor='blue', alpha=0.5) # plt.show() | 2.017177 | 2 |
loss_keras/tversky_loss.py | ForrestPi/SegDL | 1 | 6615900 | import keras.backend as K
def tversky(y_true, y_pred):
    """Tversky index between binary masks (Salehi et al., 2017).

    Generalizes the Dice coefficient: false negatives are weighted by
    ``alpha`` and false positives by ``1 - alpha``, so alpha > 0.5
    penalizes missed foreground pixels more heavily.

    Args:
        y_true: Ground-truth mask tensor.
        y_pred: Predicted mask tensor.

    Returns:
        Scalar tensor with the smoothed Tversky index in (0, 1].
    """
    y_true_pos = K.flatten(y_true)
    y_pred_pos = K.flatten(y_pred)
    true_pos = K.sum(y_true_pos * y_pred_pos)
    false_neg = K.sum(y_true_pos * (1 - y_pred_pos))
    false_pos = K.sum((1 - y_true_pos) * y_pred_pos)
    alpha = 0.7  # weight of false negatives
    # Fix: `smooth` was referenced but never defined (NameError at call
    # time). The smoothing term avoids 0/0 for empty masks.
    smooth = 1
    return (true_pos + smooth) / (
        true_pos + alpha * false_neg + (1 - alpha) * false_pos + smooth)
def tversky_loss(y_true, y_pred):
return 1 - tversky(y_true,y_pred) | import keras.backend as K
def tversky(y_true, y_pred):
y_true_pos = K.flatten(y_true)
y_pred_pos = K.flatten(y_pred)
true_pos = K.sum(y_true_pos * y_pred_pos)
false_neg = K.sum(y_true_pos * (1-y_pred_pos))
false_pos = K.sum((1-y_true_pos)*y_pred_pos)
alpha = 0.7
return (true_pos + smooth)/(true_pos + alpha*false_neg + (1-alpha)*false_pos + smooth)
def tversky_loss(y_true, y_pred):
return 1 - tversky(y_true,y_pred) | none | 1 | 2.625159 | 3 | |
src/components/drive/driveMotorGroup.py | FRC-Team-3405/CompitionBot2020 | 0 | 6615901 | from .driveMotor import DriveMotor
from statistics import mean
from typing import List
class DriveMotorGroup():
    """A group of drive motors commanded together.

    Setting a value drives every motor in the group with it; position
    and velocity are reported as the arithmetic mean over all motors.
    """

    def __init__(self, _motors: List[DriveMotor]):
        self.motors = _motors

    def set(self, value: float):
        """Apply the same output value to every motor in the group."""
        for motor in self.motors:
            motor.set(value)

    def getPosition(self) -> float:
        """Return the mean position across all motors in the group."""
        return mean(motor.getPosition() for motor in self.motors)

    def getVelocity(self) -> float:
        """Return the mean velocity across all motors in the group."""
        return mean(motor.getVelocity() for motor in self.motors)
| from .driveMotor import DriveMotor
from statistics import mean
from typing import List
class DriveMotorGroup():
def __init__(self, _motors: List[DriveMotor]):
self.motors = _motors
def set(self, value: float):
for i in self.motors:
i.set(value)
def getPosition(self) -> float:
values = []
for i in self.motors:
values.append(i.getPosition())
return mean(values)
def getVelocity(self) -> float:
values = []
for i in self.motors:
values.append(i.getVelocity())
return mean(values)
| none | 1 | 3.205019 | 3 | |
02-basics/hello.py | sachinpr0001/data_science | 0 | 6615902 | def sayHi(name):
print("Hi" + name)
def sayHello(n1, n2):
    """Print a greeting assembled from the two given name parts."""
    greeting = "".join(["Hello", n1, n2])
    print(greeting)
| def sayHi(name):
print("Hi" + name)
def sayHello(n1,n2):
print("Hello" + n1 + n2)
| none | 1 | 3.077739 | 3 | |
venv/lib/python3.8/site-packages/pip/_internal/utils/glibc.py | Retraces/UkraineBot | 2 | 6615903 | /home/runner/.cache/pip/pool/18/cd/58/da15a439ffedba6c9218583e88d6dcee88a505042b8dccdbffbd39085f | /home/runner/.cache/pip/pool/18/cd/58/da15a439ffedba6c9218583e88d6dcee88a505042b8dccdbffbd39085f | none | 1 | 0.834759 | 1 | |
tests/fugue/workflow/test_runtime_exception.py | LaurentErreca/fugue | 0 | 6615904 | import pandas as pd
from fugue import FugueWorkflow
import sys
import traceback
from fugue.constants import (
FUGUE_CONF_WORKFLOW_EXCEPTION_HIDE,
FUGUE_CONF_WORKFLOW_EXCEPTION_OPTIMIZE,
)
def test_runtime_exception():
    """Traceback of a failing transformer is trimmed by default and kept
    full when trimming is disabled via configuration."""
    if sys.version_info < (3, 7):
        return

    def failing_transformer(df: pd.DataFrame) -> pd.DataFrame:
        raise Exception

    def display(df):
        df.show()

    def current_tb_depth():
        # Depth of the traceback of the exception currently being handled.
        return len(traceback.extract_tb(sys.exc_info()[2]))

    dag = FugueWorkflow()
    frame = dag.df([[0]], "a:int").transform(failing_transformer, schema="*")
    display(frame)

    # Default run: framework frames are hidden, traceback stays short.
    try:
        dag.run()
    except:
        assert current_tb_depth() < 10
    # With the trimming optimization off, the full traceback surfaces.
    try:
        dag.run("native", {FUGUE_CONF_WORKFLOW_EXCEPTION_OPTIMIZE: False})
    except:
        assert current_tb_depth() > 10
    # With an empty hide list, nothing is filtered either.
    try:
        dag.run("native", {FUGUE_CONF_WORKFLOW_EXCEPTION_HIDE: ""})
    except:
        assert current_tb_depth() > 10
def test_modified_exception():
    """Workflow errors chain the ``__modified_exception__`` planted in a
    calling frame as their ``__cause__``."""
    if sys.version_info < (3, 7):
        return
    def tr(df: pd.DataFrame) -> pd.DataFrame:
        # Transformer that always fails at runtime.
        raise Exception
    def show(df):
        df.show()
    def tt(df):
        # NOTE: the name ``__modified_exception__`` is magic - presumably
        # Fugue inspects the caller's locals for it and attaches it as the
        # exception cause (the assertion below confirms the effect), so
        # this variable is intentionally "unused".
        __modified_exception__ = NotImplementedError()
        return df.transform(tr, schema="*")
    dag = FugueWorkflow()
    df = dag.df([[0]], "a:int")
    df = tt(df)
    show(df)
    try:
        dag.run()
    except Exception as ex:
        # The NotImplementedError planted in tt() becomes the cause.
        assert isinstance(ex.__cause__, NotImplementedError)
| import pandas as pd
from fugue import FugueWorkflow
import sys
import traceback
from fugue.constants import (
FUGUE_CONF_WORKFLOW_EXCEPTION_HIDE,
FUGUE_CONF_WORKFLOW_EXCEPTION_OPTIMIZE,
)
def test_runtime_exception():
if sys.version_info < (3, 7):
return
def tr(df: pd.DataFrame) -> pd.DataFrame:
raise Exception
def show(df):
df.show()
dag = FugueWorkflow()
df = dag.df([[0]], "a:int")
df = df.transform(tr, schema="*")
show(df)
try:
dag.run()
except:
assert len(traceback.extract_tb(sys.exc_info()[2])) < 10
try:
dag.run("native", {FUGUE_CONF_WORKFLOW_EXCEPTION_OPTIMIZE: False})
except:
assert len(traceback.extract_tb(sys.exc_info()[2])) > 10
try:
dag.run("native", {FUGUE_CONF_WORKFLOW_EXCEPTION_HIDE: ""})
except:
assert len(traceback.extract_tb(sys.exc_info()[2])) > 10
def test_modified_exception():
if sys.version_info < (3, 7):
return
def tr(df: pd.DataFrame) -> pd.DataFrame:
raise Exception
def show(df):
df.show()
def tt(df):
__modified_exception__ = NotImplementedError()
return df.transform(tr, schema="*")
dag = FugueWorkflow()
df = dag.df([[0]], "a:int")
df = tt(df)
show(df)
try:
dag.run()
except Exception as ex:
assert isinstance(ex.__cause__, NotImplementedError)
| none | 1 | 2.239008 | 2 | |
code/calc_0.py | huaji0353/DanbooRegion | 0 | 6615905 | import cv2
## trick.py
def mk_resize(x, k):
    """Resize an image so its shorter side becomes ``32 * k`` pixels.

    The longer side is scaled to preserve the aspect ratio and then
    padded up/rounded to a multiple of 128 (via ``(v + 64) - (v + 64) % 128``).

    Args:
        x: Input image as a numpy array of shape (H, W[, C]).
        k: Scale unit; the shorter output side is ``32 * k``.

    Returns:
        The resized image.
    """
    # Note: the original also computed a k-based s0/s1 pair that was never
    # used; that dead code has been removed.
    if x.shape[0] < x.shape[1]:
        # Height is the shorter side.
        _s0 = 32 * k
        _s1 = int(x.shape[1] * (_s0 / x.shape[0]))
        _s1 = (_s1 + 64) - (_s1 + 64) % 128
    else:
        # Width is the shorter (or equal) side.
        _s1 = 32 * k
        _s0 = int(x.shape[0] * (_s1 / x.shape[1]))
        _s0 = (_s0 + 64) - (_s0 + 64) % 128
    # Downscaling -> INTER_AREA (anti-aliased); upscaling -> Lanczos.
    new_min = min(_s1, _s0)
    raw_min = min(x.shape[0], x.shape[1])
    if new_min < raw_min:
        interpolation = cv2.INTER_AREA
    else:
        interpolation = cv2.INTER_LANCZOS4
    return cv2.resize(x, (_s1, _s0), interpolation=interpolation)
def d_resize(x, d, fac=1.0):
    """Resize ``x`` to the (height, width) taken from ``d``, scaled by ``fac``.

    INTER_AREA is used when shrinking (target min side smaller than the
    source min side), INTER_LANCZOS4 otherwise.
    """
    target_h = int(d[0] * fac)
    target_w = int(d[1] * fac)
    shrinking = min(target_w, target_h) < min(x.shape[0], x.shape[1])
    interp = cv2.INTER_AREA if shrinking else cv2.INTER_LANCZOS4
    return cv2.resize(x, (target_w, target_h), interpolation=interp)
def min_resize(x, m):
    """Resize ``x`` so its shorter side becomes ``m``, keeping aspect ratio."""
    h, w = x.shape[0], x.shape[1]
    if h < w:
        out_h = m
        out_w = int(float(m) / float(h) * float(w))
    else:
        out_h = int(float(m) / float(w) * float(h))
        out_w = m
    # Area interpolation when shrinking, Lanczos when enlarging.
    shrinking = max(out_w, out_h) < max(h, w)
    interp = cv2.INTER_AREA if shrinking else cv2.INTER_LANCZOS4
    return cv2.resize(x, (out_w, out_h), interpolation=interp)
## model.py
from keras.layers import Conv2D, Activation, Input, Concatenate, LeakyReLU, Lambda, AveragePooling2D, UpSampling2D, Convolution2D, BatchNormalization, Deconvolution2D, Add
from keras.models import Model
import tensorflow as tf
def make_diff_net():
    """Build the skeleton-extraction U-Net for 512x512 RGB input.

    The encoder repeatedly halves resolution with ``dog`` (difference of a
    feature map and its down/up-sampled copy, Laplacian-pyramid style) and
    stores the lost detail; the decoder concatenates those residuals back
    in while upsampling.  Output is a single-channel map at input size.
    """
    def conv(x, filters, name):
        # 3x3 same-padding convolution, stride 1.
        return Conv2D(filters=filters, strides=(1, 1), kernel_size=(3, 3), padding='same', name=name)(x)
    def relu(x):
        return Activation('relu')(x)
    def lrelu(x):
        # NOTE(review): defined but never used in this network.
        return LeakyReLU(alpha=0.1)(x)
    def r_block(x, filters, name=None):
        # Two conv+ReLU layers at a fixed filter count.
        return relu(conv(relu(conv(x, filters, None if name is None else name + '_c1')), filters,
                         None if name is None else name + '_c2'))
    def cat(a, b):
        # Upsample a to b's resolution and concatenate along channels.
        return Concatenate()([UpSampling2D((2, 2))(a), b])
    def dog(x):
        # Downsample; keep the lost high-frequency detail as a residual.
        down = AveragePooling2D((2, 2))(x)
        up = UpSampling2D((2, 2))(down)
        diff = Lambda(lambda p: p[0] - p[1])([x, up])
        return down, diff
    ip = Input(shape=(512, 512, 3))
    # Encoder: 512 -> 16 px, doubling filters at each halving.
    c512 = r_block(ip, 16, 'c512')
    c256, l512 = dog(c512)
    c256 = r_block(c256, 32, 'c256')
    c128, l256 = dog(c256)
    c128 = r_block(c128, 64, 'c128')
    c64, l128 = dog(c128)
    c64 = r_block(c64, 128, 'c64')
    c32, l64 = dog(c64)
    c32 = r_block(c32, 256, 'c32')
    c16, l32 = dog(c32)
    c16 = r_block(c16, 512, 'c16')
    # Decoder: merge stored residuals back while upsampling to 512 px.
    d32 = cat(c16, l32)
    d32 = r_block(d32, 256, 'd32')
    d64 = cat(d32, l64)
    d64 = r_block(d64, 128, 'd64')
    d128 = cat(d64, l128)
    d128 = r_block(d128, 64, 'd128')
    d256 = cat(d128, l256)
    d256 = r_block(d256, 32, 'd256')
    d512 = cat(d256, l512)
    d512 = r_block(d512, 16, 'd512')
    op = conv(d512, 1, 'op')  # final single-channel prediction
    return Model(inputs=ip, outputs=op)
## ai.py
# Module-level TF1 session setup: builds the skeleton network and loads
# the SRCNN super-resolution network, pre-compiling both inference ops.
import numpy as np
import keras.backend as K
from keras.models import load_model
session = tf.Session()
K.set_session(session)
# Placeholder for a batch of HWC RGB images in the 0..255 range.
ip3 = tf.placeholder(dtype=tf.float32, shape=(None, None, None, 3))
vector = make_diff_net()
# 1 - prediction; presumably inverts the polarity of the output map — confirm.
vector_op = 1.0 - vector(ip3 / 255.0)
srcnn = load_model('srcnn.net')
pads = 7
# Reflect-pad before SRCNN, then crop the padding plus a 1px border away.
srcnn_op = srcnn(tf.pad(ip3 / 255.0, [[0, 0], [pads, pads], [pads, pads], [0, 0]], 'REFLECT'))[:, pads * 2:-pads * 2, pads * 2:-pads * 2, :][:, 1:-1, 1:-1, :] * 255.0
session.run(tf.global_variables_initializer())
print('begin load')
vector.load_weights('DanbooRegion2020UNet.net')
srcnn.load_weights('srcnn.net')
def go_vector(x):
    # Run the skeleton network on one HWC image (adds/strips the batch dim).
    return session.run(vector_op, feed_dict={
        ip3: x[None, :, :, :]
    })[0]
def go_srcnn(x):
    # Run the SRCNN super-resolution network on one HWC image.
    return session.run(srcnn_op, feed_dict={
        ip3: x[None, :, :, :]
    })[0]
### seg.py
# go_vector go_unet
def go_vector_all(x):
    """Test-time-augmented skeleton prediction.

    Averages ``go_vector`` over the four flip variants of the image, does
    the same on the transposed image, and averages both results back in
    the original orientation (8-way dihedral augmentation in total).
    """
    def _flip_average(img):
        # Average predictions over identity, left-right, up-down and
        # 180-degree flips, undoing each flip before averaging.
        a = go_vector(img)
        b = np.fliplr(go_vector(np.fliplr(img)))
        c = np.flipud(go_vector(np.flipud(img)))
        d = np.flipud(np.fliplr(go_vector(np.flipud(np.fliplr(img)))))
        return (a + b + c + d) / 4.0
    p = _flip_average(x)
    q = _flip_average(np.transpose(x, [1, 0, 2]))
    # Transpose q back before combining with p.
    return (p + np.transpose(q, [1, 0, 2])) / 2.0
def segment(image,sr_input=512,img_output=1024,ske_input=2**4):
    """Full pipeline: super-resolve the input, run the flip-averaged
    skeleton model, and return the uint8 skeleton map.

    Side effects: writes tmp_raw_img.png, tmp_mk_img.png and
    tmp_skeleton.npy to the working directory.
    """
    # Clean/upscale with SRCNN at a short side of sr_input pixels.
    raw_img = go_srcnn(min_resize(image, sr_input)).clip(0, 255).astype(np.uint8)
    cv2.imwrite('tmp_raw_img.png', raw_img)
    # Network input resized so the short side is 32 * ske_input.
    mk_img = mk_resize(raw_img, ske_input)
    cv2.imwrite('tmp_mk_img.png', mk_img)
    # Flip/transpose-averaged prediction, rescaled to the output resolution.
    skeleton = d_resize(go_vector_all(mk_img), min_resize(raw_img, img_output).shape) * 255.0
    np.save("tmp_skeleton.npy",skeleton)
    return skeleton.clip(0, 255).astype(np.uint8)
if __name__=='__main__':
    # CLI usage: python calc_0.py <image>  ->  writes <image>_skeleton.png
    import sys
    image = cv2.imread(sys.argv[1])
    skeleton = segment(image)
    cv2.imwrite(f'{sys.argv[1]}_skeleton.png', skeleton)
    print('ok!')
| import cv2
## trick.py
def mk_resize(x, k):
if x.shape[0] < x.shape[1]:
s0 = k
s1 = int(x.shape[1] * (k / x.shape[0]))
s1 = s1 - s1 % 128
_s0 = 32 * s0
_s1 = int(x.shape[1] * (_s0 / x.shape[0]))
_s1 = (_s1 + 64) - (_s1 + 64) % 128
else:
s1 = k
s0 = int(x.shape[0] * (k / x.shape[1]))
s0 = s0 - s0 % 128
_s1 = 32 * s1
_s0 = int(x.shape[0] * (_s1 / x.shape[1]))
_s0 = (_s0 + 64) - (_s0 + 64) % 128
new_min = min(_s1, _s0)
raw_min = min(x.shape[0], x.shape[1])
if new_min < raw_min:
interpolation = cv2.INTER_AREA
else:
interpolation = cv2.INTER_LANCZOS4
y = cv2.resize(x, (_s1, _s0), interpolation=interpolation)
return y
def d_resize(x, d, fac=1.0):
new_min = min(int(d[1] * fac), int(d[0] * fac))
raw_min = min(x.shape[0], x.shape[1])
if new_min < raw_min:
interpolation = cv2.INTER_AREA
else:
interpolation = cv2.INTER_LANCZOS4
y = cv2.resize(x, (int(d[1] * fac), int(d[0] * fac)), interpolation=interpolation)
return y
def min_resize(x, m):
if x.shape[0] < x.shape[1]:
s0 = m
s1 = int(float(m) / float(x.shape[0]) * float(x.shape[1]))
else:
s0 = int(float(m) / float(x.shape[1]) * float(x.shape[0]))
s1 = m
new_max = max(s1, s0)
raw_max = max(x.shape[0], x.shape[1])
if new_max < raw_max:
interpolation = cv2.INTER_AREA
else:
interpolation = cv2.INTER_LANCZOS4
y = cv2.resize(x, (s1, s0), interpolation=interpolation)
return y
## model.py
from keras.layers import Conv2D, Activation, Input, Concatenate, LeakyReLU, Lambda, AveragePooling2D, UpSampling2D, Convolution2D, BatchNormalization, Deconvolution2D, Add
from keras.models import Model
import tensorflow as tf
def make_diff_net():
def conv(x, filters, name):
return Conv2D(filters=filters, strides=(1, 1), kernel_size=(3, 3), padding='same', name=name)(x)
def relu(x):
return Activation('relu')(x)
def lrelu(x):
return LeakyReLU(alpha=0.1)(x)
def r_block(x, filters, name=None):
return relu(conv(relu(conv(x, filters, None if name is None else name + '_c1')), filters,
None if name is None else name + '_c2'))
def cat(a, b):
return Concatenate()([UpSampling2D((2, 2))(a), b])
def dog(x):
down = AveragePooling2D((2, 2))(x)
up = UpSampling2D((2, 2))(down)
diff = Lambda(lambda p: p[0] - p[1])([x, up])
return down, diff
ip = Input(shape=(512, 512, 3))
c512 = r_block(ip, 16, 'c512')
c256, l512 = dog(c512)
c256 = r_block(c256, 32, 'c256')
c128, l256 = dog(c256)
c128 = r_block(c128, 64, 'c128')
c64, l128 = dog(c128)
c64 = r_block(c64, 128, 'c64')
c32, l64 = dog(c64)
c32 = r_block(c32, 256, 'c32')
c16, l32 = dog(c32)
c16 = r_block(c16, 512, 'c16')
d32 = cat(c16, l32)
d32 = r_block(d32, 256, 'd32')
d64 = cat(d32, l64)
d64 = r_block(d64, 128, 'd64')
d128 = cat(d64, l128)
d128 = r_block(d128, 64, 'd128')
d256 = cat(d128, l256)
d256 = r_block(d256, 32, 'd256')
d512 = cat(d256, l512)
d512 = r_block(d512, 16, 'd512')
op = conv(d512, 1, 'op')
return Model(inputs=ip, outputs=op)
## ai.py
# go_xx xx_op
import numpy as np
import keras.backend as K
from keras.models import load_model
session = tf.Session()
K.set_session(session)
# placeholder & network
ip3 = tf.placeholder(dtype=tf.float32, shape=(None, None, None, 3))
vector = make_diff_net()
vector_op = 1.0 - vector(ip3 / 255.0)
srcnn = load_model('srcnn.net')
pads = 7
srcnn_op = srcnn(tf.pad(ip3 / 255.0, [[0, 0], [pads, pads], [pads, pads], [0, 0]], 'REFLECT'))[:, pads * 2:-pads * 2, pads * 2:-pads * 2, :][:, 1:-1, 1:-1, :] * 255.0
session.run(tf.global_variables_initializer())
print('begin load')
vector.load_weights('DanbooRegion2020UNet.net')
srcnn.load_weights('srcnn.net')
def go_vector(x):
return session.run(vector_op, feed_dict={
ip3: x[None, :, :, :]
})[0]
def go_srcnn(x):
return session.run(srcnn_op, feed_dict={
ip3: x[None, :, :, :]
})[0]
### seg.py
# go_vector go_unet
def go_vector_all(x):
a = go_vector(x)
b = np.fliplr(go_vector(np.fliplr(x)))
c = np.flipud(go_vector(np.flipud(x)))
d = np.flipud(np.fliplr(go_vector(np.flipud(np.fliplr(x)))))
p = (a + b + c + d) / 4.0
x = np.transpose(x, [1, 0, 2])
a = go_vector(x)
b = np.fliplr(go_vector(np.fliplr(x)))
c = np.flipud(go_vector(np.flipud(x)))
d = np.flipud(np.fliplr(go_vector(np.flipud(np.fliplr(x)))))
q = (a + b + c + d) / 4.0
return (p + np.transpose(q, [1, 0, 2])) / 2.0
def segment(image,sr_input=512,img_output=1024,ske_input=2**4):
# go_srcnn
raw_img = go_srcnn(min_resize(image, sr_input)).clip(0, 255).astype(np.uint8)
cv2.imwrite('tmp_raw_img.png', raw_img)
mk_img = mk_resize(raw_img, ske_input)
cv2.imwrite('tmp_mk_img.png', mk_img)
# 512 go_vector go_vector_all
skeleton = d_resize(go_vector_all(mk_img), min_resize(raw_img, img_output).shape) * 255.0
np.save("tmp_skeleton.npy",skeleton)
return skeleton.clip(0, 255).astype(np.uint8)
if __name__=='__main__':
import sys
image = cv2.imread(sys.argv[1])
skeleton = segment(image)
cv2.imwrite(f'{sys.argv[1]}_skeleton.png', skeleton)
print('ok!')
| en | 0.166255 | ## trick.py ## model.py ## ai.py # go_xx xx_op # placeholder & network ### seg.py # go_vector go_unet # go_srcnn # 512 go_vector go_vector_all | 2.694292 | 3 |
imodels/rule_set/__init__.py | stjordanis/imodels | 598 | 6615906 | '''Generic class for models that take the form of a set of (potentially overlapping) rules.
''' | '''Generic class for models that take the form of a set of (potentially overlapping) rules.
''' | en | 0.974477 | Generic class for models that take the form of a set of (potentially overlapping) rules. | 1.225432 | 1 |
kotlin/kotlin_repositories.bzl | pubref/rules_kotlin | 179 | 6615907 | load("//kotlin:java_import_external.bzl", "java_import_external")
KOTLIN_BUILD = """
package(default_visibility = ["//visibility:public"])
filegroup(
name = "home",
srcs = glob(["lib/*.jar"]),
)
java_import(
name = "runtime",
jars = ["lib/kotlin-runtime.jar"],
)
java_import(
name = "stdlib",
jars = ["lib/kotlin-stdlib.jar"],
)
java_import(
name = "compiler",
jars = ["lib/kotlin-compiler.jar"],
)
java_import(
name = "preloader",
jars = ["lib/kotlin-preloader.jar"],
)
java_import(
name = "test",
jars = ["lib/kotlin-test.jar"],
)
sh_binary(
name = "kotlin",
srcs = ["bin/kotlin"],
)
sh_binary(
name = "kotlinc",
srcs = ["bin/kotlinc"],
)
exports_files(["src"])
"""
def kotlin_repositories(
com_github_jetbrains_kotlin_url = "https://github.com/JetBrains/kotlin/releases/download/v1.2.0/kotlin-compiler-1.2.0.zip",
com_github_jetbrains_kotlin_sha256 = "895d0f8286db3e4f43d67cd5e09b600af6e0a5017cb74072d1b09c78b697775a",
omit_com_github_jetbrains_kotlin = False,
#omit_com_google_protobuf = False,
#omit_com_google_protobuf_java = False,
omit_com_google_protobuf_protobuf_java = False,
omit_javax_inject = False,
omit_com_google_errorprone_error_prone_annotations = False,
omit_com_google_code_findbugs_jsr305 = False,
omit_com_google_guava = False,
omit_com_google_dagger = False,
omit_com_google_dagger_compiler = False,
omit_com_google_dagger_producers = False,
):
if not omit_com_github_jetbrains_kotlin:
native.new_http_archive(
name = "com_github_jetbrains_kotlin",
url = com_github_jetbrains_kotlin_url,
sha256 = com_github_jetbrains_kotlin_sha256,
build_file_content = KOTLIN_BUILD,
strip_prefix = "kotlinc",
)
# UN-COMMENT these to build native worker proto from source.
# if not omit_com_google_protobuf:
# proto_library rules implicitly depend on @com_google_protobuf//:protoc,
# which is the proto-compiler.
# This statement defines the @com_google_protobuf repo.
# native.http_archive(
# name = "com_google_protobuf",
# urls = ["https://github.com/google/protobuf/archive/a6189acd18b00611c1dc7042299ad75486f08a1a.zip"],
# strip_prefix = "protobuf-a6189acd18b00611c1dc7042299ad75486f08a1a",
# sha256 = "102b5024120215c5a34ad23d9dd459e8ccc37dc3ef4c73d466ab802b6e3e9512",
# )
# if not omit_com_google_protobuf_java:
# # java_proto_library rules implicitly depend on @com_google_protobuf_java//:java_toolchain,
# # which is the Java proto runtime (base classes and common utilities).
# native.http_archive(
# name = "com_google_protobuf_java",
# urls = ["https://github.com/google/protobuf/archive/a6189acd18b00611c1dc7042299ad75486f08a1a.zip"],
# strip_prefix = "protobuf-a6189acd18b00611c1dc7042299ad75486f08a1a",
# sha256 = "102b5024120215c5a34ad23d9dd459e8ccc37dc3ef4c73d466ab802b6e3e9512",
# )
if not omit_com_google_protobuf_protobuf_java:
native.maven_jar(
name = "com_google_protobuf_protobuf_java",
artifact = "com.google.protobuf:protobuf-java:3.4.0",
sha1 = "b32aba0cbe737a4ca953f71688725972e3ee927c",
)
if not omit_javax_inject:
java_import_external(
name = "javax_inject",
licenses = ["notice"], # Apache 2.0
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/javax/inject/javax.inject/1/javax.inject-1.jar",
"http://repo1.maven.org/maven2/javax/inject/javax.inject/1/javax.inject-1.jar",
"http://maven.ibiblio.org/maven2/javax/inject/javax.inject/1/javax.inject-1.jar",
],
jar_sha256 = "91c77044a50c481636c32d916fd89c9118a72195390452c81065080f957de7ff",
)
if not omit_com_google_errorprone_error_prone_annotations:
java_import_external(
name = "com_google_errorprone_annotations",
licenses = ["notice"], # Apache 2.0
jar_sha256 = "e7749ffdf03fb8ebe08a727ea205acb301c8791da837fee211b99b04f9d79c46",
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.0.15/error_prone_annotations-2.0.15.jar",
"http://maven.ibiblio.org/maven2/com/google/errorprone/error_prone_annotations/2.0.15/error_prone_annotations-2.0.15.jar",
"http://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.0.15/error_prone_annotations-2.0.15.jar",
],
)
if not omit_com_google_code_findbugs_jsr305:
java_import_external(
name = "com_google_code_findbugs",
licenses = ["notice"], # BSD 3-clause
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar",
"http://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar",
"http://maven.ibiblio.org/maven2/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar",
],
jar_sha256 = "905721a0eea90a81534abb7ee6ef4ea2e5e645fa1def0a5cd88402df1b46c9ed",
)
if not omit_com_google_guava:
java_import_external(
name = "com_google_guava",
licenses = ["notice"], # Apache 2.0
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/guava/guava/20.0/guava-20.0.jar",
"http://repo1.maven.org/maven2/com/google/guava/guava/20.0/guava-20.0.jar",
"http://maven.ibiblio.org/maven2/com/google/guava/guava/20.0/guava-20.0.jar",
],
jar_sha256 = "36a666e3b71ae7f0f0dca23654b67e086e6c93d192f60ba5dfd5519db6c288c8",
deps = [
"@com_google_code_findbugs",
"@com_google_errorprone_annotations",
],
)
if not omit_com_google_dagger:
java_import_external(
name = "com_google_dagger",
jar_sha256 = "8b7806518bed270950002158934fbd8281725ee09909442f2f22b58520b667a7",
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/dagger/dagger/2.9/dagger-2.9.jar",
"http://repo1.maven.org/maven2/com/google/dagger/dagger/2.9/dagger-2.9.jar",
],
licenses = ["notice"], # Apache 2.0
deps = ["@javax_inject"],
generated_rule_name = "runtime",
extra_build_file_content = "\n".join([
"java_library(",
" name = \"com_google_dagger\",",
" exported_plugins = [\"@com_google_dagger_compiler//:ComponentProcessor\"],",
" exports = [",
" \":runtime\",",
" \"@javax_inject\",",
" ],",
")",
]),
)
if not omit_com_google_dagger_compiler:
java_import_external(
name = "com_google_dagger_compiler",
jar_sha256 = "afe356def27710db5b60cad8e7a6c06510dc3d3b854f30397749cbf0d0e71315",
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/dagger/dagger-compiler/2.9/dagger-compiler-2.9.jar",
"http://repo1.maven.org/maven2/com/google/dagger/dagger-compiler/2.9/dagger-compiler-2.9.jar",
],
licenses = ["notice"], # Apache 2.0
deps = [
"@com_google_code_findbugs",
"@com_google_dagger//:runtime",
"@com_google_dagger_producers//:runtime",
"@com_google_guava",
],
extra_build_file_content = "\n".join([
"java_plugin(",
" name = \"ComponentProcessor\",",
# TODO(jart): https://github.com/bazelbuild/bazel/issues/2286
# " output_licenses = [\"unencumbered\"],",
" processor_class = \"dagger.internal.codegen.ComponentProcessor\",",
" generates_api = 1,",
" tags = [",
" \"annotation=dagger.Component;genclass=${package}.Dagger${outerclasses}${classname}\",",
" \"annotation=dagger.producers.ProductionComponent;genclass=${package}.Dagger${outerclasses}${classname}\",",
" ],",
" deps = [\":com_google_dagger_compiler\"],",
")",
]),
)
if not omit_com_google_dagger_producers:
java_import_external(
name = "com_google_dagger_producers",
jar_sha256 = "b452dc1b95dd02f6272e97b15d1bd35d92b5f484a7d69bb73887b6c6699d8843",
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/dagger/dagger-producers/2.9/dagger-producers-2.9.jar",
"http://repo1.maven.org/maven2/com/google/dagger/dagger-producers/2.9/dagger-producers-2.9.jar",
],
licenses = ["notice"], # Apache 2.0
deps = [
"@com_google_dagger//:runtime",
"@com_google_guava",
],
generated_rule_name = "runtime",
extra_build_file_content = "\n".join([
"java_library(",
" name = \"com_google_dagger_producers\",",
" exported_plugins = [\"@com_google_dagger_compiler//:ComponentProcessor\"],",
" exports = [",
" \":runtime\",",
" \"@com_google_dagger//:runtime\",",
" \"@javax_inject\",",
" ],",
")",
]),
)
| load("//kotlin:java_import_external.bzl", "java_import_external")
KOTLIN_BUILD = """
package(default_visibility = ["//visibility:public"])
filegroup(
name = "home",
srcs = glob(["lib/*.jar"]),
)
java_import(
name = "runtime",
jars = ["lib/kotlin-runtime.jar"],
)
java_import(
name = "stdlib",
jars = ["lib/kotlin-stdlib.jar"],
)
java_import(
name = "compiler",
jars = ["lib/kotlin-compiler.jar"],
)
java_import(
name = "preloader",
jars = ["lib/kotlin-preloader.jar"],
)
java_import(
name = "test",
jars = ["lib/kotlin-test.jar"],
)
sh_binary(
name = "kotlin",
srcs = ["bin/kotlin"],
)
sh_binary(
name = "kotlinc",
srcs = ["bin/kotlinc"],
)
exports_files(["src"])
"""
def kotlin_repositories(
com_github_jetbrains_kotlin_url = "https://github.com/JetBrains/kotlin/releases/download/v1.2.0/kotlin-compiler-1.2.0.zip",
com_github_jetbrains_kotlin_sha256 = "895d0f8286db3e4f43d67cd5e09b600af6e0a5017cb74072d1b09c78b697775a",
omit_com_github_jetbrains_kotlin = False,
#omit_com_google_protobuf = False,
#omit_com_google_protobuf_java = False,
omit_com_google_protobuf_protobuf_java = False,
omit_javax_inject = False,
omit_com_google_errorprone_error_prone_annotations = False,
omit_com_google_code_findbugs_jsr305 = False,
omit_com_google_guava = False,
omit_com_google_dagger = False,
omit_com_google_dagger_compiler = False,
omit_com_google_dagger_producers = False,
):
if not omit_com_github_jetbrains_kotlin:
native.new_http_archive(
name = "com_github_jetbrains_kotlin",
url = com_github_jetbrains_kotlin_url,
sha256 = com_github_jetbrains_kotlin_sha256,
build_file_content = KOTLIN_BUILD,
strip_prefix = "kotlinc",
)
# UN-COMMENT these to build native worker proto from source.
# if not omit_com_google_protobuf:
# proto_library rules implicitly depend on @com_google_protobuf//:protoc,
# which is the proto-compiler.
# This statement defines the @com_google_protobuf repo.
# native.http_archive(
# name = "com_google_protobuf",
# urls = ["https://github.com/google/protobuf/archive/a6189acd18b00611c1dc7042299ad75486f08a1a.zip"],
# strip_prefix = "protobuf-a6189acd18b00611c1dc7042299ad75486f08a1a",
# sha256 = "102b5024120215c5a34ad23d9dd459e8ccc37dc3ef4c73d466ab802b6e3e9512",
# )
# if not omit_com_google_protobuf_java:
# # java_proto_library rules implicitly depend on @com_google_protobuf_java//:java_toolchain,
# # which is the Java proto runtime (base classes and common utilities).
# native.http_archive(
# name = "com_google_protobuf_java",
# urls = ["https://github.com/google/protobuf/archive/a6189acd18b00611c1dc7042299ad75486f08a1a.zip"],
# strip_prefix = "protobuf-a6189acd18b00611c1dc7042299ad75486f08a1a",
# sha256 = "102b5024120215c5a34ad23d9dd459e8ccc37dc3ef4c73d466ab802b6e3e9512",
# )
if not omit_com_google_protobuf_protobuf_java:
native.maven_jar(
name = "com_google_protobuf_protobuf_java",
artifact = "com.google.protobuf:protobuf-java:3.4.0",
sha1 = "b32aba0cbe737a4ca953f71688725972e3ee927c",
)
if not omit_javax_inject:
java_import_external(
name = "javax_inject",
licenses = ["notice"], # Apache 2.0
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/javax/inject/javax.inject/1/javax.inject-1.jar",
"http://repo1.maven.org/maven2/javax/inject/javax.inject/1/javax.inject-1.jar",
"http://maven.ibiblio.org/maven2/javax/inject/javax.inject/1/javax.inject-1.jar",
],
jar_sha256 = "91c77044a50c481636c32d916fd89c9118a72195390452c81065080f957de7ff",
)
if not omit_com_google_errorprone_error_prone_annotations:
java_import_external(
name = "com_google_errorprone_annotations",
licenses = ["notice"], # Apache 2.0
jar_sha256 = "e7749ffdf03fb8ebe08a727ea205acb301c8791da837fee211b99b04f9d79c46",
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.0.15/error_prone_annotations-2.0.15.jar",
"http://maven.ibiblio.org/maven2/com/google/errorprone/error_prone_annotations/2.0.15/error_prone_annotations-2.0.15.jar",
"http://repo1.maven.org/maven2/com/google/errorprone/error_prone_annotations/2.0.15/error_prone_annotations-2.0.15.jar",
],
)
if not omit_com_google_code_findbugs_jsr305:
java_import_external(
name = "com_google_code_findbugs",
licenses = ["notice"], # BSD 3-clause
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar",
"http://repo1.maven.org/maven2/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar",
"http://maven.ibiblio.org/maven2/com/google/code/findbugs/jsr305/1.3.9/jsr305-1.3.9.jar",
],
jar_sha256 = "905721a0eea90a81534abb7ee6ef4ea2e5e645fa1def0a5cd88402df1b46c9ed",
)
if not omit_com_google_guava:
java_import_external(
name = "com_google_guava",
licenses = ["notice"], # Apache 2.0
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/guava/guava/20.0/guava-20.0.jar",
"http://repo1.maven.org/maven2/com/google/guava/guava/20.0/guava-20.0.jar",
"http://maven.ibiblio.org/maven2/com/google/guava/guava/20.0/guava-20.0.jar",
],
jar_sha256 = "36a666e3b71ae7f0f0dca23654b67e086e6c93d192f60ba5dfd5519db6c288c8",
deps = [
"@com_google_code_findbugs",
"@com_google_errorprone_annotations",
],
)
if not omit_com_google_dagger:
java_import_external(
name = "com_google_dagger",
jar_sha256 = "8b7806518bed270950002158934fbd8281725ee09909442f2f22b58520b667a7",
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/dagger/dagger/2.9/dagger-2.9.jar",
"http://repo1.maven.org/maven2/com/google/dagger/dagger/2.9/dagger-2.9.jar",
],
licenses = ["notice"], # Apache 2.0
deps = ["@javax_inject"],
generated_rule_name = "runtime",
extra_build_file_content = "\n".join([
"java_library(",
" name = \"com_google_dagger\",",
" exported_plugins = [\"@com_google_dagger_compiler//:ComponentProcessor\"],",
" exports = [",
" \":runtime\",",
" \"@javax_inject\",",
" ],",
")",
]),
)
if not omit_com_google_dagger_compiler:
java_import_external(
name = "com_google_dagger_compiler",
jar_sha256 = "afe356def27710db5b60cad8e7a6c06510dc3d3b854f30397749cbf0d0e71315",
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/dagger/dagger-compiler/2.9/dagger-compiler-2.9.jar",
"http://repo1.maven.org/maven2/com/google/dagger/dagger-compiler/2.9/dagger-compiler-2.9.jar",
],
licenses = ["notice"], # Apache 2.0
deps = [
"@com_google_code_findbugs",
"@com_google_dagger//:runtime",
"@com_google_dagger_producers//:runtime",
"@com_google_guava",
],
extra_build_file_content = "\n".join([
"java_plugin(",
" name = \"ComponentProcessor\",",
# TODO(jart): https://github.com/bazelbuild/bazel/issues/2286
# " output_licenses = [\"unencumbered\"],",
" processor_class = \"dagger.internal.codegen.ComponentProcessor\",",
" generates_api = 1,",
" tags = [",
" \"annotation=dagger.Component;genclass=${package}.Dagger${outerclasses}${classname}\",",
" \"annotation=dagger.producers.ProductionComponent;genclass=${package}.Dagger${outerclasses}${classname}\",",
" ],",
" deps = [\":com_google_dagger_compiler\"],",
")",
]),
)
if not omit_com_google_dagger_producers:
java_import_external(
name = "com_google_dagger_producers",
jar_sha256 = "b452dc1b95dd02f6272e97b15d1bd35d92b5f484a7d69bb73887b6c6699d8843",
jar_urls = [
"http://bazel-mirror.storage.googleapis.com/repo1.maven.org/maven2/com/google/dagger/dagger-producers/2.9/dagger-producers-2.9.jar",
"http://repo1.maven.org/maven2/com/google/dagger/dagger-producers/2.9/dagger-producers-2.9.jar",
],
licenses = ["notice"], # Apache 2.0
deps = [
"@com_google_dagger//:runtime",
"@com_google_guava",
],
generated_rule_name = "runtime",
extra_build_file_content = "\n".join([
"java_library(",
" name = \"com_google_dagger_producers\",",
" exported_plugins = [\"@com_google_dagger_compiler//:ComponentProcessor\"],",
" exports = [",
" \":runtime\",",
" \"@com_google_dagger//:runtime\",",
" \"@javax_inject\",",
" ],",
")",
]),
)
| en | 0.519588 | package(default_visibility = ["//visibility:public"]) filegroup( name = "home", srcs = glob(["lib/*.jar"]), ) java_import( name = "runtime", jars = ["lib/kotlin-runtime.jar"], ) java_import( name = "stdlib", jars = ["lib/kotlin-stdlib.jar"], ) java_import( name = "compiler", jars = ["lib/kotlin-compiler.jar"], ) java_import( name = "preloader", jars = ["lib/kotlin-preloader.jar"], ) java_import( name = "test", jars = ["lib/kotlin-test.jar"], ) sh_binary( name = "kotlin", srcs = ["bin/kotlin"], ) sh_binary( name = "kotlinc", srcs = ["bin/kotlinc"], ) exports_files(["src"]) #omit_com_google_protobuf = False, #omit_com_google_protobuf_java = False, # UN-COMMENT these to build native worker proto from source. # if not omit_com_google_protobuf: # proto_library rules implicitly depend on @com_google_protobuf//:protoc, # which is the proto-compiler. # This statement defines the @com_google_protobuf repo. # native.http_archive( # name = "com_google_protobuf", # urls = ["https://github.com/google/protobuf/archive/a6189acd18b00611c1dc7042299ad75486f08a1a.zip"], # strip_prefix = "protobuf-a6189acd18b00611c1dc7042299ad75486f08a1a", # sha256 = "102b5024120215c5a34ad23d9dd459e8ccc37dc3ef4c73d466ab802b6e3e9512", # ) # if not omit_com_google_protobuf_java: # # java_proto_library rules implicitly depend on @com_google_protobuf_java//:java_toolchain, # # which is the Java proto runtime (base classes and common utilities). # native.http_archive( # name = "com_google_protobuf_java", # urls = ["https://github.com/google/protobuf/archive/a6189acd18b00611c1dc7042299ad75486f08a1a.zip"], # strip_prefix = "protobuf-a6189acd18b00611c1dc7042299ad75486f08a1a", # sha256 = "102b5024120215c5a34ad23d9dd459e8ccc37dc3ef4c73d466ab802b6e3e9512", # ) # Apache 2.0 # Apache 2.0 # BSD 3-clause # Apache 2.0 # Apache 2.0 # Apache 2.0 # TODO(jart): https://github.com/bazelbuild/bazel/issues/2286 # " output_licenses = [\"unencumbered\"],", # Apache 2.0 | 1.760234 | 2 |
TheCavesOfAntiquorum/main.py | lukesnc/TheCavesOfAntiquorum | 1 | 6615908 | <reponame>lukesnc/TheCavesOfAntiquorum
# The Caves of Antiquorum
# Author: <NAME>
# Main file / act 1
# includes
from TheCavesOfAntiquorum.helpers import clearScreen, inputError, startAct
from TheCavesOfAntiquorum import const, items
from time import sleep
import datetime
# Act 1 globals
uselessKey = 0 # 1 once the player has picked up the first key found on the ground
startRoom = 0 # Allows revisit of the room description for more clues
def furtherInTheHole():
    """Collapse the tunnel behind the player and move on to act 2.

    Persists the key flag first, plays the cave-in sequence, then hands
    control to startAct(2).  There is no way back from here.
    """
    writeKeyToSave()
    # (message, pause-in-seconds) pairs, played in order.
    script = [
        ("you step further down into the depths of the cave\n", 5),
        ("the ground begins to shake", 1),
        ("the walls around you tremble", 1),
        ("dust begins to fall from the cieling\n", 5),
        ("the rocks have fallen and closed the path behind you", 5),
    ]
    for line, pause in script:
        print(line)
        sleep(pause)
    input("there is only one way to go: forward...")
    startAct(2)
def writeKeyToSave():
    """Append the current uselessKey flag to the save file.

    Writes "USELESSKEY=1" when the player picked up the key, otherwise
    "USELESSKEY=0", then confirms to the player.  The handle is managed
    with a context manager so it is closed even if the write fails
    (the original leaked the handle on error).
    """
    # Append mode keeps the header written by createSave() intact.
    with open(const.SAVE_PATH, 'a') as save:
        save.write("USELESSKEY=1\n" if uselessKey == 1 else "USELESSKEY=0\n")
    print("\nSave file updated.\n")
    sleep(1)
def inspectDoor():
    """Let the player try the locked back door, then return to the path menu.

    The useless key (if held) does not fit; either way control goes back
    to paths().
    """
    sleep(1)
    print("you jiggle the knob")
    print("it requires a key\n")
    if uselessKey == 0:
        sleep(2)
    elif uselessKey == 1:
        # Holding the useless key: it still will not open the door.
        sleep(1)
        print("the key you have does not fit\n")
    else:
        return
    paths()
def throughHole():
    """Interactive tunnel sequence behind the dark hole.

    The player may press on (optionally picking up the useless key on the
    way) or retreat to the path menu.  Pressing on always ends in
    furtherInTheHole(), which locks the player into act 2.
    """
    global uselessKey
    sleep(1)
    print("the air gets noticeably colder, the howl ceases\n")
    sleep(1)
    print("it's very dark, you find yourself stumbling, however you can see a faint glow further on\n")
    sleep(1)
    print("keep going?\n")
    while True:
        print("keep going or go back")
        option = input("> ")
        if option == "keep going" and uselessKey == 0:
            # First pass without the key: offer it before continuing.
            sleep(1)
            print("you come to a bend in the tunneling, a key is stuck in the ground")
            sleep(1)
            print("pick it up?\n")
            while True:
                print("pick it up or keep going")
                option2 = input("> ")
                if option2 == "pick it up" or option2 == "pick up":
                    uselessKey = 1
                    print("picked up: " + items.UselessKey.name + "\n")
                    sleep(1)
                    # Key in hand: ask once more whether to continue or retreat.
                    while True:
                        print("keep going or go back")
                        option3 = input("> ")
                        if option3 == "keep going":
                            furtherInTheHole()
                            break
                        elif option3 == "back" or option3 == "go back":
                            paths()
                            break
                        inputError(option3)
                    break
                elif option2 == "keep going":
                    furtherInTheHole()
                    break
                inputError(option2)
            break
        elif option == "keep going" and uselessKey == 1:
            # Already holding the key: skip straight to the collapse scene.
            furtherInTheHole()
            break
        elif option == "go back" or option == "back":
            paths()
            break
        inputError(option)
def paths():
    """Describe the two exits of the starting room and dispatch on the
    player's choice (door inspection, the hole, or back to the room menu)."""
    print("the path in front of you seeps cold air, and appears as a dark hole in the wall, there is a faint howl\n")
    sleep(2)
    print("maybe wind, maybe beast\n")
    sleep(2)
    print("the path behind is shut by a door\n")
    # Accepted commands (long and short forms) mapped to their handlers.
    actions = {
        "inspect door": inspectDoor,
        "door": inspectDoor,
        "through hole": throughHole,
        "hole": throughHole,
        "back": roomsOrPaths,
    }
    while True:
        print("inspect door, through hole, back")
        choice = input("> ")
        handler = actions.get(choice)
        if handler is not None:
            handler()
            break
        inputError(choice)
def roomsOrPaths():
    """Top-level menu in the starting room: describe the paths or the room.

    The module-level startRoom flag switches the room description between
    the first look (ceiling painting revealed) and repeat looks.
    """
    global startRoom
    print("describe paths or room?\n")
    while True:
        print("paths or room")
        option = input("> ")
        if option == "room" and startRoom == 0:
            # First inspection: reveal the ceiling painting, then re-enter
            # this menu with the flag set.
            print("the room is barren")
            print("the ceiling depicts some artistic endeavor\n")
            sleep(3)
            print("you can barely make out the hieroglyphics\n")
            sleep(2)
            print("are those... teeth?\n")
            startRoom = 1
            roomsOrPaths()
            break
        elif option == "room" and startRoom == 1:
            # Repeat inspection: a darker detail of the same painting.
            sleep(1)
            print("you take another glance at the painting")
            sleep(2)
            print("you see a man bleeding\n")
            roomsOrPaths()
            break
        elif option == "paths":
            paths()
            break
        inputError(option)
def lightIt():
    """Describe the starting room once the lantern is lit, then open the
    room/paths menu."""
    intro = (
        "you get up and look around",
        "you're standing in the center of a stone room\n",
    )
    for line in intro:
        print(line)
    sleep(3)
    print("there are two openings in the room, one behind you and one in front\n")
    roomsOrPaths()
# Story begins
def beginStory():
    """Opening scene: the player wakes up and chooses whether to light the
    lantern.  Leaving it restarts from the title screen; lighting it
    proceeds to lightIt()."""
    print("you wake up, your head hurts\n")
    sleep(1)
    print("it's dark all around, but there is a small unlit lantern next to you, light it?\n")
    sleep(1)
    while True:
        print("leave it or light it")
        sleep(1)
        option = input("> ")
        if option == "leave it":
            # Refusing the lantern loops back to the title screen.
            start()
            break
        elif option == "light it":
            print("you're embraced by a comforting warmth\n")
            sleep(2)
            lightIt()
            break
        inputError(option)
def start():
    """Show the title screen, wait for ENTER, then hand control to the
    opening scene."""
    clearScreen()
    banner = "Welcome to The Caves of Antiquorum\nby <NAME> and <NAME>\n"
    print(banner)
    print('Type "quit" and press ENTER to exit the game at any time.')
    input("Press ENTER to begin the game...")
    clearScreen()
    sleep(2)
    beginStory()
def createSave():
    """Create (or overwrite) the save file with a fresh act-1 header.

    Records the creation timestamp, the current act, a death counter and
    an empty items section at const.SAVE_PATH.  The file handle is
    managed with a context manager so it is flushed and closed even if a
    write fails (the original leaked the handle on error).
    """
    print("Creating save file...")
    # "w+" truncates any previous save, matching the original behavior.
    with open(const.SAVE_PATH, "w+") as save:
        save.write("SAVE FILE FOR THE CAVES OF ANTIQUORUM\n")
        save.write("CREATED " + str(datetime.datetime.now()) + "\n\n")
        save.write("ACT=1\n")
        save.write("DEATHS=0\n")
        save.write("\n=======ITEMS=======\n\n")
    sleep(5)
    print("Done")
    sleep(.5)
# First function that's called, begins the program
def boot():
# Checks if previous save exists, if not creates one
saveMade = False # variable prevents double creation
try:
save = open(const.SAVE_PATH, 'r')
except:
createSave()
saveMade = True
# Jump to the different acts based on save
save = open(const.SAVE_PATH, 'r')
# Don't use .read() in multiple conditions make a var instead and check that
saveData = save.read()
if "ACT=2" in saveData:
save.close()
startAct(2)
elif "ACT=3" in saveData:
save.close()
startAct(3)
else: # Something in the save file is corrupt or haven't left act 1
save.close()
if saveMade == False:
createSave()
start()
boot()
| # The Caves of Antiquorum
# Author: <NAME>
# Main file / act 1
# includes
from TheCavesOfAntiquorum.helpers import clearScreen, inputError, startAct
from TheCavesOfAntiquorum import const, items
from time import sleep
import datetime
# Act 1 globals
uselessKey = 0 # First key found on the ground
startRoom = 0 # Allows revist of room for more clues
def furtherInTheHole():
writeKeyToSave()
print("you step further down into the depths of the cave\n")
sleep(5)
print("the ground begins to shake")
sleep(1)
print("the walls around you tremble")
sleep(1)
print("dust begins to fall from the cieling\n")
sleep(5)
print("the rocks have fallen and closed the path behind you")
sleep(5)
input("there is only one way to go: forward...")
startAct(2)
def writeKeyToSave():
# Write key to save
save = open(const.SAVE_PATH, 'a')
if uselessKey == 1:
save.write("USELESSKEY=1\n")
else:
save.write("USELESSKEY=0\n")
save.close()
print("\nSave file updated.\n")
sleep(1)
def inspectDoor():
sleep(1)
print("you jiggle the knob")
print("it requires a key\n")
if uselessKey == 0:
sleep(2)
paths()
elif uselessKey == 1:
sleep(1)
print("the key you have does not fit\n")
paths()
def throughHole():
global uselessKey
sleep(1)
print("the air gets noticeably colder, the howl ceases\n")
sleep(1)
print("it's very dark, you find yourself stumbling, however you can see a faint glow further on\n")
sleep(1)
print("keep going?\n")
while True:
print("keep going or go back")
option = input("> ")
if option == "keep going" and uselessKey == 0:
sleep(1)
print("you come to a bend in the tunneling, a key is stuck in the ground")
sleep(1)
print("pick it up?\n")
while True:
print("pick it up or keep going")
option2 = input("> ")
if option2 == "pick it up" or option2 == "pick up":
uselessKey = 1
print("picked up: " + items.UselessKey.name + "\n")
sleep(1)
while True:
print("keep going or go back")
option3 = input("> ")
if option3 == "keep going":
furtherInTheHole()
break
elif option3 == "back" or option3 == "go back":
paths()
break
inputError(option3)
break
elif option2 == "keep going":
furtherInTheHole()
break
inputError(option2)
break
elif option == "keep going" and uselessKey == 1:
furtherInTheHole()
break
elif option == "go back" or option == "back":
paths()
break
inputError(option)
def paths():
print("the path in front of you seeps cold air, and appears as a dark hole in the wall, there is a faint howl\n")
sleep(2)
print("maybe wind, maybe beast\n")
sleep(2)
print("the path behind is shut by a door\n")
while True:
print("inspect door, through hole, back")
option = input("> ")
if option == "inspect door" or option == "door":
inspectDoor()
break
elif option == "through hole" or option == "hole":
throughHole()
break
elif option == "back":
roomsOrPaths()
break
inputError(option)
def roomsOrPaths():
global startRoom
print("describe paths or room?\n")
while True:
print("paths or room")
option = input("> ")
if option == "room" and startRoom == 0:
print("the room is barren")
print("the ceiling depicts some artistic endeavor\n")
sleep(3)
print("you can barely make out the hieroglyphics\n")
sleep(2)
print("are those... teeth?\n")
startRoom = 1
roomsOrPaths()
break
elif option == "room" and startRoom == 1:
sleep(1)
print("you take another glance at the painting")
sleep(2)
print("you see a man bleeding\n")
roomsOrPaths()
break
elif option == "paths":
paths()
break
inputError(option)
def lightIt():
print("you get up and look around")
print("you're standing in the center of a stone room\n")
sleep(3)
print("there are two openings in the room, one behind you and one in front\n")
roomsOrPaths()
# Story begins
def beginStory():
print("you wake up, your head hurts\n")
sleep(1)
print("it's dark all around, but there is a small unlit lantern next to you, light it?\n")
sleep(1)
while True:
print("leave it or light it")
sleep(1)
option = input("> ")
if option == "leave it":
start()
break
elif option == "light it":
print("you're embraced by a comforting warmth\n")
sleep(2)
lightIt()
break
inputError(option)
def start():
clearScreen()
print("Welcome to The Caves of Antiquorum\nby <NAME> and <NAME>\n")
print("Type \"quit\" and press ENTER to exit the game at any time.")
input("Press ENTER to begin the game...")
clearScreen()
sleep(2)
beginStory()
# Creates a save file (or overwrites an existing one)
# Writes the time of creation and that the user is in act 1
def createSave():
print("Creating save file...")
save = open(const.SAVE_PATH, "w+")
save.write("SAVE FILE FOR THE CAVES OF ANTIQUORUM\n")
save.write("CREATED " + str(datetime.datetime.now()) + "\n\n")
save.write("ACT=1\n")
save.write("DEATHS=0\n")
save.write("\n=======ITEMS=======\n\n")
save.close()
sleep(5)
print("Done")
sleep(.5)
# First function that's called, begins the program
def boot():
# Checks if previous save exists, if not creates one
saveMade = False # variable prevents double creation
try:
save = open(const.SAVE_PATH, 'r')
except:
createSave()
saveMade = True
# Jump to the different acts based on save
save = open(const.SAVE_PATH, 'r')
# Don't use .read() in multiple conditions make a var instead and check that
saveData = save.read()
if "ACT=2" in saveData:
save.close()
startAct(2)
elif "ACT=3" in saveData:
save.close()
startAct(3)
else: # Something in the save file is corrupt or haven't left act 1
save.close()
if saveMade == False:
createSave()
start()
boot() | en | 0.900919 | # The Caves of Antiquorum # Author: <NAME> # Main file / act 1 # includes # Act 1 globals # First key found on the ground # Allows revist of room for more clues # Write key to save # Story begins # Creates a save file (or overwrites an existing one) # Writes the time of creation and that the user is in act 1 # First function that's called, begins the program # Checks if previous save exists, if not creates one # variable prevents double creation # Jump to the different acts based on save # Don't use .read() in multiple conditions make a var instead and check that # Something in the save file is corrupt or haven't left act 1 | 3.365438 | 3 |
tools/test_pifu.py | lingtengqiu/Open-PIFuhd | 191 | 6615909 | '''
@author:<NAME>
@name:Open-Pifu inference
'''
import sys
sys.path.append("./")
from opt import opt
from mmcv import Config
import os
from torch.utils.data import DataLoader
import torch.nn as nn
from engineer.datasets.loader.build_loader import train_loader_collate_fn,test_loader_collate_fn
from engineer.datasets.builder import build_dataset
from engineer.models.builder import build_model
from engineer.core.eval import test_epoch,inference
from utils import group_weight
from utils.logger import info_cfg,get_experiments_id,setup_test_logger
import torch
import torch.optim as optim
from utils.dataloader import build_dataloader
from utils.distributed import set_up_ddp,build_dpp_net,load_checkpoints
import torch.distributed as dist
from tqdm import tqdm
import logging
logger = logging.getLogger("logger.trainer")
if __name__ == "__main__":
args = opt
assert args.config is not None,"you must give your model config"
cfg = Config.fromfile(args.config)
if cfg.logger :
logger=setup_test_logger(cfg.name,rank= args.local_rank)
if args.dist:
logger.info("Using Distributed test!")
# env setup
set_up_ddp(cfg,args)
info_cfg(logger,cfg)
test_data_set = build_dataset(cfg.data.test)
test_dataloader = build_dataloader(test_data_set,cfg,args,phase='test')
logger.info("test data size:{}".format(len(test_data_set)))
#build model
model = build_model(cfg.model)
if args.dist == True:
#build distributed network
model = build_dpp_net(model)
else:
model = model.cuda()
#resume
checkpoints_path,gallery_id = get_experiments_id(cfg)
resume_path = os.path.join(checkpoints_path,"epoch_best.tar".format(args.current))
epoch = load_checkpoints(model,None,resume_path,args)
inference(model, cfg, args, test_dataloader, epoch,gallery_id['test'],len(test_data_set.subjects))
| '''
@author:<NAME>
@name:Open-Pifu inference
'''
import sys
sys.path.append("./")
from opt import opt
from mmcv import Config
import os
from torch.utils.data import DataLoader
import torch.nn as nn
from engineer.datasets.loader.build_loader import train_loader_collate_fn,test_loader_collate_fn
from engineer.datasets.builder import build_dataset
from engineer.models.builder import build_model
from engineer.core.eval import test_epoch,inference
from utils import group_weight
from utils.logger import info_cfg,get_experiments_id,setup_test_logger
import torch
import torch.optim as optim
from utils.dataloader import build_dataloader
from utils.distributed import set_up_ddp,build_dpp_net,load_checkpoints
import torch.distributed as dist
from tqdm import tqdm
import logging
logger = logging.getLogger("logger.trainer")
if __name__ == "__main__":
args = opt
assert args.config is not None,"you must give your model config"
cfg = Config.fromfile(args.config)
if cfg.logger :
logger=setup_test_logger(cfg.name,rank= args.local_rank)
if args.dist:
logger.info("Using Distributed test!")
# env setup
set_up_ddp(cfg,args)
info_cfg(logger,cfg)
test_data_set = build_dataset(cfg.data.test)
test_dataloader = build_dataloader(test_data_set,cfg,args,phase='test')
logger.info("test data size:{}".format(len(test_data_set)))
#build model
model = build_model(cfg.model)
if args.dist == True:
#build distributed network
model = build_dpp_net(model)
else:
model = model.cuda()
#resume
checkpoints_path,gallery_id = get_experiments_id(cfg)
resume_path = os.path.join(checkpoints_path,"epoch_best.tar".format(args.current))
epoch = load_checkpoints(model,None,resume_path,args)
inference(model, cfg, args, test_dataloader, epoch,gallery_id['test'],len(test_data_set.subjects))
| en | 0.546662 | @author:<NAME> @name:Open-Pifu inference # env setup #build model #build distributed network #resume | 1.847031 | 2 |
python/31_bfs_dfs/bfs_dfs.py | shipan3452/algo | 22,028 | 6615910 | <filename>python/31_bfs_dfs/bfs_dfs.py
"""
Breadth-first search and depth-first search.
Author: <NAME>
"""
from typing import List, Optional, Generator, IO
from collections import deque
class Graph:
"""Undirected graph."""
def __init__(self, num_vertices: int):
self._num_vertices = num_vertices
self._adjacency = [[] for _ in range(num_vertices)]
def add_edge(self, s: int, t: int) -> None:
self._adjacency[s].append(t)
self._adjacency[t].append(s)
def _generate_path(self, s: int, t: int, prev: List[Optional[int]]) -> Generator[str, None, None]:
if prev[t] or s != t:
yield from self._generate_path(s, prev[t], prev)
yield str(t)
def bfs(self, s: int, t: int) -> IO[str]:
"""Print out the path from Vertex s to Vertex t
using bfs.
"""
if s == t: return
visited = [False] * self._num_vertices
visited[s] = True
q = deque()
q.append(s)
prev = [None] * self._num_vertices
while q:
v = q.popleft()
for neighbour in self._adjacency[v]:
if not visited[neighbour]:
prev[neighbour] = v
if neighbour == t:
print("->".join(self._generate_path(s, t, prev)))
return
visited[neighbour] = True
q.append(neighbour)
def dfs(self, s: int, t: int) -> IO[str]:
"""Print out a path from Vertex s to Vertex t
using dfs.
"""
found = False
visited = [False] * self._num_vertices
prev = [None] * self._num_vertices
def _dfs(from_vertex: int) -> None:
nonlocal found
if found: return
visited[from_vertex] = True
if from_vertex == t:
found = True
return
for neighbour in self._adjacency[from_vertex]:
if not visited[neighbour]:
prev[neighbour] = from_vertex
_dfs(neighbour)
_dfs(s)
print("->".join(self._generate_path(s, t, prev)))
if __name__ == "__main__":
graph = Graph(8)
graph.add_edge(0, 1)
graph.add_edge(0, 3)
graph.add_edge(1, 2)
graph.add_edge(1, 4)
graph.add_edge(2, 5)
graph.add_edge(3, 4)
graph.add_edge(4, 5)
graph.add_edge(4, 6)
graph.add_edge(5, 7)
graph.add_edge(6, 7)
graph.bfs(0, 7)
graph.dfs(0, 7)
| <filename>python/31_bfs_dfs/bfs_dfs.py
"""
Breadth-first search and depth-first search.
Author: <NAME>
"""
from typing import List, Optional, Generator, IO
from collections import deque
class Graph:
"""Undirected graph."""
def __init__(self, num_vertices: int):
self._num_vertices = num_vertices
self._adjacency = [[] for _ in range(num_vertices)]
def add_edge(self, s: int, t: int) -> None:
self._adjacency[s].append(t)
self._adjacency[t].append(s)
def _generate_path(self, s: int, t: int, prev: List[Optional[int]]) -> Generator[str, None, None]:
if prev[t] or s != t:
yield from self._generate_path(s, prev[t], prev)
yield str(t)
def bfs(self, s: int, t: int) -> IO[str]:
"""Print out the path from Vertex s to Vertex t
using bfs.
"""
if s == t: return
visited = [False] * self._num_vertices
visited[s] = True
q = deque()
q.append(s)
prev = [None] * self._num_vertices
while q:
v = q.popleft()
for neighbour in self._adjacency[v]:
if not visited[neighbour]:
prev[neighbour] = v
if neighbour == t:
print("->".join(self._generate_path(s, t, prev)))
return
visited[neighbour] = True
q.append(neighbour)
def dfs(self, s: int, t: int) -> IO[str]:
"""Print out a path from Vertex s to Vertex t
using dfs.
"""
found = False
visited = [False] * self._num_vertices
prev = [None] * self._num_vertices
def _dfs(from_vertex: int) -> None:
nonlocal found
if found: return
visited[from_vertex] = True
if from_vertex == t:
found = True
return
for neighbour in self._adjacency[from_vertex]:
if not visited[neighbour]:
prev[neighbour] = from_vertex
_dfs(neighbour)
_dfs(s)
print("->".join(self._generate_path(s, t, prev)))
if __name__ == "__main__":
graph = Graph(8)
graph.add_edge(0, 1)
graph.add_edge(0, 3)
graph.add_edge(1, 2)
graph.add_edge(1, 4)
graph.add_edge(2, 5)
graph.add_edge(3, 4)
graph.add_edge(4, 5)
graph.add_edge(4, 6)
graph.add_edge(5, 7)
graph.add_edge(6, 7)
graph.bfs(0, 7)
graph.dfs(0, 7)
| en | 0.816374 | Breadth-first search and depth-first search. Author: <NAME> Undirected graph. Print out the path from Vertex s to Vertex t using bfs. Print out a path from Vertex s to Vertex t using dfs. | 3.62571 | 4 |
donkeycar/parts/fram.py | mituhiromatuura/my_donkeycar-v3 | 0 | 6615911 | import smbus
class Fram:
def __init__(self, addr=0x50):
self.i2c = smbus.SMBus(1)
self.addr = addr
def write(self, adr, val):
try:
self.i2c.write_word_data(self.addr, adr*2, int(val*1000))
except:
print('failed to Fram write!!')
def read(self, adr):
try:
val = self.i2c.read_word_data(self.addr, adr*2)
if val >= 0x8000:
val = val | ~0xffff
return val / 1000.0, True
except:
print('failed to Fram read!!')
return 0, False
if __name__ == "__main__":
fram = Fram()
'''
fram.write(0, -0.2)
fram.write(1, -0.4)
fram.write(2, -0.6)
fram.write(3, -0.8)
fram.write(4, -10)
'''
fram.write(0, 0.7)
fram.write(1, 1.0)
fram.write(2, 0.4)
fram.write(3, 0.5)
fram.write(4, -2)
print(fram.read(0))
print(fram.read(1))
print(fram.read(2))
print(fram.read(3))
print(int(fram.read(4)))
| import smbus
class Fram:
def __init__(self, addr=0x50):
self.i2c = smbus.SMBus(1)
self.addr = addr
def write(self, adr, val):
try:
self.i2c.write_word_data(self.addr, adr*2, int(val*1000))
except:
print('failed to Fram write!!')
def read(self, adr):
try:
val = self.i2c.read_word_data(self.addr, adr*2)
if val >= 0x8000:
val = val | ~0xffff
return val / 1000.0, True
except:
print('failed to Fram read!!')
return 0, False
if __name__ == "__main__":
fram = Fram()
'''
fram.write(0, -0.2)
fram.write(1, -0.4)
fram.write(2, -0.6)
fram.write(3, -0.8)
fram.write(4, -10)
'''
fram.write(0, 0.7)
fram.write(1, 1.0)
fram.write(2, 0.4)
fram.write(3, 0.5)
fram.write(4, -2)
print(fram.read(0))
print(fram.read(1))
print(fram.read(2))
print(fram.read(3))
print(int(fram.read(4)))
| en | 0.437064 | fram.write(0, -0.2) fram.write(1, -0.4) fram.write(2, -0.6) fram.write(3, -0.8) fram.write(4, -10) | 2.892846 | 3 |
listenclosely/strategies/first_free.py | jlmadurga/listenclosely | 7 | 6615912 | from listenclosely.strategies.base import BaseAgentStrategy
from listenclosely.models import Agent
class FirstFreeAgentStrategy(BaseAgentStrategy):
"""
Choose first free agent
"""
def free_agent(self):
free_agents = Agent.online.all()
if free_agents:
return free_agents[0]
return None | from listenclosely.strategies.base import BaseAgentStrategy
from listenclosely.models import Agent
class FirstFreeAgentStrategy(BaseAgentStrategy):
"""
Choose first free agent
"""
def free_agent(self):
free_agents = Agent.online.all()
if free_agents:
return free_agents[0]
return None | en | 0.473594 | Choose first free agent | 2.32438 | 2 |
Source/myfitness_tests/myfitness/healthdata/data.py | lizawood/Apple-Health-Fitness-Tracker | 0 | 6615913 | <reponame>lizawood/Apple-Health-Fitness-Tracker
class Person:
def __init__(self, name, age, gender):
"""Create a object of class Person()
Parameters: name, age, gender
Return: An object of class Person"""
# assign the name, age, and gender to the class object in initialization
self.name = name
try:
self.age = int(age)
except:
print("Age is an integer")
self.age = None
self.gender = gender
def display(self):
"""Display the name, age and gender of a Person() object
Parameters: Person() object
Return: name, age, and gender"""
# display the name, age, and gender of initialized object
return "Name: {}, Age: {}, Gender: {}". format(self.name, self.age, self.gender)
class healthdata(Person):
def __init__(self, name, age, gender, file =''):
"""Create a object of class healthdata() this inherits from the superclass Person()
Parameters: name, age, gender, file
Return: An object of class healthdata"""
# inherit Person class name, age, gender and assign to heathdata object
Person.__init__(self, name, age, gender)
# assign the file to the object in initialization
self.file = file
def data(self):
"""Import the file assigned to the healthdata() object into a dataframe and assign it to the healthdata() object
Parameters: healthdata() object initialized above
Return: Display of healthdata object attributes name, age, gender and dataframe containing healthdata() object file"""
import pandas as pd # ensure pandas is imported
try:
self.data = pd.read_csv(self.file) # import the self.file into a dataframe using pandas
except FileNotFoundError:
print("File does not exist")
return False
Person.display(self) #display object attributes using inherited display() finction
return self.data
| class Person:
def __init__(self, name, age, gender):
"""Create a object of class Person()
Parameters: name, age, gender
Return: An object of class Person"""
# assign the name, age, and gender to the class object in initialization
self.name = name
try:
self.age = int(age)
except:
print("Age is an integer")
self.age = None
self.gender = gender
def display(self):
"""Display the name, age and gender of a Person() object
Parameters: Person() object
Return: name, age, and gender"""
# display the name, age, and gender of initialized object
return "Name: {}, Age: {}, Gender: {}". format(self.name, self.age, self.gender)
class healthdata(Person):
def __init__(self, name, age, gender, file =''):
"""Create a object of class healthdata() this inherits from the superclass Person()
Parameters: name, age, gender, file
Return: An object of class healthdata"""
# inherit Person class name, age, gender and assign to heathdata object
Person.__init__(self, name, age, gender)
# assign the file to the object in initialization
self.file = file
def data(self):
"""Import the file assigned to the healthdata() object into a dataframe and assign it to the healthdata() object
Parameters: healthdata() object initialized above
Return: Display of healthdata object attributes name, age, gender and dataframe containing healthdata() object file"""
import pandas as pd # ensure pandas is imported
try:
self.data = pd.read_csv(self.file) # import the self.file into a dataframe using pandas
except FileNotFoundError:
print("File does not exist")
return False
Person.display(self) #display object attributes using inherited display() finction
return self.data | en | 0.576429 | Create a object of class Person() Parameters: name, age, gender Return: An object of class Person # assign the name, age, and gender to the class object in initialization Display the name, age and gender of a Person() object Parameters: Person() object Return: name, age, and gender # display the name, age, and gender of initialized object Create a object of class healthdata() this inherits from the superclass Person() Parameters: name, age, gender, file Return: An object of class healthdata # inherit Person class name, age, gender and assign to heathdata object # assign the file to the object in initialization Import the file assigned to the healthdata() object into a dataframe and assign it to the healthdata() object Parameters: healthdata() object initialized above Return: Display of healthdata object attributes name, age, gender and dataframe containing healthdata() object file # ensure pandas is imported # import the self.file into a dataframe using pandas #display object attributes using inherited display() finction | 4.277251 | 4 |
examples/simple.py | AYCHOtel/XrplWebsocket | 8 | 6615914 | <filename>examples/simple.py
#!/usr/bin/env python
# coding: utf-8
import json
from xrpl_websocket import Client
if __name__ == "__main__":
# create instance
client = Client()
# connect to the websocket
client.connect(nowait=False)
# send server info command
resp = client.send(command='server_info')
print("Server Info:")
print(json.dumps(resp, indent = 4))
# close the connection
client.disconnect()
| <filename>examples/simple.py
#!/usr/bin/env python
# coding: utf-8
import json
from xrpl_websocket import Client
if __name__ == "__main__":
# create instance
client = Client()
# connect to the websocket
client.connect(nowait=False)
# send server info command
resp = client.send(command='server_info')
print("Server Info:")
print(json.dumps(resp, indent = 4))
# close the connection
client.disconnect()
| en | 0.531864 | #!/usr/bin/env python # coding: utf-8 # create instance # connect to the websocket # send server info command # close the connection | 2.671638 | 3 |
tests/acceptance/test_lib.py | peopledoc/septentrion | 7 | 6615915 | <filename>tests/acceptance/test_lib.py
import septentrion
from septentrion import configuration
from septentrion import db as db_module
def test_migrate(db):
settings_kwargs = {
# database connection settings
"host": db["host"],
"port": db["port"],
"username": db["user"],
"dbname": db["dbname"],
# migrate settings
"target_version": "1.1",
"migrations_root": "example_migrations",
}
septentrion.migrate(**settings_kwargs)
settings = configuration.Settings(**settings_kwargs)
assert db_module.is_schema_initialized(settings=settings)
assert (
db_module.get_current_schema_version(settings=settings).original_string == "1.1"
)
| <filename>tests/acceptance/test_lib.py
import septentrion
from septentrion import configuration
from septentrion import db as db_module
def test_migrate(db):
settings_kwargs = {
# database connection settings
"host": db["host"],
"port": db["port"],
"username": db["user"],
"dbname": db["dbname"],
# migrate settings
"target_version": "1.1",
"migrations_root": "example_migrations",
}
septentrion.migrate(**settings_kwargs)
settings = configuration.Settings(**settings_kwargs)
assert db_module.is_schema_initialized(settings=settings)
assert (
db_module.get_current_schema_version(settings=settings).original_string == "1.1"
)
| en | 0.632495 | # database connection settings # migrate settings | 2.354232 | 2 |
Curso/Python 3/Aulas/aula07.py | RochaYuri/cursoemvideo-python3 | 0 | 6615916 | <gh_stars>0
n1 = int (input ('Digite um número: '))
q = n1 - 1
w = n1 + 1
print ('o antecessor de {} é {} \n e o sucessor de {} é {}'.format (n1, q, n1, w))
| n1 = int (input ('Digite um número: '))
q = n1 - 1
w = n1 + 1
print ('o antecessor de {} é {} \n e o sucessor de {} é {}'.format (n1, q, n1, w)) | none | 1 | 3.911645 | 4 | |
django_football/teams/templatetags/teams_extras.py | gvpeek/django_football | 1 | 6615917 | <gh_stars>1-10
from django import template
register = template.Library()
@register.filter
def dictget(dictionary, key):
"""
Gets the value from the given dictionary for the given key and returns an empty string
if the key is not found.
"""
return dictionary.get(key, '') | from django import template
register = template.Library()
@register.filter
def dictget(dictionary, key):
"""
Gets the value from the given dictionary for the given key and returns an empty string
if the key is not found.
"""
return dictionary.get(key, '') | en | 0.608479 | Gets the value from the given dictionary for the given key and returns an empty string if the key is not found. | 2.857006 | 3 |
projects/migrations/0001_initial.py | akshaya9/fosswebsite | 369 | 6615918 | <reponame>akshaya9/fosswebsite
# -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-26 12:11
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Language',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('language', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=300)),
('url', models.URLField(blank=True, max_length=400, null=True)),
('description', models.TextField(blank=True)),
('image', models.ImageField(blank=True, null=True, upload_to='project/')),
('date', models.DateField(blank=True, null=True)),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ProjectMembers',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_assigned', models.DateTimeField(auto_now_add=True)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='projects.Project')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ProjectScreenShot',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to='project/')),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='projects.Project')),
],
),
migrations.AddField(
model_name='language',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='projects.Project'),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-06-26 12:11
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Language',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('language', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='Project',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=300)),
('url', models.URLField(blank=True, max_length=400, null=True)),
('description', models.TextField(blank=True)),
('image', models.ImageField(blank=True, null=True, upload_to='project/')),
('date', models.DateField(blank=True, null=True)),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ProjectMembers',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_assigned', models.DateTimeField(auto_now_add=True)),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='projects.Project')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ProjectScreenShot',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('image', models.ImageField(upload_to='project/')),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='projects.Project')),
],
),
migrations.AddField(
model_name='language',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='projects.Project'),
),
] | en | 0.772869 | # -*- coding: utf-8 -*- # Generated by Django 1.11.2 on 2017-06-26 12:11 | 1.705298 | 2 |
## Spiral Flower Amazing Design Using Pyj.py | Anassyed615/Spiral-Flower-Graphics-Design | 1 | 6615919 | ## Spiral Flower Amazing Design Using Python turtle
## Python Graphics
import turtle
t = turtle.Turtle()
s = turtle.Screen()
s.bgcolor('black')
col = ('pink','yellow','red','blue')
t.speed(0)
for i in range(170):
t.pencolor(col[i%4])
t.setheading(i*95)
for b in range(4):
t.forward(i*1)
t.right(300) | ## Spiral Flower Amazing Design Using Python turtle
## Python Graphics
import turtle
t = turtle.Turtle()
s = turtle.Screen()
s.bgcolor('black')
col = ('pink','yellow','red','blue')
t.speed(0)
for i in range(170):
t.pencolor(col[i%4])
t.setheading(i*95)
for b in range(4):
t.forward(i*1)
t.right(300) | en | 0.367879 | ## Spiral Flower Amazing Design Using Python turtle ## Python Graphics | 4.148935 | 4 |
lambdarest/__version__.py | adamelmore/python-lambdarest | 1 | 6615920 | <gh_stars>1-10
__author__ = "sloev"
__email__ = "<EMAIL>"
__version__ = "7.0.0"
| __author__ = "sloev"
__email__ = "<EMAIL>"
__version__ = "7.0.0" | none | 1 | 0.876865 | 1 | |
La_republica/Limpieza.py | semilleroecusta/analisis_texto | 0 | 6615921 |
base20 = pd.DataFrame(pd.read_csv('2021-04-20.csv'))
base23 = pd.DataFrame(pd.read_csv('2021-04-23).csv'))
base26 = pd.DataFrame(pd.read_csv('2021-04-26).csv'))
base27 = pd.DataFrame(pd.read_csv('2021-04-27).csv'))
base29 = pd.DataFrame(pd.read_csv('2021-04-29).csv'))
base30 = pd.DataFrame(pd.read_csv('2021-04-30).csv'))
df = (base20.merge(base23,how= 'outer').merge(base26,how= 'outer').merge(base27,how= 'outer').merge(base29,how= 'outer').merge(base30,how= 'outer'))
df_texto = df.drop(['dia','fecha','seccion','Unnamed: 0'],axis=1)
df_texto = df_texto.drop_duplicates('titulo')
|
base20 = pd.DataFrame(pd.read_csv('2021-04-20.csv'))
base23 = pd.DataFrame(pd.read_csv('2021-04-23).csv'))
base26 = pd.DataFrame(pd.read_csv('2021-04-26).csv'))
base27 = pd.DataFrame(pd.read_csv('2021-04-27).csv'))
base29 = pd.DataFrame(pd.read_csv('2021-04-29).csv'))
base30 = pd.DataFrame(pd.read_csv('2021-04-30).csv'))
df = (base20.merge(base23,how= 'outer').merge(base26,how= 'outer').merge(base27,how= 'outer').merge(base29,how= 'outer').merge(base30,how= 'outer'))
df_texto = df.drop(['dia','fecha','seccion','Unnamed: 0'],axis=1)
df_texto = df_texto.drop_duplicates('titulo')
| none | 1 | 2.766236 | 3 | |
imager/utils/__init__.py | pharmbio/robot-imager | 1 | 6615922 | <reponame>pharmbio/robot-imager
from __future__ import annotations
from dataclasses import *
from typing import *
from collections import defaultdict
from datetime import datetime
import threading
from .serializer import Serializer, serializer, from_json, to_json # type: ignore
from .nub import nub # type: ignore
from .pp import show, pr, Color # type: ignore
from .profiling import timeit, memit # type: ignore
from .args import doc_header # type: ignore
import json
from urllib.request import urlopen
A = TypeVar('A')
B = TypeVar('B')
def curl(url: str) -> Any:
    """Fetch *url* and decode the response body as JSON.

    Blocks for up to ten minutes; no retries. Raises on network errors or
    if the payload is not valid JSON.
    """
    ten_minutes = 60 * 10
    res = json.loads(urlopen(url, timeout=ten_minutes).read())
    return res
def spawn(f: Callable[[], None]) -> None:
    """Run *f* on a fire-and-forget daemon thread (won't block interpreter exit)."""
    worker = threading.Thread(target=f, daemon=True)
    worker.start()
def group_by(xs: Iterable[A], key: Callable[[A], B]) -> defaultdict[B, list[A]]:
    """Bucket the elements of *xs* by ``key(element)``, preserving encounter order."""
    buckets: defaultdict[B, list[A]] = defaultdict(list)
    for element in xs:
        buckets[key(element)].append(element)
    return buckets
def uniq(xs: Iterable[A]) -> Iterable[A]:
    """Deduplicate *xs* keeping first-seen order (returned as a dict keys view)."""
    return dict.fromkeys(xs).keys()
def flatten(xss: Iterable[list[A]]) -> list[A]:
    """Concatenate the lists in *xss* into a single list.

    Rewritten from ``sum(xss, [])``, which re-copies the accumulator on every
    step (quadratic time in total length), to a single linear pass.
    """
    return [x for xs in xss for x in xs]
def catch(m: Callable[[], A], default: B) -> A | B:
    """Call *m* and return its result, or *default* if it raises.

    Narrowed from a bare ``except:`` to ``except Exception`` so that
    ``KeyboardInterrupt`` / ``SystemExit`` still propagate instead of being
    silently converted into *default*.
    """
    try:
        return m()
    except Exception:
        return default
@dataclass(frozen=False)
class Mutable(Generic[A]):
    """A mutable single-value box (e.g. writable state inside otherwise
    frozen dataclasses)."""
    # The boxed value; reassign freely via ``box.value = ...``.
    value: A
    @classmethod
    def factory(cls, x: A):
        """dataclasses field whose default is a fresh ``Mutable(x)`` per instance."""
        return field(default_factory=lambda: cls(x))
    @classmethod
    def init(cls, f: Callable[[], A]):
        """Like :meth:`factory`, but the initial value is produced lazily by ``f()``."""
        return field(default_factory=lambda: cls(f()))
def read_commasep(s: str, p: Callable[[str], A] = lambda x: x) -> list[A]:
    """Split *s* on commas, trim whitespace, drop empty parts, map *p* over the rest."""
    trimmed = (part.strip() for part in s.strip().split(','))
    return [p(part) for part in trimmed if part]
def now_str_for_filename() -> str:
    """Current local time at second resolution, spaces replaced for filename safety."""
    stamp = str(datetime.now())
    whole_seconds, _, _ = stamp.partition('.')
    return whole_seconds.replace(' ', '_')
@dataclass(frozen=True)
class test(Generic[A]):
    """Inline assertion helper: ``test(lhs) == rhs`` raises ValueError on
    mismatch, printing a coloured report. Successful comparisons are echoed
    only when the ``verbose`` environment variable is set."""
    # Left-hand side captured at construction; compared when ``==`` runs.
    lhs: A
    def __eq__(self, rhs: A) -> bool:
        """Compare lhs to *rhs*: print and return True, or print and raise."""
        if self.lhs == rhs:
            import os
            if os.environ.get('verbose'):
                green = Color().green
                print(green('✔ '), show(self.lhs))
                print(green(' =='), show(rhs))
            return True
        else:
            red = Color().red
            print(red('✗ '), show(self.lhs))
            print(red(' !='), show(rhs))
            raise ValueError('Equality test failed')
def iterate_with_full_context(xs: Iterable[A]) -> list[tuple[list[A], A, list[A]]]:
    """For each element yield (everything before it, the element, everything after it)."""
    items = list(xs)
    triples: list[tuple[list[A], A, list[A]]] = []
    for idx, item in enumerate(items):
        triples.append((items[:idx], item, items[idx + 1:]))
    return triples
def iterate_with_context(xs: Iterable[A]) -> list[tuple[A | None, A, A | None]]:
    """For each element yield (previous or None, element, next or None).

    Equivalent to the full-context version but computed directly by index,
    without materialising the prefix/suffix lists.
    """
    items = list(xs)
    last = len(items) - 1
    return [
        (items[i - 1] if i > 0 else None, item, items[i + 1] if i < last else None)
        for i, item in enumerate(items)
    ]
def iterate_with_next(xs: Iterable[A]) -> list[tuple[A, A | None]]:
    """Pair every element with its successor; the last element pairs with None."""
    items = list(xs)
    return [
        (item, items[i + 1] if i + 1 < len(items) else None)
        for i, item in enumerate(items)
    ]
def iterate_with_prev(xs: Iterable[A]) -> list[tuple[A | None, A]]:
    """Pair every element with its predecessor; the first element pairs with None."""
    items = list(xs)
    return [
        (items[i - 1] if i > 0 else None, item)
        for i, item in enumerate(items)
    ]
# Module-level self-checks: the `test` helper above raises ValueError at
# import time if the context-iteration invariants ever regress.
test(iterate_with_full_context([1,2,3,4])) == [
    ([], 1, [2, 3, 4]),
    ([1], 2, [3, 4]),
    ([1, 2], 3, [4]),
    ([1, 2, 3], 4, []),
]
test(iterate_with_context([1,2,3,4])) == [
    (None, 1, 2),
    (1, 2, 3),
    (2, 3, 4),
    (3, 4, None)
]
def git_HEAD() -> str | None:
    """Return the short (8-character) git commit hash of HEAD.

    Returns None when the ``git`` executable cannot be run at all.
    Note: outside a repository ``git rev-parse`` fails with a nonzero exit
    code but does not raise here (no ``check=True``), so the result is then
    the empty string.

    Narrowed from a bare ``except:`` to ``except Exception`` so that
    KeyboardInterrupt/SystemExit are not swallowed.
    """
    from subprocess import run
    try:
        proc = run(['git', 'rev-parse', 'HEAD'], capture_output=True)
        return proc.stdout.decode().strip()[:8]
    except Exception:
        return None
from datetime import timedelta
def pp_secs(seconds: int | float) -> str:
    '''
    Pretty-print seconds.
    >>> pp_secs(0)
    '0.0'
    >>> pp_secs(0.1)
    '0.1'
    >>> pp_secs(0.09)
    '0.0'
    >>> pp_secs(60)
    '1:00.0'
    >>> pp_secs(3600)
    '1:00:00.0'
    >>> pp_secs(3600 + 60 * 2 + 3 + 0.4)
    '1:02:03.4'
    >>> pp_secs(3600 * 24 - 0.1)
    '23:59:59.9'
    >>> pp_secs(3600 * 24)
    '1 day, 0:00:00.0'
    >>> pp_secs(-0)
    '0.0'
    >>> pp_secs(-0.1)
    '-0.1'
    >>> pp_secs(-0.09)
    '-0.0'
    >>> pp_secs(-60)
    '-1:00.0'
    >>> pp_secs(-3600)
    '-1:00:00.0'
    >>> pp_secs(-(3600 + 60 * 2 + 3 + 0.4))
    '-1:02:03.4'
    >>> pp_secs(-(3600 * 24 - 0.1))
    '-23:59:59.9'
    >>> pp_secs(-(3600 * 24))
    '-1 day, 0:00:00.0'
    '''
    if seconds < 0:
        # Format the magnitude, then prefix the sign (see negative doctests).
        return '-' + pp_secs(-seconds)
    s = str(timedelta(seconds=float(seconds)))  # e.g. '0:01:05.400000' or '1 day, 0:00:00'
    # Strip leading '0' and ':' characters so empty leading fields vanish
    # ('0:01:00' -> '1:00'). A day prefix like '1 day, ...' survives because
    # its first character is not in the strip set.
    s = s.lstrip('0:')
    if not s:
        s = '0'  # everything was stripped: the value is (effectively) zero
    if s.startswith('.'):
        s = '0' + s  # restore the integer zero that lstrip removed from '0.xxxxxx'
    if '.' in s:
        pre, post = s.split('.')
        return pre + '.' + post[:1]  # keep exactly one fractional digit (truncated)
    else:
        return s + '.0'  # timedelta omits the fraction for whole seconds
def round_nnz(x: float, ndigits: int=1) -> float:
    '''
    Round and normalize negative zero
    '''
    # round() can yield -0.0 (e.g. round(-0.04, 1)); collapse it to plain 0.0.
    rounded = round(x, ndigits)
    return 0.0 if rounded == -0.0 else rounded


def zip_with(f: Callable[[float, float], float], xs: list[float], ys: list[float], ndigits: int=1) -> list[float]:
    """Combine xs and ys elementwise with *f*, rounding each result via round_nnz."""
    combined: list[float] = []
    for left, right in zip(xs, ys):
        combined.append(round_nnz(f(left, right), ndigits=ndigits))
    return combined


def zip_sub(xs: list[float], ys: list[float], ndigits: int=1) -> list[float]:
    """Elementwise ``xs[i] - ys[i]``, rounded."""
    def difference(left: float, right: float) -> float:
        return left - right
    return zip_with(difference, xs, ys, ndigits=ndigits)


def zip_add(xs: list[float], ys: list[float], ndigits: int=1) -> list[float]:
    """Elementwise ``xs[i] + ys[i]``, rounded."""
    def total(left: float, right: float) -> float:
        return left + right
    return zip_with(total, xs, ys, ndigits=ndigits)
| from __future__ import annotations
from dataclasses import *
from typing import *
from collections import defaultdict
from datetime import datetime
import threading
from .serializer import Serializer, serializer, from_json, to_json # type: ignore
from .nub import nub # type: ignore
from .pp import show, pr, Color # type: ignore
from .profiling import timeit, memit # type: ignore
from .args import doc_header # type: ignore
import json
from urllib.request import urlopen
A = TypeVar('A')
B = TypeVar('B')
def curl(url: str) -> Any:
ten_minutes = 60 * 10
res = json.loads(urlopen(url, timeout=ten_minutes).read())
return res
def spawn(f: Callable[[], None]) -> None:
threading.Thread(target=f, daemon=True).start()
def group_by(xs: Iterable[A], key: Callable[[A], B]) -> defaultdict[B, list[A]]:
d: dict[B, list[A]] = defaultdict(list)
for x in xs:
d[key(x)] += [x]
return d
def uniq(xs: Iterable[A]) -> Iterable[A]:
return {x: None for x in xs}.keys()
def flatten(xss: Iterable[list[A]]) -> list[A]:
return sum(xss, cast(list[A], []))
def catch(m: Callable[[], A], default: B) -> A | B:
try:
return m()
except:
return default
@dataclass(frozen=False)
class Mutable(Generic[A]):
value: A
@classmethod
def factory(cls, x: A):
return field(default_factory=lambda: cls(x))
@classmethod
def init(cls, f: Callable[[], A]):
return field(default_factory=lambda: cls(f()))
def read_commasep(s: str, p: Callable[[str], A] = lambda x: x) -> list[A]:
return [p(x.strip()) for x in s.strip().split(',') if x.strip()]
def now_str_for_filename() -> str:
return str(datetime.now()).split('.')[0].replace(' ', '_')
@dataclass(frozen=True)
class test(Generic[A]):
lhs: A
def __eq__(self, rhs: A) -> bool:
if self.lhs == rhs:
import os
if os.environ.get('verbose'):
green = Color().green
print(green('✔ '), show(self.lhs))
print(green(' =='), show(rhs))
return True
else:
red = Color().red
print(red('✗ '), show(self.lhs))
print(red(' !='), show(rhs))
raise ValueError('Equality test failed')
def iterate_with_full_context(xs: Iterable[A]) -> list[tuple[list[A], A, list[A]]]:
xs = list(xs)
return [
(xs[:i], x, xs[i+1:])
for i, x in enumerate(xs)
]
def iterate_with_context(xs: Iterable[A]) -> list[tuple[A | None, A, A | None]]:
return [
(prev[-1] if prev else None, x, next[0] if next else None)
for prev, x, next in iterate_with_full_context(xs)
]
def iterate_with_next(xs: Iterable[A]) -> list[tuple[A, A | None]]:
return [
(x, next)
for _, x, next in iterate_with_context(xs)
]
def iterate_with_prev(xs: Iterable[A]) -> list[tuple[A | None, A]]:
return [
(prev, x)
for prev, x, _ in iterate_with_context(xs)
]
test(iterate_with_full_context([1,2,3,4])) == [
([], 1, [2, 3, 4]),
([1], 2, [3, 4]),
([1, 2], 3, [4]),
([1, 2, 3], 4, []),
]
test(iterate_with_context([1,2,3,4])) == [
(None, 1, 2),
(1, 2, 3),
(2, 3, 4),
(3, 4, None)
]
def git_HEAD() -> str | None:
from subprocess import run
try:
proc = run(['git', 'rev-parse', 'HEAD'], capture_output=True)
return proc.stdout.decode().strip()[:8]
except:
return None
from datetime import timedelta
def pp_secs(seconds: int | float) -> str:
'''
Pretty-print seconds.
>>> pp_secs(0)
'0.0'
>>> pp_secs(0.1)
'0.1'
>>> pp_secs(0.09)
'0.0'
>>> pp_secs(60)
'1:00.0'
>>> pp_secs(3600)
'1:00:00.0'
>>> pp_secs(3600 + 60 * 2 + 3 + 0.4)
'1:02:03.4'
>>> pp_secs(3600 * 24 - 0.1)
'23:59:59.9'
>>> pp_secs(3600 * 24)
'1 day, 0:00:00.0'
>>> pp_secs(-0)
'0.0'
>>> pp_secs(-0.1)
'-0.1'
>>> pp_secs(-0.09)
'-0.0'
>>> pp_secs(-60)
'-1:00.0'
>>> pp_secs(-3600)
'-1:00:00.0'
>>> pp_secs(-(3600 + 60 * 2 + 3 + 0.4))
'-1:02:03.4'
>>> pp_secs(-(3600 * 24 - 0.1))
'-23:59:59.9'
>>> pp_secs(-(3600 * 24))
'-1 day, 0:00:00.0'
'''
if seconds < 0:
return '-' + pp_secs(-seconds)
s = str(timedelta(seconds=float(seconds)))
s = s.lstrip('0:')
if not s:
s = '0'
if s.startswith('.'):
s = '0' + s
if '.' in s:
pre, post = s.split('.')
return pre + '.' + post[:1]
else:
return s + '.0'
def round_nnz(x: float, ndigits: int=1) -> float:
'''
Round and normalize negative zero
'''
v = round(x, ndigits)
if v == -0.0:
return 0.0
else:
return v
def zip_with(f: Callable[[float, float], float], xs: list[float], ys: list[float], ndigits: int=1) -> list[float]:
return [round_nnz(f(a, b), ndigits=ndigits) for a, b in zip(xs, ys)]
def zip_sub(xs: list[float], ys: list[float], ndigits: int=1) -> list[float]:
return zip_with(lambda a, b: a - b, xs, ys, ndigits=ndigits)
def zip_add(xs: list[float], ys: list[float], ndigits: int=1) -> list[float]:
return zip_with(lambda a, b: a + b, xs, ys, ndigits=ndigits) | en | 0.219159 | # type: ignore # type: ignore # type: ignore # type: ignore # type: ignore Pretty-print seconds. >>> pp_secs(0) '0.0' >>> pp_secs(0.1) '0.1' >>> pp_secs(0.09) '0.0' >>> pp_secs(60) '1:00.0' >>> pp_secs(3600) '1:00:00.0' >>> pp_secs(3600 + 60 * 2 + 3 + 0.4) '1:02:03.4' >>> pp_secs(3600 * 24 - 0.1) '23:59:59.9' >>> pp_secs(3600 * 24) '1 day, 0:00:00.0' >>> pp_secs(-0) '0.0' >>> pp_secs(-0.1) '-0.1' >>> pp_secs(-0.09) '-0.0' >>> pp_secs(-60) '-1:00.0' >>> pp_secs(-3600) '-1:00:00.0' >>> pp_secs(-(3600 + 60 * 2 + 3 + 0.4)) '-1:02:03.4' >>> pp_secs(-(3600 * 24 - 0.1)) '-23:59:59.9' >>> pp_secs(-(3600 * 24)) '-1 day, 0:00:00.0' Round and normalize negative zero | 2.398788 | 2 |
nlp_libs/books/plotter.py | NLPaladins/rinehartAnalysis | 0 | 6615923 | import matplotlib.pyplot as plt
import matplotlib.colors as plt_colors
import matplotlib.lines as mlines
import numpy as np
def create_locs_labels(book, suspects, perp, dets, co_ocs, crime):
    """Collect sentence-level event positions, labels and colours for the timeline.

    Args (shapes inferred from the unpacking below -- confirm against callers):
        book: needs ``get_total_sentences()`` and a ``clean`` list of chapters,
            each chapter a sequence whose length minus 2 counts its sentences.
        suspects, perp, dets: mappings of name -> (chapter, sentence_num, ...)
            (chapter numbers appear to be 1-based; only the first two fields
            are used).
        co_ocs: iterable of (chapter, sentence_num, _, _) co-occurrence tuples.
        crime: currently unused -- see the empty "crime occurence" section.

    Returns:
        (locs, labels, colors) numpy arrays sorted by sentence position, with
        a cyan 'END' marker appended at the final sentence.
    """
    # divide the book into n chunks based on sentences
    total_sents = book.get_total_sentences()
    chapter_locs = []
    cum_sents = 0
    for chapter in book.clean:
        # chapter_locs[k] = number of sentences before chapter k+1 starts
        chapter_locs.append(cum_sents)
        cum_sents += len(chapter) - 2
    locs = []
    labels = []
    colors = []
    # other suspects
    for name, values in suspects.items():
        ch, sent_num, _ = values
        locs.append(chapter_locs[ch - 1] + sent_num)
        labels.append(name)
        colors.append('tab:blue')
    # perpetrator
    for name, values in perp.items():
        ch, sent_num, _ = values
        locs.append(chapter_locs[ch - 1] + sent_num)
        labels.append(name)
        colors.append('tab:red')
    # detective
    for name, values in dets.items():
        ch, sent_num, _ = values
        locs.append(chapter_locs[ch - 1] + sent_num)
        labels.append(name)
        colors.append('tab:green')
    # perp + det co-occurence
    for i, co_oc in enumerate(co_ocs):
        ch, sent_num, _, _ = co_oc
        locs.append(chapter_locs[ch - 1] + sent_num)
        labels.append(str(i + 1))
        colors.append('tab:orange')
    # crime occurence
    # NOTE(review): no events are emitted for the crime (and the `crime`
    # argument is never read) -- this section looks unfinished.
    # add end of book
    locs.append(total_sents)
    labels.append('END')
    colors.append('tab:cyan')
    # sort
    idx = np.argsort(locs)
    locs = np.array(locs)[idx]
    labels = np.array(labels)[idx]
    colors = np.array(colors)[idx]
    return locs, labels, colors
def make_timeline(book, locs, labels, colors, title, num_x_labels=10, out_path=None):
    """Draw a stem-style event timeline over the book's sentence range.

    Args (assumed from usage -- confirm against create_locs_labels):
        book: needs ``get_total_sentences()``; defines the x-axis extent.
        locs, labels, colors: parallel arrays as produced by create_locs_labels.
        title: shown as 'Timeline: {title}'.
        num_x_labels: number of percentage tick labels on the x axis.
        out_path: if given, the figure is written there at 300 dpi before
            ``plt.show()``.
    """
    fig, ax = plt.subplots(figsize=(12, 5), tight_layout=True)
    total_sents = book.get_total_sentences()
    # NOTE(review): the step is 0 when total_sents < num_x_labels, which would
    # make np.arange raise -- assumed books are always longer than that.
    x_ticks = np.arange(1, total_sents + 1, int(total_sents / num_x_labels))
    x_labels = [str(int((pct) * 100 / num_x_labels)) + '%' for pct in range(num_x_labels + 1)]
    ax.set_title(f'Timeline: {title}', size=20)
    ax.set_xticks(x_ticks)
    ax.set_xticklabels(x_labels, size=14)
    ax.get_yaxis().set_visible(False)
    # tiling the event heights with some nice levels
    levels = np.tile([-5, 5, -3, 3, -1, 1],
                     int(np.ceil(len(labels) / 6)))[:len(labels)]
    # plot the event lines
    markerline, stemline, baseline = ax.stem(locs, levels,
                                             linefmt="C3-", basefmt="k-",
                                             use_line_collection=True)
    # make the marker circle look nicer
    plt.setp(markerline, mec="k", mfc="w", zorder=3)
    # shift the markers to the baseline by replacing the y-data by zeros.
    markerline.set_ydata(np.zeros(len(labels)))
    # annotate chapters: labels above positive stems, below negative ones
    vert = np.array(['top', 'bottom'])[(levels > 0).astype(int)]
    for sent_num, lv, label, va in zip(locs, levels, labels, vert):
        ax.annotate(label, xy=(sent_num, lv), xytext=(6.5 * len(label), np.sign(lv) * 3),
                    textcoords="offset points", va=va, ha="right", size=12)
    # set colors
    colors = [plt_colors.to_rgba(color) for color in colors]
    stemline.set_colors(colors)
    # legend
    legend_colors = ['tab:blue', 'tab:red', 'tab:green', 'tab:orange']
    legend_names = ['other suspects', 'perpetrator', 'detective', 'co-occurences']
    proxies = [mlines.Line2D([], [], color=legend_colors[i], marker='_',
                             markersize=15, label=legend_names[i]) for i in range(4)]
    fig.legend(handles=proxies, bbox_to_anchor=(1.0, 0.935), loc='upper left', fontsize=14)
    ax.margins(y=0.1)
    if out_path:
        # BUG FIX: matplotlib.pyplot has no `save` attribute -- the original
        # `plt.save(out_path, dpi=300)` raised AttributeError whenever an
        # out_path was supplied. `Figure.savefig` is the documented API.
        fig.savefig(out_path, dpi=300)
    plt.show()
| import matplotlib.pyplot as plt
import matplotlib.colors as plt_colors
import matplotlib.lines as mlines
import numpy as np
def create_locs_labels(book, suspects, perp, dets, co_ocs, crime):
# divide the book into n chunks based on sentences
total_sents = book.get_total_sentences()
chapter_locs = []
cum_sents = 0
for chapter in book.clean:
chapter_locs.append(cum_sents)
cum_sents += len(chapter) - 2
locs = []
labels = []
colors = []
# other suspects
for name, values in suspects.items():
ch, sent_num, _ = values
locs.append(chapter_locs[ch - 1] + sent_num)
labels.append(name)
colors.append('tab:blue')
# perpetrator
for name, values in perp.items():
ch, sent_num, _ = values
locs.append(chapter_locs[ch - 1] + sent_num)
labels.append(name)
colors.append('tab:red')
# detective
for name, values in dets.items():
ch, sent_num, _ = values
locs.append(chapter_locs[ch - 1] + sent_num)
labels.append(name)
colors.append('tab:green')
# perp + det co-occurence
for i, co_oc in enumerate(co_ocs):
ch, sent_num, _, _ = co_oc
locs.append(chapter_locs[ch - 1] + sent_num)
labels.append(str(i + 1))
colors.append('tab:orange')
# crime occurence
# add end of book
locs.append(total_sents)
labels.append('END')
colors.append('tab:cyan')
# sort
idx = np.argsort(locs)
locs = np.array(locs)[idx]
labels = np.array(labels)[idx]
colors = np.array(colors)[idx]
return locs, labels, colors
def make_timeline(book, locs, labels, colors, title, num_x_labels=10, out_path=None):
fig, ax = plt.subplots(figsize=(12, 5), tight_layout=True)
total_sents = book.get_total_sentences()
x_ticks = np.arange(1, total_sents + 1, int(total_sents / num_x_labels))
x_labels = [str(int((pct) * 100 / num_x_labels)) + '%' for pct in range(num_x_labels + 1)]
ax.set_title(f'Timeline: {title}', size=20)
ax.set_xticks(x_ticks)
ax.set_xticklabels(x_labels, size=14)
ax.get_yaxis().set_visible(False)
# tiling the event heights with some nice levels
levels = np.tile([-5, 5, -3, 3, -1, 1],
int(np.ceil(len(labels) / 6)))[:len(labels)]
# plot the event lines
markerline, stemline, baseline = ax.stem(locs, levels,
linefmt="C3-", basefmt="k-",
use_line_collection=True)
# make the marker circle look nicer
plt.setp(markerline, mec="k", mfc="w", zorder=3)
# shift the markers to the baseline by replacing the y-data by zeros.
markerline.set_ydata(np.zeros(len(labels)))
# annotate chapters
vert = np.array(['top', 'bottom'])[(levels > 0).astype(int)]
for sent_num, lv, label, va in zip(locs, levels, labels, vert):
ax.annotate(label, xy=(sent_num, lv), xytext=(6.5 * len(label), np.sign(lv) * 3),
textcoords="offset points", va=va, ha="right", size=12)
# set colors
colors = [plt_colors.to_rgba(color) for color in colors]
stemline.set_colors(colors)
# legend
legend_colors = ['tab:blue', 'tab:red', 'tab:green', 'tab:orange']
legend_names = ['other suspects', 'perpetrator', 'detective', 'co-occurences']
proxies = [mlines.Line2D([], [], color=legend_colors[i], marker='_',
markersize=15, label=legend_names[i]) for i in range(4)]
fig.legend(handles=proxies, bbox_to_anchor=(1.0, 0.935), loc='upper left', fontsize=14)
ax.margins(y=0.1)
if out_path:
plt.save(out_path, dpi=300)
plt.show()
| en | 0.855311 | # divide the book into n chunks based on sentences # other suspects # perpetrator # detective # perp + det co-occurence # crime occurence # add end of book # sort # tiling the event heights with some nice levels # plot the event lines # make the marker circle look nicer # shift the markers to the baseline by replacing the y-data by zeros. # annotate chapters # set colors # legend | 2.786963 | 3 |
httprider/codegen/schema_to_java_generator.py | iSWORD/http-rider | 27 | 6615924 | <gh_stars>10-100
import re
import stringcase
__type_mapping = {
"integer": "int",
"number": "int",
"boolean": "boolean",
"string": "String",
}
def __norm(name):
    """Drop every character of *name* that is not an ASCII letter."""
    return re.sub(r"[^a-zA-Z]", "", name)


def to_java_function_name(name):
    """Turn an arbitrary label into a camelCase Java method name."""
    normalized = __norm(name)
    return stringcase.camelcase(normalized)


def to_java_class_name(name):
    """Turn an arbitrary label into a letters-only, capitalised Java class name."""
    normalized = __norm(name)
    return normalized.capitalize()


def to_java_variable(var_name):
    """Turn an arbitrary label into a camelCase Java variable name."""
    normalized = __norm(var_name)
    return stringcase.camelcase(normalized)
def gen_class_variable(var_name, var_type, is_array=False):
    """Render one Java field declaration for JSON-schema property *var_name*.

    *var_type* is the property's schema dict. Objects recurse into gen_class
    (emitting the nested class after the field), arrays delegate to gen_array,
    and scalars map through ``__type_mapping``. With ``is_array=True`` the
    scalar is wrapped in ``List<...>``.
    """
    # print("--> Class Variable: {}".format(var_name))
    json_type = var_type.get("type")
    if json_type == "object":
        # NOTE(review): unlike the scalar branches below, the field name here
        # is not passed through to_java_variable -- non-camelCase input will
        # leak into the generated Java verbatim. Confirm whether intentional.
        class_declaration = f"{var_name.capitalize()} {var_name};"
        return (
            class_declaration
            + "\n"
            + gen_class(var_name.capitalize(), var_type.get("properties"))
        )
    if json_type == "array":
        return gen_array(var_name, var_type)
    # Unknown scalar types fall back to a placeholder "type" in the output.
    java_type = __type_mapping.get(json_type, f"Unhandled type {json_type}")
    if is_array:
        return "List<{}> {};".format(java_type, to_java_variable(var_name))
    else:
        return "{} {};".format(java_type, to_java_variable(var_name))
def gen_class(clazz_name, clazz_properties):
    """Render a Java ``public class`` whose fields come from *clazz_properties*.

    *clazz_properties* is the JSON-schema ``properties`` dict (may be None or
    empty, yielding a body-less class). Field rendering recurses through
    gen_class_variable, so nested object/array classes are embedded inline.
    """
    # print("-> Class {}".format(clazz_name))
    properties = (
        [gen_class_variable(*j) for j in clazz_properties.items()]
        if clazz_properties
        else []
    )
    properties_str = "\n".join(properties)
    return f"""
public class {clazz_name} {{
{properties_str}
}}
"""
def gen_array(var_name, json_schema):
    """Render the Java declaration(s) for an array-typed schema property.

    - no ``items`` schema: fall back to an untyped ``Object`` field;
    - object items: emit the element class followed by a ``List<Class>`` field;
    - scalar items: delegate to gen_class_variable with ``is_array=True``.
    """
    # print("--> Array {}".format(var_name))
    array_items = json_schema.get("items")
    if not array_items:
        return "Object {};".format(var_name)
    elif array_items and array_items.get("type") == "object":
        clazz_name = var_name.capitalize()
        item_class = gen_class(clazz_name, array_items.get("properties"))
        return item_class + "\n" + "List<{}> {};".format(clazz_name, var_name)
    else:
        return gen_class_variable(var_name, array_items, is_array=True)
def code_from_schema(root_clazz, json_schema):
    """Generate Java source for *json_schema*, rooted at class *root_clazz*.

    Top-level arrays are rooted at a class named "Root"; objects with
    properties become *root_clazz*. Returns "" for an empty/missing schema.

    FIX: scalar schemas and property-less objects previously fell through
    and implicitly returned None, which breaks callers that treat the result
    as a string; they now return "" like the empty-schema case.
    """
    if not json_schema:
        return ""
    schema_type = json_schema.get("type", "object")
    if schema_type == "array":
        return gen_array("Root", json_schema)
    elif schema_type == "object" and json_schema.get("properties"):
        return gen_class(root_clazz, json_schema["properties"])
    # Scalar / property-less schemas produce no class definitions.
    return ""
if __name__ == "__main__":
# test_schema = {
# "schema": {
# "type": "object",
# "properties": {
# "name": {"type": "string"},
# "amount": {
# "type": "object",
# "properties": {
# "value": {"type": "integer"},
# "currency": {"type": "string"},
# },
# "required": ["currency", "value"],
# },
# "references": {"type": "array", "items": {"type": "string"}},
# },
# "required": ["amount", "name", "references"],
# }
# }
test_schema = {
"schema": {
"type": "object",
"properties": {
"args": {
"type": "object",
"properties": {"username": {"type": "string"}},
"required": ["username"],
},
"data": {"type": "string"},
"files": {"type": "object"},
"form": {"type": "object"},
"headers": {
"type": "object",
"properties": {
"Accept": {"type": "string"},
"Accept-Encoding": {"type": "string"},
"Connection": {"type": "string"},
"Content-Length": {"type": "string"},
"Content-Type": {"type": "string"},
"Host": {"type": "string"},
"User-Agent": {"type": "string"},
"X-Correlation-Id": {"type": "string"},
"X-Shared": {"type": "string"},
},
"required": [
"Accept",
"Accept-Encoding",
"Connection",
"Content-Length",
"Content-Type",
"Host",
"User-Agent",
"X-Correlation-Id",
"X-Shared",
],
},
"json": {
"type": "object",
"properties": {
"amount": {
"type": "object",
"properties": {
"currency": {"type": "string"},
"value": {"type": "integer"},
},
"required": ["currency", "value"],
},
"name": {"type": "string"},
"references": {"type": "array", "items": {"type": "string"}},
},
"required": ["amount", "name", "references"],
},
"origin": {"type": "string"},
"url": {"type": "string"},
},
"required": [
"args",
"data",
"files",
"form",
"headers",
"json",
"origin",
"url",
],
}
}
code = code_from_schema("ApiRequest", test_schema.get("schema"))
print("--- " * 50)
print(code)
| import re
import stringcase
__type_mapping = {
"integer": "int",
"number": "int",
"boolean": "boolean",
"string": "String",
}
def __norm(name):
return re.sub(r"[^a-zA-Z]", "", name)
def to_java_function_name(name):
return stringcase.camelcase(__norm(name))
def to_java_class_name(name):
return __norm(name).capitalize()
def to_java_variable(var_name):
return stringcase.camelcase(__norm(var_name))
def gen_class_variable(var_name, var_type, is_array=False):
# print("--> Class Variable: {}".format(var_name))
json_type = var_type.get("type")
if json_type == "object":
class_declaration = f"{var_name.capitalize()} {var_name};"
return (
class_declaration
+ "\n"
+ gen_class(var_name.capitalize(), var_type.get("properties"))
)
if json_type == "array":
return gen_array(var_name, var_type)
java_type = __type_mapping.get(json_type, f"Unhandled type {json_type}")
if is_array:
return "List<{}> {};".format(java_type, to_java_variable(var_name))
else:
return "{} {};".format(java_type, to_java_variable(var_name))
def gen_class(clazz_name, clazz_properties):
# print("-> Class {}".format(clazz_name))
properties = (
[gen_class_variable(*j) for j in clazz_properties.items()]
if clazz_properties
else []
)
properties_str = "\n".join(properties)
return f"""
public class {clazz_name} {{
{properties_str}
}}
"""
def gen_array(var_name, json_schema):
# print("--> Array {}".format(var_name))
array_items = json_schema.get("items")
if not array_items:
return "Object {};".format(var_name)
elif array_items and array_items.get("type") == "object":
clazz_name = var_name.capitalize()
item_class = gen_class(clazz_name, array_items.get("properties"))
return item_class + "\n" + "List<{}> {};".format(clazz_name, var_name)
else:
return gen_class_variable(var_name, array_items, is_array=True)
def code_from_schema(root_clazz, json_schema):
if not json_schema:
return ""
schema_type = json_schema.get("type", "object")
if schema_type == "array":
return gen_array("Root", json_schema)
elif schema_type == "object" and json_schema.get("properties"):
return gen_class(root_clazz, json_schema["properties"])
if __name__ == "__main__":
# test_schema = {
# "schema": {
# "type": "object",
# "properties": {
# "name": {"type": "string"},
# "amount": {
# "type": "object",
# "properties": {
# "value": {"type": "integer"},
# "currency": {"type": "string"},
# },
# "required": ["currency", "value"],
# },
# "references": {"type": "array", "items": {"type": "string"}},
# },
# "required": ["amount", "name", "references"],
# }
# }
test_schema = {
"schema": {
"type": "object",
"properties": {
"args": {
"type": "object",
"properties": {"username": {"type": "string"}},
"required": ["username"],
},
"data": {"type": "string"},
"files": {"type": "object"},
"form": {"type": "object"},
"headers": {
"type": "object",
"properties": {
"Accept": {"type": "string"},
"Accept-Encoding": {"type": "string"},
"Connection": {"type": "string"},
"Content-Length": {"type": "string"},
"Content-Type": {"type": "string"},
"Host": {"type": "string"},
"User-Agent": {"type": "string"},
"X-Correlation-Id": {"type": "string"},
"X-Shared": {"type": "string"},
},
"required": [
"Accept",
"Accept-Encoding",
"Connection",
"Content-Length",
"Content-Type",
"Host",
"User-Agent",
"X-Correlation-Id",
"X-Shared",
],
},
"json": {
"type": "object",
"properties": {
"amount": {
"type": "object",
"properties": {
"currency": {"type": "string"},
"value": {"type": "integer"},
},
"required": ["currency", "value"],
},
"name": {"type": "string"},
"references": {"type": "array", "items": {"type": "string"}},
},
"required": ["amount", "name", "references"],
},
"origin": {"type": "string"},
"url": {"type": "string"},
},
"required": [
"args",
"data",
"files",
"form",
"headers",
"json",
"origin",
"url",
],
}
}
code = code_from_schema("ApiRequest", test_schema.get("schema"))
print("--- " * 50)
print(code) | en | 0.23661 | # print("--> Class Variable: {}".format(var_name)) # print("-> Class {}".format(clazz_name)) public class {clazz_name} {{ {properties_str} }} # print("--> Array {}".format(var_name)) # test_schema = { # "schema": { # "type": "object", # "properties": { # "name": {"type": "string"}, # "amount": { # "type": "object", # "properties": { # "value": {"type": "integer"}, # "currency": {"type": "string"}, # }, # "required": ["currency", "value"], # }, # "references": {"type": "array", "items": {"type": "string"}}, # }, # "required": ["amount", "name", "references"], # } # } | 3.085395 | 3 |
graphql/validation/rules/no_undefined_variables.py | ThanksBoomerang/graphql-core-legacy | 8 | 6615925 | <gh_stars>1-10
from ...error import GraphQLError
from .base import ValidationRule
# Necessary for static type checking
if False: # flake8: noqa
from ..validation import ValidationContext
from ...language.ast import Document, OperationDefinition
from typing import List, Union, Set
class NoUndefinedVariables(ValidationRule):
    """Validation rule: every ``$variable`` used (transitively, via fragments)
    inside an operation must be declared by that operation's variable
    definitions; otherwise a GraphQLError is reported."""
    __slots__ = ("defined_variable_names",)
    def __init__(self, context):
        # type: (ValidationContext) -> None
        # Names declared by the operation currently being visited.
        self.defined_variable_names = set()  # type: Set[str]
        super(NoUndefinedVariables, self).__init__(context)
    @staticmethod
    def undefined_var_message(var_name, op_name=None):
        """Build the user-facing error message for an undefined variable."""
        if op_name:
            return 'Variable "${}" is not defined by operation "{}".'.format(
                var_name, op_name
            )
        return 'Variable "${}" is not defined.'.format(var_name)
    def enter_OperationDefinition(
        self,
        operation,  # type: OperationDefinition
        key,  # type: int
        parent,  # type: List[OperationDefinition]
        path,  # type: List[Union[int, str]]
        ancestors,  # type: List[Document]
    ):
        # type: (...) -> None
        # Reset the declared-name set at the start of each operation.
        self.defined_variable_names = set()
    def leave_OperationDefinition(
        self,
        operation,  # type: OperationDefinition
        key,  # type: int
        parent,  # type: List[OperationDefinition]
        path,  # type: List[str]
        ancestors,  # type: List[Document]
    ):
        # type: (...) -> None
        # On leaving, compare every recursive variable usage (including those
        # reached through fragment spreads) against the declared names.
        usages = self.context.get_recursive_variable_usages(operation)
        for variable_usage in usages:
            node = variable_usage.node
            var_name = node.name.value
            if var_name not in self.defined_variable_names:
                self.context.report_error(
                    GraphQLError(
                        self.undefined_var_message(
                            var_name, operation.name and operation.name.value
                        ),
                        [node, operation],
                    )
                )
    def enter_VariableDefinition(self, node, key, parent, path, ancestors):
        """Record each variable the operation declares."""
        self.defined_variable_names.add(node.variable.name.value)
| from ...error import GraphQLError
from .base import ValidationRule
# Necessary for static type checking
if False: # flake8: noqa
from ..validation import ValidationContext
from ...language.ast import Document, OperationDefinition
from typing import List, Union, Set
class NoUndefinedVariables(ValidationRule):
__slots__ = ("defined_variable_names",)
def __init__(self, context):
# type: (ValidationContext) -> None
self.defined_variable_names = set() # type: Set[str]
super(NoUndefinedVariables, self).__init__(context)
@staticmethod
def undefined_var_message(var_name, op_name=None):
if op_name:
return 'Variable "${}" is not defined by operation "{}".'.format(
var_name, op_name
)
return 'Variable "${}" is not defined.'.format(var_name)
def enter_OperationDefinition(
self,
operation, # type: OperationDefinition
key, # type: int
parent, # type: List[OperationDefinition]
path, # type: List[Union[int, str]]
ancestors, # type: List[Document]
):
# type: (...) -> None
self.defined_variable_names = set()
def leave_OperationDefinition(
self,
operation, # type: OperationDefinition
key, # type: int
parent, # type: List[OperationDefinition]
path, # type: List[str]
ancestors, # type: List[Document]
):
# type: (...) -> None
usages = self.context.get_recursive_variable_usages(operation)
for variable_usage in usages:
node = variable_usage.node
var_name = node.name.value
if var_name not in self.defined_variable_names:
self.context.report_error(
GraphQLError(
self.undefined_var_message(
var_name, operation.name and operation.name.value
),
[node, operation],
)
)
def enter_VariableDefinition(self, node, key, parent, path, ancestors):
self.defined_variable_names.add(node.variable.name.value) | en | 0.362493 | # Necessary for static type checking # flake8: noqa # type: (ValidationContext) -> None # type: Set[str] # type: OperationDefinition # type: int # type: List[OperationDefinition] # type: List[Union[int, str]] # type: List[Document] # type: (...) -> None # type: OperationDefinition # type: int # type: List[OperationDefinition] # type: List[str] # type: List[Document] # type: (...) -> None | 2.06433 | 2 |
wagtailcommerce/tests/demoproducts/apps.py | theplusagency/wagtail-commerce | 3 | 6615926 | from django.apps import AppConfig
class DemoProductsAppConfig(AppConfig):
    """Django AppConfig for the wagtailcommerce demo-products test app."""
    name = 'wagtailcommerce.tests.demoproducts'
    label = 'wagtailcommerce_tests_demoproducts'
    verbose_name = "Wagtail Commerce Tests Demo Products"
| from django.apps import AppConfig
class DemoProductsAppConfig(AppConfig):
name = 'wagtailcommerce.tests.demoproducts'
label = 'wagtailcommerce_tests_demoproducts'
verbose_name = "Wagtail Commerce Tests Demo Products"
| none | 1 | 1.094463 | 1 | |
python_modules/libraries/dagster-twilio/dagster_twilio/version.py | habibutsu/dagster | 3 | 6615927 | <reponame>habibutsu/dagster<filename>python_modules/libraries/dagster-twilio/dagster_twilio/version.py
# dagster-twilio release metadata: library version plus the nightly build date.
__version__ = '0.7.0'
__nightly__ = '2020.02.13'
| __version__ = '0.7.0'
__nightly__ = '2020.02.13' | none | 1 | 1.024469 | 1 | |
ConCeptCNN/model.py | jiaolang771/aicad | 2 | 6615928 | # <NAME>
import numpy as np
import scipy.io as sio
import os
import math
# plot the model
os.environ["PATH"] += os.pathsep + 'C:\\Program Files (x86)\\Graphviz2.38\\bin'
from keras.utils.vis_utils import plot_model
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import roc_curve, roc_auc_score, confusion_matrix, accuracy_score
from keras.layers import Dense, Dropout, BatchNormalization, Activation, Input, Conv2D, Flatten, MaxPooling2D, GlobalAveragePooling2D, AveragePooling2D, concatenate
from keras.models import Sequential, Model
import keras
from indeption_model import inception_module
import scipy.io as sio
import jason
import matplotlib.pyplot as plt
def check_models():
    """Print Keras summaries for the candidate single-column convolution channels.

    Builds four throwaway Sequential models over (90, 90, 1) inputs -- kernel
    heights 3, 5 and 7, each with two conv + average-pooling stages, plus one
    full-height (90, 1) convolution -- purely to inspect output shapes and
    parameter counts. Nothing is trained or returned.
    """
    # Channel A: kernel height 3.
    model = Sequential()
    model.add(Conv2D(32, kernel_size=(3, 1), padding='valid', activation='relu', input_shape=(90, 90, 1), name='conv1'))
    model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool1'))
    model.add(Conv2D(64, kernel_size=(3, 1), padding='valid', activation='relu', name='conv2'))
    model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool2'))
    model.summary()
    # Channel B: same layout, kernel height 5.
    model = Sequential()
    model.add(Conv2D(32, kernel_size=(5, 1), padding='valid', activation='relu', input_shape=(90, 90, 1), name='conv1'))
    model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool1'))
    model.add(Conv2D(64, kernel_size=(5, 1), padding='valid', activation='relu', name='conv2'))
    model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool2'))
    model.summary()
    # Channel C: same layout, kernel height 7.
    model = Sequential()
    model.add(Conv2D(32, kernel_size=(7, 1), padding='valid', activation='relu', input_shape=(90, 90, 1), name='conv1'))
    model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool1'))
    model.add(Conv2D(64, kernel_size=(7, 1), padding='valid', activation='relu', name='conv2'))
    model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool2'))
    model.summary()
    # Channel D: single convolution spanning the full 90-row column.
    model = Sequential()
    model.add(Conv2D(64, kernel_size=(90, 1), padding='valid', activation='relu', input_shape=(90, 90, 1), name='conv1'))
    model.summary()
def build_fc(input_shape=(90, 90)):
input_data = Input(shape=input_shape)
coarse_channel = Conv2D(64, kernel_size=(1, 3), padding='valid', activation='relu', name='coarse_conv1')(input_data)
coarse_channel = AveragePooling2D((1, 3), strides=(1, 2), name='coarse_AVG_pool1')(coarse_channel)
coarse_channel = Conv2D(128, kernel_size=(1, 3), padding='valid', activation='relu', name='coarse_conv2')(coarse_channel)
coarse_channel = AveragePooling2D((1, 3), strides=(1, 2), name='coarse_AVG_pool2')(coarse_channel)
medium_channel = Conv2D(64, kernel_size=(1, 5), padding='valid', activation='relu', name='medium_conv1')(input_data)
medium_channel = AveragePooling2D((1, 3), strides=(1, 2), name='medium_AVG_pool1')(medium_channel)
medium_channel = Conv2D(128, kernel_size=(1, 5), padding='valid', activation='relu', name='medium_conv2')(medium_channel)
medium_channel = AveragePooling2D((1, 3), strides=(1, 2), name='medium_AVG_pool2')(medium_channel)
fine_channel = Conv2D(64, kernel_size=(1, 7), padding='valid', activation='relu', name='fine_conv1')(input_data)
fine_channel = AveragePooling2D((1, 3), strides=(1, 2), name='fine_AVG_pool1')(fine_channel)
fine_channel = Conv2D(128, kernel_size=(1, 7), padding='valid', activation='relu', name='fine_conv2')(fine_channel)
fine_channel = AveragePooling2D((1, 3), strides=(1, 2), name='fine_AVG_pool2')(fine_channel)
global_channel = Conv2D(128, kernel_size=(1, 90), padding='valid', activation='relu', name='global_conv1')(input_data)
# merge filted data
img_feat = concatenate([coarse_channel, medium_channel, fine_channel, global_channel], axis=2)
img_feat = Flatten()(img_feat)
img_feat = Dense(256, use_bias=False, name='dense1')(img_feat)
img_feat = Dropout(0.5)(img_feat)
img_feat = BatchNormalization()(img_feat)
img_feat = Dense(256, use_bias=False, name='dense2')(img_feat)
img_feat = Dropout(0.5)(img_feat)
img_feat = BatchNormalization()(img_feat)
out = Dense(1, use_bias=False)(img_feat)
out = Activation('sigmoid', name='prediction_layer')(out)
model = Model(inputs=input_data,
outputs=out,
name="Multi-filter-CNN")
return model
| # <NAME>
import numpy as np
import scipy.io as sio
import os
import math
# plot the model
os.environ["PATH"] += os.pathsep + 'C:\\Program Files (x86)\\Graphviz2.38\\bin'
from keras.utils.vis_utils import plot_model
from sklearn.model_selection import StratifiedKFold
from sklearn.metrics import roc_curve, roc_auc_score, confusion_matrix, accuracy_score
from keras.layers import Dense, Dropout, BatchNormalization, Activation, Input, Conv2D, Flatten, MaxPooling2D, GlobalAveragePooling2D, AveragePooling2D, concatenate
from keras.models import Sequential, Model
import keras
from indeption_model import inception_module
import scipy.io as sio
import jason
import matplotlib.pyplot as plt
def check_models():
model = Sequential()
model.add(Conv2D(32, kernel_size=(3, 1), padding='valid', activation='relu', input_shape=(90, 90, 1), name='conv1'))
model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool1'))
model.add(Conv2D(64, kernel_size=(3, 1), padding='valid', activation='relu', name='conv2'))
model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool2'))
model.summary()
model = Sequential()
model.add(Conv2D(32, kernel_size=(5, 1), padding='valid', activation='relu', input_shape=(90, 90, 1), name='conv1'))
model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool1'))
model.add(Conv2D(64, kernel_size=(5, 1), padding='valid', activation='relu', name='conv2'))
model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool2'))
model.summary()
model = Sequential()
model.add(Conv2D(32, kernel_size=(7, 1), padding='valid', activation='relu', input_shape=(90, 90, 1), name='conv1'))
model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool1'))
model.add(Conv2D(64, kernel_size=(7, 1), padding='valid', activation='relu', name='conv2'))
model.add(AveragePooling2D((3, 1), strides=(2, 1), name='AVG_pool2'))
model.summary()
model = Sequential()
model.add(Conv2D(64, kernel_size=(90, 1), padding='valid', activation='relu', input_shape=(90, 90, 1), name='conv1'))
model.summary()
def build_fc(input_shape=(90, 90)):
input_data = Input(shape=input_shape)
coarse_channel = Conv2D(64, kernel_size=(1, 3), padding='valid', activation='relu', name='coarse_conv1')(input_data)
coarse_channel = AveragePooling2D((1, 3), strides=(1, 2), name='coarse_AVG_pool1')(coarse_channel)
coarse_channel = Conv2D(128, kernel_size=(1, 3), padding='valid', activation='relu', name='coarse_conv2')(coarse_channel)
coarse_channel = AveragePooling2D((1, 3), strides=(1, 2), name='coarse_AVG_pool2')(coarse_channel)
medium_channel = Conv2D(64, kernel_size=(1, 5), padding='valid', activation='relu', name='medium_conv1')(input_data)
medium_channel = AveragePooling2D((1, 3), strides=(1, 2), name='medium_AVG_pool1')(medium_channel)
medium_channel = Conv2D(128, kernel_size=(1, 5), padding='valid', activation='relu', name='medium_conv2')(medium_channel)
medium_channel = AveragePooling2D((1, 3), strides=(1, 2), name='medium_AVG_pool2')(medium_channel)
fine_channel = Conv2D(64, kernel_size=(1, 7), padding='valid', activation='relu', name='fine_conv1')(input_data)
fine_channel = AveragePooling2D((1, 3), strides=(1, 2), name='fine_AVG_pool1')(fine_channel)
fine_channel = Conv2D(128, kernel_size=(1, 7), padding='valid', activation='relu', name='fine_conv2')(fine_channel)
fine_channel = AveragePooling2D((1, 3), strides=(1, 2), name='fine_AVG_pool2')(fine_channel)
global_channel = Conv2D(128, kernel_size=(1, 90), padding='valid', activation='relu', name='global_conv1')(input_data)
# merge filted data
img_feat = concatenate([coarse_channel, medium_channel, fine_channel, global_channel], axis=2)
img_feat = Flatten()(img_feat)
img_feat = Dense(256, use_bias=False, name='dense1')(img_feat)
img_feat = Dropout(0.5)(img_feat)
img_feat = BatchNormalization()(img_feat)
img_feat = Dense(256, use_bias=False, name='dense2')(img_feat)
img_feat = Dropout(0.5)(img_feat)
img_feat = BatchNormalization()(img_feat)
out = Dense(1, use_bias=False)(img_feat)
out = Activation('sigmoid', name='prediction_layer')(out)
model = Model(inputs=input_data,
outputs=out,
name="Multi-filter-CNN")
return model
| en | 0.829486 | # <NAME> # plot the model # merge filted data | 2.408396 | 2 |
other/doomsday_alg.py | ikr4mm/Python | 79 | 6615929 | <reponame>ikr4mm/Python
# doomsday algo atau biasa disebut dengan aturan kiamat
# adalah algoritma daripada penentuan hari dalam seminggu
# ia menyediakan kalender abadi karena kalender georgian
# bergerak dalam siklus 400 tahun
# https://id.wikinew.wiki/wiki/Doomsday_rule
DOOMSDAY_LEAP = [4, 1, 7, 4, 2, 6, 4, 1, 5, 3, 7, 5]
DOOMSDAY_NOT_LEAP = [3, 7, 7, 4, 2, 6, 4, 1, 5, 3, 7, 5]
WEEK_DAY_NAMES = {
0: "Minggu",
1: "Senin",
2: "Selasa",
3: "Rabu",
4: "Kamis",
5: "Jumat",
6: "Sabtu",
}
def get_week_day(year: int, month: int, day: int) -> str:
"""
Mengembalikan nama hari-minggu dari tanggal tertentu.
>>> get_week_day(2021, 8, 17)
'Selasa'
"""
assert len(str(year)) > 2, "tahun seharusnya dalam format yyyy"
assert 1 <= month <= 12, "bulan seharusnya antara 1 dan 12"
assert 1 <= day <= 31, "hari seharusnya antara 1 dan 31"
# penerapan algoritma kiamat
century = year // 100
century_anchor = (5 * (century % 4) + 2) % 7
centurian = year % 100
centurian_m = centurian % 12
dooms_day = (
(centurian // 12) + centurian_m + (centurian_m // 4) + century_anchor
) % 7
day_anchor = (
DOOMSDAY_NOT_LEAP[month - 1]
if (year % 4 != 0) or (centurian == 0 and (year % 400) == 0)
else DOOMSDAY_LEAP[month - 1]
)
week_day = (dooms_day + day - day_anchor) % 7
return WEEK_DAY_NAMES[week_day]
if __name__ == "__main__":
import doctest
doctest.testmod()
| # doomsday algo atau biasa disebut dengan aturan kiamat
# adalah algoritma daripada penentuan hari dalam seminggu
# ia menyediakan kalender abadi karena kalender georgian
# bergerak dalam siklus 400 tahun
# https://id.wikinew.wiki/wiki/Doomsday_rule
DOOMSDAY_LEAP = [4, 1, 7, 4, 2, 6, 4, 1, 5, 3, 7, 5]
DOOMSDAY_NOT_LEAP = [3, 7, 7, 4, 2, 6, 4, 1, 5, 3, 7, 5]
WEEK_DAY_NAMES = {
0: "Minggu",
1: "Senin",
2: "Selasa",
3: "Rabu",
4: "Kamis",
5: "Jumat",
6: "Sabtu",
}
def get_week_day(year: int, month: int, day: int) -> str:
"""
Mengembalikan nama hari-minggu dari tanggal tertentu.
>>> get_week_day(2021, 8, 17)
'Selasa'
"""
assert len(str(year)) > 2, "tahun seharusnya dalam format yyyy"
assert 1 <= month <= 12, "bulan seharusnya antara 1 dan 12"
assert 1 <= day <= 31, "hari seharusnya antara 1 dan 31"
# penerapan algoritma kiamat
century = year // 100
century_anchor = (5 * (century % 4) + 2) % 7
centurian = year % 100
centurian_m = centurian % 12
dooms_day = (
(centurian // 12) + centurian_m + (centurian_m // 4) + century_anchor
) % 7
day_anchor = (
DOOMSDAY_NOT_LEAP[month - 1]
if (year % 4 != 0) or (centurian == 0 and (year % 400) == 0)
else DOOMSDAY_LEAP[month - 1]
)
week_day = (dooms_day + day - day_anchor) % 7
return WEEK_DAY_NAMES[week_day]
if __name__ == "__main__":
import doctest
doctest.testmod() | id | 0.78508 | # doomsday algo atau biasa disebut dengan aturan kiamat # adalah algoritma daripada penentuan hari dalam seminggu # ia menyediakan kalender abadi karena kalender georgian # bergerak dalam siklus 400 tahun # https://id.wikinew.wiki/wiki/Doomsday_rule Mengembalikan nama hari-minggu dari tanggal tertentu. >>> get_week_day(2021, 8, 17) 'Selasa' # penerapan algoritma kiamat | 3.707199 | 4 |
GEOS_Util/coupled_diagnostics/verification/ecmwf_int/implied.py | GEOS-ESM/GMAO_Shared | 1 | 6615930 | import os
from matplotlib import pyplot as pl
import scipy as sp
import netCDF4 as nc
import ecmwf_int,lwmask
# Read surface fluxes
lw=ecmwf_int.Ctl()('str').clim(12).mean(0); lw.data/=3600*24
sw=ecmwf_int.Ctl()('ssr').clim(12).mean(0); sw.data/=3600*24
lhf=ecmwf_int.Ctl()('slhf').clim(12).mean(0); lhf.data/=3600*24
shf=ecmwf_int.Ctl()('sshf').clim(12).mean(0); shf.data/=3600*24
netheat=sw(); netheat.data+=(lw.data+lhf.data+shf.data)
# Read ocean mask
mask=lwmask.mask
mask.shiftgrid(0.)
mask.regrid(netheat.grid)
netheat.data*=sp.ma.masked_less(mask.data,0.5)
netheat.data-=netheat.aave().data
zonal=netheat.gint(3)
imptrans=zonal()
jm=imptrans.dims[2]-1
for j in xrange(jm-1,-1,-1):
imptrans.data[:,:,j,:]=-zonal.subset(jind=slice(j,jm)).gint(2).data
imptrans.data/=1e15
GWlats=[47, 24, -19, -30];
GWerr=[0.1, 0.3, 0.6, 0.3];
GWoht=[0.6, 1.8, -0.8, -0.6];
trenb=sp.ma.masked_values(sp.genfromtxt('/home/yvikhlia/verification/implied/ANNUAL_TRANSPORTS_1985_1989.ascii',skiprows=1),-999.0)/100.
path=os.environ['HOME']+'/verification/ecmwf_int/plots'
try:
os.makedirs(path)
except OSError:
pass
pl.figure(1); pl.clf()
imptrans.d();
pl.plot(trenb[:,0],trenb[:,6], linewidth=2,color='red')
pl.plot(GWlats,GWoht,'*',color='green')
pl.errorbar(GWlats,GWoht,yerr=GWerr,fmt='*',color='green')
pl.plot(trenb[:,0],trenb[:,6]+trenb[:,13],color='red')
pl.plot(trenb[:,0],trenb[:,6]-trenb[:,13],color='red')
ax=pl.gca()
ax.set_title('Implied Ocean Heat Transport')
ax.set_ylabel('PW')
ax.set_xlim(-90.,90)
ax.set_ylim(-3,3)
ax.legend(('ECMWF Interim','Trenberth-Caron','Ganachaud-Wunsch'),loc=4)
pl.grid()
pl.show()
pl.savefig(path+'/implied.png')
| import os
from matplotlib import pyplot as pl
import scipy as sp
import netCDF4 as nc
import ecmwf_int,lwmask
# Read surface fluxes
lw=ecmwf_int.Ctl()('str').clim(12).mean(0); lw.data/=3600*24
sw=ecmwf_int.Ctl()('ssr').clim(12).mean(0); sw.data/=3600*24
lhf=ecmwf_int.Ctl()('slhf').clim(12).mean(0); lhf.data/=3600*24
shf=ecmwf_int.Ctl()('sshf').clim(12).mean(0); shf.data/=3600*24
netheat=sw(); netheat.data+=(lw.data+lhf.data+shf.data)
# Read ocean mask
mask=lwmask.mask
mask.shiftgrid(0.)
mask.regrid(netheat.grid)
netheat.data*=sp.ma.masked_less(mask.data,0.5)
netheat.data-=netheat.aave().data
zonal=netheat.gint(3)
imptrans=zonal()
jm=imptrans.dims[2]-1
for j in xrange(jm-1,-1,-1):
imptrans.data[:,:,j,:]=-zonal.subset(jind=slice(j,jm)).gint(2).data
imptrans.data/=1e15
GWlats=[47, 24, -19, -30];
GWerr=[0.1, 0.3, 0.6, 0.3];
GWoht=[0.6, 1.8, -0.8, -0.6];
trenb=sp.ma.masked_values(sp.genfromtxt('/home/yvikhlia/verification/implied/ANNUAL_TRANSPORTS_1985_1989.ascii',skiprows=1),-999.0)/100.
path=os.environ['HOME']+'/verification/ecmwf_int/plots'
try:
os.makedirs(path)
except OSError:
pass
pl.figure(1); pl.clf()
imptrans.d();
pl.plot(trenb[:,0],trenb[:,6], linewidth=2,color='red')
pl.plot(GWlats,GWoht,'*',color='green')
pl.errorbar(GWlats,GWoht,yerr=GWerr,fmt='*',color='green')
pl.plot(trenb[:,0],trenb[:,6]+trenb[:,13],color='red')
pl.plot(trenb[:,0],trenb[:,6]-trenb[:,13],color='red')
ax=pl.gca()
ax.set_title('Implied Ocean Heat Transport')
ax.set_ylabel('PW')
ax.set_xlim(-90.,90)
ax.set_ylim(-3,3)
ax.legend(('ECMWF Interim','Trenberth-Caron','Ganachaud-Wunsch'),loc=4)
pl.grid()
pl.show()
pl.savefig(path+'/implied.png')
| en | 0.645993 | # Read surface fluxes # Read ocean mask | 1.83177 | 2 |
rest_client/request.py | yeraydiazdiaz/sublime-rest-client | 0 | 6615931 | import typing as tp
from dataclasses import dataclass
@dataclass
class Request:
url: str
method: str = "GET"
headers: tp.Optional[tp.Mapping[str, str]] = None
body: tp.Optional[str] = None
| import typing as tp
from dataclasses import dataclass
@dataclass
class Request:
url: str
method: str = "GET"
headers: tp.Optional[tp.Mapping[str, str]] = None
body: tp.Optional[str] = None
| none | 1 | 2.81313 | 3 | |
problem9.py | rentes/Euler | 1 | 6615932 | """Project Euler - Problem 9 - http://projecteuler.net/problem=9"""
import sys
import time
import tools.timeutils as timeutils
def pythagorean_triplet():
"""
Finds the product of the terms of the pythagorean triplet a, b and c,
for which a^2 + b^2 = c^2 and a + b + c = 1000
"""
a, b, c = 1, 1, 1
triplet_found = False
while a < 1000 and triplet_found is False:
while b < 1000 and triplet_found is False:
while c < 1000 and triplet_found is False:
if (a ** 2 + b ** 2 == c ** 2) and (a + b + c == 1000):
print("triplet is:", a, b, c)
print("product is:", a * b * c)
triplet_found = True
else:
c += 1
b += 1
c = 1
a += 1
b = 1
def main():
"""Main entry point for the script"""
start = time.time()
pythagorean_triplet()
timeutils.elapsed_time(time.time() - start)
if __name__ == '__main__':
sys.exit(main())
| """Project Euler - Problem 9 - http://projecteuler.net/problem=9"""
import sys
import time
import tools.timeutils as timeutils
def pythagorean_triplet():
"""
Finds the product of the terms of the pythagorean triplet a, b and c,
for which a^2 + b^2 = c^2 and a + b + c = 1000
"""
a, b, c = 1, 1, 1
triplet_found = False
while a < 1000 and triplet_found is False:
while b < 1000 and triplet_found is False:
while c < 1000 and triplet_found is False:
if (a ** 2 + b ** 2 == c ** 2) and (a + b + c == 1000):
print("triplet is:", a, b, c)
print("product is:", a * b * c)
triplet_found = True
else:
c += 1
b += 1
c = 1
a += 1
b = 1
def main():
"""Main entry point for the script"""
start = time.time()
pythagorean_triplet()
timeutils.elapsed_time(time.time() - start)
if __name__ == '__main__':
sys.exit(main())
| en | 0.650629 | Project Euler - Problem 9 - http://projecteuler.net/problem=9 Finds the product of the terms of the pythagorean triplet a, b and c, for which a^2 + b^2 = c^2 and a + b + c = 1000 Main entry point for the script | 3.843383 | 4 |
examples/DynamicBindingDemo.py | Ellis0817/Introduction-to-Programming-Using-Python | 0 | 6615933 | class Student:
def __str__(self):
return "Student"
def printStudent(self):
print(self.__str__())
class GraduateStudent(Student):
def __str__(self):
return "Graduate Student"
a = Student()
b = GraduateStudent()
a.printStudent()
b.printStudent() | class Student:
def __str__(self):
return "Student"
def printStudent(self):
print(self.__str__())
class GraduateStudent(Student):
def __str__(self):
return "Graduate Student"
a = Student()
b = GraduateStudent()
a.printStudent()
b.printStudent() | none | 1 | 3.376162 | 3 | |
lib/JumpScale/base/TIMER.py | Jumpscale/jumpscale6_core | 1 | 6615934 | <reponame>Jumpscale/jumpscale6_core
from JumpScale import j
import time
class TIMER:
@staticmethod
def start():
TIMER.clean()
TIMER._start=time.time()
@staticmethod
def stop(nritems=0,log=True):
TIMER._stop=time.time()
TIMER.duration=TIMER._stop-TIMER._start
if nritems>0:
TIMER.nritems=float(nritems)
if TIMER.duration>0:
TIMER.performance=float(nritems)/float(TIMER.duration)
if log:
TIMER.result()
@staticmethod
def clean():
TIMER._stop=0.0
TIMER._start=0.0
TIMER.duration=0.0
TIMER.performance=0.0
TIMER.nritems=0.0
@staticmethod
def result():
print("duration:%s"%TIMER.duration)
print("nritems:%s"%TIMER.nritems)
print("performance:%s"%TIMER.performance)
j.base.timer=TIMER
| from JumpScale import j
import time
class TIMER:
@staticmethod
def start():
TIMER.clean()
TIMER._start=time.time()
@staticmethod
def stop(nritems=0,log=True):
TIMER._stop=time.time()
TIMER.duration=TIMER._stop-TIMER._start
if nritems>0:
TIMER.nritems=float(nritems)
if TIMER.duration>0:
TIMER.performance=float(nritems)/float(TIMER.duration)
if log:
TIMER.result()
@staticmethod
def clean():
TIMER._stop=0.0
TIMER._start=0.0
TIMER.duration=0.0
TIMER.performance=0.0
TIMER.nritems=0.0
@staticmethod
def result():
print("duration:%s"%TIMER.duration)
print("nritems:%s"%TIMER.nritems)
print("performance:%s"%TIMER.performance)
j.base.timer=TIMER | none | 1 | 2.920478 | 3 | |
m2-modified/ims/common/agentless-system-crawler/crawler/plugins/emitters/base_emitter.py | CCI-MOC/ABMI | 108 | 6615935 |
class BaseEmitter:
"""
Base emitter class from which emitters like FileEmitter, StdoutEmitter
should inherit. The main idea is that all emitters get a url, and should
implement an emit() function given an iostream (a buffer with the features
to emit).
"""
def __init__(self, url, timeout=1, max_retries=5,
emit_per_line=False):
self.url = url
self.timeout = timeout
self.max_retries = max_retries
self.emit_per_line = emit_per_line
def emit(self, iostream, compress=False,
metadata={}, snapshot_num=0):
"""
:param iostream: a CStringIO used to buffer the formatted features.
:param compress:
:param metadata:
:param snapshot_num:
:return:
"""
pass
|
class BaseEmitter:
"""
Base emitter class from which emitters like FileEmitter, StdoutEmitter
should inherit. The main idea is that all emitters get a url, and should
implement an emit() function given an iostream (a buffer with the features
to emit).
"""
def __init__(self, url, timeout=1, max_retries=5,
emit_per_line=False):
self.url = url
self.timeout = timeout
self.max_retries = max_retries
self.emit_per_line = emit_per_line
def emit(self, iostream, compress=False,
metadata={}, snapshot_num=0):
"""
:param iostream: a CStringIO used to buffer the formatted features.
:param compress:
:param metadata:
:param snapshot_num:
:return:
"""
pass
| en | 0.757113 | Base emitter class from which emitters like FileEmitter, StdoutEmitter should inherit. The main idea is that all emitters get a url, and should implement an emit() function given an iostream (a buffer with the features to emit). :param iostream: a CStringIO used to buffer the formatted features. :param compress: :param metadata: :param snapshot_num: :return: | 3.001798 | 3 |
src/stream_frame_identifier.py | ckcollab/twitch-experiments | 3 | 6615936 | <gh_stars>1-10
import av
import logging
import random
import sys
import time
import warnings
from livestreamer import Livestreamer
from experiments.test_detecting_in_lol_or_not import get_classifier, process_image
from ocr import ocr_image
logging.getLogger("libav.http").setLevel(logging.ERROR)
# Hide warnings from SKLearn from flooding screen
warnings.filterwarnings("ignore", category=DeprecationWarning)
if __name__ == "__main__":
if len(sys.argv) > 1:
streamer = sys.argv[1]
else:
print "Randomly selecting a streamer..."
streamer = random.choice((
"tsm_doublelift",
"grossie_gore",
"wingsofdeath"
))
classifier = get_classifier()
is_in_lol = False
print "Waiting for streamer %s to join a game..." % streamer
while True:
session = Livestreamer()
streams = session.streams('http://www.twitch.tv/%s' % streamer)
if streams:
stream = streams['source']
container = av.open(stream.url)
video_stream = next(s for s in container.streams if s.type == b'video')
image = None
for packet in container.demux(video_stream):
for frame in packet.decode():
image = frame.to_image()
features = process_image(image)
# save our old state before checking new state, only show message when state changes
old_is_in_lol = is_in_lol
is_in_lol = classifier.predict(features)
if not old_is_in_lol and is_in_lol:
timestr = time.strftime("%Y%m%d-%I:%M %p")
print "@@@@@@@@@@ Joined game: %s" % timestr
elif old_is_in_lol and not is_in_lol:
timestr = time.strftime("%Y%m%d-%I:%M %p")
print "@@@@@@@@@@ Left game: %s" % timestr
if is_in_lol:
print "OCR from image, trying to read character name:", ocr_image(image)
# As soon as we get a full image, we're done
if image:
break
time.sleep(1)
else:
print "Player not streaming, sleeping for 15 minutes"
time.sleep(15 * 60)
| import av
import logging
import random
import sys
import time
import warnings
from livestreamer import Livestreamer
from experiments.test_detecting_in_lol_or_not import get_classifier, process_image
from ocr import ocr_image
logging.getLogger("libav.http").setLevel(logging.ERROR)
# Hide warnings from SKLearn from flooding screen
warnings.filterwarnings("ignore", category=DeprecationWarning)
if __name__ == "__main__":
if len(sys.argv) > 1:
streamer = sys.argv[1]
else:
print "Randomly selecting a streamer..."
streamer = random.choice((
"tsm_doublelift",
"grossie_gore",
"wingsofdeath"
))
classifier = get_classifier()
is_in_lol = False
print "Waiting for streamer %s to join a game..." % streamer
while True:
session = Livestreamer()
streams = session.streams('http://www.twitch.tv/%s' % streamer)
if streams:
stream = streams['source']
container = av.open(stream.url)
video_stream = next(s for s in container.streams if s.type == b'video')
image = None
for packet in container.demux(video_stream):
for frame in packet.decode():
image = frame.to_image()
features = process_image(image)
# save our old state before checking new state, only show message when state changes
old_is_in_lol = is_in_lol
is_in_lol = classifier.predict(features)
if not old_is_in_lol and is_in_lol:
timestr = time.strftime("%Y%m%d-%I:%M %p")
print "@@@@@@@@@@ Joined game: %s" % timestr
elif old_is_in_lol and not is_in_lol:
timestr = time.strftime("%Y%m%d-%I:%M %p")
print "@@@@@@@@@@ Left game: %s" % timestr
if is_in_lol:
print "OCR from image, trying to read character name:", ocr_image(image)
# As soon as we get a full image, we're done
if image:
break
time.sleep(1)
else:
print "Player not streaming, sleeping for 15 minutes"
time.sleep(15 * 60) | en | 0.843629 | # Hide warnings from SKLearn from flooding screen # save our old state before checking new state, only show message when state changes # As soon as we get a full image, we're done | 2.655643 | 3 |
anjos/personal/scripts/remove_app.py | anjos/personal-website | 0 | 6615937 | #!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Mon 30 Jun 2014 14:37:53 CEST
"""Procedures to clean-up unused app information
"""
def main():
import os
import sys
if len(sys.argv) <= 1:
print "usage: %s [-d|--debug] <app> [<app>...]"
sys.exit(1)
elif sys.argv[1] in ('-h', '--help'):
print "usage: %s [-d|--debug] <app> [<app>...]"
sys.exit(0)
os.environ['DJANGO_SETTINGS_MODULE'] = 'anjos.personal.settings'
from django.contrib.contenttypes.models import ContentType
from nav.models import Item
debug = False
if sys.argv[1] in ('-d', '--debug'):
debug = True
del sys.argv[1]
for app in sys.argv[1:]:
print "\nDeleting obsolete django content types and permissions for `%s'...\n" % (app,)
for k in ContentType.objects.filter(app_label=app):
if debug:
print '[debug] would delete: %s %s@%s (%s)' % (k.id, k.model, k.app_label, k.name)
else:
print 'deleting: %s %s@%s (%s)' % (k.id, k.model, k.app_label, k.name)
k.delete()
print "\nDeleting menu entries related to `%s'...\n" % (app,)
for k in Item.objects.filter(url__contains=app):
if debug:
print '[debug] would delete: %s %s' % (k.id, k.url)
else:
print 'deleting: %s %s' % (k.id, k.url)
k.delete()
| #!/usr/bin/env python
# vim: set fileencoding=utf-8 :
# Mon 30 Jun 2014 14:37:53 CEST
"""Procedures to clean-up unused app information
"""
def main():
import os
import sys
if len(sys.argv) <= 1:
print "usage: %s [-d|--debug] <app> [<app>...]"
sys.exit(1)
elif sys.argv[1] in ('-h', '--help'):
print "usage: %s [-d|--debug] <app> [<app>...]"
sys.exit(0)
os.environ['DJANGO_SETTINGS_MODULE'] = 'anjos.personal.settings'
from django.contrib.contenttypes.models import ContentType
from nav.models import Item
debug = False
if sys.argv[1] in ('-d', '--debug'):
debug = True
del sys.argv[1]
for app in sys.argv[1:]:
print "\nDeleting obsolete django content types and permissions for `%s'...\n" % (app,)
for k in ContentType.objects.filter(app_label=app):
if debug:
print '[debug] would delete: %s %s@%s (%s)' % (k.id, k.model, k.app_label, k.name)
else:
print 'deleting: %s %s@%s (%s)' % (k.id, k.model, k.app_label, k.name)
k.delete()
print "\nDeleting menu entries related to `%s'...\n" % (app,)
for k in Item.objects.filter(url__contains=app):
if debug:
print '[debug] would delete: %s %s' % (k.id, k.url)
else:
print 'deleting: %s %s' % (k.id, k.url)
k.delete()
| en | 0.429003 | #!/usr/bin/env python # vim: set fileencoding=utf-8 : # Mon 30 Jun 2014 14:37:53 CEST Procedures to clean-up unused app information | 2.333161 | 2 |
kattis/k_loorolls.py | ivanlyon/exercises | 0 | 6615938 | <reponame>ivanlyon/exercises
'''
Determine both scores of a programming contest team
Status: Accepted
'''
###############################################################################
def main():
"""Read input and print output"""
roll, usage = [int(i) for i in input().split()]
rolls, remaining = 1, roll % usage
while remaining:
usage -= remaining
rolls += 1
remaining = roll % usage
print(rolls)
###############################################################################
if __name__ == '__main__':
main()
| '''
Determine both scores of a programming contest team
Status: Accepted
'''
###############################################################################
def main():
"""Read input and print output"""
roll, usage = [int(i) for i in input().split()]
rolls, remaining = 1, roll % usage
while remaining:
usage -= remaining
rolls += 1
remaining = roll % usage
print(rolls)
###############################################################################
if __name__ == '__main__':
main() | de | 0.685765 | Determine both scores of a programming contest team Status: Accepted ############################################################################### Read input and print output ############################################################################### | 3.425183 | 3 |
examples/residential_mg_with_pv_and_dewhs/micro_grid_control_simulation.py | michchr/HybridControlPy | 1 | 6615939 | <gh_stars>1-10
from examples.residential_mg_with_pv_and_dewhs.modelling.micro_grid_agents import (GridAgentMpc, DewhAgentMpc,
PvAgentMpc, ResDemandAgentMpc)
from examples.residential_mg_with_pv_and_dewhs.modelling.parameters import (dewh_param_struct, grid_param_struct,
res_demand_param_struct, pv_param_struct)
from collections import namedtuple
from controllers.mpc_controller import MpcController
from examples.residential_mg_with_pv_and_dewhs.theromstat_control import DewhTheromstatController
from controllers.no_controller import NoController
import time
import numpy as np
import os
import pandas as pd
from datetime import datetime as DateTime
from examples.residential_mg_with_pv_and_dewhs.tariff_generator import TariffGenerator
import itertools
from utils.matrix_utils import atleast_2d_col
from structdict import StructDict
IDX = pd.IndexSlice
st = time.time()
BASE_FILE = os.path.dirname(__file__)
OMEGA_DHW_STOCHASTIC_SCENARIOS_PATH = os.path.realpath(
fr'{BASE_FILE}/data/dewh_dhw_demand_stochastic_scenarios_15Min_200Lpd_mean.pickle')
OMEGA_DHW_ACTUAL_SCENARIOS_PATH = os.path.realpath(
fr'{BASE_FILE}/data/dewh_dhw_demand_actual_scenarios_15Min_200Lpd_mean.pickle')
OMEGA_PV_PROFILE_PATH = os.path.realpath(
fr'{BASE_FILE}/data/pv_supply_norm_1w_max_15min_from_091218_150219.pickle')
OMEGA_RESD_PROFILE_PATH = os.path.realpath(
fr'{BASE_FILE}/data/res_demand_norm_1w_mean_15min_from_091218_150219.pickle')
omega_dhw_stochastic_scenarios = np.divide(pd.read_pickle(OMEGA_DHW_STOCHASTIC_SCENARIOS_PATH),
dewh_param_struct.ts) # in L/s
omega_dhw_actual_scenarios = np.divide(pd.read_pickle(OMEGA_DHW_ACTUAL_SCENARIOS_PATH),
dewh_param_struct.ts) # in L/s
omega_pv_profile = pd.read_pickle(OMEGA_PV_PROFILE_PATH) # in W with P_pv_max=1
omega_resd_profile = pd.read_pickle(OMEGA_RESD_PROFILE_PATH) # in W with P_resd_ave=1
print(f"Time to load historical data and scenarios':{time.time() - st}")
# Number of control steps in one day (ts is the sample period in seconds).
steps_per_day = int(pd.Timedelta('1D').total_seconds() / dewh_param_struct.ts)
# Largest MPC prediction horizon used (96 steps == one day at 15-minute sampling).
N_p_max = 96
N_tilde_max = N_p_max + 1  # horizon length including the initial step
sim_steps_max = 2500  # maximum number of closed-loop simulation steps
# Total disturbance-profile length required: simulation steps plus slack
# covering two full maximum-length horizons.
max_steps = int(sim_steps_max + 2 * N_tilde_max)
N_h = 20  # number of DEWH (water-heater) households connected to the grid
time_0 = DateTime(2018, 12, 10)  # simulation start date (used for tariff generation)
def get_actual_omega_dewh_profiles(actual_scenarios=None, N_h=1, size=1):
    """Draw a reproducible "actual" DHW-demand profile for each heater.

    For each heater id ``i`` in ``1..N_h``, a dedicated RNG seeded with
    ``i ** 2`` selects ``size`` distinct scenario columns (without
    replacement) from ``actual_scenarios`` and concatenates them
    column-major into a single column vector.

    :param actual_scenarios: 2D ndarray or DataFrame of demand scenarios,
        one scenario per column. Required; ``None`` is rejected.
    :param N_h: number of heaters to generate profiles for.
    :param size: number of scenario columns drawn per heater. Must not
        exceed the number of available columns, since the draw is made
        without replacement (numpy raises ValueError otherwise).
    :return: StructDict mapping heater id -> (size * rows, 1) ndarray.
    :raises ValueError: if ``actual_scenarios`` is None, or if ``size``
        exceeds the number of scenario columns.
    """
    if actual_scenarios is None:
        # Previously this fell through to an opaque AttributeError on .shape.
        raise ValueError("actual_scenarios is required and cannot be None.")
    if isinstance(actual_scenarios, pd.DataFrame):
        actual_scenarios = actual_scenarios.values
    _, num_scen = actual_scenarios.shape
    omega_dewh_profiles = StructDict()
    for i in range(1, N_h + 1):
        # Seed per heater id so each heater's draw is reproducible run-to-run.
        randstate = np.random.RandomState(seed=np.int32(i ** 2))
        profile = actual_scenarios[:, randstate.choice(num_scen, size=size, replace=False)]
        # Stack the selected scenario columns end-to-end (column-major) into one column.
        omega_dewh_profiles[i] = profile.reshape(-1, 1, order='F')
    return omega_dewh_profiles
def get_dewh_random_initial_state(dev_id):
    """Return a reproducible pseudo-random initial heater temperature in [55, 64].

    The generator is seeded with ``dev_id ** 2``, so a given device id maps
    to the same initial state on every run.
    """
    seed = np.int32(dev_id ** 2)
    return np.random.RandomState(seed=seed).randint(55, 65)
def get_min_max_dhw_scenario(k, N_tilde, min_dhw_day, max_dhw_day):
    """Return ``[min, max]`` DHW-demand bound vectors over an ``N_tilde`` horizon.

    Each daily bound profile is tiled to cover at least ``N_tilde`` steps,
    rotated so it starts at the step-of-day corresponding to time step ``k``,
    truncated to ``N_tilde`` entries, and returned as a column vector.

    :raises ValueError: if either daily profile does not contain exactly one
        value per control step of the day (``steps_per_day``).
    """
    min_flat = min_dhw_day.flatten()
    max_flat = max_dhw_day.flatten()
    if len(min_flat) != steps_per_day:
        raise ValueError("Invalid shape for min_dhw_day")
    if len(max_flat) != steps_per_day:
        raise ValueError("Invalid shape for max_dhw_day")
    offset = k % steps_per_day  # current position within the day
    repeats = N_tilde // steps_per_day + 1  # enough tiled days to cover the horizon
    bounds = []
    for daily_profile in (min_flat, max_flat):
        extended = np.tile(daily_profile, repeats)
        horizon = np.roll(extended, -offset)[:N_tilde]
        bounds.append(atleast_2d_col(horizon))
    return bounds
# Time-of-use tariff: low-/high-demand-season off-peak, standard and peak
# rates (values presumably in c/kWh -- confirm against TariffGenerator).
tariff_gen = TariffGenerator(low_off_peak=48.40, low_stnd=76.28, low_peak=110.84, high_off_peak=55.90,
                             high_stnd=102.95, high_peak=339.77)
# Per-step energy cost: /3600 (per-hour -> per-second), /100 (cents -> currency
# units), /1000 (per-kW -> per-W), then * ts for one sample period of power.
# NOTE(review): unit chain inferred from the constants -- verify.
cost_profile = tariff_gen.get_price_vector(time_0, max_steps,
                                           grid_param_struct.control_ts) / 3600 / 100 / 1000 * grid_param_struct.ts
# todo Still needs work
# Clear any device instances registered on the agent classes by previous
# runs/imports so the grid below is built from a clean slate (presumably
# class-level registries -- confirm in micro_grid_agents).
DewhAgentMpc.delete_all_devices()
GridAgentMpc.delete_all_devices()
PvAgentMpc.delete_all_devices()
ResDemandAgentMpc.delete_all_devices()
st = time.time()  # time the device-creation section
## create grid ##
# Scale the grid import/export power limits with the number of households
# (1e4 per household; units presumably W -- confirm in grid_param_struct).
grid_param_struct_adjusted = grid_param_struct.deepcopy()
grid_param_struct_adjusted.P_g_min = -1e4 * N_h
grid_param_struct_adjusted.P_g_max = 1e4 * N_h
grid = GridAgentMpc(device_id=1, param_struct=grid_param_struct_adjusted)
grid.set_price_profile(price_profile=cost_profile)
## Create devices ##
# Per-step min/max DHW demand across the scenario pool -- used as bounding
# disturbance profiles (see get_min_max_dhw_scenario).
min_dhw_day = omega_dhw_stochastic_scenarios.min(axis=1).values
max_dhw_day = omega_dhw_stochastic_scenarios.max(axis=1).values
# Column-major flattening concatenates the daily scenarios into one long profile.
omega_dhw_stochastic_scenarios_profile = omega_dhw_stochastic_scenarios.values.flatten(order='f')
# Override the heater temperature band relative to the imported defaults.
dewh_param_struct_adjusted = dewh_param_struct.deepcopy()
dewh_param_struct_adjusted.T_h_min = 50.0  # C
dewh_param_struct_adjusted.T_h_max = 80.0  # C
dewh_list = [DewhAgentMpc(device_id=i, param_struct=dewh_param_struct_adjusted) for i in range(1, N_h + 1)]
# Reproducible per-heater realized DHW demand drawn from the actual-scenario pool.
omega_dewh_profiles_struct = get_actual_omega_dewh_profiles(actual_scenarios=omega_dhw_actual_scenarios.values,
                                                            N_h=N_h, size=max_steps)
for dewh in dewh_list:
    # Each heater gets its realized demand profile plus the shared scenario
    # pool (used by the scenario-based MPC variants), then joins the grid.
    dewh.set_omega_profile(omega_profile=omega_dewh_profiles_struct[dewh.device_id])
    dewh.set_omega_scenarios(omega_scenarios_profile=omega_dhw_stochastic_scenarios_profile)
    grid.add_device(dewh)
# Single aggregated PV agent representing N_h household installations.
pv_param_struct_adjusted = pv_param_struct.deepcopy()
pv_param_struct_adjusted.P_pv_units = N_h
pvAgent = PvAgentMpc(device_id=1, param_struct=pv_param_struct_adjusted)
pvAgent.set_omega_profile(omega_pv_profile)
grid.add_device(pvAgent)
# Single aggregated residential (non-DEWH) demand agent for N_h households.
res_demand_param_struct_adjusted = res_demand_param_struct.deepcopy()
res_demand_param_struct_adjusted.P_res_units = N_h
resdAgent = ResDemandAgentMpc(device_id=1, param_struct=res_demand_param_struct_adjusted)
resdAgent.set_omega_profile(omega_resd_profile)
grid.add_device(resdAgent)
################################
print(f"Time to create dewh's':{time.time() - st}")
# (controller class, uses-deterministic-forecast) pair; is_deterministic is
# later passed to grid.build_grid via deterministic_or_struct.
ControllerClass = namedtuple('ControllerClass', [
    'controller_type',
    'is_deterministic'
])
# Available controller configurations:
#   mpc_pb         -- MPC with perfect (prescient) disturbance knowledge
#   mpc_ce         -- certainty-equivalent MPC (expected disturbance)
#   mpc_sb_reduced -- scenario-based MPC with a reduced scenario set
#   mpc_sb_full    -- scenario-based MPC with the full scenario set
#   mpc_minmax     -- min-max (worst-case) MPC over bounding scenarios
#   thermo         -- conventional thermostat (hysteresis) baseline
# NOTE(review): abbreviation expansions inferred from names/usage -- verify.
controllers_choices = StructDict(
    {'mpc_pb'        : ControllerClass(MpcController, True),
     'mpc_ce'        : ControllerClass(MpcController, False),
     'mpc_sb_reduced': ControllerClass(MpcController, False),
     'mpc_sb_full'   : ControllerClass(MpcController, False),
     'mpc_minmax'    : ControllerClass(MpcController, False),
     'thermo'        : ControllerClass(DewhTheromstatController, False)
     }
)
def sim_mpc(N_p=1, sim_steps=1, soft_top_mult=10.0, soft_bot_mult=1.0, num_scenarios=20, N_sb_reduced=8,
            controllers=None, save_text_postfix=""):
    """Run a closed-loop MPC simulation of the micro-grid for `sim_steps` steps.

    Each named controller in `controllers` (mapping name -> ControllerClass)
    is attached to every device, re-tuned at each step, solved, and stepped;
    per-controller cost and solve-time logs are printed each iteration and the
    full simulation dataframe is pickled to `<BASE_FILE>/sim_out`.

    :param N_p: prediction horizon (N_tilde = N_p + 1 samples).
    :param sim_steps: number of closed-loop simulation steps.
    :param soft_top_mult: multiplier on the per-horizon worst-case energy cost
        used as the soft-constraint weight for the upper temperature bound.
    :param soft_bot_mult: same, for the lower temperature bound.
    :param num_scenarios: scenarios drawn per step for the scenario-based
        ('mpc_sb_*') controllers.
    :param N_sb_reduced: horizon over which 'mpc_sb_reduced' enforces its
        scenario constraints.
    :param save_text_postfix: optional suffix appended to the output filename.
    :return: StructDict with the simulation dataframe and this call's locals.
    """
    N_tilde = N_p + 1
    controllers = controllers or {}
    deterministic_struct = {cname: controller.is_deterministic for cname, controller in controllers.items()}
    # Start from a clean slate: drop controllers left over from a previous run.
    for dev in itertools.chain([grid], grid.devices):
        dev.delete_all_controllers()
    # Attach every requested controller to every device. Non-MPC controllers
    # get a trivial horizon; non-DEWH devices fall back to NoController.
    for cname, controller in controllers.items():
        for dev in itertools.chain([grid], grid.devices):
            if isinstance(dev, DewhAgentMpc):
                if issubclass(controller.controller_type, MpcController):
                    dev.add_controller(cname, controller.controller_type, N_p=N_p)
                else:
                    dev.add_controller(cname, controller.controller_type, N_p=0, N_tilde=1)
            else:
                if issubclass(controller.controller_type, MpcController):
                    dev.add_controller(cname, controller.controller_type, N_p=N_p)
                else:
                    dev.add_controller(cname, NoController, N_p=0, N_tilde=1)
    # Reproducible per-device initial water temperatures.
    for dev in grid.devices:
        if isinstance(dev, DewhAgentMpc):
            dev.x_k = get_dewh_random_initial_state(dev.device_id)
    total_cost_struct = StructDict({cname: 0 for cname in controllers})
    grid.build_grid(k=0, deterministic_or_struct=deterministic_struct)
    for k in range(0, sim_steps):
        st = time.time()
        prices_tilde = grid.get_price_tilde_k(k=k)
        for cname, controller in controllers.items():
            if issubclass(controller.controller_type, MpcController):
                for dev in itertools.chain([grid], grid.devices):
                    if isinstance(dev, DewhAgentMpc) and issubclass(controller.controller_type, MpcController):
                        if cname.startswith('mpc'):
                            # Soft-constraint weights scale with the worst-case
                            # energy cost achievable over the horizon.
                            price_vec = prices_tilde[cname]
                            max_cost = np.sum(price_vec) * dewh_param_struct.P_h_Nom
                            q_mu_top = max_cost * soft_top_mult
                            q_mu_bot = max_cost * soft_bot_mult
                            dev.set_device_objective_atoms(controller_name=cname,
                                                           q_mu=np.hstack([q_mu_top, q_mu_bot]).ravel(order='c'))
                        if cname.startswith('mpc_sb'):
                            # Scenario-based controllers: sample demand
                            # scenarios and impose them as evolution constraints.
                            omega_tilde_scenarios = dev.get_omega_tilde_scenario(k, N_tilde=N_tilde,
                                                                                 num_scenarios=num_scenarios)
                            if cname == 'mpc_sb_reduced':
                                dev.controllers[cname].set_constraints(
                                    other_constraints=[
                                        dev.controllers[cname].gen_evo_constraints(
                                            N_tilde=N_sb_reduced,
                                            omega_scenarios_k=omega_tilde_scenarios)])
                            elif cname == 'mpc_sb_full':
                                dev.controllers[cname].set_constraints(
                                    other_constraints=[
                                        dev.controllers[cname].gen_evo_constraints(
                                            omega_scenarios_k=omega_tilde_scenarios)])
                        elif cname == 'mpc_minmax':
                            # Robust controller: constrain against the min and
                            # max daily demand envelopes.
                            omega_min, omega_max = get_min_max_dhw_scenario(k=k, N_tilde=N_tilde,
                                                                            min_dhw_day=min_dhw_day,
                                                                            max_dhw_day=max_dhw_day)
                            min_cons = dev.controllers[cname].gen_evo_constraints(N_tilde=N_tilde,
                                                                                  omega_tilde_k=omega_min)
                            max_cons = dev.controllers[cname].gen_evo_constraints(N_tilde=N_tilde,
                                                                                  omega_tilde_k=omega_max)
                            dev.controllers[cname].set_constraints(
                                other_constraints=[min_cons, max_cons])
                    elif isinstance(dev, GridAgentMpc) and issubclass(controller.controller_type, MpcController):
                        # Grid objective: energy prices over the horizon.
                        dev.controllers[cname].set_std_obj_atoms(q_z=prices_tilde[cname])
        grid.build_grid(k=k, deterministic_or_struct=deterministic_struct)
        grid.solve_grid_mpc(k=k, verbose=False, TimeLimit=20, MIPGap=1e-2)
        print(f'k={k}, N_p={N_p}')
        print(f"Time to solve including data transfer:{time.time() - st}")
        l_sim = grid.sim_step_k(k=k)  # advance the simulated plant one step
        print(f"Total Looptime:{time.time() - st}")
        solve_times_struct = StructDict()
        for cname in controllers:
            total_cost_struct[cname] += grid.sim_logs[cname].get(k).cost
            solve_times_struct[cname] = (grid.sim_logs[cname].get(k).time_in_solver,
                                         grid.sim_logs[cname].get(k).time_solve_overall)
        print('Total_cost\n', total_cost_struct)
        print('Solve_times\n', solve_times_struct)
        print('\n')
    # Collect results, timestamp them at 15-minute resolution and pickle to disk.
    df_sim: pd.DataFrame = grid.grid_sim_dataframe
    df_sim.index = pd.date_range(start=time_0, periods=sim_steps, freq='15min')
    if save_text_postfix and not save_text_postfix.startswith('_'):
        save_text_postfix = '_' + save_text_postfix
    T_max = int(dewh_param_struct_adjusted.T_h_max)
    T_min = int(dewh_param_struct_adjusted.T_h_min)
    save_dir = fr'{BASE_FILE}/sim_out'
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    save_path = os.path.realpath(
        fr'{BASE_FILE}/sim_out/sim_Np_{N_p}_st_{int(soft_top_mult)}_sb_{int(soft_bot_mult)}_Ns_{num_scenarios}_'
        fr'Nsr_{N_sb_reduced}_Nh_{N_h}_Tmax_{T_max}_Tmin_{T_min}{save_text_postfix}.sim_out')
    df_sim.to_pickle(save_path)
    return StructDict(df_sim=df_sim, locals_vars=locals())
# Controller line-up for a typical experiment run.
controller_names = ['mpc_pb', 'mpc_ce', 'mpc_sb_reduced', 'mpc_sb_full', 'mpc_minmax', 'thermo']
controllers = controllers_choices.get_sub_struct(controller_names)
# NOTE(review): this rebuilds profiles for 50 heaters and overwrites the
# struct created above with N_h profiles -- presumably leftover
# experimentation; confirm before relying on it.
omega_dewh_profiles_struct = get_actual_omega_dewh_profiles(actual_scenarios=omega_dhw_actual_scenarios.values,
                                                            N_h=50, size=max_steps)
#
# sim_mpc(N_p=24, sim_steps=5, controllers=controllers, num_scenarios=20, N_sb_reduced=6,
#         save_text_postfix=f'')
| from examples.residential_mg_with_pv_and_dewhs.modelling.micro_grid_agents import (GridAgentMpc, DewhAgentMpc,
PvAgentMpc, ResDemandAgentMpc)
from examples.residential_mg_with_pv_and_dewhs.modelling.parameters import (dewh_param_struct, grid_param_struct,
res_demand_param_struct, pv_param_struct)
from collections import namedtuple
from controllers.mpc_controller import MpcController
from examples.residential_mg_with_pv_and_dewhs.theromstat_control import DewhTheromstatController
from controllers.no_controller import NoController
import time
import numpy as np
import os
import pandas as pd
from datetime import datetime as DateTime
from examples.residential_mg_with_pv_and_dewhs.tariff_generator import TariffGenerator
import itertools
from utils.matrix_utils import atleast_2d_col
from structdict import StructDict
IDX = pd.IndexSlice
st = time.time()  # wall-clock reference for the load-time printout below
BASE_FILE = os.path.dirname(__file__)

# Pickled input data shipped next to this script (15-minute resolution).
OMEGA_DHW_STOCHASTIC_SCENARIOS_PATH = os.path.realpath(
    fr'{BASE_FILE}/data/dewh_dhw_demand_stochastic_scenarios_15Min_200Lpd_mean.pickle')
OMEGA_DHW_ACTUAL_SCENARIOS_PATH = os.path.realpath(
    fr'{BASE_FILE}/data/dewh_dhw_demand_actual_scenarios_15Min_200Lpd_mean.pickle')
OMEGA_PV_PROFILE_PATH = os.path.realpath(
    fr'{BASE_FILE}/data/pv_supply_norm_1w_max_15min_from_091218_150219.pickle')
OMEGA_RESD_PROFILE_PATH = os.path.realpath(
    fr'{BASE_FILE}/data/res_demand_norm_1w_mean_15min_from_091218_150219.pickle')

# Dividing the per-sample demand volumes by the sample time converts them to
# flow rates.
omega_dhw_stochastic_scenarios = np.divide(pd.read_pickle(OMEGA_DHW_STOCHASTIC_SCENARIOS_PATH),
                                           dewh_param_struct.ts)  # in L/s
omega_dhw_actual_scenarios = np.divide(pd.read_pickle(OMEGA_DHW_ACTUAL_SCENARIOS_PATH),
                                       dewh_param_struct.ts)  # in L/s
omega_pv_profile = pd.read_pickle(OMEGA_PV_PROFILE_PATH)  # in W with P_pv_max=1
omega_resd_profile = pd.read_pickle(OMEGA_RESD_PROFILE_PATH)  # in W with P_resd_ave=1

print(f"Time to load historical data and scenarios':{time.time() - st}")

steps_per_day = int(pd.Timedelta('1D').total_seconds() / dewh_param_struct.ts)
N_p_max = 96  # largest prediction horizon considered (96 * 15 min = 1 day)
N_tilde_max = N_p_max + 1
sim_steps_max = 2500
# Profiles must extend past the last simulated step by the longest horizon.
max_steps = int(sim_steps_max + 2 * N_tilde_max)
N_h = 20  # number of households / water heaters
time_0 = DateTime(2018, 12, 10)  # simulation start timestamp
def get_actual_omega_dewh_profiles(actual_scenarios=None, N_h=1, size=1):
    """Build a realised DHW-demand profile for each of `N_h` heaters.

    For every heater id in 1..N_h, a dedicated RandomState seeded with
    id**2 (so each device's draw is reproducible across runs) picks `size`
    distinct scenario columns, and the chosen columns are stacked
    column-major into a single column vector.

    :param actual_scenarios: 2-D array or DataFrame, one scenario per column.
    :param N_h: number of heaters to build profiles for.
    :param size: number of scenario columns sampled per heater.
    :return: StructDict mapping device id -> (rows * size, 1) ndarray.
    """
    scenarios = actual_scenarios.values if isinstance(actual_scenarios, pd.DataFrame) else actual_scenarios
    num_scen = scenarios.shape[1]
    profiles = StructDict()
    for dev_id in range(1, N_h + 1):
        rng = np.random.RandomState(seed=np.int32(dev_id ** 2))
        picked = rng.choice(num_scen, size=size, replace=False)
        profiles[dev_id] = scenarios[:, picked].reshape(-1, 1, order='F')
    return profiles
def get_dewh_random_initial_state(dev_id):
    """Reproducible pseudo-random initial water temperature for a heater.

    Seeding with dev_id**2 guarantees the same device always receives the
    same start temperature across runs.

    :param dev_id: integer device id.
    :return: int in the half-open interval [55, 65).
    """
    rng = np.random.RandomState(seed=np.int32(dev_id * dev_id))
    return rng.randint(55, 65)
def get_min_max_dhw_scenario(k, N_tilde, min_dhw_day, max_dhw_day):
    """Return [min, max] DHW demand envelopes over the prediction horizon.

    Each daily profile is tiled to cover at least N_tilde samples, rotated so
    it starts at the in-day position of step `k`, truncated to N_tilde
    samples, and shaped as a column vector.

    :raises ValueError: if either daily profile is not steps_per_day long.
    """
    min_day = min_dhw_day.flatten()
    max_day = max_dhw_day.flatten()
    if len(min_day) != steps_per_day:
        raise ValueError("Invalid shape for min_dhw_day")
    if len(max_day) != steps_per_day:
        raise ValueError("Invalid shape for max_dhw_day")
    offset = k % steps_per_day
    repeats = N_tilde // steps_per_day + 1
    envelopes = []
    for day_profile in (min_day, max_day):
        horizon = np.roll(np.tile(day_profile, repeats), -offset)[:N_tilde]
        envelopes.append(atleast_2d_col(horizon))
    return envelopes
# Time-of-use tariff generator.
# NOTE(review): the /3600/100/1000 chain presumably converts the tariff from
# c/kWh to currency per J before scaling by the sample time ts -- confirm
# against TariffGenerator's units.
tariff_gen = TariffGenerator(low_off_peak=48.40, low_stnd=76.28, low_peak=110.84, high_off_peak=55.90,
                             high_stnd=102.95, high_peak=339.77)
cost_profile = tariff_gen.get_price_vector(time_0, max_steps,
                                           grid_param_struct.control_ts) / 3600 / 100 / 1000 * grid_param_struct.ts

# todo Still needs work
# Reset the agent registries so re-running the script starts clean.
DewhAgentMpc.delete_all_devices()
GridAgentMpc.delete_all_devices()
PvAgentMpc.delete_all_devices()
ResDemandAgentMpc.delete_all_devices()
st = time.time()

## create grid ##
# Widen the grid import/export power limits proportionally to the fleet size.
grid_param_struct_adjusted = grid_param_struct.deepcopy()
grid_param_struct_adjusted.P_g_min = -1e4 * N_h
grid_param_struct_adjusted.P_g_max = 1e4 * N_h
grid = GridAgentMpc(device_id=1, param_struct=grid_param_struct_adjusted)
grid.set_price_profile(price_profile=cost_profile)

## Create devices ##
# Per-timestep min/max hot-water draw across all stochastic scenarios; used
# by the 'mpc_minmax' controller as robust demand envelopes.
min_dhw_day = omega_dhw_stochastic_scenarios.min(axis=1).values
max_dhw_day = omega_dhw_stochastic_scenarios.max(axis=1).values
# Column-major flattening concatenates the scenario columns end-to-end.
omega_dhw_stochastic_scenarios_profile = omega_dhw_stochastic_scenarios.values.flatten(order='f')

# All electric water heaters share one parameter struct with the temperature
# comfort band set explicitly.
dewh_param_struct_adjusted = dewh_param_struct.deepcopy()
dewh_param_struct_adjusted.T_h_min = 50.0  # C
dewh_param_struct_adjusted.T_h_max = 80.0  # C
dewh_list = [DewhAgentMpc(device_id=i, param_struct=dewh_param_struct_adjusted) for i in range(1, N_h + 1)]
omega_dewh_profiles_struct = get_actual_omega_dewh_profiles(actual_scenarios=omega_dhw_actual_scenarios.values,
                                                            N_h=N_h, size=max_steps)
for dewh in dewh_list:
    # Each heater gets its own realised demand profile plus the shared
    # stochastic scenario pool.
    dewh.set_omega_profile(omega_profile=omega_dewh_profiles_struct[dewh.device_id])
    dewh.set_omega_scenarios(omega_scenarios_profile=omega_dhw_stochastic_scenarios_profile)
    grid.add_device(dewh)

# Aggregate PV plant scaled to one unit per household.
pv_param_struct_adjusted = pv_param_struct.deepcopy()
pv_param_struct_adjusted.P_pv_units = N_h
pvAgent = PvAgentMpc(device_id=1, param_struct=pv_param_struct_adjusted)
pvAgent.set_omega_profile(omega_pv_profile)
grid.add_device(pvAgent)

# Aggregate uncontrolled residential demand, likewise scaled by N_h.
res_demand_param_struct_adjusted = res_demand_param_struct.deepcopy()
res_demand_param_struct_adjusted.P_res_units = N_h
resdAgent = ResDemandAgentMpc(device_id=1, param_struct=res_demand_param_struct_adjusted)
resdAgent.set_omega_profile(omega_resd_profile)
grid.add_device(resdAgent)
################################

print(f"Time to create dewh's':{time.time() - st}")

# (controller_type, is_deterministic) descriptor for each available control
# strategy; is_deterministic is forwarded per-controller to
# grid.build_grid(deterministic_or_struct=...).
ControllerClass = namedtuple('ControllerClass', [
    'controller_type',
    'is_deterministic'
])
controllers_choices = StructDict(
    {'mpc_pb'        : ControllerClass(MpcController, True),
     'mpc_ce'        : ControllerClass(MpcController, False),
     'mpc_sb_reduced': ControllerClass(MpcController, False),
     'mpc_sb_full'   : ControllerClass(MpcController, False),
     'mpc_minmax'    : ControllerClass(MpcController, False),
     'thermo'        : ControllerClass(DewhTheromstatController, False)
     }
)
def sim_mpc(N_p=1, sim_steps=1, soft_top_mult=10.0, soft_bot_mult=1.0, num_scenarios=20, N_sb_reduced=8,
            controllers=None, save_text_postfix=""):
    """Run a closed-loop MPC simulation of the micro-grid for `sim_steps` steps.

    Each named controller in `controllers` (mapping name -> ControllerClass)
    is attached to every device, re-tuned at each step, solved, and stepped;
    per-controller cost and solve-time logs are printed each iteration and the
    full simulation dataframe is pickled to `<BASE_FILE>/sim_out`.

    :param N_p: prediction horizon (N_tilde = N_p + 1 samples).
    :param sim_steps: number of closed-loop simulation steps.
    :param soft_top_mult: multiplier on the per-horizon worst-case energy cost
        used as the soft-constraint weight for the upper temperature bound.
    :param soft_bot_mult: same, for the lower temperature bound.
    :param num_scenarios: scenarios drawn per step for the scenario-based
        ('mpc_sb_*') controllers.
    :param N_sb_reduced: horizon over which 'mpc_sb_reduced' enforces its
        scenario constraints.
    :param save_text_postfix: optional suffix appended to the output filename.
    :return: StructDict with the simulation dataframe and this call's locals.
    """
    N_tilde = N_p + 1
    controllers = controllers or {}
    deterministic_struct = {cname: controller.is_deterministic for cname, controller in controllers.items()}
    # Start from a clean slate: drop controllers left over from a previous run.
    for dev in itertools.chain([grid], grid.devices):
        dev.delete_all_controllers()
    # Attach every requested controller to every device. Non-MPC controllers
    # get a trivial horizon; non-DEWH devices fall back to NoController.
    for cname, controller in controllers.items():
        for dev in itertools.chain([grid], grid.devices):
            if isinstance(dev, DewhAgentMpc):
                if issubclass(controller.controller_type, MpcController):
                    dev.add_controller(cname, controller.controller_type, N_p=N_p)
                else:
                    dev.add_controller(cname, controller.controller_type, N_p=0, N_tilde=1)
            else:
                if issubclass(controller.controller_type, MpcController):
                    dev.add_controller(cname, controller.controller_type, N_p=N_p)
                else:
                    dev.add_controller(cname, NoController, N_p=0, N_tilde=1)
    # Reproducible per-device initial water temperatures.
    for dev in grid.devices:
        if isinstance(dev, DewhAgentMpc):
            dev.x_k = get_dewh_random_initial_state(dev.device_id)
    total_cost_struct = StructDict({cname: 0 for cname in controllers})
    grid.build_grid(k=0, deterministic_or_struct=deterministic_struct)
    for k in range(0, sim_steps):
        st = time.time()
        prices_tilde = grid.get_price_tilde_k(k=k)
        for cname, controller in controllers.items():
            if issubclass(controller.controller_type, MpcController):
                for dev in itertools.chain([grid], grid.devices):
                    if isinstance(dev, DewhAgentMpc) and issubclass(controller.controller_type, MpcController):
                        if cname.startswith('mpc'):
                            # Soft-constraint weights scale with the worst-case
                            # energy cost achievable over the horizon.
                            price_vec = prices_tilde[cname]
                            max_cost = np.sum(price_vec) * dewh_param_struct.P_h_Nom
                            q_mu_top = max_cost * soft_top_mult
                            q_mu_bot = max_cost * soft_bot_mult
                            dev.set_device_objective_atoms(controller_name=cname,
                                                           q_mu=np.hstack([q_mu_top, q_mu_bot]).ravel(order='c'))
                        if cname.startswith('mpc_sb'):
                            # Scenario-based controllers: sample demand
                            # scenarios and impose them as evolution constraints.
                            omega_tilde_scenarios = dev.get_omega_tilde_scenario(k, N_tilde=N_tilde,
                                                                                 num_scenarios=num_scenarios)
                            if cname == 'mpc_sb_reduced':
                                dev.controllers[cname].set_constraints(
                                    other_constraints=[
                                        dev.controllers[cname].gen_evo_constraints(
                                            N_tilde=N_sb_reduced,
                                            omega_scenarios_k=omega_tilde_scenarios)])
                            elif cname == 'mpc_sb_full':
                                dev.controllers[cname].set_constraints(
                                    other_constraints=[
                                        dev.controllers[cname].gen_evo_constraints(
                                            omega_scenarios_k=omega_tilde_scenarios)])
                        elif cname == 'mpc_minmax':
                            # Robust controller: constrain against the min and
                            # max daily demand envelopes.
                            omega_min, omega_max = get_min_max_dhw_scenario(k=k, N_tilde=N_tilde,
                                                                            min_dhw_day=min_dhw_day,
                                                                            max_dhw_day=max_dhw_day)
                            min_cons = dev.controllers[cname].gen_evo_constraints(N_tilde=N_tilde,
                                                                                  omega_tilde_k=omega_min)
                            max_cons = dev.controllers[cname].gen_evo_constraints(N_tilde=N_tilde,
                                                                                  omega_tilde_k=omega_max)
                            dev.controllers[cname].set_constraints(
                                other_constraints=[min_cons, max_cons])
                    elif isinstance(dev, GridAgentMpc) and issubclass(controller.controller_type, MpcController):
                        # Grid objective: energy prices over the horizon.
                        dev.controllers[cname].set_std_obj_atoms(q_z=prices_tilde[cname])
        grid.build_grid(k=k, deterministic_or_struct=deterministic_struct)
        grid.solve_grid_mpc(k=k, verbose=False, TimeLimit=20, MIPGap=1e-2)
        print(f'k={k}, N_p={N_p}')
        print(f"Time to solve including data transfer:{time.time() - st}")
        l_sim = grid.sim_step_k(k=k)  # advance the simulated plant one step
        print(f"Total Looptime:{time.time() - st}")
        solve_times_struct = StructDict()
        for cname in controllers:
            total_cost_struct[cname] += grid.sim_logs[cname].get(k).cost
            solve_times_struct[cname] = (grid.sim_logs[cname].get(k).time_in_solver,
                                         grid.sim_logs[cname].get(k).time_solve_overall)
        print('Total_cost\n', total_cost_struct)
        print('Solve_times\n', solve_times_struct)
        print('\n')
    # Collect results, timestamp them at 15-minute resolution and pickle to disk.
    df_sim: pd.DataFrame = grid.grid_sim_dataframe
    df_sim.index = pd.date_range(start=time_0, periods=sim_steps, freq='15min')
    if save_text_postfix and not save_text_postfix.startswith('_'):
        save_text_postfix = '_' + save_text_postfix
    T_max = int(dewh_param_struct_adjusted.T_h_max)
    T_min = int(dewh_param_struct_adjusted.T_h_min)
    save_dir = fr'{BASE_FILE}/sim_out'
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)
    save_path = os.path.realpath(
        fr'{BASE_FILE}/sim_out/sim_Np_{N_p}_st_{int(soft_top_mult)}_sb_{int(soft_bot_mult)}_Ns_{num_scenarios}_'
        fr'Nsr_{N_sb_reduced}_Nh_{N_h}_Tmax_{T_max}_Tmin_{T_min}{save_text_postfix}.sim_out')
    df_sim.to_pickle(save_path)
    return StructDict(df_sim=df_sim, locals_vars=locals())
# Controller line-up for a typical experiment run.
controller_names = ['mpc_pb', 'mpc_ce', 'mpc_sb_reduced', 'mpc_sb_full', 'mpc_minmax', 'thermo']
controllers = controllers_choices.get_sub_struct(controller_names)
# NOTE(review): this rebuilds profiles for 50 heaters although only N_h (=20)
# exist, and overwrites the struct created above -- presumably leftover
# experimentation; confirm before relying on it.
omega_dewh_profiles_struct = get_actual_omega_dewh_profiles(actual_scenarios=omega_dhw_actual_scenarios.values,
                                                            N_h=50, size=max_steps)
#
# sim_mpc(N_p=24, sim_steps=5, controllers=controllers, num_scenarios=20, N_sb_reduced=6,
#         save_text_postfix=f'')
requestz/session.py | hanzhichao/requestz | 2 | 6615940 | <reponame>hanzhichao/requestz<filename>requestz/session.py<gh_stars>1-10
import io
import os
import datetime
import socket
import subprocess
import platform
import re
from urllib.parse import quote, urlencode, urlparse, urlunparse, urlsplit
from typing import Mapping
import pycurl
import certifi
from logz import log as logging
from requestz.response import Response
from requestz.request import Request
from requestz.utils import merge_dict, type_check
# Fallback redirect cap used when allow_redirects is truthy but not an int.
DEFAULT_REDIRECT_LIMIT = 30
# Default total timeout (seconds) applied by Session.request.
DEFAULT_TIMEOUT = 60
# Desktop-Chrome UA string set on every request unless overridden via headers.
DEFAULT_USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) " \
                     "Chrome/73.0.3683.103 Safari/537.36"
# Baseline request headers merged into (and overridable by) per-call headers.
DEFAULT_HEADERS = {
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) "
                  "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36",
    'Accept-Encoding': ', '.join(('gzip', 'deflate')),
    'Accept': '*/*',
    'Connection': 'keep-alive',
}
# Reference set of keyword arguments accepted by Session.request.
# NOTE(review): appears unreferenced in this module -- documentary only.
REQUEST_KWARGS = dict(params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None,
                      verify=None, allow_redirects=None, proxies=None, hooks=None, stream=None, cert=None)
class Session(object):
    """HTTP session built on top of pycurl.

    Holds per-session defaults (base_url, headers, cookies, params, timeout,
    redirect policy, TLS verification) that are merged into every request, and
    accumulates response cookies across requests. Usable as a context manager,
    which closes the underlying curl handle on exit.
    """

    def __init__(self):
        self.curl = pycurl.Curl()  # todo: session-wide default curl configuration
        self.base_url = None       # optional prefix joined onto relative urls
        self.headers = DEFAULT_HEADERS
        self.cookies = {}          # updated from every response
        self.params = {}           # default query params merged into each call
        self.timeout = DEFAULT_TIMEOUT
        self.allow_redirects = True
        self.auth = None       # NOTE(review): currently unused in this class
        self.cert = None       # NOTE(review): currently unused in this class
        self.trust_env = True  # NOTE(review): currently unused in this class
        self.verify = False

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Release the libcurl handle when leaving the `with` block.
        self.curl.close()
        self.curl = None

    def _set_method(self, method):
        """Set the HTTP verb on the curl handle (no-op for a falsy method)."""
        if not method:
            return
        try:
            self.curl.setopt(pycurl.CUSTOMREQUEST, method)
        except Exception as ex:
            logging.exception(ex)
            raise ValueError(f'不支持的请求方法 {method}')

    def _set_url(self, url):
        """Set the target url, prefixing base_url for relative urls."""
        if not url:
            return
        try:
            if not url.startswith('http'):
                if not self.base_url:
                    raise ValueError('url未已http开头,并且base_url未配置')
                else:
                    url = f'{self.base_url}/{url.lstrip("/")}'
            self.curl.setopt(pycurl.URL, url)
        except Exception as ex:
            logging.exception(ex)
            raise ValueError(f'url: {url} 不合法')

    def _set_user_agent(self, user_agent):
        """Set the User-Agent via the dedicated curl option."""
        try:
            self.curl.setopt(pycurl.USERAGENT, user_agent)
        except Exception:  # fix: was a bare `except:`
            # fix: message was missing its f-prefix and referenced an
            # undefined name `value` (would have raised NameError)
            raise ValueError(f'设置useragent: {user_agent}失败')

    def _set_headers(self, headers):
        """Apply request headers; a User-Agent entry is routed to its own option."""
        if not headers:
            return
        if isinstance(headers, Mapping):
            for key, value in headers.items():
                if key.lower() == 'user-agent':
                    self._set_user_agent(value)
                    break
            headers = headers.items()
        headers = [f'{key}: {value}' for key, value in headers]
        try:
            self.curl.setopt(pycurl.HTTPHEADER, headers)
        except Exception as ex:
            logging.exception(ex)
            raise ValueError(f'headers: {headers} 不合法')

    def _set_upload(self, body):
        """Stream a file object as the request body (upload mode)."""
        type_check(body, (io.TextIOWrapper, io.BufferedReader))
        try:
            self.curl.setopt(pycurl.UPLOAD, 1)
            self.curl.setopt(pycurl.READDATA, body)
        except Exception as ex:
            logging.exception(ex)
            raise ValueError(f'上传body: {body} 不合法')

    def _set_body(self, body):
        """Attach the request body; file objects are delegated to _set_upload."""
        if not body:
            return
        if isinstance(body, (io.TextIOWrapper, io.BufferedReader)):
            return self._set_upload(body)
        try:
            self.curl.setopt(pycurl.POSTFIELDS, body)
        except Exception as ex:
            logging.exception(ex)
            raise ValueError(f'body: {body} 不合法')

    def _set_files(self, files):
        """Attach multipart/form-data file uploads.

        `files` maps a form field name to either a file path string or a
        (path[, filename[, content_type]]) tuple/list.
        """
        if not files:
            return
        type_check(files, dict)
        files_data = []
        for key, value in files.items():
            type_check(value, (str, tuple, list))
            values = [value] if isinstance(value, str) else list(value)
            # fix: the path was previously only validated when extra items
            # (filename/content-type) were present
            file_path = values[0]
            if not os.path.exists(file_path):
                raise ValueError(f'文件路径: {value}不存在')
            title = (pycurl.FORM_FILE, pycurl.FORM_FILENAME, pycurl.FORM_CONTENTTYPE)
            # fix: build the flat (option, value, option, value, ...) tuple
            # pycurl's HTTPPOST expects; the original kept only the first
            # (option, value) pair, silently dropping filename/content-type.
            form = []
            for option, item in zip(title, values):
                form.extend((option, item))
            files_data.append((key, tuple(form)))
        try:
            self.curl.setopt(pycurl.HTTPPOST, files_data)
        except Exception as ex:
            logging.exception(ex)
            raise ex

    def _set_timeout(self, timeout):
        """Apply a total timeout (number) or (connect, read) pair; None is a no-op."""
        if timeout is None:
            # fix: send() defaults timeout to None, which previously crashed
            # inside type_check
            return
        type_check(timeout, (int, float, tuple, list))
        if isinstance(timeout, (int, float)):
            self.curl.setopt(pycurl.TIMEOUT, timeout)
        if isinstance(timeout, (tuple, list)):
            if len(timeout) < 2:
                raise ValueError(f'timeout: {timeout} 应至少包含两个元素')
            connection_timeout, download_timeout, *_ = timeout
            type_check(connection_timeout, (int, float))
            type_check(download_timeout, (int, float))
            self.curl.setopt(pycurl.CONNECTTIMEOUT, connection_timeout)
            self.curl.setopt(pycurl.TIMEOUT, download_timeout)  # todo

    def _set_allow_redirects(self, allow_redirects):
        """Enable redirect following; an int value caps the redirect count."""
        if allow_redirects:
            if not isinstance(allow_redirects, int):
                allow_redirects = DEFAULT_REDIRECT_LIMIT
            try:
                self.curl.setopt(pycurl.FOLLOWLOCATION, True)
                self.curl.setopt(pycurl.MAXREDIRS, allow_redirects)
            except Exception as ex:
                logging.error(f'设置allow_redirects {allow_redirects}失败')

    def _set_verify(self, verify):
        """Point curl at certifi's CA bundle when certificate checking is on."""
        if verify:
            try:
                self.curl.setopt(pycurl.CAINFO, certifi.where())
            except Exception as ex:
                logging.error(f'设置verify {verify}失败')

    def _get_clint_ip(self):
        """Discover the local outbound IP via a UDP "connect" (no packet sent)."""
        # fix: create the socket before the try block so the `finally` clause
        # cannot reference an unbound name if construction fails
        s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        try:
            s.connect(('8.8.8.8', 80))
            ip = s.getsockname()[0]
        finally:
            s.close()
        return ip

    def _ping(self, domain):
        """Single-shot ping; returns the round-trip time in ms, or None on failure."""
        param = '-n' if platform.system().lower() == 'windows' else '-c'
        command = f'ping {param} 1 {domain}'
        print(command)
        p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
        out = p.stdout.read().decode('utf-8')
        result, *_ = re.findall(r'time=(\d.*)\sms', out, re.M) or ['']
        if result and result.isdigit():
            return float(result)

    def _set_response(self, response):
        """Populate status, timing and transfer statistics from the curl handle."""
        response.status_code = self.curl.getinfo(pycurl.HTTP_CODE)
        response.ok = 200 <= response.status_code < 400  # todo
        response.client_ip = self._get_clint_ip()  # todo
        response.elapsed = datetime.timedelta(seconds=self.curl.getinfo(pycurl.TOTAL_TIME))
        response.stats['total_time'] = self.curl.getinfo(pycurl.TOTAL_TIME)
        response.stats['namelookup_time'] = self.curl.getinfo(pycurl.NAMELOOKUP_TIME)
        response.stats['connect_time'] = self.curl.getinfo(pycurl.CONNECT_TIME)
        response.stats['pretransfer_time'] = self.curl.getinfo(pycurl.PRETRANSFER_TIME)
        response.stats['starttransfer_time'] = self.curl.getinfo(pycurl.STARTTRANSFER_TIME)
        response.stats['redirect_time'] = self.curl.getinfo(pycurl.REDIRECT_TIME)
        response.stats['ssl_time'] = self.curl.getinfo(pycurl.APPCONNECT_TIME)  # TLS handshake time
        response.stats['num_connects'] = self.curl.getinfo(pycurl.NUM_CONNECTS)
        response.stats['redirect_count'] = self.curl.getinfo(pycurl.REDIRECT_COUNT)
        response.stats['size_upload'] = self.curl.getinfo(pycurl.SIZE_UPLOAD)
        response.stats['size_download'] = self.curl.getinfo(pycurl.SIZE_DOWNLOAD)
        response.stats['content_length_upload'] = self.curl.getinfo(pycurl.CONTENT_LENGTH_UPLOAD)
        response.stats['content_length_download'] = self.curl.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD)
        response.stats['speed_upload'] = self.curl.getinfo(pycurl.SPEED_UPLOAD)
        response.stats['speed_download'] = self.curl.getinfo(pycurl.SPEED_DOWNLOAD)

    def send(self, request, timeout=None, verify=None, allow_redirects=None):
        """Configure pycurl from a prepared request, perform it and build the Response."""
        response = Response()
        response.request = request
        response.url = request.url
        self._set_method(request.method)
        self._set_url(request.url)
        self._set_user_agent(DEFAULT_USER_AGENT)  # default User-Agent
        self._set_headers(request.headers)
        self._set_body(request.body)
        self._set_files(request.files)
        self._set_timeout(timeout)
        self._set_verify(verify)
        self._set_allow_redirects(allow_redirects)
        self.curl.setopt(pycurl.HEADERFUNCTION, response.handle_header_line)
        # perform the transfer
        response.raw = self.curl.perform_rb()
        self._set_response(response)
        # carry response cookies over into the session
        self.cookies.update(response.cookies)
        return response

    def super_send(self, requests, config=None, times=None, concurreny=None, is_async=False, loop_until=None):
        # Placeholder for batched / repeated / concurrent sending.
        pass

    def request(self, method=None, url=None, params=None, headers=None, cookies=None, data=None, json=None,
                files=None, timeout=None, verify=None, allow_redirects=None, proxies=None, hooks=None,
                stream=None, cert=None):
        """Merge per-call arguments with session defaults and dispatch the request."""
        if self.base_url and not url.startswith('http'):
            url = '/'.join((self.base_url.rstrip('/'), url.lstrip('/')))
        params = merge_dict(self.params, params)
        headers = merge_dict(self.headers, headers)
        cookies = merge_dict(self.cookies, cookies)
        # Request.prepare parses and normalises the request pieces
        request = Request().prepare(method, url, headers, cookies, params, data, json, files)
        timeout = timeout or self.timeout
        verify = verify if verify is not None else self.verify
        # fix: was `verify if allow_redirects is not None else ...`, which
        # substituted the verify flag for the redirect policy
        allow_redirects = allow_redirects if allow_redirects is not None else self.allow_redirects
        res = self.send(request, timeout, verify, allow_redirects)
        return res

    # TODO: simplify/deduplicate the verb helpers.
    # NOTE(review): verify defaults to False (not None) here, so a
    # session-level verify=True is ignored unless verify is passed explicitly
    # -- confirm whether that is intended.
    def get(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
        return self.request('GET', url, params, headers, cookies, data, json, files, timeout, verify)

    def post(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
        return self.request('POST', url, params, headers, cookies, data, json, files, timeout, verify)

    def head(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
        return self.request('HEAD', url, params, headers, cookies, data, json, files, timeout, verify)

    def options(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
        return self.request('OPTIONS', url, params, headers, cookies, data, json, files, timeout, verify)

    def put(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
        return self.request('PUT', url, params, headers, cookies, data, json, files, timeout, verify)

    def patch(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
        return self.request('PATCH', url, params, headers, cookies, data, json, files, timeout, verify)

    def delete(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
        return self.request('DELETE', url, params, headers, cookies, data, json, files, timeout, verify)
| import io
import os
import datetime
import socket
import subprocess
import platform
import re
from urllib.parse import quote, urlencode, urlparse, urlunparse, urlsplit
from typing import Mapping
import pycurl
import certifi
from logz import log as logging
from requestz.response import Response
from requestz.request import Request
from requestz.utils import merge_dict, type_check
# Fallback redirect cap used when allow_redirects is truthy but not an int.
DEFAULT_REDIRECT_LIMIT = 30
# Default total timeout (seconds) applied by Session.request.
DEFAULT_TIMEOUT = 60
# Desktop-Chrome UA string set on every request unless overridden via headers.
DEFAULT_USER_AGENT = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) AppleWebKit/537.36 (KHTML, like Gecko) " \
                     "Chrome/73.0.3683.103 Safari/537.36"
# Baseline request headers merged into (and overridable by) per-call headers.
DEFAULT_HEADERS = {
    "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_4) "
                  "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36",
    'Accept-Encoding': ', '.join(('gzip', 'deflate')),
    'Accept': '*/*',
    'Connection': 'keep-alive',
}
# Reference set of keyword arguments accepted by Session.request.
# NOTE(review): appears unreferenced in this module -- documentary only.
REQUEST_KWARGS = dict(params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None,
                      verify=None, allow_redirects=None, proxies=None, hooks=None, stream=None, cert=None)
class Session(object):
def __init__(self):
self.curl = pycurl.Curl() # todo session默认配置
self.base_url = None
self.headers = DEFAULT_HEADERS
self.cookies = {}
self.params = {}
self.timeout = DEFAULT_TIMEOUT
self.allow_redirects = True
self.auth = None
self.cert = None
self.trust_env = True
self.verify = False
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.curl.close()
self.curl = None
def _set_method(self, method):
if not method:
return
try:
self.curl.setopt(pycurl.CUSTOMREQUEST, method)
except Exception as ex:
logging.exception(ex)
raise ValueError(f'不支持的请求方法 {method}')
def _set_url(self, url):
if not url:
return
try:
if not url.startswith('http'):
if not self.base_url:
raise ValueError('url未已http开头,并且base_url未配置')
else:
url = f'{self.base_url}/{url.lstrip("/")}'
self.curl.setopt(pycurl.URL, url)
except Exception as ex:
logging.exception(ex)
raise ValueError(f'url: {url} 不合法')
def _set_user_agent(self, user_agent):
try:
self.curl.setopt(pycurl.USERAGENT, user_agent)
except:
raise ValueError('设置useragent: {value}失败')
def _set_headers(self, headers):
if not headers:
return
if isinstance(headers, Mapping):
for key, value in headers.items():
if key.lower() == 'user-agent':
self._set_user_agent(value)
break
headers = headers.items()
headers = [f'{key}: {value}' for key, value in headers]
try:
self.curl.setopt(pycurl.HTTPHEADER, headers)
except Exception as ex:
logging.exception(ex)
raise ValueError(f'headers: {headers} 不合法')
def _set_upload(self, body):
type_check(body, (io.TextIOWrapper, io.BufferedReader))
# if not isinstance(body, (io.TextIOWrapper, io.BufferedReader)):
# raise TypeError(f'上传body: {type(body)} 只支持io.TextIOWrapper, io.BufferedReader')
try:
self.curl.setopt(pycurl.UPLOAD, 1)
self.curl.setopt(pycurl.READDATA, body)
except Exception as ex:
logging.exception(ex)
raise ValueError(f'上传body: {body} 不合法')
def _set_body(self, body):
if not body:
return
if isinstance(body, (io.TextIOWrapper, io.BufferedReader)):
return self._set_upload(body)
try:
self.curl.setopt(pycurl.POSTFIELDS, body)
except Exception as ex:
logging.exception(ex)
raise ValueError(f'body: {body} 不合法')
def _set_files(self, files):
if not files:
return
type_check(files, dict)
# if not isinstance(files, dict):
# raise TypeError(f'files: {files} 必须为字典格式')
files_data = []
for key, value in files.items():
type_check(value, (str, tuple, list))
# if not isinstance(value, (str, tuple, list)):
# raise TypeError(f'value: {value} 只支持str, tuple, list格式')
if isinstance(value, str):
values = [value]
else:
values = value
if len(values) > 1:
file_path = values[0]
if not os.path.exists(file_path):
raise ValueError(f'文件路径: {value}不存在')
title = (pycurl.FORM_FILE, pycurl.FORM_FILENAME, pycurl.FORM_CONTENTTYPE)
files_data.append((key, tuple(zip(title, values))[0]))
try:
self.curl.setopt(pycurl.HTTPPOST, files_data)
except Exception as ex:
logging.exception(ex)
raise ex
# raise ValueError(f'value: {value} 不合法')
def _set_timeout(self, timeout):
type_check(timeout, (int, float, tuple, list))
# if not isinstance(timeout, (int, float, tuple, list)):
# raise TypeError(f'timeout: {timeout} 只支持int,float, tuple, list格式')
if isinstance(timeout, (int, float)):
self.curl.setopt(pycurl.TIMEOUT, timeout)
if isinstance(timeout, (tuple, list)):
if len(timeout) < 2:
raise ValueError(f'timeout: {timeout} 应至少包含两个元素')
connection_timeout, download_timeout,*_ = timeout
type_check(connection_timeout, (int, float))
type_check(download_timeout, (int, float))
# if not all((isinstance(connection_timeout, (int, float)), isinstance(download_timeout, (int, float)))):
# raise TypeError(f'timeout: {timeout} 中前两个元素应为数字类型')
self.curl.setopt(pycurl.CONNECTTIMEOUT, connection_timeout)
self.curl.setopt(pycurl.TIMEOUT, download_timeout) # todo
def _set_allow_redirects(self, allow_redirects):
if allow_redirects:
if not isinstance(allow_redirects, int):
allow_redirects = DEFAULT_REDIRECT_LIMIT
try:
self.curl.setopt(pycurl.FOLLOWLOCATION, True)
self.curl.setopt(pycurl.MAXREDIRS, allow_redirects)
except Exception as ex:
logging.error(f'设置allow_redirects {allow_redirects}失败')
def _set_verify(self, verify):
if verify:
try:
self.curl.setopt(pycurl.CAINFO, certifi.where())
except Exception as ex:
logging.error(f'设置verify {verify}失败')
def _get_clint_ip(self):
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(('8.8.8.8', 80))
ip = s.getsockname()[0]
finally:
s.close()
return ip
def _ping(self, domain):
param = '-n' if platform.system().lower() == 'windows' else '-c'
command = f'ping {param} 1 {domain}'
print(command)
p = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE)
out = p.stdout.read().decode('utf-8')
result, *_ = re.findall(r'time=(\d.*)\sms', out, re.M) or ['']
if result and result.isdigit():
return float(result)
def _set_response(self, response):
response.status_code = self.curl.getinfo(pycurl.HTTP_CODE)
response.ok = 200 <= response.status_code < 400 # todo
# response.elapsed = self.curl.getinfo(pycurl.TOTAL_TIME)
response.client_ip = self._get_clint_ip() # todo
response.elapsed = datetime.timedelta(seconds=self.curl.getinfo(pycurl.TOTAL_TIME))
response.stats['total_time'] = self.curl.getinfo(pycurl.TOTAL_TIME)
response.stats['namelookup_time'] = self.curl.getinfo(pycurl.NAMELOOKUP_TIME)
response.stats['connect_time'] = self.curl.getinfo(pycurl.CONNECT_TIME)
response.stats['pretransfer_time'] = self.curl.getinfo(pycurl.PRETRANSFER_TIME)
response.stats['starttransfer_time'] = self.curl.getinfo(pycurl.STARTTRANSFER_TIME)
response.stats['redirect_time'] = self.curl.getinfo(pycurl.REDIRECT_TIME)
response.stats['ssl_time'] = self.curl.getinfo(pycurl.APPCONNECT_TIME) # 握手时间
response.stats['num_connects'] = self.curl.getinfo(pycurl.NUM_CONNECTS)
response.stats['redirect_count'] = self.curl.getinfo(pycurl.REDIRECT_COUNT)
response.stats['size_upload'] = self.curl.getinfo(pycurl.SIZE_UPLOAD)
response.stats['size_download'] = self.curl.getinfo(pycurl.SIZE_DOWNLOAD)
response.stats['content_length_upload'] = self.curl.getinfo(pycurl.CONTENT_LENGTH_UPLOAD)
response.stats['content_length_download'] = self.curl.getinfo(pycurl.CONTENT_LENGTH_DOWNLOAD)
response.stats['speed_upload'] = self.curl.getinfo(pycurl.SPEED_UPLOAD)
response.stats['speed_download'] = self.curl.getinfo(pycurl.SPEED_DOWNLOAD)
def send(self, request, timeout=None, verify=None, allow_redirects=None):
"""负责设置pycurl并发送请求,及组装响应"""
response = Response()
response.request = request
response.url = request.url
self._set_method(request.method)
self._set_url(request.url)
self._set_user_agent(DEFAULT_USER_AGENT) # 设置默认User-Agent
self._set_headers(request.headers)
self._set_body(request.body)
self._set_files(request.files)
self._set_timeout(timeout)
self._set_verify(verify)
self._set_allow_redirects(allow_redirects)
self.curl.setopt(pycurl.HEADERFUNCTION, response.handle_header_line)
# 发送请求
response.raw = self.curl.perform_rb()
self._set_response(response)
# 更新会话cookies
self.cookies.update(response.cookies)
return response
def super_send(self, requests, config=None, times=None, concurreny=None, is_async=False, loop_until=None):
pass
def request(self, method=None, url=None, params=None, headers=None, cookies=None, data=None, json=None, files=None,
timeout=None, verify=None, allow_redirects=None, proxies=None, hooks=None, stream=None, cert=None):
"""负责整合session和参数中的设置"""
if self.base_url and not url.startswith('http'):
url = '/'.join((self.base_url.rstrip('/'), url.lstrip('/')))
params = merge_dict(self.params, params)
headers = merge_dict(self.headers, headers)
cookies = merge_dict(self.cookies, cookies)
# 负责解析和格式化请求
request = Request().prepare(method, url, headers, cookies, params, data, json, files)
timeout = timeout or self.timeout
verify = verify if verify is not None else self.verify
allow_redirects = verify if allow_redirects is not None else self.allow_redirects
res = self.send(request, timeout, verify, allow_redirects)
return res
# todo 简化
def get(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
return self.request('GET', url, params, headers, cookies, data, json, files, timeout, verify)
def post(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
return self.request('POST', url, params, headers, cookies, data, json, files, timeout, verify)
def head(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
return self.request('HEAD', url, params, headers, cookies, data, json, files, timeout, verify)
def options(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
return self.request('OPTIONS', url, params, headers, cookies, data, json, files, timeout, verify)
def put(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
return self.request('PUT', url, params, headers, cookies, data, json, files, timeout, verify)
def patch(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
return self.request('PATCH', url, params, headers, cookies, data, json, files, timeout, verify)
def delete(self, url, params=None, headers=None, cookies=None, data=None, json=None, files=None, timeout=None, verify=False):
return self.request('DELETE', url, params, headers, cookies, data, json, files, timeout, verify) | en | 0.161606 | # todo session默认配置 # if not isinstance(body, (io.TextIOWrapper, io.BufferedReader)): # raise TypeError(f'上传body: {type(body)} 只支持io.TextIOWrapper, io.BufferedReader') # if not isinstance(files, dict): # raise TypeError(f'files: {files} 必须为字典格式') # if not isinstance(value, (str, tuple, list)): # raise TypeError(f'value: {value} 只支持str, tuple, list格式') # raise ValueError(f'value: {value} 不合法') # if not isinstance(timeout, (int, float, tuple, list)): # raise TypeError(f'timeout: {timeout} 只支持int,float, tuple, list格式') # if not all((isinstance(connection_timeout, (int, float)), isinstance(download_timeout, (int, float)))): # raise TypeError(f'timeout: {timeout} 中前两个元素应为数字类型') # todo # todo # response.elapsed = self.curl.getinfo(pycurl.TOTAL_TIME) # todo # 握手时间 负责设置pycurl并发送请求,及组装响应 # 设置默认User-Agent # 发送请求 # 更新会话cookies 负责整合session和参数中的设置 # 负责解析和格式化请求 # todo 简化 | 2.321885 | 2 |
examples/use_case_4.py | AICPS/roadscene2vec | 8 | 6615941 | import os
import sys
sys.path.append(os.path.dirname(sys.path[0]))
# import roadscene2vec.data.real_preprocessor as ip
from roadscene2vec.util.config_parser import configuration
# import roadscene2vec.scene_graph.extraction.image_extractor as RealEx
import roadscene2vec
from roadscene2vec.learning.util.scenegraph_trainer import Scenegraph_Trainer
sys.modules['util'] = roadscene2vec.util
import torch.nn as nn
import wandb
def train():
training_config = configuration(r"use_case_4_config.yaml",from_function = True)
if training_config.wandb_configuration['project'] != None and training_config.wandb_configuration['entity'] != None:
wandb_arg= wandb.init(project=training_config.wandb_configuration['project'], entity=training_config.wandb_configuration['entity'])
else:
wandb_arg = None
trainer = Scenegraph_Trainer(training_config, wandb_arg)
trainer.load_model() #load the proper model using the trainer
trainer.loss_func = nn.CrossEntropyLoss() #set loss function
trainer.build_transfer_learning_dataset()
outputs_test, labels_test, metrics = trainer.evaluate_transfer_learning()
print(metrics)
if __name__ == "__main__":
train() #Assess risk of transfer dataset
| import os
import sys
sys.path.append(os.path.dirname(sys.path[0]))
# import roadscene2vec.data.real_preprocessor as ip
from roadscene2vec.util.config_parser import configuration
# import roadscene2vec.scene_graph.extraction.image_extractor as RealEx
import roadscene2vec
from roadscene2vec.learning.util.scenegraph_trainer import Scenegraph_Trainer
sys.modules['util'] = roadscene2vec.util
import torch.nn as nn
import wandb
def train():
training_config = configuration(r"use_case_4_config.yaml",from_function = True)
if training_config.wandb_configuration['project'] != None and training_config.wandb_configuration['entity'] != None:
wandb_arg= wandb.init(project=training_config.wandb_configuration['project'], entity=training_config.wandb_configuration['entity'])
else:
wandb_arg = None
trainer = Scenegraph_Trainer(training_config, wandb_arg)
trainer.load_model() #load the proper model using the trainer
trainer.loss_func = nn.CrossEntropyLoss() #set loss function
trainer.build_transfer_learning_dataset()
outputs_test, labels_test, metrics = trainer.evaluate_transfer_learning()
print(metrics)
if __name__ == "__main__":
train() #Assess risk of transfer dataset
| en | 0.650227 | # import roadscene2vec.data.real_preprocessor as ip # import roadscene2vec.scene_graph.extraction.image_extractor as RealEx #load the proper model using the trainer #set loss function #Assess risk of transfer dataset | 2.251426 | 2 |
tests/test_beancount_chase.py | ArthurFDLR/beancount-chase | 4 | 6615942 | import pathlib
import pytest
import datetime
from beancount_chase import __version__, ChaseBankImporter
from beancount.core.number import Decimal
TEST_FILE_NAME = 'test_file.csv'
TEST_DATE = datetime.date(2020, 12, 1)
def test_version():
assert __version__ == '0.1.0'
@pytest.fixture
def filename():
return pathlib.Path(__file__).parent.absolute() / TEST_FILE_NAME
@pytest.fixture
def importer():
return ChaseBankImporter('Assets:CB')
def test_identify(importer, filename):
with open(filename) as fd:
assert importer.identify(fd)
def test_file_date(importer, filename):
with open(filename) as fd:
assert importer.file_date(fd) == TEST_DATE
def test_extract(importer, filename):
with open(filename) as fd:
operations = importer.extract(fd)
operations_test = [
{
'date': datetime.date(2020, 12, 1),
'amount': Decimal('-59.17'),
'payee': 'Desc Debit 2 11/30',
},
{
'date': datetime.date(2020, 10, 13),
'amount': Decimal('100.00'),
'payee': 'Desc Credit 2',
},
{
'date': datetime.date(2020, 10, 5),
'amount': Decimal('-11.78'),
'payee': 'Desc Debit 1 10/04',
},
{
'date': datetime.date(2020, 10, 5),
'amount': Decimal('465.53'),
'payee': 'Desc Credit 1 1465436878 WEB ID: 453233521',
},
]
op_name_test = [op_test['payee'] for op_test in operations_test]
assert len(operations) == len(operations_test)
for op in operations:
assert op.payee in op_name_test, 'Missing operation'
op_test = operations_test[op_name_test.index(op.payee)]
assert op.payee == op_test['payee'], 'Wrong payee name'
assert op.date == op_test['date'], 'Wrong date'
assert len(op.postings) == 1
assert op.postings[0].account == 'Assets:CB', 'Wrong account name'
assert op.postings[0].units.currency == 'USD', 'Wrong currency'
assert op.postings[0].units.number == op_test['amount'], 'Wrong amount'
| import pathlib
import pytest
import datetime
from beancount_chase import __version__, ChaseBankImporter
from beancount.core.number import Decimal
TEST_FILE_NAME = 'test_file.csv'
TEST_DATE = datetime.date(2020, 12, 1)
def test_version():
assert __version__ == '0.1.0'
@pytest.fixture
def filename():
return pathlib.Path(__file__).parent.absolute() / TEST_FILE_NAME
@pytest.fixture
def importer():
return ChaseBankImporter('Assets:CB')
def test_identify(importer, filename):
with open(filename) as fd:
assert importer.identify(fd)
def test_file_date(importer, filename):
with open(filename) as fd:
assert importer.file_date(fd) == TEST_DATE
def test_extract(importer, filename):
with open(filename) as fd:
operations = importer.extract(fd)
operations_test = [
{
'date': datetime.date(2020, 12, 1),
'amount': Decimal('-59.17'),
'payee': 'Desc Debit 2 11/30',
},
{
'date': datetime.date(2020, 10, 13),
'amount': Decimal('100.00'),
'payee': 'Desc Credit 2',
},
{
'date': datetime.date(2020, 10, 5),
'amount': Decimal('-11.78'),
'payee': 'Desc Debit 1 10/04',
},
{
'date': datetime.date(2020, 10, 5),
'amount': Decimal('465.53'),
'payee': 'Desc Credit 1 1465436878 WEB ID: 453233521',
},
]
op_name_test = [op_test['payee'] for op_test in operations_test]
assert len(operations) == len(operations_test)
for op in operations:
assert op.payee in op_name_test, 'Missing operation'
op_test = operations_test[op_name_test.index(op.payee)]
assert op.payee == op_test['payee'], 'Wrong payee name'
assert op.date == op_test['date'], 'Wrong date'
assert len(op.postings) == 1
assert op.postings[0].account == 'Assets:CB', 'Wrong account name'
assert op.postings[0].units.currency == 'USD', 'Wrong currency'
assert op.postings[0].units.number == op_test['amount'], 'Wrong amount'
| none | 1 | 2.261374 | 2 | |
odd-or-even.py | ampise/python | 0 | 6615943 | # -----------------------------------------------------------------------------------------------
# Print odd and even numbers between 0 and a range provided by the user.
# -----------------------------------------------------------------------------------------------
number = input("Enter a number: ")
for x in range(number + 1):
oddeven = (x % 2)
if oddeven == 0:
# True block
print(str(x) + " is even")
else:
# False block
print(str(x) + " is odd")
| # -----------------------------------------------------------------------------------------------
# Print odd and even numbers between 0 and a range provided by the user.
# -----------------------------------------------------------------------------------------------
number = input("Enter a number: ")
for x in range(number + 1):
oddeven = (x % 2)
if oddeven == 0:
# True block
print(str(x) + " is even")
else:
# False block
print(str(x) + " is odd")
| en | 0.432211 | # ----------------------------------------------------------------------------------------------- # Print odd and even numbers between 0 and a range provided by the user. # ----------------------------------------------------------------------------------------------- # True block # False block | 4.364676 | 4 |
src/day_15.py | bengosney/Advent-Of-Code-2021 | 0 | 6615944 | <gh_stars>0
# Standard Library
# Standard Library
from collections import defaultdict
from heapq import heappop, heappush
# First Party
from utils import read_input
Position = tuple[int, int]
Grid = dict[Position, int]
Graph = dict[Position, list[tuple[Position, int]]]
def parse_input(input: str) -> Grid:
grid: Grid = {}
for y, row in enumerate(input.split("\n")):
for x, val in enumerate(row):
grid[(x, y)] = int(val)
return grid
def get_neighbors(position: Position) -> list[Position]:
x, y = position
return [(x + 1, y), (x, y + 1), (x - 1, y), (x, y - 1)]
def grid_max(grid: Grid) -> Position:
x, y = zip(*grid)
return (max(x), max(y))
def dijkstra(graph: Graph, start: Position, target: Position) -> int:
queue: list[tuple[int, Position]] = [(0, start)]
seen: set[Position] = set()
mins = {start: 0}
while queue:
(cost, current) = heappop(queue)
if current not in seen:
seen.add(current)
if current == target:
return cost
for to, value in graph.get(current, ()):
if to in seen:
continue
prev = mins.get(to)
next = cost + value
if prev is None or next < prev:
mins[to] = next
heappush(queue, (next, to))
return int("inf")
def solve(grid: Grid, target: Position) -> int:
graph: Graph = defaultdict(list)
for position in grid.keys():
for neighbor in get_neighbors(position):
if neighbor in grid:
graph[position].append((neighbor, grid[neighbor]))
return dijkstra(graph, (0, 0), target)
def part_1(input: str) -> int:
grid = parse_input(input)
return solve(grid, grid_max(grid))
def expand_grid(grid: Grid, w: int, h: int) -> Grid:
for x in range(w, w * 5):
for y in range(h):
grid[(x, y)] = grid.get((x, y), grid[(x - w, y)] % 9 + 1)
for x in range(w * 5):
for y in range(h, h * 5):
grid[(x, y)] = grid.get((x, y), grid[(x, y - w)] % 9 + 1)
return grid
def part_2(input: str) -> int:
grid = parse_input(input)
(w, h) = grid_max(grid)
grid = expand_grid(grid, w + 1, h + 1)
return solve(grid, grid_max(grid))
# -- Tests
def get_example_input() -> str:
return """1163751742
1381373672
2136511328
3694931569
7463417111
1319128137
1359912421
3125421639
1293138521
2311944581"""
def test_part_1():
input = get_example_input()
assert part_1(input) == 40
def test_part_2():
input = get_example_input()
assert part_2(input) == 315
def test_part_1_real():
input = read_input(__file__)
assert part_1(input) is not None
def test_part_2_real():
input = read_input(__file__)
assert part_2(input) == 2907
# -- Main
if __name__ == "__main__":
input = read_input(__file__)
print(f"Part1: {part_1(input)}")
print(f"Part2: {part_2(input)}")
| # Standard Library
# Standard Library
from collections import defaultdict
from heapq import heappop, heappush
# First Party
from utils import read_input
Position = tuple[int, int]
Grid = dict[Position, int]
Graph = dict[Position, list[tuple[Position, int]]]
def parse_input(input: str) -> Grid:
grid: Grid = {}
for y, row in enumerate(input.split("\n")):
for x, val in enumerate(row):
grid[(x, y)] = int(val)
return grid
def get_neighbors(position: Position) -> list[Position]:
x, y = position
return [(x + 1, y), (x, y + 1), (x - 1, y), (x, y - 1)]
def grid_max(grid: Grid) -> Position:
x, y = zip(*grid)
return (max(x), max(y))
def dijkstra(graph: Graph, start: Position, target: Position) -> int:
queue: list[tuple[int, Position]] = [(0, start)]
seen: set[Position] = set()
mins = {start: 0}
while queue:
(cost, current) = heappop(queue)
if current not in seen:
seen.add(current)
if current == target:
return cost
for to, value in graph.get(current, ()):
if to in seen:
continue
prev = mins.get(to)
next = cost + value
if prev is None or next < prev:
mins[to] = next
heappush(queue, (next, to))
return int("inf")
def solve(grid: Grid, target: Position) -> int:
graph: Graph = defaultdict(list)
for position in grid.keys():
for neighbor in get_neighbors(position):
if neighbor in grid:
graph[position].append((neighbor, grid[neighbor]))
return dijkstra(graph, (0, 0), target)
def part_1(input: str) -> int:
grid = parse_input(input)
return solve(grid, grid_max(grid))
def expand_grid(grid: Grid, w: int, h: int) -> Grid:
for x in range(w, w * 5):
for y in range(h):
grid[(x, y)] = grid.get((x, y), grid[(x - w, y)] % 9 + 1)
for x in range(w * 5):
for y in range(h, h * 5):
grid[(x, y)] = grid.get((x, y), grid[(x, y - w)] % 9 + 1)
return grid
def part_2(input: str) -> int:
grid = parse_input(input)
(w, h) = grid_max(grid)
grid = expand_grid(grid, w + 1, h + 1)
return solve(grid, grid_max(grid))
# -- Tests
def get_example_input() -> str:
return """1163751742
1381373672
2136511328
3694931569
7463417111
1319128137
1359912421
3125421639
1293138521
2311944581"""
def test_part_1():
input = get_example_input()
assert part_1(input) == 40
def test_part_2():
input = get_example_input()
assert part_2(input) == 315
def test_part_1_real():
input = read_input(__file__)
assert part_1(input) is not None
def test_part_2_real():
input = read_input(__file__)
assert part_2(input) == 2907
# -- Main
if __name__ == "__main__":
input = read_input(__file__)
print(f"Part1: {part_1(input)}")
print(f"Part2: {part_2(input)}") | en | 0.277467 | # Standard Library # Standard Library # First Party # -- Tests 1163751742 1381373672 2136511328 3694931569 7463417111 1319128137 1359912421 3125421639 1293138521 2311944581 # -- Main | 3.386299 | 3 |
subProcess.py | RichardCouperthwaite/BAREFOOT-Tutorial | 0 | 6615945 | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 3 06:17:35 2021
@author: <NAME>
"""
from pickle import dump, load
import concurrent.futures
import numpy as np
from sys import argv
from time import sleep, time
from multiprocessing import cpu_count
from util import calculate_KG, calculate_EI, fused_calculate, calculate_TS, calculate_GPHedge, calculate_Greedy, calculate_PI, calculate_UCB, calculate_EHVI, fused_EHVI
import logging
if __name__ == "__main__":
"""
This module is used within the BAREFOOT framework to run as a multi-node instance
This module runs on each of the subprocess nodes and controls the calculations on that
node.
"""
param = argv
# log_level = logging.DEBUG
log_level = logging.INFO
# create logging instance to record progress of the calculations
logger = logging.getLogger('BAREFOOT.subprocess')
logger.setLevel(log_level)
fh = logging.FileHandler('BAREFOOT.log')
fh.setLevel(log_level)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
# add the handler to the logger
logger.addHandler(fh)
logger.info("Subprocess {} | started".format(param[1]))
# Create a file to show that the subprocess has succesfully started
with open("subprocess/sub{}.start".format(param[1]), 'w') as f:
f.write("subprocess started successfully\n\n")
not_close = True
# keep the code running until it is shut down by the main node
while not_close:
try:
try:
# the main code will create these files when it is time for a
# calculation to be done on the subprocess node
with open("subprocess/sub{}.control".format(param[1]), 'rb') as f:
control_param = load(f)
with open("subprocess/sub{}.start".format(param[1]), 'a') as f:
f.write("Control File Found - {} | {}\n".format(control_param[0], control_param[1]))
logger.debug("Control File Found - {} | {}\n".format(control_param[0], control_param[1]))
except FileExistsError:
logger.debug("Control File could not be found")
control_param = [1,1]
# The main node changes the control_param value to 0 to indicate that
# there is a calculation to complete
if control_param[0] == 0:
logger.info("{} | New Subprocess calculation started\n".format(param[1]))
# The main code also specifies which acquisition function to use
if control_param[2] == "KG":
function = calculate_KG
elif control_param[2] == "EI":
function = calculate_EI
elif control_param[2] == "TS":
function = calculate_TS
elif control_param[2] == "Hedge":
function = calculate_GPHedge
elif control_param[2] == "Greedy":
function = calculate_Greedy
elif control_param[2] == "PI":
function = calculate_PI
elif control_param[2] == "UCB":
function = calculate_UCB
elif control_param[2] == "EHVI":
function = calculate_EHVI
start = time()
# there is a difference between the calculations required for the
# reduced order modesl (iteration) and the truth model (fused)
if control_param[1] == "iteration":
# Parameters for the calculations are determined in the
# main node and are saved in .dump files for each subprocess
with open("subprocess/{}.dump".format(param[1]), 'rb') as f:
parameters = load(f)
logger.debug("{} | Reduced Order Model Calculation Started | {} Calculations".format(param[1], len(parameters)))
kg_output = []
count = 0
# Calculations are conducted in parallel using the concurrent.futures appraoch
with concurrent.futures.ProcessPoolExecutor(cpu_count()) as executor:
for result_from_process in zip(parameters, executor.map(function,parameters)):
params, results = result_from_process
kg_output.append(results)
count += 1
if count % 200 == 0:
logger.info("{} | {} / {} Calculations Completed".format(param[1], count, len(parameters)))
# Once calculations are completed, they are saved to the .output file for the main node to retrieve
with open("subprocess/{}.output".format(param[1]), 'wb') as f:
dump(kg_output, f)
elif control_param[1] == "fused":
# Parameters for the calculations are determined in the
# main node and are saved in .dump files for each subprocess
with open("subprocess/{}.dump".format(param[1]), 'rb') as f:
parameters = load(f)
logger.debug("{} | Fused Model Calculation Started | {} Calculations".format(param[1], len(parameters)))
if control_param[2] == "Hedge":
fused_out = [[],[],[],[]]
else:
fused_output = []
count = 0
if control_param[2] == "EHVI":
func = fused_EHVI
else:
func = fused_calculate
# Calculations are conducted in parallel using the concurrent.futures appraoch
with concurrent.futures.ProcessPoolExecutor(cpu_count()) as executor:
for result_from_process in zip(parameters, executor.map(func,parameters)):
params, results = result_from_process
if control_param[2] == "Hedge":
fused_out[0].append(results[0][0])
fused_out[1].append(results[0][1])
fused_out[2].append(results[0][2])
fused_out[3].append(results[0][3])
else:
fused_output.append(results[0])
count += 1
if count % 200 == 0:
logger.info("{} | {} / {} Calculations Completed".format(param[1], count, len(parameters)))
# if the acquisition function approach is the GP Hedge portfolio
# optimization approach then the output from this function needs
# no further processing. If any of the others are being used,
# there is some processing to attempt to remove duplicates
if control_param[2] == "Hedge":
with open("subprocess/{}.output".format(param[1]), 'wb') as f:
dump(fused_out, f)
else:
max_values = np.zeros((results[1],2))
for ii in range(len(fused_output)):
if max_values[fused_output[ii][1],0] != 0:
if max_values[fused_output[ii][1],0] < fused_output[ii][0]:
max_values[fused_output[ii][1],0] = fused_output[ii][0]
max_values[fused_output[ii][1],1] = fused_output[ii][1]
else:
max_values[fused_output[ii][1],0] = fused_output[ii][0]
max_values[fused_output[ii][1],1] = fused_output[ii][1]
fused_output = max_values[np.where(max_values[:,0]!=0)]
# Once calculations are completed, they are saved to the .output file for the main node to retrieve
with open("subprocess/{}.output".format(param[1]), 'wb') as f:
dump(fused_output, f)
# After the calculation is completed, the control file parameter
# is changed to 1 to indicate that it has completed
with open("subprocess/sub{}.control".format(param[1]), 'wb') as f:
control_param[0] = 1
dump(control_param, f)
logger.info("{} | Calculation Results Dumped | {} hours\n".format(param[1], np.round((time()-start)/3600, 4)))
except Exception as exc:
logger.critical("Error completing Calculation | {}".format(exc))
logger.exception(exc)
pass
sleep(10)
try:
# when the main node has completed all of its calculations, it will
# create a close file that triggers this code to complete
with open('subprocess/close{}'.format(param[1]), 'r') as f:
d = f.read()
not_close = False
logger.debug("{} | Close Command Found".format(param[1]))
except FileNotFoundError:
pass
with open("subprocess/sub{}.start".format(param[1]), 'a') as f:
f.write("subprocess finished successfully\n\n")
logger.info("{} | Subprocess Finished".format(param[1])) | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 3 06:17:35 2021
@author: <NAME>
"""
from pickle import dump, load
import concurrent.futures
import numpy as np
from sys import argv
from time import sleep, time
from multiprocessing import cpu_count
from util import calculate_KG, calculate_EI, fused_calculate, calculate_TS, calculate_GPHedge, calculate_Greedy, calculate_PI, calculate_UCB, calculate_EHVI, fused_EHVI
import logging
if __name__ == "__main__":
"""
This module is used within the BAREFOOT framework to run as a multi-node instance
This module runs on each of the subprocess nodes and controls the calculations on that
node.
"""
param = argv
# log_level = logging.DEBUG
log_level = logging.INFO
# create logging instance to record progress of the calculations
logger = logging.getLogger('BAREFOOT.subprocess')
logger.setLevel(log_level)
fh = logging.FileHandler('BAREFOOT.log')
fh.setLevel(log_level)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
fh.setFormatter(formatter)
# add the handler to the logger
logger.addHandler(fh)
logger.info("Subprocess {} | started".format(param[1]))
# Create a file to show that the subprocess has succesfully started
with open("subprocess/sub{}.start".format(param[1]), 'w') as f:
f.write("subprocess started successfully\n\n")
not_close = True
# keep the code running until it is shut down by the main node
while not_close:
try:
try:
# the main code will create these files when it is time for a
# calculation to be done on the subprocess node
with open("subprocess/sub{}.control".format(param[1]), 'rb') as f:
control_param = load(f)
with open("subprocess/sub{}.start".format(param[1]), 'a') as f:
f.write("Control File Found - {} | {}\n".format(control_param[0], control_param[1]))
logger.debug("Control File Found - {} | {}\n".format(control_param[0], control_param[1]))
except FileExistsError:
logger.debug("Control File could not be found")
control_param = [1,1]
# The main node changes the control_param value to 0 to indicate that
# there is a calculation to complete
if control_param[0] == 0:
logger.info("{} | New Subprocess calculation started\n".format(param[1]))
# The main code also specifies which acquisition function to use
if control_param[2] == "KG":
function = calculate_KG
elif control_param[2] == "EI":
function = calculate_EI
elif control_param[2] == "TS":
function = calculate_TS
elif control_param[2] == "Hedge":
function = calculate_GPHedge
elif control_param[2] == "Greedy":
function = calculate_Greedy
elif control_param[2] == "PI":
function = calculate_PI
elif control_param[2] == "UCB":
function = calculate_UCB
elif control_param[2] == "EHVI":
function = calculate_EHVI
start = time()
# there is a difference between the calculations required for the
# reduced order modesl (iteration) and the truth model (fused)
if control_param[1] == "iteration":
# Parameters for the calculations are determined in the
# main node and are saved in .dump files for each subprocess
with open("subprocess/{}.dump".format(param[1]), 'rb') as f:
parameters = load(f)
logger.debug("{} | Reduced Order Model Calculation Started | {} Calculations".format(param[1], len(parameters)))
kg_output = []
count = 0
# Calculations are conducted in parallel using the concurrent.futures appraoch
with concurrent.futures.ProcessPoolExecutor(cpu_count()) as executor:
for result_from_process in zip(parameters, executor.map(function,parameters)):
params, results = result_from_process
kg_output.append(results)
count += 1
if count % 200 == 0:
logger.info("{} | {} / {} Calculations Completed".format(param[1], count, len(parameters)))
# Once calculations are completed, they are saved to the .output file for the main node to retrieve
with open("subprocess/{}.output".format(param[1]), 'wb') as f:
dump(kg_output, f)
elif control_param[1] == "fused":
# Parameters for the calculations are determined in the
# main node and are saved in .dump files for each subprocess
with open("subprocess/{}.dump".format(param[1]), 'rb') as f:
parameters = load(f)
logger.debug("{} | Fused Model Calculation Started | {} Calculations".format(param[1], len(parameters)))
if control_param[2] == "Hedge":
fused_out = [[],[],[],[]]
else:
fused_output = []
count = 0
if control_param[2] == "EHVI":
func = fused_EHVI
else:
func = fused_calculate
# Calculations are conducted in parallel using the concurrent.futures appraoch
with concurrent.futures.ProcessPoolExecutor(cpu_count()) as executor:
for result_from_process in zip(parameters, executor.map(func,parameters)):
params, results = result_from_process
if control_param[2] == "Hedge":
fused_out[0].append(results[0][0])
fused_out[1].append(results[0][1])
fused_out[2].append(results[0][2])
fused_out[3].append(results[0][3])
else:
fused_output.append(results[0])
count += 1
if count % 200 == 0:
logger.info("{} | {} / {} Calculations Completed".format(param[1], count, len(parameters)))
# if the acquisition function approach is the GP Hedge portfolio
# optimization approach then the output from this function needs
# no further processing. If any of the others are being used,
# there is some processing to attempt to remove duplicates
if control_param[2] == "Hedge":
with open("subprocess/{}.output".format(param[1]), 'wb') as f:
dump(fused_out, f)
else:
max_values = np.zeros((results[1],2))
for ii in range(len(fused_output)):
if max_values[fused_output[ii][1],0] != 0:
if max_values[fused_output[ii][1],0] < fused_output[ii][0]:
max_values[fused_output[ii][1],0] = fused_output[ii][0]
max_values[fused_output[ii][1],1] = fused_output[ii][1]
else:
max_values[fused_output[ii][1],0] = fused_output[ii][0]
max_values[fused_output[ii][1],1] = fused_output[ii][1]
fused_output = max_values[np.where(max_values[:,0]!=0)]
# Once calculations are completed, they are saved to the .output file for the main node to retrieve
with open("subprocess/{}.output".format(param[1]), 'wb') as f:
dump(fused_output, f)
# After the calculation is completed, the control file parameter
# is changed to 1 to indicate that it has completed
with open("subprocess/sub{}.control".format(param[1]), 'wb') as f:
control_param[0] = 1
dump(control_param, f)
logger.info("{} | Calculation Results Dumped | {} hours\n".format(param[1], np.round((time()-start)/3600, 4)))
except Exception as exc:
logger.critical("Error completing Calculation | {}".format(exc))
logger.exception(exc)
pass
sleep(10)
try:
# when the main node has completed all of its calculations, it will
# create a close file that triggers this code to complete
with open('subprocess/close{}'.format(param[1]), 'r') as f:
d = f.read()
not_close = False
logger.debug("{} | Close Command Found".format(param[1]))
except FileNotFoundError:
pass
with open("subprocess/sub{}.start".format(param[1]), 'a') as f:
f.write("subprocess finished successfully\n\n")
logger.info("{} | Subprocess Finished".format(param[1])) | en | 0.905818 | # -*- coding: utf-8 -*- Created on Sun Jan 3 06:17:35 2021 @author: <NAME> This module is used within the BAREFOOT framework to run as a multi-node instance This module runs on each of the subprocess nodes and controls the calculations on that node. # log_level = logging.DEBUG # create logging instance to record progress of the calculations # create formatter and add it to the handlers # add the handler to the logger # Create a file to show that the subprocess has succesfully started # keep the code running until it is shut down by the main node # the main code will create these files when it is time for a # calculation to be done on the subprocess node # The main node changes the control_param value to 0 to indicate that # there is a calculation to complete # The main code also specifies which acquisition function to use # there is a difference between the calculations required for the # reduced order modesl (iteration) and the truth model (fused) # Parameters for the calculations are determined in the # main node and are saved in .dump files for each subprocess # Calculations are conducted in parallel using the concurrent.futures appraoch # Once calculations are completed, they are saved to the .output file for the main node to retrieve # Parameters for the calculations are determined in the # main node and are saved in .dump files for each subprocess # Calculations are conducted in parallel using the concurrent.futures appraoch # if the acquisition function approach is the GP Hedge portfolio # optimization approach then the output from this function needs # no further processing. 
If any of the others are being used, # there is some processing to attempt to remove duplicates # Once calculations are completed, they are saved to the .output file for the main node to retrieve # After the calculation is completed, the control file parameter # is changed to 1 to indicate that it has completed # when the main node has completed all of its calculations, it will # create a close file that triggers this code to complete | 2.238658 | 2 |
Trading_states.py | YashSheth1/Algo-Trading | 0 | 6615946 | import State as st
import csv
import sys
from cassandra.cluster import Cluster
import datetime as dt
from datetime import datetime
from Trade_Module import Trades
#import matplotlib.pyplot as plt
import ast
s_t=datetime.now();
"""
---Trading using State Indicator---
1. Definations
1.1 execution
1.2 state_trade
---Flow of Code---
2.1 The highest run number is fetched from the Trades table in the defined keyspace
2.2 The run number is incremented by 1 to identify unique code runs
2.3 Give the dates between which you want to run the code
2.4 After all the configuration and input is given, the state_main function from the State class is called
2.5 It returns the calculated States as well as the Open,High,Low,Close values for each candle of defined size
2.6 This Info is then passed on to the state_trade function to generate trades
2.7 As soon as the function detects trade signal it passes the trade info to execution function which handles execution of trade as well uploading the trade
information into cassandra
2.8 LOGIC : state_trade Generation
2.8.1 : State changes to 5 while previous state was not 5 then BUY Signal is generated
2.8.2 : If the state has changed from 5 i.e previous state was 5 and current state is something else then SELL
2.8.3 : If the state changes to 1 while previous state was not 1 then SELL
2.8.4 : if the previous state was 1 and now it has change to something else then BUY
2.9 LOGIC : Execution
2.9.1 : 1st Trade of the day is inserted into the trades List without any condition checking
2.9.2 : Now, the Current trade Time is checked if It's Time of Execution is before than that of the previous Trade then
the previous Trade is accepted
2.9.3 : Similar checking is done for every trade till its 18:00:00 and then the last trade is Accepted as it is
2.9.4 : 9 is returned in the case when 1. Trade is NOT FILLED 2. when last trades's execution time is greater than that of the current Trade
"""
def execution(date,time,product,size,price,side,order_type,ratio,day_start_indicator):
    """Try to fill an order via the Trades module and record accepted trades.

    Appends the filled trade to the global ``trades`` list and, once a prior
    trade exists for the day (day_start_indicator > 1), persists the
    *previous* trade to Cassandra via the global ``session``/``prepared``.

    Returns:
        9    -- order not filled, or it arrived out of time order; the caller
                must keep its previous position.
        None -- order accepted.
    """
    generated_trade=Trades(date,time,product,size,price,side,order_type,ratio,keyspace_name,session)
    time2=''  # NOTE(review): unused leftover variable
    if generated_trade[0]!='Not Found':
        if day_start_indicator>1:
            # Format the previous trade time (HH:MM:SS) so it can be compared with the current order time
            last_trade_time=str(trades[-1][1])[:8]
            curr_time=str(time)[:8]
            # Orders arriving at/after the 18:00:00 market close are accepted unconditionally
            if datetime.strptime(curr_time,"%H:%M:%S")>=datetime.strptime('18:00:00',"%H:%M:%S"):
                session.execute(prepared,(run_no,product,trades[-1][0],trades[-1][2],trades[-1][3],date,trades[-1][1],'States',order_type))
                trades.extend(generated_trade)
                return None
            # Accept the order only if the last traded time is not later than the current order time
            if datetime.strptime(curr_time,"%H:%M:%S")>=datetime.strptime(last_trade_time,"%H:%M:%S"):
                session.execute(prepared,(run_no,product,trades[-1][0],trades[-1][2],trades[-1][3],date,trades[-1][1],'States',order_type))
                trades.extend(generated_trade)
                return None
            else:
                # Order was filled but is timestamped earlier than the last trade:
                # ignore it so the caller resets its position to the previous value
                return 9
        else:
            # First trade of the day is accepted without any checks
            trades.extend(generated_trade)
    else:
        # Order not filled: return 9 so the caller leaves its position unchanged
        print "Not Found "
        return 9
#Generating Trades
def state_trade(l,order_type,size,price,ratio):
    """Walk the list of state-annotated candles and generate buy/sell orders.

    Signal logic (see module docstring):
      * state becomes 5 -> BUY;  state leaves 5 while long  -> SELL
      * state becomes 1 -> SELL; state leaves 1 while short -> BUY
    Any open position is flattened with a market order once a candle ends
    at or after the 18:00:00 market close.

    Args:
        l: iterable of state objects, each with ``.state`` and ``.period``
           (``.period`` provides ``date1``, ``time_start``, ``time_end``).
        order_type: 'Market' or 'Limit'.
        size: lot size for opening orders.
        price: limit price (0.0 enables auto pricing).
        ratio: order ratio (0.0 disables ratio-based ordering).
    """
    # Current position of the account: 1 long, -1 short, 0 flat
    position=0
    # Counts trades within the day; reset at each day end
    day_start_indicator=0
    # Closing-order size; modify only to decouple buy and sell quantities
    size2=size
    # execution() returns 9 when an order is not filled / out of time order
    indicator=0
    # State of the previous candle (used to detect state transitions)
    previou_state=1
    # Loop through each state value received to find buy/sell signals
    for Current_item in l:
        # Python datetime does not support nanosecond precision, so strip the
        # times to HH:MM:SS before comparing against the 18:00:00 market close
        candle_time_end=str(Current_item.period.time_end)[:8]
        candle_time_start=str(Current_item.period.time_start)[:8]
        if datetime.strptime(candle_time_end,"%H:%M:%S")<datetime.strptime('18:00:00',"%H:%M:%S") and datetime.strptime('07:00:00',"%H:%M:%S")<=datetime.strptime(candle_time_start,"%H:%M:%S")<=datetime.strptime('19:00:00',"%H:%M:%S"):
            # Generation of BUY orders: state changed to 5
            if Current_item.state==5 and previou_state!=5:
                if position==0:
                    day_start_indicator+=1
                    indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size,price,'buy',order_type,ratio,day_start_indicator)
                    if indicator == 9:
                        position=0
                    else:
                        position=1
                elif position==-1:
                    day_start_indicator+=1
                    indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size2,price,'buy',order_type,ratio,day_start_indicator)
                    if indicator==9:
                        position=-1
                    else:
                        position=0
            # SELL when the state changes from 5 to something else while long
            elif position==1 and Current_item.state!=5 and previou_state==5:
                day_start_indicator+=1
                indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size2,price,'sell',order_type,ratio,day_start_indicator)
                if indicator==9:
                    position=1
                else:
                    position=0
            # Generation of SELL orders: state changed to 1
            elif Current_item.state==1 and previou_state!=1:
                if position==1:
                    day_start_indicator+=1
                    indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size2,price,'sell',order_type,ratio,day_start_indicator)
                    if indicator==9:
                        position=1
                    else:
                        position=0
                elif position==0:
                    day_start_indicator+=1
                    indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size,price,'sell',order_type,ratio,day_start_indicator)
                    if indicator==9:
                        position=0
                    else:
                        position=-1
            # BUY when the state changes from 1 to something else while short
            elif position==-1 and Current_item.state!=1 and previou_state==1:
                day_start_indicator+=1
                indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size2,price,'buy',order_type,ratio,day_start_indicator)
                if indicator==9:
                    position=-1
                else:
                    position=0
        # After 18:00:00 HRS: flatten any open position with a 1-lot market order
        else:
            #print position
            if position==1:
                day_start_indicator+=1
                execution(Current_item.period.date1,Current_item.period.time_end,product,1,price,'sell','Market',ratio,day_start_indicator)
                session.execute(prepared,(run_no,product,trades[-1][0],trades[-1][2],trades[-1][3],trades[-1][4],trades[-1][1],'States',order_type))
            if position==-1:
                day_start_indicator+=1
                execution(Current_item.period.date1,Current_item.period.time_end,product,1,price,'buy','Market',ratio,day_start_indicator)
                session.execute(prepared,(run_no,product,trades[-1][0],trades[-1][2],trades[-1][3],trades[-1][4],trades[-1][1],'States',order_type))
            if position==0:
                session.execute(prepared,(run_no,product,trades[-1][0],trades[-1][2],trades[-1][3],trades[-1][4],trades[-1][1],'States','Market'))
            # Reset for the next trading day
            position=0
            pass
            day_start_indicator=0
        previou_state=Current_item.state
        # Collect series for the optional matplotlib plot at the bottom of the file
        time_series_for_graph.append(str(Current_item.period.time_end))
        states_list_for_graph.append(Current_item.state)
#-----------------------------------------------------Code START---------------------------------------------------
#name='try1'
# Read the config file given on the command line; each line is 'key@value'
name_of_file=sys.argv[1]
config_data_main={}
config_data={}
myfile=open(name_of_file)
for line in myfile:
    name,var=line.partition('@')[::2]
    config_data_main[name.strip()]=str(var)[:-1]  # [:-1] drops the trailing newline
# 'Trade_main_config' holds a dict literal; parse it safely with ast.literal_eval
temp=(config_data_main['Trade_main_config'])
config_data=ast.literal_eval(temp)
print config_data
keyspace_name=config_data['keyspace_name']
date_start=config_data['start_date']
date_end=config_data['end_date']
product=config_data['xric']
size_of_candle=int(config_data['candle_size'])
candle_type=config_data['candle_type']
ip_address=[]
ip_address.append(config_data['ip_address'])
# Connect to the Cassandra cluster and prepare the insert / max-run-no statements
cluster = Cluster(ip_address)
session = cluster.connect(keyspace_name)
query="insert into trades(run_no,product,side,price,size,date1,time1,strategy,type) values (?,?,?,?,?,?,?,?,?)"
query2="select max(run_no) from trades allow filtering"
prepared = session.prepare(query)
prepared2 = session.prepare(query2)
# Each run gets a unique, incrementing run number (0 -> 1 on an empty table)
run_no=session.execute(prepared2)
run_no=0 if run_no[0][0]==None else run_no[0][0]
run_no=int(run_no)+1
# Various list initializations (trades plus the series used for optional plotting)
trades=[]
time_series_for_graph=[]
states_list_for_graph=[]
# State class object
h=st.State()
# Candle/state data received from the State class
state_data=h.state_main(size_of_candle,candle_type,date_start,date_end,product,keyspace_name,session)
# Interactive inputs
order_type=raw_input("Market Orders or Limit Orders?\n")
number_of_lots=int(raw_input("How many Lots??\n"))
# Keep 0.0 for ordering without using a ratio
ratio=float(raw_input("Input the Ratio\n"))
# Keep price whatever you want for a market order -- 0.0 for auto pricing
price=float(raw_input("Enter Price..[0.0 for Auto Pricing feature]\n"))
# Trade generation
state_trade(state_data,order_type,number_of_lots,price,ratio)
# Final trades
for i in trades:
    print i[0],i[1],i[4]
# Uncomment for Generating Graph (Import Matplotlib Library!!)
"""
plt.plot(ti,s,color='red',label='States')
#plt.plot(ti,sp,color='yellow',label='signal')
plt.xlabel('')
plt.ylabel('States')
plt.legend()
plt.gcf().autofmt_xdate()
plt.show()
"""
print (datetime.now()-s_t)
| import State as st
import csv
import sys
from cassandra.cluster import Cluster
import datetime as dt
from datetime import datetime
from Trade_Module import Trades
#import matplotlib.pyplot as plt
import ast
s_t=datetime.now();
"""
---Trading using State Indicator---
1. Definations
1.1 execution
1.2 state_trade
---Flow of Code---
2.1 Highest Run Number is feched from the Trades Table from the defined keyspace
2.2 Run Number is incremented by 1 to identify unique code runs
2.3 Give dates between which you want to run the code
2.4 After all the configeration and input is given State_main function from the State Class is called
2.5 It returns the calculated States as well as the Open,High,Low,Close values for each candle of defined size
2.6 This Info is then passed on to the state_trade function to generate trades
2.7 As soon as the function detects trade signal it passes the trade info to execution function which handles execution of trade as well uploading the trade
information into cassandra
2.8 LOGIC : state_trade Generation
2.8.1 : State changes to 5 while previous state was not 5 then BUY Signal is generated
2.8.2 : If the state has changed from 5 i.e previous state was 5 and current state is something else then SELL
2.8.3 : If the state changes to 1 while previous state was not 1 then SELL
2.8.4 : if the previous state was 1 and now it has change to something else then BUY
2.9 LOGIC : Execution
2.9.1 : 1st Trade of the day is inserted into the trades List without any condition checking
2.9.2 : Now, the Current trade Time is checked if It's Time of Execution is before than that of the previous Trade then
the previous Trade is accepted
2.9.3 : Similar checking is done for every trade till its 18:00:00 and then the last trade is Accepted as it is
2.9.4 : 9 is returned in the case when 1. Trade is NOT FILLED 2. when last trades's execution time is greater than that of the current Trade
"""
def execution(date,time,product,size,price,side,order_type,ratio,day_start_indicator):
generated_trade=Trades(date,time,product,size,price,side,order_type,ratio,keyspace_name,session)
time2=''
if generated_trade[0]!='Not Found':
if day_start_indicator>1:
#Format the previous trade time so as to compare it with current order time
last_trade_time=str(trades[-1][1])[:8]
curr_time=str(time)[:8]
#if order is of after closing time then it is accepted here
if datetime.strptime(curr_time,"%H:%M:%S")>=datetime.strptime('18:00:00',"%H:%M:%S"):
session.execute(prepared,(run_no,product,trades[-1][0],trades[-1][2],trades[-1][3],date,trades[-1][1],'States',order_type))
trades.extend(generated_trade)
return None
#If last traded time is smaller then current order time then only accept order into database
if datetime.strptime(curr_time,"%H:%M:%S")>=datetime.strptime(last_trade_time,"%H:%M:%S"):
session.execute(prepared,(run_no,product,trades[-1][0],trades[-1][2],trades[-1][3],date,trades[-1][1],'States',order_type))
trades.extend(generated_trade)
return None
else:
#when order is filled but has time higher than the new trade time
#ignore the order and reset the positions to previous value
return 9
else:
#Accepting First trade of the day
trades.extend(generated_trade)
else:
#order Not Filled so change indicator to 9 hence not changing the position of our ALGO
print "Not Found "
return 9
#Generating Trades
def state_trade(l,order_type,size,price,ratio):
#Indicates the current position of of our Account
position=0
#Resets after each Day indicating day end
day_start_indicator=0
#Modify only if you want to change the buying and selling quantities
size2=size
#Indicator=9 Tells us if the Order is not Filled
indicator=0
#previous value of the State
previou_state=1
#Looping through each state value received to find buy/sell signals
for Current_item in l:
#python Datetime does not suppor time till nano second percision
#strip time till seconds and compare if its less than market closing time i.e 6:00 pm or 18:00:00
candle_time_end=str(Current_item.period.time_end)[:8]
candle_time_start=str(Current_item.period.time_start)[:8]
if datetime.strptime(candle_time_end,"%H:%M:%S")<datetime.strptime('18:00:00',"%H:%M:%S") and datetime.strptime('07:00:00',"%H:%M:%S")<=datetime.strptime(candle_time_start,"%H:%M:%S")<=datetime.strptime('19:00:00',"%H:%M:%S"):
#Generation Of BUY orders
if Current_item.state==5 and previou_state!=5:
if position==0:
day_start_indicator+=1
indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size,price,'buy',order_type,ratio,day_start_indicator)
if indicator == 9:
position=0
else:
position=1
elif position==-1:
day_start_indicator+=1
indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size2,price,'buy',order_type,ratio,day_start_indicator)
if indicator==9:
position=-1
else:
position=0
#SELL when state changes from 5 to something else
elif position==1 and Current_item.state!=5 and previou_state==5:
day_start_indicator+=1
indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size2,price,'sell',order_type,ratio,day_start_indicator)
if indicator==9:
position=1
else:
position=0
#Generation Of SELL orders
elif Current_item.state==1 and previou_state!=1:
if position==1:
day_start_indicator+=1
indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size2,price,'sell',order_type,ratio,day_start_indicator)
if indicator==9:
position=1
else:
position=0
elif position==0:
day_start_indicator+=1
indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size,price,'sell',order_type,ratio,day_start_indicator)
if indicator==9:
position=0
else:
position=-1
#BUY when state changes from 1 to something else
elif position==-1 and Current_item.state!=1 and previou_state==1:
day_start_indicator+=1
indicator=execution(Current_item.period.date1,Current_item.period.time_end,product,size2,price,'buy',order_type,ratio,day_start_indicator)
if indicator==9:
position=-1
else:
position=0
# After 18:00:00 HRS Trade Execution
else:
#print position
if position==1:
day_start_indicator+=1
execution(Current_item.period.date1,Current_item.period.time_end,product,1,price,'sell','Market',ratio,day_start_indicator)
session.execute(prepared,(run_no,product,trades[-1][0],trades[-1][2],trades[-1][3],trades[-1][4],trades[-1][1],'States',order_type))
if position==-1:
day_start_indicator+=1
execution(Current_item.period.date1,Current_item.period.time_end,product,1,price,'buy','Market',ratio,day_start_indicator)
session.execute(prepared,(run_no,product,trades[-1][0],trades[-1][2],trades[-1][3],trades[-1][4],trades[-1][1],'States',order_type))
if position==0:
session.execute(prepared,(run_no,product,trades[-1][0],trades[-1][2],trades[-1][3],trades[-1][4],trades[-1][1],'States','Market'))
position=0
pass
day_start_indicator=0
previou_state=Current_item.state
time_series_for_graph.append(str(Current_item.period.time_end))
states_list_for_graph.append(Current_item.state)
#-----------------------------------------------------Code START---------------------------------------------------
#name='try1'
name_of_file=sys.argv[1]
config_data_main={}
config_data={}
myfile=open(name_of_file)
for line in myfile:
name,var=line.partition('@')[::2]
config_data_main[name.strip()]=str(var)[:-1]
temp=(config_data_main['Trade_main_config'])
config_data=ast.literal_eval(temp)
print config_data
keyspace_name=config_data['keyspace_name']
date_start=config_data['start_date']
date_end=config_data['end_date']
product=config_data['xric']
size_of_candle=int(config_data['candle_size'])
candle_type=config_data['candle_type']
ip_address=[]
ip_address.append(config_data['ip_address'])
cluster = Cluster(ip_address)
session = cluster.connect(keyspace_name)
query="insert into trades(run_no,product,side,price,size,date1,time1,strategy,type) values (?,?,?,?,?,?,?,?,?)"
query2="select max(run_no) from trades allow filtering"
prepared = session.prepare(query)
prepared2 = session.prepare(query2)
run_no=session.execute(prepared2)
run_no=0 if run_no[0][0]==None else run_no[0][0]
run_no=int(run_no)+1
#Various List Initializations
trades=[]
time_series_for_graph=[]
states_list_for_graph=[]
#State Class Object
h=st.State()
#data received from State Class
state_data=h.state_main(size_of_candle,candle_type,date_start,date_end,product,keyspace_name,session)
#Inputs
order_type=raw_input("Market Orders or Limit Orders?\n")
number_of_lots=int(raw_input("How many Lots??\n"))
#Keep 0.0 for ordering without ratio using
ratio=float(raw_input("Input the Ratio\n"))
#Keep price whatever you want for market order -- 0.0 for Auto Pricing
price=float(raw_input("Enter Price..[0.0 for Auto Pricing feature]\n"))
#Trade Generation
state_trade(state_data,order_type,number_of_lots,price,ratio)
#Final Trades
for i in trades:
print i[0],i[1],i[4]
# Uncomment for Generating Graph (Import Matplotlib Library!!)
"""
plt.plot(ti,s,color='red',label='States')
#plt.plot(ti,sp,color='yellow',label='signal')
plt.xlabel('')
plt.ylabel('States')
plt.legend()
plt.gcf().autofmt_xdate()
plt.show()
"""
print (datetime.now()-s_t)
| en | 0.854302 | #import matplotlib.pyplot as plt ---Trading using State Indicator---
1. Definations
1.1 execution
1.2 state_trade
---Flow of Code---
2.1 Highest Run Number is feched from the Trades Table from the defined keyspace
2.2 Run Number is incremented by 1 to identify unique code runs
2.3 Give dates between which you want to run the code
2.4 After all the configeration and input is given State_main function from the State Class is called
2.5 It returns the calculated States as well as the Open,High,Low,Close values for each candle of defined size
2.6 This Info is then passed on to the state_trade function to generate trades
2.7 As soon as the function detects trade signal it passes the trade info to execution function which handles execution of trade as well uploading the trade
information into cassandra
2.8 LOGIC : state_trade Generation
2.8.1 : State changes to 5 while previous state was not 5 then BUY Signal is generated
2.8.2 : If the state has changed from 5 i.e previous state was 5 and current state is something else then SELL
2.8.3 : If the state changes to 1 while previous state was not 1 then SELL
2.8.4 : if the previous state was 1 and now it has change to something else then BUY
2.9 LOGIC : Execution
2.9.1 : 1st Trade of the day is inserted into the trades List without any condition checking
2.9.2 : Now, the Current trade Time is checked if It's Time of Execution is before than that of the previous Trade then
the previous Trade is accepted
2.9.3 : Similar checking is done for every trade till its 18:00:00 and then the last trade is Accepted as it is
2.9.4 : 9 is returned in the case when 1. Trade is NOT FILLED 2. when last trades's execution time is greater than that of the current Trade #Format the previous trade time so as to compare it with current order time #if order is of after closing time then it is accepted here #If last traded time is smaller then current order time then only accept order into database #when order is filled but has time higher than the new trade time #ignore the order and reset the positions to previous value #Accepting First trade of the day #order Not Filled so change indicator to 9 hence not changing the position of our ALGO #Generating Trades #Indicates the current position of of our Account #Resets after each Day indicating day end #Modify only if you want to change the buying and selling quantities #Indicator=9 Tells us if the Order is not Filled #previous value of the State #Looping through each state value received to find buy/sell signals #python Datetime does not suppor time till nano second percision #strip time till seconds and compare if its less than market closing time i.e 6:00 pm or 18:00:00 #Generation Of BUY orders #SELL when state changes from 5 to something else #Generation Of SELL orders #BUY when state changes from 1 to something else # After 18:00:00 HRS Trade Execution #print position #-----------------------------------------------------Code START--------------------------------------------------- #name='try1' #Various List Initializations #State Class Object #data received from State Class #Inputs #Keep 0.0 for ordering without ratio using #Keep price whatever you want for market order -- 0.0 for Auto Pricing #Trade Generation #Final Trades # Uncomment for Generating Graph (Import Matplotlib Library!!) plt.plot(ti,s,color='red',label='States')
#plt.plot(ti,sp,color='yellow',label='signal')
plt.xlabel('')
plt.ylabel('States')
plt.legend()
plt.gcf().autofmt_xdate()
plt.show() | 3.472588 | 3 |
hdm/core/catalog/crawler/netezza_crawler.py | hashmapinc/hdm | 1 | 6615947 | # Copyright © 2020 Hashmap, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from providah.factories.package_factory import PackageFactory as pf
class NetezzaCrawler:
    """Crawls a Netezza server and catalogs its databases, schemas, and table columns."""

    @classmethod
    def _get_logger(cls):
        # Logger is named after the concrete class so subclasses get their own channel.
        return logging.getLogger(cls.__name__)

    def __init__(self, **kwargs):
        self._logger = self._get_logger()
        # Connection name doubles as the factory key used to build DAO instances.
        self._connection_name = kwargs.get('connection_name')

    def run(self) -> tuple:
        """Crawl every non-system database.

        Returns:
            tuple: (databases, schemas, tables) -- a list of database names,
            a combined list of schema names from all databases, and a dict
            mapping '<db>.<schema>.<table>' to a list of column descriptors.
        """
        databases = self.__get_database_names()
        schemas = []
        tables = {}
        for db in databases:
            schemas.extend(self.__get_schema_names_by_db(db))
            tables.update(self.__get_tables_by_db(db))
        return databases, schemas, tables

    def __get_database_names(self) -> list:
        """Return the names of all databases except the SYSTEM database."""
        dao = pf.create(key=self._connection_name, configuration={'connection': self._connection_name})
        query_string = "SELECT DATABASE FROM _V_DATABASE WHERE DATABASE <> 'SYSTEM'"
        databases = []
        # Fix: predefine cursor so the finally clause cannot raise
        # UnboundLocalError when connecting or creating the cursor fails.
        cursor = None
        try:
            with dao.connection as conn:
                cursor = conn.cursor()
                cursor.execute(query_string)
                result = cursor.fetchall()
                for row in result:
                    databases.append(row[0])
                return databases
        finally:
            if cursor:
                cursor.close()

    def __get_schema_names_by_db(self, database) -> list:
        """Return the distinct schema names inside *database*."""
        dao = pf.create(key=self._connection_name, configuration={'connection': self._connection_name})
        query_string = f"SELECT DISTINCT SCHEMA FROM {database}.._V_SCHEMA"  # WHERE OBJTYPE = 'TABLE'"
        schemas = []
        # Fix: predefine cursor so the finally clause cannot raise
        # UnboundLocalError when connecting or creating the cursor fails.
        cursor = None
        try:
            with dao.connection as conn:
                cursor = conn.cursor()
                cursor.execute(query_string)
                result = cursor.fetchall()
                for row in result:
                    schemas.append(row[0])
                return schemas
        finally:
            if cursor:
                cursor.close()

    def __get_tables_by_db(self, database) -> dict:
        """Return column metadata for every table in *database*.

        Returns:
            dict: maps '<db>.<schema>.<table>' to a list of dicts describing
            each column (name, type, size, nullability, default), ordered by
            the column's ordinal position (ATTNUM).
        """
        dao = pf.create(key=self._connection_name, configuration={'connection': self._connection_name})
        query_string = f"SELECT DATABASE, SCHEMA, NAME, ATTNAME, FORMAT_TYPE, ATTLEN, ATTNOTNULL, COLDEFAULT " \
                       f"FROM {database}.._V_RELATION_COLUMN " \
                       f"WHERE DATABASE <> 'SYSTEM' AND TYPE = 'TABLE' ORDER BY SCHEMA, NAME, ATTNUM ASC"
        tables = {}
        cursor = None  # Fix: consistent cleanup; the original never closed this cursor.
        try:
            with dao.connection as conn:
                cursor = conn.cursor()
                cursor.execute(query_string)
                result = cursor.fetchall()
                for row in result:
                    table_name = f"{row[0]}.{row[1]}.{row[2]}"  # ignoring name collisions across multiple db's for now
                    if table_name not in tables:
                        tables[table_name] = []
                    column = {
                        'database': row[0],
                        'schema': row[1],
                        'name': row[2],
                        'columnName': row[3],
                        'columnType': row[4],
                        'columnSize': row[5],
                        'notNull': row[6],
                        'default': row[7]
                    }
                    tables[table_name].append(column)
                return tables
        finally:
            if cursor:
                cursor.close()
| # Copyright © 2020 Hashmap, Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from providah.factories.package_factory import PackageFactory as pf
class NetezzaCrawler:
@classmethod
def _get_logger(cls):
return logging.getLogger(cls.__name__)
def __init__(self, **kwargs):
self._logger = self._get_logger()
self._connection_name = kwargs.get('connection_name')
def run(self) -> tuple:
databases = self.__get_database_names()
schemas = []
tables = {}
for db in databases:
schemas.extend(self.__get_schema_names_by_db(db))
tables.update(self.__get_tables_by_db(db))
return databases, schemas, tables
def __get_database_names(self) -> list:
dao = pf.create(key=self._connection_name, configuration={'connection': self._connection_name})
query_string = "SELECT DATABASE FROM _V_DATABASE WHERE DATABASE <> 'SYSTEM'"
databases = []
try:
with dao.connection as conn:
cursor = conn.cursor()
cursor.execute(query_string)
result = cursor.fetchall()
for row in result:
databases.append(row[0])
return databases
finally:
if cursor:
cursor.close()
def __get_schema_names_by_db(self, database) -> list:
dao = pf.create(key=self._connection_name, configuration={'connection': self._connection_name})
query_string = f"SELECT DISTINCT SCHEMA FROM {database}.._V_SCHEMA" # WHERE OBJTYPE = 'TABLE'"
schemas = []
try:
with dao.connection as conn:
cursor = conn.cursor()
cursor.execute(query_string)
result = cursor.fetchall()
for row in result:
schemas.append(row[0])
return schemas
finally:
if cursor:
cursor.close()
def __get_tables_by_db(self, database) -> dict:
dao = pf.create(key=self._connection_name, configuration={'connection': self._connection_name})
query_string = f"SELECT DATABASE, SCHEMA, NAME, ATTNAME, FORMAT_TYPE, ATTLEN, ATTNOTNULL, COLDEFAULT " \
f"FROM {database}.._V_RELATION_COLUMN " \
f"WHERE DATABASE <> 'SYSTEM' AND TYPE = 'TABLE' ORDER BY SCHEMA, NAME, ATTNUM ASC"
tables = {}
with dao.connection as conn:
cursor = conn.cursor()
cursor.execute(query_string)
result = cursor.fetchall()
for row in result:
table_name = f"{row[0]}.{row[1]}.{row[2]}" # ignoring name collisions across multiple db's for now
if table_name not in tables:
tables[table_name] = []
column = {
'database': row[0],
'schema': row[1],
'name': row[2],
'columnName': row[3],
'columnType': row[4],
'columnSize': row[5],
'notNull': row[6],
'default': row[7]
}
tables[table_name].append(column)
return tables
| en | 0.851744 | # Copyright © 2020 Hashmap, Inc # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # WHERE OBJTYPE = 'TABLE'" # ignoring name collisions across multiple db's for now | 2.317759 | 2 |
model.py | kyteinsky/ASL-sign-language | 0 | 6615948 | <reponame>kyteinsky/ASL-sign-language<filename>model.py
import torch.nn as nn
import torch.nn.functional as F
import torch
class Net(nn.Module):
    """CNN classifier for 28x28 grayscale sign-language images (25 classes)."""

    def __init__(self):
        super(Net, self).__init__()
        # Convolutional feature extractor: 1 -> 16 -> 64 -> 128 channels.
        self.conv1 = nn.Conv2d(1, 16, kernel_size=2)
        self.pool = nn.MaxPool2d(kernel_size=2)
        self.batch_norm1 = nn.BatchNorm2d(16)
        self.conv2 = nn.Conv2d(16, 64, kernel_size=3)
        self.batch_norm2 = nn.BatchNorm2d(64)
        self.conv3 = nn.Conv2d(64, 128, kernel_size=3)
        self.batch_norm3 = nn.BatchNorm2d(128)
        # Fully connected classifier head: 2048 -> 1024 -> 512 -> 256 -> 25.
        self.fc1 = nn.Linear(2048, 1024)
        self.dropout = nn.Dropout(p=0.2)
        self.fc2 = nn.Linear(1024, 512)
        self.batch_norm4 = nn.BatchNorm1d(512)
        self.fc3 = nn.Linear(512, 256)
        self.fc4 = nn.Linear(256, 25)

    def forward(self, x):
        # Accept flat (N, 784) input and view it as single-channel 28x28 images.
        out = x.reshape(-1, 1, 28, 28)

        # Conv stage 1: ReLU, 2x2 max-pool, batch norm.
        out = self.batch_norm1(self.pool(F.relu(self.conv1(out))))
        # Conv stage 2: swish, dropout, batch norm.
        out = self.batch_norm2(self.dropout(self.swish(self.conv2(out))))
        # Conv stage 3: swish, pool, dropout, batch norm -> (N, 128, 4, 4).
        out = self.batch_norm3(self.dropout(self.pool(self.swish(self.conv3(out)))))

        # Flatten to (N, 2048) for the linear head.
        out = out.view(-1, 128 * 4 * 4)

        out = self.dropout(self.swish(self.fc1(out)))
        out = self.batch_norm4(self.dropout(self.swish(self.fc2(out))))
        out = F.relu(self.fc3(out))
        # Raw logits; apply softmax / CrossEntropyLoss outside the model.
        return self.fc4(out)

    def swish(self, t):
        # Swish / SiLU activation: t * sigmoid(t).
        return t * torch.sigmoid(t)
| import torch.nn as nn
import torch.nn.functional as F
import torch
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.conv1 = nn.Conv2d(1, 16, 2)
self.pool = nn.MaxPool2d(2)
self.batch_norm1 = nn.BatchNorm2d(16)
self.conv2 = nn.Conv2d(16, 64, 3)
self.batch_norm2 = nn.BatchNorm2d(64)
self.conv3 = nn.Conv2d(64, 128, 3)
self.batch_norm3 = nn.BatchNorm2d(128)
self.fc1 = nn.Linear(2048, 1024)
self.dropout = nn.Dropout(0.2)
self.fc2 = nn.Linear(1024, 512)
self.batch_norm4 = nn.BatchNorm1d(512)
self.fc3 = nn.Linear(512, 256)
self.fc4 = nn.Linear(256, 25)
def forward(self, x):
# correct shape
x = x.reshape(-1, 1, 28, 28)
# conv blocks
x = self.pool(F.relu(self.conv1(x)))
x = self.batch_norm1(x)
x = self.swish(self.conv2(x))
x = self.dropout(x)
x = self.batch_norm2(x)
x = self.pool(self.swish(self.conv3(x)))
x = self.dropout(x)
x = self.batch_norm3(x)
# reshape for linear layers
x = x.view(-1, 128*4*4)
# linear block
x = self.swish(self.fc1(x))
x = self.dropout(x)
x = self.swish(self.fc2(x))
x = self.dropout(x)
x = self.batch_norm4(x)
x = F.relu(self.fc3(x))
x = self.fc4(x)
return x
def swish(self, x):
return x * torch.sigmoid(x) | en | 0.7259 | # correct shape # conv blocks # reshape for linear layers # linear block | 2.848 | 3 |
2_postitionierung/5_vierQuadrate.py | Coding-for-the-Arts/drawbot-samples | 0 | 6615949 | <reponame>Coding-for-the-Arts/drawbot-samples
"""
Vier Quadrate in RGB
Die Funktion translate() verschiebt den Nullpunkt des Koordinatensystems.
"""
newPage(300, 300)  # DrawBot: 300x300pt canvas -- assumes DrawBot builtins are in scope
x = 0
y = 0
dia = width() / 2  # each square is half the canvas width
fill(1, 0, 0)  # red
stroke(0)  # black outline
rect(x, y, dia, dia)
# 1. Translate: shift the coordinate origin 30pt right and 30pt up
translate(30, 30)
fill(0, 1, 0)  # green
rect(x, y, dia, dia)
# 2. Translate: shifts accumulate -- the origin is now 60pt from its start
translate(30, 30)
fill(0, 0, 1)  # blue
rect(x, y, dia, dia)
# 3. Translate: the origin is now 90pt from its start
translate(30, 30)
fill(1, 1, 1)  # white
rect(x, y, dia, dia)
# x and y are still 0: translate() moves the origin, not the variables
print("x: ", x, " y: ", y)
"""
Fragen:
- Warum haben x und y auf Zeile 35 den Wert 0?
- Wie weit hat sich der Nullpunkt am Ende des Programms vom seinem Ursprung entfernt?
Aufgabe:
- Versuche die Quadrate mit translate() nebeneinander anzuordnen,
so dass sie die gesamte Fläche ausfüllen.
Tipp:
- Benutze width() und height() um die Verschiebungen zu berechnen
""" | """
Vier Quadrate in RGB
Die Funktion translate() verschiebt den Nullpunkt des Koordinatensystems.
"""
newPage(300, 300)
x = 0
y = 0
dia = width() / 2
fill(1, 0, 0)
stroke(0)
rect(x, y, dia, dia)
# 1. Translate
translate(30, 30)
fill(0, 1, 0)
rect(x, y, dia, dia)
# 2. Translate
translate(30, 30)
fill(0, 0, 1)
rect(x, y, dia, dia)
# 3. Translate
translate(30, 30)
fill(1, 1, 1)
rect(x, y, dia, dia)
print("x: ", x, " y: ", y)
"""
Fragen:
- Warum haben x und y auf Zeile 35 den Wert 0?
- Wie weit hat sich der Nullpunkt am Ende des Programms vom seinem Ursprung entfernt?
Aufgabe:
- Versuche die Quadrate mit translate() nebeneinander anzuordnen,
so dass sie die gesamte Fläche ausfüllen.
Tipp:
- Benutze width() und height() um die Verschiebungen zu berechnen
""" | de | 0.981942 | Vier Quadrate in RGB Die Funktion translate() verschiebt den Nullpunkt des Koordinatensystems. # 1. Translate # 2. Translate # 3. Translate Fragen: - Warum haben x und y auf Zeile 35 den Wert 0? - Wie weit hat sich der Nullpunkt am Ende des Programms vom seinem Ursprung entfernt? Aufgabe: - Versuche die Quadrate mit translate() nebeneinander anzuordnen, so dass sie die gesamte Fläche ausfüllen. Tipp: - Benutze width() und height() um die Verschiebungen zu berechnen | 3.426841 | 3 |
goodcode.py | G00364756/PythonExamples | 0 | 6615950 | <reponame>G00364756/PythonExamples
#good code
def isPalin(n):
    """Return the string "True" if *n* reads the same backwards, else "False".

    Note: this deliberately returns the *strings* "True"/"False" rather than
    booleans, because callers in this file compare against those literals.
    """
    digits = str(n)                   # works for any value with a str() form
    return str(digits == digits[::-1])  # str(True) -> "True", str(False) -> "False"
def isPalarray(x):
    """Return the list of palindromic numbers from *x* up to (excluding) 100.

    Relies on isPalin(), which returns the string "True" or "False".
    Fix: the original called isPalin() twice per iteration (once in the ``if``
    and once in the ``elif``) and would have looped forever on any return
    value other than those two strings; a single call with a plain fall-through
    is both cheaper and safe.
    """
    palindromes = []
    while x < 100:
        if isPalin(x) == "True":
            palindromes.append(x)
        x = x + 1  # always advance, whatever isPalin() returned
    return palindromes
def iterate(t):
    """Return the sorted, de-duplicated products a*b for a, b in 100 down to 6.

    Bug fix: the original wrote ``range(100, q, 1)`` — with a positive step and
    start > stop that range is *empty*, so both loops never ran and the
    function always returned [].  Counting down with step -1 produces the
    intended candidates.  The parameter *t* is unused; it is kept only for
    interface compatibility with existing callers.
    """
    q = 5  # exclusive lower bound for b
    r = 5  # exclusive lower bound for a
    products = set()  # a set de-duplicates in O(1) per insert
    for b in range(100, q, -1):      # 100, 99, ..., 6
        for a in range(100, r, -1):
            products.add(a * b)
    return sorted(products)          # ascending, no duplicates
y = 1                 # dummy value; iterate() never reads its parameter
ans = iterate(y)
print(ans)
x = 1                 # start scanning for palindromes at 1
u = isPalarray(x)
print(u)
def Primenumbers(number, count=20):
    """Return the first *count* prime numbers (default 20).

    Fixes the original docstring, which wrongly claimed the function "returns
    the factors of any number" — it collects primes.  The *number* parameter
    is unused and kept only for interface compatibility.  Candidates are now
    tested by trial division against the primes found so far, stopping at
    sqrt(candidate), instead of enumerating every divisor of every candidate.
    """
    primes = []
    candidate = 2  # 1 is not prime, so start the search at 2
    while len(primes) < count:
        is_prime = True
        for p in primes:
            if p * p > candidate:  # no divisor <= sqrt(candidate) found
                break
            if candidate % p == 0:
                is_prime = False
                break
        if is_prime:
            primes.append(candidate)
        candidate += 1
    return primes
ans = Primenumbers(1)  # argument is ignored by the function body; yields the first 20 primes
print(ans) | #good code
def isPalin(n):
    """Return the string "True" if *n* reads the same backwards, else "False".

    Note: this deliberately returns the *strings* "True"/"False" rather than
    booleans, because callers in this file compare against those literals.
    """
    digits = str(n)                   # works for any value with a str() form
    return str(digits == digits[::-1])  # str(True) -> "True", str(False) -> "False"
def isPalarray(x):
    """Return the list of palindromic numbers from *x* up to (excluding) 100.

    Relies on isPalin(), which returns the string "True" or "False".
    Fix: the original called isPalin() twice per iteration (once in the ``if``
    and once in the ``elif``) and would have looped forever on any return
    value other than those two strings; a single call with a plain fall-through
    is both cheaper and safe.
    """
    palindromes = []
    while x < 100:
        if isPalin(x) == "True":
            palindromes.append(x)
        x = x + 1  # always advance, whatever isPalin() returned
    return palindromes
def iterate(t):
    """Return the sorted, de-duplicated products a*b for a, b in 100 down to 6.

    Bug fix: the original wrote ``range(100, q, 1)`` — with a positive step and
    start > stop that range is *empty*, so both loops never ran and the
    function always returned [].  Counting down with step -1 produces the
    intended candidates.  The parameter *t* is unused; it is kept only for
    interface compatibility with existing callers.
    """
    q = 5  # exclusive lower bound for b
    r = 5  # exclusive lower bound for a
    products = set()  # a set de-duplicates in O(1) per insert
    for b in range(100, q, -1):      # 100, 99, ..., 6
        for a in range(100, r, -1):
            products.add(a * b)
    return sorted(products)          # ascending, no duplicates
y = 1                 # dummy value; iterate() never reads its parameter
ans = iterate(y)
print(ans)
x = 1                 # start scanning for palindromes at 1
u = isPalarray(x)
print(u)
def Primenumbers(number, count=20):
    """Return the first *count* prime numbers (default 20).

    Fixes the original docstring, which wrongly claimed the function "returns
    the factors of any number" — it collects primes.  The *number* parameter
    is unused and kept only for interface compatibility.  Candidates are now
    tested by trial division against the primes found so far, stopping at
    sqrt(candidate), instead of enumerating every divisor of every candidate.
    """
    primes = []
    candidate = 2  # 1 is not prime, so start the search at 2
    while len(primes) < count:
        is_prime = True
        for p in primes:
            if p * p > candidate:  # no divisor <= sqrt(candidate) found
                break
            if candidate % p == 0:
                is_prime = False
                break
        if is_prime:
            primes.append(candidate)
        candidate += 1
    return primes
ans = Primenumbers(1)  # argument is ignored by the function body; yields the first 20 primes
print(ans)