code
stringlengths 1
199k
|
|---|
import codecs
import sys
from glob import glob
import os
import subprocess
from string import Template
from distutils.core import setup
try:
from py2exe.build_exe import py2exe
except ImportError:
from py2exe.distutils_buildexe import py2exe
import syncplay
from syncplay.messages import getMissingStrings
# Build-time sanity check: surface any translation-key drift (missing or
# unused strings) so packagers notice it before shipping an installer.
missingStrings = getMissingStrings()
if missingStrings is not None and missingStrings != "":
    import warnings
    warnings.warn("MISSING/UNUSED STRINGS DETECTED:\n{}".format(missingStrings))
def get_nsis_path():
    """Locate the NSIS 3 compiler (makensis.exe).

    Reads the NSIS install directory from the 32-bit registry view and
    checks that the major version is at least 3.  Falls back to the bare
    binary name (resolved via PATH) when the registry key is absent.

    Returns:
        str: path to makensis.exe, or just "makensis.exe" as a fallback.

    Raises:
        Exception: if an NSIS installation older than version 3 is found.
    """
    bin_name = "makensis.exe"
    from winreg import HKEY_LOCAL_MACHINE as HKLM
    from winreg import KEY_READ, KEY_WOW64_32KEY, OpenKey, QueryValueEx
    try:
        # Context manager closes the registry handle (the original leaked it).
        # KEY_WOW64_32KEY is required because NSIS registers under the
        # 32-bit registry view.
        with OpenKey(HKLM, "Software\\NSIS", 0, KEY_READ | KEY_WOW64_32KEY) as nsisreg:
            if QueryValueEx(nsisreg, "VersionMajor")[0] >= 3:
                # The key's default ("") value holds the install directory.
                return "{}\\{}".format(QueryValueEx(nsisreg, "")[0], bin_name)
            raise Exception("You must install NSIS 3 or later.")
    except OSError:
        # OSError is the portable, non-deprecated spelling of WindowsError;
        # winreg raises it when the NSIS key does not exist.
        return bin_name
# Path to the NSIS compiler, resolved once at import time.
NSIS_COMPILE = get_nsis_path()
# py2exe output directory; also baked into the NSIS file lists below.
OUT_DIR = "syncplay_v{}".format(syncplay.version)
# Temporary .nsi script written by NSISScript.create() and removed after compile.
SETUP_SCRIPT_PATH = "syncplay_setup.nsi"
NSIS_SCRIPT_TEMPLATE = r"""
!include LogicLib.nsh
!include nsDialogs.nsh
!include FileFunc.nsh
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\English.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Polish.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Russian.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\German.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Italian.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Spanish.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\PortugueseBR.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Portuguese.nlf"
LoadLanguageFile "$${NSISDIR}\Contrib\Language files\Turkish.nlf"
Unicode true
Name "Syncplay $version"
OutFile "Syncplay-$version-Setup.exe"
InstallDir $$PROGRAMFILES\Syncplay
RequestExecutionLevel admin
ManifestDPIAware false
XPStyle on
Icon syncplay\resources\icon.ico ;Change DIR
SetCompressor /SOLID lzma
VIProductVersion "$version.0"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ENGLISH} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_POLISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_POLISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_POLISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_POLISH} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_RUSSIAN} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_ITALIAN} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_SPANISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_SPANISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_SPANISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_SPANISH} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESEBR} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_PORTUGUESE} "FileDescription" "Syncplay"
VIAddVersionKey /LANG=$${LANG_TURKISH} "ProductName" "Syncplay"
VIAddVersionKey /LANG=$${LANG_TURKISH} "FileVersion" "$version.0"
VIAddVersionKey /LANG=$${LANG_TURKISH} "LegalCopyright" "Syncplay"
VIAddVersionKey /LANG=$${LANG_TURKISH} "FileDescription" "Syncplay"
LangString ^SyncplayLanguage $${LANG_ENGLISH} "en"
LangString ^Associate $${LANG_ENGLISH} "Associate Syncplay with multimedia files."
LangString ^Shortcut $${LANG_ENGLISH} "Create Shortcuts in following locations:"
LangString ^StartMenu $${LANG_ENGLISH} "Start Menu"
LangString ^Desktop $${LANG_ENGLISH} "Desktop"
LangString ^QuickLaunchBar $${LANG_ENGLISH} "Quick Launch Bar"
LangString ^AutomaticUpdates $${LANG_ENGLISH} "Check for updates automatically"
LangString ^UninstConfig $${LANG_ENGLISH} "Delete configuration file."
LangString ^SyncplayLanguage $${LANG_POLISH} "pl"
LangString ^Associate $${LANG_POLISH} "Skojarz Syncplaya z multimediami"
LangString ^Shortcut $${LANG_POLISH} "Utworz skroty w nastepujacych miejscach:"
LangString ^StartMenu $${LANG_POLISH} "Menu Start"
LangString ^Desktop $${LANG_POLISH} "Pulpit"
LangString ^QuickLaunchBar $${LANG_POLISH} "Pasek szybkiego uruchamiania"
LangString ^UninstConfig $${LANG_POLISH} "Usun plik konfiguracyjny."
LangString ^SyncplayLanguage $${LANG_RUSSIAN} "ru"
LangString ^Associate $${LANG_RUSSIAN} "Ассоциировать Syncplay с видеофайлами"
LangString ^Shortcut $${LANG_RUSSIAN} "Создать ярлыки:"
LangString ^StartMenu $${LANG_RUSSIAN} "в меню Пуск"
LangString ^Desktop $${LANG_RUSSIAN} "на рабочем столе"
LangString ^QuickLaunchBar $${LANG_RUSSIAN} "в меню быстрого запуска"
LangString ^AutomaticUpdates $${LANG_RUSSIAN} "Проверять обновления автоматически"; TODO: Confirm Russian translation ("Check for updates automatically")
LangString ^UninstConfig $${LANG_RUSSIAN} "Удалить файл настроек."
LangString ^SyncplayLanguage $${LANG_GERMAN} "de"
LangString ^Associate $${LANG_GERMAN} "Syncplay als Standardprogramm für Multimedia-Dateien verwenden."
LangString ^Shortcut $${LANG_GERMAN} "Erstelle Verknüpfungen an folgenden Orten:"
LangString ^StartMenu $${LANG_GERMAN} "Startmenü"
LangString ^Desktop $${LANG_GERMAN} "Desktop"
LangString ^QuickLaunchBar $${LANG_GERMAN} "Schnellstartleiste"
LangString ^AutomaticUpdates $${LANG_GERMAN} "Automatisch nach Updates suchen";
LangString ^UninstConfig $${LANG_GERMAN} "Konfigurationsdatei löschen."
LangString ^SyncplayLanguage $${LANG_ITALIAN} "it"
LangString ^Associate $${LANG_ITALIAN} "Associa Syncplay con i file multimediali."
LangString ^Shortcut $${LANG_ITALIAN} "Crea i collegamenti nei percorsi seguenti:"
LangString ^StartMenu $${LANG_ITALIAN} "Menu Start"
LangString ^Desktop $${LANG_ITALIAN} "Desktop"
LangString ^QuickLaunchBar $${LANG_ITALIAN} "Barra di avvio rapido"
LangString ^AutomaticUpdates $${LANG_ITALIAN} "Controllo automatico degli aggiornamenti"
LangString ^UninstConfig $${LANG_ITALIAN} "Cancella i file di configurazione."
LangString ^SyncplayLanguage $${LANG_SPANISH} "es"
LangString ^Associate $${LANG_SPANISH} "Asociar Syncplay con archivos multimedia."
LangString ^Shortcut $${LANG_SPANISH} "Crear accesos directos en las siguientes ubicaciones:"
LangString ^StartMenu $${LANG_SPANISH} "Menú de inicio"
LangString ^Desktop $${LANG_SPANISH} "Escritorio"
LangString ^QuickLaunchBar $${LANG_SPANISH} "Barra de acceso rápido"
LangString ^AutomaticUpdates $${LANG_SPANISH} "Buscar actualizaciones automáticamente"
LangString ^UninstConfig $${LANG_SPANISH} "Borrar archivo de configuración."
LangString ^SyncplayLanguage $${LANG_PORTUGUESEBR} "pt_BR"
LangString ^Associate $${LANG_PORTUGUESEBR} "Associar Syncplay aos arquivos multimídia."
LangString ^Shortcut $${LANG_PORTUGUESEBR} "Criar atalhos nos seguintes locais:"
LangString ^StartMenu $${LANG_PORTUGUESEBR} "Menu Iniciar"
LangString ^Desktop $${LANG_PORTUGUESEBR} "Área de trabalho"
LangString ^QuickLaunchBar $${LANG_PORTUGUESEBR} "Barra de acesso rápido"
LangString ^AutomaticUpdates $${LANG_PORTUGUESEBR} "Verificar atualizações automaticamente"
LangString ^UninstConfig $${LANG_PORTUGUESEBR} "Deletar arquivo de configuração."
LangString ^SyncplayLanguage $${LANG_PORTUGUESE} "pt_PT"
LangString ^Associate $${LANG_PORTUGUESE} "Associar Syncplay aos ficheiros multimédia."
LangString ^Shortcut $${LANG_PORTUGUESE} "Criar atalhos nos seguintes locais:"
LangString ^StartMenu $${LANG_PORTUGUESE} "Menu Iniciar"
LangString ^Desktop $${LANG_PORTUGUESE} "Área de trabalho"
LangString ^QuickLaunchBar $${LANG_PORTUGUESE} "Barra de acesso rápido"
LangString ^AutomaticUpdates $${LANG_PORTUGUESE} "Verificar atualizações automaticamente"
LangString ^UninstConfig $${LANG_PORTUGUESE} "Apagar ficheiro de configuração."
LangString ^SyncplayLanguage $${LANG_TURKISH} "tr"
LangString ^Associate $${LANG_TURKISH} "Syncplay'i ortam dosyalarıyla ilişkilendirin."
LangString ^Shortcut $${LANG_TURKISH} "Aşağıdaki konumlarda kısayollar oluşturun:"
LangString ^StartMenu $${LANG_TURKISH} "Başlangıç menüsü"
LangString ^Desktop $${LANG_TURKISH} "Masaüstü"
LangString ^QuickLaunchBar $${LANG_TURKISH} "Hızlı Başlatma Çubuğu"
LangString ^AutomaticUpdates $${LANG_TURKISH} "Güncellemeleri otomatik denetle"
LangString ^UninstConfig $${LANG_TURKISH} "Yapılandırma dosyasını silin."
; Remove text to save space
LangString ^ClickInstall $${LANG_GERMAN} " "
PageEx license
LicenseData syncplay\resources\license.rtf
PageExEnd
Page custom DirectoryCustom DirectoryCustomLeave
Page instFiles
UninstPage custom un.installConfirm un.installConfirmLeave
UninstPage instFiles
Var Dialog
Var Icon_Syncplay
Var Icon_Syncplay_Handle
;Var CheckBox_Associate
Var CheckBox_AutomaticUpdates
Var CheckBox_StartMenuShortcut
Var CheckBox_DesktopShortcut
Var CheckBox_QuickLaunchShortcut
;Var CheckBox_Associate_State
Var CheckBox_AutomaticUpdates_State
Var CheckBox_StartMenuShortcut_State
Var CheckBox_DesktopShortcut_State
Var CheckBox_QuickLaunchShortcut_State
Var Button_Browse
Var Directory
Var GroupBox_DirSub
Var Label_Text
Var Label_Shortcut
Var Label_Size
Var Label_Space
Var Text_Directory
Var Uninst_Dialog
Var Uninst_Icon
Var Uninst_Icon_Handle
Var Uninst_Label_Directory
Var Uninst_Label_Text
Var Uninst_Text_Directory
Var Uninst_CheckBox_Config
Var Uninst_CheckBox_Config_State
Var Size
Var SizeHex
Var AvailibleSpace
Var AvailibleSpaceGiB
Var Drive
Var VLC_Directory
;!macro APP_ASSOCIATE EXT FileCLASS DESCRIPTION COMMANDTEXT COMMAND
; WriteRegStr HKCR ".$${EXT}" "" "$${FileCLASS}"
; WriteRegStr HKCR "$${FileCLASS}" "" `$${DESCRIPTION}`
; WriteRegStr HKCR "$${FileCLASS}\shell" "" "open"
; WriteRegStr HKCR "$${FileCLASS}\shell\open" "" `$${COMMANDTEXT}`
; WriteRegStr HKCR "$${FileCLASS}\shell\open\command" "" `$${COMMAND}`
;!macroend
!macro APP_UNASSOCIATE EXT FileCLASS
; Backup the previously associated File class
ReadRegStr $$R0 HKCR ".$${EXT}" `$${FileCLASS}_backup`
WriteRegStr HKCR ".$${EXT}" "" "$$R0"
DeleteRegKey HKCR `$${FileCLASS}`
!macroend
;!macro ASSOCIATE EXT
; !insertmacro APP_ASSOCIATE "$${EXT}" "Syncplay.$${EXT}" "$$INSTDIR\Syncplay.exe,%1%" \
; "Open with Syncplay" "$$INSTDIR\Syncplay.exe $$\"%1$$\""
;!macroend
!macro UNASSOCIATE EXT
!insertmacro APP_UNASSOCIATE "$${EXT}" "Syncplay.$${EXT}"
!macroend
;Prevents from running more than one instance of installer and sets default state of checkboxes
Function .onInit
System::Call 'kernel32::CreateMutexA(i 0, i 0, t "SyncplayMutex") i .r1 ?e'
Pop $$R0
StrCmp $$R0 0 +3
MessageBox MB_OK|MB_ICONEXCLAMATION "The installer is already running."
Abort
;StrCpy $$CheckBox_Associate_State $${BST_CHECKED}
StrCpy $$CheckBox_StartMenuShortcut_State $${BST_CHECKED}
Call GetSize
Call DriveSpace
Call Language
FunctionEnd
;Language selection dialog
Function Language
Push ""
Push $${LANG_ENGLISH}
Push English
Push $${LANG_POLISH}
Push Polski
Push $${LANG_RUSSIAN}
Push Русский
Push $${LANG_GERMAN}
Push Deutsch
Push $${LANG_ITALIAN}
Push Italiano
Push $${LANG_SPANISH}
Push Español
Push $${LANG_PORTUGUESEBR}
Push 'Português do Brasil'
Push $${LANG_PORTUGUESE}
Push 'Português de Portugal'
Push $${LANG_TURKISH}
Push 'Türkçe'
Push A ; A means auto count languages
LangDLL::LangDialog "Language Selection" "Please select the language of Syncplay and the installer"
Pop $$LANGUAGE
StrCmp $$LANGUAGE "cancel" 0 +2
Abort
FunctionEnd
Function DirectoryCustom
nsDialogs::Create 1018
Pop $$Dialog
GetFunctionAddress $$R8 DirectoryCustomLeave
nsDialogs::OnBack $$R8
$${NSD_CreateIcon} 0u 0u 22u 20u ""
Pop $$Icon_Syncplay
$${NSD_SetIconFromInstaller} $$Icon_Syncplay $$Icon_Syncplay_Handle
$${NSD_CreateLabel} 25u 0u 241u 34u "$$(^DirText)"
Pop $$Label_Text
$${NSD_CreateText} 8u 38u 187u 12u "$$INSTDIR"
Pop $$Text_Directory
$${NSD_SetFocus} $$Text_Directory
$${NSD_CreateBrowseButton} 202u 37u 55u 14u "$$(^BrowseBtn)"
Pop $$Button_Browse
$${NSD_OnClick} $$Button_Browse DirectoryBrowseDialog
$${NSD_CreateGroupBox} 1u 27u 264u 30u "$$(^DirSubText)"
Pop $$GroupBox_DirSub
$${NSD_CreateLabel} 0u 122u 132 8u "$$(^SpaceRequired)$$SizeMB"
Pop $$Label_Size
$${NSD_CreateLabel} 321u 122u 132 8u "$$(^SpaceAvailable)$$AvailibleSpaceGiB.$$AvailibleSpaceGB"
Pop $$Label_Space
;$${NSD_CreateCheckBox} 8u 59u 187u 10u "$$(^Associate)"
;Pop $$CheckBox_Associate
$${NSD_CreateCheckBox} 8u 72u 250u 10u "$$(^AutomaticUpdates)"
Pop $$CheckBox_AutomaticUpdates
$${NSD_Check} $$CheckBox_AutomaticUpdates
$${NSD_CreateLabel} 8u 95u 187u 10u "$$(^Shortcut)"
Pop $$Label_Shortcut
$${NSD_CreateCheckbox} 8u 105u 70u 10u "$$(^StartMenu)"
Pop $$CheckBox_StartMenuShortcut
$${NSD_CreateCheckbox} 78u 105u 70u 10u "$$(^Desktop)"
Pop $$CheckBox_DesktopShortcut
$${NSD_CreateCheckbox} 158u 105u 130u 10u "$$(^QuickLaunchBar)"
Pop $$CheckBox_QuickLaunchShortcut
;$${If} $$CheckBox_Associate_State == $${BST_CHECKED}
; $${NSD_Check} $$CheckBox_Associate
;$${EndIf}
$${If} $$CheckBox_StartMenuShortcut_State == $${BST_CHECKED}
$${NSD_Check} $$CheckBox_StartMenuShortcut
$${EndIf}
$${If} $$CheckBox_DesktopShortcut_State == $${BST_CHECKED}
$${NSD_Check} $$CheckBox_DesktopShortcut
$${EndIf}
$${If} $$CheckBox_QuickLaunchShortcut_State == $${BST_CHECKED}
$${NSD_Check} $$CheckBox_QuickLaunchShortcut
$${EndIf}
$${If} $$CheckBox_AutomaticUpdates_State == $${BST_CHECKED}
$${NSD_Check} $$CheckBox_AutomaticUpdates
$${EndIf}
nsDialogs::Show
$${NSD_FreeIcon} $$Icon_Syncplay_Handle
FunctionEnd
Function DirectoryCustomLeave
$${NSD_GetText} $$Text_Directory $$INSTDIR
;$${NSD_GetState} $$CheckBox_Associate $$CheckBox_Associate_State
$${NSD_GetState} $$CheckBox_AutomaticUpdates $$CheckBox_AutomaticUpdates_State
$${NSD_GetState} $$CheckBox_StartMenuShortcut $$CheckBox_StartMenuShortcut_State
$${NSD_GetState} $$CheckBox_DesktopShortcut $$CheckBox_DesktopShortcut_State
$${NSD_GetState} $$CheckBox_QuickLaunchShortcut $$CheckBox_QuickLaunchShortcut_State
FunctionEnd
Function DirectoryBrowseDialog
nsDialogs::SelectFolderDialog $$(^DirBrowseText)
Pop $$Directory
$${If} $$Directory != error
StrCpy $$INSTDIR $$Directory
$${NSD_SetText} $$Text_Directory $$INSTDIR
Call DriveSpace
$${NSD_SetText} $$Label_Space "$$(^SpaceAvailable)$$AvailibleSpaceGiB.$$AvailibleSpaceGB"
$${EndIf}
Abort
FunctionEnd
Function GetSize
StrCpy $$Size "$totalSize"
IntOp $$Size $$Size / 1024
IntFmt $$SizeHex "0x%08X" $$Size
IntOp $$Size $$Size / 1024
FunctionEnd
;Calculates Free Space on HDD
Function DriveSpace
StrCpy $$Drive $$INSTDIR 1
$${DriveSpace} "$$Drive:\" "/D=F /S=M" $$AvailibleSpace
IntOp $$AvailibleSpaceGiB $$AvailibleSpace / 1024
IntOp $$AvailibleSpace $$AvailibleSpace % 1024
IntOp $$AvailibleSpace $$AvailibleSpace / 102
FunctionEnd
Function InstallOptions
;$${If} $$CheckBox_Associate_State == $${BST_CHECKED}
; Call Associate
; DetailPrint "Associated Syncplay with multimedia files"
;$${EndIf}
$${If} $$CheckBox_StartMenuShortcut_State == $${BST_CHECKED}
CreateDirectory $$SMPROGRAMS\Syncplay
SetOutPath "$$INSTDIR"
CreateShortCut "$$SMPROGRAMS\Syncplay\Syncplay.lnk" "$$INSTDIR\Syncplay.exe" ""
CreateShortCut "$$SMPROGRAMS\Syncplay\Syncplay Server.lnk" "$$INSTDIR\syncplayServer.exe" ""
CreateShortCut "$$SMPROGRAMS\Syncplay\Uninstall.lnk" "$$INSTDIR\Uninstall.exe" ""
WriteINIStr "$$SMPROGRAMS\Syncplay\SyncplayWebsite.url" "InternetShortcut" "URL" "https://syncplay.pl"
$${EndIf}
$${If} $$CheckBox_DesktopShortcut_State == $${BST_CHECKED}
SetOutPath "$$INSTDIR"
CreateShortCut "$$DESKTOP\Syncplay.lnk" "$$INSTDIR\Syncplay.exe" ""
$${EndIf}
$${If} $$CheckBox_QuickLaunchShortcut_State == $${BST_CHECKED}
SetOutPath "$$INSTDIR"
CreateShortCut "$$QUICKLAUNCH\Syncplay.lnk" "$$INSTDIR\Syncplay.exe" ""
$${EndIf}
FunctionEnd
;Associates extensions with Syncplay
;Function Associate
; !insertmacro ASSOCIATE avi
; !insertmacro ASSOCIATE mpg
; !insertmacro ASSOCIATE mpeg
; !insertmacro ASSOCIATE mpe
; !insertmacro ASSOCIATE m1v
; !insertmacro ASSOCIATE m2v
; !insertmacro ASSOCIATE mpv2
; !insertmacro ASSOCIATE mp2v
; !insertmacro ASSOCIATE mkv
; !insertmacro ASSOCIATE mp4
; !insertmacro ASSOCIATE m4v
; !insertmacro ASSOCIATE mp4v
; !insertmacro ASSOCIATE 3gp
; !insertmacro ASSOCIATE 3gpp
; !insertmacro ASSOCIATE 3g2
; !insertmacro ASSOCIATE 3pg2
; !insertmacro ASSOCIATE flv
; !insertmacro ASSOCIATE f4v
; !insertmacro ASSOCIATE rm
; !insertmacro ASSOCIATE wmv
; !insertmacro ASSOCIATE swf
; !insertmacro ASSOCIATE rmvb
; !insertmacro ASSOCIATE divx
; !insertmacro ASSOCIATE amv
;FunctionEnd
Function WriteRegistry
Call GetSize
WriteRegStr HKLM SOFTWARE\Syncplay "Install_Dir" "$$INSTDIR"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "DisplayName" "Syncplay"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "InstallLocation" "$$INSTDIR"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "UninstallString" '"$$INSTDIR\uninstall.exe"'
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "DisplayIcon" "$$INSTDIR\resources\icon.ico"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "Publisher" "Syncplay"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "DisplayVersion" "$version"
WriteRegStr HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "URLInfoAbout" "https://syncplay.pl/"
WriteRegDWORD HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "NoModify" 1
WriteRegDWORD HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "NoRepair" 1
WriteRegDWORD HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "EstimatedSize" "$$SizeHex"
WriteINIStr $$APPDATA\syncplay.ini general language $$(^SyncplayLanguage)
$${If} $$CheckBox_AutomaticUpdates_State == $${BST_CHECKED}
WriteINIStr $$APPDATA\syncplay.ini general CheckForUpdatesAutomatically "True"
$${Else}
WriteINIStr $$APPDATA\syncplay.ini general CheckForUpdatesAutomatically "False"
$${EndIf}
FunctionEnd
Function un.installConfirm
nsDialogs::Create 1018
Pop $$Uninst_Dialog
$${NSD_CreateIcon} 0u 1u 22u 20u ""
Pop $$Uninst_Icon
$${NSD_SetIconFromInstaller} $$Uninst_Icon $$Uninst_Icon_Handle
$${NSD_CreateLabel} 0u 45u 55u 8u "$$(^UninstallingSubText)"
Pop $$Uninst_Label_Directory
$${NSD_CreateLabel} 25u 0u 241u 34u "$$(^UninstallingText)"
Pop $$Uninst_Label_Text
ReadRegStr $$INSTDIR HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "InstallLocation"
$${NSD_CreateText} 56u 43u 209u 12u "$$INSTDIR"
Pop $$Uninst_Text_Directory
EnableWindow $$Uninst_Text_Directory 0
$${NSD_CreateCheckBox} 0u 60u 250u 10u "$$(^UninstConfig)"
Pop $$Uninst_CheckBox_Config
nsDialogs::Show
$${NSD_FreeIcon} $$Uninst_Icon_Handle
FunctionEnd
Function un.installConfirmLeave
$${NSD_GetState} $$Uninst_CheckBox_Config $$Uninst_CheckBox_Config_State
FunctionEnd
Function un.AssociateDel
!insertmacro UNASSOCIATE avi
!insertmacro UNASSOCIATE mpg
!insertmacro UNASSOCIATE mpeg
!insertmacro UNASSOCIATE mpe
!insertmacro UNASSOCIATE m1v
!insertmacro UNASSOCIATE m2v
!insertmacro UNASSOCIATE mpv2
!insertmacro UNASSOCIATE mp2v
!insertmacro UNASSOCIATE mkv
!insertmacro UNASSOCIATE mp4
!insertmacro UNASSOCIATE m4v
!insertmacro UNASSOCIATE mp4v
!insertmacro UNASSOCIATE 3gp
!insertmacro UNASSOCIATE 3gpp
!insertmacro UNASSOCIATE 3g2
!insertmacro UNASSOCIATE 3pg2
!insertmacro UNASSOCIATE flv
!insertmacro UNASSOCIATE f4v
!insertmacro UNASSOCIATE rm
!insertmacro UNASSOCIATE wmv
!insertmacro UNASSOCIATE swf
!insertmacro UNASSOCIATE rmvb
!insertmacro UNASSOCIATE divx
!insertmacro UNASSOCIATE amv
FunctionEnd
Function un.InstallOptions
Delete $$SMPROGRAMS\Syncplay\Syncplay.lnk
Delete "$$SMPROGRAMS\Syncplay\Syncplay Server.lnk"
Delete $$SMPROGRAMS\Syncplay\Uninstall.lnk
Delete $$SMPROGRAMS\Syncplay\SyncplayWebsite.url
RMDir $$SMPROGRAMS\Syncplay
Delete $$DESKTOP\Syncplay.lnk
Delete $$QUICKLAUNCH\Syncplay.lnk
ReadRegStr $$VLC_Directory HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay" "VLCInstallLocation"
IfFileExists "$$VLC_Directory\lua\intf\syncplay.lua" 0 +2
Delete $$VLC_Directory\lua\intf\syncplay.lua
FunctionEnd
Section "Install"
SetOverwrite on
SetOutPath $$INSTDIR
WriteUninstaller uninstall.exe
$installFiles
Call InstallOptions
Call WriteRegistry
SectionEnd
Section "Uninstall"
Call un.AssociateDel
Call un.InstallOptions
$uninstallFiles
DeleteRegKey HKLM "Software\Microsoft\Windows\CurrentVersion\Uninstall\Syncplay"
DeleteRegKey HKLM SOFTWARE\Syncplay
Delete $$INSTDIR\uninstall.exe
RMDir $$INSTDIR\Syncplay\\resources\lua\intf
RMDir $$INSTDIR\Syncplay\\resources\lua
RMDir $$INSTDIR\Syncplay\\resources
RMDir $$INSTDIR\resources
RMDir $$INSTDIR\lib
RMDir $$INSTDIR
$${If} $$Uninst_CheckBox_Config_State == $${BST_CHECKED}
IfFileExists "$$APPDATA\.syncplay" 0 +2
Delete $$APPDATA\.syncplay
IfFileExists "$$APPDATA\syncplay.ini" 0 +2
Delete $$APPDATA\syncplay.ini
$${EndIf}
SectionEnd
"""
class NSISScript(object):
    """Generates and compiles the NSIS installer script for Syncplay.

    Workflow: create() walks the py2exe output directory, renders the
    NSIS template with install/uninstall file lists, and writes the .nsi
    script; compile() then invokes makensis.exe on it.
    """

    def create(self):
        """Render the NSIS template and write it to SETUP_SCRIPT_PATH.

        Raises:
            RuntimeError: if a setup script already exists at the target path.
        """
        fileList, totalSize = self.getBuildDirContents(OUT_DIR)
        print("Total size eq: {}".format(totalSize))
        installFiles = self.prepareInstallListTemplate(fileList)
        uninstallFiles = self.prepareDeleteListTemplate(fileList)
        if os.path.isfile(SETUP_SCRIPT_PATH):
            raise RuntimeError("Cannot create setup script, file exists at {}".format(SETUP_SCRIPT_PATH))
        contents = Template(NSIS_SCRIPT_TEMPLATE).substitute(
            version=syncplay.version,
            uninstallFiles=uninstallFiles,
            installFiles=installFiles,
            totalSize=totalSize,
        )
        # utf-8-sig: NSIS 3 expects a BOM to treat the script as Unicode.
        with codecs.open(SETUP_SCRIPT_PATH, "w", "utf-8-sig") as outfile:
            outfile.write(contents)

    def compile(self):
        """Run makensis.exe on the generated script, then delete the script.

        Returns:
            str: a warning message when the compiler is unavailable,
            otherwise None.

        Raises:
            RuntimeError: when makensis exits with a non-zero status.
        """
        if not os.path.isfile(NSIS_COMPILE):
            return "makensis.exe not found, won't create the installer"
        subproc = subprocess.Popen([NSIS_COMPILE, SETUP_SCRIPT_PATH], env=os.environ)
        subproc.communicate()
        retcode = subproc.returncode
        # Remove the temporary script regardless of the compiler outcome.
        os.remove(SETUP_SCRIPT_PATH)
        if retcode:
            raise RuntimeError("NSIS compilation return code: %d" % retcode)

    def getBuildDirContents(self, path):
        """Walk *path* and collect its files grouped by relative directory.

        Returns:
            tuple: ({relative_dir: [filename, ...]}, total_size_in_bytes)
        """
        fileList = {}
        totalSize = 0
        for root, _, files in os.walk(path):
            totalSize += sum(os.path.getsize(os.path.join(root, file_)) for file_ in files)
            if files:
                # Key by path relative to the build dir so the template can
                # recreate the same layout under $INSTDIR.
                new_root = root.replace(OUT_DIR, "").strip("\\")
                fileList.setdefault(new_root, []).extend(files)
        return fileList, totalSize

    def prepareInstallListTemplate(self, fileList):
        """Return NSIS SetOutPath/FILE commands installing every file."""
        create = []
        for dir_, files in fileList.items():
            create.append('SetOutPath "$INSTDIR\\{}"'.format(dir_))
            for file_ in files:
                create.append('FILE "{}\\{}\\{}"'.format(OUT_DIR, dir_, file_))
        return "\n".join(create)

    def prepareDeleteListTemplate(self, fileList):
        """Return NSIS DELETE/RMdir commands removing every installed file.

        Bug fix: RMdir was previously formatted with the *file* name and
        emitted once per file, so installed directories were never removed
        on uninstall.  Each directory is now removed exactly once, after
        all of its files have been deleted.
        """
        delete = []
        for dir_, files in fileList.items():
            for file_ in files:
                delete.append('DELETE "$INSTDIR\\{}\\{}"'.format(dir_, file_))
            delete.append('RMdir "$INSTDIR\\{}"'.format(dir_))
        return "\n".join(delete)
def pruneUnneededLibraries():
    """Delete Qt/PySide2 modules and DLLs that Syncplay never loads from the
    py2exe lib directory, shrinking the installer payload."""
    from pathlib import Path
    lib_dir = os.getcwd() + '\\' + OUT_DIR + '\\lib\\'
    # PySide2 extension modules that the client does not import.
    pyside_modules = ['PySide2.Qt3D*', 'PySide2.QtAxContainer.pyd', 'PySide2.QtCharts.pyd', 'PySide2.QtConcurrent.pyd',
                      'PySide2.QtDataVisualization.pyd', 'PySide2.QtHelp.pyd', 'PySide2.QtLocation.pyd',
                      'PySide2.QtMultimedia.pyd', 'PySide2.QtMultimediaWidgets.pyd', 'PySide2.QtOpenGL.pyd',
                      'PySide2.QtPositioning.pyd', 'PySide2.QtPrintSupport.pyd', 'PySide2.QtQml.pyd',
                      'PySide2.QtQuick.pyd', 'PySide2.QtQuickWidgets.pyd', 'PySide2.QtScxml.pyd', 'PySide2.QtSensors.pyd',
                      'PySide2.QtSql.pyd', 'PySide2.QtSvg.pyd', 'PySide2.QtTest.pyd', 'PySide2.QtTextToSpeech.pyd',
                      'PySide2.QtUiTools.pyd', 'PySide2.QtWebChannel.pyd', 'PySide2.QtWebEngine.pyd',
                      'PySide2.QtWebEngineCore.pyd', 'PySide2.QtWebEngineWidgets.pyd', 'PySide2.QtWebSockets.pyd',
                      'PySide2.QtWinExtras.pyd', 'PySide2.QtXml.pyd', 'PySide2.QtXmlPatterns.pyd']
    # Qt runtime DLLs backing the modules above.
    qt_dlls = ['Qt53D*', 'Qt5Charts.dll', 'Qt5Concurrent.dll', 'Qt5DataVisualization.dll', 'Qt5Gamepad.dll', 'Qt5Help.dll',
               'Qt5Location.dll', 'Qt5Multimedia.dll', 'Qt5MultimediaWidgets.dll', 'Qt5OpenGL.dll', 'Qt5Positioning.dll',
               'Qt5PrintSupport.dll', 'Qt5Quick.dll', 'Qt5QuickWidgets.dll', 'Qt5Scxml.dll', 'Qt5Sensors.dll', 'Qt5Sql.dll',
               'Qt5Svg.dll', 'Qt5Test.dll', 'Qt5TextToSpeech.dll', 'Qt5WebChannel.dll', 'Qt5WebEngine.dll',
               'Qt5WebEngineCore.dll', 'Qt5WebEngineWidgets.dll', 'Qt5WebSockets.dll', 'Qt5WinExtras.dll', 'Qt5Xml.dll',
               'Qt5XmlPatterns.dll']
    # MSVC runtime DLLs provided by the system / redistributable.
    msvc_dlls = ['MSVCP140.dll', 'VCRUNTIME140.dll']
    # 'api-*' sweeps up the api-ms-win-* forwarder DLL family.
    removal_patterns = pyside_modules + qt_dlls + msvc_dlls + ['api-*']
    root = Path(lib_dir)
    for pattern in removal_patterns:
        for match in root.glob(pattern):
            match.unlink()
def copyQtPlugins(paths):
    """Copy the required Qt plugin DLLs from the PySide2 installation into
    the build output directory, preserving their relative layout.

    *paths* is a list of backslash-relative plugin paths, e.g.
    'platforms\\qwindows.dll'.
    """
    import shutil
    from PySide2 import QtCore
    plugin_root = QtCore.QLibraryInfo.location(QtCore.QLibraryInfo.PluginsPath)
    plugin_root = plugin_root.replace('/', '\\')
    target_root = os.getcwd() + '\\' + OUT_DIR
    for rel_path in paths:
        rel_dir = os.path.split(rel_path)[0]
        # Recreate the plugin's subdirectory under the build dir first.
        os.makedirs(target_root + '\\' + rel_dir, exist_ok=True)
        shutil.copy(plugin_root + '\\' + rel_path, target_root + '\\' + rel_path)
class build_installer(py2exe):
    """py2exe build command extended to prune unused Qt libraries, bundle
    Qt plugins, and build the NSIS installer after the normal build."""

    def run(self):
        # Standard py2exe build first; everything below post-processes OUT_DIR.
        py2exe.run(self)
        print('*** deleting unnecessary libraries and modules ***')
        pruneUnneededLibraries()
        print('*** copying qt plugins ***')
        copyQtPlugins(qt_plugins)
        # Generate and compile the NSIS installer from the build output.
        script = NSISScript()
        script.create()
        print("*** compiling the NSIS setup script ***")
        script.compile()
        print("*** DONE ***")
# GUI image assets bundled into the installer's "resources" directory.
guiIcons = glob('syncplay/resources/*.ico') + glob('syncplay/resources/*.png') + ['syncplay/resources/spinner.mng']
resources = [
    "syncplay/resources/syncplayintf.lua",
    "syncplay/resources/license.rtf",
    "syncplay/resources/third-party-notices.rtf"
]
resources.extend(guiIcons)
# Media-player interface script shipped under resources/lua/intf.
intf_resources = ["syncplay/resources/lua/intf/syncplay.lua"]
# Qt plugin DLLs copied by copyQtPlugins() after the py2exe build.
qt_plugins = ['platforms\\qwindows.dll', 'styles\\qwindowsvistastyle.dll']
common_info = dict(
    name='Syncplay',
    version=syncplay.version,
    author='Uriziel',
    author_email='dev@syncplay.pl',
    description='Syncplay',
)
# Full py2exe setup() configuration: GUI client exe + console server exe.
info = dict(
    common_info,
    windows=[{
        "script": "syncplayClient.py",
        "icon_resources": [(1, "syncplay\\resources\\icon.ico")],
        'dest_base': "Syncplay"},
    ],
    console=['syncplayServer.py'],
    # *** If you wish to make the Syncplay client use console mode (for --no-gui to work) then comment out the above two lines and uncomment the following line:
    # console=['syncplayServer.py', {"script":"syncplayClient.py", "icon_resources":[(1, "resources\\icon.ico")], 'dest_base': "Syncplay"}],
    options={
        'py2exe': {
            'dist_dir': OUT_DIR,
            'packages': 'PySide2, cffi, OpenSSL, certifi',
            'includes': 'twisted, sys, encodings, datetime, os, time, math, urllib, ast, unicodedata, _ssl, win32pipe, win32file',
            'excludes': 'venv, doctest, pdb, unittest, win32clipboard, win32pdh, win32security, win32trace, win32ui, winxpgui, win32process, Tkinter',
            'dll_excludes': 'msvcr71.dll, MSVCP90.dll, POWRPROF.dll',
            'optimize': 2,
            'compressed': 1
        }
    },
    data_files=[("resources", resources), ("resources/lua/intf", intf_resources)],
    zipfile="lib/libsync",
    cmdclass={"py2exe": build_installer},
)
# Force the py2exe command so running "python setup.py" always builds the exe.
sys.argv.extend(['py2exe'])
setup(**info)
|
# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
from clr import AddReference
AddReference("System")
AddReference("QuantConnect.Algorithm")
AddReference("QuantConnect.Common")
from System import *
from QuantConnect import *
from QuantConnect.Orders import *
from QuantConnect.Algorithm import *
from QuantConnect.Algorithm.Framework import *
from QuantConnect.Algorithm.Framework.Selection import *
from Alphas.RsiAlphaModel import RsiAlphaModel
from Portfolio.EqualWeightingPortfolioConstructionModel import EqualWeightingPortfolioConstructionModel
from Execution.StandardDeviationExecutionModel import StandardDeviationExecutionModel
from datetime import timedelta
class StandardDeviationExecutionModelRegressionAlgorithm(QCAlgorithm):
    '''Regression algorithm for the StandardDeviationExecutionModel.
    This algorithm shows how the execution model works to split up orders and submit them
    only when the price is 2 standard deviations from the 60min mean (default model settings).'''

    def Initialize(self):
        '''Initialise the data and resolution required, as well as the cash and start-end dates for your algorithm. All algorithms must be initialized.'''
        # Set requested data resolution
        self.UniverseSettings.Resolution = Resolution.Minute
        # One trading week keeps the regression run short and deterministic.
        self.SetStartDate(2013,10,7)
        self.SetEndDate(2013,10,11)
        self.SetCash(1000000)
        # Fixed four-symbol universe so results are reproducible across runs.
        self.SetUniverseSelection(ManualUniverseSelectionModel([
            Symbol.Create('AIG', SecurityType.Equity, Market.USA),
            Symbol.Create('BAC', SecurityType.Equity, Market.USA),
            Symbol.Create('IBM', SecurityType.Equity, Market.USA),
            Symbol.Create('SPY', SecurityType.Equity, Market.USA)
        ]))
        self.SetAlpha(RsiAlphaModel(14, Resolution.Hour))
        self.SetPortfolioConstruction(EqualWeightingPortfolioConstructionModel())
        # Model under test: splits orders and submits only on a 2-std-dev
        # move from the 60-minute mean (its default settings).
        self.SetExecution(StandardDeviationExecutionModel())

    def OnOrderEvent(self, orderEvent):
        # Log each order event so the regression output can be diffed.
        self.Log(f"{self.Time}: {orderEvent}")
|
'''
fantastic Add-on
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib,urlparse
from resources.lib.modules import cleantitle
from resources.lib.modules import client
from resources.lib.modules import debrid
class source:
def __init__(self):
    # Scraper priority relative to other source providers.
    self.priority = 1
    self.language = ['en']
    # Known domains for this provider (primary plus unblock mirror).
    self.domains = ['sceper.ws','sceper.unblocked.pro']
    self.base_link = 'https://sceper.unblocked.pro'
    # RSS search endpoint; %s receives the url-quoted query string.
    self.search_link = '/search/%s/feed/rss2/'
def movie(self, imdb, title, localtitle, aliases, year):
    """Build the url payload identifying a movie for sources().

    Returns a url-encoded query string of imdb/title/year, or None on
    failure (the add-on framework's convention for "no result").
    """
    try:
        url = {'imdb': imdb, 'title': title, 'year': year}
        url = urllib.urlencode(url)
        return url
    except Exception:
        # Narrowed from a bare except, which would also swallow
        # SystemExit and KeyboardInterrupt.
        return
def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
    """Build the url payload identifying a TV show for episode().

    Returns a url-encoded query string of imdb/tvdb/tvshowtitle/year,
    or None on failure.
    """
    try:
        url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'year': year}
        url = urllib.urlencode(url)
        return url
    except Exception:
        # Narrowed from a bare except, which would also swallow
        # SystemExit and KeyboardInterrupt.
        return
def episode(self, url, imdb, tvdb, title, premiered, season, episode):
    """Extend a tvshow() payload with episode-specific fields.

    Parses the query string produced by tvshow(), adds title, premiered,
    season and episode, and re-encodes it.  Returns None when *url* is
    None or on any encoding failure.
    """
    try:
        if url is None: return
        url = urlparse.parse_qs(url)
        # parse_qs yields lists; flatten to single values ('' when empty).
        url = dict([(i, url[i][0]) if url[i] else (i, '') for i in url])
        url['title'], url['premiered'], url['season'], url['episode'] = title, premiered, season, episode
        url = urllib.urlencode(url)
        return url
    except Exception:
        # Narrowed from a bare except, which would also swallow
        # SystemExit and KeyboardInterrupt.
        return
def sources(self, url, hostDict, hostprDict):
try:
sources = []
if url == None: return sources
if debrid.status() == False: raise Exception()
data = urlparse.parse_qs(url)
data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data])
title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title']
hdlr = 'S%02dE%02d' % (int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else data['year']
query = '%s S%02dE%02d' % (data['tvshowtitle'], int(data['season']), int(data['episode'])) if 'tvshowtitle' in data else '%s %s' % (data['title'], data['year'])
query = re.sub('(\\\|/| -|:|;|\*|\?|"|\'|<|>|\|)', ' ', query)
url = self.search_link % urllib.quote_plus(query)
url = urlparse.urljoin(self.base_link, url)
r = client.request(url)
posts = client.parseDOM(r, 'item')
hostDict = hostprDict
items = []
for post in posts:
try:
t = client.parseDOM(post, 'title')[0]
c = client.parseDOM(post, 'content.+?')[0]
s = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))', c)
s = s[0] if s else '0'
u = zip(client.parseDOM(c, 'a', ret='href'), client.parseDOM(c, 'a'))
u = [(i[0], i[1], re.findall('PT(\d+)$', i[1])) for i in u]
u = [(i[0], i[1]) for i in u if not i[2]]
if 'tvshowtitle' in data:
u = [([x for x in i[0].strip('//').split('/')][-1], i[0]) for i in u]
else:
u = [(t, i[0], s) for i in u]
items += u
except:
pass
for item in items:
try:
name = item[0]
name = client.replaceHTMLCodes(name)
t = re.sub('(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*|3D)(\.|\)|\]|\s|)(.+|)', '', name)
if not cleantitle.get(t) == cleantitle.get(title): raise Exception()
y = re.findall('[\.|\(|\[|\s](\d{4}|S\d*E\d*|S\d*)[\.|\)|\]|\s]', name)[-1].upper()
if not y == hdlr: raise Exception()
fmt = re.sub('(.+)(\.|\(|\[|\s)(\d{4}|S\d*E\d*|S\d*)(\.|\)|\]|\s)', '', name.upper())
fmt = re.split('\.|\(|\)|\[|\]|\s|\-', fmt)
fmt = [i.lower() for i in fmt]
if any(i.endswith(('subs', 'sub', 'dubbed', 'dub')) for i in fmt): raise Exception()
if any(i in ['extras'] for i in fmt): raise Exception()
if '1080p' in fmt: quality = '1080p'
elif '720p' in fmt: quality = 'HD'
else: quality = 'SD'
if any(i in ['dvdscr', 'r5', 'r6'] for i in fmt): quality = 'SCR'
elif any(i in ['camrip', 'tsrip', 'hdcam', 'hdts', 'dvdcam', 'dvdts', 'cam', 'telesync', 'ts'] for i in fmt): quality = 'CAM'
info = []
if '3d' in fmt: info.append('3D')
try:
size = re.findall('((?:\d+\.\d+|\d+\,\d+|\d+) (?:GB|GiB|MB|MiB))', item[2])[-1]
div = 1 if size.endswith(('GB', 'GiB')) else 1024
size = float(re.sub('[^0-9|/.|/,]', '', size))/div
size = '%.2f GB' % size
info.append(size)
except:
pass
if any(i in ['hevc', 'h265', 'x265'] for i in fmt): info.append('HEVC')
info = ' | '.join(info)
url = item[1]
if any(x in url for x in ['.rar', '.zip', '.iso']): raise Exception()
url = client.replaceHTMLCodes(url)
url = url.encode('utf-8')
host = re.findall('([\w]+[.][\w]+)$', urlparse.urlparse(url.strip().lower()).netloc)[0]
if not host in hostDict: raise Exception()
host = client.replaceHTMLCodes(host)
host = host.encode('utf-8')
sources.append({'source': host, 'quality': quality, 'language': 'en', 'url': url, 'info': info, 'direct': False, 'debridonly': True})
except:
pass
check = [i for i in sources if not i['quality'] == 'CAM']
if check: sources = check
return sources
except:
return sources
def resolve(self, url):
return url
|
from oauth2client import GOOGLE_TOKEN_URI
from oauth2client.client import GoogleCredentials
from .. import gcp
from .. import constants
class CloudResourcesBase(gcp.GoogleCloudApi):
    """Common base for Cloud Resources API wrappers.

    Supplies the Cloud Resources discovery/scope defaults and builds
    refresh-token based credentials from the stored auth configuration.
    Subclasses implement the actual get/create/delete operations.
    """

    def __init__(self,
                 config,
                 logger,
                 scope=constants.COMPUTE_SCOPE,
                 discovery=constants.CLOUDRESOURCES_DISCOVERY,
                 api_version=constants.API_V1):
        super(CloudResourcesBase, self).__init__(
            config, logger, scope, discovery, api_version)

    def get_credentials(self, scope):
        # Credentials are rebuilt from the stored refresh token. To obtain
        # one locally:
        #   run: gcloud beta auth application-default login
        #   then look at ~/.config/gcloud/application_default_credentials.json
        auth = self.auth
        return GoogleCredentials(
            access_token=None,
            client_id=auth['client_id'],
            client_secret=auth['client_secret'],
            refresh_token=auth['refresh_token'],
            token_expiry=None,
            token_uri=GOOGLE_TOKEN_URI,
            user_agent='Python client library')

    def get(self):
        """Retrieve the resource -- must be provided by a subclass."""
        raise NotImplementedError()

    def create(self):
        """Create the resource -- must be provided by a subclass."""
        raise NotImplementedError()

    def delete(self):
        """Delete the resource -- must be provided by a subclass."""
        raise NotImplementedError()
|
"""
.. module:: tomo_recon
:platform: Unix
:synopsis: runner for tests using the MPI framework
.. moduleauthor:: Mark Basham <scientificsoftware@diamond.ac.uk>
"""
import unittest
import tempfile
from savu.test import test_utils as tu
from savu.test.plugin_runner_test import run_protected_plugin_runner
class SimpleTomoTest(unittest.TestCase):
def test_process(self):
options = {
"transport": "hdf5",
"process_names": "CPU0",
"data_file": tu.get_test_data_path('24737.nxs'),
"process_file": tu.get_test_data_path('simple_recon_test_process.nxs'),
"out_path": tempfile.mkdtemp()
}
run_protected_plugin_runner(options)
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
|
"""Tests for xquad dataset module."""
from tensorflow_datasets import testing
from tensorflow_datasets.question_answering import xquad
class XquadTest(testing.DatasetBuilderTestCase):
    """Smoke test for the xquad dataset builder (Arabic config only)."""

    DATASET_CLASS = xquad.Xquad
    # Restrict the generic builder test to a single language config.
    BUILDER_CONFIG_NAMES_TO_TEST = ["ar"]
    # Download key -> fake-data file the dl/extract step should resolve to.
    DL_EXTRACT_RESULT = {
        "translate-train": "translate-train.json",
        "translate-dev": "translate-dev.json",
        "translate-test": "translate-test.json",
        "test": "test.json",
    }
    # Expected number of examples per split in the fake data.
    SPLITS = {
        "translate-train": 3,
        "translate-dev": 2,
        "translate-test": 3,
        "test": 1,
    }
if __name__ == "__main__":
    # Run through the TFDS testing entry point.
    testing.test_main()
|
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers
from google.api_core import operations_v1
from google.api_core import gapic_v1
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.appengine_admin_v1.types import appengine
from google.cloud.appengine_admin_v1.types import instance
from google.longrunning import operations_pb2 # type: ignore
from .base import InstancesTransport, DEFAULT_CLIENT_INFO
class InstancesGrpcTransport(InstancesTransport):
    """gRPC backend transport for Instances.

    Manages instances of a version.

    This class defines the same methods as the primary client, so the
    primary client can load the underlying transport implementation
    and call it.

    It sends protocol buffers over the wire using gRPC (which is built on
    top of HTTP/2); the ``grpcio`` package must be installed.
    """

    # Cache of RPC stubs keyed by method name; populated lazily by the
    # per-method properties below.
    _stubs: Dict[str, Callable]

    def __init__(
        self,
        *,
        host: str = "appengine.googleapis.com",
        credentials: ga_credentials.Credentials = None,
        credentials_file: str = None,
        scopes: Sequence[str] = None,
        channel: grpc.Channel = None,
        api_mtls_endpoint: str = None,
        client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
        ssl_channel_credentials: grpc.ChannelCredentials = None,
        client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is ignored if ``channel`` is provided.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is ignored if ``channel`` is provided.
            scopes (Optional(Sequence[str])): A list of scopes. This argument is
                ignored if ``channel`` is provided.
            channel (Optional[grpc.Channel]): A ``Channel`` instance through
                which to make calls.
            api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
                If provided, it overrides the ``host`` argument and tries to create
                a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
            client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
                Deprecated. A callback to provide client SSL certificate bytes and
                private key bytes, both in PEM format. It is ignored if
                ``api_mtls_endpoint`` is None.
            ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
                for the grpc channel. It is ignored if ``channel`` is provided.
            client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
                A callback to provide client certificate bytes and private key bytes,
                both in PEM format. It is used to configure a mutual TLS channel. It is
                ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.

        Raises:
          google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
              creation failed for any reason.
          google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        self._grpc_channel = None
        self._ssl_channel_credentials = ssl_channel_credentials
        self._stubs: Dict[str, Callable] = {}
        # The operations client is created lazily by the operations_client
        # property.
        self._operations_client: Optional[operations_v1.OperationsClient] = None

        if api_mtls_endpoint:
            warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
        if client_cert_source:
            warnings.warn("client_cert_source is deprecated", DeprecationWarning)

        if channel:
            # Ignore credentials if a channel was passed.
            credentials = False
            # If a channel was explicitly provided, set it.
            self._grpc_channel = channel
            self._ssl_channel_credentials = None

        else:
            if api_mtls_endpoint:
                host = api_mtls_endpoint

                # Create SSL credentials with client_cert_source or application
                # default SSL credentials.
                if client_cert_source:
                    cert, key = client_cert_source()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )
                else:
                    self._ssl_channel_credentials = SslCredentials().ssl_credentials

            else:
                if client_cert_source_for_mtls and not ssl_channel_credentials:
                    cert, key = client_cert_source_for_mtls()
                    self._ssl_channel_credentials = grpc.ssl_channel_credentials(
                        certificate_chain=cert, private_key=key
                    )

        # The base transport sets the host, credentials and scopes
        super().__init__(
            host=host,
            credentials=credentials,
            credentials_file=credentials_file,
            scopes=scopes,
            quota_project_id=quota_project_id,
            client_info=client_info,
            always_use_jwt_access=always_use_jwt_access,
        )

        if not self._grpc_channel:
            self._grpc_channel = type(self).create_channel(
                self._host,
                # use the credentials which are saved
                credentials=self._credentials,
                # Set ``credentials_file`` to ``None`` here as
                # the credentials that we saved earlier should be used.
                credentials_file=None,
                scopes=self._scopes,
                ssl_credentials=self._ssl_channel_credentials,
                quota_project_id=quota_project_id,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )

        # Wrap messages. This must be done after self._grpc_channel exists
        self._prep_wrapped_messages(client_info)

    @classmethod
    def create_channel(
        cls,
        host: str = "appengine.googleapis.com",
        credentials: ga_credentials.Credentials = None,
        credentials_file: str = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        **kwargs,
    ) -> grpc.Channel:
        """Create and return a gRPC channel object.

        Args:
            host (Optional[str]): The host for the channel to use.
            credentials (Optional[~.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If
                none are specified, the client will attempt to ascertain
                the credentials from the environment.
            credentials_file (Optional[str]): A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials.
            scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
                service. These are only used when credentials are not specified and
                are passed to :func:`google.auth.default`.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            kwargs (Optional[dict]): Keyword arguments, which are passed to the
                channel creation.

        Returns:
            grpc.Channel: A gRPC channel object.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
              and ``credentials_file`` are passed.
        """
        return grpc_helpers.create_channel(
            host,
            credentials=credentials,
            credentials_file=credentials_file,
            quota_project_id=quota_project_id,
            default_scopes=cls.AUTH_SCOPES,
            scopes=scopes,
            default_host=cls.DEFAULT_HOST,
            **kwargs,
        )

    @property
    def grpc_channel(self) -> grpc.Channel:
        """Return the channel designed to connect to this service.
        """
        return self._grpc_channel

    @property
    def operations_client(self) -> operations_v1.OperationsClient:
        """Create the client designed to process long-running operations.

        This property caches on the instance; repeated calls return the same
        client.
        """
        # Quick check: Only create a new client if we do not already have one.
        if self._operations_client is None:
            self._operations_client = operations_v1.OperationsClient(self.grpc_channel)

        # Return the client from cache.
        return self._operations_client

    @property
    def list_instances(
        self,
    ) -> Callable[[appengine.ListInstancesRequest], appengine.ListInstancesResponse]:
        r"""Return a callable for the list instances method over gRPC.

        Lists the instances of a version.

        Tip: To aggregate details about instances over time, see the
        `Stackdriver Monitoring
        API <https://cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.timeSeries/list>`__.

        Returns:
            Callable[[~.ListInstancesRequest],
                    ~.ListInstancesResponse]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "list_instances" not in self._stubs:
            self._stubs["list_instances"] = self.grpc_channel.unary_unary(
                "/google.appengine.v1.Instances/ListInstances",
                request_serializer=appengine.ListInstancesRequest.serialize,
                response_deserializer=appengine.ListInstancesResponse.deserialize,
            )
        return self._stubs["list_instances"]

    @property
    def get_instance(
        self,
    ) -> Callable[[appengine.GetInstanceRequest], instance.Instance]:
        r"""Return a callable for the get instance method over gRPC.

        Gets instance information.

        Returns:
            Callable[[~.GetInstanceRequest],
                    ~.Instance]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "get_instance" not in self._stubs:
            self._stubs["get_instance"] = self.grpc_channel.unary_unary(
                "/google.appengine.v1.Instances/GetInstance",
                request_serializer=appengine.GetInstanceRequest.serialize,
                response_deserializer=instance.Instance.deserialize,
            )
        return self._stubs["get_instance"]

    @property
    def delete_instance(
        self,
    ) -> Callable[[appengine.DeleteInstanceRequest], operations_pb2.Operation]:
        r"""Return a callable for the delete instance method over gRPC.

        Stops a running instance.

        The instance might be automatically recreated based on the
        scaling settings of the version. For more information, see "How
        Instances are Managed" (`standard
        environment <https://cloud.google.com/appengine/docs/standard/python/how-instances-are-managed>`__
        \| `flexible
        environment <https://cloud.google.com/appengine/docs/flexible/python/how-instances-are-managed>`__).

        To ensure that instances are not re-created and avoid getting
        billed, you can stop all instances within the target version by
        changing the serving status of the version to ``STOPPED`` with
        the
        ```apps.services.versions.patch`` <https://cloud.google.com/appengine/docs/admin-api/reference/rest/v1/apps.services.versions/patch>`__
        method.

        Returns:
            Callable[[~.DeleteInstanceRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "delete_instance" not in self._stubs:
            self._stubs["delete_instance"] = self.grpc_channel.unary_unary(
                "/google.appengine.v1.Instances/DeleteInstance",
                request_serializer=appengine.DeleteInstanceRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["delete_instance"]

    @property
    def debug_instance(
        self,
    ) -> Callable[[appengine.DebugInstanceRequest], operations_pb2.Operation]:
        r"""Return a callable for the debug instance method over gRPC.

        Enables debugging on a VM instance. This allows you
        to use the SSH command to connect to the virtual machine
        where the instance lives. While in "debug mode", the
        instance continues to serve live traffic. You should
        delete the instance when you are done debugging and then
        allow the system to take over and determine if another
        instance should be started.

        Only applicable for instances in App Engine flexible
        environment.

        Returns:
            Callable[[~.DebugInstanceRequest],
                    ~.Operation]:
                A function that, when called, will call the underlying RPC
                on the server.
        """
        # Generate a "stub function" on-the-fly which will actually make
        # the request.
        # gRPC handles serialization and deserialization, so we just need
        # to pass in the functions for each.
        if "debug_instance" not in self._stubs:
            self._stubs["debug_instance"] = self.grpc_channel.unary_unary(
                "/google.appengine.v1.Instances/DebugInstance",
                request_serializer=appengine.DebugInstanceRequest.serialize,
                response_deserializer=operations_pb2.Operation.FromString,
            )
        return self._stubs["debug_instance"]

    def close(self):
        """Close the underlying gRPC channel, releasing its resources."""
        self.grpc_channel.close()
__all__ = ("InstancesGrpcTransport",)
|
"""Support for RainMachine devices."""
import asyncio
from datetime import timedelta
import logging
from regenmaschine import Client
from regenmaschine.errors import RainMachineError
import voluptuous as vol
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_IP_ADDRESS,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
)
from homeassistant.core import callback
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import aiohttp_client, config_validation as cv
from homeassistant.helpers.dispatcher import async_dispatcher_send
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_time_interval
from homeassistant.helpers.service import verify_domain_control
from .const import (
CONF_ZONE_RUN_TIME,
DATA_CLIENT,
DATA_PROGRAMS,
DATA_PROVISION_SETTINGS,
DATA_RESTRICTIONS_CURRENT,
DATA_RESTRICTIONS_UNIVERSAL,
DATA_ZONES,
DATA_ZONES_DETAILS,
DEFAULT_ZONE_RUN,
DOMAIN,
PROGRAM_UPDATE_TOPIC,
SENSOR_UPDATE_TOPIC,
ZONE_UPDATE_TOPIC,
)
_LOGGER = logging.getLogger(__name__)

# Service-call data keys.
CONF_PROGRAM_ID = "program_id"
CONF_SECONDS = "seconds"
CONF_ZONE_ID = "zone_id"

# hass.data key under which per-entry update-listener cancel callbacks live.
DATA_LISTENER = "listener"

DEFAULT_ATTRIBUTION = "Data provided by Green Electronics LLC"
DEFAULT_ICON = "mdi:water"
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
DEFAULT_SSL = True

# Schemas for the services registered in async_setup_entry.
SERVICE_ALTER_PROGRAM = vol.Schema({vol.Required(CONF_PROGRAM_ID): cv.positive_int})
SERVICE_ALTER_ZONE = vol.Schema({vol.Required(CONF_ZONE_ID): cv.positive_int})
SERVICE_PAUSE_WATERING = vol.Schema({vol.Required(CONF_SECONDS): cv.positive_int})
SERVICE_START_PROGRAM_SCHEMA = vol.Schema(
    {vol.Required(CONF_PROGRAM_ID): cv.positive_int}
)
SERVICE_START_ZONE_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_ZONE_ID): cv.positive_int,
        vol.Optional(CONF_ZONE_RUN_TIME, default=DEFAULT_ZONE_RUN): cv.positive_int,
    }
)
SERVICE_STOP_PROGRAM_SCHEMA = vol.Schema(
    {vol.Required(CONF_PROGRAM_ID): cv.positive_int}
)
SERVICE_STOP_ZONE_SCHEMA = vol.Schema({vol.Required(CONF_ZONE_ID): cv.positive_int})

# YAML configuration for this integration is deprecated (removed in 0.119).
CONFIG_SCHEMA = cv.deprecated(DOMAIN, invalidation_version="0.119")
async def async_setup(hass, config):
    """Set up the RainMachine component."""
    # Prepare the shared storage consumed by config-entry setup/unload.
    domain_data = {DATA_CLIENT: {}, DATA_LISTENER: {}}
    hass.data[DOMAIN] = domain_data
    return True
async def async_setup_entry(hass, config_entry):
    """Set up RainMachine as config entry."""
    entry_updates = {}
    if not config_entry.unique_id:
        # If the config entry doesn't already have a unique ID, set one:
        entry_updates["unique_id"] = config_entry.data[CONF_IP_ADDRESS]
    if CONF_ZONE_RUN_TIME in config_entry.data:
        # If a zone run time exists in the config entry's data, pop it and move it to
        # options:
        data = {**config_entry.data}
        entry_updates["data"] = data
        entry_updates["options"] = {
            **config_entry.options,
            CONF_ZONE_RUN_TIME: data.pop(CONF_ZONE_RUN_TIME),
        }
    if entry_updates:
        hass.config_entries.async_update_entry(config_entry, **entry_updates)

    _verify_domain_control = verify_domain_control(hass, DOMAIN)

    websession = aiohttp_client.async_get_clientsession(hass)
    client = Client(session=websession)

    try:
        await client.load_local(
            config_entry.data[CONF_IP_ADDRESS],
            config_entry.data[CONF_PASSWORD],
            port=config_entry.data[CONF_PORT],
            ssl=config_entry.data.get(CONF_SSL, DEFAULT_SSL),
        )
    except RainMachineError as err:
        _LOGGER.error("An error occurred: %s", err)
        raise ConfigEntryNotReady from err
    else:
        # regenmaschine can load multiple controllers at once, but we only grab the one
        # we loaded above:
        controller = next(iter(client.controllers.values()))

    rainmachine = RainMachine(hass, config_entry, controller)

    # Update the data object, which at this point (prior to any sensors registering
    # "interest" in the API), will focus on grabbing the latest program and zone data:
    await rainmachine.async_update()
    hass.data[DOMAIN][DATA_CLIENT][config_entry.entry_id] = rainmachine

    for component in ("binary_sensor", "sensor", "switch"):
        hass.async_create_task(
            hass.config_entries.async_forward_entry_setup(config_entry, component)
        )

    @_verify_domain_control
    async def disable_program(call):
        """Disable a program."""
        await rainmachine.controller.programs.disable(call.data[CONF_PROGRAM_ID])
        await rainmachine.async_update_programs_and_zones()

    @_verify_domain_control
    async def disable_zone(call):
        """Disable a zone."""
        await rainmachine.controller.zones.disable(call.data[CONF_ZONE_ID])
        await rainmachine.async_update_programs_and_zones()

    @_verify_domain_control
    async def enable_program(call):
        """Enable a program."""
        await rainmachine.controller.programs.enable(call.data[CONF_PROGRAM_ID])
        await rainmachine.async_update_programs_and_zones()

    @_verify_domain_control
    async def enable_zone(call):
        """Enable a zone."""
        await rainmachine.controller.zones.enable(call.data[CONF_ZONE_ID])
        await rainmachine.async_update_programs_and_zones()

    @_verify_domain_control
    async def pause_watering(call):
        """Pause watering for a set number of seconds."""
        await rainmachine.controller.watering.pause_all(call.data[CONF_SECONDS])
        await rainmachine.async_update_programs_and_zones()

    @_verify_domain_control
    async def start_program(call):
        """Start a particular program."""
        await rainmachine.controller.programs.start(call.data[CONF_PROGRAM_ID])
        await rainmachine.async_update_programs_and_zones()

    @_verify_domain_control
    async def start_zone(call):
        """Start a particular zone for a certain amount of time."""
        await rainmachine.controller.zones.start(
            call.data[CONF_ZONE_ID], call.data[CONF_ZONE_RUN_TIME]
        )
        await rainmachine.async_update_programs_and_zones()

    @_verify_domain_control
    async def stop_all(call):
        """Stop all watering."""
        await rainmachine.controller.watering.stop_all()
        await rainmachine.async_update_programs_and_zones()

    @_verify_domain_control
    async def stop_program(call):
        """Stop a program."""
        await rainmachine.controller.programs.stop(call.data[CONF_PROGRAM_ID])
        await rainmachine.async_update_programs_and_zones()

    @_verify_domain_control
    async def stop_zone(call):
        """Stop a zone."""
        await rainmachine.controller.zones.stop(call.data[CONF_ZONE_ID])
        await rainmachine.async_update_programs_and_zones()

    @_verify_domain_control
    async def unpause_watering(call):
        """Unpause watering."""
        await rainmachine.controller.watering.unpause_all()
        await rainmachine.async_update_programs_and_zones()

    for service, method, schema in [
        ("disable_program", disable_program, SERVICE_ALTER_PROGRAM),
        ("disable_zone", disable_zone, SERVICE_ALTER_ZONE),
        ("enable_program", enable_program, SERVICE_ALTER_PROGRAM),
        ("enable_zone", enable_zone, SERVICE_ALTER_ZONE),
        ("pause_watering", pause_watering, SERVICE_PAUSE_WATERING),
        ("start_program", start_program, SERVICE_START_PROGRAM_SCHEMA),
        ("start_zone", start_zone, SERVICE_START_ZONE_SCHEMA),
        ("stop_all", stop_all, {}),
        ("stop_program", stop_program, SERVICE_STOP_PROGRAM_SCHEMA),
        ("stop_zone", stop_zone, SERVICE_STOP_ZONE_SCHEMA),
        ("unpause_watering", unpause_watering, {}),
    ]:
        hass.services.async_register(DOMAIN, service, method, schema=schema)

    # BUG FIX: store the update-listener cancel callback keyed by entry ID.
    # async_unload_entry pops DATA_LISTENER by config_entry.entry_id, so
    # storing the callback unkeyed raised KeyError on unload and leaked the
    # listener.
    hass.data[DOMAIN][DATA_LISTENER][config_entry.entry_id] = config_entry.add_update_listener(
        async_reload_entry
    )

    return True
async def async_unload_entry(hass, config_entry):
    """Unload a RainMachine config entry."""
    hass.data[DOMAIN][DATA_CLIENT].pop(config_entry.entry_id)

    # Cancel this entry's options-update listener.
    cancel_listener = hass.data[DOMAIN][DATA_LISTENER].pop(config_entry.entry_id)
    cancel_listener()

    # Tear down every platform that was forwarded during setup.
    tasks = [
        hass.config_entries.async_forward_entry_unload(config_entry, component)
        for component in ("binary_sensor", "sensor", "switch")
    ]
    await asyncio.gather(*tasks)

    return True
async def async_reload_entry(hass, config_entry):
    """Handle an options update."""
    # Reload the entry so changed options take effect immediately.
    await hass.config_entries.async_reload(config_entry.entry_id)
class RainMachine:
    """Define a generic RainMachine object.

    Owns the API data cache and the sensor "interest" registrations that
    gate the periodic update interval.
    """

    def __init__(self, hass, config_entry, controller):
        """Initialize."""
        self._async_cancel_time_interval_listener = None
        self.config_entry = config_entry
        self.controller = controller
        self.data = {}
        self.device_mac = controller.mac
        self.hass = hass

        # Number of entities currently interested in each API category; the
        # update interval runs only while at least one registration exists.
        self._api_category_count = {
            DATA_PROVISION_SETTINGS: 0,
            DATA_RESTRICTIONS_CURRENT: 0,
            DATA_RESTRICTIONS_UNIVERSAL: 0,
        }
        # Per-category locks so concurrent registrations don't trigger
        # duplicate initial API fetches.
        self._api_category_locks = {
            DATA_PROVISION_SETTINGS: asyncio.Lock(),
            DATA_RESTRICTIONS_CURRENT: asyncio.Lock(),
            DATA_RESTRICTIONS_UNIVERSAL: asyncio.Lock(),
        }

    async def _async_update_listener_action(self, now):
        """Define an async_track_time_interval action to update data."""
        await self.async_update()

    @callback
    def async_deregister_sensor_api_interest(self, api_category):
        """Decrement the number of entities with data needs from an API category."""
        if self._api_category_count[api_category] > 0:
            self._api_category_count[api_category] -= 1

        # BUG FIX: the decrement must happen *before* the zero check. The
        # previous version tested the sum first and returned early, so when
        # the last interested entity deregistered (sum was 1, not 0) the
        # time-interval listener was never cancelled and kept polling.
        if sum(self._api_category_count.values()) == 0:
            if self._async_cancel_time_interval_listener:
                self._async_cancel_time_interval_listener()
                self._async_cancel_time_interval_listener = None

    async def async_fetch_from_api(self, api_category):
        """Execute the appropriate coroutine to fetch particular data from the API."""
        if api_category == DATA_PROGRAMS:
            data = await self.controller.programs.all(include_inactive=True)
        elif api_category == DATA_PROVISION_SETTINGS:
            data = await self.controller.provisioning.settings()
        elif api_category == DATA_RESTRICTIONS_CURRENT:
            data = await self.controller.restrictions.current()
        elif api_category == DATA_RESTRICTIONS_UNIVERSAL:
            data = await self.controller.restrictions.universal()
        elif api_category == DATA_ZONES:
            data = await self.controller.zones.all(include_inactive=True)
        elif api_category == DATA_ZONES_DETAILS:
            # This API call needs to be separate from the DATA_ZONES one above because,
            # maddeningly, the DATA_ZONES_DETAILS API call doesn't include the current
            # state of the zone:
            data = await self.controller.zones.all(details=True, include_inactive=True)

        self.data[api_category] = data

    async def async_register_sensor_api_interest(self, api_category):
        """Increment the number of entities with data needs from an API category."""
        # If this is the first registration we have, start a time interval:
        if not self._async_cancel_time_interval_listener:
            self._async_cancel_time_interval_listener = async_track_time_interval(
                self.hass,
                self._async_update_listener_action,
                DEFAULT_SCAN_INTERVAL,
            )

        self._api_category_count[api_category] += 1

        # If a sensor registers interest in a particular API call and the data doesn't
        # exist for it yet, make the API call and grab the data:
        async with self._api_category_locks[api_category]:
            if api_category not in self.data:
                await self.async_fetch_from_api(api_category)

    async def async_update(self):
        """Update all RainMachine data."""
        tasks = [self.async_update_programs_and_zones(), self.async_update_sensors()]
        await asyncio.gather(*tasks)

    async def async_update_sensors(self):
        """Update sensor/binary sensor data."""
        _LOGGER.debug("Updating sensor data for RainMachine")

        # Fetch an API category if there is at least one interested entity:
        tasks = {}
        for category, count in self._api_category_count.items():
            if count == 0:
                continue
            tasks[category] = self.async_fetch_from_api(category)

        results = await asyncio.gather(*tasks.values(), return_exceptions=True)
        for api_category, result in zip(tasks, results):
            if isinstance(result, RainMachineError):
                _LOGGER.error(
                    "There was an error while updating %s: %s", api_category, result
                )
                continue

        async_dispatcher_send(self.hass, SENSOR_UPDATE_TOPIC)

    async def async_update_programs_and_zones(self):
        """Update program and zone data.

        Program and zone updates always go together because of how linked they are:
        programs affect zones and certain combinations of zones affect programs.

        Note that this call does not take into account interested entities when making
        the API calls; we make the reasonable assumption that switches will always be
        enabled.
        """
        _LOGGER.debug("Updating program and zone data for RainMachine")

        tasks = {
            DATA_PROGRAMS: self.async_fetch_from_api(DATA_PROGRAMS),
            DATA_ZONES: self.async_fetch_from_api(DATA_ZONES),
            DATA_ZONES_DETAILS: self.async_fetch_from_api(DATA_ZONES_DETAILS),
        }

        results = await asyncio.gather(*tasks.values(), return_exceptions=True)
        for api_category, result in zip(tasks, results):
            if isinstance(result, RainMachineError):
                _LOGGER.error(
                    "There was an error while updating %s: %s", api_category, result
                )

        async_dispatcher_send(self.hass, PROGRAM_UPDATE_TOPIC)
        async_dispatcher_send(self.hass, ZONE_UPDATE_TOPIC)
class RainMachineEntity(Entity):
    """Base entity shared by all RainMachine platforms."""

    def __init__(self, rainmachine):
        """Initialize with a reference to the shared RainMachine object."""
        self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
        self._device_class = None
        self._name = None
        self.rainmachine = rainmachine

    @property
    def device_class(self):
        """Return the device class."""
        return self._device_class

    @property
    def device_info(self):
        """Return device registry information for this entity."""
        controller = self.rainmachine.controller
        model = (
            f"Version {controller.hardware_version} "
            f"(API: {controller.api_version})"
        )
        return {
            "identifiers": {(DOMAIN, controller.mac)},
            "name": controller.name,
            "manufacturer": "RainMachine",
            "model": model,
            "sw_version": controller.software_version,
        }

    @property
    def device_state_attributes(self) -> dict:
        """Return the state attributes."""
        return self._attrs

    @property
    def name(self) -> str:
        """Return the name of the entity."""
        return self._name

    @property
    def should_poll(self):
        """Disable polling; state updates arrive via dispatcher signals."""
        return False

    @callback
    def _update_state(self):
        """Refresh from the latest data, then write the new state."""
        self.update_from_latest_data()
        self.async_write_ha_state()

    @callback
    def update_from_latest_data(self):
        """Update the entity from the latest data (subclasses implement)."""
        raise NotImplementedError
|
"""
Installs and configures Cinder
"""
import os
import re
import uuid
import logging
from packstack.installer import exceptions
from packstack.installer import processors
from packstack.installer import validators
from packstack.installer import basedefs
from packstack.installer import utils
from packstack.modules.ospluginutils import getManifestTemplate, appendManifestFile
from packstack.installer import exceptions
from packstack.installer import output_messages
# Installer controller handle; populated by initConfig() and used by the
# step functions below.
controller = None

# Human-readable plugin identifier (and a colorized variant for console output).
PLUGIN_NAME = "OS-Cinder"
PLUGIN_NAME_COLORED = utils.color_text(PLUGIN_NAME, 'blue')

logging.debug("plugin %s loaded", __name__)
def initConfig(controllerObject):
    """Register all Cinder-related configuration groups with the controller.

    Adds the base Cinder parameters, plus conditional groups for LVM volume
    creation/sizing and for the gluster/NFS backends.  The controller is also
    stored in the module-level ``controller`` global for use by the sequence
    step functions.

    :param controllerObject: the packstack installer controller instance.
    """
    global controller
    controller = controllerObject
    logging.debug("Adding OpenStack Cinder configuration")

    # Base Cinder parameters, always prompted when Cinder install is selected.
    paramsList = [
        {"CMD_OPTION"      : "cinder-host",
         "USAGE"           : "The IP address of the server on which to install Cinder",
         "PROMPT"          : "Enter the IP address of the Cinder server",
         "OPTION_LIST"     : [],
         "VALIDATORS"      : [validators.validate_ssh],
         "DEFAULT_VALUE"   : utils.get_localhost_ip(),
         "MASK_INPUT"      : False,
         "LOOSE_VALIDATION": True,
         "CONF_NAME"       : "CONFIG_CINDER_HOST",
         "USE_DEFAULT"     : False,
         "NEED_CONFIRM"    : False,
         "CONDITION"       : False},
        {"CMD_OPTION"      : "cinder-db-passwd",
         "USAGE"           : "The password to use for the Cinder to access DB",
         "PROMPT"          : "Enter the password for the Cinder DB access",
         "OPTION_LIST"     : [],
         "VALIDATORS"      : [validators.validate_not_empty],
         "DEFAULT_VALUE"   : uuid.uuid4().hex[:16],
         "MASK_INPUT"      : True,
         "LOOSE_VALIDATION": False,
         "CONF_NAME"       : "CONFIG_CINDER_DB_PW",
         "USE_DEFAULT"     : True,
         "NEED_CONFIRM"    : True,
         "CONDITION"       : False},
        {"CMD_OPTION"      : "cinder-ks-passwd",
         "USAGE"           : "The password to use for the Cinder to authenticate with Keystone",
         "PROMPT"          : "Enter the password for the Cinder Keystone access",
         "OPTION_LIST"     : [],
         "VALIDATORS"      : [validators.validate_not_empty],
         "DEFAULT_VALUE"   : uuid.uuid4().hex[:16],
         "MASK_INPUT"      : True,
         "LOOSE_VALIDATION": False,
         "CONF_NAME"       : "CONFIG_CINDER_KS_PW",
         "USE_DEFAULT"     : True,
         "NEED_CONFIRM"    : True,
         "CONDITION"       : False},
        {"CMD_OPTION"      : "cinder-backend",
         "USAGE"           : ("The Cinder backend to use, valid options are: "
                              "lvm, gluster, nfs"),
         "PROMPT"          : "Enter the Cinder backend to be configured",
         "OPTION_LIST"     : ["lvm", "gluster", "nfs"],
         "VALIDATORS"      : [validators.validate_options],
         "DEFAULT_VALUE"   : "lvm",
         "MASK_INPUT"      : False,
         "LOOSE_VALIDATION": False,
         "CONF_NAME"       : "CONFIG_CINDER_BACKEND",
         "USE_DEFAULT"     : False,
         "NEED_CONFIRM"    : False,
         "CONDITION"       : False},
        ]

    groupDict = {"GROUP_NAME"            : "CINDER",
                 "DESCRIPTION"           : "Cinder Config parameters",
                 "PRE_CONDITION"         : "CONFIG_CINDER_INSTALL",
                 "PRE_CONDITION_MATCH"   : "y",
                 "POST_CONDITION"        : False,
                 "POST_CONDITION_MATCH"  : True}
    controller.addGroup(groupDict, paramsList)

    def check_lvm_options(config):
        # Only ask about volume-group creation for the LVM backend.
        return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
                config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'lvm')

    paramsList = [
        {"CMD_OPTION"      : "cinder-volumes-create",
         "USAGE"           : ("Create Cinder's volumes group. This should only be done for "
                              "testing on a proof-of-concept installation of Cinder. This "
                              "will create a file-backed volume group and is not suitable "
                              "for production usage."),
         "PROMPT"          : ("Should Cinder's volumes group be created (for proof-of-concept "
                              "installation)?"),
         "OPTION_LIST"     : ["y", "n"],
         "VALIDATORS"      : [validators.validate_options],
         "DEFAULT_VALUE"   : "y",
         "MASK_INPUT"      : False,
         "LOOSE_VALIDATION": False,
         "CONF_NAME"       : "CONFIG_CINDER_VOLUMES_CREATE",
         "USE_DEFAULT"     : False,
         "NEED_CONFIRM"    : False,
         "CONDITION"       : False},
        ]

    groupDict = {"GROUP_NAME"            : "CINDERVOLUMECREATE",
                 "DESCRIPTION"           : "Cinder volume create Config parameters",
                 "PRE_CONDITION"         : check_lvm_options,
                 "PRE_CONDITION_MATCH"   : True,
                 "POST_CONDITION"        : False,
                 "POST_CONDITION_MATCH"  : True}
    controller.addGroup(groupDict, paramsList)

    def check_lvm_vg_options(config):
        # Volume-group size only matters when we are creating the group.
        return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
                config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'lvm' and
                config.get('CONFIG_CINDER_VOLUMES_CREATE', 'y') == 'y')

    paramsList = [
        {"CMD_OPTION"      : "cinder-volumes-size",
         "USAGE"           : ("Cinder's volumes group size. Note that actual volume size "
                              "will be extended with 3% more space for VG metadata."),
         "PROMPT"          : "Enter Cinder's volumes group usable size",
         "OPTION_LIST"     : [],
         "VALIDATORS"      : [validators.validate_not_empty],
         "DEFAULT_VALUE"   : "20G",
         "MASK_INPUT"      : False,
         "LOOSE_VALIDATION": False,
         "CONF_NAME"       : "CONFIG_CINDER_VOLUMES_SIZE",
         "USE_DEFAULT"     : False,
         "NEED_CONFIRM"    : False,
         "CONDITION"       : False},
        ]

    groupDict = {"GROUP_NAME"            : "CINDERVOLUMESIZE",
                 "DESCRIPTION"           : "Cinder volume size Config parameters",
                 "PRE_CONDITION"         : check_lvm_vg_options,
                 "PRE_CONDITION_MATCH"   : True,
                 "POST_CONDITION"        : False,
                 "POST_CONDITION_MATCH"  : True}
    controller.addGroup(groupDict, paramsList)

    def check_gluster_options(config):
        return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
                config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'gluster')

    paramsList = [
        {"CMD_OPTION"      : "cinder-gluster-mounts",
         "USAGE"           : ("A single or comma separated list of gluster volume shares "
                              "to mount, eg: ip-address:/vol-name "),
         "PROMPT"          : ("Enter a single or comma separated list of gluster volume "
                              "shares to use with Cinder"),
         # Raw string: the pattern contains backslash escapes meant for the
         # regex engine, not for Python string parsing.
         "OPTION_LIST"     : [r"^'([\d]{1,3}\.){3}[\d]{1,3}:/.*'"],
         "VALIDATORS"      : [validators.validate_multi_regexp],
         "PROCESSORS"      : [processors.process_add_quotes_around_values],
         "DEFAULT_VALUE"   : "",
         "MASK_INPUT"      : False,
         "LOOSE_VALIDATION": True,
         "CONF_NAME"       : "CONFIG_CINDER_GLUSTER_MOUNTS",
         "USE_DEFAULT"     : False,
         "NEED_CONFIRM"    : False,
         "CONDITION"       : False},
        ]

    groupDict = {"GROUP_NAME"            : "CINDERGLUSTERMOUNTS",
                 "DESCRIPTION"           : "Cinder gluster Config parameters",
                 "PRE_CONDITION"         : check_gluster_options,
                 "PRE_CONDITION_MATCH"   : True,
                 "POST_CONDITION"        : False,
                 "POST_CONDITION_MATCH"  : True}
    controller.addGroup(groupDict, paramsList)

    def check_nfs_options(config):
        return (config.get('CONFIG_CINDER_INSTALL', 'n') == 'y' and
                config.get('CONFIG_CINDER_BACKEND', 'lvm') == 'nfs')

    paramsList = [
        {"CMD_OPTION"      : "cinder-nfs-mounts",
         # Typo fix: "seprated" -> "separated" in user-facing help/prompt.
         "USAGE"           : ("A single or comma separated list of NFS exports to mount, "
                              "eg: ip-address:/export-name "),
         "PROMPT"          : ("Enter a single or comma separated list of NFS exports to "
                              "use with Cinder"),
         "OPTION_LIST"     : [r"^'([\d]{1,3}\.){3}[\d]{1,3}:/.*'"],
         "VALIDATORS"      : [validators.validate_multi_regexp],
         "PROCESSORS"      : [processors.process_add_quotes_around_values],
         "DEFAULT_VALUE"   : "",
         "MASK_INPUT"      : False,
         "LOOSE_VALIDATION": True,
         "CONF_NAME"       : "CONFIG_CINDER_NFS_MOUNTS",
         "USE_DEFAULT"     : False,
         "NEED_CONFIRM"    : False,
         "CONDITION"       : False},
        ]

    groupDict = {"GROUP_NAME"            : "CINDERNFSMOUNTS",
                 "DESCRIPTION"           : "Cinder NFS Config parameters",
                 "PRE_CONDITION"         : check_nfs_options,
                 "PRE_CONDITION_MATCH"   : True,
                 "POST_CONDITION"        : False,
                 "POST_CONDITION_MATCH"  : True}
    controller.addGroup(groupDict, paramsList)
def initSequences(controller):
    """Register the Cinder installation sequence if Cinder was selected."""
    if controller.CONF['CONFIG_CINDER_INSTALL'] != 'y':
        return

    steps = [
        {'title': 'Installing dependencies for Cinder',
         'functions': [install_cinder_deps]},
        {'title': 'Adding Cinder Keystone manifest entries',
         'functions': [create_keystone_manifest]},
        {'title': 'Adding Cinder manifest entries',
         'functions': [create_manifest]},
    ]
    # The volume-group check is only relevant for the LVM backend.
    if controller.CONF['CONFIG_CINDER_BACKEND'] == 'lvm':
        steps.append(
            {'title': 'Checking if the Cinder server has a cinder-volumes vg',
             'functions': [check_cinder_vg]})
    controller.addSequence("Installing OpenStack Cinder", [], [], steps)
def install_cinder_deps(config):
    """Install package dependencies on the Cinder host (lvm2 for LVM)."""
    server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
    packages = ['lvm2'] if config['CONFIG_CINDER_BACKEND'] == 'lvm' else []
    for package in packages:
        server.append("rpm -q %(package)s || yum install -y %(package)s"
                      % {'package': package})
    server.execute()
def check_cinder_vg(config):
    """Ensure a 'cinder-volumes' volume group exists on the Cinder host.

    Depending on CONFIG_CINDER_VOLUMES_CREATE, either verify the VG is
    already present (raising MissingRequirements if not) or create a
    file-backed loopback VG sized per CONFIG_CINDER_VOLUMES_SIZE.
    Also tweaks system LVM snapshot autoextend settings (best effort).
    """
    cinders_volume = 'cinder-volumes'
    # Do we have a cinder-volumes vg?
    have_cinders_volume = False
    server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
    server.append('vgdisplay %s' % cinders_volume)
    try:
        server.execute()
        have_cinders_volume = True
    except exceptions.ScriptRuntimeError:
        # vgdisplay failing means the VG does not exist; that is fine here.
        pass
    # Configure system LVM settings (snapshot_autoextend)
    server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
    server.append('sed -i -r "s/^ *snapshot_autoextend_threshold +=.*/'
                  ' snapshot_autoextend_threshold = 80/" '
                  '/etc/lvm/lvm.conf')
    server.append('sed -i -r "s/^ *snapshot_autoextend_percent +=.*/'
                  ' snapshot_autoextend_percent = 20/" '
                  '/etc/lvm/lvm.conf')
    try:
        server.execute()
    except exceptions.ScriptRuntimeError:
        # Best effort: a failure here should not abort the install.
        logging.info("Warning: Unable to set system LVM settings.")

    if config["CONFIG_CINDER_VOLUMES_CREATE"] != "y":
        # User opted out of VG creation: the group must already exist.
        if not have_cinders_volume:
            raise exceptions.MissingRequirements("The cinder server should"
                                                 " contain a cinder-volumes volume group")
    else:
        if have_cinders_volume:
            controller.MESSAGES.append(
                output_messages.INFO_CINDER_VOLUMES_EXISTS)
            return

        # Pick the right restart command: systemd if 'systemctl' works,
        # SysV 'service' otherwise.
        server = utils.ScriptRunner(config['CONFIG_CINDER_HOST'])
        server.append('systemctl')
        try:
            server.execute()
            rst_cmd = 'systemctl restart openstack-cinder-volume.service'
        except exceptions.ScriptRuntimeError:
            rst_cmd = 'service openstack-cinder-volume restart'

        server.clear()
        logging.info("A new cinder volumes group will be created")
        err = "Cinder's volume group '%s' could not be created" % \
              cinders_volume

        cinders_volume_path = '/var/lib/cinder'
        server.append('mkdir -p %s' % cinders_volume_path)
        logging.debug("Volume's path: %s" % cinders_volume_path)

        # Requested size must look like e.g. "20G".
        match = re.match('^(?P<size>\d+)G$',
                         config['CONFIG_CINDER_VOLUMES_SIZE'].strip())
        if not match:
            msg = 'Invalid Cinder volumes VG size.'
            raise exceptions.ParamValidationError(msg)

        # Convert to MiB and reserve an extra 3% for VG metadata.
        cinders_volume_size = int(match.group('size')) * 1024
        cinders_reserve = int(cinders_volume_size * 0.03)

        cinders_volume_size = cinders_volume_size + cinders_reserve
        cinders_volume_path = os.path.join(cinders_volume_path, cinders_volume)
        # Create a sparse backing file, attach it to a loop device and build
        # the PV/VG on top of it.
        server.append('dd if=/dev/zero of=%s bs=1 count=0 seek=%sM'
                      % (cinders_volume_path, cinders_volume_size))
        server.append('LOFI=$(losetup --show -f %s)' % cinders_volume_path)
        server.append('pvcreate $LOFI')
        server.append('vgcreate %s $LOFI' % cinders_volume)

        # Add the loop device on boot
        server.append('grep %(volume)s /etc/rc.d/rc.local || '
                      'echo "losetup -f %(path)s && '
                      'vgchange -a y %(volume)s && '
                      '%(restart_cmd)s" '
                      '>> /etc/rc.d/rc.local' %
                      {'volume': cinders_volume, 'restart_cmd': rst_cmd,
                       'path': cinders_volume_path})
        server.append('grep "#!" /etc/rc.d/rc.local || '
                      'sed -i \'1i#!/bin/sh\' /etc/rc.d/rc.local')
        server.append('chmod +x /etc/rc.d/rc.local')

        # Let's make sure it exists
        server.append('vgdisplay %s' % cinders_volume)

        try:
            server.execute()
        except exceptions.ScriptRuntimeError:
            # Release loop device if cinder's volume creation
            # fails.
            try:
                logging.debug("Release loop device, volume creation failed")
                server = utils.ScriptRunner(controller.CONF['CONFIG_CINDER_HOST'])
                server.append('losetup -d $(losetup -j %s | cut -d : -f 1)' %
                              cinders_volume_path
                              )
                server.execute()
            except:
                # Cleanup is best effort; the original error is re-raised below.
                pass
            raise exceptions.MissingRequirements(err)
def create_keystone_manifest(config):
    """Append the Cinder Keystone entries to the Keystone host manifest."""
    template = getManifestTemplate("keystone_cinder.pp")
    target = "%s_keystone.pp" % controller.CONF['CONFIG_KEYSTONE_HOST']
    appendManifestFile(target, template)
def create_manifest(config):
    """Build the Cinder puppet manifest for the Cinder host and append it."""
    manifestfile = "%s_cinder.pp" % controller.CONF['CONFIG_CINDER_HOST']
    manifestdata = getManifestTemplate("cinder.pp")

    # Backend-specific and optional ceilometer snippets.
    backend = config['CONFIG_CINDER_BACKEND']
    if backend == "gluster":
        manifestdata += getManifestTemplate("cinder_gluster.pp")
    if backend == "nfs":
        manifestdata += getManifestTemplate("cinder_nfs.pp")
    if config['CONFIG_CEILOMETER_INSTALL'] == 'y':
        manifestdata += getManifestTemplate('cinder_ceilometer.pp')

    # Open the firewall to every configured nova compute host.
    compute_hosts = config['CONFIG_NOVA_COMPUTE_HOSTS'].split(",")
    allowed = ["'%s'" % host.strip() for host in compute_hosts if host.strip()]
    config['FIREWALL_ALLOWED'] = ",".join(allowed)
    config['FIREWALL_SERVICE_NAME'] = "cinder"
    config['FIREWALL_PORTS'] = "'3260', '8776'"
    manifestdata += getManifestTemplate("firewall.pp")

    appendManifestFile(manifestfile, manifestdata)
|
from models.tridentnet.builder import TridentFasterRcnn as Detector
from models.tridentnet.builder_v2 import TridentResNetV1bC4 as Backbone
from models.tridentnet.builder import TridentRpnHead as RpnHead
from models.tridentnet.builder import process_branch_outputs, process_branch_rpn_outputs
from symbol.builder import Neck
from symbol.builder import RoiAlign as RoiExtractor
from symbol.builder import BboxC5V1Head as BboxHead
from mxnext.complicate import normalizer_factory
def get_config(is_train):
    """Build the TridentNet ResNet-152-C4 detection configuration.

    Args:
        is_train: True to build the training configuration, False for test.

    Returns:
        The parameter namespaces, network symbols, input transforms, blob
        names and metrics consumed by the detection train/test driver.
    """
    class General:
        log_frequency = 10
        # NOTE(review): module names are dot-separated, so the rsplit("/") is
        # effectively a no-op before rsplit(".") — presumably kept in case a
        # path-style name is passed; confirm.
        name = __name__.rsplit("/")[-1].rsplit(".")[-1]
        batch_image = 1 if is_train else 1
        fp16 = False

    class Trident:
        # Multi-branch (scale-aware) TridentNet settings.
        num_branch = 3
        train_scaleaware = True
        test_scaleaware = True
        branch_ids = range(num_branch)
        branch_dilates = [1, 2, 3]
        # Per-branch valid object size ranges in pixels; -1 means unbounded.
        valid_ranges = [(0, 90), (30, 160), (90, -1)]
        valid_ranges_on_origin = True
        branch_bn_shared = True
        branch_conv_shared = True
        branch_deform = False

        assert num_branch == len(branch_ids)
        assert num_branch == len(valid_ranges)

    class KvstoreParam:
        kvstore = "local"
        batch_image = General.batch_image
        gpus = [0, 1, 2, 3, 4, 5, 6, 7]
        fp16 = General.fp16

    class NormalizeParam:
        # normalizer = normalizer_factory(type="syncbn", ndev=len(KvstoreParam.gpus))
        normalizer = normalizer_factory(type="fixbn")

    class BackboneParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        depth = 152
        num_branch = Trident.num_branch
        branch_ids = Trident.branch_ids
        branch_dilates = Trident.branch_dilates
        branch_bn_shared = Trident.branch_bn_shared
        branch_conv_shared = Trident.branch_conv_shared
        branch_deform = Trident.branch_deform

    class NeckParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer

    class RpnParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        # Each branch contributes a full image to the RPN batch.
        batch_image = General.batch_image * Trident.num_branch

        class anchor_generate:
            scale = (2, 4, 8, 16, 32)
            ratio = (0.5, 1.0, 2.0)
            stride = 16
            image_anchor = 256

        class head:
            conv_channel = 512
            mean = (0, 0, 0, 0)
            std = (1, 1, 1, 1)

        class proposal:
            pre_nms_top_n = 12000 if is_train else 6000
            post_nms_top_n = 500 if is_train else 300
            nms_thr = 0.7
            min_bbox_side = 0

        class subsample_proposal:
            proposal_wo_gt = True
            image_roi = 128
            fg_fraction = 0.5
            fg_thr = 0.5
            bg_thr_hi = 0.5
            bg_thr_lo = 0.0

        class bbox_target:
            num_reg_class = 2
            class_agnostic = True
            weight = (1.0, 1.0, 1.0, 1.0)
            mean = (0.0, 0.0, 0.0, 0.0)
            std = (0.1, 0.1, 0.2, 0.2)

    class BboxParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        num_class = 1 + 80  # background + 80 COCO categories
        image_roi = 128
        batch_image = General.batch_image * Trident.num_branch

        class regress_target:
            class_agnostic = True
            mean = (0.0, 0.0, 0.0, 0.0)
            std = (0.1, 0.1, 0.2, 0.2)

    class RoiParam:
        fp16 = General.fp16
        normalizer = NormalizeParam.normalizer
        out_size = 7
        stride = 16

    class DatasetParam:
        if is_train:
            image_set = ("coco_train2017", )
        else:
            image_set = ("coco_val2017", )

    # Assemble the network components and build train/test symbols.
    backbone = Backbone(BackboneParam)
    neck = Neck(NeckParam)
    rpn_head = RpnHead(RpnParam)
    roi_extractor = RoiExtractor(RoiParam)
    bbox_head = BboxHead(BboxParam)
    detector = Detector()
    if is_train:
        train_sym = detector.get_train_symbol(
            backbone, neck, rpn_head, roi_extractor, bbox_head,
            num_branch=Trident.num_branch, scaleaware=Trident.train_scaleaware)
        rpn_test_sym = None
        test_sym = None
    else:
        train_sym = None
        rpn_test_sym = detector.get_rpn_test_symbol(backbone, neck, rpn_head, Trident.num_branch)
        test_sym = detector.get_test_symbol(
            backbone, neck, rpn_head, roi_extractor, bbox_head, num_branch=Trident.num_branch)

    class ModelParam:
        train_symbol = train_sym
        test_symbol = test_sym
        rpn_test_symbol = rpn_test_sym

        from_scratch = False
        random = True
        memonger = False
        memonger_until = "stage3_unit21_plus"

        class pretrain:
            prefix = "pretrain_model/resnet%s_v1b" % BackboneParam.depth
            epoch = 0
            fixed_param = ["conv0", "stage1", "gamma", "beta"]

    class OptimizeParam:
        class optimizer:
            type = "sgd"
            # Linear LR scaling with the total batch size (base 0.01 / 8 images).
            lr = 0.01 / 8 * len(KvstoreParam.gpus) * KvstoreParam.batch_image
            momentum = 0.9
            wd = 0.0001
            clip_gradient = 5

        class schedule:
            begin_epoch = 0
            end_epoch = 12
            # Decay steps scaled the same way as the learning rate.
            lr_iter = [120000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image),
                       160000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)]

        class warmup:
            type = "gradual"
            lr = 0.0
            iter = 3000 * 16 // (len(KvstoreParam.gpus) * KvstoreParam.batch_image)

    class TestParam:
        min_det_score = 0.001
        max_det_per_image = 100

        process_roidb = lambda x: x
        # Scale-aware testing filters each branch's detections by valid range.
        if Trident.test_scaleaware:
            process_output = lambda x, y: process_branch_outputs(
                x, Trident.num_branch, Trident.valid_ranges, Trident.valid_ranges_on_origin)
        else:
            process_output = lambda x, y: x
        process_rpn_output = lambda x, y: process_branch_rpn_outputs(x, Trident.num_branch)

        class model:
            prefix = "experiments/{}/checkpoint".format(General.name)
            epoch = OptimizeParam.schedule.end_epoch

        class nms:
            type = "nms"
            thr = 0.5

        class coco:
            annotation = "data/coco/annotations/instances_minival2014.json"

    # data processing
    class NormParam:
        mean = tuple(i * 255 for i in (0.485, 0.456, 0.406))  # RGB order
        std = tuple(i * 255 for i in (0.229, 0.224, 0.225))

    class ResizeParam:
        short = 800
        long = 1200 if is_train else 2000

    class PadParam:
        short = 800
        long = 1200 if is_train else 2000
        max_num_gt = 100

    class ScaleRange:
        valid_ranges = Trident.valid_ranges
        cal_on_origin = Trident.valid_ranges_on_origin  # True: valid_ranges on origin image scale / valid_ranges on resized image scale

    class AnchorTarget2DParam:
        class generate:
            short = 800 // 16
            long = 1200 // 16
            stride = 16
            scales = (2, 4, 8, 16, 32)
            aspects = (0.5, 1.0, 2.0)

        class assign:
            allowed_border = 0
            pos_thr = 0.7
            neg_thr = 0.3
            min_pos_thr = 0.0

        class sample:
            image_anchor = 256
            pos_fraction = 0.5

        class trident:
            invalid_anchor_threshd = 0.3

    class RenameParam:
        mapping = dict(image="data")

    from core.detection_input import ReadRoiRecord, Resize2DImageBbox, \
        ConvertImageFromHwcToChw, Flip2DImageBbox, Pad2DImageBbox, \
        RenameRecord, Norm2DImage
    from models.tridentnet.input import ScaleAwareRange, TridentAnchorTarget2D

    # Training additionally flips/pads images and generates anchor targets.
    if is_train:
        transform = [
            ReadRoiRecord(None),
            Norm2DImage(NormParam),
            Resize2DImageBbox(ResizeParam),
            Flip2DImageBbox(),
            Pad2DImageBbox(PadParam),
            ConvertImageFromHwcToChw(),
            ScaleAwareRange(ScaleRange),
            TridentAnchorTarget2D(AnchorTarget2DParam),
            RenameRecord(RenameParam.mapping)
        ]
        data_name = ["data", "im_info", "gt_bbox"]
        if Trident.train_scaleaware:
            data_name.append("valid_ranges")
        label_name = ["rpn_cls_label", "rpn_reg_target", "rpn_reg_weight"]
    else:
        transform = [
            ReadRoiRecord(None),
            Norm2DImage(NormParam),
            Resize2DImageBbox(ResizeParam),
            ConvertImageFromHwcToChw(),
            RenameRecord(RenameParam.mapping)
        ]
        data_name = ["data", "im_info", "im_id", "rec_id"]
        label_name = []

    import core.detection_metric as metric

    rpn_acc_metric = metric.AccWithIgnore(
        "RpnAcc",
        ["rpn_cls_loss_output"],
        ["rpn_cls_label"]
    )
    rpn_l1_metric = metric.L1(
        "RpnL1",
        ["rpn_reg_loss_output"],
        ["rpn_cls_label"]
    )
    # for bbox, the label is generated in network so it is an output
    box_acc_metric = metric.AccWithIgnore(
        "RcnnAcc",
        ["bbox_cls_loss_output", "bbox_label_blockgrad_output"],
        []
    )
    box_l1_metric = metric.L1(
        "RcnnL1",
        ["bbox_reg_loss_output", "bbox_label_blockgrad_output"],
        []
    )
    metric_list = [rpn_acc_metric, rpn_l1_metric, box_acc_metric, box_l1_metric]

    return General, KvstoreParam, RpnParam, RoiParam, BboxParam, DatasetParam, \
        ModelParam, OptimizeParam, TestParam, \
        transform, data_name, label_name, metric_list
|
import os
from oslo_config import cfg
from st2common.constants.system import VERSION_STRING
def do_register_opts(opts, group=None, ignore_errors=False):
    """Register config options, optionally suppressing registration errors.

    :param opts: List of oslo.config options to register.
    :param group: Option group name (None for the default group).
    :param ignore_errors: When True, suppress registration failures (e.g.
        duplicate registration); otherwise re-raise.
    """
    try:
        cfg.CONF.register_opts(opts, group=group)
    except Exception:
        # Was a bare ``except:`` which would also swallow SystemExit and
        # KeyboardInterrupt; catch only regular exceptions.
        if not ignore_errors:
            raise
def do_register_cli_opts(opt, ignore_errors=False):
    """Register one CLI option or a list/tuple of CLI options.

    :param opt: A single oslo.config option or a list/tuple of them.
    :param ignore_errors: When True, suppress registration failures.
    """
    # TODO: This function has broken name, it should work with lists :/
    if isinstance(opt, (list, tuple)):
        opts = opt
    else:
        opts = [opt]
    try:
        cfg.CONF.register_cli_opts(opts)
    except Exception:
        # Was a bare ``except:`` which would also swallow SystemExit and
        # KeyboardInterrupt; catch only regular exceptions.
        if not ignore_errors:
            raise
def register_opts(ignore_errors=False):
    """Register every st2common configuration option group.

    :param ignore_errors: Forwarded to the ``do_register_opts`` calls so
        duplicate registrations can be tolerated.
    """
    # Authentication middleware settings.
    auth_opts = [
        cfg.BoolOpt('enable', default=True, help='Enable authentication middleware.'),
        cfg.IntOpt('token_ttl', default=86400, help='Access token ttl in seconds.')
    ]
    do_register_opts(auth_opts, 'auth', ignore_errors)
    rbac_opts = [
        cfg.BoolOpt('enable', default=False, help='Enable RBAC.'),
    ]
    do_register_opts(rbac_opts, 'rbac', ignore_errors)
    # Account used for remote action execution.
    system_user_opts = [
        cfg.StrOpt('user',
                   default='stanley',
                   help='Default system user.'),
        cfg.StrOpt('ssh_key_file',
                   default='/home/vagrant/.ssh/stanley_rsa',
                   help='SSH private key for the system user.')
    ]
    do_register_opts(system_user_opts, 'system_user', ignore_errors)
    schema_opts = [
        cfg.IntOpt('version', default=4, help='Version of JSON schema to use.'),
        cfg.StrOpt('draft', default='http://json-schema.org/draft-04/schema#',
                   help='URL to the JSON schema draft.')
    ]
    do_register_opts(schema_opts, 'schema', ignore_errors)
    system_opts = [
        cfg.StrOpt('base_path', default='/opt/stackstorm',
                   help='Base path to all st2 artifacts.'),
        cfg.ListOpt('admin_users', default=[],
                    help='A list of usernames for users which should have admin privileges')
    ]
    do_register_opts(system_opts, 'system', ignore_errors)
    # The packs path default derives from the system group registered above.
    system_packs_base_path = os.path.join(cfg.CONF.system.base_path, 'packs')
    content_opts = [
        cfg.StrOpt('system_packs_base_path', default=system_packs_base_path,
                   help='Path to the directory which contains system packs.'),
        cfg.StrOpt('packs_base_paths', default=None,
                   help='Paths which will be searched for integration packs.')
    ]
    do_register_opts(content_opts, 'content', ignore_errors)
    # MongoDB connection settings.
    db_opts = [
        cfg.StrOpt('host', default='0.0.0.0', help='host of db server'),
        cfg.IntOpt('port', default=27017, help='port of db server'),
        cfg.StrOpt('db_name', default='st2', help='name of database'),
        cfg.StrOpt('username', help='username for db login'),
        cfg.StrOpt('password', help='password for db login'),
        cfg.IntOpt('connection_retry_max_delay_m', help='Connection retry total time (minutes).',
                   default=3),
        cfg.IntOpt('connection_retry_backoff_max_s', help='Connection retry backoff max (seconds).',
                   default=10),
        cfg.IntOpt('connection_retry_backoff_mul', help='Backoff multiplier (seconds).',
                   default=1)
    ]
    do_register_opts(db_opts, 'database', ignore_errors)
    messaging_opts = [
        # NOTE: Both 'url' and 'cluster_urls' are supported; consolidating on
        # a single option would be a breaking change, so both are kept.
        cfg.StrOpt('url', default='amqp://guest:guest@127.0.0.1:5672//',
                   help='URL of the messaging server.'),
        cfg.ListOpt('cluster_urls', default=[],
                    help='URL of all the nodes in a messaging service cluster.')
    ]
    do_register_opts(messaging_opts, 'messaging', ignore_errors)
    syslog_opts = [
        cfg.StrOpt('host', default='127.0.0.1',
                   help='Host for the syslog server.'),
        cfg.IntOpt('port', default=514,
                   help='Port for the syslog server.'),
        cfg.StrOpt('facility', default='local7',
                   help='Syslog facility level.'),
        cfg.StrOpt('protocol', default='udp',
                   help='Transport protocol to use (udp / tcp).')
    ]
    do_register_opts(syslog_opts, 'syslog', ignore_errors)
    log_opts = [
        # NOTE(review): default='' for a ListOpt — oslo.config presumably
        # coerces this to an empty list; confirm before changing to [].
        cfg.ListOpt('excludes', default='',
                    help='Exclusion list of loggers to omit.'),
        cfg.BoolOpt('redirect_stderr', default=False,
                    help='Controls if stderr should be redirected to the logs.'),
        cfg.BoolOpt('mask_secrets', default=True,
                    help='True to mask secrets in the log files.')
    ]
    do_register_opts(log_opts, 'log', ignore_errors)
    # Common API options
    api_opts = [
        cfg.StrOpt('host', default='0.0.0.0', help='StackStorm API server host'),
        cfg.IntOpt('port', default=9101, help='StackStorm API server port')
    ]
    do_register_opts(api_opts, 'api', ignore_errors)
    # Common auth options
    auth_opts = [
        cfg.StrOpt('api_url', default=None,
                   help='Base URL to the API endpoint excluding the version')
    ]
    do_register_opts(auth_opts, 'auth', ignore_errors)
    # Common options (used by action runner and sensor container)
    action_sensor_opts = [
        cfg.BoolOpt('enable', default=True,
                    help='Whether to enable or disable the ability to post a trigger on action.'),
    ]
    # NOTE(review): ignore_errors is not forwarded here, unlike every other
    # group in this function — confirm whether that is intentional.
    do_register_opts(action_sensor_opts, group='action_sensor')
    # Coordination options
    coord_opts = [
        cfg.StrOpt('url', default=None, help='Endpoint for the coordination server.'),
        cfg.IntOpt('lock_timeout', default=60, help='TTL for the lock if backend suports it.')
    ]
    do_register_opts(coord_opts, 'coordination', ignore_errors)
    # Mistral options
    mistral_opts = [
        cfg.StrOpt('v2_base_url', default='http://127.0.0.1:8989/v2', help='v2 API root endpoint.'),
        cfg.IntOpt('max_attempts', default=180, help='Max attempts to reconnect.'),
        cfg.IntOpt('retry_wait', default=5, help='Seconds to wait before reconnecting.'),
        cfg.StrOpt('keystone_username', default=None, help='Username for authentication.'),
        cfg.StrOpt('keystone_password', default=None, help='Password for authentication.'),
        cfg.StrOpt('keystone_project_name', default=None, help='OpenStack project scope.'),
        cfg.StrOpt('keystone_auth_url', default=None, help='Auth endpoint for Keystone.')
    ]
    do_register_opts(mistral_opts, group='mistral', ignore_errors=ignore_errors)
    # Common CLI options
    debug = cfg.BoolOpt('debug', default=False,
                        help='Enable debug mode. By default this will set all log levels to DEBUG.')
    profile = cfg.BoolOpt('profile', default=False,
                          help=('Enable profile mode. In the profile mode all the MongoDB queries and related '
                                'profile data are logged.'))
    use_debugger = cfg.BoolOpt('use-debugger', default=True,
                               help='Enables debugger. Note that using this option changes how the '
                                    'eventlet library is used to support async IO. This could result in '
                                    'failures that do not occur under normal operation.')
    cli_opts = [debug, profile, use_debugger]
    do_register_cli_opts(cli_opts, ignore_errors=ignore_errors)
def parse_args(args=None):
    """Register all options, then parse config files and CLI arguments.

    :param args: Optional argument list; oslo falls back to sys.argv when None.
    """
    register_opts()
    cfg.CONF(args=args, version=VERSION_STRING)
|
import mock
import unittest
import sys
from cloudbaseinit import init
from cloudbaseinit.plugins import base
from cloudbaseinit.openstack.common import cfg
# Shortcut to the global oslo config object.
CONF = cfg.CONF

# Windows-only modules are not importable on the test platform, so MagicMock
# stand-ins are patched into sys.modules (via mock_dict) before cloudbaseinit
# code that imports them is exercised.
_win32com_mock = mock.MagicMock()
_comtypes_mock = mock.MagicMock()
_pywintypes_mock = mock.MagicMock()
_ctypes_mock = mock.MagicMock()
_ctypes_util_mock = mock.MagicMock()
mock_dict = {'ctypes.util': _ctypes_util_mock,
             'win32com': _win32com_mock,
             'comtypes': _comtypes_mock,
             'pywintypes': _pywintypes_mock,
             'ctypes': _ctypes_mock}
class InitManagerTest(unittest.TestCase):
    """Unit tests for cloudbaseinit.init.InitManager."""

    @mock.patch.dict(sys.modules, mock_dict)
    def setUp(self):
        # sys.modules is patched with the Windows module mocks (mock_dict)
        # while InitManager is instantiated, so construction works off-Windows.
        self.osutils = mock.MagicMock()
        self.plugin = mock.MagicMock()
        self._init = init.InitManager()

    def tearDown(self):
        # NOTE(review): ``reload`` is the Python 2 builtin (Python 3 requires
        # importlib.reload) — confirm the supported interpreter version.
        reload(sys)
        reload(init)

    def test_get_plugin_status(self):
        """_get_plugin_status reads the plugin's section of the config."""
        self.osutils.get_config_value.return_value = 1
        response = self._init._get_plugin_status(self.osutils, 'fake plugin')
        self.osutils.get_config_value.assert_called_once_with(
            'fake plugin', self._init._PLUGINS_CONFIG_SECTION)
        self.assertTrue(response == 1)

    def test_set_plugin_status(self):
        """_set_plugin_status writes the status into the plugins section."""
        self._init._set_plugin_status(self.osutils, 'fake plugin', 'status')
        self.osutils.set_config_value.assert_called_once_with(
            'fake plugin', 'status', self._init._PLUGINS_CONFIG_SECTION)

    @mock.patch('cloudbaseinit.init.InitManager._get_plugin_status')
    @mock.patch('cloudbaseinit.init.InitManager._set_plugin_status')
    def _test_exec_plugin(self, status, mock_set_plugin_status,
                          mock_get_plugin_status):
        # Shared helper: exercise _exec_plugin with the given stored status.
        fake_name = 'fake name'
        self.plugin.get_name.return_value = fake_name
        self.plugin.execute.return_value = (status, True)
        mock_get_plugin_status.return_value = status
        response = self._init._exec_plugin(osutils=self.osutils,
                                           service='fake service',
                                           plugin=self.plugin,
                                           shared_data='shared data')
        mock_get_plugin_status.assert_called_once_with(self.osutils,
                                                       fake_name)
        # Plugins scheduled for the next boot are (re-)executed and their
        # new status persisted.
        if status is base.PLUGIN_EXECUTE_ON_NEXT_BOOT:
            self.plugin.execute.assert_called_once_with('fake service',
                                                        'shared data')
            mock_set_plugin_status.assert_called_once_with(self.osutils,
                                                           fake_name, status)
            self.assertTrue(response)

    def test_test_exec_plugin_execution_done(self):
        """Plugins already done are not re-executed."""
        self._test_exec_plugin(base.PLUGIN_EXECUTION_DONE)

    def test_test_exec_plugin(self):
        """Plugins marked for next boot are executed again."""
        self._test_exec_plugin(base.PLUGIN_EXECUTE_ON_NEXT_BOOT)

    def _test_check_plugin_os_requirements(self, requirements):
        # Shared helper: verify OS requirement filtering against 'win32'.
        sys.platform = 'win32'
        fake_name = 'fake name'
        self.plugin.get_name.return_value = fake_name
        self.plugin.get_os_requirements.return_value = requirements
        response = self._init._check_plugin_os_requirements(self.osutils,
                                                            self.plugin)
        self.plugin.get_name.assert_called_once_with()
        self.plugin.get_os_requirements.assert_called_once_with()
        if requirements[0] == 'win32':
            self.assertTrue(response)
        else:
            self.assertFalse(response)

    def test_check_plugin_os_requirements(self):
        """Matching platform requirement passes the check."""
        self._test_check_plugin_os_requirements(('win32', (5, 2)))

    def test_check_plugin_os_requirements_other_requirenments(self):
        """Non-matching platform requirement fails the check."""
        self._test_check_plugin_os_requirements(('linux', (5, 2)))

    @mock.patch('cloudbaseinit.init.InitManager'
                '._check_plugin_os_requirements')
    @mock.patch('cloudbaseinit.init.InitManager._exec_plugin')
    @mock.patch('cloudbaseinit.plugins.factory.PluginFactory.load_plugins')
    @mock.patch('cloudbaseinit.osutils.factory.OSUtilsFactory.get_os_utils')
    @mock.patch('cloudbaseinit.metadata.factory.MetadataServiceFactory.'
                'get_metadata_service')
    def test_configure_host(self, mock_get_metadata_service,
                            mock_get_os_utils, mock_load_plugins,
                            mock_exec_plugin,
                            mock_check_os_requirements):
        """configure_host runs each loaded plugin, cleans up and reboots."""
        fake_service = mock.MagicMock()
        fake_plugin = mock.MagicMock()
        mock_load_plugins.return_value = [fake_plugin]
        mock_get_os_utils.return_value = self.osutils
        mock_get_metadata_service.return_value = fake_service
        fake_service.get_name.return_value = 'fake name'
        self._init.configure_host()
        self.osutils.wait_for_boot_completion.assert_called_once()
        mock_get_metadata_service.assert_called_once_with()
        fake_service.get_name.assert_called_once_with()
        mock_check_os_requirements.assert_called_once_with(self.osutils,
                                                           fake_plugin)
        mock_exec_plugin.assert_called_once_with(self.osutils, fake_service,
                                                 fake_plugin, {})
        fake_service.cleanup.assert_called_once_with()
        self.osutils.reboot.assert_called_once_with()
|
"""TensorFlow op that scales gradient for backwards pass."""
from typing import Tuple
from sonnet.src import types
import tensorflow as tf
@tf.custom_gradient
def scale_gradient(
    t: tf.Tensor, scale: types.FloatLike
) -> Tuple[tf.Tensor, types.GradFn]:
  """Identity op on the forward pass that rescales the backward gradient.

  Args:
    t: A Tensor.
    scale: The scale factor for the gradient on the backwards pass.

  Returns:
    A Tensor same as input, with scaled backward gradient.
  """

  def backward(dy: tf.Tensor) -> Tuple[tf.Tensor, None]:
    """Multiply the upstream gradient by ``scale``; no gradient w.r.t. scale."""
    return dy * scale, None

  return t, backward
|
'''
Tests for the Git state
'''
from __future__ import absolute_import
import os
import shutil
import socket
import subprocess
import tempfile
from salttesting.helpers import ensure_in_syspath, skip_if_binaries_missing
ensure_in_syspath('../../')
import integration
import salt.utils
class GitTest(integration.ModuleCase, integration.SaltReturnAssertsMixIn):
    '''
    Validate the git state

    Each test drives a real ``git.latest``/``git.present``/``git.config_set``
    state run against a throw-away clone under ``integration.TMP`` and cleans
    the target up afterwards.
    '''
    def setUp(self):
        # Resolve github.com up front so that network problems surface as a
        # test skip instead of a confusing state failure later on.
        super(GitTest, self).setUp()
        self.__domain = 'github.com'
        try:
            if hasattr(socket, 'setdefaulttimeout'):
                # 10 second dns timeout
                socket.setdefaulttimeout(10)
            socket.gethostbyname(self.__domain)
        except socket.error:
            msg = 'error resolving {0}, possible network issue?'
            self.skipTest(msg.format(self.__domain))
    def test_latest(self):
        '''
        git.latest: a plain clone must succeed and leave a .git directory.
        '''
        name = os.path.join(integration.TMP, 'salt_repo')
        try:
            ret = self.run_state(
                'git.latest',
                name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
                target=name
            )
            self.assertSaltTrueReturn(ret)
            self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
        finally:
            shutil.rmtree(name, ignore_errors=True)
    def test_latest_with_rev_and_submodules(self):
        '''
        git.latest: cloning a specific rev with submodules enabled.
        '''
        name = os.path.join(integration.TMP, 'salt_repo')
        try:
            ret = self.run_state(
                'git.latest',
                name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
                rev='develop',
                target=name,
                submodules=True
            )
            self.assertSaltTrueReturn(ret)
            self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
        finally:
            shutil.rmtree(name, ignore_errors=True)
    def test_latest_failure(self):
        '''
        git.latest: an unresolvable repo URL must fail cleanly and must not
        leave a partial checkout behind.
        '''
        name = os.path.join(integration.TMP, 'salt_repo')
        try:
            ret = self.run_state(
                'git.latest',
                name='https://youSpelledGitHubWrong.com/saltstack/salt-test-repo.git',
                rev='develop',
                target=name,
                submodules=True
            )
            self.assertSaltFalseReturn(ret)
            self.assertFalse(os.path.isdir(os.path.join(name, '.git')))
        finally:
            shutil.rmtree(name, ignore_errors=True)
    def test_latest_empty_dir(self):
        '''
        git.latest: cloning into an existing but empty target dir works.
        '''
        name = os.path.join(integration.TMP, 'salt_repo')
        if not os.path.isdir(name):
            os.mkdir(name)
        try:
            ret = self.run_state(
                'git.latest',
                name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
                rev='develop',
                target=name,
                submodules=True
            )
            self.assertSaltTrueReturn(ret)
            self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
        finally:
            shutil.rmtree(name, ignore_errors=True)
    def test_latest_unless_no_cwd_issue_6800(self):
        '''
        cwd=target was being passed to _run_check which blew up if
        target dir did not already exist.
        '''
        name = os.path.join(integration.TMP, 'salt_repo')
        # The regression requires that the target directory NOT exist yet.
        if os.path.isdir(name):
            shutil.rmtree(name)
        try:
            ret = self.run_state(
                'git.latest',
                name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
                rev='develop',
                target=name,
                unless='test -e {0}'.format(name),
                submodules=True
            )
            self.assertSaltTrueReturn(ret)
            self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
        finally:
            shutil.rmtree(name, ignore_errors=True)
    def test_numeric_rev(self):
        '''
        git.latest with numeric revision

        rev is deliberately passed as a float (0.11) to check that non-string
        revisions are coerced rather than blowing up.
        '''
        name = os.path.join(integration.TMP, 'salt_repo')
        try:
            ret = self.run_state(
                'git.latest',
                name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
                rev=0.11,
                target=name,
                submodules=True,
                timeout=120
            )
            self.assertSaltTrueReturn(ret)
            self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
        finally:
            shutil.rmtree(name, ignore_errors=True)
    def test_latest_with_local_changes(self):
        '''
        Ensure that we fail the state when there are local changes and succeed
        when force_reset is True.
        '''
        name = os.path.join(integration.TMP, 'salt_repo')
        try:
            # Clone repo
            ret = self.run_state(
                'git.latest',
                name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
                target=name
            )
            self.assertSaltTrueReturn(ret)
            self.assertTrue(os.path.isdir(os.path.join(name, '.git')))
            # Make change to LICENSE file.
            with salt.utils.fopen(os.path.join(name, 'LICENSE'), 'a') as fp_:
                fp_.write('Lorem ipsum dolor blah blah blah....\n')
            # Make sure that we now have uncommitted changes
            self.assertTrue(self.run_function('git.diff', [name, 'HEAD']))
            # Re-run state with force_reset=False, this should fail
            ret = self.run_state(
                'git.latest',
                name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
                target=name,
                force_reset=False
            )
            self.assertSaltFalseReturn(ret)
            # Now run the state with force_reset=True, this should succeed
            ret = self.run_state(
                'git.latest',
                name='https://{0}/saltstack/salt-test-repo.git'.format(self.__domain),
                target=name,
                force_reset=True
            )
            self.assertSaltTrueReturn(ret)
            # Make sure that we no longer have uncommitted changes
            self.assertFalse(self.run_function('git.diff', [name, 'HEAD']))
        finally:
            shutil.rmtree(name, ignore_errors=True)
    def test_present(self):
        '''
        git.present: a bare repo is created (bare repos keep HEAD at top level).
        '''
        name = os.path.join(integration.TMP, 'salt_repo')
        try:
            ret = self.run_state(
                'git.present',
                name=name,
                bare=True
            )
            self.assertSaltTrueReturn(ret)
            self.assertTrue(os.path.isfile(os.path.join(name, 'HEAD')))
        finally:
            shutil.rmtree(name, ignore_errors=True)
    def test_present_failure(self):
        '''
        git.present: refuses to init into a non-empty directory.
        '''
        name = os.path.join(integration.TMP, 'salt_repo')
        if not os.path.isdir(name):
            os.mkdir(name)
        try:
            # Any file in the target dir should make git.present bail out.
            fname = os.path.join(name, 'stoptheprocess')
            with salt.utils.fopen(fname, 'a') as fh_:
                fh_.write('')
            ret = self.run_state(
                'git.present',
                name=name,
                bare=True
            )
            self.assertSaltFalseReturn(ret)
            self.assertFalse(os.path.isfile(os.path.join(name, 'HEAD')))
        finally:
            shutil.rmtree(name, ignore_errors=True)
    def test_present_empty_dir(self):
        '''
        git.present: an existing but empty directory is acceptable.
        '''
        name = os.path.join(integration.TMP, 'salt_repo')
        if not os.path.isdir(name):
            os.mkdir(name)
        try:
            ret = self.run_state(
                'git.present',
                name=name,
                bare=True
            )
            self.assertSaltTrueReturn(ret)
            self.assertTrue(os.path.isfile(os.path.join(name, 'HEAD')))
        finally:
            shutil.rmtree(name, ignore_errors=True)
    @skip_if_binaries_missing('git')
    def test_config_set_value_with_space_character(self):
        '''
        git.config_set: values containing a space must survive the round trip.
        '''
        name = tempfile.mkdtemp(dir=integration.TMP)
        self.addCleanup(shutil.rmtree, name, ignore_errors=True)
        subprocess.check_call(['git', 'init', '--quiet', name])
        ret = self.run_state(
            'git.config_set',
            name='user.name',
            value='foo bar',
            repo=name,
            **{'global': False})
        self.assertSaltTrueReturn(ret)
# Allow running this module directly via the integration test runner.
if __name__ == '__main__':
    from integration import run_tests
    run_tests(GitTest)
|
from lxml import etree
from tempest.common.rest_client import RestClientXML
from tempest.services.compute.xml.common import xml_to_json
class HypervisorClientXML(RestClientXML):
    """XML REST client for the Nova ``os-hypervisors`` admin extension.

    Every call returns the ``(response, parsed_body)`` pair produced by the
    underlying :class:`RestClientXML` GET plus XML-to-dict conversion.
    """

    def __init__(self, config, username, password, auth_url, tenant_name=None):
        super(HypervisorClientXML, self).__init__(config, username,
                                                  password, auth_url,
                                                  tenant_name)
        self.service = self.config.compute.catalog_type

    def _parse_array(self, node):
        """Convert each child XML element of *node* into a dict."""
        parsed = []
        for element in node:
            parsed.append(xml_to_json(element))
        return parsed

    def get_hypervisor_list(self):
        """List hypervisors information."""
        resp, xml_body = self.get('os-hypervisors', self.headers)
        return resp, self._parse_array(etree.fromstring(xml_body))

    def get_hypervisor_list_details(self):
        """Show detailed hypervisors information."""
        resp, xml_body = self.get('os-hypervisors/detail', self.headers)
        return resp, self._parse_array(etree.fromstring(xml_body))

    def get_hypervisor_show_details(self, hyper_id):
        """Display the details of the specified hypervisor."""
        resp, xml_body = self.get('os-hypervisors/%s' % hyper_id,
                                  self.headers)
        return resp, xml_to_json(etree.fromstring(xml_body))

    def get_hypervisor_servers(self, hyper_name):
        """List instances belonging to the specified hypervisor."""
        resp, xml_body = self.get('os-hypervisors/%s/servers' % hyper_name,
                                  self.headers)
        return resp, self._parse_array(etree.fromstring(xml_body))

    def get_hypervisor_stats(self):
        """Get hypervisor statistics over all compute nodes."""
        resp, xml_body = self.get('os-hypervisors/statistics', self.headers)
        return resp, xml_to_json(etree.fromstring(xml_body))

    def get_hypervisor_uptime(self, hyper_id):
        """Display the uptime of the specified hypervisor."""
        resp, xml_body = self.get('os-hypervisors/%s/uptime' % hyper_id,
                                  self.headers)
        return resp, xml_to_json(etree.fromstring(xml_body))

    def search_hypervisor(self, hyper_name):
        """Search specified hypervisor."""
        resp, xml_body = self.get('os-hypervisors/%s/search' % hyper_name,
                                  self.headers)
        return resp, self._parse_array(etree.fromstring(xml_body))
|
import ast
import re
import mock
import novaclient.exceptions as nova_ex
import six
from sahara.conductor import resource as r
from sahara.plugins.vanilla import plugin
import sahara.service.validation as v
from sahara.tests.unit import base
from sahara.tests.unit import testutils as tu
# Module-level scratch dict available to individual tests.
m = {}
# For each JSON-schema type name, sample values that do NOT match that type;
# used to verify that validation rejects wrongly-typed fields.
_types_checks = {
    "string": [1, (), {}, True],
    "integer": ["a", (), {}, True],
    "uuid": ["z550e8400-e29b-41d4-a716-446655440000", 1, "a", (), {}, True],
    "array": [{}, 'a', 1, True],
    "boolean": [1, 'a', (), {}]
}
def _update_data(data, update):
data.update(update)
return data
def _get_plugins():
    """Stub plugin registry: the vanilla provider is the only entry."""
    provider = plugin.VanillaProvider
    provider.name = 'vanilla'
    return [provider]
def _get_plugin(name):
    """Stub plugin lookup: only 'vanilla' is known, anything else is None."""
    if name != 'vanilla':
        return None
    provider = plugin.VanillaProvider
    provider.name = 'vanilla'
    return provider
def _get_keypair(name):
    """Stub for nova keypairs.get(): only 'test_keypair' exists."""
    if name == "test_keypair":
        return None
    raise nova_ex.NotFound("")
def _get_network(**kwargs):
if 'id' in kwargs and (
kwargs['id'] != "d9a3bebc-f788-4b81-9a93-aa048022c1ca"):
raise nova_ex.NotFound("")
return 'OK'
def _get_fl_ip_pool_list():
    """Stub floating-ip pool listing: one pool, named by the known net id."""
    pool = FakeNetwork("d9a3bebc-f788-4b81-9a93-aa048022c1ca")
    return [pool]
def _get_availability_zone_list(detailed=True):
    """Stub AZ listing: a single 'nova' zone; *detailed* is ignored."""
    zone = FakeAvailabilityZone('nova')
    return [zone]
def _get_heat_stack_list(**kwargs):
if (kwargs.get('filters') and
kwargs.get('filters').get('name') == 'test-heat'):
return [FakeStack('test-heat')]
return []
class FakeStack(object):
    # Minimal heatclient stack stand-in; only .stack_name is read by tests.
    def __init__(self, name):
        self.stack_name = name
class FakeNetwork(object):
    # Minimal nova network stand-in; only .name is read by tests.
    def __init__(self, name):
        self.name = name
class FakeAvailabilityZone(object):
    # Minimal AZ stand-in; attribute name matches novaclient's .zoneName.
    def __init__(self, name):
        self.zoneName = name
class FakeFlavor(object):
    # Minimal flavor stand-in; only .id is read by tests.
    # NOTE: the parameter deliberately mirrors novaclient's attribute name
    # even though it shadows the builtin id().
    def __init__(self, id):
        self.id = id
class FakeSecurityGroup(object):
    # Minimal security-group stand-in; tests read both .id and .name.
    def __init__(self, id, name):
        self.id = id
        self.name = name
def _get_flavors_list():
    """Stub nova flavors.list(): a single flavor with id '42'."""
    flavor = FakeFlavor("42")
    return [flavor]
def _get_security_groups_list():
    """Stub nova security_groups.list(): three well-known groups."""
    known = [("1", "default"), ("2", "group1"), ("3", "group2")]
    return [FakeSecurityGroup(sg_id, sg_name) for sg_id, sg_name in known]
def start_patch(patch_templates=True):
    """Patch sahara's service API and OpenStack client factories with mocks.

    Starts (activates) every patcher, wires the mocks to the stub helpers in
    this module, and returns the list of started patchers so the caller can
    pass it to stop_patch() when done.  When *patch_templates* is False, the
    node-group/cluster template APIs are left unpatched.
    """
    get_clusters_p = mock.patch("sahara.service.api.get_clusters")
    get_cluster_p = mock.patch("sahara.service.api.get_cluster")
    if patch_templates:
        get_ng_templates_p = mock.patch(
            "sahara.service.api.get_node_group_templates")
        get_ng_template_p = mock.patch(
            "sahara.service.api.get_node_group_template")
    if patch_templates:
        get_cl_templates_p = mock.patch(
            "sahara.service.api.get_cluster_templates")
        get_cl_template_p = mock.patch(
            "sahara.service.api.get_cluster_template")
    nova_p = mock.patch("sahara.utils.openstack.nova.client")
    heat_p = mock.patch("sahara.utils.openstack.heat.client")
    cinder_p = mock.patch("sahara.utils.openstack.cinder.client")
    cinder_exists_p = mock.patch(
        "sahara.utils.openstack.cinder.check_cinder_exists")
    get_image_p = mock.patch("sahara.service.api.get_image")
    get_image = get_image_p.start()
    get_clusters = get_clusters_p.start()
    get_cluster = get_cluster_p.start()
    if patch_templates:
        get_ng_templates = get_ng_templates_p.start()
        get_ng_template = get_ng_template_p.start()
    if patch_templates:
        get_cl_templates = get_cl_templates_p.start()
        # NOTE(review): the started mock is discarded here, unlike the
        # get_ng_template case above; behavior is wired via the patched
        # target itself — confirm this asymmetry is intentional.
        get_cl_template_p.start()
    nova = nova_p.start()
    if patch_templates:
        get_cl_templates.return_value = []
    # nova is the patched client *factory*; nova() yields the same Mock on
    # every call, so configuring nova().<attr> configures the shared client.
    nova().flavors.list.side_effect = _get_flavors_list
    nova().security_groups.list.side_effect = _get_security_groups_list
    nova().keypairs.get.side_effect = _get_keypair
    nova().networks.find.side_effect = _get_network
    nova().networks.find.__name__ = 'find'
    nova().floating_ip_pools.list.side_effect = _get_fl_ip_pool_list
    nova().availability_zones.list.side_effect = _get_availability_zone_list
    heat = heat_p.start()
    heat().stacks.list.side_effect = _get_heat_stack_list
    cinder = cinder_p.start()
    cinder().availability_zones.list.side_effect = _get_availability_zone_list
    cinder_exists = cinder_exists_p.start()
    cinder_exists.return_value = True
    class Image(object):
        # Fake glance image: name 'test' maps to the registered id/tags,
        # anything else to a "wrong" id and an unmatchable tag set.
        def __init__(self, name='test'):
            self.name = name
        @property
        def id(self):
            if self.name == 'test':
                return '550e8400-e29b-41d4-a716-446655440000'
            else:
                return '813fe450-40d2-4acc-ade5-ea753a1bd5bc'
        @property
        def tags(self):
            if self.name == 'test':
                return ['vanilla', '1.2.1']
            else:
                return ['vanilla', 'wrong_tag']
    def _get_image(id):
        # Image lookup by id: the known UUID returns the valid image.
        if id == '550e8400-e29b-41d4-a716-446655440000':
            return Image()
        else:
            return Image('wrong_test')
    get_image.side_effect = _get_image
    nova().images.list_registered.return_value = [Image(),
                                                  Image(name='wrong_name')]
    ng_dict = tu.make_ng_dict('ng', '42', ['namenode'], 1)
    cluster = tu.create_cluster('test', 't', 'vanilla', '1.2.1', [ng_dict],
                                id=1, status='Active')
    # stub clusters list
    get_clusters.return_value = [cluster]
    get_cluster.return_value = cluster
    # stub node templates
    if patch_templates:
        ngt_dict = {'name': 'test', 'tenant_id': 't', 'flavor_id': '42',
                    'plugin_name': 'vanilla', 'hadoop_version': '1.2.1',
                    'id': '550e8400-e29b-41d4-a716-446655440000',
                    'node_processes': ['namenode']}
        get_ng_templates.return_value = [r.NodeGroupTemplateResource(ngt_dict)]
        ct_dict = {'name': 'test', 'tenant_id': 't',
                   'plugin_name': 'vanilla', 'hadoop_version': '1.2.1'}
        get_cl_templates.return_value = [r.ClusterTemplateResource(ct_dict)]
    def _get_ng_template(id):
        # Template lookup by id over the stubbed template list.
        for template in get_ng_templates():
            if template.id == id:
                return template
        return None
    if patch_templates:
        get_ng_template.side_effect = _get_ng_template
    # request data to validate
    patchers = [get_clusters_p, get_cluster_p,
                nova_p, get_image_p, heat_p, cinder_p,
                cinder_exists_p]
    if patch_templates:
        patchers.extend([get_ng_template_p, get_ng_templates_p,
                         get_cl_template_p, get_cl_templates_p])
    return patchers
def stop_patch(patchers):
    """Deactivate every patcher, in reverse order of activation."""
    for active in patchers[::-1]:
        active.stop()
class ValidationTestCase(base.SaharaTestCase):
    """Base class for request-validation tests.

    Subclasses set ``self.scheme`` (a JSON schema) and
    ``self._create_object_fun`` (the validator callable); the ``_assert_*``
    helpers then run sahara's validation machinery against sample payloads
    and check which bad_request call (if any) it produces.
    """
    def setUp(self):
        super(ValidationTestCase, self).setUp()
        self._create_object_fun = None
        self.scheme = None
    def tearDown(self):
        self._create_object_fun = None
        super(ValidationTestCase, self).tearDown()
    def _assert_calls(self, mock, call_info):
        # call_info is either falsy (expect no bad_request call) or a tuple
        # (expected call count, expected error code, expected message(s)).
        if not call_info:
            self.assertEqual(0, mock.call_count, "Unexpected call to %s: %s"
                             % (mock.name, str(mock.call_args)))
        else:
            self.assertEqual(call_info[0], mock.call_count)
            self.assertEqual(call_info[1], mock.call_args[0][0].code)
            possible_messages = ([call_info[2]] if isinstance(
                call_info[2], six.string_types) else call_info[2])
            match = False
            check = mock.call_args[0][0].message
            # Messages may carry a trailing "Error ID: ..." line; only the
            # first line is compared.
            if check.find('Error ID:') != -1:
                check = check.split('\n')[0]
            for message in possible_messages:
                if self._check_match(message, check):
                    match = True
                    break
            if not match:
                self.assertIn(check, possible_messages)
    def _check_match(self, expected, actual):
        # Compare the embedded dict (if any) structurally and the remaining
        # text by equality or regex.
        d1, r1 = self._extract_printed_dict(expected)
        d2, r2 = self._extract_printed_dict(actual)
        # Note(slukjanov): regex needed because of different
        # versions of jsonschema generate different
        # messages.
        return (r1 == r2 or re.match(r1, r2)) and (d1 == d2)
    def _extract_printed_dict(self, s):
        # Split *s* into (parsed dict, text-with-dict-removed); returns
        # (None, s) when no {...} span is present.
        start = s.find('{')
        if start == -1:
            return None, s
        end = s.rfind('}')
        if end == -1:
            return None, s
        return ast.literal_eval(s[start:end+1]), s[0:start+1] + s[end]
    # Decorators apply bottom-up, so bad_request is patched first and is
    # therefore the first injected argument (bad_req), then request_data.
    @mock.patch("sahara.utils.api.request_data")
    @mock.patch("sahara.utils.api.bad_request")
    def _assert_create_object_validation(
            self, bad_req=None, request_data=None,
            data=None, bad_req_i=None):
        request_data.return_value = data
        # mock function that should be validated
        patchers = start_patch()
        m_func = mock.Mock()
        m_func.__name__ = "m_func"
        v.validate(self.scheme, self._create_object_fun)(m_func)(data=data)
        self.assertEqual(1, request_data.call_count)
        self._assert_calls(bad_req, bad_req_i)
        stop_patch(patchers)
    def _assert_valid_name_hostname_validation(self, data):
        # Exercise the four standard name-validation failures: None, empty,
        # too long, and invalid hostname characters.
        data.update({'name': None})
        self._assert_create_object_validation(
            data=data,
            bad_req_i=(1, "VALIDATION_ERROR",
                       u"None is not of type 'string'")
        )
        data.update({'name': ""})
        self._assert_create_object_validation(
            data=data,
            bad_req_i=(1, "VALIDATION_ERROR",
                       u"'' is too short")
        )
        data.update({'name': ('a' * 51)})
        self._assert_create_object_validation(
            data=data,
            bad_req_i=(1, "VALIDATION_ERROR",
                       u"'%s' is too long" % ('a' * 51))
        )
        data.update({'name': 'a-!'})
        self._assert_create_object_validation(
            data=data,
            bad_req_i=(1, "VALIDATION_ERROR",
                       u"'a-!' is not a 'valid_name_hostname'")
        )
    def _prop_types_str(self, prop_types):
        return ", ".join(["'%s'" % prop for prop in prop_types])
    def _assert_types(self, default_data):
        # For every schema property, submit each known wrongly-typed sample
        # value and expect a type (or enum) validation error.
        for p_name in self.scheme['properties']:
            prop = self.scheme['properties'][p_name]
            prop_types = prop["type"]
            if type(prop_types) is not list:
                prop_types = [prop_types]
            for prop_type in prop_types:
                if prop_type in _types_checks:
                    for type_ex in _types_checks[prop_type]:
                        data = default_data.copy()
                        value = type_ex
                        value_str = str(value)
                        if isinstance(value, str):
                            value_str = "'%s'" % value_str
                        data.update({p_name: value})
                        message = ("%s is not of type %s" %
                                   (value_str,
                                    self._prop_types_str(prop_types)))
                        if "enum" in prop:
                            message = [message, "%s is not one of %s" %
                                                (value_str, prop["enum"])]
                        self._assert_create_object_validation(
                            data=data,
                            bad_req_i=(1, 'VALIDATION_ERROR', message)
                        )
    def _assert_cluster_configs_validation(self, require_image_id=False):
        data = {
            'name': 'test-cluster',
            'plugin_name': 'vanilla',
            'hadoop_version': '1.2.1',
            'cluster_configs': {
                'HDFS': {
                    u'hadoop.tmp.dir': '/temp/'
                }
            },
            'default_image_id': '550e8400-e29b-41d4-a716-446655440000'
        }
        if require_image_id:
            data_without_image = data.copy()
            data_without_image.pop('default_image_id')
            self._assert_create_object_validation(
                data=data_without_image,
                bad_req_i=(1, 'NOT_FOUND',
                           "'default_image_id' field is not found")
            )
        self._assert_create_object_validation(
            data=_update_data(data.copy(), {
                'cluster_configs': {
                    'wrong_target': {
                        u'hadoop.tmp.dir': '/temp/'
                    }
                }}),
            bad_req_i=(1, 'INVALID_REFERENCE',
                       "Plugin doesn't contain applicable "
                       "target 'wrong_target'")
        )
        self._assert_create_object_validation(
            data=_update_data(data.copy(), {
                'cluster_configs': {
                    'HDFS': {
                        u's': '/temp/'
                    }
                }
            }),
            bad_req_i=(1, 'INVALID_REFERENCE',
                       "Plugin's applicable target 'HDFS' doesn't "
                       "contain config with name 's'")
        )
    def _assert_cluster_default_image_tags_validation(self):
        # The first image id carries the right plugin/version tags and must
        # pass; the second is tagged 'wrong_tag' and must be rejected.
        data = {
            'name': 'test-cluster',
            'plugin_name': 'vanilla',
            'hadoop_version': '1.2.1',
            'default_image_id': '550e8400-e29b-41d4-a716-446655440000'
        }
        self._assert_create_object_validation(data=data)
        data = {
            'name': 'test-cluster',
            'plugin_name': 'vanilla',
            'hadoop_version': '1.2.1',
            'default_image_id': '813fe450-40d2-4acc-ade5-ea753a1bd5bc'
        }
        self._assert_create_object_validation(
            data=data,
            bad_req_i=(1, 'INVALID_REFERENCE',
                       "Requested image "
                       "'813fe450-40d2-4acc-ade5-ea753a1bd5bc' "
                       "doesn't contain required tags: "
                       "['1.2.1']"))
    def assert_protected_resource_exception(self, ex):
        self.assertIn("marked as protected", six.text_type(ex))
    def assert_created_in_another_tenant_exception(self, ex):
        self.assertIn("wasn't created in this tenant", six.text_type(ex))
|
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import neighbor
# NOTE(review): auto-generated by pyangbind — prefer regenerating from the
# YANG model over hand-editing this class.
class neighbors(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/isis-neighbor-attribute/neighbors. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: This container describes IS neighbors.
    """
    # __neighbor is name-mangled to _neighbors__neighbor by __slots__.
    __slots__ = ("_path_helper", "_extmethods", "__neighbor")

    _yang_name = "neighbors"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False

        self._extmethods = False
        # Backing store for the YANG 'neighbor' list (read-only: config false).
        self.__neighbor = YANGDynClass(
            base=YANGListType(
                False,
                neighbor.neighbor,
                yang_name="neighbor",
                parent=self,
                is_container="list",
                user_ordered=False,
                path_helper=self._path_helper,
                yang_keys="False",
                extensions=None,
            ),
            is_container="list",
            yang_name="neighbor",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="list",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Path is inherited from the parent when attached to a tree; the
        # static fallback below is the absolute schema path.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "isis-neighbor-attribute",
                "neighbors",
            ]

    def _get_neighbor(self):
        """
        Getter method for neighbor, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/isis_neighbor_attribute/neighbors/neighbor (list)

        YANG Description: This list defines ISIS extended reachability neighbor
        attributes.
        """
        return self.__neighbor

    def _set_neighbor(self, v, load=False):
        """
        Setter method for neighbor, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/isis_neighbor_attribute/neighbors/neighbor (list)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_neighbor is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_neighbor() directly.

        YANG Description: This list defines ISIS extended reachability neighbor
        attributes.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGListType(
                    False,
                    neighbor.neighbor,
                    yang_name="neighbor",
                    parent=self,
                    is_container="list",
                    user_ordered=False,
                    path_helper=self._path_helper,
                    yang_keys="False",
                    extensions=None,
                ),
                is_container="list",
                yang_name="neighbor",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="list",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """neighbor must be of a type compatible with list""",
                    "defined-type": "list",
                    "generated-type": """YANGDynClass(base=YANGListType(False,neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
                }
            )

        self.__neighbor = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_neighbor(self):
        # Reset the list to a fresh, empty default instance.
        self.__neighbor = YANGDynClass(
            base=YANGListType(
                False,
                neighbor.neighbor,
                yang_name="neighbor",
                parent=self,
                is_container="list",
                user_ordered=False,
                path_helper=self._path_helper,
                yang_keys="False",
                extensions=None,
            ),
            is_container="list",
            yang_name="neighbor",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="list",
            is_config=False,
        )

    # Read-only property: config false in the YANG model, so no setter.
    neighbor = __builtin__.property(_get_neighbor)

    _pyangbind_elements = OrderedDict([("neighbor", neighbor)])
from . import neighbor
# NOTE(review): auto-generated (l2 variant of the class above) by pyangbind —
# prefer regenerating from the YANG model over hand-editing.
class neighbors(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/isis-neighbor-attribute/neighbors. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: This container describes IS neighbors.
    """
    # __neighbor is name-mangled to _neighbors__neighbor by __slots__.
    __slots__ = ("_path_helper", "_extmethods", "__neighbor")

    _yang_name = "neighbors"
    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        self._path_helper = False

        self._extmethods = False
        # Backing store for the YANG 'neighbor' list (read-only: config false).
        self.__neighbor = YANGDynClass(
            base=YANGListType(
                False,
                neighbor.neighbor,
                yang_name="neighbor",
                parent=self,
                is_container="list",
                user_ordered=False,
                path_helper=self._path_helper,
                yang_keys="False",
                extensions=None,
            ),
            is_container="list",
            yang_name="neighbor",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="list",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Path is inherited from the parent when attached to a tree; the
        # static fallback below is the absolute schema path.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "isis-neighbor-attribute",
                "neighbors",
            ]

    def _get_neighbor(self):
        """
        Getter method for neighbor, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/isis_neighbor_attribute/neighbors/neighbor (list)

        YANG Description: This list defines ISIS extended reachability neighbor
        attributes.
        """
        return self.__neighbor

    def _set_neighbor(self, v, load=False):
        """
        Setter method for neighbor, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/isis_neighbor_attribute/neighbors/neighbor (list)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_neighbor is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_neighbor() directly.

        YANG Description: This list defines ISIS extended reachability neighbor
        attributes.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            t = YANGDynClass(
                v,
                base=YANGListType(
                    False,
                    neighbor.neighbor,
                    yang_name="neighbor",
                    parent=self,
                    is_container="list",
                    user_ordered=False,
                    path_helper=self._path_helper,
                    yang_keys="False",
                    extensions=None,
                ),
                is_container="list",
                yang_name="neighbor",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="list",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """neighbor must be of a type compatible with list""",
                    "defined-type": "list",
                    "generated-type": """YANGDynClass(base=YANGListType(False,neighbor.neighbor, yang_name="neighbor", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='False', extensions=None), is_container='list', yang_name="neighbor", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='list', is_config=False)""",
                }
            )

        self.__neighbor = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_neighbor(self):
        # Reset the list to a fresh, empty default instance.
        self.__neighbor = YANGDynClass(
            base=YANGListType(
                False,
                neighbor.neighbor,
                yang_name="neighbor",
                parent=self,
                is_container="list",
                user_ordered=False,
                path_helper=self._path_helper,
                yang_keys="False",
                extensions=None,
            ),
            is_container="list",
            yang_name="neighbor",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="list",
            is_config=False,
        )

    # Read-only property: config false in the YANG model, so no setter.
    neighbor = __builtin__.property(_get_neighbor)

    _pyangbind_elements = OrderedDict([("neighbor", neighbor)])
|
from mosaic import app
# Start the web server only when this file is executed directly,
# not when it is imported (e.g. by a WSGI container).
if __name__ == '__main__':
    app.run()
|
'''
Created on Nov 2, 2012
@author: maodouzi
'''
import logging
from keystoneclient.v2_0 import client as keystone_client
from novaclient.v1_1 import client as nova_client
from cinderclient.v1 import client as cinder_client
from keystoneclient.exceptions import BadRequest
from openstack_dashboard.local.local_settings import OPENSTACK_HOST
LOG = logging.getLogger(__name__)
# Role granted to newly created users; "_member_" is keystone's default
# member role name.
DEFAULT_ROLE = None
MEMBER_ROLE = "_member_"
# Keystone admin endpoint (v2.0 admin API listens on 35357).
ENDPOINT_URL = "http://%s:35357/v2.0" % OPENSTACK_HOST
# Raw messages; the comprehension below rewraps each value as
# "ERROR: <msg> !" so callers get a uniformly formatted string.
# NOTE(review): "already exist" should read "already exists" — left as-is
# because callers may match on the exact text.
ERR_MSG = {"accountExist": "Account already exist"
           }
ERR_MSG = {key:"ERROR: %s !" % value for key, value in ERR_MSG.items()}
class RequestException(Exception):
    """Error raised for failed account/signup operations.

    The human-readable text is available both as ``str(exc)`` and via the
    ``message`` attribute (kept because existing callers read it directly).
    """

    # Default text. The original referenced self.__class__.message without
    # ever defining it, so RequestException('') died with AttributeError.
    message = "Request failed"

    def __init__(self, message=None):
        # Bug fix: str(None) is the truthy string 'None', so the original
        # fallback never triggered for message=None. Treat None/empty as
        # "use the class default" instead.
        if message is None or str(message) == "":
            text = self.__class__.message
        else:
            text = str(message)
        super(RequestException, self).__init__(text)
        self.message = text

    def __str__(self):
        return self.message
class RequestClient(object):
def __init__(self, username, password, email, phoneNum, realName, corpName,
applyReason, quota, token, adminTenant, adminUser, adminPasswd,
endpoint=ENDPOINT_URL):
self.token = token
self.endpoint = endpoint
self.conn = keystone_client.Client(token=self.token, endpoint=self.endpoint)
self._fetchInfo()
self.novaConn = nova_client.Client(username=adminUser,
api_key=adminPasswd,
project_id=adminTenant,
auth_url=endpoint)
self.cinderConn = cinder_client.Client(username=adminUser,
api_key=adminPasswd,
project_id=adminTenant,
auth_url=endpoint)
self.quota = quota
self.username = username
self.password = password
self.email = email
self.realName = realName
self.phoneNum = phoneNum
self.corpName = corpName
self.applyReason = applyReason
self.description = "==".join((self.email, self.phoneNum, self.realName, self.corpName, self.applyReason))
if self._isAccountExist():
raise RequestException(ERR_MSG["accountExist"])
def createAccount(self):
try:
self._createTenant()
self._updateQuota()
self._createUser()
self._addRole()
except Exception as e:
self.deleteAccount()
raise RequestException(e)
def deleteAccount(self):
self._deleteTenant()
self._deleteUser()
def _checkRequestArgs(self):
return self._isRequestValid() and (not self._isAccountExist())
def _fetchInfo(self):
try:
self.tenantList = self.conn.tenants.list()
self.userList = self.conn.users.list()
self.roleList = self.conn.roles.list()
self.tenantDict = {str(item.name):str(item.id) for item in self.tenantList}
self.userDict = {str(item.name):str(item.id) for item in self.userList}
self.memberRoleId = [str(item.id) for item in self.roleList
if str(item.name) == MEMBER_ROLE][0]
try:
self.username
except AttributeError:
pass
else:
self.tenantId = self.tenantDict.get(self.username, False)
self.userId = self.userDict.get(self.username, False)
if self.tenantId and self.userId:
self.boundRoleList = self.conn.roles.roles_for_user(user=self.userId,
tenant=self.tenantId)
self.boundRoleDict = {str(item.name):str(item.id) for item in self.boundRoleList}
else:
self.boundRoleDict = {}
except BadRequest as e:
LOG.debug(e)
raise RequestException(e)
except IndexError as e:
LOG.debug(e)
raise RequestException("No role named %s" % MEMBER_ROLE)
def _isRequestValid(self):
return True
def _isAccountExist(self):
return self._isTenantNameExist() or self._isUserNameExist()
def _isTenantNameExist(self):
return self.username in self.tenantDict
def _isUserNameExist(self):
return self.username in self.userDict
def _isBound2Role(self):
return MEMBER_ROLE in self.boundRoleDict
def _createTenant(self):
if not self._isTenantNameExist():
self.conn.tenants.create(tenant_name=self.username,
description=self.description,
enabled=True)
self._fetchInfo()
def _deleteTenant(self):
if self._isTenantNameExist():
self.conn.tenants.delete(tenant=self.tenantId)
self._fetchInfo()
def _createUser(self):
self._createTenant()
if not self._isUserNameExist():
self.conn.users.create(name=self.username,
password=self.password,
email=self.email,
tenant_id=self.tenantId,
enabled=False)
self._fetchInfo()
def _deleteUser(self):
if self._isUserNameExist():
self.conn.users.delete(user=self.userId)
self._fetchInfo()
def _addRole(self):
if not self._isBound2Role():
self.conn.roles.add_user_role(self.userId, self.memberRoleId, self.tenantId)
self._fetchInfo()
def _getQuota(self):
quotaDict = {}
quotaDict["nova"] = self.novaConn.quotas.get(tenant_id=self.tenantId)
quotaDict["cinder"] = self.cinderConn.quotas.get(tenant_id=self.tenantId)
return quotaDict
    def _updateQuota(self):
        """Push self.quota to the services that own each setting.

        "volumes" and "gigabytes" belong to cinder, so they are stripped
        from the copy sent to nova and passed to cinder explicitly.
        Assumes self.quota always contains both keys — the ``del`` calls
        would raise KeyError otherwise.
        """
        nova_quota = self.quota.copy()
        del nova_quota["volumes"]
        del nova_quota["gigabytes"]
        self.novaConn.quotas.update(tenant_id=self.tenantId, **nova_quota)
        self.cinderConn.quotas.update(tenant_id=self.tenantId,
                                      volumes=self.quota["volumes"],
                                      gigabytes=self.quota["gigabytes"]
                                      )
|
'''
Use a git repository as a Pillar source
---------------------------------------
.. note::
This external pillar has been rewritten for the :doc:`2015.8.0
</topics/releases/2015.8.0>` release. The old method of configuring this
external pillar will be maintained for a couple releases, allowing time for
configurations to be updated to reflect the new usage.
This external pillar allows for a Pillar top file and Pillar SLS files to be
sourced from a git repository.
However, since git_pillar does not have an equivalent to the
:conf_master:`pillar_roots` parameter, configuration is slightly different. The
Pillar top file must still contain the relevant environment, like so:
.. code-block:: yaml
base:
'*':
- foo
The branch/tag which maps to that environment must then be specified along with
the repo's URL. Configuration details can be found below.
.. _git-pillar-pre-2015-8-0:
Configuring git_pillar for Salt releases before 2015.8.0
========================================================
For Salt releases earlier than :doc:`2015.8.0 </topics/releases/2015.8.0>`,
GitPython is the only supported provider for git_pillar. Individual
repositories can be configured under the :conf_master:`ext_pillar`
configuration parameter like so:
.. code-block:: yaml
ext_pillar:
- git: master https://gitserver/git-pillar.git root=subdirectory
The repository is specified in the format ``<branch> <repo_url>``, with an
optional ``root`` parameter (added in the :doc:`2014.7.0
</topics/releases/2014.7.0>` release) which allows the pillar SLS files to be
served up from a subdirectory (similar to :conf_master:`gitfs_root` in gitfs).
To use more than one branch from the same repo, multiple lines must be
specified under :conf_master:`ext_pillar`:
.. code-block:: yaml
ext_pillar:
- git: master https://gitserver/git-pillar.git
- git: dev https://gitserver/git-pillar.git
To remap a specific branch to a specific Pillar environment, use the format
``<branch>:<env>``:
.. code-block:: yaml
ext_pillar:
- git: develop:dev https://gitserver/git-pillar.git
- git: master:prod https://gitserver/git-pillar.git
In this case, the ``develop`` branch would need its own ``top.sls`` with a
``dev`` section in it, like this:
.. code-block:: yaml
dev:
'*':
- bar
The ``master`` branch would need its own ``top.sls`` with a ``prod`` section in
it:
.. code-block:: yaml
prod:
'*':
- bar
If ``__env__`` is specified as the branch name, then git_pillar will use the
branch specified by :conf_master:`gitfs_base`:
.. code-block:: yaml
ext_pillar:
- git: __env__ https://gitserver/git-pillar.git root=pillar
The corresponding Pillar top file would look like this:
.. code-block:: yaml
{{env}}:
'*':
- bar
.. _git-pillar-2015-8-0-and-later:
Configuring git_pillar for Salt releases 2015.8.0 and later
===========================================================
.. note::
In version 2015.8.0, the method of configuring git external pillars has
changed, and now more closely resembles that of the :ref:`Git Fileserver
Backend <tutorial-gitfs>`. If Salt detects the old configuration schema, it
will use the pre-2015.8.0 code to compile the external pillar. A warning
will also be logged.
Beginning with Salt version 2015.8.0, pygit2_ is now supported in addition to
GitPython_ (Dulwich_ will not be supported for the foreseeable future). The
requirements for GitPython_ and pygit2_ are the same as for gitfs, as described
:ref:`here <gitfs-dependencies>`.
.. important::
git_pillar has its own set of global configuration parameters. While it may
seem intuitive to use the global gitfs configuration parameters
(:conf_master:`gitfs_base`, etc.) to manage git_pillar, this will not work.
The main difference for this is the fact that the different components
which use Salt's git backend code do not all function identically. For
instance, in git_pillar it is necessary to specify which branch/tag to be
used for git_pillar remotes. This is the reverse behavior from gitfs, where
branches/tags make up your environments.
See :ref:`here <git_pillar-config-opts>` for documentation on the
git_pillar configuration options and their usage.
Here is an example git_pillar configuration:
.. code-block:: yaml
ext_pillar:
- git:
# Use 'prod' instead of the branch name 'production' as the environment
- production https://gitserver/git-pillar.git:
- env: prod
# Use 'dev' instead of the branch name 'develop' as the environment
- develop https://gitserver/git-pillar.git:
- env: dev
# No per-remote config parameters (and no trailing colon), 'qa' will
# be used as the environment
- qa https://gitserver/git-pillar.git
# SSH key authentication
- master git@other-git-server:pillardata-ssh.git:
# Pillar SLS files will be read from the 'pillar' subdirectory in
# this repository
- root: pillar
- privkey: /path/to/key
- pubkey: /path/to/key.pub
- passphrase: CorrectHorseBatteryStaple
# HTTPS authentication
- master https://other-git-server/pillardata-https.git:
- user: git
- password: CorrectHorseBatteryStaple
The main difference between this and the old way of configuring git_pillar is
that multiple remotes can be configured under one ``git`` section under
:conf_master:`ext_pillar`. More than one ``git`` section can be used, but it is
not necessary. Remotes will be evaluated sequentially.
Per-remote configuration parameters are supported (similar to :ref:`gitfs
<gitfs-per-remote-config>`), and global versions of the git_pillar
configuration parameters can also be set.
With the addition of pygit2_ support, git_pillar can now interact with
authenticated remotes. Authentication works just like in gitfs (as outlined in
the :ref:`Git Fileserver Backend Walkthrough <gitfs-authentication>`), only
with the global authentication parameter names prefixed with ``git_pillar``
instead of ``gitfs`` (e.g. :conf_master:`git_pillar_pubkey`,
:conf_master:`git_pillar_privkey`, :conf_master:`git_pillar_passphrase`, etc.).
.. _GitPython: https://github.com/gitpython-developers/GitPython
.. _pygit2: https://github.com/libgit2/pygit2
.. _Dulwich: https://www.samba.org/~jelmer/dulwich/
'''
from __future__ import absolute_import
import copy
import logging
import hashlib
import os
import salt.utils.gitfs
import salt.utils.dictupdate
from salt.exceptions import FileserverConfigError
from salt.pillar import Pillar
import salt.ext.six as six
try:
import git
HAS_GITPYTHON = True
except ImportError:
HAS_GITPYTHON = False
# Per-remote configuration parameters accepted by the new-style git_pillar.
PER_REMOTE_OVERRIDES = ('env', 'root', 'ssl_verify')
log = logging.getLogger(__name__)
# Name under which this external pillar is referenced in ext_pillar config.
__virtualname__ = 'git'
def __virtual__():
    '''
    Only load if a usable git provider is available.

    For legacy (string) configuration this requires GitPython newer than
    0.3.0; for the new configuration schema it requires that
    salt.utils.gitfs.GitPillar accepts the master opts.

    :return: __virtualname__ on success, False otherwise.
    '''
    git_ext_pillars = [x for x in __opts__['ext_pillar'] if 'git' in x]
    if not git_ext_pillars:
        # No git external pillars were configured
        return False
    for ext_pillar in git_ext_pillars:
        if isinstance(ext_pillar['git'], six.string_types):
            # Verification of legacy git pillar configuration
            if not HAS_GITPYTHON:
                log.error(
                    'Git-based ext_pillar is enabled in configuration but '
                    'could not be loaded, is GitPython installed?'
                )
                return False
            # Compare numeric version components, not raw strings: a
            # lexicographic comparison wrongly rejects versions such as
            # '0.10.2' because '0.10.2' < '0.3.0' as strings.
            version_parts = []
            for piece in git.__version__.split('.')[:3]:
                if not piece.isdigit():
                    break
                version_parts.append(int(piece))
            if not tuple(version_parts) > (0, 3, 0):
                return False
            return __virtualname__
        else:
            # Verification of new git pillar configuration
            try:
                salt.utils.gitfs.GitPillar(__opts__)
                # Initialization of the GitPillar object did not fail, so we
                # know we have valid configuration syntax and that a valid
                # provider was detected.
                return __virtualname__
            except FileserverConfigError:
                pass
    return False
def ext_pillar(minion_id, repo, pillar_dirs):
    '''
    Checkout the ext_pillar sources and compile the resulting pillar SLS.

    A string ``repo`` means the pre-2015.8.0 configuration schema and is
    delegated to _legacy_git_pillar(); otherwise the new GitPillar-based
    path is used and per-environment pillars are merged into one dict.
    '''
    if isinstance(repo, six.string_types):
        return _legacy_git_pillar(minion_id, repo, pillar_dirs)
    else:
        # Work on a copy of the master opts so pillar_roots can be
        # rewritten per environment without side effects.
        opts = copy.deepcopy(__opts__)
        opts['pillar_roots'] = {}
        pillar = salt.utils.gitfs.GitPillar(opts)
        pillar.init_remotes(repo, PER_REMOTE_OVERRIDES)
        pillar.checkout()
        ret = {}
        merge_strategy = __opts__.get(
            'pillar_source_merging_strategy',
            'smart'
        )
        merge_lists = __opts__.get(
            'pillar_merge_lists',
            False
        )
        for pillar_dir, env in six.iteritems(pillar.pillar_dirs):
            log.debug(
                'git_pillar is processing pillar SLS from {0} for pillar '
                'env \'{1}\''.format(pillar_dir, env)
            )
            # All checked-out dirs that map to the same environment.
            all_dirs = [d for (d, e) in six.iteritems(pillar.pillar_dirs)
                        if env == e]
            # Ensure that the current pillar_dir is first in the list, so that
            # the pillar top.sls is sourced from the correct location.
            pillar_roots = [pillar_dir]
            pillar_roots.extend([x for x in all_dirs if x != pillar_dir])
            opts['pillar_roots'] = {env: pillar_roots}
            local_pillar = Pillar(opts, __grains__, minion_id, env)
            # ext=False avoids recursing back into external pillars.
            ret = salt.utils.dictupdate.merge(
                ret,
                local_pillar.compile_pillar(ext=False),
                strategy=merge_strategy,
                merge_lists=merge_lists
            )
        return ret
class _LegacyGitPillar(object):
    '''
    Deal with the remote git repository for Pillar (pre-2015.8.0 schema).

    Each (branch, repo URL) pair gets its own cached checkout under
    ``<cachedir>/pillar_gitfs/<hash>``.
    '''
    def __init__(self, branch, repo_location, opts):
        '''
        Try to initialize the Git repo object
        '''
        self.branch = self.map_branch(branch, opts)
        self.rp_location = repo_location
        self.opts = opts
        self._envs = set()
        self.working_dir = ''
        self.repo = None
        # Cache directory name is a hash of "<branch> <repo URL>".
        hash_type = getattr(hashlib, opts.get('hash_type', 'md5'))
        hash_str = '{0} {1}'.format(self.branch, self.rp_location)
        repo_hash = hash_type(hash_str).hexdigest()
        rp_ = os.path.join(self.opts['cachedir'], 'pillar_gitfs', repo_hash)
        if not os.path.isdir(rp_):
            os.makedirs(rp_)
        try:
            self.repo = git.Repo.init(rp_)
        except (git.exc.NoSuchPathError,
                git.exc.InvalidGitRepositoryError) as exc:
            log.error('GitPython exception caught while '
                      'initializing the repo: {0}. Maybe '
                      'git is not available.'.format(exc))
        # Git directory we are working on
        # Should be the same as self.repo.working_dir
        self.working_dir = rp_
        # self.repo stays None when init failed above, so guard on type.
        if isinstance(self.repo, git.Repo):
            if not self.repo.remotes:
                try:
                    self.repo.create_remote('origin', self.rp_location)
                    # ignore git ssl verification if requested
                    if self.opts.get('pillar_gitfs_ssl_verify', True):
                        self.repo.git.config('http.sslVerify', 'true')
                    else:
                        self.repo.git.config('http.sslVerify', 'false')
                except os.error:
                    # This exception occurs when two processes are
                    # trying to write to the git config at once, go
                    # ahead and pass over it since this is the only
                    # write.
                    # This should place a lock down.
                    pass
            else:
                # Remote already exists; repoint it if the URL changed.
                if self.repo.remotes.origin.url != self.rp_location:
                    self.repo.remotes.origin.config_writer.set(
                        'url', self.rp_location)
    def map_branch(self, branch, opts=None):
        """Resolve '__env__' and 'branch:env' notation to a real branch name."""
        opts = __opts__ if opts is None else opts
        if branch == '__env__':
            branch = opts.get('environment') or 'base'
            if branch == 'base':
                branch = opts.get('gitfs_base') or 'master'
        elif ':' in branch:
            branch = branch.split(':', 1)[0]
        return branch
    def update(self):
        '''
        Ensure you are following the latest changes on the remote.

        :return: boolean whether fetch + checkout both succeeded.
        '''
        try:
            log.debug('Updating fileserver for git_pillar module')
            self.repo.git.fetch()
        except git.exc.GitCommandError as exc:
            log.error('Unable to fetch the latest changes from remote '
                      '{0}: {1}'.format(self.rp_location, exc))
            return False
        try:
            # Detached checkout of the remote-tracking ref.
            self.repo.git.checkout('origin/{0}'.format(self.branch))
        except git.exc.GitCommandError as exc:
            log.error('Unable to checkout branch '
                      '{0}: {1}'.format(self.branch, exc))
            return False
        return True
    def envs(self):
        '''
        Return a list of refs that can be used as environments.

        Branch 'master' is exposed as 'base'; stale remote branches are
        skipped; tags are included as-is.
        '''
        if isinstance(self.repo, git.Repo):
            remote = self.repo.remote()
            for ref in self.repo.refs:
                # Strip the leading 'origin/' (or similar) prefix.
                parted = ref.name.partition('/')
                short = parted[2] if parted[2] else parted[0]
                if isinstance(ref, git.Head):
                    if short == 'master':
                        short = 'base'
                    if ref not in remote.stale_refs:
                        self._envs.add(short)
                elif isinstance(ref, git.Tag):
                    self._envs.add(short)
        return list(self._envs)
def _legacy_git_pillar(minion_id, repo_string, pillar_dirs):
    '''
    Support pre-Beryllium config schema.

    ``repo_string`` has the form ``<branch>[:<env>] <url> [key=val ...]``.
    ``pillar_dirs`` is shared mutable state used to deduplicate
    (pillar_dir, branch) combinations across invocations.
    '''
    if pillar_dirs is None:
        return
    # split the branch, repo name and optional extra (key=val) parameters.
    options = repo_string.strip().split()
    branch_env = options[0]
    repo_location = options[1]
    root = ''
    for extraopt in options[2:]:
        # Support multiple key=val attributes as custom parameters.
        DELIM = '='
        if DELIM not in extraopt:
            # NOTE(review): only logged — the malformed option still falls
            # through to _extract_key_val below.
            log.error('Incorrectly formatted extra parameter. '
                      'Missing \'{0}\': {1}'.format(DELIM, extraopt))
        key, val = _extract_key_val(extraopt, DELIM)
        if key == 'root':
            root = val
        else:
            log.warning('Unrecognized extra parameter: {0}'.format(key))
    # environment is "different" from the branch
    cfg_branch, _, environment = branch_env.partition(':')
    gitpil = _LegacyGitPillar(cfg_branch, repo_location, __opts__)
    branch = gitpil.branch
    if environment == '':
        # Default env: 'base' for master, otherwise the branch name.
        if branch == 'master':
            environment = 'base'
        else:
            environment = branch
    # normpath is needed to remove appended '/' if root is empty string.
    pillar_dir = os.path.normpath(os.path.join(gitpil.working_dir, root))
    pillar_dirs.setdefault(pillar_dir, {})
    if cfg_branch == '__env__' and branch not in ['master', 'base']:
        gitpil.update()
    elif pillar_dirs[pillar_dir].get(branch, False):
        return {}  # we've already seen this combo
    pillar_dirs[pillar_dir].setdefault(branch, True)
    # Don't recurse forever-- the Pillar object will re-call the ext_pillar
    # function
    if __opts__['pillar_roots'].get(branch, []) == [pillar_dir]:
        return {}
    opts = copy.deepcopy(__opts__)
    opts['pillar_roots'][environment] = [pillar_dir]
    pil = Pillar(opts, __grains__, minion_id, branch)
    return pil.compile_pillar(ext=False)
def _update(branch, repo_location):
    '''
    Ensure you are following the latest changes on the remote.

    :return: boolean whether the fetch/checkout worked.
    '''
    gitpil = _LegacyGitPillar(branch, repo_location, __opts__)
    return gitpil.update()
def _envs(branch, repo_location):
    '''
    Return a list of refs that can be used as environments
    for the given legacy git pillar remote.
    '''
    gitpil = _LegacyGitPillar(branch, repo_location, __opts__)
    return gitpil.envs()
def _extract_key_val(kv, delimiter='='):
'''Extract key and value from key=val string.
Example:
>>> _extract_key_val('foo=bar')
('foo', 'bar')
'''
pieces = kv.split(delimiter)
key = pieces[0]
val = delimiter.join(pieces[1:])
return key, val
|
import os
import re
import asyncio
import logging
from collections import OrderedDict
from pypeman.message import Message
from pypeman.errors import PypemanConfigError
# Module-level logger shared by all message-store implementations.
logger = logging.getLogger("pypeman.store")
# Timestamp prefix format used by FileMessageStore file names (YYYYMMDD_HHMM).
DATE_FORMAT = '%Y%m%d_%H%M'
class MessageStoreFactory():
    """ Message store factory class can generate Message store instance for specific store_id. """
    def get_store(self, store_id):
        """
        Abstract hook — subclasses return a concrete MessageStore.
        :param store_id: identifier of corresponding message store.
        :return: A MessageStore corresponding to correct store_id.
        """
class MessageStore():
    """ A MessageStore keeps a history of processed messages. Mainly used in channels.

    Base-class coroutines are no-op stubs (they implicitly return None);
    concrete stores override them.
    """
    async def start(self):
        """
        Called at startup to initialize store.
        """
    async def store(self, msg):
        """
        Store a message in the store.
        :param msg: The message to store.
        :return: Id for this specific message.
        """
    async def change_message_state(self, id, new_state):
        """
        Change the `id` message state.
        :param id: Message specific store id.
        :param new_state: Target state.
        """
    async def get(self, id):
        """
        Return one message corresponding to given `id` with its status.
        :param id: Message id. Message store dependant.
        :return: A dict `{'id':<message_id>, 'state': <message_state>, 'message': <message_object>}`.
        """
    async def search(self, start=0, count=10, order_by='timestamp'):
        """
        Return a list of messages with store specific `id` and processed status.
        :param start: First element.
        :param count: Count of elements since first element.
        :param order_by: Message order. Allowed values : ['timestamp', 'status'].
        :return: A list of dict `{'id':<message_id>, 'state': <message_state>, 'message': <message_object>}`.
        """
    async def total(self):
        """
        :return: total count of messages
        """
class NullMessageStoreFactory(MessageStoreFactory):
    """ Return an NullMessageStore that do nothing at all. """
    def get_store(self, store_id):
        # store_id is irrelevant: every store is an identical no-op.
        return NullMessageStore()
class NullMessageStore(MessageStore):
    """ For testing purpose: stores nothing, finds nothing. """
    async def store(self, msg):
        # Discard the message; callers get no id back.
        return None
    async def get(self, id):
        return None
    async def search(self, **kwargs):
        return None
    async def total(self):
        return 0
class FakeMessageStoreFactory(MessageStoreFactory):
    """ Return an Fake message store """
    def get_store(self, store_id):
        # store_id is ignored; all fake stores behave identically.
        return FakeMessageStore()
class FakeMessageStore(MessageStore):
    """Testing double: pretends to store messages and returns canned data."""
    async def store(self, msg):
        # Only log what a real store would have persisted.
        logger.debug("Should store message %s", msg)
        return 'fake_id'
    async def get(self, id):
        # Every message appears already processed, with no payload.
        return dict(id=id, state='processed', message=None)
    async def search(self, **kwargs):
        return list()
    async def total(self):
        return 0
class MemoryMessageStoreFactory(MessageStoreFactory):
    """ Return a Memory message store. All message are lost at pypeman stop. """
    def __init__(self):
        # Shared backing dict: one OrderedDict of messages per store_id.
        self.base_dict = {}
    def get_store(self, store_id):
        return MemoryMessageStore(self.base_dict, store_id)
class MemoryMessageStore(MessageStore):
    """ Store messages in memory.

    Messages live in an OrderedDict shared through the factory's
    base_dict, keyed by message uuid, in insertion order.
    """
    def __init__(self, base_dict, store_id):
        super().__init__()
        self.messages = base_dict.setdefault(store_id, OrderedDict())
    async def store(self, msg):
        msg_id = msg.uuid
        self.messages[msg_id] = {'id': msg_id, 'state': Message.PENDING, 'timestamp': msg.timestamp, 'message': msg.to_dict()}
        return msg_id
    async def change_message_state(self, id, new_state):
        self.messages[id]['state'] = new_state
    async def get(self, id):
        # Copy the stored dict so the deserialized message does not
        # overwrite the serialized form kept in self.messages.
        resp = dict(self.messages[id])
        resp['message'] = Message.from_dict(resp['message'])
        return resp
    async def search(self, start=0, count=10, order_by='timestamp'):
        # A leading '-' requests descending order on the remaining key.
        # NOTE(review): the base class documents order_by 'status', but
        # stored dicts use the key 'state' — sorting by 'status' would
        # raise KeyError; confirm the intended key name.
        if order_by.startswith('-'):
            reverse = True
            sort_key = order_by[1:]
        else:
            reverse = False
            sort_key = order_by
        result = []
        for value in sorted(self.messages.values(), key=lambda x: x[sort_key], reverse=reverse):
            resp = dict(value)
            resp['message'] = Message.from_dict(resp['message'])
            result.append(resp)
        return result[start: start + count]
    async def total(self):
        return len(self.messages)
class FileMessageStoreFactory(MessageStoreFactory):
    """
    Generate a FileMessageStore message store instance.
    Store a file in `<base_path>/<store_id>/<year>/<month>/<day>/` hierarchy.
    """
    # TODO add an option to regularly archive old file or delete them
    def __init__(self, path):
        super().__init__()
        if path is None:
            raise PypemanConfigError('file message store requires a path')
        self.base_path = path
    def get_store(self, store_id):
        return FileMessageStore(self.base_path, store_id)
class FileMessageStore(MessageStore):
    """ Store each message as a file in a `<base_path>/<store_id>/<year>/<month>/<day>/` hierarchy.

    The file name (`<YYYYMMDD_HHMM>_<uuid>`) doubles as the message id;
    message state is kept in a sibling `<id>.meta` file.
    """
    # TODO file access should be done in another thread. Waiting for file backend.
    def __init__(self, path, store_id):
        super().__init__()
        self.base_path = os.path.join(path, store_id)
        # Match msg file name
        self.msg_re = re.compile(r'^([0-9]{8})_([0-9]{2})([0-9]{2})_[0-9abcdef]*$')
        try:
            # Try to make dirs if necessary
            os.makedirs(os.path.join(self.base_path))
        except FileExistsError:
            pass
        # Cached message count, refreshed by start().
        self._total = 0
    async def start(self):
        self._total = await self.count_msgs()
    async def store(self, msg):
        """ Store a file in `<base_path>/<store_id>/<year>/<month>/<day>/` hierarchy."""
        # TODO implement a safer store to avoid broken messages
        # The filename is the file id
        filename = "{}_{}".format(msg.timestamp.strftime(DATE_FORMAT), msg.uuid)
        dirs = os.path.join(str(msg.timestamp.year), "%02d" % msg.timestamp.month, "%02d" % msg.timestamp.day)
        try:
            # Try to make dirs if necessary
            os.makedirs(os.path.join(self.base_path, dirs))
        except FileExistsError:
            pass
        file_path = os.path.join(dirs, filename)
        # Write message to file
        with open(os.path.join(self.base_path, file_path), "w") as f:
            f.write(msg.to_json())
        await self.change_message_state(file_path, Message.PENDING)
        self._total += 1
        return file_path
    async def change_message_state(self, id, new_state):
        # State lives in a companion "<id>.meta" file.
        with open(os.path.join(self.base_path, id + '.meta'), "w") as f:
            f.write(new_state)
    async def get_message_state(self, id):
        with open(os.path.join(self.base_path, id + '.meta'), "r") as f:
            state = f.read()
        return state
    async def get(self, id):
        # NOTE(review): raises a bare IndexError for a missing id —
        # callers appear to rely on that; confirm before changing.
        if not os.path.exists(os.path.join(self.base_path, id)):
            raise IndexError
        with open(os.path.join(self.base_path, id), "rb") as f:
            msg = Message.from_json(f.read().decode('utf-8'))
        return {'id': id, 'state': await self.get_message_state(id), 'message': msg}
    async def sorted_list_directories(self, path, reverse=True):
        """
        :param path: Base path
        :param reverse: reverse order
        :return: List of directories in specified path ordered
        """
        return sorted([d for d in os.listdir(path) if os.path.isdir(os.path.join(path, d))], reverse=reverse)
    async def count_msgs(self):
        """
        Count messages by listing all directories. To be used at startup.
        """
        count = 0
        for year in await self.sorted_list_directories(os.path.join(self.base_path)):
            for month in await self.sorted_list_directories(os.path.join(self.base_path, year)):
                for day in await self.sorted_list_directories(os.path.join(self.base_path, year, month)):
                    for msg_name in sorted(os.listdir(os.path.join(self.base_path, year, month, day))):
                        # Only count files matching the message-name pattern
                        # (skips the .meta companions).
                        found = self.msg_re.match(msg_name)
                        if found:
                            count +=1
        return count
    async def search(self, start=0, count=10, order_by='timestamp'):
        # TODO better performance for slicing by counting file in dirs ?
        if order_by.startswith('-'):
            reverse = True
            sort_key = order_by[1:]
        else:
            reverse = False
            sort_key = order_by
        # TODO handle sort_key
        result = []
        end = start + count
        position = 0
        # Walk year/month/day dirs in (reverse-)sorted order; since file
        # names start with the timestamp this approximates timestamp order.
        for year in await self.sorted_list_directories(os.path.join(self.base_path), reverse=reverse):
            for month in await self.sorted_list_directories(os.path.join(self.base_path, year), reverse=reverse):
                for day in await self.sorted_list_directories(os.path.join(self.base_path, year, month), reverse=reverse):
                    for msg_name in sorted(os.listdir(os.path.join(self.base_path, year, month, day)), reverse=reverse):
                        found = self.msg_re.match(msg_name)
                        if found:
                            if start <= position < end:
                                mid = os.path.join(year, month, day, msg_name)
                                result.append(await self.get(mid))
                            position += 1
        return result
    async def total(self):
        return self._total
|
"""Eris Codebase Monitor
Eris maintains an up-to-date set of reports for every file in a codebase.
A status indicator summarises the state of each report, and a report is viewed
by selecting this status indicator with the cursor.
The reports are cached in the codebase's root directory in a ".eris"
directory.
"""
import asyncio
import contextlib
import functools
import gzip
import importlib
import importlib.resources
import itertools
import math
import multiprocessing
import os
import pickle
import shutil
import signal
import subprocess
import sys
import time
import docopt
import pygments.styles
import pyinotify
import eris
import eris.fill3 as fill3
import eris.sorted_collection as sorted_collection
import eris.terminal as terminal
import eris.termstr as termstr
import eris.tools as tools
import eris.worker as worker
import eris.paged_list as paged_list
# Command-line usage/help text for the program.
USAGE = """
Usage:
eris [options] <directory>
eris -h | --help
eris -i | --info
Example:
# eris my_project
Options:
-h, --help Show the full help.
-i, --info Show information about the available tools.
-w COUNT, --workers=COUNT The number of processes working in parallel.
By default it is the number of cpus minus 1.
-e "COMMAND", --editor="COMMAND" The command used to start the editor, in
the *edit command. It may contain options.
-t THEME, --theme=THEME The pygment theme used for syntax
highlighting. Defaults to "native".
-c TYPE, --compression=TYPE The type of compression used in the cache:
gzip, lzma, bz2, or none. Defaults to gzip.
"""
# Key-binding reference text shown on the interactive help screen.
KEYS_DOC = """Keys:
arrow keys, page up/down, mouse - Move the cursor or scroll the result pane.
tab - Change the focus between summary and result pane.
q, esc - Quit.
h - Show the help screen. (toggle)
o - Orient the result pane as portrait or landscape. (toggle)
l - Show the activity log. (toggle)
e - Edit the current file with an editor defined by -e, $EDITOR or $VISUAL.
n - Move to the next issue.
N - Move to the next issue of the current tool.
s - Sort files by type, or by directory location. (toggle)
r - Refresh the currently selected report.
R - Refresh all reports of the current tool.
f - Resize the focused pane to the full screen. (toggle)
x - Open the current file with xdg-open.
"""
class Entry:
    """One summary row: a file path plus its per-tool Result widgets."""

    # Widest row (result count) seen so far; shared across all entries.
    MAX_WIDTH = 0

    def __init__(self, path, results, change_time, highlighted=None,
                 set_results=True):
        self.path = path
        self.change_time = change_time
        self.highlighted = highlighted
        self.results = results
        if set_results:
            # FIX: this is missed for entries appended later
            for result in results:
                result.entry = self
        self.widget = fill3.Row(results)
        self.appearance_cache = None
        self.last_width = None

    def __eq__(self, other):
        # Entries are identified solely by their path.  Returning
        # NotImplemented (rather than raising AttributeError) lets
        # comparisons with non-Entry objects fall back to identity.
        if not isinstance(other, Entry):
            return NotImplemented
        return self.path == other.path

    def __hash__(self):
        # Keep hashing consistent with __eq__ (path-based identity);
        # defining __eq__ alone would make instances unhashable.
        return hash(self.path)

    def __len__(self):
        return len(self.results)

    def __getitem__(self, index):
        return self.results[index]

    def appearance_min(self):
        """Return the (cached) one-line appearance, rebuilding it when the
        shared MAX_WIDTH has changed since the cache was filled."""
        if self.appearance_cache is None \
           or self.last_width != Entry.MAX_WIDTH:
            self.last_width = Entry.MAX_WIDTH
            if self.highlighted is not None:
                self.results[self.highlighted].is_highlighted = True
            row_appearance = self.widget.appearance_min()
            path = tools.path_colored(self.path)
            padding = " " * (self.last_width - len(self.results) + 1)
            self.appearance_cache = [row_appearance[0] + padding + path]
            if self.highlighted is not None:
                self.results[self.highlighted].is_highlighted = False
        return self.appearance_cache

    def as_html(self):
        """Render the row as (html_fragment, styles) for report export."""
        html_parts = []
        styles = set()
        for result in self.widget:
            result_html, result_styles = result.as_html()
            html_parts.append(result_html)
            styles.update(result_styles)
        path = tools.path_colored(self.path)
        padding = " " * (Entry.MAX_WIDTH - len(self.widget) + 1)
        path_html, path_styles = termstr.TermStr(padding + path).as_html()
        return "".join(html_parts) + path_html, styles.union(path_styles)
def is_path_excluded(path):
    """Return True when any component of *path* is hidden (starts with '.')."""
    for component in path.split(os.path.sep):
        if component.startswith("."):
            return True
    return False
def codebase_files(path, skip_hidden_directories=True):
    """Yield every non-hidden file path under *path*, in os.walk order."""
    for dirpath, dirnames, filenames in os.walk(path):
        if skip_hidden_directories:
            # Prune hidden directories in place so os.walk skips them.
            dirnames[:] = [name for name in dirnames
                           if not is_path_excluded(name)]
        for filename in filenames:
            if not is_path_excluded(filename):
                yield os.path.join(dirpath, filename)
def fix_paths(root_path, paths):
    """Lazily rewrite *paths* relative to *root_path*, prefixed with '.'."""
    for path in paths:
        yield os.path.join(".", os.path.relpath(path, root_path))
def blend_color(a_color, b_color, transparency):
    """Mix two RGB tuples: transparency=1 gives a_color, 0 gives b_color."""
    weight_b = 1 - transparency
    return tuple(int(a * transparency + b * weight_b)
                 for a, b in zip(a_color, b_color))
def highlight_str(line, highlight_color, transparency):
    """Return *line* with every character style blended toward
    *highlight_color* by the given transparency."""
    @functools.lru_cache()
    def blend_style(style):
        # Normalize xterm palette indexes to RGB tuples before blending.
        fg_color = (style.fg_color if type(style.fg_color) == tuple
                    else termstr.xterm_color_to_rgb(style.fg_color))
        bg_color = (style.bg_color if type(style.bg_color) == tuple
                    else termstr.xterm_color_to_rgb(style.bg_color))
        return termstr.CharStyle(
            blend_color(fg_color, highlight_color, transparency),
            blend_color(bg_color, highlight_color, transparency),
            is_bold=style.is_bold, is_italic=style.is_italic,
            is_underlined=style.is_underlined)
    return termstr.TermStr(line).transform_style(blend_style)
def in_green(str_):
    """Return *str_* styled with the terminal color 'lime'."""
    return termstr.TermStr(str_, termstr.CharStyle(termstr.Color.lime))
# Direction vectors as (dx, dy) deltas.
_UP, _DOWN, _LEFT, _RIGHT = (0, -1), (0, 1), (-1, 0), (1, 0)
def directory_sort(entry):
    """Sort key: group entries by directory, then extension, then name."""
    path = entry.path
    dirname = os.path.dirname(path)
    extension = tools.splitext(path)[1]
    basename = os.path.basename(path)
    return (dirname, extension, basename)
def type_sort(entry):
    """Sort key: group entries by extension, then directory, then name."""
    path = entry.path
    extension = tools.splitext(path)[1]
    dirname = os.path.dirname(path)
    basename = os.path.basename(path)
    return (extension, dirname, basename)
class Summary:
    """Scrollable summary pane holding one Entry row per codebase file."""
    def __init__(self, root_path, jobs_added_event):
        self._root_path = root_path
        self._jobs_added_event = jobs_added_event
        self._view_widget = fill3.View.from_widget(self)
        self.is_directory_sort = True
        self._old_entries = []
        # (x, y) = (column within row, row index).
        self.__cursor_position = (0, 0)
        self.reset()
    def reset(self):
        """Clear all entries, counters, and the shared Entry.MAX_WIDTH."""
        Entry.MAX_WIDTH = 0
        self._max_path_length = 0
        self.result_total = 0
        self.completed_total = 0
        self.is_loaded = False
        self.closest_placeholder_generator = None
        sort_func = directory_sort if self.is_directory_sort else type_sort
        self._entries = sorted_collection.SortedCollection([], key=sort_func)
def __getstate__(self):
state = self.__dict__.copy()
state["closest_placeholder_generator"] = None
state["_jobs_added_event"] = None
summary_path = os.path.join(tools.CACHE_PATH, "summary_dir")
open_compressed = functools.partial(gzip.open, compresslevel=1)
x, y = self.cursor_position()
if y == 0:
entries = []
else:
entries = itertools.chain(
[self._entries[y]], itertools.islice(self._entries, y),
itertools.islice(self._entries, y+1, None))
state["_old_entries"] = paged_list.PagedList(
entries, summary_path, 2000, 1, exist_ok=True,
open_func=open_compressed)
state["_entries"] = None
state["__cursor_position"] = (x, 0)
return state
    def __setstate__(self, state):
        """Restore pickled state, then rebuild the transient members via reset()."""
        self.__dict__ = state
        self.reset()
    @property
    def _cursor_position(self):
        # (x, y) cursor; kept behind a property so moves invalidate the
        # placeholder generator.
        return self.__cursor_position
    @_cursor_position.setter
    def _cursor_position(self, new_position):
        if new_position != self.__cursor_position:
            self.__cursor_position = new_position
            self.closest_placeholder_generator = None
    def sort_entries(self):
        """Re-sort entries according to the current sort mode."""
        key_func = directory_sort if self.is_directory_sort else type_sort
        self._entries = sorted_collection.SortedCollection(
            self._entries, key=key_func)
        self.closest_placeholder_generator = None
    def add_entry(self, entry):
        """Insert *entry* (deduplicated by path) and update totals/widths."""
        if entry in self._entries:
            return
        for result in entry:
            self.result_total += 1
            if result.is_completed:
                self.completed_total += 1
        Entry.MAX_WIDTH = max(len(entry), Entry.MAX_WIDTH)
        self._max_path_length = max(len(entry.path) - len("./"),
                                    self._max_path_length)
        entry_index = self._entries.insert(entry)
        x, y = self._cursor_position
        # Inserting at or above the cursor row shifts rows down; scroll
        # to keep the cursor on the same entry.
        if entry_index <= y:
            self.scroll(0, -1)
        self._jobs_added_event.set()
        if self.is_loaded:
            self.closest_placeholder_generator = None
    def on_file_added(self, path):
        """Create an Entry for a newly seen file with one pending Result per tool."""
        full_path = os.path.join(self._root_path, path)
        try:
            change_time = os.stat(full_path).st_ctime
        except OSError:
            # File vanished (or is unreadable) between discovery and stat.
            return
        row = [tools.Result(path, tool) for tool in tools.tools_for_path(path)]
        entry = Entry(path, row, change_time)
        self.add_entry(entry)
    def on_file_deleted(self, path):
        """Remove the entry for *path* and rewind totals, widths and cursor."""
        if os.path.exists(os.path.join(self._root_path, path)):
            # The file actually still exists; treat the event as spurious.
            return
        # Probe entry used only for path-based lookup.
        entry = Entry(path, [], None)
        try:
            index = self._entries.index(entry)
        except ValueError:
            return
        x, y = self._cursor_position
        if index < y:
            self.scroll(0, 1)
        for result in self._entries[index]:
            if result.is_completed:
                self.completed_total -= 1
            self.result_total -= 1
            result.delete()
        row = self._entries[index]
        # NOTE(review): reaches into SortedCollection internals (_keys,
        # _items) to delete in place — keep in sync with that class.
        del self._entries._keys[index]
        del self._entries._items[index]
        # Recompute the shared maxima only if this row could have set them.
        if len(row) == Entry.MAX_WIDTH:
            Entry.MAX_WIDTH = max((len(entry) for entry in self._entries),
                                  default=0)
        if (len(path) - 2) == self._max_path_length:
            self._max_path_length = max(((len(entry.path) - 2)
                                         for entry in self._entries), default=0)
        x, y = self._cursor_position
        if y == len(self._entries):
            self._cursor_position = x, y - 1
        self.closest_placeholder_generator = None
    def on_file_modified(self, path):
        """Refresh every result of the entry for *path*.

        :return: the matching Entry, or None if *path* is unknown —
            callers that dereference the return value should handle None.
        """
        entry = Entry(path, [], None)
        try:
            entry_index = self._entries.index(entry)
        except ValueError:
            return
        entry = self._entries[entry_index]
        for result in entry:
            self.refresh_result(result, only_completed=False)
        self.closest_placeholder_generator = None
        return entry
    @contextlib.contextmanager
    def keep_selection(self):
        """Context manager: restore the cursor to the same entry (by path)
        after the wrapped block mutates the entry list."""
        try:
            cursor_path = self.get_selection().path
        except AttributeError:
            # No current selection — nothing to restore.
            yield
            return
        x, y = self._cursor_position
        yield
        for index, row in enumerate(self._entries):
            if row.path == cursor_path:
                self._cursor_position = (x, index)
                return
        # Selected entry is gone; clamp the cursor to the last row.
        if y >= len(self._entries):
            self._cursor_position = (x, len(self._entries) - 1)
async def sync_with_filesystem(self, appearance_changed_event, log=None):
    """Replay the saved entries, then reconcile the summary with the files
    currently on disk: add new files, refresh changed ones, drop deleted
    ones. Yields to the event loop periodically so the UI stays live.

    NOTE(review): *log* defaults to None but is used unconditionally, so
    callers must always pass a Log instance — confirm and consider making
    the parameter required.
    """
    start_time = time.time()
    cache = {}  # path -> change_time of the entries restored from the save
    log.log_message("Started loading summary…")
    for index, entry in enumerate(self._old_entries):
        if index != 0 and index % 5000 == 0:
            log.log_message(f"Loaded {index} files…")
            await asyncio.sleep(0)  # let other tasks run
        self.add_entry(entry)
        if index % 1000 == 0:
            appearance_changed_event.set()
        cache[entry.path] = entry.change_time
    duration = time.time() - start_time
    log.log_message(f"Finished loading summary. {round(duration, 2)} secs")
    self.is_loaded = True
    self.closest_placeholder_generator = None
    log.log_message("Started sync with filesystem…")
    start_time = time.time()
    all_paths = set()
    for path in fix_paths(self._root_path, codebase_files(self._root_path)):
        await asyncio.sleep(0)
        all_paths.add(path)
        if path in cache:
            full_path = os.path.join(self._root_path, path)
            change_time = os.stat(full_path).st_ctime
            if change_time != cache[path]:  # changed since the save
                cache[path] = change_time
                entry = self.on_file_modified(path)
                entry.change_time = change_time
        else:
            self.on_file_added(path)
            appearance_changed_event.set()
    for path in cache.keys() - all_paths:  # deleted while not running
        await asyncio.sleep(0)
        self.on_file_deleted(path)
    duration = time.time() - start_time
    log.log_message(f"Finished sync with filesystem. {round(duration, 2)} secs")
def _sweep_up(self, x, y):
yield from reversed(self._entries[y][:x])
while True:
y = (y - 1) % len(self._entries)
yield from reversed(self._entries[y])
def _sweep_down(self, x, y):
yield from self._entries[y][x:]
while True:
y = (y + 1) % len(self._entries)
yield from self._entries[y]
def _sweep_combined(self, x, y):
for up_result, down_result in zip(self._sweep_up(x, y),
self._sweep_down(x, y)):
yield down_result
yield up_result
def _placeholder_sweep(self):
    """Yield pending results roughly in order of distance from the cursor."""
    x, y = self.cursor_position()
    for count, result in enumerate(self._sweep_combined(x, y)):
        if count > self.result_total:
            break  # the combined sweep is endless; stop after one pass
        if result.status == tools.Status.pending:
            yield result
async def get_closest_placeholder(self):
    """Return the pending result nearest to the cursor.

    The sweep generator is cached between calls and set back to None
    elsewhere whenever the summary changes. Raises StopAsyncIteration
    when no pending result remains.
    """
    if self.closest_placeholder_generator is None:
        self.closest_placeholder_generator = self._placeholder_sweep()
    try:
        return self.closest_placeholder_generator.send(None)
    except StopIteration:
        raise StopAsyncIteration
def appearance_dimensions(self):
    """Return the (width, height) needed to display every row in full."""
    full_width = self._max_path_length + 1 + Entry.MAX_WIDTH
    return full_width, len(self._entries)
def appearance_interval(self, interval):
    """Render the rows in *interval* (start_y, end_y) with the cursor
    cell highlighted."""
    start_y, end_y = interval
    x, y = self.cursor_position()
    # Temporarily mark the cursor cell, render, then undo the mark,
    # invalidating the row's cached appearance on both sides.
    self._entries[y].highlighted = x
    self._entries[y].appearance_cache = None
    appearance = fill3.Column(self._entries).appearance_interval(interval)
    self._entries[y].highlighted = None
    self._entries[y].appearance_cache = None
    return appearance
def _set_scroll_position(self, cursor_x, cursor_y, summary_height):
scroll_x, scroll_y = new_scroll_x, new_scroll_y = \
self._view_widget.position
if cursor_y < scroll_y:
new_scroll_y = max(cursor_y - summary_height + 1, 0)
if (scroll_y + summary_height - 1) < cursor_y:
new_scroll_y = cursor_y
self._view_widget.position = new_scroll_x, new_scroll_y
def _highlight_cursor_row(self, appearance, cursor_y):
    """Brighten the cursor's row in *appearance* (scrollbar cell excluded)."""
    scroll_x, scroll_y = self._view_widget.position
    row_index = cursor_y - scroll_y  # screen-relative row of the cursor
    row = appearance[row_index]
    appearance[row_index] = (highlight_str(
        row[:-1], termstr.Color.white, 0.8) + row[-1])
    return appearance
def appearance(self, dimensions):
    """Render the summary pane at *dimensions*, highlighting the cursor."""
    width, height = dimensions
    if not self._entries:
        return [" " * width] * height  # blank canvas when empty
    cursor_x, cursor_y = self.cursor_position()
    width, height = width - 1, height - 1  # minus one for the scrollbars
    self._set_scroll_position(cursor_x, cursor_y, height)
    return self._highlight_cursor_row(
        self._view_widget.appearance(dimensions), cursor_y)
def scroll(self, dx, dy):
    """Scroll the summary view vertically, keeping the cursor in step."""
    current_x, current_y = self._view_widget.position
    step = min(dy, current_y)  # clamp: cannot scroll above the top row
    self._view_widget.position = current_x, current_y - step
    self._move_cursor((0, -step))
def cursor_position(self):
    """Return the cursor clamped to the current row's width, or (0, 0)
    when the stored row no longer exists."""
    x, y = self._cursor_position
    try:
        row_width = len(self._entries[y])
    except IndexError:
        return 0, 0
    return min(x, row_width - 1), y
def get_selection(self):
    """Return the result currently under the cursor."""
    column, row = self.cursor_position()
    return self._entries[row][column]
def _move_cursor(self, vector):
if vector == (0, 0):
return
dx, dy = vector
if dy == 0:
x, y = self.cursor_position()
self._cursor_position = ((x + dx) % len(self._entries[y]), y)
elif dx == 0:
x, y = self._cursor_position
self._cursor_position = (x, (y + dy) % len(self._entries))
else:
raise ValueError
# Cursor-movement key handlers: thin delegations to _move_cursor / scroll.
def cursor_right(self):
    self._move_cursor(_RIGHT)
def cursor_left(self):
    self._move_cursor(_LEFT)
def cursor_up(self):
    self._move_cursor(_UP)
def cursor_down(self):
    self._move_cursor(_DOWN)
def cursor_page_up(self):
    # Page by the height of the visible summary portal.
    view_width, view_height = self._view_widget.portal.last_dimensions
    self.scroll(0, view_height)
def cursor_page_down(self):
    view_width, view_height = self._view_widget.portal.last_dimensions
    self.scroll(0, -view_height)
def cursor_home(self):
    # Jump to the first row, keeping the column.
    x, y = self._cursor_position
    self._cursor_position = x, 0
def cursor_end(self):
    # Jump to the last row, keeping the column.
    x, y = self._cursor_position
    self._cursor_position = x, len(self._entries) - 1
def _issue_generator(self):
    """Yield (result, (x, y)) for each problem result, scanning rows from
    the cursor row downward and wrapping around once.

    On the first pass over the cursor row, problems at or before the
    cursor column are skipped so repeated calls advance past the current
    issue; the extra final iteration revisits that row to pick them up.
    """
    x, y = self.cursor_position()
    for index in range(len(self._entries) + 1):
        row_index = (index + y) % len(self._entries)
        row = self._entries[row_index]
        for index_x, result in enumerate(row):
            if (result.status == tools.Status.problem and
                    not (row_index == y and index_x <= x and
                         index != len(self._entries))):
                yield result, (index_x, row_index)
def move_to_next_issue(self):
    """Jump the cursor to the next problem result, if any exists."""
    with contextlib.suppress(StopIteration):
        issue, self._cursor_position = next(self._issue_generator())
def move_to_next_issue_of_tool(self):
    """Jump to the next problem reported by the currently selected tool."""
    wanted_tool = self.get_selection().tool
    for issue, position in self._issue_generator():
        if issue.tool == wanted_tool:
            self._cursor_position = position
            return
def refresh_result(self, result, only_completed=True):
    """Reset *result* so a worker recalculates it.

    With only_completed=True (the default), results still pending or
    running are left untouched.
    """
    if not (result.is_completed or not only_completed):
        return
    if result.is_completed:
        self.completed_total -= 1
    result.reset()
    result.delete()
    self.closest_placeholder_generator = None  # sweep is now stale
    self._jobs_added_event.set()  # wake the workers
def refresh_tool(self, tool):
    """Recalculate every completed result produced by *tool*."""
    matching = (result for row in self._entries for result in row
                if result.tool == tool)
    for result in matching:
        self.refresh_result(result)
def clear_running(self):
    """Requeue any result stuck in the running state (e.g. after the
    program was interrupted mid-calculation)."""
    stuck = [result for row in self._entries for result in row
             if result.status == tools.Status.running]
    for result in stuck:
        self.refresh_result(result)
def as_html(self):
    """Render the whole summary as (html_text, styles)."""
    rendered = [row.as_html() for row in self._entries]
    body = "<br>".join(html_row for html_row, _ in rendered)
    styles = set()
    for _, row_styles in rendered:
        styles.update(row_styles)
    html = ("<style>a { text-decoration:none; }</style><pre>" +
            body + "</pre>")
    return html, styles
class Log:
    """Scrolling message log shown in the UI, or streamed straight to
    stdout when not attached to a terminal."""
    _GREY_BOLD_STYLE = termstr.CharStyle(termstr.Color.grey_100, is_bold=True)
    _GREEN_STYLE = termstr.CharStyle(termstr.Color.lime)

    def __init__(self, appearance_changed_event):
        self._appearance_changed_event = appearance_changed_event
        self.lines = []  # rendered log lines, newest last
        self._appearance = None  # cached rendering; None means stale

    def __getstate__(self):
        # The asyncio event is not picklable; load_state() reattaches it.
        state = self.__dict__.copy()
        state["_appearance_changed_event"] = None
        return state

    def log_message(self, message, timestamp=None, char_style=None):
        """Append *message* to the log.

        *message* may be a string or a list of parts (plain strings or
        (ignored, text) tuples). *timestamp* defaults to the current
        local time; *char_style* optionally styles the whole message.
        """
        if isinstance(message, list):
            message = [part[1] if isinstance(part, tuple) else part
                       for part in message]
            message = fill3.join("", message)
        if char_style is not None:
            message = termstr.TermStr(message, char_style)
        timestamp = (time.strftime("%H:%M:%S", time.localtime())
                     if timestamp is None else timestamp)
        line = termstr.TermStr(timestamp, Log._GREY_BOLD_STYLE) + " " + message
        if not sys.stdout.isatty():  # headless: stream instead of storing
            print(line, flush=True)
            return
        self.lines.append(line)
        self._appearance = None
        self._appearance_changed_event.set()

    def log_command(self, message, timestamp=None):
        """Log *message* highlighted as a user-initiated command."""
        # FIX: the timestamp argument was previously accepted but ignored.
        self.log_message(message, timestamp, char_style=Log._GREEN_STYLE)

    def appearance(self, dimensions):
        """Render the log at *dimensions*, re-rendering only when stale."""
        if self._appearance is None or \
           fill3.appearance_dimensions(self._appearance) != dimensions:
            width, height = dimensions
            del self.lines[:-height]  # keep only what fits on screen
            self._appearance = fill3.appearance_resize(self.lines, dimensions)
        return self._appearance
def highlight_chars(str_, style, marker="*"):
    """Return *str_* with each *marker* removed and the character that
    followed it rendered in *style*."""
    first, *rest = str_.split(marker)
    highlighted = [termstr.TermStr(part[0], style) + part[1:]
                   for part in rest if part != ""]
    return fill3.join("", [first] + highlighted)
def get_status_help():
    """Return a legend mapping each status icon to its meaning."""
    return fill3.join("\n", ["Statuses:"] +
                      [" " + tools.STATUS_TO_TERMSTR[status] + " " + meaning
                       for status, meaning in tools.STATUS_MEANINGS])
class Help:
    """Scrollable help overlay showing the module doc, key map and the
    status legend."""
    def __init__(self, summary, screen):
        self.summary = summary
        self.screen = screen
        help_text = fill3.join("\n", [__doc__, KEYS_DOC, get_status_help()])
        self.view = fill3.View.from_widget(fill3.Text(help_text))
        self.widget = fill3.Border(self.view, title="Help")
        portal = self.view.portal
        # Arrow keys scroll the text; "h", "q" and Esc dismiss the overlay.
        self.key_map = {"h": self._exit_help, terminal.UP_KEY: portal.scroll_up,
                        terminal.DOWN_KEY: portal.scroll_down,
                        terminal.LEFT_KEY: portal.scroll_left,
                        terminal.RIGHT_KEY: portal.scroll_right,
                        "q": self._exit_help, terminal.ESC: self._exit_help}
    def _exit_help(self):
        # The screen checks this flag each frame to decide what to draw.
        self.screen._is_help_visible = False
    def on_mouse_input(self, term_code, appearance_changed_event):
        """Scroll the help text with the mouse wheel."""
        event = terminal.decode_mouse_input(term_code)
        if event[1] == terminal.WHEEL_UP_MOUSE:
            self.view.portal.scroll_up()
            appearance_changed_event.set()
        elif event[1] == terminal.WHEEL_DOWN_MOUSE:
            self.view.portal.scroll_down()
            appearance_changed_event.set()
    def on_keyboard_input(self, term_code, appearance_changed_event):
        """Dispatch a key press through the key map (case-insensitively)."""
        action = (self.key_map.get(term_code) or
                  self.key_map.get(term_code.lower()))
        if action is not None:
            action()
            appearance_changed_event.set()
    def appearance(self, dimensions):
        return self.widget.appearance(dimensions)
class Listing:
    """Thin wrapper around a view that remembers its last render size so
    scrolling code can page by the visible height/width."""

    def __init__(self, view):
        self.view = view
        self.last_dimensions = None  # set on the first appearance() call

    def appearance(self, dimensions):
        """Render the wrapped view, recording *dimensions* first."""
        self.last_dimensions = dimensions
        return self.view.appearance(dimensions)
class Screen:
    """Top-level TUI widget.

    Owns the summary, result listing, log and help panes, arranges them
    into the available layouts (portrait/landscape, log shown/hidden,
    fullscreen, help overlay) and routes keyboard/mouse input to the
    focused pane.
    """
    def __init__(self, summary, log, appearance_changed_event, main_loop):
        self._summary = summary
        self._log = log
        self._appearance_changed_event = appearance_changed_event
        self._main_loop = main_loop
        self._is_summary_focused = True  # summary pane vs. listing pane
        self.workers = None  # populated by make_workers()
        self._is_listing_portrait = True
        self._is_log_visible = True
        self._is_help_visible = False
        self._is_fullscreen = False
        self._make_widgets()
        self._last_mouse_position = 0, 0
    def __getstate__(self):
        # Drop unpicklable runtime members; load_state() reattaches them.
        state = self.__dict__.copy()
        state["_appearance_changed_event"] = None
        state["_main_loop"] = None
        state["workers"] = None
        return state
    def make_workers(self, worker_count, is_being_tested, compression):
        """Create *worker_count* workers and their job-runner coroutines
        (stored as worker_.future; scheduled later by main())."""
        workers = []
        for index in range(worker_count):
            worker_ = worker.Worker(is_being_tested, compression)
            workers.append(worker_)
            future = worker_.job_runner(self, self._summary, self._log,
                                        self._summary._jobs_added_event,
                                        self._appearance_changed_event)
            worker_.future = future
        self.workers = workers
    def stop_workers(self):
        """Kill every worker, resetting any result it was calculating."""
        for worker_ in self.workers:
            if worker_.result is not None:
                worker_.result.reset()
            worker_.kill()
    def _partition(self, percentage, widgets, length):
        # Split *length* at *percentage*, but never below 10 cells.
        smaller_length = max(int(length * (percentage / 100)), 10)
        return [smaller_length, length - smaller_length]
    def _make_widgets(self):
        """Build the four pane layouts (portrait/landscape x log on/off)."""
        self._help_widget = Help(self._summary, self)
        root_path = os.path.basename(self._summary._root_path)
        summary = fill3.Border(self._summary, title="Summary of " + root_path)
        self._summary_border = summary
        try:
            selected_widget = self._summary.get_selection()
            result_widget = selected_widget.result
        except IndexError:  # empty summary: nothing to show yet
            result_widget = fill3.Text("Nothing selected")
        self._view = fill3.View.from_widget(result_widget)
        self._listing = fill3.Border(Listing(self._view))
        log = fill3.Border(self._log, title="Log",
                           characters=Screen._DIMMED_BORDER)
        quarter_cut = functools.partial(self._partition, 25)
        golden_cut = functools.partial(self._partition, 38.198)
        three_quarter_cut = functools.partial(self._partition, 75)
        port_log = fill3.Row([fill3.Column([summary, log], three_quarter_cut),
                              self._listing], golden_cut)
        land_log = fill3.Column([fill3.Row([summary, log]), self._listing],
                                quarter_cut)
        port_no_log = fill3.Row([summary, self._listing], golden_cut)
        land_no_log = fill3.Column([summary, self._listing], quarter_cut)
        # Indexed as _layouts[is_log_visible][is_listing_portrait].
        self._layouts = [[land_no_log, port_no_log], [land_log, port_log]]
        self._set_focus()
    def toggle_help(self):
        self._is_help_visible = not self._is_help_visible
    def toggle_log(self):
        self._is_log_visible = not self._is_log_visible
    def toggle_window_orientation(self):
        self._is_listing_portrait = not self._is_listing_portrait
    def _move_listing(self, vector):
        """Scroll the selected result's listing by *vector*, clamped to
        the widget's extents."""
        dx, dy = vector
        selected_widget = self._summary.get_selection()
        x, y = selected_widget.scroll_position
        widget_width, widget_height = fill3.appearance_dimensions(
            selected_widget.result.appearance_min())
        listing_width, listing_height = (self._listing.widget.
                                         last_dimensions)
        listing_width -= 1  # scrollbars
        listing_height -= 1
        x = min(x + dx, max(widget_width - listing_width, 0))
        y = min(y + dy, max(widget_height - listing_height, 0))
        x = max(0, x)
        y = max(0, y)
        selected_widget.scroll_position = x, y
    # Movement keys act on whichever pane currently has focus.
    def cursor_up(self):
        (self._summary.cursor_up() if self._is_summary_focused
         else self._move_listing(_UP))
    def cursor_down(self):
        (self._summary.cursor_down() if self._is_summary_focused
         else self._move_listing(_DOWN))
    def cursor_right(self):
        (self._summary.cursor_right() if self._is_summary_focused
         else self._move_listing(_RIGHT))
    def cursor_left(self):
        (self._summary.cursor_left() if self._is_summary_focused
         else self._move_listing(_LEFT))
    def cursor_page_up(self):
        (self._summary.cursor_page_up() if self._is_summary_focused
         else self.listing_page_up())
    def cursor_page_down(self):
        (self._summary.cursor_page_down() if self._is_summary_focused
         else self.listing_page_down())
    def cursor_end(self):
        (self._summary.cursor_end() if self._is_summary_focused
         else self._page_listing(_RIGHT))
    def cursor_home(self):
        (self._summary.cursor_home() if self._is_summary_focused
         else self._page_listing(_LEFT))
    def _page_listing(self, vector):
        # Page by half the listing's last rendered size.
        dx, dy = vector
        listing_width, listing_height = self._listing.widget.last_dimensions
        self._move_listing((dx * (listing_width // 2),
                            dy * (listing_height // 2)))
    def listing_page_up(self):
        self._page_listing(_UP)
    def listing_page_down(self):
        self._page_listing(_DOWN)
    def move_to_next_issue(self):
        self._summary.move_to_next_issue()
    def move_to_next_issue_of_tool(self):
        self._summary.move_to_next_issue_of_tool()
    def edit_file(self):
        """Open the selected file in the configured editor at the line the
        listing is scrolled to."""
        if self.editor_command is None:
            self._log.log_message("An editor has not been defined. "
                                  "See option -e.")
        else:
            path = self._summary.get_selection().path
            path_colored = tools.path_colored(path)
            line_num = (self._summary.get_selection().entry[0].
                        scroll_position[1] + 1)
            self._log.log_message([in_green("Editing "), path_colored,
                                   in_green(f" at line {line_num}…")])
            # NOTE(review): path is interpolated into a shell command —
            # paths with spaces/metacharacters will break; consider a list
            # argv with shell=False.
            subprocess.Popen(f"{self.editor_command} +{line_num} {path}",
                             shell=True, stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
    def toggle_status_style(self):
        self._summary.toggle_status_style(self._log)
    def toggle_order(self):
        """Flip the summary sort order, keeping the current selection."""
        self._summary.is_directory_sort = not self._summary.is_directory_sort
        sort_order = ("directory then type" if self._summary.is_directory_sort
                      else "type then directory")
        self._log.log_command(f"Sorting files by {sort_order}.")
        with self._summary.keep_selection():
            self._summary.sort_entries()
    def quit_(self):
        # SIGINT unwinds the event loop the same way Ctrl-C would.
        os.kill(os.getpid(), signal.SIGINT)
    def refresh(self):
        """Recalculate the selected result."""
        selection = self._summary.get_selection()
        tool_name = tools.tool_name_colored(selection.tool, selection.path)
        path_colored = tools.path_colored(selection.path)
        self._log.log_message([in_green("Refreshing "), tool_name,
                               in_green(" result of "), path_colored,
                               in_green("…")])
        self._summary.refresh_result(selection)
    def refresh_tool(self):
        """Recalculate every result of the selected result's tool."""
        selection = self._summary.get_selection()
        tool_name = tools.tool_name_colored(selection.tool, selection.path)
        self._log.log_message([in_green("Refreshing all results of "),
                               tool_name, in_green("…")])
        self._summary.refresh_tool(selection.tool)
    # Border characters used for the unfocused pane.
    _DIMMED_BORDER = [termstr.TermStr(part).fg_color(termstr.Color.grey_100)
                      for part in fill3.Border.THIN]
    def _set_focus(self):
        # Thick border marks the focused pane, dimmed thin the other.
        focused, unfocused = fill3.Border.THICK, Screen._DIMMED_BORDER
        self._summary_border.set_style(focused if self._is_summary_focused
                                       else unfocused)
        self._listing.set_style(unfocused if self._is_summary_focused
                                else focused)
    def toggle_focus(self):
        self._is_summary_focused = not self._is_summary_focused
        self._set_focus()
    def toggle_fullscreen(self):
        self._is_fullscreen = not self._is_fullscreen
    def xdg_open(self):
        """Open the selected file with the desktop's default application."""
        path = self._summary.get_selection().path
        path_colored = tools.path_colored(path)
        self._log.log_message([in_green("Opening "), path_colored,
                               in_green("…")])
        subprocess.Popen(["xdg-open", path], stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    def save(self):
        """Persist the whole screen (summary, results, log) to the cache."""
        worker.Worker.unsaved_jobs_total = 0
        pickle_path = os.path.join(tools.CACHE_PATH, "summary.pickle")
        open_compressed = functools.partial(gzip.open, compresslevel=1)
        tools.dump_pickle_safe(self, pickle_path, open=open_compressed)
    def _select_entry_at_position(self, x, y, view_width, view_height):
        """Move the cursor to the summary cell under screen position (x, y)."""
        border_width = 1
        if x < border_width or y < border_width or x > view_width or \
           y > view_height:
            return  # click landed on the border or outside the pane
        view_x, view_y = self._summary._view_widget.portal.position
        column_index = x - border_width + view_x
        row_index = y - border_width + view_y
        if row_index >= len(self._summary._entries):
            return
        row = self._summary._entries[row_index]
        if column_index < 0 or column_index >= len(row):
            return
        self._summary._cursor_position = column_index, row_index
    def _is_switching_focus(self, x, y, view_width, view_height):
        # A click on the opposite side of the pane split (in the current
        # orientation) switches focus rather than selecting an entry.
        return (not self._is_fullscreen and
                (self._is_listing_portrait and
                 (x > view_width and
                  self._is_summary_focused or x <= view_width and
                  not self._is_summary_focused) or
                 not self._is_listing_portrait and
                 (y > view_height and
                  self._is_summary_focused or y <= view_height and
                  not self._is_summary_focused)))
    def on_mouse_input(self, term_code):
        """Handle a mouse escape sequence: wheel pages the listing, drags
        scroll the focused pane, presses select or switch focus."""
        if self._is_help_visible:
            self._help_widget.on_mouse_input(
                term_code, self._appearance_changed_event)
            return
        event = terminal.decode_mouse_input(term_code)
        if event[0] not in [terminal.PRESS_MOUSE, terminal.DRAG_MOUSE]:
            return
        x, y = event[2:4]
        if event[0] == terminal.DRAG_MOUSE:
            last_x, last_y = self._last_mouse_position
            dx, dy = x - last_x, y - last_y
            if self._is_summary_focused:
                self._summary.scroll(dx, dy)
            else:
                self._move_listing((-dx, -dy))
        else:  # Mouse press
            if event[1] == terminal.WHEEL_UP_MOUSE:
                self.listing_page_up()
            elif event[1] == terminal.WHEEL_DOWN_MOUSE:
                self.listing_page_down()
            else:
                view_width, view_height = \
                    self._summary._view_widget.portal.last_dimensions
                if self._is_switching_focus(x, y, view_width, view_height):
                    self.toggle_focus()
                else:
                    self._select_entry_at_position(
                        x, y, view_width, view_height)
        self._last_mouse_position = x, y
        self._appearance_changed_event.set()
    def on_keyboard_input(self, term_code):
        """Dispatch a key press through _KEY_MAP (case-insensitively),
        unless the help overlay is consuming input."""
        if self._is_help_visible:
            self._help_widget.on_keyboard_input(
                term_code, self._appearance_changed_event)
            return
        action = (Screen._KEY_MAP.get(term_code) or
                  Screen._KEY_MAP.get(term_code.lower()))
        if action is not None:
            action(self)
            self._appearance_changed_event.set()
    def _fix_listing(self):
        """Point the listing view at the selected result and rebuild its
        title (path, tool, status and current line)."""
        widget = self._summary.get_selection()
        view = self._listing.widget.view
        view.position = widget.scroll_position
        x, y = view.position
        view.widget = widget.result
        tool_name = tools.tool_name_colored(widget.tool, widget.path)
        divider = " " + self._listing.top * 2 + " "
        self._listing.title = (
            tools.path_colored(widget.path) + divider + tool_name + " " +
            tools.STATUS_TO_TERMSTR[widget.status] + divider +
            "line " + str(y+1))
    # Key hints shown in the bottom status bar ("*" marks the hot key).
    _STATUS_BAR = highlight_chars(
        " *help *quit *t*a*b:focus *orient *log *edit *next *sort"
        " *refresh *fullscreen *xdg-open", Log._GREEN_STYLE)
    # NOTE(review): lru_cache on instance methods keeps self alive for the
    # cache's lifetime; harmless here (single long-lived Screen) but worth
    # confirming.
    @functools.lru_cache()
    def _get_partial_bar_chars(self, bar_transparency):
        bar_color = blend_color(termstr.Color.black, termstr.Color.white,
                                bar_transparency)
        return [termstr.TermStr(char).fg_color(bar_color).
                bg_color(termstr.Color.black)
                for char in fill3.ScrollBar._PARTIAL_CHARS[1]]
    @functools.lru_cache(maxsize=2)
    def _get_status_bar_appearance(self, width, progress_bar_size):
        """Render the status bar with a progress highlight of
        *progress_bar_size* cells (fractional cells use partial glyphs)."""
        bar_transparency = 0.7
        bar = self._STATUS_BAR.center(width)[:width]
        fraction, whole = math.modf(progress_bar_size)
        whole = int(whole)
        if whole < len(bar) and bar[whole].data == " ":
            left_part = bar[:whole]
            right_part = (self._get_partial_bar_chars(bar_transparency)
                          [int(fraction * 8)] + bar[whole+1:])
        else:
            # Can't draw a partial cell over text; round to whole cells.
            progress_bar_size = round(progress_bar_size)
            left_part = bar[:progress_bar_size]
            right_part = bar[progress_bar_size:]
        return [highlight_str(left_part, termstr.Color.white, bar_transparency)
                + right_part]
    def _get_status_bar(self, width):
        # Bar length is proportional to the fraction of incomplete results.
        incomplete = self._summary.result_total - self._summary.completed_total
        progress_bar_size = width if self._summary.result_total == 0 else \
            max(0, width * incomplete / self._summary.result_total)
        return self._get_status_bar_appearance(width, progress_bar_size)
    def appearance(self, dimensions):
        """Render the whole screen: the active layout plus the status bar."""
        if len(self._summary._entries) > 0:
            self._fix_listing()
        if self._is_help_visible:
            body = self._help_widget
        elif self._is_fullscreen:
            body = (self._summary_border if self._is_summary_focused
                    else self._listing)
        else:
            body = (self._layouts[self._is_log_visible]
                    [self._is_listing_portrait])
        width, height = max(dimensions[0], 10), max(dimensions[1], 20)
        result = (body.appearance((width, height-1)) +
                  self._get_status_bar(width))
        return (result if (width, height) == dimensions
                else fill3.appearance_resize(result, dimensions))
    # Key -> unbound method; on_keyboard_input passes self explicitly.
    _KEY_MAP = {
        "o": toggle_window_orientation, "l": toggle_log, "h": toggle_help,
        terminal.UP_KEY: cursor_up, terminal.DOWN_KEY: cursor_down,
        terminal.LEFT_KEY: cursor_left, terminal.RIGHT_KEY: cursor_right,
        terminal.PAGE_DOWN_KEY: cursor_page_down,
        terminal.PAGE_UP_KEY: cursor_page_up, "s": toggle_order,
        terminal.HOME_KEY: cursor_home, terminal.END_KEY: cursor_end,
        "n": move_to_next_issue, "N": move_to_next_issue_of_tool,
        "e": edit_file, "q": quit_, terminal.ESC: quit_, "r": refresh,
        "R": refresh_tool, "\t": toggle_focus, "f": toggle_fullscreen,
        "x": xdg_open}
def setup_inotify(root_path, loop, on_filesystem_event, exclude_filter):
    """Watch *root_path* recursively for file changes, dispatching events
    to *on_filesystem_event* on the asyncio *loop*."""
    watch_manager = pyinotify.WatchManager()
    event_mask = (pyinotify.IN_CREATE | pyinotify.IN_DELETE |
                  pyinotify.IN_CLOSE_WRITE | pyinotify.IN_ATTRIB |
                  pyinotify.IN_MOVED_FROM | pyinotify.IN_MOVED_TO)
    # quiet=False so watch-limit errors raise instead of being swallowed.
    watch_manager.add_watch(root_path, event_mask, rec=True, auto_add=True,
                            proc_fun=on_filesystem_event,
                            exclude_filter=exclude_filter, quiet=False)
    return pyinotify.AsyncioNotifier(watch_manager, loop,
                                     callback=lambda notifier: None)
def load_state(pickle_path, jobs_added_event, appearance_changed_event,
               root_path, loop):
    """Restore a saved Screen from *pickle_path*, or build a fresh one.

    Returns (summary, screen, log, is_first_run). AttributeError is
    treated like a missing file so stale pickles from older program
    versions are silently discarded.
    """
    is_first_run = True
    try:
        with gzip.open(pickle_path, "rb") as file_:
            screen = pickle.load(file_)
    except (FileNotFoundError, AttributeError):
        summary = Summary(root_path, jobs_added_event)
        log = Log(appearance_changed_event)
        screen = Screen(summary, log, appearance_changed_event, loop)
    else:
        is_first_run = False
        # Reattach the runtime-only members that __getstate__ dropped.
        screen._appearance_changed_event = appearance_changed_event
        screen._main_loop = loop
        summary = screen._summary
        summary._jobs_added_event = jobs_added_event
        summary._root_path = root_path
        summary.clear_running()
        log = screen._log
        log._appearance_changed_event = appearance_changed_event
    return summary, screen, log, is_first_run
def on_filesystem_event(event, summary, root_path, appearance_changed_event):
    """Translate an inotify *event* into the matching Summary update."""
    path = list(fix_paths(root_path, [event.pathname]))[0]
    if is_path_excluded(path[2:]):  # [2:] strips the leading "./", presumably
        return
    inotify_actions = {pyinotify.IN_CREATE: summary.on_file_added,
                       pyinotify.IN_MOVED_TO: summary.on_file_added,
                       pyinotify.IN_DELETE: summary.on_file_deleted,
                       pyinotify.IN_MOVED_FROM: summary.on_file_deleted,
                       pyinotify.IN_ATTRIB: summary.on_file_modified,
                       pyinotify.IN_CLOSE_WRITE: summary.on_file_modified}
    if event.mask not in inotify_actions:
        return
    try:
        inotify_actions[event.mask](path)
    except Exception:
        # Log the failure and stop the program rather than continue with a
        # possibly inconsistent summary.
        tools.log_error()
        raise KeyboardInterrupt
    appearance_changed_event.set()
def main(root_path, loop, worker_count=None, editor_command=None, theme=None,
         compression=None, is_being_tested=False):
    """Run the checker on *root_path* until interrupted.

    Restores any saved state, starts filesystem monitoring, the
    filesystem-sync task and the worker tasks, then runs the event loop —
    with the TUI when stdout is a tty, headless otherwise. State is saved
    on the way out.
    """
    if worker_count is None:
        worker_count = max(multiprocessing.cpu_count() - 1, 1)
    if theme is None:
        theme = "native"
    if compression is None:
        compression = "gzip"
    os.environ["PYGMENT_STYLE"] = theme
    pickle_path = os.path.join(tools.CACHE_PATH, "summary.pickle")
    jobs_added_event = asyncio.Event()
    appearance_changed_event = asyncio.Event()
    summary, screen, log, is_first_run = load_state(
        pickle_path, jobs_added_event, appearance_changed_event, root_path,
        loop)
    screen.editor_command = editor_command
    log.log_message("Program started.")
    jobs_added_event.set()

    def callback(event):
        # Forward inotify events to the summary.
        on_filesystem_event(event, summary, root_path,
                            appearance_changed_event)
    notifier = setup_inotify(root_path, loop, callback, is_path_excluded)
    try:
        log.log_message(f"Starting workers ({worker_count}) …")
        screen.make_workers(worker_count, is_being_tested, compression)

        def exit_loop():
            log.log_command("Exiting…")
            time.sleep(0.05)
            screen.stop_workers()
            loop.stop()
        loop.create_task(summary.sync_with_filesystem(
            appearance_changed_event, log))
        # FIX: loop variable renamed from "worker", which shadowed the
        # imported worker module (used as worker.Worker elsewhere).
        for worker_ in screen.workers:
            loop.create_task(worker_.future)
        if sys.stdout.isatty():
            with fill3.context(loop, appearance_changed_event, screen,
                               exit_loop=exit_loop):
                loop.run_forever()
            log.log_message("Program stopped.")
        else:  # headless run: plain event loop, Ctrl-C to stop
            try:
                loop.run_forever()
            except KeyboardInterrupt:
                screen.stop_workers()
                loop.stop()
    finally:
        notifier.stop()
        if summary.is_loaded:  # don't clobber the save with a partial load
            screen.save()
@contextlib.contextmanager
def chdir(path):
    """Temporarily change the working directory to *path*, restoring the
    previous directory on exit even if the body raises."""
    previous = os.getcwd()
    os.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous)
def manage_cache(root_path):
    """Ensure the results cache directory exists, clearing it first when
    eris itself has been updated since the cache was created."""
    cache_path = os.path.join(root_path, tools.CACHE_PATH)
    timestamp_path = os.path.join(cache_path, "creation_time")
    if os.path.exists(cache_path):
        # NOTE(review): raises if the cache dir exists without its
        # timestamp file — confirm that can't happen in practice.
        timestamp = os.stat(timestamp_path).st_mtime
        for resource_path in ["__main__.py", "tools.py", "tools.toml"]:
            with importlib.resources.path(eris, resource_path) as resource:
                if resource.stat().st_mtime > timestamp:
                    print("Eris has been updated, so clearing the cache and"
                          " recalculating all results…")
                    shutil.rmtree(cache_path)
                    break
    if not os.path.exists(cache_path):
        os.mkdir(cache_path)
        open(timestamp_path, "w").close()  # touch the creation timestamp
def print_tool_info():
    """Print every known tool with its availability, url, extensions and
    the command or function it runs."""
    extensions_for_tool = {}
    for extensions, tools_ in tools.TOOLS_FOR_EXTENSIONS:
        for extension in extensions:
            for tool in tools_:
                extensions_for_tool.setdefault(
                    tool, {extension}).add(extension)
    for tool in sorted(tools.tools_all(), key=lambda t: t.__name__):
        # Bold when the tool is installed, red + note when it isn't.
        print(termstr.TermStr(tool.__name__).bold()
              if tools.is_tool_available(tool)
              else termstr.TermStr(tool.__name__).fg_color(termstr.Color.red)
              + " (not available) ")
        print("url:", tool.url)
        extensions = list(extensions_for_tool.get(tool, {"*"}))
        print("extensions:", ", ".join(extensions))
        if hasattr(tool, "command"):
            print(f"command: {tool.command} foo.{extensions[0]}")
        else:
            print("function:", "eris.tools." + tool.__name__)
        print()
def check_arguments():
    """Parse and validate the command line.

    Returns (root_path, worker_count, editor_command, theme, compression),
    where worker_count/theme/compression may be None (meaning: use the
    default). Prints a message and exits on invalid input, --help or
    --info.
    """
    cmdline_help = __doc__ + USAGE.replace("*", "")
    arguments = docopt.docopt(cmdline_help, help=False)
    if arguments["--help"]:
        print(cmdline_help)
        sys.exit(0)
    if arguments["--info"]:
        print_tool_info()
        sys.exit(0)
    worker_count = None
    try:
        if arguments["--workers"] is not None:
            worker_count = int(arguments["--workers"])
            # FIX: reject negative counts too, not only zero.
            if worker_count < 1:
                print("There must be at least one worker.")
                sys.exit(1)
    except ValueError:
        print("--workers requires a number.")
        sys.exit(1)
    root_path = os.path.abspath(arguments["<directory>"])
    if not os.path.exists(root_path):
        print("File does not exist:", root_path)
        sys.exit(1)
    if not os.path.isdir(root_path):
        print("File is not a directory:", root_path)
        sys.exit(1)
    if arguments["--theme"] is not None:
        themes = list(pygments.styles.get_all_styles())
        if arguments["--theme"] not in themes:
            print("--theme must be one of:", " ".join(themes))
            sys.exit(1)
    if arguments["--compression"] is not None:
        compressions = ["gzip", "lzma", "bz2", "none"]
        if arguments["--compression"] not in compressions:
            print("--compression must be one of:", " ".join(compressions))
            sys.exit(1)
    editor_command = (arguments["--editor"] or os.environ.get("EDITOR", None)
                      or os.environ.get("VISUAL", None))
    return (root_path, worker_count, editor_command, arguments["--theme"],
            arguments["--compression"])
def inotify_watches_exceeded():
    """Explain how to raise the kernel inotify watch limit when it is hit."""
    advice = [
        "Error: This codebase has too many directories to be monitored.",
        " Fix by increasing the kernel parameter user.max_inotify_watches "
        "to exceed the number of directories.",
        " e.g. 'sudo sysctl user.max_inotify_watches=200000'",
    ]
    print("\n".join(advice))
def entry_point():
    """Console-script entry: validate arguments, prepare the cache, then
    run the main loop from inside the codebase's root directory."""
    root_path, worker_count, editor_command, theme, compression = \
        check_arguments()
    manage_cache(root_path)
    with terminal.terminal_title("eris: " + os.path.basename(root_path)):
        with chdir(root_path):  # FIX: Don't change directory if possible.
            loop = asyncio.get_event_loop()
            try:
                main(root_path, loop, worker_count, editor_command, theme,
                     compression)
            except pyinotify.WatchManagerError:
                inotify_watches_exceeded()
# Allow running directly as a script as well as via the console-script entry.
if __name__ == "__main__":
    entry_point()
|
from hamcrest import assert_that, contains, has_properties
from mammoth.styles.parser.tokeniser import tokenise
# Tests for the style-mapping tokeniser: each case feeds a source string to
# assert_tokens and checks the exact sequence of tokens produced.
def test_unknown_tokens_are_tokenised():
    assert_tokens("~", is_token("unknown", "~"))
def test_empty_string_is_tokenised_to_end_of_file_token():
    # Only the implicit end-of-file token is expected.
    assert_tokens("")
def test_whitespace_is_tokenised():
    assert_tokens(" \t\t ", is_token("whitespace", " \t\t "))
def test_identifiers_are_tokenised():
    assert_tokens("Overture", is_token("identifier", "Overture"))
def test_escape_sequences_in_identifiers_are_tokenised():
    assert_tokens(r"\:", is_token("identifier", r"\:"))
def test_integers_are_tokenised():
    assert_tokens("123", is_token("integer", "123"))
def test_strings_are_tokenised():
    assert_tokens("'Tristan'", is_token("string", "'Tristan'"))
def test_escape_sequences_in_strings_are_tokenised():
    assert_tokens(r"'Tristan\''", is_token("string", r"'Tristan\''"))
def test_unterminated_strings_are_tokenised():
    assert_tokens("'Tristan", is_token("unterminated string", "'Tristan"))
def test_arrows_are_tokenised():
    # Multi-character symbols are matched greedily.
    assert_tokens("=>=>", is_token("symbol", "=>"), is_token("symbol", "=>"))
def test_dots_are_tokenised():
    assert_tokens(".", is_token("symbol", "."))
def test_colons_are_tokenised():
    assert_tokens("::", is_token("symbol", ":"), is_token("symbol", ":"))
def test_greater_thans_are_tokenised():
    assert_tokens(">>", is_token("symbol", ">"), is_token("symbol", ">"))
def test_equals_are_tokenised():
    assert_tokens("==", is_token("symbol", "="), is_token("symbol", "="))
def test_open_parens_are_tokenised():
    assert_tokens("((", is_token("symbol", "("), is_token("symbol", "("))
def test_close_parens_are_tokenised():
    assert_tokens("))", is_token("symbol", ")"), is_token("symbol", ")"))
def test_open_square_brackets_are_tokenised():
    assert_tokens("[[", is_token("symbol", "["), is_token("symbol", "["))
def test_close_square_brackets_are_tokenised():
    assert_tokens("]]", is_token("symbol", "]"), is_token("symbol", "]"))
def test_choices_are_tokenised():
    assert_tokens("||", is_token("symbol", "|"), is_token("symbol", "|"))
def test_bangs_are_tokenised():
    assert_tokens("!!", is_token("symbol", "!"), is_token("symbol", "!"))
def test_can_tokenise_multiple_tokens():
    assert_tokens("The Magic Position",
                  is_token("identifier", "The"),
                  is_token("whitespace", " "),
                  is_token("identifier", "Magic"),
                  is_token("whitespace", " "),
                  is_token("identifier", "Position"),
                  )
def assert_tokens(string, *expected):
    """Tokenise *string* and assert the result matches *expected* exactly;
    an end-of-file token is always expected last."""
    expected = list(expected)
    expected.append(is_token("end", ""))
    assert_that(
        tokenise(string),
        contains(*expected),
    )
def is_token(token_type, value):
    """Return a hamcrest matcher for a token with the given type and value."""
    return has_properties(
        type=token_type,
        value=value,
    )
|
from numpy import *
from .bin_roundness import bin_roundness
def accretion(x, y, signal, noise, targetsn, pixelsize=False, quiet=False):
    """
    Initial binning -- steps i-v of eq 5.1 of Cappellari & Copin (2003)

    INPUTS:
      x        : x coordinates of pixels to bin
      y        : y coordinates of pixels to bin
      signal   : signal associated with each pixel
      noise    : noise (1-sigma error) associated with each pixel
      targetsn : desired signal-to-noise ratio in final 2d-binned data

    OPTIONS:
      pixelsize : pixel scale of the input data; if not given it is
                  estimated from the minimum pixel-to-pixel distance
      quiet     : if set, suppress printed outputs

    RETURNS:
      clas : bin number (1-based) assigned to each input pixel; pixels
             belonging to bins that never reached 0.8*targetsn are reset
             to 0.
    """
    n = x.size
    clas = zeros(x.size, dtype="<i8")  # bin number of each pixel
    good = zeros(x.size, dtype="<i8")  # =1 if bin accepted as good
    # for each point, find distance to all other points and select minimum
    # (robust but slow way of determining the pixel size of unbinned data)
    if not pixelsize:
        dx = 1.e30
        for j in range(x.size - 1):
            d = (x[j] - x[j+1:])**2 + (y[j] - y[j+1:])**2
            dx = min(d.min(), dx)
        pixelsize = sqrt(dx)
    # start from the pixel with highest S/N
    sn = (signal / noise).max()
    currentbin = (signal / noise).argmax()
    # rough estimate of the expected final bin number
    # This value is only used to have a feeling of the expected
    # remaining computation time when binning very big dataset.
    wh = where(signal / noise < targetsn)
    npass = size(where(signal / noise >= targetsn))
    maxnum = int(round((signal[wh]**2 / noise[wh]**2).sum() / targetsn**2)) + npass
    # first bin assigned CLAS = 1 -- with N pixels, get at most N bins
    for ind in range(1, n + 1):
        if not quiet:
            print(" bin: {:} / {:}".format(ind, maxnum))
        # to start the current bin is only one pixel
        clas[currentbin] = ind
        # centroid of bin
        xbar = x[currentbin]
        ybar = y[currentbin]
        while True:
            # stop if all pixels are binned
            unbinned = where(clas == 0)[0]
            if unbinned.size == 0:
                break
            # find unbinned pixel closest to centroid of current bin
            dist = (x[unbinned] - xbar)**2 + (y[unbinned] - ybar)**2
            k = dist.argmin()
            # find the distance from the closest pixel to the current bin
            # (the centroid distance computed above is not reused here, so
            # the old "mindist = dist.min()" assignment was dead code)
            mindist = ((x[currentbin] - x[unbinned[k]])**2
                       + (y[currentbin] - y[unbinned[k]])**2).min()
            # estimate roundness of bin with candidate pixel added
            nextbin = append(currentbin, unbinned[k])
            roundness = bin_roundness(x[nextbin], y[nextbin], pixelsize)
            # compute sn of bin with candidate pixel added
            snold = sn
            sn = signal[nextbin].sum() / sqrt((noise[nextbin]**2).sum())
            # Test whether the CANDIDATE pixel is connected to the
            # current bin, whether the POSSIBLE new bin is round enough
            # and whether the resulting S/N would get closer to targetsn
            if sqrt(mindist) > 1.2 * pixelsize or roundness > 0.3 \
                    or abs(sn - targetsn) > abs(snold - targetsn):
                if snold > 0.8 * targetsn:
                    good[currentbin] = 1
                break
            # if all the above tests are negative then accept the CANDIDATE
            # pixel, add it to the current bin, and continue accreting pixels
            clas[unbinned[k]] = ind
            currentbin = nextbin
            # update the centroid of the current bin
            xbar = x[currentbin].mean()
            ybar = y[currentbin].mean()
        # get the centroid of all the binned pixels
        binned = where(clas != 0)[0]
        unbinned = where(clas == 0)[0]
        # stop if all pixels are binned
        if unbinned.size == 0:
            break
        xbar = x[binned].mean()
        ybar = y[binned].mean()
        # find the closest unbinned pixel to the centroid of all
        # the binned pixels, and start a new bin from that pixel
        k = ((x[unbinned] - xbar)**2 + (y[unbinned] - ybar)**2).argmin()
        currentbin = unbinned[k]  # the bin is initially made of one pixel
        sn = signal[currentbin] / noise[currentbin]
    # set to zero all bins that did not reach the target S/N
    clas = clas * good
    return clas
|
"""
Tests for priors.
"""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import numpy as np
import numpy.testing as nt
import scipy.optimize as spop
import reggie.core.priors as priors
class PriorTest(object):
    """Generic checks that any prior object under test must satisfy."""

    def __init__(self, prior):
        self.prior = prior

    def test_repr(self):
        # repr() of the prior must not raise.
        _ = repr(self.prior)

    def test_bounds(self):
        shape = np.shape(self.prior.bounds)
        valid_shapes = {(2,), (self.prior.ndim, 2)}
        assert shape in valid_shapes

    def test_sample(self):
        ndim = self.prior.ndim
        assert np.shape(self.prior.sample()) == (ndim,)
        assert np.shape(self.prior.sample(5)) == (5, ndim)

    def test_logprior(self):
        # Compare the analytic gradient against a finite-difference estimate.
        for theta in self.prior.sample(5, 0):
            numeric = spop.approx_fprime(theta, self.prior.get_logprior, 1e-8)
            _, analytic = self.prior.get_logprior(theta, True)
            nt.assert_allclose(numeric, analytic, rtol=1e-6)
class TestUniform(PriorTest):
    # Run the generic PriorTest checks against a 2-d uniform prior.
    def __init__(self):
        PriorTest.__init__(self, priors.Uniform([0, 0], [1, 1]))


class TestNormal(PriorTest):
    # Run the generic PriorTest checks against a 2-d normal prior.
    def __init__(self):
        PriorTest.__init__(self, priors.Normal([0, 0], [1, 1]))


class TestLogNormal(PriorTest):
    # Run the generic PriorTest checks against a 2-d log-normal prior.
    def __init__(self):
        PriorTest.__init__(self, priors.LogNormal([0, 0], [1, 1]))


def test_uniform():
    # Constructing a Uniform prior with an upper bound below the lower
    # bound must raise ValueError.
    nt.assert_raises(ValueError, priors.Uniform, 0, -1)
|
"""nrvr.xml.etree - Utilities for xml.etree.ElementTree
The main class provided by this module is ElementTreeUtil.
To be expanded as needed.
Idea and first implementation - Leo Baschy <srguiwiz12 AT nrvr DOT com>
Public repository - https://github.com/srguiwiz/nrvr-commander
Copyright (c) Nirvana Research 2006-2015.
Simplified BSD License"""
import copy
import xml.etree.ElementTree
class ElementTreeUtil(object):
    """Utilities for xml.etree.ElementTree.

    Originally written for Python 2.6; tostring() has been fixed to also
    work on Python 3."""

    @classmethod
    def indent(cls, element, indent="  ", level=0):
        """Set whitespace for indentation.

        element
            an xml.etree.ElementTree.Element instance.
        indent
            the additional indent for each level down.
        level
            increases on recursive calls.
            Need not be set on regular use."""
        levelIndent = "\n" + level * indent
        if len(element):
            # element has child element
            if not element.text or not element.text.strip():
                # element has no text or text is only whitespace
                element.text = levelIndent + indent
            for child in element:
                # child indented one level more
                cls.indent(child, indent=indent, level=level + 1)
            if not child.tail or not child.tail.strip():
                # last child has no tail or tail is only whitespace
                child.tail = levelIndent
        if level > 0:
            # any level except top level
            if not element.tail or not element.tail.strip():
                # element has no tail or tail is only whitespace
                element.tail = levelIndent
        else:
            # top level
            element.tail = ""

    @classmethod
    def unindent(cls, element):
        """Remove whitespace from indentation.

        element
            an xml.etree.ElementTree.Element instance."""
        if len(element):
            # element has child element
            if not element.text or not element.text.strip():
                # element has no text or text is only whitespace
                element.text = ""
            for child in element:
                # child indented one level more
                cls.unindent(child)
        if not element.tail or not element.tail.strip():
            # element has no tail or tail is only whitespace
            element.tail = ""

    @classmethod
    def tostring(cls, element, indent="  ", xml_declaration=True, encoding="utf-8"):
        """Generate a string representation.

        element
            an xml.etree.ElementTree.Element instance.
            Tolerates xml.etree.ElementTree.ElementTree.
        indent
            the additional indent for each level down.
            If None then unindented.
        xml_declaration
            whether with XML declaration <?xml version="1.0" encoding="utf-8"?>.
        encoding
            the encoding name written into the XML declaration; the return
            value itself is always a text string."""
        # tolerate tree instead of element
        if isinstance(element, xml.etree.ElementTree.ElementTree):
            # if given a tree
            element = element.getroot()
        # work on a copy so the caller's tree keeps its whitespace
        element = copy.deepcopy(element)
        if indent is not None:
            cls.indent(element, indent)
        else:
            cls.unindent(element)
        # Serialize to text. Passing a byte encoding here would return
        # bytes on Python 3 (making the concatenation below a TypeError)
        # and would also embed a second XML declaration of its own.
        string = xml.etree.ElementTree.tostring(element, encoding="unicode")
        if xml_declaration:
            string = '<?xml version="1.0" encoding="{0}"?>\n'.format(encoding) + string
        return string

    @classmethod
    def simpledict(cls, element):
        """Generate a dictionary from child element tags and text.

        element
            an xml.etree.ElementTree.Element instance."""
        children = element.findall('*')
        dictionary = {}
        for child in children:
            dictionary[child.tag] = child.text
        return dictionary
if __name__ == "__main__":
    # Demo: round-trip a small document through indent/unindent/tostring.
    # The Python 2 print statements were syntax errors on Python 3 and
    # tree.write() needs encoding="unicode" to write to a text stream.
    import sys
    tree = xml.etree.ElementTree.ElementTree(xml.etree.ElementTree.XML(
        """<e1 a1="A1">
<e2 a2="A2">E2</e2>
<e3 a3="A3">E3</e3>
<e4><e5/></e4>
<e6/></e1>"""))
    tree.write(sys.stdout, encoding="unicode")
    print()  # a newline after the write of unindented XML
    ElementTreeUtil.indent(tree.getroot())
    tree.write(sys.stdout, encoding="unicode")
    print()  # a newline after the write of indented XML
    print(xml.etree.ElementTree.tostring(tree.getroot(), encoding="unicode"))
    ElementTreeUtil.unindent(tree.getroot())
    tree.write(sys.stdout, encoding="unicode")
    print()  # a newline after the write of unindented XML
    print(ElementTreeUtil.tostring(tree))
    print(ElementTreeUtil.tostring(tree.getroot()))
    print(ElementTreeUtil.tostring(tree, indent=None))
|
from amnesia.exc import AmnesiaError
class PasteError(AmnesiaError):
    """Raised when pasting content into a container fails."""

    def __init__(self, container):
        # The original bare ``super()`` call only built the proxy object and
        # never ran the base-class initializer.
        super().__init__()
        self.container = container

    def __str__(self):
        return 'Paste into container {} failed'.format(self.container.id)
|
from __future__ import unicode_literals
import os
import sys
import threading
from contextlib import contextmanager
from django.contrib.sites.models import Site
from mezzanine.conf import settings
from mezzanine.core.request import current_request
from mezzanine.utils.conf import middlewares_or_subclasses_installed
SITE_PERMISSION_MIDDLEWARE = \
"mezzanine.core.middleware.SitePermissionMiddleware"
def current_site_id():
    """
    Responsible for determining the current ``Site`` instance to use
    when retrieving data for any ``SiteRelated`` models. If we're inside an
    override_current_site_id context manager, return the overriding site ID.
    Otherwise, try to determine the site using the following methods in order:

      - ``site_id`` in session. Used in the admin so that admin users
        can switch sites and stay on the same domain for the admin.
      - The id of the Site object corresponding to the hostname in the current
        request. This result is cached.
      - ``MEZZANINE_SITE_ID`` environment variable, so management
        commands or anything else outside of a request can specify a
        site.
      - ``SITE_ID`` setting.

    If a current request exists and the current site is not overridden, the
    site ID is stored on the request object to speed up subsequent calls.
    """
    # Thread-local override set by override_current_site_id() wins outright.
    if hasattr(override_current_site_id.thread_local, "site_id"):
        return override_current_site_id.thread_local.site_id

    # Imported here rather than at module level: the cache helpers build
    # per-site cache keys by calling this very function.
    from mezzanine.utils.cache import cache_installed, cache_get, cache_set
    request = current_request()
    # A previous call in this request may have stashed the id already.
    site_id = getattr(request, "site_id", None)
    if request and not site_id:
        site_id = request.session.get("site_id", None)
        if not site_id:
            domain = request.get_host().lower()
            if cache_installed():
                # Don't use Mezzanine's cache_key_prefix here, since it
                # uses this very function we're in right now to create a
                # per-site cache key.
                bits = (settings.CACHE_MIDDLEWARE_KEY_PREFIX, domain)
                cache_key = "%s.site_id.%s" % bits
                site_id = cache_get(cache_key)
            if not site_id:
                # Look the site up by the request's host name.
                try:
                    site = Site.objects.get(domain__iexact=domain)
                except Site.DoesNotExist:
                    pass
                else:
                    site_id = site.id
                    if cache_installed():
                        cache_set(cache_key, site_id)
    if not site_id:
        # Fall back to the environment variable, then the SITE_ID setting.
        site_id = os.environ.get("MEZZANINE_SITE_ID", settings.SITE_ID)
    if request and site_id and not getattr(settings, "TESTING", False):
        # Memoize on the request to skip the lookups on subsequent calls.
        request.site_id = site_id
    return site_id
@contextmanager
def override_current_site_id(site_id):
    """
    Context manager that overrides the current site id for code executed
    within it. Used to access SiteRelated objects outside the current site.

    The override is removed in a ``finally`` block so that an exception
    raised inside the managed block cannot leak the overridden site id
    into subsequent code on the same thread.
    """
    override_current_site_id.thread_local.site_id = site_id
    try:
        yield
    finally:
        del override_current_site_id.thread_local.site_id


override_current_site_id.thread_local = threading.local()
def has_site_permission(user):
    """
    Checks if a staff user has staff-level access for the current site.
    The actual permission lookup occurs in ``SitePermissionMiddleware``
    which then marks the request with the ``has_site_permission`` flag,
    so that we only query the db once per request, so this function
    serves as the entry point for everything else to check access. We
    also fall back to an ``is_staff`` check if the middleware is not
    installed, to ease migration.
    """
    middleware_installed = middlewares_or_subclasses_installed(
        [SITE_PERMISSION_MIDDLEWARE])
    if middleware_installed:
        return getattr(user, "has_site_permission", False)
    # Middleware absent: fall back to a plain active-staff check.
    return user.is_staff and user.is_active
def host_theme_path():
    """
    Returns the directory of the theme associated with the given host.

    Returns "" when no HOST_THEMES entry matches the current site's
    domain or the matching theme package cannot be imported.
    """
    # Set domain to None, which we'll then query for in the first
    # iteration of HOST_THEMES. We use the current site_id rather
    # than a request object here, as it may differ for admin users.
    domain = None
    for (host, theme) in settings.HOST_THEMES:
        if domain is None:
            # Lazily resolve the domain only if HOST_THEMES is non-empty.
            domain = Site.objects.get(id=current_site_id()).domain
        if host.lower() == domain.lower():
            try:
                __import__(theme)
                module = sys.modules[theme]
            except ImportError:
                # Theme package missing: keep scanning remaining entries.
                pass
            else:
                return os.path.dirname(os.path.abspath(module.__file__))
    return ""
|
import itertools
import unittest
from parameterized import parameterized
import torch
import torch.nn as nn
from nsoltChannelConcatenation2dLayer import NsoltChannelConcatenation2dLayer
nchs = [ [3, 3], [4, 4] ]
datatype = [ torch.float, torch.double ]
nrows = [ 4, 8, 16 ]
ncols = [ 4, 8, 16 ]
class NsoltChannelConcatenation2dLayerTestCase(unittest.TestCase):
    """
    NSOLTCHANNELCONCATENATION2DLAYERTESTCASE

    Two-component input (only nComponents=2 is supported):
        nSamples x nRows x nCols x (nChsTotal-1)
        nSamples x nRows x nCols

    One-component output (only nComponents=1 is supported):
        nSamples x nRows x nCols x nChsTotal

    Requirements: Python 3.7.x, PyTorch 1.7.x

    Copyright (c) 2020-2021, Shogo MURAMATSU

    All rights reserved.

    Contact address: Shogo MURAMATSU,
        Faculty of Engineering, Niigata University,
        8050 2-no-cho Ikarashi, Nishi-ku,
        Niigata, 950-2181, JAPAN

        http://msiplab.eng.niigata-u.ac.jp/
    """

    def testConstructor(self):
        # Expected values
        expctdName = 'Cn'
        expctdDescription = "Channel concatenation"

        # Instantiation of target class
        layer = NsoltChannelConcatenation2dLayer(
            name=expctdName
        )

        # Actual values
        actualName = layer.name
        actualDescription = layer.description

        # Evaluation
        self.assertTrue(isinstance(layer, nn.Module))
        self.assertEqual(actualName, expctdName)
        self.assertEqual(actualDescription, expctdDescription)

    @parameterized.expand(
        list(itertools.product(nchs, nrows, ncols, datatype))
    )
    def testPredict(self,
                    nchs, nrows, ncols, datatype):
        rtol, atol = 1e-5, 1e-8
        device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

        # Parameters
        nSamples = 8
        nChsTotal = sum(nchs)
        # nSamples x nRows x nCols x (nChsTotal-1)
        Xac = torch.randn(nSamples, nrows, ncols, nChsTotal-1, dtype=datatype, device=device, requires_grad=True)
        # nSamples x nRows x nCols
        Xdc = torch.randn(nSamples, nrows, ncols, dtype=datatype, device=device, requires_grad=True)

        # Expected values
        # nSamples x nRows x nCols x nChsTotal
        # (the DC channel is placed first along the channel axis)
        expctdZ = torch.cat((Xdc.unsqueeze(dim=3), Xac), dim=3)

        # Instantiation of target class
        layer = NsoltChannelConcatenation2dLayer(
            name='Cn'
        )

        # Actual values
        with torch.no_grad():
            actualZ = layer.forward(Xac=Xac, Xdc=Xdc)

        # Evaluation
        self.assertEqual(actualZ.dtype, datatype)
        self.assertTrue(torch.allclose(actualZ, expctdZ, rtol=rtol, atol=atol))
        self.assertFalse(actualZ.requires_grad)

    @parameterized.expand(
        list(itertools.product(nchs, nrows, ncols, datatype))
    )
    def testPredictUnsqueezedXdc(self,
                                 nchs, nrows, ncols, datatype):
        rtol, atol = 1e-5, 1e-8
        device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

        # Parameters
        nSamples = 8
        nChsTotal = sum(nchs)
        # nSamples x nRows x nCols x (nChsTotal-1)
        Xac = torch.randn(nSamples, nrows, ncols, nChsTotal-1, dtype=datatype, device=device, requires_grad=True)
        # nSamples x nRows x nCols x 1
        # (Xdc already carries an explicit singleton channel axis here)
        Xdc = torch.randn(nSamples, nrows, ncols, 1, dtype=datatype, device=device, requires_grad=True)

        # Expected values
        # nSamples x nRows x nCols x nChsTotal
        expctdZ = torch.cat((Xdc, Xac), dim=3)

        # Instantiation of target class
        layer = NsoltChannelConcatenation2dLayer(
            name='Cn'
        )

        # Actual values
        with torch.no_grad():
            actualZ = layer.forward(Xac=Xac, Xdc=Xdc)

        # Evaluation
        self.assertEqual(actualZ.dtype, datatype)
        self.assertTrue(torch.allclose(actualZ, expctdZ, rtol=rtol, atol=atol))
        self.assertFalse(actualZ.requires_grad)

    @parameterized.expand(
        list(itertools.product(nchs, nrows, ncols, datatype))
    )
    def testBackward(self,
                     nchs, nrows, ncols, datatype):
        rtol, atol = 1e-5, 1e-8
        device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

        # Parameters
        nSamples = 8
        nChsTotal = sum(nchs)
        # nSamples x nRows x nCols x (nChsTotal-1)
        Xac = torch.randn(nSamples, nrows, ncols, nChsTotal-1, dtype=datatype, device=device, requires_grad=True)
        # nSamples x nRows x nCols
        Xdc = torch.randn(nSamples, nrows, ncols, dtype=datatype, device=device, requires_grad=True)
        # nSamples x nRows x nCols x nChsTotal
        dLdZ = torch.randn(nSamples, nrows, ncols, nChsTotal, dtype=datatype)
        dLdZ = dLdZ.to(device)

        # Expected values
        # nSamples x nRows x nCols x (nChsTotal-1)
        expctddLdXac = dLdZ[:, :, :, 1:]
        # nSamples x nRows x nCols (integer indexing drops the channel
        # axis, matching the 3-D Xdc input above)
        expctddLdXdc = dLdZ[:, :, :, 0]

        # Instantiation of target class
        layer = NsoltChannelConcatenation2dLayer(
            name='Cn'
        )

        # Actual values
        Z = layer.forward(Xac=Xac, Xdc=Xdc)
        Z.backward(dLdZ)
        actualdLdXac = Xac.grad
        actualdLdXdc = Xdc.grad

        # Evaluation
        self.assertEqual(actualdLdXdc.dtype, datatype)
        self.assertEqual(actualdLdXac.dtype, datatype)
        self.assertTrue(torch.allclose(actualdLdXdc, expctddLdXdc, rtol=rtol, atol=atol))
        self.assertTrue(torch.allclose(actualdLdXac, expctddLdXac, rtol=rtol, atol=atol))
        self.assertTrue(Z.requires_grad)

    @parameterized.expand(
        list(itertools.product(nchs, nrows, ncols, datatype))
    )
    def testBackwardUnsqueezedXdc(self,
                                  nchs, nrows, ncols, datatype):
        rtol, atol = 1e-5, 1e-8
        device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

        # Parameters
        nSamples = 8
        nChsTotal = sum(nchs)
        # nSamples x nRows x nCols x (nChsTotal-1)
        Xac = torch.randn(nSamples, nrows, ncols, nChsTotal-1, dtype=datatype, device=device, requires_grad=True)
        # nSamples x nRows x nCols x 1
        Xdc = torch.randn(nSamples, nrows, ncols, 1, dtype=datatype, device=device, requires_grad=True)
        # nSamples x nRows x nCols x nChsTotal
        dLdZ = torch.randn(nSamples, nrows, ncols, nChsTotal, dtype=datatype)
        dLdZ = dLdZ.to(device)

        # Expected values
        # nSamples x nRows x nCols x (nChsTotal-1)
        expctddLdXac = dLdZ[:, :, :, 1:]
        # nSamples x nRows x nCols x 1 (re-add the singleton channel axis
        # so the gradient shape matches the 4-D Xdc input)
        expctddLdXdc = dLdZ[:, :, :, 0].unsqueeze(dim=3)

        # Instantiation of target class
        layer = NsoltChannelConcatenation2dLayer(
            name='Cn'
        )

        # Actual values
        Z = layer.forward(Xac=Xac, Xdc=Xdc)
        Z.backward(dLdZ)
        actualdLdXac = Xac.grad
        actualdLdXdc = Xdc.grad

        # Evaluation
        self.assertEqual(actualdLdXdc.dtype, datatype)
        self.assertEqual(actualdLdXac.dtype, datatype)
        self.assertTrue(torch.allclose(actualdLdXdc, expctddLdXdc, rtol=rtol, atol=atol))
        self.assertTrue(torch.allclose(actualdLdXac, expctddLdXac, rtol=rtol, atol=atol))
        self.assertTrue(Z.requires_grad)
if __name__ == '__main__':
unittest.main()
|
import random
class Character:
    """Base class for the player and zombies.

    Holds a life total, a randomly rolled health value and the shared
    attack logic."""

    def __init__(self):
        self.name = ""
        self.life = 20
        # random.choice(0, life) was invalid: choice takes one sequence
        # argument, and ``life`` needed the ``self.`` qualifier.
        self.health = random.randint(0, self.life)
        self.zombie_life = 10
        self.zombie_health = random.randint(0, self.zombie_life)

    def attack(self, zombie):
        """Damage *zombie* and report the outcome.

        Returns True when the target's life drops to 0 or below (the
        original compared ``zombie.health`` although the damage was
        applied to ``zombie.life``)."""
        # Clamp to zero so a stronger defender cannot heal the target.
        self.hit = max(0, self.health - self.zombie_health)
        zombie.life -= self.hit
        if self.hit == 0:
            print("..like a nimble sloth, %s evades %s's attack." % (zombie.name, self.name))
        else:
            print("%s inflicts debilitating damage on %s!!" % (self.name, zombie.name))
        return zombie.life <= 0
class Zombie(Character):
    """A randomly named zombie whose health scales with the player's."""

    def __init__(self, player):
        Character.__init__(self)
        # random.choice takes a sequence argument; the original indexed the
        # function itself and then referenced a misspelled name.
        adjective = random.choice(['wretched', 'filthy', 'disgusting', 'oozing'])
        # Build a single string (the original produced a tuple).
        self.name = "a " + adjective + " zombie"
        # random.choice(player.health) was invalid for an int argument.
        self.health = random.randint(1, max(1, player.health))
class Player(Character):
    """The human-controlled character.

    Runs a small state machine: ``state`` is 'normal' while exploring and
    'fight' while a zombie (held in ``self.zombie``) is engaged."""

    def __init__(self):
        Character.__init__(self)
        # The original set ``level`` but every method read ``state``.
        self.state = 'normal'
        self.level = 'normal'
        self.zombie = None
        self.health = 10
        self.health_max = 10

    def quit(self):
        # ``%`` was applied to print's return value in the original.
        print("The zombie virus has infected %s. You are now undead and crave brains." % self.name)
        self.health = 0

    def help(self):
        # The command table is local to menu(); keep this list in sync.
        print(['quit', 'help', 'status', 'rest', 'look around', 'flee', 'attack'])

    def status(self):
        print("%s's health: %d/%d" % (self.name, self.health, self.health_max))

    def weak(self):
        print("%s is cold, hungry and tired." % self.name)
        self.health = max(1, self.health - 1)

    def rest(self):
        # random.choice takes a sequence; the original indexed the function.
        hiding_spot = random.choice(['under a desk', 'in a locker', 'in a closet'])
        if self.state != 'normal':
            print("keep moving %s, zombies coming in hot!" % self.name)
        else:
            print("%s hides %s and takes a breather." % (self.name, hiding_spot))
            if random.randint(0, 1):
                # Resting is risky: a zombie may find the hiding spot.
                self.zombie = Zombie(self)
                print("%s is surprised by %s" % (self.name, self.zombie.name))
                self.state = 'fight'
                self.zombie_attacks()
            elif self.health < self.health_max:
                self.health = self.health + 1
            else:
                print("%s has hidden too long." % self.name)
                self.health -= 1

    def look_around(self):
        if self.state != 'normal':
            print("%s runs into %s" % (self.name, self.zombie.name))
            self.zombie_attacks()
        else:
            # ``look`` was referenced before assignment in the original.
            room = random.choice(["gymnasium", "library", "metal shop", "cafeteria"])
            print("%s runs into the %s" % (self.name, room))
            if random.randint(0, 1):
                self.zombie = Zombie(self)
                print("%s encounters %s!" % (self.name, self.zombie.name))
                self.state = 'fight'
            elif random.randint(0, 1):
                # ``tired`` never existed; ``weak`` is the fatigue handler.
                self.weak()

    def flee(self):
        if self.state != 'fight':
            print("%s runs down a corridor" % self.name)
            self.weak()
        elif random.randint(1, self.health + 5) > random.randint(1, self.zombie.health):
            print("%s flees from %s." % (self.name, self.zombie.name))
            self.zombie = None
            self.state = 'normal'
        else:
            print("%s couldn't escape from %s!" % (self.name, self.zombie.name))
            self.zombie_attacks()

    def attack(self):
        # The class defined ``attack`` twice; this keeps the later (real)
        # version. ``do_damage`` never existed -- the shared damage logic
        # lives in Character.attack, called explicitly since this method
        # overrides it.
        if self.state != 'fight':
            print("%s flails in the air like a twit." % self.name)
            self.weak()
        elif Character.attack(self, self.zombie):
            print("%s decapitates %s!" % (self.name, self.zombie.name))
            self.zombie = None
            self.state = 'normal'
            # Chance of a small level-up after a kill.
            if random.randint(0, 9) < 5:
                self.health += 1
                self.health_max += 1
                print("%s is rejuvenated" % self.name)
        else:
            self.zombie_attacks()

    def zombie_attacks(self):
        # The zombie strikes back using the base-class attack logic.
        if self.zombie.attack(self):
            print("%s's brains were devoured by %s!!!\n"
                  "you are undead and crave BRAINS!!\n"
                  "unless you're a veggetarian then seek GRAINS!!"
                  % (self.name, self.zombie.name))
            # End the game loop, which watches ``health``.
            self.health = 0
def menu():
    """Run the interactive game loop: read commands and dispatch them to
    Player methods by prefix match on the first word."""
    commands = {
        'quit': Player.quit,
        'help': Player.help,
        'status': Player.status,
        'rest': Player.rest,
        'look around': Player.look_around,
        'flee': Player.flee,
        'attack': Player.attack,
    }
    hero = Player()
    # raw_input/print statements were Python 2 syntax.
    hero.name = input("What is your character's name? ")
    print("(type help to get a list of actions)\n")
    print("""When %s leaves homeroom, they notice
a strange stench in the air
maybe we are dissecting a frog in biology today...""" % hero.name)
    # The original looped on an undefined name ``p``; the player is ``hero``.
    while hero.health > 0:
        line = input("> ")
        args = line.split()
        if args:
            for command, action in commands.items():
                # Prefix match: e.g. "look" triggers "look around".
                if args[0] == command[:len(args[0])]:
                    action(hero)
                    break
            else:
                print("%s is confused, enter a command" % hero.name)
"""
living on the outskirts of a government national lab
has it's pros and cons. when the kids in school
say that a rouge virus has started to infect people
and turn them into zombies, you laugh it off.
"""
|
from .elementwise import (
Unary,
Binary,
ColsBinary,
Reduce,
func2class_name,
unary_module,
make_unary,
binary_module,
make_binary,
reduce_module,
make_reduce,
binary_dict_int_tst,
unary_dict_gen_tst,
binary_dict_gen_tst,
)
from .linear_map import LinearMap
from .nexpr import NumExprABC
from .mixufunc import make_local, make_local_dict, get_ufunc_args, MixUfuncABC
from ._elementwise import (
BitwiseNot,
Absolute,
Arccos,
Arccosh,
Arcsin,
Arcsinh,
Arctan,
Arctanh,
Cbrt,
Ceil,
Conj,
Conjugate,
Cos,
Cosh,
Deg2rad,
Degrees,
Exp,
Exp2,
Expm1,
Fabs,
Floor,
Frexp,
Invert,
Isfinite,
Isinf,
Isnan,
Isnat,
Log,
Log10,
Log1p,
Log2,
LogicalNot,
Modf,
Negative,
Positive,
Rad2deg,
Radians,
Reciprocal,
Rint,
Sign,
Signbit,
Sin,
Sinh,
Spacing,
Sqrt,
Square,
Tan,
Tanh,
Trunc,
Abs,
Add,
Arctan2,
BitwiseAnd,
BitwiseOr,
BitwiseXor,
Copysign,
Divide,
Divmod,
Equal,
FloorDivide,
FloatPower,
Fmax,
Fmin,
Fmod,
Gcd,
Greater,
GreaterEqual,
Heaviside,
Hypot,
Lcm,
Ldexp,
LeftShift,
Less,
LessEqual,
Logaddexp,
Logaddexp2,
LogicalAnd,
LogicalOr,
LogicalXor,
Maximum,
Minimum,
Mod,
Multiply,
Nextafter,
NotEqual,
Power,
Remainder,
RightShift,
Subtract,
TrueDivide,
ColsAdd,
ColsArctan2,
ColsBitwiseAnd,
ColsBitwiseOr,
ColsBitwiseXor,
ColsCopysign,
ColsDivide,
ColsDivmod,
ColsEqual,
ColsFloorDivide,
ColsFloatPower,
ColsFmax,
ColsFmin,
ColsFmod,
ColsGcd,
ColsGreater,
ColsGreaterEqual,
ColsHeaviside,
ColsHypot,
ColsLcm,
ColsLdexp,
ColsLeftShift,
ColsLess,
ColsLessEqual,
ColsLogaddexp,
ColsLogaddexp2,
ColsLogicalAnd,
ColsLogicalOr,
ColsLogicalXor,
ColsMaximum,
ColsMinimum,
ColsMod,
ColsMultiply,
ColsNextafter,
ColsNotEqual,
ColsPower,
ColsRemainder,
ColsRightShift,
ColsSubtract,
ColsTrueDivide,
AddReduce,
Arctan2Reduce,
BitwiseAndReduce,
BitwiseOrReduce,
BitwiseXorReduce,
CopysignReduce,
DivideReduce,
DivmodReduce,
EqualReduce,
FloorDivideReduce,
FloatPowerReduce,
FmaxReduce,
FminReduce,
FmodReduce,
GcdReduce,
GreaterReduce,
GreaterEqualReduce,
HeavisideReduce,
HypotReduce,
LcmReduce,
LdexpReduce,
LeftShiftReduce,
LessReduce,
LessEqualReduce,
LogaddexpReduce,
Logaddexp2Reduce,
LogicalAndReduce,
LogicalOrReduce,
LogicalXorReduce,
MaximumReduce,
MinimumReduce,
ModReduce,
MultiplyReduce,
NextafterReduce,
NotEqualReduce,
PowerReduce,
RemainderReduce,
RightShiftReduce,
SubtractReduce,
TrueDivideReduce,
)
__all__ = [
"Unary",
"Binary",
"ColsBinary",
"Reduce",
"func2class_name",
"unary_module",
"make_unary",
"binary_module",
"make_binary",
"reduce_module",
"make_reduce",
"binary_dict_int_tst",
"unary_dict_gen_tst",
"binary_dict_gen_tst",
"LinearMap",
"NumExprABC",
"make_local",
"make_local_dict",
"get_ufunc_args",
"MixUfuncABC",
"BitwiseNot",
"Absolute",
"Arccos",
"Arccosh",
"Arcsin",
"Arcsinh",
"Arctan",
"Arctanh",
"Cbrt",
"Ceil",
"Conj",
"Conjugate",
"Cos",
"Cosh",
"Deg2rad",
"Degrees",
"Exp",
"Exp2",
"Expm1",
"Fabs",
"Floor",
"Frexp",
"Invert",
"Isfinite",
"Isinf",
"Isnan",
"Isnat",
"Log",
"Log10",
"Log1p",
"Log2",
"LogicalNot",
"Modf",
"Negative",
"Positive",
"Rad2deg",
"Radians",
"Reciprocal",
"Rint",
"Sign",
"Signbit",
"Sin",
"Sinh",
"Spacing",
"Sqrt",
"Square",
"Tan",
"Tanh",
"Trunc",
"Abs",
"Add",
"Arctan2",
"BitwiseAnd",
"BitwiseOr",
"BitwiseXor",
"Copysign",
"Divide",
"Divmod",
"Equal",
"FloorDivide",
"FloatPower",
"Fmax",
"Fmin",
"Fmod",
"Gcd",
"Greater",
"GreaterEqual",
"Heaviside",
"Hypot",
"Lcm",
"Ldexp",
"LeftShift",
"Less",
"LessEqual",
"Logaddexp",
"Logaddexp2",
"LogicalAnd",
"LogicalOr",
"LogicalXor",
"Maximum",
"Minimum",
"Mod",
"Multiply",
"Nextafter",
"NotEqual",
"Power",
"Remainder",
"RightShift",
"Subtract",
"TrueDivide",
"ColsAdd",
"ColsArctan2",
"ColsBitwiseAnd",
"ColsBitwiseOr",
"ColsBitwiseXor",
"ColsCopysign",
"ColsDivide",
"ColsDivmod",
"ColsEqual",
"ColsFloorDivide",
"ColsFloatPower",
"ColsFmax",
"ColsFmin",
"ColsFmod",
"ColsGcd",
"ColsGreater",
"ColsGreaterEqual",
"ColsHeaviside",
"ColsHypot",
"ColsLcm",
"ColsLdexp",
"ColsLeftShift",
"ColsLess",
"ColsLessEqual",
"ColsLogaddexp",
"ColsLogaddexp2",
"ColsLogicalAnd",
"ColsLogicalOr",
"ColsLogicalXor",
"ColsMaximum",
"ColsMinimum",
"ColsMod",
"ColsMultiply",
"ColsNextafter",
"ColsNotEqual",
"ColsPower",
"ColsRemainder",
"ColsRightShift",
"ColsSubtract",
"ColsTrueDivide",
"AddReduce",
"Arctan2Reduce",
"BitwiseAndReduce",
"BitwiseOrReduce",
"BitwiseXorReduce",
"CopysignReduce",
"DivideReduce",
"DivmodReduce",
"EqualReduce",
"FloorDivideReduce",
"FloatPowerReduce",
"FmaxReduce",
"FminReduce",
"FmodReduce",
"GcdReduce",
"GreaterReduce",
"GreaterEqualReduce",
"HeavisideReduce",
"HypotReduce",
"LcmReduce",
"LdexpReduce",
"LeftShiftReduce",
"LessReduce",
"LessEqualReduce",
"LogaddexpReduce",
"Logaddexp2Reduce",
"LogicalAndReduce",
"LogicalOrReduce",
"LogicalXorReduce",
"MaximumReduce",
"MinimumReduce",
"ModReduce",
"MultiplyReduce",
"NextafterReduce",
"NotEqualReduce",
"PowerReduce",
"RemainderReduce",
"RightShiftReduce",
"SubtractReduce",
"TrueDivideReduce",
]
|
from indra.statements import *
from rdflib import URIRef, Namespace
from rdflib.namespace import RDF
import collections
import urllib
import re
import keyword
BEL = Namespace("http://www.openbel.org/")
prefixes = """
PREFIX belvoc: <http://www.openbel.org/vocabulary/>
PREFIX belsc: <http://www.openbel.org/bel/>
PREFIX belns: <http://www.openbel.org/bel/namespace/>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>"""
phospho_mods = [
'PhosphorylationSerine',
'PhosphorylationThreonine',
'PhosphorylationTyrosine',
'Phosphorylation',
]
class InvalidNameError(ValueError):
    """Raised when a URI term cannot be used as a Python identifier."""

    def __init__(self, name):
        message = "Not a valid name: %s" % name
        super(InvalidNameError, self).__init__(message)
def name_from_uri(uri):
    """Make the URI term usable as a valid Python identifier, if possible.

    First strips off the extra URI information by calling term_from_uri,
    then checks to make sure the name is a valid Python identifier.
    Currently fixes identifiers starting with numbers by prepending with
    the letter 'p'. For other cases it raises an exception.

    This function should be called when the string that is returned is to be
    used as a PySB component name, which are required to be valid Python
    identifiers.
    """
    name = term_from_uri(uri)
    # Identifiers cannot start with a digit; prepend 'p' as a fix-up.
    if name[0].isdigit():
        name = 'p' + name
    is_identifier = bool(re.match("[_A-Za-z][_a-zA-Z0-9]*$", name))
    if not is_identifier or keyword.iskeyword(name):
        raise InvalidNameError(name)
    return name
def gene_name_from_uri(uri):
    """Return the URI term as an upper-cased, identifier-safe gene name."""
    name = name_from_uri(uri)
    return name.upper()
def term_from_uri(uri):
    """Basic conversion of RDF URIs to more friendly strings.

    Removes prepended URI information, and replaces spaces, hyphens and
    periods with underscores. Returns None when *uri* is None.
    """
    if uri is None:
        return None
    # urllib.unquote moved to urllib.parse in Python 3; support both.
    try:
        from urllib.parse import unquote
    except ImportError:
        from urllib import unquote
    # Strip gene name off from URI
    term = uri.rsplit('/')[-1]
    # Decode URL to handle spaces, special characters
    term = unquote(term)
    # Replace any spaces, hyphens, or periods with underscores
    term = term.replace(' ', '_')
    term = term.replace('-', '_')
    term = term.replace('.', '_')
    # Drop non-ASCII characters; decode back so the result stays text
    # (a bare .encode() would return bytes on Python 3).
    term = term.encode('ascii', 'ignore').decode('ascii')
    return term
def strip_statement(uri):
    """Strip the openbel.org bel/vocabulary URI prefixes from *uri*."""
    for prefix in ('http://www.openbel.org/bel/',
                   'http://www.openbel.org/vocabulary/'):
        uri = uri.replace(prefix, '')
    return uri
class BelProcessor(object):
    def __init__(self, g):
        # g: an RDF graph; queried via self.g.query() with SPARQL strings
        # built from the module-level ``prefixes``.
        self.g = g
        # INDRA statement objects produced by the get_* extraction methods.
        self.statements = []
        # Bookkeeping lists of BEL statement strings by processing outcome.
        self.all_stmts = []
        self.converted_stmts = []
        self.degenerate_stmts = []
        self.indirect_stmts = []
def get_evidence(self, statement):
evidence = None
citation = None
annotations = []
# Query for evidence text and citation
q_evidence = prefixes + """
SELECT ?evidenceText ?citation
WHERE {
<%s> belvoc:hasEvidence ?evidence .
?evidence belvoc:hasEvidenceText ?evidenceText .
?evidence belvoc:hasCitation ?citation .
}
""" % statement.format()
res_evidence = self.g.query(q_evidence)
for stmt in res_evidence:
try:
evidence = stmt[0].format()
citation = stmt[1].format()
except KeyError:
warnings.warn('Problem converting evidence/citation string')
# Query for all annotations of the statement
q_annotations = prefixes + """
SELECT ?annotation
WHERE {
<%s> belvoc:hasEvidence ?evidence .
?evidence belvoc:hasAnnotation ?annotation .
}
""" % statement.format()
res_annotations = self.g.query(q_annotations)
for stmt in res_annotations:
annotations.append(stmt[0].format())
return (citation, evidence, annotations)
def get_modifications(self):
q_phospho = prefixes + """
SELECT ?enzName ?actType ?substrateName ?mod ?pos
?stmt
WHERE {
?stmt a belvoc:Statement .
?stmt belvoc:hasRelationship belvoc:DirectlyIncreases .
?stmt belvoc:hasSubject ?subject .
?stmt belvoc:hasObject ?object .
?subject a belvoc:AbundanceActivity .
?subject belvoc:hasActivityType ?actType .
?subject belvoc:hasChild ?enzyme .
?enzyme a belvoc:ProteinAbundance .
?enzyme belvoc:hasConcept ?enzName .
?object a belvoc:ModifiedProteinAbundance .
?object belvoc:hasModificationType ?mod .
?object belvoc:hasChild ?substrate .
?substrate belvoc:hasConcept ?substrateName .
OPTIONAL { ?object belvoc:hasModificationPosition ?pos . }
}
"""
# Now make the PySB for the phosphorylation
res_phospho = self.g.query(q_phospho)
for stmt in res_phospho:
(citation, evidence, annotations) = self.get_evidence(stmt[5])
# Parse out the elements of the query
enz_name = gene_name_from_uri(stmt[0])
enz = Agent(enz_name)
act_type = name_from_uri(stmt[1])
sub_name = gene_name_from_uri(stmt[2])
sub = Agent(sub_name)
mod = term_from_uri(stmt[3])
mod_pos = term_from_uri(stmt[4])
stmt_str = strip_statement(stmt[5])
# Mark this as a converted statement
self.converted_stmts.append(stmt_str)
if act_type == 'Kinase' and mod in phospho_mods:
self.statements.append(
Phosphorylation(enz, sub, mod, mod_pos, stmt_str,
citation, evidence, annotations))
elif act_type == 'Catalytic':
if mod == 'Hydroxylation':
self.statements.append(
Hydroxylation(enz, sub, mod, mod_pos, stmt_str,
citation, evidence, annotations))
elif mod == 'Sumoylation':
self.statements.append(
Sumoylation(enz, sub, mod, mod_pos, stmt_str,
citation, evidence, annotations))
elif mod == 'Acetylation':
self.statements.append(
Acetylation(enz, sub, mod, mod_pos, stmt_str,
citation, evidence, annotations))
elif mod == 'Ubiquitination':
self.statements.append(
Ubiquitination(enz, sub, mod, mod_pos, stmt_str,
citation, evidence, annotations))
else:
print "Warning: Unknown modification type!"
print("Activity: %s, Mod: %s, Mod_Pos: %s" %
(act_type, mod, mod_pos))
else:
print "Warning: Unknown modification type!"
print("Activity: %s, Mod: %s, Mod_Pos: %s" %
(act_type, mod, mod_pos))
def get_dephosphorylations(self):
    """Extract dephosphorylation events from the BEL RDF graph.

    Matches DirectlyDecreases statements whose subject carries
    Phosphatase activity over a protein abundance and whose object is a
    modified protein abundance. Each match is converted into a
    Dephosphorylation statement appended to self.statements, and its
    stripped string form is recorded in self.converted_stmts.
    """
    # Result columns (by index): 0=phosName, 1=substrateName, 2=mod,
    # 3=pos, 4=stmt. The modification position is OPTIONAL, so stmt[3]
    # may be unbound in a result row.
    q_phospho = prefixes + """
        SELECT ?phosName ?substrateName ?mod ?pos ?stmt
        WHERE {
            ?stmt a belvoc:Statement .
            ?stmt belvoc:hasRelationship belvoc:DirectlyDecreases .
            ?stmt belvoc:hasSubject ?subject .
            ?stmt belvoc:hasObject ?object .
            ?subject belvoc:hasActivityType belvoc:Phosphatase .
            ?subject belvoc:hasChild ?phosphatase .
            ?phosphatase a belvoc:ProteinAbundance .
            ?phosphatase belvoc:hasConcept ?phosName .
            ?object a belvoc:ModifiedProteinAbundance .
            ?object belvoc:hasModificationType ?mod .
            ?object belvoc:hasChild ?substrate .
            ?substrate belvoc:hasConcept ?substrateName .
            OPTIONAL { ?object belvoc:hasModificationPosition ?pos . }
        }
    """
    # Now make the PySB for the phosphorylation
    res_phospho = self.g.query(q_phospho)
    for stmt in res_phospho:
        # Evidence is keyed off the statement URI in the last column.
        (citation, evidence, annotations) = self.get_evidence(stmt[4])
        # Parse out the elements of the query
        phos_name = gene_name_from_uri(stmt[0])
        phos = Agent(phos_name)
        sub_name = gene_name_from_uri(stmt[1])
        sub = Agent(sub_name)
        mod = term_from_uri(stmt[2])
        mod_pos = term_from_uri(stmt[3])
        stmt_str = strip_statement(stmt[4])
        # Mark this as a converted statement
        self.converted_stmts.append(stmt_str)
        self.statements.append(
            Dephosphorylation(phos, sub, mod, mod_pos,
                              stmt_str, citation,
                              evidence, annotations))
def get_composite_activating_mods(self):
    """Extract statements where a pair of modifications on one protein
    (a CompositeAbundance of two ModifiedProteinAbundances) directly
    increases or decreases that protein's activity, converting each
    match into an ActivityModification statement with a two-element
    modification tuple.
    """
    # To eliminate multiple matches, we use pos1 < pos2 but this will
    # only work if the pos is given, otherwise multiple matches of
    # the same mod combination may appear in the result
    q_mods = prefixes + """
        SELECT ?speciesName ?actType ?mod1 ?pos1 ?mod2 ?pos2 ?rel ?stmt
        WHERE {
            ?stmt a belvoc:Statement .
            ?stmt belvoc:hasRelationship ?rel .
            ?stmt belvoc:hasSubject ?subject .
            ?stmt belvoc:hasObject ?object .
            ?object belvoc:hasActivityType ?actType .
            ?object belvoc:hasChild ?species .
            ?species a belvoc:ProteinAbundance .
            ?species belvoc:hasConcept ?speciesName .
            ?subject a belvoc:CompositeAbundance .
            ?subject belvoc:hasChild ?subject1 .
            ?subject1 a belvoc:ModifiedProteinAbundance .
            ?subject1 belvoc:hasModificationType ?mod1 .
            ?subject1 belvoc:hasChild ?species .
            ?subject belvoc:hasChild ?subject2 .
            ?subject2 a belvoc:ModifiedProteinAbundance .
            ?subject2 belvoc:hasModificationType ?mod2 .
            ?subject2 belvoc:hasChild ?species .
            OPTIONAL { ?subject1 belvoc:hasModificationPosition ?pos1 . }
            OPTIONAL { ?subject2 belvoc:hasModificationPosition ?pos2 . }
            FILTER ((?rel = belvoc:DirectlyIncreases ||
                     ?rel = belvoc:DirectlyDecreases) &&
                    ?pos1 < ?pos2)
        }
    """
    # Now make the PySB for the phosphorylation
    res_mods = self.g.query(q_mods)
    for stmt in res_mods:
        # Result columns: 0=speciesName, 1=actType, 2=mod1, 3=pos1,
        # 4=mod2, 5=pos2, 6=rel, 7=stmt.
        (citation, evidence, annotations) = self.get_evidence(stmt[7])
        # Parse out the elements of the query
        species_name = gene_name_from_uri(stmt[0])
        species = Agent(species_name)
        act_type = term_from_uri(stmt[1])
        mod1 = term_from_uri(stmt[2])
        mod_pos1 = term_from_uri(stmt[3])
        mod2 = term_from_uri(stmt[4])
        mod_pos2 = term_from_uri(stmt[5])
        rel = term_from_uri(stmt[6])
        # Normalize the BEL relationship name to 'increases'/'decreases'.
        if rel == 'DirectlyDecreases':
            rel = 'decreases'
        else:
            rel = 'increases'
        stmt_str = strip_statement(stmt[7])
        # Mark this as a converted statement
        self.converted_stmts.append(stmt_str)
        self.statements.append(
            ActivityModification(species, (mod1, mod2),
                                 (mod_pos1, mod_pos2),
                                 rel, act_type, stmt_str,
                                 citation, evidence, annotations))
def get_activating_mods(self):
    """Extract statements where a single modification on a protein
    directly increases or decreases that protein's activity, converting
    each match into an ActivityModification statement with one-element
    modification tuples.
    """
    q_mods = prefixes + """
        SELECT ?speciesName ?actType ?mod ?pos ?rel ?stmt
        WHERE {
            ?stmt a belvoc:Statement .
            ?stmt belvoc:hasRelationship ?rel .
            ?stmt belvoc:hasSubject ?subject .
            ?stmt belvoc:hasObject ?object .
            ?object belvoc:hasActivityType ?actType .
            ?object belvoc:hasChild ?species .
            ?species a belvoc:ProteinAbundance .
            ?species belvoc:hasConcept ?speciesName .
            ?subject a belvoc:ModifiedProteinAbundance .
            ?subject belvoc:hasModificationType ?mod .
            ?subject belvoc:hasChild ?species .
            OPTIONAL { ?subject belvoc:hasModificationPosition ?pos . }
            FILTER (?rel = belvoc:DirectlyIncreases ||
                    ?rel = belvoc:DirectlyDecreases)
        }
    """
    # Now make the PySB for the phosphorylation
    res_mods = self.g.query(q_mods)
    for stmt in res_mods:
        # Result columns: 0=speciesName, 1=actType, 2=mod, 3=pos,
        # 4=rel, 5=stmt. ?pos is OPTIONAL and may be unbound.
        (citation, evidence, annotations) = self.get_evidence(stmt[5])
        # Parse out the elements of the query
        species_name = gene_name_from_uri(stmt[0])
        species = Agent(species_name)
        act_type = term_from_uri(stmt[1])
        mod = term_from_uri(stmt[2])
        mod_pos = term_from_uri(stmt[3])
        rel = term_from_uri(stmt[4])
        # Normalize the BEL relationship name to 'increases'/'decreases'.
        if rel == 'DirectlyDecreases':
            rel = 'decreases'
        else:
            rel = 'increases'
        stmt_str = strip_statement(stmt[5])
        # Mark this as a converted statement
        self.converted_stmts.append(stmt_str)
        self.statements.append(
            ActivityModification(species, (mod,), (mod_pos,), rel,
                                 act_type, stmt_str,
                                 citation, evidence, annotations))
def get_complexes(self):
    """Collect every ComplexAbundance term in the corpus and convert
    each one with at least two members into a Complex statement."""
    query = prefixes + """
        SELECT ?complexTerm ?childName
        WHERE {
            ?complexTerm a belvoc:Term .
            ?complexTerm a belvoc:ComplexAbundance .
            ?complexTerm belvoc:hasChild ?child .
            ?child belvoc:hasConcept ?childName .
        }
    """
    # Group member Agents by the URI term naming the complex, since the
    # query yields one row per (complex, member) pair.
    members_by_complex = collections.defaultdict(list)
    for row in self.g.query(query):
        complex_name = term_from_uri(row[0])
        member = Agent(gene_name_from_uri(row[1]))
        members_by_complex[complex_name].append(member)
    # Emit one Complex statement per grouping; warn on degenerate ones.
    for complex_name, members in members_by_complex.iteritems():
        if len(members) >= 2:
            self.statements.append(Complex(members))
        else:
            warnings.warn('Complex %s has less than 2 members! Skipping.' %
                          complex_name)
def get_activating_subs(self):
    """Extract activating amino-acid substitutions, e.g.:

    p_HGNC_NRAS_sub_Q_61_K_DirectlyIncreases_gtp_p_HGNC_NRAS
    p_HGNC_KRAS_sub_G_12_R_DirectlyIncreases_gtp_p_PFH_RAS_Family
    p_HGNC_BRAF_sub_V_600_E_DirectlyIncreases_kin_p_HGNC_BRAF

    Each match yields an ActivatingSubstitution statement. Rows whose
    substitution label cannot be parsed are reported and skipped.
    """
    # Note: the subject and object are required to share ?enzyme_name,
    # i.e. the substituted protein activates its own activity type.
    q_mods = prefixes + """
        SELECT ?enzyme_name ?sub_label ?act_type ?stmt
        WHERE {
            ?stmt a belvoc:Statement .
            ?stmt belvoc:hasRelationship belvoc:DirectlyIncreases .
            ?stmt belvoc:hasSubject ?subject .
            ?stmt belvoc:hasObject ?object .
            ?subject a belvoc:ProteinAbundance .
            ?subject belvoc:hasConcept ?enzyme_name .
            ?subject belvoc:hasChild ?sub_expr .
            ?sub_expr rdfs:label ?sub_label .
            ?object a belvoc:AbundanceActivity .
            ?object belvoc:hasActivityType ?act_type .
            ?object belvoc:hasChild ?enzyme .
            ?enzyme a belvoc:ProteinAbundance .
            ?enzyme belvoc:hasConcept ?enzyme_name .
        }
    """
    # Now make the PySB for the phosphorylation
    res_mods = self.g.query(q_mods)
    for stmt in res_mods:
        # Result columns: 0=enzyme_name, 1=sub_label, 2=act_type, 3=stmt.
        (citation, evidence, annotations) = self.get_evidence(stmt[3])
        # Parse out the elements of the query
        enz_name = gene_name_from_uri(stmt[0])
        enz = Agent(enz_name)
        sub_expr = term_from_uri(stmt[1])
        act_type = term_from_uri(stmt[2])
        # Parse the WT and substituted residues from the node label.
        # Strangely, the RDF for substituted residue doesn't break the
        # terms of the BEL expression down into their meaning, as happens
        # for modified protein abundances. Instead, the substitution
        # just comes back as a string, e.g., "sub(V,600,E)". This code
        # parses the arguments back out using a regular expression.
        match = re.match('sub\(([A-Z]),([0-9]*),([A-Z])\)', sub_expr)
        if match:
            matches = match.groups()
            wt_residue = matches[0]
            position = matches[1]
            sub_residue = matches[2]
        else:
            # Unparseable label: report and skip this row entirely.
            print("Warning: Could not parse substitution expression %s" %
                  sub_expr)
            continue
        stmt_str = strip_statement(stmt[3])
        # Mark this as a converted statement
        self.converted_stmts.append(stmt_str)
        self.statements.append(
            ActivatingSubstitution(enz, wt_residue, position,
                                   sub_residue, act_type,
                                   stmt_str,
                                   citation, evidence, annotations))
def get_activity_activity(self):
    """Extract statements where one protein's activity directly
    increases/decreases another protein's activity (no modification
    referenced), classifying matches into RasGtpActivityActivity,
    RasGef, RasGap, or generic ActivityActivity statements.
    """
    # Query for all statements where the activity of one protein
    # directlyIncreases the activity of another protein, without reference
    # to a modification.
    q_stmts = prefixes + """
        SELECT ?subjName ?subjActType ?rel ?objName ?objActType
               ?stmt
        WHERE {
            ?stmt a belvoc:Statement .
            ?stmt belvoc:hasSubject ?subj .
            ?stmt belvoc:hasObject ?obj .
            ?stmt belvoc:hasRelationship ?rel .
            ?subj belvoc:hasActivityType ?subjActType .
            ?subj belvoc:hasChild ?subjProt .
            ?subjProt belvoc:hasConcept ?subjName .
            ?obj belvoc:hasActivityType ?objActType .
            ?obj belvoc:hasChild ?objProt .
            ?objProt belvoc:hasConcept ?objName .
            FILTER (?rel = belvoc:DirectlyIncreases ||
                    ?rel = belvoc:DirectlyDecreases)
        }
    """
    res_stmts = self.g.query(q_stmts)
    for stmt in res_stmts:
        (citation, evidence, annotations) = self.get_evidence(stmt[5])
        subj_name = gene_name_from_uri(stmt[0])
        subj = Agent(subj_name)
        subj_activity = name_from_uri(stmt[1])
        rel = term_from_uri(stmt[2])
        # Normalize the BEL relationship name to 'increases'/'decreases'.
        if rel == 'DirectlyDecreases':
            rel = 'decreases'
        else:
            rel = 'increases'
        obj_name = gene_name_from_uri(stmt[3])
        obj = Agent(obj_name)
        obj_activity = name_from_uri(stmt[4])
        stmt_str = strip_statement(stmt[5])
        # Mark this as a converted statement
        self.converted_stmts.append(stmt_str)
        # Distinguish the case when the activator is a RasGTPase
        # (since this may involve unique and stereotyped mechanisms)
        if subj_activity == 'GtpBound':
            self.statements.append(
                RasGtpActivityActivity(subj, subj_activity,
                                       rel, obj, obj_activity,
                                       stmt_str,
                                       citation, evidence, annotations))
        # If the object is a Ras-like GTPase, and the subject *increases*
        # its GtpBound activity, then the subject is a RasGEF.
        # BUG FIX: `rel` has already been normalized above, so the
        # original comparisons against 'DirectlyIncreases'/
        # 'DirectlyDecreases' could never match, making the RasGef and
        # RasGap branches unreachable. Compare the normalized values.
        elif obj_activity == 'GtpBound' and rel == 'increases':
            self.statements.append(
                RasGef(subj, subj_activity, obj,
                       stmt_str, citation, evidence, annotations))
        # If the object is a Ras-like GTPase, and the subject *decreases*
        # its GtpBound activity, then the subject is a RasGAP
        elif obj_activity == 'GtpBound' and rel == 'decreases':
            self.statements.append(
                RasGap(subj, subj_activity, obj,
                       stmt_str, citation, evidence, annotations))
        # Otherwise, create a generic Activity->Activity statement
        else:
            self.statements.append(
                ActivityActivity(subj, subj_activity,
                                 rel, obj, obj_activity,
                                 stmt_str,
                                 citation, evidence, annotations))
def get_all_direct_statements(self):
"""Get all directlyIncreases/Decreases statements in the corpus.
Stores the results of the query in self.all_stmts.
"""
print "Getting all direct statements...\n"
q_stmts = prefixes + """
SELECT ?stmt
WHERE {
?stmt a belvoc:Statement .
?stmt belvoc:hasSubject ?subj .
?stmt belvoc:hasObject ?obj .
{
{ ?subj a belvoc:AbundanceActivity . }
UNION
{ ?subj a belvoc:ComplexAbundance . }
UNION
{ ?subj a belvoc:ProteinAbundance . }
UNION
{ ?subj a belvoc:ModifiedProteinAbundance . }
}
{
{ ?obj a belvoc:AbundanceActivity . }
UNION
{ ?obj a belvoc:ComplexAbundance . }
UNION
{ ?obj a belvoc:ProteinAbundance . }
UNION
{ ?obj a belvoc:ModifiedProteinAbundance . }
}
{
{ ?stmt belvoc:hasRelationship belvoc:DirectlyIncreases . }
UNION
{ ?stmt belvoc:hasRelationship belvoc:DirectlyDecreases . }
}
}
"""
q_stmts = prefixes + """
SELECT ?stmt
WHERE {
?stmt a belvoc:Statement .
{
{ ?stmt belvoc:hasRelationship belvoc:DirectlyIncreases . }
UNION
{ ?stmt belvoc:hasRelationship belvoc:DirectlyDecreases . }
}
}
"""
res_stmts = self.g.query(q_stmts)
self.all_stmts = [strip_statement(stmt[0]) for stmt in res_stmts]
def get_indirect_statements(self):
    """Query for indirect (Increases/Decreases) statements and store
    their stripped string forms in self.indirect_stmts."""
    query = prefixes + """
        SELECT ?stmt
        WHERE {
            ?stmt a belvoc:Statement .
            {
              { ?stmt belvoc:hasRelationship belvoc:Increases . }
              UNION
              { ?stmt belvoc:hasRelationship belvoc:Decreases . }
            }
        }
    """
    rows = self.g.query(query)
    self.indirect_stmts = [strip_statement(row[0]) for row in rows]
def get_degenerate_statements(self):
    """Find direct statements of the form protein X -> protein/activity Y
    that carry no modification information ('degenerate' statements),
    print them, and record them in self.degenerate_stmts.
    """
    print "Checking for 'degenerate' statements...\n"
    # Get rules of type protein X -> activity Y
    q_stmts = prefixes + """
        SELECT ?stmt
        WHERE {
            ?stmt a belvoc:Statement .
            ?stmt belvoc:hasSubject ?subj .
            ?stmt belvoc:hasObject ?obj .
            {
              { ?stmt belvoc:hasRelationship belvoc:DirectlyIncreases . }
              UNION
              { ?stmt belvoc:hasRelationship belvoc:DirectlyDecreases . }
            }
            {
              { ?subj a belvoc:ProteinAbundance . }
              UNION
              { ?subj a belvoc:ModifiedProteinAbundance . }
            }
            ?subj belvoc:hasConcept ?xName .
            {
              {
                ?obj a belvoc:ProteinAbundance .
                ?obj belvoc:hasConcept ?yName .
              }
              UNION
              {
                ?obj a belvoc:ModifiedProteinAbundance .
                ?obj belvoc:hasChild ?proteinY .
                ?proteinY belvoc:hasConcept ?yName .
              }
              UNION
              {
                ?obj a belvoc:AbundanceActivity .
                ?obj belvoc:hasChild ?objChild .
                ?objChild a belvoc:ProteinAbundance .
                ?objChild belvoc:hasConcept ?yName .
              }
            }
            FILTER (?xName != ?yName)
        }
    """
    res_stmts = self.g.query(q_stmts)
    print "Protein -> Protein/Activity statements:"
    print "---------------------------------------"
    for stmt in res_stmts:
        stmt_str = strip_statement(stmt[0])
        print stmt_str
        self.degenerate_stmts.append(stmt_str)
def print_statement_coverage(self):
    """Display how many of the direct statements have been converted,
    and how many are considered 'degenerate' and not converted."""
    # Lazily populate the statement lists so this method can be called
    # on its own without running the individual queries first.
    if not self.all_stmts:
        self.get_all_direct_statements()
    if not self.degenerate_stmts:
        self.get_degenerate_statements()
    if not self.indirect_stmts:
        self.get_indirect_statements()
    print
    print("Total indirect statements: %d" % len(self.indirect_stmts))
    print("Total direct statements: %d" % len(self.all_stmts))
    print("Converted statements: %d" % len(self.converted_stmts))
    print("Degenerate statements: %d" % len(self.degenerate_stmts))
    # Unhandled = direct statements neither converted nor degenerate.
    print(">> Total unhandled statements: %d" %
          (len(self.all_stmts) - len(self.converted_stmts) -
           len(self.degenerate_stmts)))
    print
    print "--- Unhandled statements ---------"
    for stmt in self.all_stmts:
        if not (stmt in self.converted_stmts or
                stmt in self.degenerate_stmts):
            print stmt
def print_statements(self):
for i, stmt in enumerate(self.statements):
print "%s: %s" % (i, stmt)
|
"""
Copyright (c) 2013, Adel Qodmani
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this
list of conditions and the following disclaimer in the documentation and/or
other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
import tarfile # For the compression
import os # For everything related to path
import logging
import sys # For the argv and exit
import datetime
def main():
    """ zipper source-dir-full-path dest-dir-full-path
    Tars and zips the source-dir and put it in the dest-dir with the name:
    source-dir-name_date_time.tar.gz
    """
    check_args()
    # Normalize away trailing slashes so the basename split is stable.
    src_dir = sys.argv[1].rstrip('/')
    logging.debug("source_path: %s" % src_dir)
    dst_dir = sys.argv[2].rstrip('/')
    logging.debug("dest_path: %s" % dst_dir)
    # The archive is named after the final component of the source path.
    src_name = src_dir.split("/")[-1]
    logging.debug("source_name: %s" % src_name)
    archive_path = create_tar_path(src_name, dst_dir)
    logging.debug("tar_path: %s" % archive_path)
    create_tar_file(archive_path, src_dir)
def check_args():
    """ Checks if the args supplied to the script are what it expects """

    def fail(prints, logs, code):
        # Print the user-facing lines, log the error lines, then exit
        # with the branch-specific status code.
        for line in prints:
            print(line)
        for line in logs:
            logging.error(line)
        sys.exit(code)

    argv = sys.argv
    if len(argv) > 1 and argv[1] == "--help":
        help_text = ("zipper creates a zipped tar-ball of the <source> directory"
                     + "and puts it in \nthe <destination> directory ")
        usage = "e.g: zipper /tmp/ /home/sally/Desktop/"
        result = ("will create a file called tmp_date_time.tar.gz in "
                  "/home/sally/Desktop/ which has all the contents of /tmp/")
        print(help_text)
        print(usage)
        print(result)
        sys.exit(0)
    elif len(argv) < 3:
        fail(["Missing arguments!",
              "Usage:",
              "\tzipper source destination",
              "You can get the help by: zipper --help"],
             ["Missing arguments!", "Shutting down!"], 1)
    elif not os.path.isabs(argv[1]):
        fail(["Source directory is not an absolute path",
              "You can get the help by: zipper --help"],
             ["Source is not absolute", "Shutting down"], 2)
    elif not os.path.isabs(argv[2]):
        fail(["Destination directory is not an absolute path",
              "You can get the help by: zipper --help"],
             ["Destination is not absolute", "Shutting down"], 3)
    elif not os.path.isdir(argv[1]):
        fail(["Path given as a source directory is not a directory",
              "You can get the help by: zipper --help"],
             ["Source is not a directory", "Shutting down"], 4)
    elif not os.path.isdir(argv[2]):
        fail(["Path given as destination directory is not a directory",
              "You can get the help by: zipper --help"],
             ["Destination is not a directory", "Shutting down"], 5)
def create_tar_path(source_name, dest_path):
    """Return the archive path <dest_path>/<source_name>_<timestamp>.tar.gz.

    NOTE(review): the original docstring said the user's desktop is
    used, but since check_args() forces dest_path to be absolute, the
    os.path.join below discards the home-directory prefix entirely —
    the result lives under dest_path.
    """
    base = os.path.expanduser('~')  # changes ~ to home dir path
    logging.debug(base)
    # dest_path is absolute, so this join effectively yields dest_path.
    base = os.path.join(base, dest_path + "/")
    logging.debug(base)
    # Timestamp layout: year-month-day_hour-minute-second
    stamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    logging.debug(stamp)
    # Final path = base + source_name + '_' + timestamp + extension
    archive = os.path.join(base, source_name)
    logging.debug(archive)
    archive += '_' + stamp + ".tar.gz"
    logging.debug(archive)
    return archive
def create_tar_file(tar_path, source_path):
    """Create a gzipped tarball of source_path at tar_path.

    On failure the process exits with status 6 (IOError) or 7
    (tarfile.TarError), matching the script's original contract.
    """
    # "w:gz" is open for writing a gz tarball
    try:
        tar = tarfile.open(tar_path, "w:gz")
        try:
            tar.add(source_path)
        finally:
            # Always release the file handle, even if add() fails.
            tar.close()
        logging.debug("Tar ball [%s] created for directory [%s]" % (tar_path,
                                                                    source_path))
    except IOError:
        logging.critical("IOError exception! Aborting ..")
        sys.exit(6)
    except tarfile.TarError:
        # BUG FIX: the original caught the undefined bare name `TarError`,
        # which raised a NameError instead of handling tar failures; the
        # exception class lives in the tarfile module.
        logging.critical("TarError exception! Aborting ...")
        sys.exit(7)
if __name__ == "__main__":
    # Set up the logging env
    # Format: (asctime) (filename) (funcname) (linenumber) (level) (msg)
    # The time can be formated with the datefmt parameter
    FORMAT = "%(asctime)s %(filename)s::%(funcName)s::%(lineno)d"
    FORMAT += " [%(levelname)s]: %(msg)s"
    DATE_FORMAT = "%Y-%m-%d %H:%M:%S"
    # NOTE(review): hard-coded per-user log path retained from the
    # original; consider deriving it from the environment instead.
    LOG_PATH = "/home/aral/learn/zipper/log"
    try:
        STREAM = open(LOG_PATH, "a+")
    except IOError:
        # BUG FIX: the original printed the (undefined) name STREAM here,
        # raising a NameError, and then called the nonexistent
        # sys.abort(); report the path we failed to open and exit.
        print("Can't create a log file in [%s]" % LOG_PATH)
        sys.exit(8)
    # Route all log records at DEBUG and above into the log file.
    logging.basicConfig(stream=STREAM, level=logging.DEBUG, format=FORMAT,
                        datefmt=DATE_FORMAT)
    main()
|
import unittest
from barf.core.reil import ReilParser
from barf.core.smt.smtsymbol import BitVec
from barf.core.smt.smtsymbol import Bool
from barf.core.smt.smtsolver import Z3Solver as SmtSolver
class SmtSolverBitVecTests(unittest.TestCase):
    """Exercise BitVec operator overloads against the SMT solver.

    Every test declares fresh 32-bit symbols, posts a constraint built
    from the operator under test plus non-triviality constraints
    (operands > 1, and for binary operators x != y), checks that the
    formula is satisfiable, and re-validates the model values with the
    corresponding Python operator. The repeated declare/constrain/check
    boilerplate of the original has been factored into the _declare /
    _solve_* helpers; each test_* method keeps its original name and
    semantics.
    """

    def setUp(self):
        self._address_size = 32
        self._parser = ReilParser()
        self._solver = SmtSolver()

    # ----- helpers --------------------------------------------------

    def _declare(self, *names):
        """Create a 32-bit BitVec per name, declare it, return them all."""
        symbols = []
        for name in names:
            symbol = BitVec(32, name)
            self._solver.declare_fun(name, symbol)
            symbols.append(symbol)
        return symbols

    def _solve_binary(self, build_expr):
        """Assert build_expr(x, y) == z with non-trivial, distinct
        operands; return the model values (x, y, z)."""
        x, y, z = self._declare("x", "y", "z")
        self._solver.add(build_expr(x, y) == z)
        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self._solver.add(x != y)
        self.assertEqual(self._solver.check(), "sat")
        return (self._solver.get_value(x),
                self._solver.get_value(y),
                self._solver.get_value(z))

    def _solve_unary(self, build_expr):
        """Assert build_expr(x) == z with non-trivial x; return (x, z).

        NOTE: unlike the original test_invert, no unused extra symbol is
        declared — the solver result is unaffected.
        """
        x, z = self._declare("x", "z")
        self._solver.add(build_expr(x) == z)
        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self.assertEqual(self._solver.check(), "sat")
        return self._solver.get_value(x), self._solver.get_value(z)

    def _solve_relation(self, build_rel):
        """Assert build_rel(x, y) with non-trivial x, y; return (x, y)."""
        x, y = self._declare("x", "y")
        self._solver.add(build_rel(x, y))
        # Add constraints to avoid trivial solutions.
        self._solver.add(x > 1)
        self._solver.add(y > 1)
        self.assertEqual(self._solver.check(), "sat")
        return self._solver.get_value(x), self._solver.get_value(y)

    # ----- Arithmetic operations ------------------------------------

    def test_add(self):
        x, y, z = self._solve_binary(lambda a, b: a + b)
        self.assertTrue(x + y == z)

    def test_sub(self):
        x, y, z = self._solve_binary(lambda a, b: a - b)
        # Mask to emulate 32-bit wraparound in Python integers.
        self.assertTrue((x - y) & 0xffffffff == z)

    def test_mul(self):
        x, y, z = self._solve_binary(lambda a, b: a * b)
        self.assertTrue((x * y) & 0xffffffff == z)

    def test_div(self):
        x, y, z = self._solve_binary(lambda a, b: a / b)
        # NOTE(review): compares Python division with the solver's
        # (signed) bitvector division, as in the original — confirm the
        # model values make these agree for the chosen constraints.
        self.assertTrue(x / y == z)

    def test_mod(self):
        x, y, z = self._solve_binary(lambda a, b: a % b)
        self.assertTrue(x % y == z)

    def test_neg(self):
        x, z = self._solve_unary(lambda a: -a)
        self.assertTrue(-x & 0xffffffff == z)

    # ----- Bitwise operations ---------------------------------------

    def test_and(self):
        x, y, z = self._solve_binary(lambda a, b: a & b)
        self.assertTrue(x & y == z)

    def test_xor(self):
        x, y, z = self._solve_binary(lambda a, b: a ^ b)
        self.assertTrue(x ^ y == z)

    def test_or(self):
        x, y, z = self._solve_binary(lambda a, b: a | b)
        self.assertTrue(x | y == z)

    def test_lshift(self):
        x, y, z = self._solve_binary(lambda a, b: a << b)
        self.assertTrue((x << y) & 0xffffffff == z)

    def test_rshift(self):
        x, y, z = self._solve_binary(lambda a, b: a >> b)
        self.assertTrue(x >> y == z)

    def test_invert(self):
        x, z = self._solve_unary(lambda a: ~a)
        self.assertTrue(~x & 0xffffffff == z)

    # ----- Comparison operators (signed) ----------------------------

    def test_lt(self):
        x, y = self._solve_relation(lambda a, b: a < b)
        self.assertTrue(x < y)

    def test_le(self):
        x, y = self._solve_relation(lambda a, b: a <= b)
        self.assertTrue(x <= y)

    def test_eq(self):
        x, y = self._solve_relation(lambda a, b: a == b)
        self.assertTrue(x == y)

    def test_neq(self):
        x, y = self._solve_relation(lambda a, b: a != b)
        self.assertTrue(x != y)

    def test_gt(self):
        x, y = self._solve_relation(lambda a, b: a > b)
        self.assertTrue(x > y)

    def test_ge(self):
        x, y = self._solve_relation(lambda a, b: a >= b)
        self.assertTrue(x >= y)

    # ----- Comparison operators (unsigned) --------------------------

    def test_ult(self):
        # TODO Implement.
        pass

    def test_ule(self):
        # TODO Implement.
        pass

    def test_ugt(self):
        # TODO Implement.
        pass

    def test_uge(self):
        # TODO Implement.
        pass

    # ----- Arithmetic operators (unsigned) --------------------------

    def test_udiv(self):
        # TODO Implement.
        pass

    def test_urem(self):
        # TODO Implement.
        pass
def main():
    """Run this module's test suite via unittest."""
    unittest.main()

# Script entry point: execute the tests when run directly.
if __name__ == '__main__':
    main()
|
from __future__ import absolute_import, division, print_function
from dynd._pydynd import w_type, \
make_var_dim, make_strided_dim, make_fixed_dim, make_cfixed_dim
__all__ = ['var', 'strided', 'fixed', 'cfixed']
class _Dim(object):
    """Base class for dimension constructors that compose with ``*``
    (apply to a type or chain with another dimension) and ``**``
    (repeat a dimension fragment)."""
    __slots__ = []

    def __mul__(self, rhs):
        # Strings and Python types are promoted to dynd types first,
        # allowing e.g. ndt.strided * 'int32' or ndt.strided * int.
        if isinstance(rhs, (str, type)):
            rhs = w_type(rhs)
        if isinstance(rhs, w_type):
            # Wrap the element type in each dimension, innermost first.
            result = rhs
            for dim in reversed(self.dims):
                result = dim.create(result)
            return result
        if isinstance(rhs, _Dim):
            # Combine the dimension fragments
            return _DimFragment(self.dims + rhs.dims)
        raise TypeError('Expected a dynd dimension or type, not %r' % rhs)

    def __pow__(self, count):
        # Repeat this dimension `count` times, e.g. ndt.fixed[2] ** 3.
        return _DimFragment(self.dims * count)
class _DimFragment(_Dim):
    """A chain of dimensions waiting to be applied to an element type."""
    __slots__ = ['dims']

    def __init__(self, dims):
        self.dims = dims

    def __repr__(self):
        return ' * '.join(repr(fragment) for fragment in self.dims)
class _Var(_Dim):
    """
    Creates a var dimension when combined with other types.
    Examples
    --------
    >>> ndt.var * ndt.int32
    ndt.type('var * int32')
    >>> ndt.fixed[5] * ndt.var * ndt.float64
    ndt.type('5 * var * float64')
    """
    __slots__ = []

    @property
    def dims(self):
        # A var dimension is its own single-element fragment.
        return (self,)

    def create(self, eltype):
        # Wrap the element type in a var dimension.
        return make_var_dim(eltype)

    def __repr__(self):
        return 'ndt.var'
class _Strided(_Dim):
    """
    Creates a strided dimension when combined with other types.
    Examples
    --------
    >>> ndt.strided * ndt.int32
    ndt.type('strided * int32')
    >>> ndt.fixed[5] * ndt.strided * ndt.float64
    ndt.type('5 * strided * float64')
    """
    __slots__ = []

    @property
    def dims(self):
        # A strided dimension is its own single-element fragment.
        return (self,)

    def create(self, eltype):
        # Wrap the element type in a strided dimension.
        return make_strided_dim(eltype)

    def __repr__(self):
        return 'ndt.strided'
class _Fixed(_Dim):
    """
    Creates a fixed dimension when combined with other types.
    Examples
    --------
    >>> ndt.fixed[3] * ndt.int32
    ndt.type('3 * int32')
    >>> ndt.fixed[5] * ndt.var * ndt.float64
    ndt.type('5 * var * float64')
    """
    __slots__ = ['dim_size']

    def __init__(self, dim_size=None):
        self.dim_size = dim_size

    @property
    def dims(self):
        # A bare ndt.fixed (no size chosen yet) cannot form a type.
        if self.dim_size is None:
            raise TypeError('Need to specify ndt.fixed[dim_size],'
                            ' not just ndt.fixed')
        return (self,)

    def create(self, eltype):
        return make_fixed_dim(self.dim_size, eltype)

    def __getitem__(self, dim_size):
        # Indexing yields a sized instance, e.g. ndt.fixed[5].
        return _Fixed(dim_size)

    def __repr__(self):
        if self.dim_size is None:
            return 'ndt.fixed'
        return 'ndt.fixed[%d]' % self.dim_size
class _CFixed(_Dim):
    """
    Creates a cfixed dimension when combined with other types.
    Examples
    --------
    >>> ndt.cfixed[3] * ndt.int32
    ndt.type('cfixed[3] * int32')
    >>> ndt.fixed[5] * ndt.cfixed[2] * ndt.float64
    ndt.type('5 * cfixed[2] * float64')
    """
    __slots__ = ['dim_size']

    def __init__(self, dim_size=None):
        self.dim_size = dim_size

    @property
    def dims(self):
        # A bare ndt.cfixed (no size chosen yet) cannot form a type.
        if self.dim_size is None:
            raise TypeError('Need to specify ndt.cfixed[dim_size],'
                            ' not just ndt.cfixed')
        return (self,)

    def create(self, eltype):
        return make_cfixed_dim(self.dim_size, eltype)

    def __getitem__(self, dim_size):
        # Indexing yields a sized instance, e.g. ndt.cfixed[2].
        return _CFixed(dim_size)

    def __repr__(self):
        if self.dim_size is None:
            return 'ndt.cfixed'
        return 'ndt.cfixed[%d]' % self.dim_size
# Module-level singleton dimension constructors.  They combine with element
# types via ``*`` (e.g. ``ndt.fixed[5] * ndt.int32``); the sized variants are
# produced by indexing (``ndt.fixed[5]``, ``ndt.cfixed[2]``).
var = _Var()
strided = _Strided()
fixed = _Fixed()
cfixed = _CFixed()
|
import numpy as np
import dragon.core.workspace as ws
from dragon.core.tensor import Tensor, GetTensorName
def shared(value, name=None, **kwargs):
    """Construct a Tensor initialized with ``value``.

    Parameters
    ----------
    value : basic type, list or numpy.ndarray
        The numerical values.
    name : str
        The name of tensor; auto-generated when omitted.

    Returns
    -------
    Tensor
        The initialized tensor.
    """
    supported_types = (int, float, list, np.ndarray)
    if not isinstance(value, supported_types):
        raise TypeError("Unsupported type of value: {}".format(type(value)))
    tensor_name = GetTensorName() if name is None else name
    tensor = Tensor(tensor_name).Variable()
    # Push the initial values into the workspace for this tensor.
    ws.FeedTensor(tensor, value)
    return tensor
|
from ..tools import add_bias, confirm
from ..activation_functions import softmax_function
from ..cost_functions import softmax_neg_loss
import numpy as np
def resilient_backpropagation(network, trainingset, testset, cost_function, ERROR_LIMIT=1e-3, max_iterations = (), weight_step_max = 50., weight_step_min = 0., start_step = 0.5, learn_max = 1.2, learn_min = 0.5, print_rate = 1000, save_trained_network = False ):
    """Train ``network`` with the iRprop+ variant of resilient backpropagation.

    Runs until the test-set error drops below ``ERROR_LIMIT`` or
    ``max_iterations`` epochs have been performed.  Python 2 code.

    NOTE(review): the defaults ``max_iterations = ()`` and the initial
    ``prev_error = ()`` rely on the Python 2 rule that any int compares
    less than any tuple, so ``()`` behaves as "+infinity" in the
    ``epoch < max_iterations`` and ``error > prev_error`` comparisons.
    This idiom raises TypeError on Python 3.
    """
    # Implemented according to iRprop+
    # http://sci2s.ugr.es/keel/pdf/algorithm/articulo/2003-Neuro-Igel-IRprop+.pdf
    assert softmax_function != network.layers[-1][1] or cost_function == softmax_neg_loss,\
        "When using the `softmax` activation function, the cost function MUST be `softmax_neg_loss`."
    assert cost_function != softmax_neg_loss or softmax_function == network.layers[-1][1],\
        "When using the `softmax_neg_loss` cost function, the activation function in the final layer MUST be `softmax`."
    assert trainingset[0].features.shape[0] == network.n_inputs, \
        "ERROR: input size varies from the defined input setting"
    assert trainingset[0].targets.shape[0] == network.layers[-1][0], \
        "ERROR: output size varies from the defined output setting"
    training_data = np.array( [instance.features for instance in trainingset ] )
    training_targets = np.array( [instance.targets for instance in trainingset ] )
    test_data = np.array( [instance.features for instance in testset ] )
    test_targets = np.array( [instance.targets for instance in testset ] )
    # Storing the current / previous weight step size
    weight_step = [ np.full( weight_layer.shape, start_step ) for weight_layer in network.weights ]
    # Storing the current / previous weight update
    dW = [ np.ones(shape=weight_layer.shape) for weight_layer in network.weights ]
    # Storing the previous derivative
    previous_dEdW = [ 1 ] * len( network.weights )
    # Storing the previous error measurement
    prev_error = ( ) # inf
    # Initial forward pass (with trace) to seed the first backward sweep.
    input_signals, derivatives = network.update( training_data, trace=True )
    out = input_signals[-1]
    cost_derivative = cost_function(out, training_targets, derivative=True).T
    delta = cost_derivative * derivatives[-1]
    error = cost_function(network.update( test_data ), test_targets )
    n_samples = float(training_data.shape[0])
    layer_indexes = range( len(network.layers) )[::-1] # reversed
    epoch = 0
    while error > ERROR_LIMIT and epoch < max_iterations:
        epoch += 1
        for i in layer_indexes:
            # Loop over the weight layers in reversed order to calculate the deltas
            # Calculate the delta with respect to the weights
            dEdW = (np.dot( delta, add_bias(input_signals[i]) )/n_samples).T
            if i != 0:
                """Do not calculate the delta unnecessarily."""
                # Skip the bias weight
                weight_delta = np.dot( network.weights[ i ][1:,:], delta )
                # Calculate the delta for the subsequent layer
                delta = weight_delta * derivatives[i-1]
            # Calculate sign changes and note where they have changed
            diffs = np.multiply( dEdW, previous_dEdW[i] )
            pos_indexes = np.where( diffs > 0 )
            neg_indexes = np.where( diffs < 0 )
            zero_indexes = np.where( diffs == 0 )
            # positive
            # NOTE(review): ``np.any`` applied to an ``np.where`` index tuple
            # tests whether any *index value* is non-zero, not whether any
            # match exists -- a lone match at position (0, 0) is skipped.
            # Presumably ``pos_indexes[0].size`` was intended; confirm before
            # changing.  The same applies to the two guards below.
            if np.any(pos_indexes):
                # Calculate the weight step size
                weight_step[i][pos_indexes] = np.minimum( weight_step[i][pos_indexes] * learn_max, weight_step_max )
                # Calculate the weight step direction
                dW[i][pos_indexes] = np.multiply( -np.sign( dEdW[pos_indexes] ), weight_step[i][pos_indexes] )
                # Apply the weight deltas
                network.weights[i][ pos_indexes ] += dW[i][pos_indexes]
            # negative
            if np.any(neg_indexes):
                weight_step[i][neg_indexes] = np.maximum( weight_step[i][neg_indexes] * learn_min, weight_step_min )
                if error > prev_error:
                    # iRprop+ version of resilient backpropagation
                    network.weights[i][ neg_indexes ] -= dW[i][neg_indexes] # backtrack
                dEdW[ neg_indexes ] = 0
            # zeros
            if np.any(zero_indexes):
                dW[i][zero_indexes] = np.multiply( -np.sign( dEdW[zero_indexes] ), weight_step[i][zero_indexes] )
                network.weights[i][ zero_indexes ] += dW[i][zero_indexes]
            # Store the previous weight step
            previous_dEdW[i] = dEdW
        #end weight adjustment loop
        prev_error = error
        # Forward pass for the next epoch and re-measure the test error.
        input_signals, derivatives = network.update( training_data, trace=True )
        out = input_signals[-1]
        cost_derivative = cost_function(out, training_targets, derivative=True).T
        delta = cost_derivative * derivatives[-1]
        error = cost_function(network.update( test_data ), test_targets )
        if epoch%print_rate==0:
            # Show the current training status
            print "[training] Current error:", error, "\tEpoch:", epoch
    print "[training] Finished:"
    print "[training] Converged to error bound (%.4g) with error %.4g." % ( ERROR_LIMIT, error )
    print "[training] Measured quality: %.4g" % network.measure_quality( training_data, training_targets, cost_function )
    print "[training] Trained for %d epochs." % epoch
    if save_trained_network and confirm( promt = "Do you wish to store the trained network?" ):
        network.save_network_to_file()
|
import os.path
import tempfile
import pkg_resources
import pytest
import hdf5storage
import hdf5storage.plugins
# The example marshaller plugin is optional; tests that need it are skipped
# when it is absent.  Catch only ImportError -- a bare ``except`` would also
# hide unrelated failures (e.g. a SyntaxError inside the plugin).
try:
    import example_hdf5storage_marshaller_plugin
    has_example_hdf5storage_marshaller_plugin = True
except ImportError:
    has_example_hdf5storage_marshaller_plugin = False
def test_marshaller_api_versions():
    """The plugin layer must advertise exactly marshaller API version 1.0."""
    supported = hdf5storage.plugins.supported_marshaller_api_versions()
    assert supported == ('1.0', )
def test_find_thirdparty_marshaller_plugins():
    """Plugin discovery returns one EntryPoint dict per supported API version."""
    apivs = hdf5storage.plugins.supported_marshaller_api_versions()
    plugins = hdf5storage.plugins.find_thirdparty_marshaller_plugins()
    assert isinstance(plugins, dict)
    assert set(apivs) == set(plugins)
    found_example = False
    for version, entries in plugins.items():
        assert isinstance(version, str)
        assert isinstance(entries, dict)
        for entry_name, entry_point in entries.items():
            assert isinstance(entry_name, str)
            assert isinstance(entry_point, pkg_resources.EntryPoint)
            if entry_name == 'example_hdf5storage_marshaller_plugin':
                found_example = True
    # The example plugin must be discovered exactly when it is importable.
    assert has_example_hdf5storage_marshaller_plugin == found_example
# BUG FIX: the skip condition was inverted -- it skipped the test when the
# plugin WAS available, even though the body (and the stated reason) requires
# the plugin.  Skip only when the plugin is NOT importable.
@pytest.mark.skipif(not has_example_hdf5storage_marshaller_plugin,
                    reason='requires example_hdf5storage_marshaller_'
                    'plugin')
def test_plugin_marshaller_SubList():
    """Round-trip a plugin-marshalled SubList through an HDF5 file."""
    mc = hdf5storage.MarshallerCollection(load_plugins=True,
                                          lazy_loading=True)
    options = hdf5storage.Options(store_python_metadata=True,
                                  matlab_compatible=False,
                                  marshaller_collection=mc)
    ell = [1, 2, 'b1', b'3991', True, None]
    data = example_hdf5storage_marshaller_plugin.SubList(ell)
    name = '/a'
    with tempfile.TemporaryDirectory() as folder:
        filename = os.path.join(folder, 'data.h5')
        hdf5storage.write(data, path=name, filename=filename,
                          options=options)
        out = hdf5storage.read(path=name, filename=filename,
                               options=options)
    # Contents and the exact subclass must survive the round trip.
    assert ell == list(out)
    assert type(out) == example_hdf5storage_marshaller_plugin.SubList
|
from . import gxapi_cy
from geosoft.gxapi import GXContext, float_ref, int_ref, str_ref
from .GXDB import GXDB
from .GXVA import GXVA
from .GXVV import GXVV
class GXDBWRITE(gxapi_cy.WrapDBWRITE):
    """
    GXDBWRITE class.

    Opens a database for writing.  Large blocks of data are split into
    smaller blocks and served up sequentially, which prevents over-use of
    virtual memory when VVs or VAs are written to channels.  Individual
    data blocks are limited by default to 1 MB (user-alterable); data
    less than the block size is served up whole, one block per line.
    """

    def __init__(self, handle=0):
        # Bind this wrapper to the thread-local Geosoft context.
        super(GXDBWRITE, self).__init__(GXContext._get_tls_geo(), handle)

    @classmethod
    def null(cls):
        """
        A null (undefined) instance of `GXDBWRITE`.

        :returns: a null `GXDBWRITE`
        :rtype:   GXDBWRITE
        """
        return GXDBWRITE()

    def is_null(self):
        """
        Check if this is a null (undefined) instance.

        :returns: True if this is a null (undefined) instance, False otherwise.
        :rtype:   bool
        """
        return self._internal_handle() == 0

    @classmethod
    def create(cls, db):
        """
        Create a `GXDBWRITE` object.  Add channels using the
        `add_channel` method.

        :param db: database input
        :type db:  GXDB
        :returns:  `GXDBWRITE` object
        :rtype:    GXDBWRITE

        .. versionadded:: 9.0
        """
        handle = gxapi_cy.WrapDBWRITE._create(GXContext._get_tls_geo(), db)
        return GXDBWRITE(handle)

    @classmethod
    def create_xy(cls, db):
        """
        Create a `GXDBWRITE` object for XY-located data.  Add channels
        using the `add_channel` method.

        :param db: database input
        :type db:  GXDB
        :returns:  `GXDBWRITE` object
        :rtype:    GXDBWRITE

        .. versionadded:: 9.0
        """
        handle = gxapi_cy.WrapDBWRITE._create_xy(GXContext._get_tls_geo(), db)
        return GXDBWRITE(handle)

    @classmethod
    def create_xyz(cls, db):
        """
        Create a `GXDBWRITE` object for XYZ-located data.  Add channels
        using the `add_channel` method.

        :param db: database input
        :type db:  GXDB
        :returns:  `GXDBWRITE` object
        :rtype:    GXDBWRITE

        .. versionadded:: 9.0
        """
        handle = gxapi_cy.WrapDBWRITE._create_xyz(GXContext._get_tls_geo(), db)
        return GXDBWRITE(handle)

    def add_channel(self, chan):
        """
        Add a data channel to this `GXDBWRITE` object.

        :param chan: channel handle (does not need to be locked, but can be)
        :type chan:  int
        :returns:    channel index; use it to fetch the matching `GXVV`/`GXVA`
        :rtype:      int

        .. versionadded:: 9.0
        """
        return self._add_channel(chan)

    def get_db(self):
        """
        Get the output `GXDB` handle from this `GXDBWRITE` object.

        :returns: `GXDB` handle
        :rtype:   GXDB

        .. versionadded:: 9.0
        """
        return GXDB(self._get_db())

    def get_vv(self, chan):
        """
        Get the `GXVV` handle for a channel.

        Call only for single-column (regular) channels; use
        `get_chan_array_size` to find the number of columns in a channel.
        The `GXVV` is filled anew for each block served up.

        :param chan: index of channel to access
        :type chan:  int
        :returns:    `GXVV` handle
        :rtype:      GXVV

        .. versionadded:: 9.0
        """
        return GXVV(self._get_vv(chan))

    def get_va(self, chan):
        """
        Get the `GXVA` handle for an array channel.

        Call only for array (multi-column) channels; use
        `get_chan_array_size` (or `GXVA.col` on the returned handle) to
        find the number of columns.  The `GXVA` is filled anew for each
        block served up.

        :param chan: index of channel to access
        :type chan:  int
        :returns:    `GXVA` handle
        :rtype:      GXVA

        .. versionadded:: 9.0
        """
        return GXVA(self._get_va(chan))

    def get_v_vx(self):
        """
        Get the X channel `GXVV` handle.

        Only available for the CreateXY or CreateXYZ methods.  The `GXVV`
        is filled anew for each block served up.

        :returns: `GXVV` handle
        :rtype:   GXVV

        .. versionadded:: 9.0
        """
        return GXVV(self._get_v_vx())

    def get_v_vy(self):
        """
        Get the Y channel `GXVV` handle.

        Only available for the CreateXY or CreateXYZ methods.  The `GXVV`
        is filled anew for each block served up.

        :returns: `GXVV` handle
        :rtype:   GXVV

        .. versionadded:: 9.0
        """
        return GXVV(self._get_v_vy())

    def get_v_vz(self):
        """
        Get the Z channel `GXVV` handle.

        Only available for the CreateXY or CreateXYZ methods.  The `GXVV`
        is filled anew for each block served up.  If the Z channel is an
        array channel, the returned `GXVV` is the "base" `GXVV` of the
        `GXVA` and contains all items sequentially.

        :returns: `GXVV` handle
        :rtype:   GXVV

        .. versionadded:: 9.0
        """
        return GXVV(self._get_v_vz())

    def get_chan_array_size(self, chan):
        """
        Get the number of columns of data in a channel.

        Regular channels have one column of data; array channels have
        more.  Call this to decide between `get_vv` and `get_va` when
        accessing data for a channel.

        :param chan: index of channel to access
        :type chan:  int
        :returns:    number of columns (array size) for the channel
        :rtype:      int

        .. versionadded:: 9.0
        """
        return self._get_chan_array_size(chan)

    def add_block(self, line):
        """
        Add the current block of data.

        First set up the data for each channel by copying values into the
        individual channel VVs and VAs.

        :param line: line
        :type line:  int

        .. versionadded:: 9.0
        """
        self._add_block(line)

    def commit(self):
        """
        Commit remaining data to the database.

        .. versionadded:: 9.0
        """
        self._commit()

    def test_func(self, ra):
        """
        Temporary test function.

        Designed to import the "Massive.xyz" file, which has data in the
        format "X Y Z Data".

        :param ra: `GXRA` handle to text file to import
        :type ra:  GXRA

        .. versionadded:: 9.0
        """
        self._test_func(ra)
|
from django.db import models
from lino_xl.lib.ledger.choicelists import VoucherStates
from lino.api import dd, _
class OrderStates(VoucherStates):
    """Workflow states for orders; items and transitions are registered below."""
    pass
add = OrderStates.add_item
# State items: (value, translated label, attribute name).
add('10', _("Waiting"), 'draft', is_editable=True)
add('20', _("Active"), 'active', is_editable=True)
add('30', _("Urgent"), 'urgent', is_editable=True)
add('40', _("Done"), 'registered')
add('50', _("Cancelled"), 'cancelled')
# Every state is reachable from every other state: each transition lists all
# four remaining states as allowed sources.
OrderStates.draft.add_transition(required_states="active urgent registered cancelled")
OrderStates.active.add_transition(required_states="draft urgent registered cancelled")
OrderStates.urgent.add_transition(required_states="draft active registered cancelled")
OrderStates.registered.add_transition(required_states="draft active urgent cancelled")
OrderStates.cancelled.add_transition(required_states="draft active urgent registered")
|
import unittest
import numpy as np
import socket
import Pyro4
from nested_sampling import NestedSampling, MonteCarloWalker, Harmonic, Replica
class TestNS(unittest.TestCase):
    """Nested-sampling integration tests (Python 2 code).

    To test distributed computing, a dispatcher must be started with
    ``--server-name test`` and ``--port 9090``.
    """
    def setUp(self):
        self.setUp1()
    def setUp1(self, nproc=1, multiproc=True):
        # A low-dimensional harmonic potential keeps the sampling fast.
        self.ndim = 3
        self.harmonic = Harmonic(self.ndim)
        self.nreplicas = 10
        self.stepsize = 0.1
        self.nproc = nproc
        self.mc_runner = MonteCarloWalker(self.harmonic, mciter=40)
        if multiproc == False:
            # Distributed run: resolve this host's address and point at the
            # externally started Pyro4 dispatcher (see class docstring).
            hostname=socket.gethostname()
            host = Pyro4.socketutil.getIpAddress(hostname, workaround127=True)
            self.dispatcher_URI = "PYRO:"+"test@"+host+":9090"
        else:
            self.dispatcher_URI = None
        # Seed the sampler with random replicas and their energies.
        replicas = []
        for i in xrange(self.nreplicas):
            x = self.harmonic.get_random_configuration()
            replicas.append(Replica(x, self.harmonic.get_energy(x)))
        self.ns = NestedSampling(replicas, self.mc_runner,
                    stepsize=0.1, nproc=nproc, verbose=False, dispatcher_URI=self.dispatcher_URI)
        # Highest replica energy before iterating; iterations should lower it.
        self.Emax0 = self.ns.replicas[-1].energy
        self.niter = 100
        for i in xrange(self.niter):
            self.ns.one_iteration()
        self.Emax = self.ns.replicas[-1].energy
        self.Emin = self.ns.replicas[0].energy
    def test1(self):
        print "running TestNS"
        self.assert_(len(self.ns.replicas) == self.nreplicas)
        self.assert_(self.Emax < self.Emax0)
        self.assert_(self.Emin < self.Emax)
        self.assert_(self.Emin >= 0)
        # The sampler adapts its step size away from the initial value.
        self.assert_(self.ns.stepsize != self.stepsize)
        self.assertEqual(len(self.ns.max_energies), self.niter * self.nproc)
class testNSParMultiproc(TestNS):
    """Re-run the TestNS assertions with 3 worker processes (multiprocessing)."""
    def setUp(self):
        self.setUp1(nproc=3)
class testNSParPyro(TestNS):
    """Re-run the TestNS assertions with 3 workers via a Pyro4 dispatcher."""
    def setUp(self):
        self.setUp1(nproc=3,multiproc=False)
# Allow running this test module directly from the command line.
if __name__ == "__main__":
    unittest.main()
|
# Public API of this module.
__all__ = [
    "CorruptedMessage",
    "Stats",
    "UnexpectedCommand",
    "UnexpectedEOF",
    "UnknownCommand",
    "log",
]
import logging
import struct
# Module-level logger shared by the offhand protocol helpers.
log = logging.getLogger("offhand")
class UnexpectedEOF(Exception):
    """Raised when the peer closes the connection mid-protocol."""

    def __init__(self):
        super(UnexpectedEOF, self).__init__("Connection closed unexpectedly")
class UnknownCommand(Exception):
    """Raised for a protocol command this implementation does not know."""

    def __init__(self, command):
        super(UnknownCommand, self).__init__("Unknown command: %r" % command)
class UnexpectedCommand(Exception):
    """Raised for a known command that is invalid in the current state."""

    def __init__(self, command):
        super(UnexpectedCommand, self).__init__(
            "Unexpected command: %r" % command)
class CorruptedMessage(Exception):
    """Raised when a wire message fails length-prefix framing checks."""

    def __init__(self):
        super(CorruptedMessage, self).__init__("Corrupted message")
class Stats(object):
    """Bundle of connection counters; optionally copy-constructed."""

    __slots__ = [
        "connecting",
        "connected",
        "idle",
        "busy",
        "total_engaged",
        "total_canceled",
        "total_rolledback",
        "total_timeouts",
        "total_disconnects",
        "total_errors",
    ]

    def __init__(self, copy=None):
        # Start every counter at zero, or duplicate another instance.
        if copy is None:
            for key in self.__slots__:
                setattr(self, key, 0)
        else:
            for key in self.__slots__:
                setattr(self, key, getattr(copy, key))

    def __nonzero__(self):
        # Truthy when any counter is non-zero (Python 2 bool protocol).
        for key in self.__slots__:
            if getattr(self, key):
                return True
        return False

    def __str__(self):
        pairs = ["%s=%s" % (key, getattr(self, key))
                 for key in self.__slots__]
        return " ".join(pairs)
def parse_message(data):
    """Split a length-prefixed wire message into its parts.

    ``data`` is a sequence of parts, each encoded as a little-endian
    32-bit length followed by that many payload bytes.  Returns the list
    of payload byte strings; raises CorruptedMessage when a header or
    payload is truncated.
    """
    parts = []
    pos = 0
    total = len(data)
    while pos < total:
        left = total - pos
        if left < 4:
            # Truncated length header.
            raise CorruptedMessage()
        size, = struct.unpack("<I", data[pos:pos + 4])
        pos += 4
        if left < 4 + size:
            # Payload shorter than its declared length.
            raise CorruptedMessage()
        parts.append(data[pos:pos + size])
        pos += size
    return parts
|
from django import forms
try:
from django.utils.encoding import smart_unicode as smart_text
except ImportError:
from django.utils.encoding import smart_text
from cached_modelforms.tests.utils import SettingsTestCase
from cached_modelforms.tests.models import SimpleModel
from cached_modelforms import (
CachedModelChoiceField, CachedModelMultipleChoiceField)
class TestFields(SettingsTestCase):
    """Tests for CachedModelChoiceField and CachedModelMultipleChoiceField.

    Each test builds fields from a fixed in-memory list of SimpleModel
    instances rather than a queryset.
    """
    def setUp(self):
        self.settings_manager.set(INSTALLED_APPS=('cached_modelforms.tests',))
        self.obj1 = SimpleModel.objects.create(name='name1')
        self.obj2 = SimpleModel.objects.create(name='name2')
        self.obj3 = SimpleModel.objects.create(name='name3')
        self.cached_list = [self.obj1, self.obj2, self.obj3]
        # Forms are defined here so their fields close over self.cached_list.
        class FormSingle(forms.Form):
            obj = CachedModelChoiceField(
                objects=lambda:self.cached_list,
                required=False
            )
        class FormMultiple(forms.Form):
            obj = CachedModelMultipleChoiceField(
                objects=lambda:self.cached_list,
                required=False
            )
        self.FormSingle = FormSingle
        self.FormMultiple = FormMultiple
    def test_modelchoicefield_objects_arg(self):
        '''
        Test, how the field accepts different types of ``objects`` argument.
        '''
        as_list = CachedModelChoiceField(objects=lambda:self.cached_list)
        as_iterable = CachedModelChoiceField(
            objects=lambda:iter(self.cached_list)
        )
        list_of_tuples = [(x.pk, x) for x in self.cached_list]
        as_list_of_tuples = CachedModelChoiceField(
            objects=lambda:list_of_tuples
        )
        as_dict = CachedModelChoiceField(objects=lambda:dict(list_of_tuples))
        choices_without_empty_label = as_list.choices[:]
        if as_list.empty_label is not None:
            choices_without_empty_label.pop(0)
        # make sure all of the ``choices`` attrs are the same
        self.assertTrue(
            as_list.choices ==
            as_iterable.choices ==
            as_list_of_tuples.choices ==
            as_dict.choices
        )
        # same for ``objects``
        self.assertTrue(
            as_list.objects ==
            as_iterable.objects ==
            as_list_of_tuples.objects ==
            as_dict.objects
        )
        # ``objects`` should be a dict as ``{smart_text(pk1): obj1, ...}``
        self.assertEqual(
            set(as_list.objects.keys()),
            set(smart_text(x.pk) for x in self.cached_list)
        )
        self.assertEqual(set(as_list.objects.values()), set(self.cached_list))
        # ``choices`` should be a list as ``[(smart_text(pk1), smart_text(obj1)), ...]``
        self.assertEqual(
            choices_without_empty_label,
            [(smart_text(x.pk), smart_text(x)) for x in self.cached_list]
        )
    def test_modelmultiplechoicefield_objects_arg(self):
        '''
        Test, how the field accepts different types of ``objects`` argument.
        '''
        as_list = CachedModelMultipleChoiceField(
            objects=lambda:self.cached_list
        )
        as_iterable = CachedModelMultipleChoiceField(
            objects=lambda:iter(self.cached_list)
        )
        list_of_tuples = [(x.pk, x) for x in self.cached_list]
        as_list_of_tuples = CachedModelMultipleChoiceField(
            objects=lambda:list_of_tuples
        )
        # NOTE(review): unlike the single-field test above, ``objects`` is
        # passed directly here rather than via a lambda -- presumably the
        # field accepts both callables and plain values; confirm intentional.
        as_dict = CachedModelMultipleChoiceField(objects=dict(list_of_tuples))
        # make sure all of the ``choices`` attrs are the same
        self.assertTrue(
            as_list.choices ==
            as_iterable.choices ==
            as_list_of_tuples.choices ==
            as_dict.choices)
        # same for ``objects``
        self.assertTrue(
            as_list.objects ==
            as_iterable.objects ==
            as_list_of_tuples.objects ==
            as_dict.objects)
        # ``objects`` should be a dict as ``{smart_text(pk1): obj1, ...}``
        self.assertEqual(
            set(as_list.objects.keys()),
            set(smart_text(x.pk) for x in self.cached_list)
        )
        self.assertEqual(set(as_list.objects.values()), set(self.cached_list))
        # ``choices`` should be a list as ``[(smart_text(pk1), smart_text(obj1)), ...]``
        self.assertEqual(
            as_list.choices,
            [(smart_text(x.pk), smart_text(x)) for x in self.cached_list]
        )
    def test_modelchoicefield_behavior(self):
        '''
        Test, how the field handles data in form.
        '''
        # some value
        form = self.FormSingle({'obj': smart_text(self.obj1.pk)})
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['obj'], self.obj1)
        # no value
        form = self.FormSingle({})
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['obj'], None)
        # invalid value
        form = self.FormSingle({'obj': '-1'})
        self.assertFalse(form.is_valid())
        self.assertTrue(form._errors['obj'])
    def test_modelmultiplechoicefield_behavior(self):
        '''
        Test, how the field handles data in form.
        '''
        # some value
        form = self.FormMultiple({'obj': [smart_text(self.obj1.pk), smart_text(self.obj2.pk)]})
        self.assertTrue(form.is_valid())
        self.assertEqual(set(form.cleaned_data['obj']), set([self.obj1, self.obj2]))
        # no value
        form = self.FormMultiple({})
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data['obj'], [])
        # invalid value
        form = self.FormMultiple({'obj': [smart_text(self.obj1.pk), '-1']})
        self.assertFalse(form.is_valid())
        self.assertTrue(form._errors['obj'])
        # invalid list
        form = self.FormMultiple({'obj': '-1'})
        self.assertFalse(form.is_valid())
        self.assertTrue(form._errors['obj'])
    def test_modelchoicefield_objects_assignment(self):
        # Reassigning ``objects`` must rebuild ``choices`` to match.
        field = CachedModelChoiceField(objects=self.cached_list)
        field2 = CachedModelChoiceField(objects=self.cached_list[:2])
        field.objects = self.cached_list[:2]
        self.assertEqual(field.objects, field2.objects)
        self.assertEqual(field.choices, field2.choices)
    def test_modelmultiplechoicefield_objects_assignment(self):
        # Same reassignment contract for the multiple-choice variant.
        field = CachedModelMultipleChoiceField(objects=self.cached_list)
        field2 = CachedModelMultipleChoiceField(objects=self.cached_list[:2])
        field.objects = self.cached_list[:2]
        self.assertEqual(field.objects, field2.objects)
        self.assertEqual(field.choices, field2.choices)
|
import sys
import notes
thenote = sys.argv[1]
outfile = sys.argv[2]
notes.init_repo(sys.argv[3:])
note = notes.repo.get(thenote)
deps = note.get_deps()
print "%s.txt(note) -> %s(referenced keys)" % (thenote, outfile)
f = file(outfile, 'w')
for d in deps:
print >>f, d
f.close()
|
"""interpret a comapct grid specification using regex"""
import re
real_short1 = \
r'\s*(?P<lower>-?(\d+(\.\d*)?|\d*\.\d+)([eE][+\-]?\d+)?)\s*'
real_short2 = \
r'\s*(?P<upper>-?(\d+(\.\d*)?|\d*\.\d+)([eE][+\-]?\d+)?)\s*'
domain = r'\[' + real_short1 + ',' + real_short2 + r'\]'
indices = r'\[\s*(-?\d+)\s*:\s*(-?\d+)\s*\]'
examples = ('domain=[0,10] indices=[0:11]',
'domain=[0.1,1.1]x[0,2E+00] indices=[1:21]x[1:101]',
'[0,1]x[0,2]x[-1,1.5] [1:21]x[1:11]x[-10:15]')
for ex in examples:
print re.findall(indices, ex)
# a nested list is returned; requires nested group counting
print re.findall(domain, ex)
print
print 'work with groupindex:'
for ex in examples:
print re.findall(indices, ex)
c = re.compile(domain)
groups = c.findall(ex)
intervals = []
for i in range(len(groups)):
intervals.append(
(groups[i][c.groupindex['lower']-1],
groups[i][c.groupindex['upper']-1]))
print intervals
print
real_short1 = \
r'\s*(?P<lower>-?(?:\d+(?:\.\d*)?|\d*\.\d+)(?:[eE][+\-]?\d+)?)\s*'
real_short2 = \
r'\s*(?P<upper>-?(?:\d+(?:\.\d*)?|\d*\.\d+)(?:[eE][+\-]?\d+)?)\s*'
domain = r'\[' + real_short1 + ',' + real_short2 + r'\]'
print 'non-capturing groups:'
for ex in examples:
print re.findall(domain, ex)
print
real_sn = r'-?\d\.?\d*[Ee][+\-][0-9]+'
real_dn = r'-?\d*\.\d*'
real_in = r'-?\d+'
real1 = \
r'\s*(?P<lower>' + real_sn + '|' + real_dn + '|' + real_in + ')\s*'
real2 = \
r'\s*(?P<upper>' + real_sn + '|' + real_dn + '|' + real_in + ')\s*'
domain = r'\[' + real1 + ',' + real2 + r'\]'
indices = r'\[\s*(-?\d+)\s*:\s*(-?\d+)\s*\]'
print '\navoid so many parenthesis (just two groups now for each interval):'
for ex in examples:
print re.findall(indices, ex)
print re.findall(domain, ex)
print
domain = r'\[([^,]*),([^\]]*)\]'
indices = r'\[([^:,]*):([^\]]*)\]'
print '\nsimpler regular expressions:\n', domain, indices
for ex in examples:
print re.findall(indices, ex)
print re.findall(domain, ex)
print
domain = r'\[(.*?),(.*?)\]'
indices = r'\[(.*?):(.*?)\]'
print '\nalternative; simpler regular expressions:\n', domain, indices
for ex in examples:
print re.findall(indices, ex)
print re.findall(domain, ex)
print
|
import pandas as pd
import numpy as np
import cobra
from pyefm.ElementaryFluxModes import EFMToolWrapper
from tqdm import tqdm
class EFVWrapper(EFMToolWrapper):
    """EFMTool wrapper that computes elementary flux vectors (EFVs).

    Augments the stoichiometric matrix with the model's inhomogeneous
    flux bounds (plus optional extra linear constraints) so that
    EFMTool's elementary-mode enumeration yields elementary flux vectors.
    """

    def create_matrices(self, extra_g=None, extra_h=None):
        """Initialize the augmented stoichiometric matrix ``self.D``.

        extra_g: (n x nr) array
            Extra entries in the constraint matrix. Positive values for
            lower bounds, negative values for upper bounds.
        extra_h: (n) array
            Corresponding bounds for the extra entries matrix.
        """
        # Create stoichiometric matrix, get key dimensions.
        N = cobra.util.create_stoichiometric_matrix(self.model)
        nm, nr = N.shape
        self.nm = nm
        self.nr = nr

        # Construct full G and h matrices (+I rows encode lower bounds,
        # -I rows encode upper bounds), then drop homogeneous (or near
        # homogeneous) entries.
        g_full = np.vstack([np.eye(nr), -np.eye(nr)])
        h_full = np.array([(r.lower_bound, -r.upper_bound)
                           for r in self.model.reactions]).T.flatten()

        # Bounds at the +/-1000 default or equal to zero carry no
        # inhomogeneous information and are removed.
        inhomogeneous = ~((h_full <= -1000) | np.isclose(h_full, 0))
        h_full = h_full[inhomogeneous]
        g_full = g_full[inhomogeneous]

        if extra_g is not None:
            assert extra_g.shape[1] == nr, \
                'extra_g needs one column per reaction'
            assert extra_g.shape[0] == len(extra_h), \
                'extra_g and extra_h lengths must match'
            g_full = np.vstack([g_full, extra_g])
            h_full = np.hstack([h_full, extra_h])

        G = g_full
        h = h_full
        self.nt = nt = len(h)

        # Augmented system over columns (reactions, slack vars, lambda):
        # [[N, 0, 0], [G, -I, -h]].
        self.D = np.vstack([
            np.hstack([N, np.zeros((nm, nt)), np.zeros((nm, 1))]),
            np.hstack([G, -np.eye(nt), np.atleast_2d(-h).T])
        ])

    def create_model_files(self, temp_dir):
        """Write stoichiometry, reversibilities and names for EFMTool."""
        # Augmented stoichiometric matrix.
        np.savetxt(temp_dir + '/stoich.txt', self.D, delimiter='\t')

        # Reaction reversibilities: original reactions keep theirs; slack
        # variables and lambda are irreversible.
        np.savetxt(
            temp_dir + '/revs.txt', np.hstack([
                np.array([r.lower_bound < 0 for r in self.model.reactions]),
                np.zeros((self.nt + 1))]),
            delimiter='\t', fmt='%d', newline='\t')

        # Reaction names: originals, one per slack variable, plus lambda.
        r_names = np.hstack([
            np.array([r.id for r in self.model.reactions]),
            np.array(['s{}'.format(i) for i in range(self.nt)]),
            np.array(['lambda'])
        ])
        with open(temp_dir + '/rnames.txt', 'w') as f:
            f.write('\t'.join('"{}"'.format(name) for name in r_names))

        # Metabolite names: originals plus one per slack row.
        m_names = np.hstack([
            np.array([m.id for m in self.model.metabolites]),
            np.array(['s{}'.format(i) for i in range(self.nt)]),
        ])
        with open(temp_dir + '/mnames.txt', 'w') as f:
            f.write('\t'.join('"{}"'.format(name) for name in m_names))

    def read_double_out(self, out_file):
        """Parse EFMTool's binary double output into a labeled DataFrame.

        Rows labeled ``UEV*`` are unbounded elementary vectors
        (lambda == 0); rows labeled ``BEV*`` are bounded ones, normalized
        to lambda == 1.
        """
        with open(out_file, 'rb') as f:
            # Skip the 13-byte header; payload is big-endian doubles.
            # np.frombuffer replaces the removed np.fromstring; copy with
            # np.array since frombuffer returns a read-only view and the
            # rows are scaled in place below.
            out_arr = np.frombuffer(f.read()[13:], dtype='>d').reshape(
                (-1, self.nt + self.nr + 1)).T
        out_arr = np.array(out_arr, dtype=np.float64).T

        # Sort by the absolute value of the stoichiometry.
        sort_inds = np.abs(out_arr[:, :self.nr]).sum(1).argsort()
        out_arr = out_arr[sort_inds]

        # lambda == 0 marks unbounded vectors; the rest are normalized so
        # lambda == 1.
        unbounded = out_arr[np.isclose(out_arr[:, -1], 0.)]
        bounded = out_arr[~np.isclose(out_arr[:, -1], 0.)]
        if bounded.size:  # Skip normalization when empty
            bounded /= np.atleast_2d(bounded[:, -1]).T

        unbounded_df = pd.DataFrame(
            unbounded[:, :self.nr],
            columns=[r.id for r in self.model.reactions],
            index=['UEV{}'.format(i)
                   for i in range(1, unbounded.shape[0] + 1)])

        bounded_df = pd.DataFrame(
            bounded[:, :self.nr],
            columns=[r.id for r in self.model.reactions],
            index=['BEV{}'.format(i)
                   for i in range(1, bounded.shape[0] + 1)])

        # DataFrame.append was removed in pandas 2.0; concat is the
        # supported equivalent.
        return pd.concat([unbounded_df, bounded_df])
def calculate_elementary_vectors(cobra_model, opts=None, verbose=True,
                                 java_args=None, extra_g=None, extra_h=None):
    """Calculate elementary flux vectors for ``cobra_model``.

    EFVs capture arbitrary linear constraints; approach as detailed in
    S. Klamt et al., PLoS Comput Biol. 13, e1005409-22 (2017).  The
    augmented-constraint arguments are a hacky workaround for expressing
    more complicated constraints without using optlang.

    java_args: string
        Extra command-line options to pass to the java virtual machine.
        Eg. '-Xmx1g' will set the heap space to 1 GB.
    extra_g: (n x nr) array
        Extra entries in the constraint matrix; positive values for lower
        bounds, negative values for upper bounds.
    extra_h: (n) array
        Corresponding bounds for the extra entries matrix.
    """
    wrapper = EFVWrapper(cobra_model, opts, verbose, java_args=java_args)
    wrapper.create_matrices(extra_g=extra_g, extra_h=extra_h)
    return wrapper()
def get_support_minimal(efvs):
    """Return only those elementary flux vectors whose support is not a
    proper superset of another EFV's support."""
    # Boolean mask of (near-)zero entries, keeping the original labels.
    zero_mask = pd.DataFrame(np.isclose(efvs, 0),
                             columns=efvs.columns, index=efvs.index)
    # Support of each EFV: the set of reactions with non-zero flux.
    supports = zero_mask.apply(lambda row: set(row.index[~row]), 1)
    supports = supports[supports != set()]  # Drop the empty set EFV
    minimal_index = _get_support_minimal_list(supports.to_dict())
    return efvs.loc[minimal_index]
def _get_support_minimal_list(set_dict):
    """Return the keys of ``set_dict`` whose support set is not a superset
    of any other entry's support (ties with identical supports drop both)."""
    all_keys = set(set_dict)
    minimal = []
    for candidate, support in tqdm(set_dict.items()):
        others = all_keys - {candidate}
        if not any(support.issuperset(set_dict[other]) for other in others):
            minimal.append(candidate)
    return minimal
|
"""
"""
from django.core.urlresolvers import reverse
from django.test import TestCase
from wagtail.tests.utils import WagtailTestUtils
class BaseTestIndexView(TestCase, WagtailTestUtils):
    """
    Base test case for CRUD index view.

    Subclasses must set ``url_namespace`` (used to reverse the
    ``<namespace>:index`` URL) and ``template_dir`` (directory containing
    ``index.html``), and implement ``_create_sequential_instance()``.
    """
    url_namespace = None
    template_dir = None

    def _create_sequential_instance(self, index):
        """
        Stub method for extending class to create sequential
        model instances.

        :param index: the sequential index to use.
        :raises NotImplementedError: always; subclasses must override.
        """
        # Raise NotImplementedError, not the ``NotImplemented`` singleton:
        # ``NotImplemented`` is not an exception and calling it raises a
        # TypeError, which would mask the real problem.
        raise NotImplementedError(
            'This method must be implemented by {0}'.format(
                self.__class__.__name__
            )
        )

    def setUp(self):
        # Authenticate as an admin user before each test.
        self.login()

    def get(self, params=None):
        """GET the index view, passing ``params`` as the query string."""
        if not params:
            params = {}
        return self.client.get(
            reverse('{0}:index'.format(self.url_namespace)), params)

    def populate(self):
        """
        Populate several model class instances (enough for pagination).
        """
        for i in range(50):
            self._create_sequential_instance(i)

    def test_get(self):
        # Generate the response.
        response = self.get()

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/index.html'.format(self.template_dir)
        )

    def test_search(self):
        # Generate the response.
        response = self.get({'q': 'keyword'})

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['query_string'], 'keyword')

    def test_pagination(self):
        # Create model class instances.
        self.populate()

        # Generate the response.
        response = self.get({'p': 2})

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/index.html'.format(self.template_dir)
        )
        self.assertEqual(response.context['page_obj'].number, 2)

    def test_pagination_invalid(self):
        # Create model class instances.
        self.populate()

        # Generate the response: a non-numeric page falls back to page 1.
        response = self.get({'p': 'fake'})

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/index.html'.format(self.template_dir)
        )
        self.assertEqual(response.context['page_obj'].number, 1)

    def test_pagination_out_of_range(self):
        # Create model class instances.
        self.populate()

        # Generate the response: past-the-end pages clamp to the last page.
        response = self.get({'p': 99999})

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/index.html'.format(self.template_dir)
        )
        self.assertEqual(
            response.context['page_obj'].number,
            response.context['paginator'].num_pages
        )

    def test_ordering(self):
        # Each supported ordering should render successfully.
        orderings = ['title', '-created_at']
        for ordering in orderings:
            response = self.get({'ordering': ordering})
            self.assertEqual(response.status_code, 200)
class BaseTestCreateView(TestCase, WagtailTestUtils):
    """
    Base test case for CRUD add view.

    Subclasses must set ``url_namespace``, ``template_dir`` and
    ``model_class``, and implement ``_get_post_data()``.
    """
    url_namespace = None
    template_dir = None
    model_class = None

    def _get_post_data(self):
        """
        Stub method for extending class to return data dictionary
        to create a new model instance on POST.

        :rtype: dict.
        :raises NotImplementedError: always; subclasses must override.
        """
        # NotImplementedError, not the non-exception ``NotImplemented``
        # singleton, so a missing override fails with a clear error.
        raise NotImplementedError(
            'This method must be implemented by {0}'.format(
                self.__class__.__name__
            )
        )

    def setUp(self):
        self.login()

    def test_get(self):
        # Generate the response.
        response = self.client.get(
            reverse('{0}:add'.format(self.url_namespace))
        )

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/add.html'.format(self.template_dir)
        )

    def test_post(self):
        # Get POST data.
        data = self._get_post_data()

        # Generate the response.
        response = self.client.post(
            reverse('{0}:add'.format(self.url_namespace)),
            data
        )

        # Check assertions: successful creation redirects to the index
        # view and the new instance exists in the database.
        self.assertRedirects(
            response,
            reverse('{0}:index'.format(self.url_namespace))
        )
        self.assertTrue(
            self.model_class.objects.filter(**data).exists()
        )
class BaseTestUpdateView(TestCase, WagtailTestUtils):
    """
    Base test case for CRUD edit view.

    Subclasses must set ``url_namespace``, ``template_dir`` and
    ``model_class``, and implement ``_get_instance()`` and
    ``_get_post_data()``.
    """
    url_namespace = None
    template_dir = None
    model_class = None

    def _get_instance(self):
        """
        Stub method for extending class to return saved model class
        instance.

        :rtype: django.db.models.Model.
        :raises NotImplementedError: always; subclasses must override.
        """
        # NotImplementedError, not the non-exception ``NotImplemented``
        # singleton, so a missing override fails with a clear error.
        raise NotImplementedError(
            'This method must be implemented by {0}'.format(
                self.__class__.__name__
            )
        )

    def _get_post_data(self):
        """
        Stub method for extending class to return data dictionary
        to update the model instance on POST.

        :rtype: dict.
        :raises NotImplementedError: always; subclasses must override.
        """
        raise NotImplementedError(
            'This method must be implemented by {0}'.format(
                self.__class__.__name__
            )
        )

    def setUp(self):
        # Create the instance and login.
        self.instance = self._get_instance()
        self.login()

    def test_get(self):
        # Generate the response.
        response = self.client.get(
            reverse(
                '{0}:edit'.format(self.url_namespace),
                args=(self.instance.pk,)
            )
        )

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/edit.html'.format(self.template_dir)
        )

    def test_post(self):
        # Get POST data.
        data = self._get_post_data()

        # Generate the response.
        response = self.client.post(
            reverse(
                '{0}:edit'.format(self.url_namespace),
                args=(self.instance.pk,)
            ),
            data
        )

        # Check assertions: a successful edit redirects to the index view
        # and an instance matching the new data exists.
        self.assertRedirects(
            response,
            reverse('{0}:index'.format(self.url_namespace)))
        self.assertTrue(
            self.model_class.objects.filter(**data).exists()
        )
class BaseTestDeleteView(TestCase, WagtailTestUtils):
    """
    Base test case for CRUD delete view.

    Subclasses must set ``url_namespace``, ``template_dir`` and
    ``model_class``, and implement ``_get_instance()``.
    """
    url_namespace = None
    template_dir = None
    model_class = None

    def _get_instance(self):
        """
        Stub method for extending class to return saved model class
        instance.

        :rtype: django.db.models.Model.
        :raises NotImplementedError: always; subclasses must override.
        """
        # NotImplementedError, not the non-exception ``NotImplemented``
        # singleton, so a missing override fails with a clear error.
        raise NotImplementedError(
            'This method must be implemented by {0}'.format(
                self.__class__.__name__
            )
        )

    def setUp(self):
        # Create the instance and login.
        self.instance = self._get_instance()
        self.login()

    def test_get(self):
        # Generate the response.
        response = self.client.get(
            reverse(
                '{0}:delete'.format(self.url_namespace),
                args=(self.instance.pk,)
            )
        )

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/confirm_delete.html'.format(self.template_dir)
        )

    def test_delete(self):
        # Generate the response; the POST body content is irrelevant —
        # any POST confirms the deletion.
        response = self.client.post(
            reverse(
                '{0}:delete'.format(self.url_namespace),
                args=(self.instance.pk,)
            ),
            {'foo': 'bar'}
        )

        # Check assertions: deletion redirects to the index view and the
        # instance no longer exists.
        self.assertRedirects(
            response,
            reverse('{0}:index'.format(self.url_namespace))
        )
        self.assertFalse(
            self.model_class.objects.filter(pk=self.instance.pk).exists()
        )
class BaseTestChooserView(TestCase, WagtailTestUtils):
    """
    Base test for chooser view.

    Subclasses must set ``url_namespace``, ``template_dir`` and
    ``model_class``, and implement ``_create_sequential_instance()``.
    """
    url_namespace = None
    template_dir = None
    model_class = None

    def _create_sequential_instance(self, index):
        """
        Stub method for extending class to create sequential
        model instances.

        :param index: the sequential index to use.
        :raises NotImplementedError: always; subclasses must override.
        """
        # NotImplementedError, not the non-exception ``NotImplemented``
        # singleton, so a missing override fails with a clear error.
        raise NotImplementedError(
            'This method must be implemented by {0}'.format(
                self.__class__.__name__
            )
        )

    def setUp(self):
        self.login()

    def get(self, params=None):
        """GET the chooser view, passing ``params`` as the query string."""
        if not params:
            params = {}
        return self.client.get(
            reverse('{0}:choose'.format(self.url_namespace)),
            params
        )

    def populate(self):
        """
        Populate several model class instances (enough for pagination).
        """
        for i in range(50):
            self._create_sequential_instance(i)

    def test_get(self):
        # Generate the response.
        response = self.get()

        # Check assertions: the modal chrome, the result listing, and
        # the chooser javascript should all be rendered.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/chooser.html'.format(self.template_dir)
        )
        self.assertTemplateUsed(
            response,
            '{0}/results.html'.format(self.template_dir)
        )
        self.assertTemplateUsed(
            response,
            '{0}/chooser.js'.format(self.template_dir)
        )

    def test_search(self):
        # Generate the response.
        response = self.get({'q': 'keyword'})

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context['query_string'], 'keyword')

    def test_pagination(self):
        # Create model class instances.
        self.populate()

        # Generate the response.
        response = self.get({'p': 2})

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/results.html'.format(self.template_dir)
        )
        self.assertEqual(response.context['page_obj'].number, 2)

    def test_pagination_invalid(self):
        # Create model class instances.
        self.populate()

        # Generate the response: a non-numeric page falls back to page 1.
        response = self.get({'p': 'fake'})

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/results.html'.format(self.template_dir)
        )
        self.assertEqual(response.context['page_obj'].number, 1)

    def test_pagination_out_of_range(self):
        # Create model class instances.
        self.populate()

        # Generate the response: past-the-end pages clamp to the last page.
        response = self.get({'p': 99999})

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/results.html'.format(self.template_dir)
        )
        self.assertEqual(
            response.context['page_obj'].number,
            response.context['paginator'].num_pages
        )
class BaseTestChosenView(TestCase, WagtailTestUtils):
    """
    Base test case for the chooser's "chosen" response view.

    Subclasses must set ``url_namespace``, ``template_dir`` and
    ``model_class``, and implement ``_get_instance()``.
    """
    url_namespace = None
    template_dir = None
    model_class = None

    def _get_instance(self):
        """
        Stub method for extending class to return saved model class
        instance.

        :rtype: django.db.models.Model.
        :raises NotImplementedError: always; subclasses must override.
        """
        # NotImplementedError, not the non-exception ``NotImplemented``
        # singleton, so a missing override fails with a clear error.
        raise NotImplementedError(
            'This method must be implemented by {0}'.format(
                self.__class__.__name__
            )
        )

    def setUp(self):
        # Create the instance and login.
        self.instance = self._get_instance()
        self.login()

    def test_get(self):
        # Generate the response. ``pk`` is used (rather than ``id``) for
        # consistency with the sibling base views and to support models
        # with custom primary keys.
        response = self.client.get(
            reverse(
                '{0}:chosen'.format(self.url_namespace),
                args=(self.instance.pk,)
            )
        )

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/chosen.js'.format(self.template_dir)
        )
class BaseTestChooserCreateView(TestCase, WagtailTestUtils):
    """
    Base test case for the chooser's create (add) tab.

    Subclasses must set ``url_namespace``, ``template_dir`` and
    ``model_class``, and implement ``_get_post_data()``.
    """
    url_namespace = None
    template_dir = None
    model_class = None

    def _get_post_data(self):
        """
        Stub method for extending class to return data dictionary
        to create a new model instance on POST.

        :rtype: dict.
        :raises NotImplementedError: always; subclasses must override.
        """
        # NotImplementedError, not the non-exception ``NotImplemented``
        # singleton, so a missing override fails with a clear error.
        raise NotImplementedError(
            'This method must be implemented by {0}'.format(
                self.__class__.__name__
            )
        )

    def setUp(self):
        self.login()

    def test_get(self):
        # Generate the response.
        response = self.client.get(
            reverse('{0}:choose'.format(self.url_namespace))
        )

        # Check assertions.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(
            response,
            '{0}/chooser.html'.format(self.template_dir)
        )
        self.assertTemplateUsed(
            response,
            '{0}/results.html'.format(self.template_dir)
        )
        self.assertTemplateUsed(
            response,
            '{0}/chooser.js'.format(self.template_dir)
        )

    def test_post(self):
        # Get POST data.
        data = self._get_post_data()

        # Generate the response.
        response = self.client.post(
            reverse('{0}:choose'.format(self.url_namespace)),
            data
        )

        # Check assertions: a successful creation responds through the
        # chooser modal javascript and persists the new instance.
        self.assertTemplateUsed(
            response,
            '{0}/chosen.js'.format(self.template_dir)
        )
        self.assertContains(
            response,
            'modal.respond'
        )
        self.assertTrue(
            self.model_class.objects.filter(**data).exists()
        )
|
from __future__ import unicode_literals
from future.builtins import str
from future.utils import with_metaclass
from json import loads
try:
from urllib.request import urlopen
from urllib.parse import urlencode
except ImportError:
from urllib import urlopen, urlencode
from django.contrib.contenttypes.generic import GenericForeignKey
from django.db import models
from django.db.models.base import ModelBase
from django.db.models.signals import post_save
from django.template.defaultfilters import truncatewords_html
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import strip_tags
from django.utils.timesince import timesince
from django.utils.timezone import now
from django.utils.translation import ugettext, ugettext_lazy as _
from mezzanine.conf import settings
from mezzanine.core.fields import RichTextField
from mezzanine.core.managers import DisplayableManager, CurrentSiteManager
from mezzanine.generic.fields import KeywordsField
from mezzanine.utils.html import TagCloser
from mezzanine.utils.models import base_concrete_model, get_user_model_name
from mezzanine.utils.sites import current_site_id
from mezzanine.utils.urls import admin_url, slugify, unique_slug
user_model_name = get_user_model_name()
class SiteRelated(models.Model):
    """
    Abstract model for all things site-related. Adds a foreignkey to
    Django's ``Site`` model, and filters by site with all querysets.
    See ``mezzanine.utils.sites.current_site_id`` for implementation
    details.
    """
    # Default manager filters every queryset to the current site.
    objects = CurrentSiteManager()
    class Meta:
        abstract = True
    # Hidden from admin forms; populated automatically in ``save()``.
    site = models.ForeignKey("sites.Site", editable=False)
    def save(self, update_site=False, *args, **kwargs):
        """
        Set the site to the current site when the record is first
        created, or the ``update_site`` argument is explicitly set
        to ``True``.
        """
        # ``self.id`` is falsy before the first save, so new records
        # always get the current site assigned.
        if update_site or not self.id:
            self.site_id = current_site_id()
        super(SiteRelated, self).save(*args, **kwargs)
@python_2_unicode_compatible
class Slugged(SiteRelated):
    """
    Abstract model that handles auto-generating slugs. Each slugged
    object is also affiliated with a specific site object.
    """
    title = models.CharField(_("Title"), max_length=500)
    slug = models.CharField(_("URL"), max_length=2000, blank=True, null=True,
        help_text=_("Leave blank to have the URL auto-generated from "
                    "the title."))
    class Meta:
        abstract = True
    def __str__(self):
        return self.title
    def save(self, *args, **kwargs):
        """
        If no slug is provided, generates one before saving.
        """
        if not self.slug:
            self.slug = self.generate_unique_slug()
        super(Slugged, self).save(*args, **kwargs)
    def generate_unique_slug(self):
        """
        Create a unique slug by passing the result of get_slug() to
        utils.urls.unique_slug, which appends an index if necessary.
        """
        # For custom content types, use the ``Page`` instance for
        # slug lookup.
        concrete_model = base_concrete_model(Slugged, self)
        # Exclude self so re-saving does not collide with its own slug.
        slug_qs = concrete_model.objects.exclude(id=self.id)
        return unique_slug(slug_qs, "slug", self.get_slug())
    def get_slug(self):
        """
        Allows subclasses to implement their own slug creation logic.
        """
        attr = "title"
        if settings.USE_MODELTRANSLATION:
            from modeltranslation.utils import build_localized_fieldname
            attr = build_localized_fieldname(attr, settings.LANGUAGE_CODE)
        # Get self.title_xx where xx is the default language, if any.
        # Get self.title otherwise.
        return slugify(getattr(self, attr, None) or self.title)
    def admin_link(self):
        # Rendered as a raw HTML link in the admin changelist
        # (``allow_tags`` below opts out of auto-escaping).
        return "<a href='%s'>%s</a>" % (self.get_absolute_url(),
                                        ugettext("View on site"))
    admin_link.allow_tags = True
    admin_link.short_description = ""
class MetaData(models.Model):
    """
    Abstract model that provides meta data for content.
    """
    # Optional override for the HTML <title> tag; falls back to str(self)
    # via the ``meta_title()`` accessor below.
    _meta_title = models.CharField(_("Title"), null=True, blank=True,
        max_length=500,
        help_text=_("Optional title to be used in the HTML title tag. "
                    "If left blank, the main title field will be used."))
    description = models.TextField(_("Description"), blank=True)
    gen_description = models.BooleanField(_("Generate description"),
        help_text=_("If checked, the description will be automatically "
                    "generated from content. Uncheck if you want to manually "
                    "set a custom description."), default=True)
    keywords = KeywordsField(verbose_name=_("Keywords"))
    class Meta:
        abstract = True
    def save(self, *args, **kwargs):
        """
        Set the description field on save.
        """
        # Regenerated on *every* save while ``gen_description`` is
        # checked, overwriting any manual edits to ``description``.
        if self.gen_description:
            self.description = strip_tags(self.description_from_content())
        super(MetaData, self).save(*args, **kwargs)
    def meta_title(self):
        """
        Accessor for the optional ``_meta_title`` field, which returns
        the string version of the instance if not provided.
        """
        return self._meta_title or str(self)
    def description_from_content(self):
        """
        Returns the first block or sentence of the first content-like
        field.
        """
        description = ""
        # Use the first RichTextField, or TextField if none found.
        for field_type in (RichTextField, models.TextField):
            if not description:
                for field in self._meta.fields:
                    if isinstance(field, field_type) and \
                            field.name != "description":
                        description = getattr(self, field.name)
                        if description:
                            from mezzanine.core.templatetags.mezzanine_tags \
                                import richtext_filters
                            description = richtext_filters(description)
                            break
        # Fall back to the title if description couldn't be determined.
        if not description:
            description = str(self)
        # Strip everything after the first block or sentence.
        ends = ("</p>", "<br />", "<br/>", "<br>", "</ul>",
                "\n", ". ", "! ", "? ")
        for end in ends:
            pos = description.lower().find(end)
            if pos > -1:
                description = TagCloser(description[:pos]).html
                break
        else:
            # No terminator found (for/else): cap the length instead.
            description = truncatewords_html(description, 100)
        return description
class TimeStamped(models.Model):
    """
    Provides created and updated timestamps on models.
    """
    class Meta:
        abstract = True
    # Set in ``save()`` rather than via auto_now/auto_now_add so a
    # single "now" value is shared by both fields on creation.
    created = models.DateTimeField(null=True, editable=False)
    updated = models.DateTimeField(null=True, editable=False)
    def save(self, *args, **kwargs):
        # Capture one timestamp so created == updated on first save.
        _now = now()
        self.updated = _now
        if not self.id:
            self.created = _now
        super(TimeStamped, self).save(*args, **kwargs)
# Workflow states for ``Displayable.status``: drafts are only visible to
# admin users; published content is publicly visible (subject to the
# publish/expiry dates).
CONTENT_STATUS_DRAFT = 1
CONTENT_STATUS_PUBLISHED = 2
CONTENT_STATUS_CHOICES = (
    (CONTENT_STATUS_DRAFT, _("Draft")),
    (CONTENT_STATUS_PUBLISHED, _("Published")),
)
class Displayable(Slugged, MetaData, TimeStamped):
    """
    Abstract model that provides features of a visible page on the
    website such as publishing fields. Basis of Mezzanine pages,
    blog posts, and Cartridge products.
    """
    status = models.IntegerField(_("Status"),
        choices=CONTENT_STATUS_CHOICES, default=CONTENT_STATUS_PUBLISHED,
        help_text=_("With Draft chosen, will only be shown for admin users "
                    "on the site."))
    publish_date = models.DateTimeField(_("Published from"),
        help_text=_("With Published chosen, won't be shown until this time"),
        blank=True, null=True)
    expiry_date = models.DateTimeField(_("Expires on"),
        help_text=_("With Published chosen, won't be shown after this time"),
        blank=True, null=True)
    # Cached bit.ly (or plain absolute) URL; see ``set_short_url()``.
    short_url = models.URLField(blank=True, null=True)
    in_sitemap = models.BooleanField(_("Show in sitemap"), default=True)
    objects = DisplayableManager()
    # Field name -> weight used by Mezzanine's search machinery.
    search_fields = {"keywords": 10, "title": 5}
    class Meta:
        abstract = True
    def save(self, *args, **kwargs):
        """
        Set default for ``publish_date``. We can't use ``auto_now_add`` on
        the field as it will be blank when a blog post is created from
        the quick blog form in the admin dashboard.
        """
        if self.publish_date is None:
            self.publish_date = now()
        super(Displayable, self).save(*args, **kwargs)
    def get_admin_url(self):
        """Return the admin change-form URL for this object."""
        return admin_url(self, "change", self.id)
    def publish_date_since(self):
        """
        Returns the time since ``publish_date``.
        """
        return timesince(self.publish_date)
    publish_date_since.short_description = _("Published from")
    def get_absolute_url(self):
        """
        Raise an error if called on a subclass without
        ``get_absolute_url`` defined, to ensure all search results
        contains a URL.
        """
        name = self.__class__.__name__
        raise NotImplementedError("The model %s does not have "
                                  "get_absolute_url defined" % name)
    def set_short_url(self):
        """
        Sets the ``short_url`` attribute using the bit.ly credentials
        if they have been specified, and saves it. Used by the
        ``set_short_url_for`` template tag, and ``TweetableAdmin``.
        """
        if not self.short_url:
            from mezzanine.conf import settings
            settings.use_editable()
            # Default to the full URL; replaced by the bit.ly short URL
            # below when a token is configured and the API call succeeds.
            parts = (self.site.domain, self.get_absolute_url())
            self.short_url = "http://%s%s" % parts
            if settings.BITLY_ACCESS_TOKEN:
                url = "https://api-ssl.bit.ly/v3/shorten?%s" % urlencode({
                    "access_token": settings.BITLY_ACCESS_TOKEN,
                    "uri": self.short_url,
                })
                response = loads(urlopen(url).read().decode("utf-8"))
                if response["status_code"] == 200:
                    self.short_url = response["data"]["url"]
            self.save()
        return ""
    def _get_next_or_previous_by_publish_date(self, is_next, **kwargs):
        """
        Retrieves next or previous object by publish date. We implement
        our own version instead of Django's so we can hook into the
        published manager and concrete subclasses.
        """
        arg = "publish_date__gt" if is_next else "publish_date__lt"
        order = "publish_date" if is_next else "-publish_date"
        lookup = {arg: self.publish_date}
        concrete_model = base_concrete_model(Displayable, self)
        try:
            # Uncalled manager methods here - invoked with ``**kwargs``
            # below - preferring ``published`` when the manager has it.
            queryset = concrete_model.objects.published
        except AttributeError:
            queryset = concrete_model.objects.all
        try:
            return queryset(**kwargs).filter(**lookup).order_by(order)[0]
        except IndexError:
            # No adjacent object; implicitly return None.
            pass
    def get_next_by_publish_date(self, **kwargs):
        """
        Retrieves next object by publish date.
        """
        return self._get_next_or_previous_by_publish_date(True, **kwargs)
    def get_previous_by_publish_date(self, **kwargs):
        """
        Retrieves previous object by publish date.
        """
        return self._get_next_or_previous_by_publish_date(False, **kwargs)
class RichText(models.Model):
    """
    Provides a Rich Text field for managing general content and making
    it searchable.
    """
    content = RichTextField(_("Content"))
    # Consumed by Mezzanine's search machinery.
    search_fields = ("content",)
    class Meta:
        abstract = True
class OrderableBase(ModelBase):
    """
    Checks for ``order_with_respect_to`` on the model's inner ``Meta``
    class and if found, copies it to a custom attribute and deletes it
    since it will cause errors when used with ``ForeignKey("self")``.
    Also creates the ``ordering`` attribute on the ``Meta`` class if
    not yet provided.
    """
    def __new__(cls, name, bases, attrs):
        # Ensure every model processed by this metaclass has a Meta class.
        if "Meta" not in attrs:
            class Meta:
                pass
            attrs["Meta"] = Meta
        if hasattr(attrs["Meta"], "order_with_respect_to"):
            # Move ``order_with_respect_to`` off Meta onto the model
            # itself so Django's own handling never sees it.
            order_field = attrs["Meta"].order_with_respect_to
            attrs["order_with_respect_to"] = order_field
            del attrs["Meta"].order_with_respect_to
        if not hasattr(attrs["Meta"], "ordering"):
            # Default ordering to the ``_order`` field added by Orderable.
            setattr(attrs["Meta"], "ordering", ("_order",))
        return super(OrderableBase, cls).__new__(cls, name, bases, attrs)
class Orderable(with_metaclass(OrderableBase, models.Model)):
    """
    Abstract model that provides a custom ordering integer field
    similar to using Meta's ``order_with_respect_to``, since to
    date (Django 1.2) this doesn't work with ``ForeignKey("self")``,
    or with Generic Relations. We may also want this feature for
    models that aren't ordered with respect to a particular field.
    """
    # Zero-based position within the sibling group; assigned in ``save()``.
    _order = models.IntegerField(_("Order"), null=True)
    class Meta:
        abstract = True
    def with_respect_to(self):
        """
        Returns a dict to use as a filter for ordering operations
        containing the original ``Meta.order_with_respect_to`` value
        if provided. If the field is a Generic Relation, the dict
        returned contains names and values for looking up the
        relation's ``ct_field`` and ``fk_field`` attributes.
        """
        try:
            name = self.order_with_respect_to
            value = getattr(self, name)
        except AttributeError:
            # No ``order_with_respect_to`` specified on the model.
            return {}
        # Support for generic relations.
        field = getattr(self.__class__, name)
        if isinstance(field, GenericForeignKey):
            names = (field.ct_field, field.fk_field)
            return dict([(n, getattr(self, n)) for n in names])
        return {name: value}
    def save(self, *args, **kwargs):
        """
        Set the initial ordering value.
        """
        if self._order is None:
            # Append to the end: new order == current sibling count.
            lookup = self.with_respect_to()
            lookup["_order__isnull"] = False
            concrete_model = base_concrete_model(Orderable, self)
            self._order = concrete_model.objects.filter(**lookup).count()
        super(Orderable, self).save(*args, **kwargs)
    def delete(self, *args, **kwargs):
        """
        Update the ordering values for siblings.
        """
        # Shift every later sibling down by one to close the gap.
        lookup = self.with_respect_to()
        lookup["_order__gte"] = self._order
        concrete_model = base_concrete_model(Orderable, self)
        after = concrete_model.objects.filter(**lookup)
        after.update(_order=models.F("_order") - 1)
        super(Orderable, self).delete(*args, **kwargs)
    def _get_next_or_previous_by_order(self, is_next, **kwargs):
        """
        Retrieves next or previous object by order. We implement our
        own version instead of Django's so we can hook into the
        published manager, concrete subclasses and our custom
        ``with_respect_to`` method.
        """
        lookup = self.with_respect_to()
        lookup["_order"] = self._order + (1 if is_next else -1)
        concrete_model = base_concrete_model(Orderable, self)
        try:
            # Uncalled manager method, invoked with ``**kwargs`` below;
            # prefer ``published`` when the manager provides it.
            queryset = concrete_model.objects.published
        except AttributeError:
            queryset = concrete_model.objects.filter
        try:
            return queryset(**kwargs).get(**lookup)
        except concrete_model.DoesNotExist:
            # No adjacent sibling; implicitly return None.
            pass
    def get_next_by_order(self, **kwargs):
        """
        Retrieves next object by order.
        """
        return self._get_next_or_previous_by_order(True, **kwargs)
    def get_previous_by_order(self, **kwargs):
        """
        Retrieves previous object by order.
        """
        return self._get_next_or_previous_by_order(False, **kwargs)
class Ownable(models.Model):
    """
    Abstract model that provides ownership of an object for a user.
    """
    user = models.ForeignKey(user_model_name, verbose_name=_("Author"),
        related_name="%(class)ss")
    class Meta:
        abstract = True
    def is_editable(self, request):
        """
        Restrict in-line editing to the objects's owner and superusers.
        """
        return request.user.is_superuser or request.user.id == self.user_id
class SitePermission(models.Model):
    """
    Permission relationship between a user and a site that's
    used instead of ``User.is_staff``, for admin and inline-editing
    access.
    """
    user = models.ForeignKey(user_model_name, verbose_name=_("Author"),
        related_name="%(class)ss")
    sites = models.ManyToManyField("sites.Site", blank=True,
        verbose_name=_("Sites"))
    class Meta:
        verbose_name = _("Site permission")
        verbose_name_plural = _("Site permissions")
def create_site_permission(sender, **kw):
    """
    ``post_save`` receiver: grant staff users (non-superusers) a
    ``SitePermission`` for the current site when their user record is
    saved. Saves from models other than the user model are ignored.
    """
    model_label = "%s.%s" % (sender._meta.app_label, sender._meta.object_name)
    if model_label.lower() != user_model_name.lower():
        return
    user = kw["instance"]
    if not user.is_staff or user.is_superuser:
        return
    perm, created = SitePermission.objects.get_or_create(user=user)
    # Newly created permissions (or ones with no sites yet) get the
    # current site attached.
    if created or perm.sites.count() < 1:
        perm.sites.add(current_site_id())
post_save.connect(create_site_permission)
|
from PyObjCTools.TestSupport import *
from Quartz.QuartzCore import *
from Quartz import *
class TestCIPluginInterfaceHelper (NSObject):
    # Helper that mimics the CIPlugInRegistration interface; ``load_``
    # returns 1 (truthy) so its BOOL result metadata can be asserted.
    def load_(self, h): return 1
class TestCIPlugInInterface (TestCase):
    """Tests for the CIPlugIn interface metadata."""
    def testMethods(self):
        # ``load:`` must be declared as returning an Objective-C BOOL.
        self.assertResultIsBOOL(TestCIPluginInterfaceHelper.load_)
    def no_testProtocol(self):
        # Disabled (``no_`` prefix) until the formal protocol is exported.
        p = objc.protocolNamed('CIPlugInRegistration')
        # Fixed typo: ``assertIsInstancE`` would raise AttributeError.
        self.assertIsInstance(p, objc.formal_protocol)
if __name__ == "__main__":
    # ``main`` comes from PyObjCTools.TestSupport (star import above).
    main()
|
'''
Created on Mar 7, 2012
@author: clarkmatthew
Placeholder class providing convenience methods for testing, modifying, and
retrieving Eucalyptus cloud property information.
The intention is to reduce the time spent looking up property names and
values outside of the eutester test lib, etc.
Note: Debug output for the tester.sys command is controlled by the
eutester/eucaops object
Sample:
cat my_cloud.conf
> 192.168.1.76 CENTOS 6.3 64 REPO [CLC WS]
> 192.168.1.77 CENTOS 6.3 64 REPO [SC00 CC00]
> 192.168.1.78 CENTOS 6.3 64 REPO [NC00]
from eucaops import Eucaops
from eutester import euproperties
Eucaops(config_file='my_cloud.conf', password='mypassword')
ep_mgr = euproperties.Euproperty_Manager(tester,
verbose=True,
debugmethod=tester.debug)
#Get/Set value from dynamic method created in Euproperty_Manager...
san_host_prop_value = ep_mgr.get_storage_sanhost_value()
ep_mgr.set_storage_sanhost_value('192.168.1.200')
#Get/set value from euproperty directly...
san_host_prop = ep_mgr.get_property('san_host', 'storage', 'PARTI00')
san_host_prop_value = san_host_prop.get()
san_host_prop.set('192.168.1.200')
#Get multiple properties at once based on certain filters...
storage_properties = ep_mgr.get_properties(service_type='storage')
partition1_properties = ep_mgr.get_properties(partition='partition1')
'''
import types
import re
import copy
class Euproperty_Type():
    """
    Enumeration of known Eucalyptus property service types; each class
    attribute maps a service-type name to its string form.
    """
    authentication = 'authentication'
    autoscaling = 'autoscaling'
    bootstrap = 'bootstrap'
    cloud = 'cloud'
    cloudwatch = 'cloudwatch'
    cluster = 'cluster'
    dns = 'dns'
    imaging = 'imaging'
    loadbalancing = 'loadbalancing'
    objectstorage = 'objectstorage'
    reporting = 'reporting'
    storage = 'storage'
    system = 'system'
    tagging = 'tagging'
    tokens = 'tokens'
    vmwarebroker = 'vmwarebroker'
    walrus = 'walrus'
    www = 'www'
    @classmethod
    def get_type_by_string(cls, typestring):
        """
        Look up a property service type by its string name.

        :param typestring: name of the service type, eg 'storage'.
        :returns: the matching type string, or None if no such type is
                  defined on this class.
        :raises AttributeError: if the attribute lookup itself fails.
        """
        try:
            if hasattr(cls, str(typestring)):
                return getattr(cls, str(typestring))
        except AttributeError as ae:
            # Bug fix: report the requested name - the original printed
            # ``str(str)`` (the builtin type) instead of the argument.
            print('Property type:' + str(typestring) +
                  " not defined, new property type?")
            raise ae
class Euproperty():
    """
    A single Eucalyptus cloud property: parsed name, value, service type
    and partition, plus the manager used to get/set/reset it.
    """
    def __init__(self, prop_mgr, property_string, service_type, partition,
                 name, value, mandatory=False, description=""):
        """
        :param prop_mgr: Euproperty_Manager that owns this property.
        :param property_string: full dotted property string.
        :param service_type: service type name, eg 'storage'.
        :param partition: partition/cluster this property belongs to.
        :param name: short property name.
        :param value: current property value.
        :param mandatory: whether the property must be set.
        :param description: human-readable description.
        """
        self.prop_mgr = prop_mgr
        self.service_type = Euproperty_Type.get_type_by_string(service_type)
        self.partition = partition
        self.name = name
        self.value = value
        self.property_string = property_string
        self.lastvalue = value
        self.mandatory = mandatory
        self.description = description
    def update(self):
        """
        Re-fetch this property from the property manager and refresh this
        object's state in place.
        """
        newprop = self.prop_mgr.update_property_list(
            property_name=self.property_string)[0]
        # Bug fix: the original rebound the local name ``self`` (a no-op
        # that discarded the refreshed property); copy the refreshed
        # attributes onto this instance instead.
        self.__dict__.update(newprop.__dict__)
    def get(self):
        """Return the current (cached) value."""
        return self.value
    def set(self, value):
        """Set the property to ``value`` via the property manager."""
        return self.prop_mgr.set_property(self, value)
    def reset_to_default(self):
        """Reset the property to its cloud default via the manager."""
        return self.prop_mgr.reset_property_to_default(self)
    def print_self(self, include_header=True, show_description=True,
                   print_method=None, printout=True):
        """
        Format this property as a table row (optionally with a header and
        description), print it via ``print_method`` (defaults to the
        manager's debug) when ``printout`` is set, and return the string.
        """
        if printout and not print_method:
            print_method = self.prop_mgr.debug
        name_len = 50
        service_len = 20
        part_len = 20
        value_len = 30
        line_len = 120
        ret = ""
        header = str('NAME').ljust(name_len)
        header += "|" + str('SERVICE TYPE').center(service_len)
        header += "|" + str('PARTITION').center(part_len)
        header += "|" + str('VALUE').center(value_len)
        header += "\n"
        out = str(self.name).ljust(name_len)
        out += "|" + str(self.service_type).center(service_len)
        out += "|" + str(self.partition).center(part_len)
        out += "|" + str(self.value).center(value_len)
        out += "\n"
        # Same output as the original xrange loop: line_len + 1 dashes,
        # but portable across Python versions.
        line = "-" * (line_len + 1) + "\n"
        if include_header:
            ret = "\n" + line + header + line
        ret += out
        if show_description:
            ret += "DESCRIPTION: " + self.description + "\n"
        ret += line
        if print_method:
            print_method(ret)
        return ret
class Property_Map():
    """Simple attribute bag: every keyword argument becomes an attribute."""
    def __init__(self, **kwargs):
        for key, val in kwargs.items():
            setattr(self, key, val)
class Euproperty_Manager():
tester = None
verbose = False
debugmethod = None
def __init__(self, tester, verbose=False, machine=None,
service_url=None, debugmethod=None):
self.tester = tester
self.debugmethod = debugmethod or tester.debug
self.verbose = verbose
#self.work_machine = machine or self.get_clc()
#self.access_key = self.tester.aws_access_key_id
#self.secret_key = self.tester.aws_secret_access_key
#self.service_url = service_url or str(
# 'http://' + str(self.get_clc().hostname) +
# ':8773/services/Eucalytpus')
#self.cmdpath = self.tester.eucapath+'/usr/sbin/'
self.properties = []
#self.property_map = Property_Map()
self.update_property_list()
#self.tester.property_manager = self
#self.zones = self.tester.ec2.get_zones()
#def get_clc(self):
# return self.tester.service_manager.get_enabled_clc().machine
def debug(self, msg):
'''
simple method for printing debug.
msg - mandatory - string to be printed
method - optional - callback to over ride default printing method
'''
if (self.debugmethod is None):
print (str(msg))
else:
self.debugmethod(msg)
def show_all_authentication_properties(self,
partition=None,
debug_method=None,
descriptions=True):
return self.show_all_properties(
service_type=Euproperty_Type.authentication,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_bootstrap_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.bootstrap,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_cloud_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.cloud,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_cluster_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.cluster,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_reporting_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.reporting,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_storage_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.storage,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_system_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.system,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_vmwarebroker_properties(self,
partition=None,
debug_method=None,
descriptions=True):
return self.show_all_properties(
service_type=Euproperty_Type.vmwarebroker,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_walrus_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.walrus,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_objectstorage_properties(self,
partition=None,
debug_method=None,
descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.objectstorage,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_www_properties(self, partition=None, debug_method=None, descriptions=True):
return self.show_all_properties(service_type=Euproperty_Type.www,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_autoscaling_properties(self,
partition=None,
debug_method=None,
descriptions=True):
return self.show_all_properties(
service_type=Euproperty_Type.autoscaling,
partition=partition,
debug_method=debug_method,
descriptions=descriptions)
def show_all_loadbalancing_properties(self,
partition=None,
debug_method=None):
return self.show_all_properties(
service_type=Euproperty_Type.loadbalancing,
partition=partition,
debug_method=debug_method,
descriptions=True)
def show_all_tagging_properties(self, partition=None, debug_method=None):
return self.show_all_properties(service_type=Euproperty_Type.tagging,
partition=partition,
debug_method=debug_method,
descriptions=True)
def show_all_imaging_properties(self, partition=None, debug_method=None):
return self.show_all_properties(service_type=Euproperty_Type.imaging,
partition=partition,
debug_method=debug_method,
descriptions=True)
def show_all_properties(self,
partition=None,
service_type=None,
value=None,
search_string=None,
list=None,
debug_method=None,
descriptions=True):
debug_method = debug_method or self.debug
list = list or self.get_properties(partition=partition,
service_type=service_type,
value=value,
search_string=search_string)
first = list.pop(0)
buf = first.print_self(include_header=True,
show_description=descriptions,
printout=False)
count = 1
last_service_type = first.service_type
for prop in list:
count += 1
if prop.service_type != last_service_type:
last_service_type = prop.service_type
print_header = True
else:
print_header = False
buf += prop.print_self(include_header=print_header,
show_description=descriptions,
printout=False)
debug_method(buf)
def get_properties(self,
partition=None,
service_type=None,
value=None,
search_string=None,
force_update=False):
self.debug('get_properties: partition:' +
str(partition) + ", service_type:" + str(service_type) +
", value:" + str(value) + ", force_update:" +
str(force_update))
ret_props = []
if not self.properties or force_update:
self.update_property_list()
properties = copy.copy(self.properties)
if partition and properties:
properties = self.get_all_properties_for_partition(partition,
list=properties)
if service_type and properties:
properties = self.get_all_properties_for_service(service_type,
list=properties)
if search_string and properties:
properties = self.get_all_properties_by_search_string(
search_string, list=properties)
if properties:
if value:
for prop in properties:
if prop.value == value:
ret_props.append(prop)
else:
ret_props.extend(properties)
return ret_props
def get_property(self, name, service_type, partition, force_update=False):
self.debug('Get Property:' + str(name))
ret_prop = None
list = self.get_properties(partition=partition,
service_type=service_type,
force_update=force_update)
if list:
ret_prop = self.get_euproperty_by_name(name, list=list)
return ret_prop
    def update_property_list(self, property_name=''):
        """Fetch properties via euca-describe-properties and rebuild or
        merge the local cache.

        property_name -- optional filter passed straight to the CLI; when
        given, matching properties are merged into the existing cache,
        otherwise the whole cache and property_map are replaced.
        Returns the list of properties parsed from this run.
        """
        newlist = []
        newprop = None
        self.debug("updating property list...")
        self.zones = self.tester.ec2.get_zones()
        # Shell out to the admin CLI on the work machine; code=0 asserts a
        # zero exit status.
        cmdout = self.work_machine.sys(
            self.cmdpath+'euca-describe-properties -v -U ' +
            str(self.service_url) + ' -I ' + str(self.access_key) +
            ' -S ' + str(self.secret_key) + ' ' + property_name,
            code=0, verbose=self.verbose)
        for propstring in cmdout:
            try:
                if re.search("^PROPERTY", propstring):
                    newprop = self.parse_euproperty_from_string(propstring)
                elif newprop:
                    # DESCRIPTION lines for the current property carry its
                    # help text; any other continuation line is treated as a
                    # wrapped value and appended.
                    if (re.search("^DESCRIPTION", propstring) and
                            re.search(newprop.name, propstring)):
                        newprop.description = \
                            self.parse_euproperty_description(propstring)
                    else:
                        newprop.value = str(newprop.value) + str(propstring)
            except Exception, e:
                self.debug('Error processing property line: ' + propstring)
                raise e
            # NOTE(review): if the first output line is not PROPERTY,
            # newprop is still None here and None gets appended -- confirm
            # the CLI always starts output with a PROPERTY line.
            if not newprop in newlist:
                newlist.append(newprop)
        if property_name:
            # Partial update: splice refreshed properties over matching
            # cached entries.
            for newprop in newlist:
                for oldprop in self.properties:
                    # NOTE(review): rebinding the loop variable 'oldprop'
                    # does not modify self.properties -- looks like a no-op;
                    # verify intent.
                    if oldprop.property_string == newprop.property_string:
                        oldprop = newprop
                self.create_dynamic_property_map_from_property(newprop)
        else:
            # Full refresh: replace the cache and rebuild the attribute map.
            self.properties = newlist
            self.property_map = Property_Map()
            for prop in self.properties:
                self.create_dynamic_property_map_from_property(prop)
        return newlist
def parse_euproperty_description(self, propstring):
'''
Example string to parse:
"DESCRIPTION www.http_port Listen to HTTP on this port."
'''
split = str(propstring).replace('DESCRIPTION', '').split()
description = " ".join(str(x) for x in split[1:])
return str(description)
def parse_property_value_from_string(self, propstring):
split = str(propstring).replace('PROPERTY', '').split()
prop_value = " ".join(str(x) for x in split[1:])
return str(prop_value)
    def parse_euproperty_from_string(self, propstring):
        '''
        Convert a line of output from euca-describe-properties into a
        euproperty, e.g.:
        "PROPERTY walrus.storagemaxbucketsizeinmb 5120"
        If the property is already cached, its value is updated in place
        and the cached object returned; otherwise a new Euproperty is
        built by splitting the dotted name into partition / service type /
        name components.
        :param propstring: one line of CLI output
        :returns euproperty
        '''
        propstring = str(propstring).replace('PROPERTY', '').strip()
        ret_service_type = None
        ret_partition = None
        splitstring = propstring.split()
        #get the property string, example: "walrus.storagemaxbucketsizeinmb"
        property_string = splitstring.pop(0)
        ret_value = " ".join(splitstring)
        for prop in self.properties:
            #if this property is in our list, update the value and return
            if prop.property_string == property_string:
                prop.lastvalue = prop.value
                prop.value = ret_value
                return prop
        ret_name = property_string
        #...otherwise this property is not in our list yet,
        # create a new property
        #parse property string into values...
        propattrs = property_string.split('.')
        #See if the first element is a zone-partition
        #First store and remove the zone-partition if it's in the list
        for zone in self.zones:
            if zone == propattrs[0]:
                #Assume this is the zone-partition id/name,
                # remove it from the propattrs list
                ret_partition = propattrs.pop(0)
                break
        #Move along items in list until we reach a service type
        # NOTE(review): get_type_by_string is expected to raise
        # AttributeError for non-matching tokens; the IndexError branch
        # below falls back to treating the first token as the service
        # type -- unclear which call raises IndexError, confirm.
        for index in xrange(0, len(propattrs)):
            try:
                ret_service_type = Euproperty_Type.get_type_by_string(
                    propattrs[index])
                propattrs.remove(propattrs[index])
                break
            except AttributeError:
                pass
            except IndexError:
                self.debug("\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
                           "!!!!!!!!!!!!!!!!!!!!!!!!!\n" +
                           "Need to add new service? " +
                           "No service type found for: " +
                           str(property_string) +
                           "\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
                           "!!!!!!!!!!!!!!!!!!!!!!!!!\n")
                ret_service_type = propattrs.pop(0)
        #self.debug("ret_service_type: "+str(ret_service_type))
        #Store the name of the property
        ret_name = ".".join(propattrs)
        newprop = Euproperty(self, property_string, ret_service_type,
                             ret_partition, ret_name, ret_value)
        return newprop
def create_dynamic_property_map_from_property(self, euproperty):
context = self.property_map
if not hasattr(context, 'all'):
setattr(context, 'all', Property_Map())
all_map = getattr(context, 'all')
if euproperty.partition:
if not hasattr(context, str(euproperty.partition)):
setattr(context, str(euproperty.partition), Property_Map())
context = getattr(context, str(euproperty.partition))
if euproperty.service_type:
if not hasattr(context, str(euproperty.service_type)):
setattr(context, str(euproperty.service_type), Property_Map())
context = getattr(context, str(euproperty.service_type))
object_name = str(euproperty.name).replace('.', '_')
if not hasattr(context, object_name):
setattr(context, object_name, euproperty)
if not hasattr(all_map, object_name):
setattr(all_map, object_name, euproperty)
def get_euproperty_by_name(self, name, list=None):
props = []
list = list or self.properties
for property in list:
if property.name == name:
return property
raise EupropertyNotFoundException('Property not found by name:' +
str(name))
def get_all_properties_for_partition(self,
partition,
list=None,
verbose=False):
self.debug('Get all properties for partition:' + str(partition))
props = []
list = list or self.properties
for property in list:
if property.partition == partition:
if verbose:
self.debug('property:' + str(property.name) +
", prop.partition:" + str(property.partition) +
",partition:" + str(partition))
props.append(property)
self.debug('Returning list of len:' + str(len(props)))
return props
def get_all_properties_for_service(self, service, list=None):
props = []
list = list or self.properties
for property in list:
if property.service_type == service:
props.append(property)
return props
def get_all_properties_by_search_string(self, search_string, list=None):
props = []
list = list or self.properties
for property in list:
if re.search(search_string, property.property_string):
props.append(property)
return props
    def set_property(self, property, value):
        """Set 'property' (Euproperty or property string) to 'value'.

        NOTE(review): this definition is immediately shadowed by the
        second 'set_property' defined right below in this class, so this
        version is dead code at runtime. It also delegates to
        'set_property_by_property_string', which is not visible in this
        file -- confirm and remove one of the two definitions.
        """
        if isinstance(property, Euproperty):
            return self.set_property_by_property_string(
                property.property_string, value)
        else:
            return self.set_property_by_property_string(str(property), value)
    def set_property(self, property, value, reset_to_default=False):
        '''
        Sets 'property' on the cloud to 'value' via euca-modify-property
        and returns the new value reported by the CLI.
        property - mandatory - Euproperty, or a search string resolving to
                   exactly one property
        value - mandatory - str value to assign (ignored by the CLI when
                reset_to_default is True)
        reset_to_default - optional - when True, pass -r to restore the
                property's default instead of assigning 'value'
        '''
        value = str(value)
        # Accept either an Euproperty or a search string; a string must
        # resolve to exactly one cached property.
        if not isinstance(property, Euproperty):
            try:
                property = self.get_all_properties_by_search_string(property)
                if len(property) > 1:
                    raise Exception('More than one euproperty found for '
                                    'property string:' + str(property))
                else:
                    property = property[0]
            except Exception, e:
                # NOTE(review): the original exception is discarded here
                # and 'property' may already be rebound to a list, so the
                # message can be misleading -- confirm intent.
                raise Exception('Could not fetch property to set. '
                                'Using string:' + str(property))
        property.lastvalue = property.value
        self.debug('Setting property(' + property.property_string +
                   ') to value:' + str(value))
        if reset_to_default:
            # -r resets the property to its built-in default.
            ret_string = self.work_machine.sys(
                self.cmdpath + 'euca-modify-property -U ' +
                str(self.service_url) + ' -I ' + str(self.access_key) +
                ' -S ' + str(self.secret_key) + ' -r ' +
                str(property.property_string), code=0)[0]
        else:
            ret_string = self.work_machine.sys(
                self.cmdpath + 'euca-modify-property -U ' +
                str(self.service_url) + ' -I '+str(self.access_key) + ' -S ' +
                str(self.secret_key) + ' -p ' +
                str(property.property_string) + '=' + str(value),
                code=0)[0]
        if ret_string:
            # Expected CLI output shape: "PROPERTY <name> <value>".
            ret_value = str(ret_string).split()[2]
        else:
            raise EupropertiesException("set_property output from modify "
                                        "was None")
        #Confirm property value was set
        # NOTE(review): ret_string is a single string at this point, so the
        # join below inserts a newline between every character -- verify.
        if not reset_to_default and (ret_value != value) and\
                not (not value and ret_value == '{}'):
            ret_string = "\n".join(str(x) for x in ret_string)
            raise EupropertiesException(
                "set property(" + property.property_string + ") to value(" +
                str(value) + ") failed.Ret Value (" + str(ret_value) +
                ")\nRet String\n" + ret_string)
        property.value = ret_value
        return ret_value
def get_property_by_string(self, property_string):
property = None
for prop in self.properties:
if prop.property_string == property_string:
property = prop
break
return property
def set_property_value_by_string(self, property_string, value):
property = self.get_property_by_string(property_string)
if not property:
raise Exception('Property not found for:' + str(property_string))
property.set(value)
def get_property_value_by_string(self, property_string):
property = self.get_property_by_string(property_string)
if not property:
raise Exception('Property not found for:' + str(property_string))
return property.value
def reset_property_to_default(self, prop):
'''
Sets a property 'prop' at eucaops/eutester object 'eucaops' to it's
default value
Returns new value
prop - mandatory - string representing the property to set
ucaops - optional - the eucaops/eutester object to set the property at
'''
if not isinstance(prop, Euproperty):
prop = self.get_all_properties_by_search_string(prop)[0]
return self.set_property(prop, None, reset_to_default=True)
def get_property_default_value(self, prop, ireadthewarning=False):
'''
Note: This hack method is intrusive! It will briefly reset the property
This is a temporary method to get a properties default method
prop - mandatory - string, eucalyptus property
ireadthewarning - mandatory - boolean, to warn user this method
is intrusive
'''
if (ireadthewarning is False):
raise EupropertiesException("ireadthewarning is set to false in "
"get_property_default_value")
original = prop.get()
default = self.reset_property_to_default(prop)
prop.set(original)
return default
class EupropertiesException(Exception):
    """Generic error raised by the property manager.

    Improvement: 'value' is now also forwarded to Exception.__init__ so
    that e.args, repr() and pickling behave like standard exceptions.
    """
    def __init__(self, value):
        Exception.__init__(self, value)
        self.value = value

    def __str__(self):
        return repr(self.value)
class EupropertyNotFoundException(Exception):
    """Raised when a property lookup (by name) finds no match.

    Improvement: 'value' is now also forwarded to Exception.__init__ so
    that e.args, repr() and pickling behave like standard exceptions.
    """
    def __init__(self, value):
        Exception.__init__(self, value)
        self.value = value

    def __str__(self):
        return repr(self.value)
|
"""Unittests that do not require the server to be running an common tests of responses.
The TestCase here just calls the functions that provide the logic to the ws views with DummyRequest
objects to mock a real request.
The functions starting with `check_...` are called with UnitTest.TestCase instance as the first
arg and the response. These functions are used within the unit tests in this file, but also
in the `ws-tests` calls that perform the tests through http.
"""
import os
import unittest
from pyramid import testing
from phylesystem_api.utility import fill_app_settings, umbrella_from_request
from phylesystem_api.views import import_nexson_from_crossref_metadata
def get_app_settings_for_testing(settings):
    """Fills the settings of a DummyRequest, with info from the development.ini

    This allows the dummy requests to mock a real request wrt
    configuration-dependent settings. Raises RuntimeError when the
    expected ../development.ini is absent.
    """
    from peyotl.utility.imports import SafeConfigParser
    parser = SafeConfigParser()
    ini_path = os.path.abspath(os.path.join('..', 'development.ini'))
    if not os.path.isfile(ini_path):
        raise RuntimeError('Expecting a INI file at "{}" to run tests'.format(ini_path))
    parser.read(ini_path)
    settings['repo_parent'] = parser.get('app:main', 'repo_parent')
    fill_app_settings(settings=settings)
def gen_versioned_dummy_request():
    """Return a DummyRequest configured like a real v3 API request.

    Mimics the URL-based version matching done by the real app.
    """
    request = testing.DummyRequest()
    get_app_settings_for_testing(request.registry.settings)
    request.matchdict['api_version'] = 'v3'
    return request
def check_index_response(test_case, response):
    """Verifies the existence of expected keys in the response to an index call.

    'documentation_url', 'description', and 'source_url' keys must be in the response.
    """
    expected_keys = ('documentation_url', 'description', 'source_url')
    for key in expected_keys:
        test_case.assertIn(key, response)
def check_render_markdown_response(test_case, response):
    """Check of `response` to a `render_markdown` call.

    Asserts the body equals the expected rendered HTML for
    render_test_input (links get target="_blank")."""
    expected = ('<p>hi from <a href="http://phylo.bio.ku.edu" target="_blank">'
                'http://phylo.bio.ku.edu</a> and '
                '<a href="https://github.com/orgs/OpenTreeOfLife/dashboard" target="_blank">'
                'https://github.com/orgs/OpenTreeOfLife/dashboard</a></p>')
    test_case.assertEquals(response.body, expected)
def check_study_list_and_config_response(test_case,
                                         sl_response,
                                         config_response,
                                         from_generic_config):
    """Checks of responses from study_list, config, and the generic config calls.

    The shard document counts must sum to the study list length, and the
    generic config must equal the phylesystem config."""
    doc_counts = [shard['number of documents'] for shard in config_response['shards']]
    test_case.assertEquals(sum(doc_counts), len(sl_response))
    test_case.assertEquals(from_generic_config, config_response)
def check_unmerged_response(test_case, ub):
    """Check of `ub` response from an `unmerged_branches` call.

    The master branch must never be reported as unmerged."""
    test_case.assertTrue(not ('master' in ub))
def check_config_response(test_case, cfg):
    """Check of `cfg` response from a `config` call.

    The response must have exactly the three expected top-level keys."""
    expected_keys = {"initialization", "shards", "number_of_shards"}
    test_case.assertSetEqual(set(cfg.keys()), expected_keys)
def check_external_url_response(test_case, doc_id, resp):
    """Simple check of an `external_url` `resp` response for `doc_id`.

    `doc_id` and `url` fields of the response are checked."""
    test_case.assertEquals(resp.get('doc_id'), doc_id)
    url = resp.get('url', '')
    test_case.assertTrue(url.endswith('{}.json'.format(doc_id)))
def check_push_failure_response(test_case, resp):
    """Check of the `resp` response of a `push_failure` method call.

    Verifies the expected keys are present and pushes are succeeding."""
    expected_keys = {"doc_type", "errors", "pushes_succeeding"}
    test_case.assertSetEqual(set(resp.keys()), expected_keys)
    test_case.assertTrue(resp["pushes_succeeding"])
# Markdown/HTML snippet fed to the render_markdown view in the tests; the
# expected rendered output is asserted in check_render_markdown_response().
render_test_input = 'hi from <a href="http://phylo.bio.ku.edu" target="new">' \
                    'http://phylo.bio.ku.edu</a> and ' \
                    'https://github.com/orgs/OpenTreeOfLife/dashboard'
class ViewTests(unittest.TestCase):
    """UnitTest of the functions that underlie the ws views.

    NOTE: these tests read ../development.ini and the document store it
    points at, so they are integration-flavored and environment
    dependent rather than pure unit tests.
    """
    def setUp(self):
        """Calls pyramid testing.setUp"""
        self.config = testing.setUp()
    def tearDown(self):
        """Calls pyramid testing.tearDown"""
        testing.tearDown()
    def test_index(self):
        """Test of index view"""
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import index
        check_index_response(self, index(request))
    def test_render_markdown(self):
        """Test of render_markdown view"""
        request = testing.DummyRequest(post={'src': render_test_input})
        from phylesystem_api.views import render_markdown
        check_render_markdown_response(self, render_markdown(request))
    def test_study_list_and_config(self):
        """Test of study_list and phylesystem_config views"""
        # A fresh dummy request is generated before each view call because
        # the views may mutate the request/matchdict they receive.
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import study_list
        sl = study_list(request)
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import phylesystem_config
        x = phylesystem_config(request)
        request = gen_versioned_dummy_request()
        request.matchdict['resource_type'] = 'study'
        from phylesystem_api.views import generic_config
        y = generic_config(request)
        check_study_list_and_config_response(self, sl, x, y)
        if not sl:
            return
        # Reuse the last (study-typed) request to spot-check external_url
        # against the first listed study.
        from phylesystem_api.views import external_url
        doc_id = sl[0]
        request.matchdict['doc_id'] = doc_id
        e = external_url(request)
        check_external_url_response(self, doc_id, e)
    def test_unmerged(self):
        """Test of unmerged_branches view"""
        request = gen_versioned_dummy_request()
        request.matchdict['resource_type'] = 'study'
        from phylesystem_api.views import unmerged_branches
        check_unmerged_response(self, unmerged_branches(request))
    def test_config(self):
        """Test of generic_config view"""
        request = gen_versioned_dummy_request()
        from phylesystem_api.views import phylesystem_config, generic_config
        r2 = phylesystem_config(request)
        check_config_response(self, r2)
        request.matchdict['resource_type'] = 'study'
        r = generic_config(request)
        check_config_response(self, r)
        # generic_config for 'study' should agree with phylesystem_config...
        self.assertDictEqual(r, r2)
        request.matchdict['resource_type'] = 'amendment'
        ra = generic_config(request)
        check_config_response(self, ra)
        # ...but differ for another resource type.
        self.assertNotEqual(ra, r)
    def test_push_failure_state(self):
        """Test of push_failure view"""
        request = gen_versioned_dummy_request()
        request.matchdict['resource_type'] = 'collection'
        from phylesystem_api.views import push_failure
        pf = push_failure(request)
        check_push_failure_response(self, pf)
    def test_doi_import(self):
        """Make sure that fetching from DOI generates a valid study shell."""
        doi = "10.3732/ajb.0800060"
        document = import_nexson_from_crossref_metadata(doi=doi,
                                                       ref_string=None,
                                                       include_cc0=None)
        request = gen_versioned_dummy_request()
        request.matchdict['resource_type'] = 'study'
        umbrella = umbrella_from_request(request)
        errors = umbrella.validate_and_convert_doc(document, {})[1]
        self.assertEquals(len(errors), 0)
# Allow running this test module directly (python <file>.py).
if __name__ == '__main__':
    unittest.main()
|
import sys
'''
Returns the index of the element in the grid. Element passed in
must have a unique position. If not present returns [-1, -1]. If
multiple occurences present, returns the first one
'''
def findIndex(grid, charElem):
    '''
    Return the [row, col] index of charElem in grid, or [-1, -1] when it
    is absent. With multiple occurrences the first one in row-major
    order wins.
    '''
    for rowNum, row in enumerate(grid):
        for colNum, cell in enumerate(row):
            if cell == charElem:
                return [rowNum, colNum]
    return [-1, -1]
'''
Function that generates the valid surrounding indices for a parti
- cular index in a grid The surroundings are just 4 as of now. But
this function can easily be modified by modifying the surrIndices
array.
Returns a list of tuples that are indicative of valid indices
'''
def genSurr(grid, i, j):
    '''
    Generate the in-bounds 4-neighbourhood of (i, j) as a list of
    (row, col) tuples, in the fixed order down/up/right/left. Extend the
    offsets list to change the neighbourhood shape.
    Returns -1 when the grid is empty (kept for backward compatibility).
    '''
    if not grid:
        return -1
    rowCount = len(grid)
    colCount = len(grid[0])
    neighbours = []
    for dx, dy in [(1, 0), (-1, 0), (0, 1), (0, -1)]:
        x = i + dx
        y = j + dy
        if 0 <= x < rowCount and 0 <= y < colCount:
            neighbours.append((x, y))
    return neighbours
'''
Returns a list of tuples that belong to the validChars set and have
not yet been visited (not cointained in visited Set)
'''
def genValidSurr(grid, surr, validChars, visitedSet):
    '''
    Filter surr down to the points whose grid character is in validChars
    and which are not already in visitedSet. Order is preserved.
    '''
    return [pt for pt in surr
            if grid[pt[0]][pt[1]] in validChars and pt not in visitedSet]
'''
DFS on a matrix graph/grid which computes one of the Paths from
start to the goal passed in as parameters. Returns the path as an
array of indices from start to goal
Slight Modification for problem [wandUse variable]
wandUse is used each time we encounter a point from which there are
variable routes and we know that there exists a path from this point
till the end
'''
def dfsPathSearch(grid,
                  startIndex,
                  goalIndex,
                  pathSoFar,
                  visitedNodes):
    '''
    Depth-first search from startIndex to goalIndex over cells whose
    character is '.' or '*'. Returns True when a path exists.

    Side effects: visitedNodes accumulates every explored index, and
    pathSoFar is appended goal-first as the recursion unwinds -- the
    caller appends the start index and reverses to get start->goal.
    '''
    # Marking the current node as explored
    visitedNodes.add(startIndex)
    # Base case of recursion in case we want to stop
    # after certain condition
    if(startIndex == goalIndex):
        return True
    else: # Recursive steps
        # Generate all valid surrounding points
        s = genSurr(grid,startIndex[0],startIndex[1])
        validChars = set()
        validChars.add('.')
        validChars.add('*')
        sValid = genValidSurr(grid,s,validChars,visitedNodes)
        # Return False in case no valid surrounding pt found
        if(len(sValid) == 0): return False
        # Iterate through all valid surrouding points
        for point in sValid:
            pathExists = dfsPathSearch(grid,
                                       point,
                                       goalIndex,
                                       pathSoFar,
                                       visitedNodes)
            if(pathExists):
                # If there were more than one choices here, increment
                # wand use by one
                pathSoFar.append(point)
                return True
        # Return false if no point in valid surroundings
        # can generate a path to goal
        return False
'''
Parses a grid from the passed in stream. Can be used to parse the
grid from standard input (by passing in sys.stdin) as well as from
a text file (by passing in f, where f = open('somename.txt'))
'''
def parseGrid(stream, r, c):
    '''
    Read r lines from stream (sys.stdin or an open file) and return them
    as a list of character lists. c (column count) is accepted for
    interface compatibility but not used.
    '''
    return [list(stream.readline().rstrip()) for _ in range(r)]
'''
Main Function to run the program. We first find a path using DFS and
later compute the number of turns that are necessary (wand usage)
'''
# Entry point: reads HackerRank-style input (test case count, grid size,
# grid, wand budget k) and prints one DFS path per test case.
# NOTE: Python 2 only -- uses the print statement on the last line.
if __name__ == "__main__":
    # No of test cases
    t = int(sys.stdin.readline().rstrip())
    for i in range(t): # For each test case
        # Parsing the input for the test case
        [r,c] = [int(x) for x in sys.stdin.readline().rstrip().split()]
        grid = parseGrid(sys.stdin,r,c)
        # k (wand-use budget) is read to consume the line but unused here.
        k = int(sys.stdin.readline().rstrip())
        # Exploring and computing the path from start to goal using DFS
        # Path is an array of indices
        startIndex = tuple(findIndex(grid,'M'))
        goalIndex = tuple(findIndex(grid,'*'))
        visitedNodes = set()
        path = []
        dfsPathSearch(grid,
                      startIndex,
                      goalIndex,
                      path,
                      visitedNodes)
        # dfsPathSearch fills the path goal-first; add the start and flip.
        path.append(startIndex)
        path.reverse()
        # Prints the path in order from start to goal
        print path
|
import unittest
from mock import Mock
from nosealert.plugin import AlertPlugin
from nosealert.notifications import Notification
class TestAlertPlugin(unittest.TestCase):
    """Unit tests for AlertPlugin's notification building and delivery."""

    def setUp(self):
        """Create a fresh plugin instance for every test."""
        self.plugin = AlertPlugin()

    def test_get_notification_success(self):
        """A clean result maps to a Notification carrying only a total."""
        clean_result = Mock(failures=[], errors=[], testsRun=3)
        expected = Notification(total=3)
        self.assertEqual(self.plugin.get_notification(clean_result), expected)

    def test_get_notification_with_fails(self):
        """Failures and errors are counted separately from the total."""
        broken_result = Mock(failures=[1, 2], errors=[3], testsRun=5)
        expected = Notification(fails=2, errors=1, total=5)
        self.assertEqual(self.plugin.get_notification(broken_result), expected)

    def test_finalize_sends_notification(self):
        """finalize() must send exactly one notification for the result."""
        fake_notification = Mock()
        self.plugin.get_notification = Mock(return_value=fake_notification)
        self.plugin.finalize(Mock())
        fake_notification.send.assert_called_once_with()
|
"""
hydrogen
~~~~~~~~
Hydrogen is an extremely lightweight workflow enhancement tool for Python
web applications, providing bower/npm-like functionality for both pip and
bower packages.
:author: David Gidwani <david.gidwani@gmail.com>
:license: BSD, see LICENSE for details
"""
import atexit
from collections import defaultdict
from functools import update_wrapper
import json
import os
import re
import shutil
import sys
import tempfile
import yaml
import zipfile
import click
import envoy
from pathlib import Path, PurePath
from pathspec import GitIgnorePattern, PathSpec
from pip._vendor import pkg_resources
import requests
import rfc6266
import semver
__version__ = "0.0.1-alpha"
prog_name = "hydrogen"
# Per-user application data directory (platform dependent).
app_dir = click.get_app_dir(prog_name)
github_api_uri = "https://api.github.com"
# NOTE(review): debug output is enabled unconditionally -- confirm this
# should not be driven by an environment variable or CLI flag.
debug = True
PY2 = sys.version_info[0] == 2
if PY2:
    # Python 2: urlparse is a top-level module and text is `unicode`.
    from urlparse import urlparse
    text_type = unicode # noqa: Undefined in py3
else:
    # Python 3 equivalents.
    from urllib.parse import urlparse
    text_type = str
class InvalidRequirementSpecError(Exception):
    """Raised when a requirement string cannot be parsed."""
    pass
class InvalidPackageError(Exception):
    """Raised when a package is malformed or otherwise unusable."""
    pass
class PackageNotFoundError(Exception):
    """Raised when a requested package cannot be found (e.g. HTTP 404)."""
    pass
class VersionNotFoundError(Exception):
    """Raised when no package version satisfies the requested spec."""
    pass
def get_installed_pypackages():
    """Return a mapping of lower-cased project name -> distribution for
    every package in the current pkg_resources working set."""
    installed = {}
    for dist in pkg_resources.working_set:
        installed[dist.project_name.lower()] = dist
    return installed
def success(message, **kwargs):
    """Echo a success message, green unless a color is supplied."""
    kwargs.setdefault("fg", "green")
    click.secho(message, **kwargs)
def warning(message, **kwargs):
    """Echo a 'warning:'-prefixed message, red unless overridden."""
    kwargs.setdefault("fg", "red")
    click.secho(u"warning: {}".format(message), **kwargs)
def error(message, level="error", exit_code=1, **kwargs):
    """Echo an 'error:'-prefixed message and exit with exit_code.

    NOTE(review): 'level' is accepted (fatal() passes level="fatal") but
    never appears in the output -- confirm whether it should.
    """
    kwargs.setdefault("fg", "red")
    click.secho(u"error: {}".format(message), **kwargs)
    sys.exit(exit_code)
def fatal(message, **kwargs):
    """Shortcut for error(level="fatal"); terminates the process."""
    error(message, level="fatal", **kwargs)
def secure_filename(filename):
    r"""Borrowed from :mod:`werkzeug.utils`, under the BSD 3-clause license.

    Sanitize *filename* so it can safely be stored on a regular file
    system and passed to :func:`os.path.join`. The result is ASCII-only
    for maximum portability, and on Windows the special device names
    (CON, PRN, ...) are prefixed with an underscore.

    >>> secure_filename("My cool movie.mov")
    'My_cool_movie.mov'
    >>> secure_filename("../../../etc/passwd")
    'etc_passwd'
    >>> secure_filename(u'i contain cool \xfcml\xe4uts.txt')
    'i_contain_cool_umlauts.txt'

    The result may be empty; uniqueness and non-emptiness are the
    caller's responsibility.

    :param filename: the filename to secure
    """
    ascii_strip = re.compile(r'[^A-Za-z0-9_.-]')
    windows_devices = ('CON', 'AUX', 'COM1', 'COM2', 'COM3', 'COM4',
                       'LPT1', 'LPT2', 'LPT3', 'PRN', 'NUL')
    if isinstance(filename, text_type):
        from unicodedata import normalize
        # Decompose accents, then drop anything outside ASCII.
        filename = normalize('NFKD', filename).encode('ascii', 'ignore')
        if not PY2:
            filename = filename.decode('ascii')
    for sep in (os.path.sep, os.path.altsep):
        if sep:
            filename = filename.replace(sep, ' ')
    filename = str(ascii_strip.sub('', '_'.join(filename.split()))).strip('._')
    # On NT a couple of special device files exist in every directory;
    # prepend an underscore so we never collide with one.
    if os.name == 'nt' and filename and \
            filename.split('.')[0].upper() in windows_devices:
        filename = '_' + filename
    return filename
def get(url, session=None, silent=not debug, **kwargs):
    """Retrieve *url* and optionally log the response status.

    :param session: a :class:`requests.Session` object.
    :param silent: if **True**, response status and URL will not be printed.
    :raises PackageNotFoundError: when the server answers 404.
    """
    requester = session or requests
    kwargs.setdefault("verify", True)
    response = requester.get(url, **kwargs)
    if not silent:
        color = "green" if response.status_code in (200, 304) else "red"
        click.echo(click.style(str(response.status_code), fg=color) + " " + url)
    if response.status_code == 404:
        raise PackageNotFoundError
    return response
def download_file(url, dest=None, chunk_size=1024, replace="ask",
                  label="Downloading {dest_basename} ({size:.2f}MB)",
                  expected_extension=None):
    """Download a file from a given URL and display progress.

    :param dest: If the destination exists and is a directory, the filename
        will be guessed from the Content-Disposition header. If the destination
        is an existing file, the user will either be prompted to overwrite, or
        the file will be replaced (depending on the value of **replace**). If
        the destination does not exist, it will be used as the filename.
    :param int chunk_size: bytes read in at a time.
    :param replace: If `False`, an existing destination file will not be
        overwritten.
    :param label: a string which is formatted and displayed as the progress bar
        label. Variables provided include *dest_basename*, *dest*, and *size*.
    :param expected_extension: if set, the filename will be sanitized to ensure
        it has the given extension. The extension should not start with a dot
        (`.`).
    :returns: the destination path as a string.
    """
    dest = Path(dest or url.split("/")[-1])
    response = get(url, stream=True)
    # Directory destination: derive the filename from the server's
    # Content-Disposition header.
    if (dest.exists()
            and dest.is_dir()
            and "Content-Disposition" in response.headers):
        content_disposition = rfc6266.parse_requests_response(response)
        # NOTE(review): when expected_extension is None, 'filename' is never
        # assigned and dest stays a directory -- the open_file below would
        # then fail; confirm expected_extension is mandatory in this path.
        if expected_extension is not None:
            filename = content_disposition.filename_sanitized(
                expected_extension)
            filename = secure_filename(filename)
        dest = dest / filename
    # Existing file: honor the replace policy (False, or "ask" + declined).
    if dest.exists() and not dest.is_dir():
        if (replace is False
                or replace == "ask"
                and not click.confirm("Replace {}?".format(dest))):
            return str(dest)
    size = int(response.headers.get("content-length", 0))
    label = label.format(dest=dest, dest_basename=dest.name,
                         size=size/1024.0/1024)
    # NOTE(review): size/1024 is a float under Python 3 (progressbar length
    # expects an int) -- this code appears to target Python 2; confirm.
    with click.open_file(str(dest), "wb") as f:
        content_iter = response.iter_content(chunk_size=chunk_size)
        with click.progressbar(content_iter, length=size/1024,
                               label=label) as bar:
            for chunk in bar:
                if chunk:
                    f.write(chunk)
                    f.flush()
    return str(dest)
def get_dir_from_zipfile(zip_file, fallback=None):
    """Return the name of the single root folder in a zip file.

    :param zip_file: a :class:`zipfile.ZipFile` instance.
    :param fallback: returned when the archive does not contain exactly one
        top-level directory. Defaults to the zip file's own name.
    """
    fallback = fallback or zip_file.filename
    directories = [name for name in zip_file.namelist()
                   if name.endswith("/") and len(PurePath(name).parts) == 1]
    # Bug fix: the previous version indexed directories[0] even when the
    # list was empty (docstring promised the fallback, code raised
    # IndexError). Now the fallback covers both the zero and many cases.
    return directories[0] if len(directories) == 1 else fallback
def mkdtemp(suffix="", prefix=__name__ + "_", dir=None, cleanup=True,
            on_cleanup_error=None):
    """Create a temporary directory, optionally removed at interpreter exit.

    :param suffix: suffix of the temporary directory, defaults to empty.
    :param prefix: prefix of the temporary directory, defaults to `__name__`
        and an underscore.
    :param dir: if provided, create the directory inside `dir` instead of
        the system default temp directory.
    :param cleanup: if `True`, register an atexit handler that removes the
        directory on exit.
    :param on_cleanup_error: callback invoked when the atexit removal fails;
        receives *function*, *path*, and *excinfo* (see :mod:`shutil.rmtree`).
        Defaults to printing a warning asking for manual deletion.
    :returns: the path of the created directory.
    """
    path = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=dir)
    if not cleanup:
        return path
    if on_cleanup_error is None:
        def on_cleanup_error(function, path, excinfo):
            click.secho("warning: failed to remove file or directory: {}\n"
                        "please delete it manually.".format(path),
                        fg="red")
    atexit.register(shutil.rmtree, path=path, onerror=on_cleanup_error)
    return path
class Requirement(object):
    """A single package requirement (name plus optional version spec).
    .. note::
        `__hash__` is overridden so that package names stay unique in a set.
    .. todo::
        Extend :class:`pkg_resources.Requirement` for Python requirements.
    """
    # TODO: support multiple version specs (e.g. >=1.0,<=2.0)
    spec_regex = r"(.+?)\s*(?:([<>~=]?=)\s*(.+?))?$"

    def __init__(self, package, version):
        """Construct a new requirement.
        :param package: the package name.
        :param version: a semver compatible version specification.
        """
        self.package = package
        self.version = version
        # A bare version like "1.2" is normalized to an exact pin ("==1.2").
        if self.version and not re.match(r"[<=>~]", version[:2]):
            self.version = "=={}".format(self.version)

    @classmethod
    def coerce(cls, string):
        """Create a :class:`Requirement` object from a given package spec."""
        parsed = re.match(cls.spec_regex, string)
        if not parsed:
            raise InvalidRequirementSpecError("could not parse requirement")
        name = parsed.group(1)
        operator, spec = parsed.group(2, 3)
        version = operator + spec if operator and spec else None
        return cls(name, version)

    def load_installed_version(self):
        """Pin this requirement to the currently installed version, if any."""
        installed = get_installed_pypackages()
        if self.package in installed:
            self.version = "=={}".format(installed[self.package].version)

    def __eq__(self, other):
        # Equality is by package name only (version is ignored).
        if not isinstance(other, self.__class__):
            return False
        return other.package == self.package

    def __hash__(self):
        return hash(self.package)

    def __str__(self):
        return self.package + (self.version or "")

    def __repr__(self):
        return "<Requirement(package={package}, version='{version}')>".format(
            package=self.package, version=self.version)
class Requirements(set):
    """Represents a set of requirements."""

    def __init__(self, filename=None):
        self.filename = None
        if filename:
            self.load(filename)

    def add(self, elem, replace=False):
        """Add a requirement.
        :param elem: a string or :class:`Requirement` instance.
        :param replace: if `True`, packages in the set with the same name will
            be removed first.
        """
        if isinstance(elem, text_type):
            elem = Requirement.coerce(elem)
        if replace and elem in self:
            self.remove(elem)
        super(Requirements, self).add(elem)

    def load(self, requirements_file=None):
        """Load or reload requirements from a requirements.txt file.
        :param requirements_file: if not given, the filename used from
            initialization will be read again.
        :raises ValueError: if no filename was given here or at init time.
        """
        if requirements_file is None:
            requirements_file = self.filename
        if requirements_file is None:
            raise ValueError("no filename provided")
        elif isinstance(requirements_file, text_type):
            requirements_file = Path(requirements_file)
        self.clear()
        with requirements_file.open() as f:
            self.loads(f.read())
        if isinstance(requirements_file, (text_type, Path)):
            self.filename = requirements_file

    def loads(self, requirements_text):
        """Parse requirement lines out of a block of text."""
        lines = re.findall(Requirement.spec_regex,
                           requirements_text,
                           re.MULTILINE)
        for line in lines:
            self.add(Requirement(line[0], "".join(line[1:])))

    def remove(self, elem):
        """Remove a requirement.
        :param elem: a string or :class:`Requirement` instance (strings are
            matched by package name).
        """
        if isinstance(elem, text_type):
            for requirement in self:
                if requirement.package == elem:
                    return super(Requirements, self).remove(requirement)
        return super(Requirements, self).remove(elem)

    def __str__(self):
        return "\n".join([str(x) for x in self])

    def __repr__(self):
        # Bug fix: previously accessed `self.filename.name` unconditionally,
        # raising AttributeError whenever no file had been loaded yet
        # (filename is None until load() succeeds).
        name = self.filename.name if self.filename else ""
        return "<Requirements({})>".format(name)
class NamedRequirements(Requirements):
    """A :class:`Requirements` set that carries a group name."""

    def __init__(self, name, filename=None):
        self.name = name
        super(NamedRequirements, self).__init__(filename=filename)

    def __repr__(self):
        suffix = (", filename='{}'".format(self.filename.name)
                  if self.filename else "")
        return "<NamedRequirements({}{})>".format(self.name, suffix)
class GroupedRequirements(defaultdict):
    """Mapping of group name -> :class:`NamedRequirements`, persisted as YAML.

    Missing groups are created on demand via ``__missing__`` (the defaultdict
    factory is called with ``name=key`` so each set knows its group name).
    """
    # Groups created up-front by create_default_groups().
    default_groups = ["all", "dev", "bower", "bower-dev"]
    # Pip requirement files mapped into groups by load_pip_requirements().
    default_pip_files = {
        "all": "requirements.txt",
        "dev": "dev-requirements.txt"
    }
    def __init__(self, groups=None):
        super(GroupedRequirements, self).__init__(NamedRequirements)
        self.groups = groups or self.default_groups
        self.filename = None
        self.create_default_groups()
    def clear(self):
        # Keep the default groups present even after a clear.
        super(GroupedRequirements, self).clear()
        self.create_default_groups()
    def create_default_groups(self):
        """Pre-create an empty NamedRequirements for each configured group."""
        for group in self.groups:
            # Normalize "Bower Dev" style names to "bower_dev".
            group = group.replace(" ", "_").lower()
            self[group] = NamedRequirements(group)
    def load_pip_requirements(self, files_map=None, freeze=True):
        """Populate groups from pip requirements files.

        :param files_map: mapping of group -> requirements file path;
            defaults to :attr:`default_pip_files`.
        :param freeze: if the "all" file is missing and this is `True`, the
            group is filled from `pip freeze` output instead.
        """
        if files_map is None:
            files_map = self.default_pip_files
        for group, requirements_txt in files_map.items():
            path = Path(requirements_txt)
            if not path.exists() and group.lower() == "all" and freeze:
                cmd = envoy.run("pip freeze")
                self[group].loads(cmd.std_out)
            elif path.exists():
                self[group].load(path)
    def load(self, filename, create_if_missing=True):
        """Load groups from a YAML file, optionally creating it first.

        NOTE(review): when the file is missing, the serialized YAML is
        written here and then written again by save() — a redundant
        double write.
        """
        filename = Path(filename)
        if not filename.exists() and create_if_missing:
            self.load_pip_requirements()
            with filename.open("w") as f:
                f.write(yaml.dump(self.serialized, default_flow_style=False,
                                  encoding=None))
            self.filename = filename
            return self.save(filename)
        with filename.open() as f:
            # NOTE(review): yaml.load without an explicit Loader can execute
            # arbitrary constructors on untrusted input; yaml.safe_load would
            # suffice for this plain mapping-of-lists format.
            for group, requirements in yaml.load(f.read()).items():
                for requirement in requirements:
                    self[group].add(Requirement.coerce(requirement))
        self.filename = filename
    def save(self, filename=None):
        """Write the YAML serialization to *filename* (default: last loaded)."""
        filename = Path(filename) if filename is not None else self.filename
        with filename.open("w") as f:
            f.write(self.yaml)
    @property
    def serialized(self):
        # Plain dict of group -> list of "package<spec>" strings, for YAML.
        to_ret = {}
        for group, requirements in self.items():
            to_ret[group] = [str(requirement) for requirement in requirements]
        return to_ret
    @property
    def yaml(self):
        # encoding=None makes yaml.dump return text, not bytes.
        return yaml.dump(self.serialized, default_flow_style=False,
                         encoding=None)
    def __missing__(self, key):
        if self.default_factory is None:
            raise KeyError(key)
        else:
            # Unlike plain defaultdict, pass the key so the group is named.
            ret = self[key] = self.default_factory(name=key)
            return ret
class Bower(object):
    """Helpers for talking to the legacy Bower package registry."""

    bower_base_uri = "https://bower.herokuapp.com"

    @classmethod
    def get_package_url(cls, package, session=None, silent=False):
        """Return the URL registered for *package*, or None if absent.

        :param session: accepted for interface compatibility; unused.
        :param silent: accepted for interface compatibility; unused.
        """
        response = get("{}/packages/{}".format(cls.bower_base_uri, package))
        return response.json().get("url", None)

    @classmethod
    def clean_semver(cls, version_spec):
        """Strip whitespace and a leading 'v'/'V' after comparison operators.

        e.g. ">= v1.0" -> ">=1.0".

        Bug fix: re.IGNORECASE was previously passed positionally, where
        re.sub expects ``count`` — it silently limited the substitution to
        2 occurrences and never applied case-insensitivity. It must be
        passed as the ``flags`` keyword.
        """
        return re.sub(r"([<>=~])\s+?v?", "\\1", version_spec,
                      flags=re.IGNORECASE)
class Hydrogen(object):
    """Application object: loads the requirements file and installs
    pip and bower packages.

    NOTE(review): several latent bugs are flagged inline below; this pass
    documents the code without changing behavior.
    """
    def __init__(self, assets_dir=None, requirements_file="requirements.yml"):
        # Where bower packages get extracted; defaults to ./assets.
        self.assets_dir = assets_dir or Path(".") / "assets"
        self.requirements = GroupedRequirements()
        self.requirements.load(requirements_file)
        # Scratch directory for downloaded archives (auto-removed at exit).
        self.temp_dir = mkdtemp()
    def extract_bower_zipfile(self, zip_file, dest, expected_version=None):
        """Extract a bower package archive into *dest*, honoring its
        bower.json "ignore" patterns and installing its dependencies first.

        :param zip_file: an open :class:`zipfile.ZipFile`.
        :param dest: destination directory (a Path).
        :param expected_version: semver spec the package version must satisfy.
        :returns: list of (name, version) tuples for everything installed.
        :raises InvalidPackageError: if the version does not match.
        """
        bower_json = None
        root = None
        deps_installed = []
        # Locate bower.json anywhere in the archive; its parent directory is
        # treated as the package root.
        for info in zip_file.infolist():
            if PurePath(info.filename).name == "bower.json":
                with zip_file.open(info) as f:
                    bower_json = json.load(f)
                root = str(PurePath(info.filename).parent)
                break
        # NOTE(review): if the archive has no bower.json, bower_json is still
        # None here and the subscript raises TypeError, not a useful error.
        version = bower_json["version"]
        if expected_version is not None:
            expected_version = Bower.clean_semver(expected_version)
            if not semver.match(version, expected_version):
                click.secho("error: versions do not match ({} =/= {})".format(
                    version, expected_version))
                raise InvalidPackageError
        # Recursively install declared dependencies before extracting.
        if "dependencies" in bower_json:
            for package, version in bower_json["dependencies"].items():
                url = Bower.get_package_url(package)
                deps_installed.extend(self.get_bower_package(
                    url, dest=dest, version=version))
        # NOTE(review): assumes an "ignore" key is present; KeyError otherwise.
        ignore_patterns = [GitIgnorePattern(ig) for ig in bower_json["ignore"]]
        path_spec = PathSpec(ignore_patterns)
        namelist = [path for path in zip_file.namelist()
                    if PurePath(path).parts[0] == root]
        ignored = list(path_spec.match_files(namelist))
        for path in namelist:
            # Re-root the archive path under the package name.
            dest_path = PurePath(
                bower_json["name"],
                *PurePath(path).parts[1:])
            if path in ignored:
                continue
            # NOTE(review): the inner loop below rebinds the outer loop
            # variable `path`, and `PurePath(path)` is not iterable, so this
            # raises TypeError whenever `ignored` is non-empty; the
            # `continue` only affects the innermost loop. Looks like broken
            # (effectively dead) ancestor-pruning logic — confirm intent.
            for path in ignored:
                for parent in PurePath(path):
                    if parent in ignored:
                        continue
            if path.endswith("/"):
                # Directory entry: mark ignored or create it on disk.
                if list(path_spec.match_files([str(dest_path)])):
                    ignored.append(PurePath(path))
                elif not (dest / dest_path).is_dir():
                    (dest / dest_path).mkdir(parents=True)
            else:
                # File entry: stream it out of the archive.
                target_path = dest / dest_path.parent / dest_path.name
                source = zip_file.open(path)
                target = target_path.open("wb")
                with source, target:
                    shutil.copyfileobj(source, target)
        deps_installed.append((bower_json["name"], bower_json["version"]))
        return deps_installed
    def get_bower_package(self, url, dest=None, version=None,
                          process_deps=True):
        """Fetch and install a bower package from a git or HTTP(S) URL.

        :param url: package URL; github ".git" URLs and zip downloads work.
        :param dest: destination directory; defaults to ./assets.
        :param version: semver spec used to pick a matching git tag.
        :param process_deps: accepted but unused here. -- NOTE(review)
        :returns: list of (name, version) tuples via extract_bower_zipfile.
        """
        dest = dest or Path(".") / "assets"
        parsed_url = urlparse(url)
        if parsed_url.scheme == "git" or parsed_url.path.endswith(".git"):
            if parsed_url.netloc == "github.com":
                # Strip leading "/" and trailing ".git" to get user/repo.
                user, repo = parsed_url.path[1:-4].split("/")
                response = get(github_api_uri +
                               "/repos/{}/{}/tags".format(user, repo))
                tags = response.json()
                target = None
                if not len(tags):
                    click.secho("fatal: no tags exist for {}/{}".format(
                        user, repo), fg="red")
                    raise InvalidPackageError
                if version is None:
                    # No spec given: take the newest tag.
                    target = tags[0]
                else:
                    for tag in tags:
                        if semver.match(tag["name"],
                                        Bower.clean_semver(version)):
                            target = tag
                            break
                if not target:
                    # NOTE(review): .format() is given positional args but the
                    # template uses named fields ({user}/{repo} {version}) —
                    # this line raises KeyError at runtime if ever reached.
                    click.secho(
                        "fatal: failed to find matching tag for "
                        "{user}/{repo} {version}".format(user, repo, version),
                        fg="red")
                    raise VersionNotFoundError
                # NOTE(review): reports tags[0], not the tag actually chosen.
                click.secho("installing {}/{}#{}".format(
                    user, repo, tags[0]["name"]), fg="green")
                return self.get_bower_package(
                    url=target["zipball_url"],
                    dest=dest,
                    version=version)
            # NOTE(review): the git-clone fallback below is unreachable —
            # the bare raise precedes it.
            raise NotImplementedError
            click.echo("git clone {url}".format(url=url))
            cmd = envoy.run('git clone {url} "{dest}"'.format(
                url=url, dest=dest))
        elif parsed_url.scheme in ("http", "https"):
            zip_dest = download_file(url, dest=self.temp_dir,
                                     label="{dest_basename}",
                                     expected_extension="zip")
            with zipfile.ZipFile(zip_dest, "r") as pkg:
                return self.extract_bower_zipfile(pkg, dest,
                                                  expected_version=version)
            # pkg.extractall(str(dest))
        else:
            click.secho("protocol currently unsupported :(")
            sys.exit(1)
    def install_bower(self, package, save=True, save_dev=False):
        """Installs a bower package.
        :param package: a package spec, e.g. "jquery" or "jquery>=2.0".
        :param save: if `True`, pins the package to the Hydrogen requirements
            YAML file.
        :param save_dev: if `True`, pins the package as a development
            dependency to the Hydrogen requirements YAML file.
        :param return: a list of tuples, containing all installed package names
            and versions, including any dependencies.
        """
        requirement = Requirement.coerce(package)
        url = Bower.get_package_url(requirement.package)
        installed = []
        # NOTE(review): the requested version spec is not forwarded to
        # get_bower_package here, so the latest package is fetched.
        for name, _ in self.get_bower_package(url):
            installed.append(Requirement(name, requirement.version))
        for requirement in installed:
            if save:
                self.requirements["bower"].add(requirement, replace=True)
            if save_dev:
                self.requirements["bower-dev"].add(requirement, replace=True)
            success("installed {}".format(str(requirement)))
        if save or save_dev:
            self.requirements.save()
        return installed
    def install_pip(self, package, save=True, save_dev=False):
        """Installs a pip package.
        :param package: a package spec, e.g. "requests" or "requests==2.0".
        :param save: if `True`, pins the package to the Hydrogen requirements
            YAML file.
        :param save_dev: if `True`, pins the package as a development
            dependency to the Hydrogen requirements YAML file.
        :param return: a **single** :class:`Requirement` object, representing
            the installed version of the given package.
        """
        requirement = Requirement.coerce(package)
        click.echo("pip install " + requirement.package)
        cmd = envoy.run("pip install {}".format(str(requirement)))
        if cmd.status_code == 0:
            # Pin to the exact version pip actually installed.
            installed_packages = get_installed_pypackages()
            package = installed_packages[requirement.package]
            requirement.version = "=={}".format(package.version)
            if save:
                self.requirements["all"].add(requirement)
            if save_dev:
                self.requirements["dev"].add(requirement)
            if save or save_dev:
                self.requirements.save()
            return requirement
        else:
            fatal(cmd.std_err)
def groups_option(f):
    """Decorator adding the shared -g/--groups option to a click command."""
    decorated = click.option("-g", "--groups",
                             help="Comma-separated list of requirement groups "
                                  "to include.")(f)
    return update_wrapper(decorated, f)
@click.group()
@click.version_option(prog_name=prog_name)
@click.pass_context
def main(ctx):
    """CLI entry point: require git on PATH, then attach the app object."""
    locator = "where" if sys.platform == "win32" else "which"
    if envoy.run(locator + " git").status_code != 0:
        click.secho("fatal: git not found in PATH", fg="red")
        sys.exit(1)
    ctx.obj = Hydrogen()
@main.command()
@click.pass_obj
@click.option("output_yaml", "--yaml", "-y", is_flag=True,
              help="Show requirements in YAML format.")
@click.option("--resolve", "-r", is_flag=True,
              help="Resolve version numbers for ambiguous packages.")
@groups_option
def freeze(h, output_yaml, resolve, groups):
    """Output installed packages."""
    # Default to every non-bower group; otherwise split the comma list.
    if not groups:
        groups = filter(lambda group: not group.lower().startswith("bower"),
                        h.requirements.keys())
    else:
        groups = [text_type.strip(group) for group in groups.split(",")]
    if output_yaml:
        # NOTE(review): this branch resolves versions across *all* groups and
        # dumps the whole requirements file — the -g/--groups filter is
        # ignored here; confirm whether that is intended.
        for requirements in h.requirements.values():
            for requirement in requirements:
                if resolve and not requirement.version:
                    requirement.load_installed_version()
        click.echo(h.requirements.yaml)
    else:
        # Plain text: one "# group" header followed by its requirements.
        for group in groups:
            if not h.requirements[group]:
                continue
            click.echo("# {}".format(group))
            for requirement in h.requirements[group]:
                if resolve and not requirement.version:
                    requirement.load_installed_version()
                click.echo(str(requirement))
@main.command()
@click.pass_obj
@click.option("--pip/--bower", default=True)
@groups_option
@click.option("--save", is_flag=True)
@click.option("--save-dev", is_flag=True)
@click.argument("packages", nargs=-1)
def install(h, pip, groups, save, save_dev, packages):
    """Install a pip or bower package."""
    if groups:
        groups = [text_type.strip(group) for group in groups.split(",")]
    else:
        groups = h.requirements.keys()
    if not packages:
        # No explicit packages: install everything already pinned in the
        # requested groups, picking the installer from the group name.
        for group in groups:
            if group not in h.requirements:
                warning("{} not in requirements".format(group))
                continue
            # Local name deliberately shadows this command function; it is
            # only used as the chosen installer callable below.
            install = (h.install_bower if group.startswith("bower")
                       else h.install_pip)
            for requirement in h.requirements[group]:
                install(str(requirement), save=False, save_dev=False)
    if pip:
        for package in packages:
            h.install_pip(package, save=save, save_dev=save_dev)
    else:
        for package in packages:
            h.install_bower(package, save=save, save_dev=save_dev)
# Script entry point: dispatch to the click command group.
if __name__ == "__main__":
    main()
|
from __future__ import unicode_literals
import os.path
from setuptools import setup, find_packages
def read(fname):
    """Return the text of *fname*, resolved relative to this file's directory.

    Bug fix: the file handle was previously opened without being closed;
    a ``with`` block now guarantees it is released.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as f:
        return f.read()
# Package metadata for django-aloha-edit; the long description comes from
# README.rst via read() above.
setup(name='django-aloha-edit',
      version='0.4.0',
      description='Django Aloha Edit',
      author='Nathaniel Tucker',
      author_email='me@ntucker.me',
      url='https://github.com/ntucker/django-aloha-edit',
      packages=find_packages(),
      include_package_data=True,
      install_requires=['django>=1.6', 'bleach>=1.4', 'lxml>=2.3', 'Pillow>=2.9.0', 'tinycss>=0.3'],
      long_description=read('README.rst'),
      license="BSD",
      classifiers=[
          'Development Status :: 4 - Beta',
          'Environment :: Web Environment',
          'Framework :: Django',
          'Intended Audience :: Developers',
          'License :: OSI Approved :: BSD License',
          'Operating System :: OS Independent',
          'Programming Language :: Python',
          'Programming Language :: Python :: 2',
          'Programming Language :: Python :: 2.7',
          'Programming Language :: Python :: 3',
          'Programming Language :: Python :: 3.3',
          ],
      )
|
from __future__ import unicode_literals
from future.builtins import str
from datetime import datetime
import re
try:
from urllib.parse import quote
except ImportError:
# Python 2
from urllib import quote
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from django.utils.html import urlize
from django.utils.timezone import make_aware, utc
from django.utils.translation import ugettext_lazy as _
from requests_oauthlib import OAuth1
import requests
from mezzanine.conf import settings
from mezzanine.twitter import QUERY_TYPE_CHOICES, QUERY_TYPE_USER, \
QUERY_TYPE_LIST, QUERY_TYPE_SEARCH
from mezzanine.twitter import get_auth_settings
from mezzanine.twitter.managers import TweetManager
# Patterns for linkifying @usernames and #hashtags in tweet text (Query.run).
re_usernames = re.compile("@([0-9a-zA-Z+_]+)", re.IGNORECASE)
re_hashtags = re.compile("#([0-9a-zA-Z+_]+)", re.IGNORECASE)
# HTML replacement templates used with .sub(); \1 is the captured name/tag.
replace_hashtags = "<a href=\"http://twitter.com/search?q=%23\\1\">#\\1</a>"
replace_usernames = "<a href=\"http://twitter.com/\\1\">@\\1</a>"
class TwitterQueryException(Exception):
    """Raised when a Twitter API query cannot be built or executed."""
@python_2_unicode_compatible
class Query(models.Model):
    """A saved Twitter query (user timeline, list, or search)."""

    # One of QUERY_TYPE_USER / QUERY_TYPE_LIST / QUERY_TYPE_SEARCH.
    type = models.CharField(_("Type"), choices=QUERY_TYPE_CHOICES,
                            max_length=10)
    value = models.CharField(_("Value"), max_length=140)
    # Cleared after a successful run() so the scheduler can skip stale queries.
    interested = models.BooleanField("Interested", default=True)

    class Meta:
        verbose_name = _("Twitter query")
        verbose_name_plural = _("Twitter queries")
        ordering = ("-id",)

    def __str__(self):
        return "%s: %s" % (self.get_type_display(), self.value)

    def run(self):
        """
        Request new tweets from the Twitter API.
        """
        try:
            value = quote(self.value)
        except KeyError:
            value = self.value
        # REST endpoints keyed by query type; value is already URL-quoted.
        urls = {
            QUERY_TYPE_USER: ("https://api.twitter.com/1.1/statuses/"
                              "user_timeline.json?screen_name=%s"
                              "&include_rts=true" % value.lstrip("@")),
            QUERY_TYPE_LIST: ("https://api.twitter.com/1.1/lists/statuses.json"
                              "?list_id=%s&include_rts=true" % value),
            QUERY_TYPE_SEARCH: "https://api.twitter.com/1.1/search/tweets.json"
                               "?q=%s" % value,
        }
        try:
            url = urls[self.type]
        except KeyError:
            raise TwitterQueryException("Invalid query type: %s" % self.type)
        settings.use_editable()
        auth_settings = get_auth_settings()
        if not auth_settings:
            from mezzanine.conf import registry
            if self.value == registry["TWITTER_DEFAULT_QUERY"]["default"]:
                # These are some read-only keys and secrets we use
                # for the default query (eg nothing has been configured)
                # NOTE(security review): credentials embedded in source; even
                # read-only keys are better supplied via configuration.
                auth_settings = (
                    "KxZTRD3OBft4PP0iQW0aNQ",
                    "sXpQRSDUVJ2AVPZTfh6MrJjHfOGcdK4wRb1WTGQ",
                    "1368725588-ldWCsd54AJpG2xcB5nyTHyCeIC3RJcNVUAkB1OI",
                    "r9u7qS18t8ad4Hu9XVqmCGxlIpzoCN3e1vx6LOSVgyw3R",
                )
            else:
                raise TwitterQueryException("Twitter OAuth settings missing")
        try:
            tweets = requests.get(url, auth=OAuth1(*auth_settings)).json()
        except Exception as e:
            raise TwitterQueryException("Error retrieving: %s" % e)
        # Probe the payload for an API error message; the except swallows the
        # lookup failures that mean "no error was reported".
        try:
            raise TwitterQueryException(tweets["errors"][0]["message"])
        except (IndexError, KeyError, TypeError):
            pass
        if self.type == "search":
            # Search responses wrap the tweet list in a "statuses" key.
            tweets = tweets["statuses"]
        for tweet_json in tweets:
            remote_id = str(tweet_json["id"])
            tweet, created = self.tweets.get_or_create(remote_id=remote_id)
            if not created:
                # Already stored from a previous run.
                continue
            if "retweeted_status" in tweet_json:
                # Record the retweeter, then process the original tweet.
                user = tweet_json['user']
                tweet.retweeter_user_name = user["screen_name"]
                tweet.retweeter_full_name = user["name"]
                tweet.retweeter_profile_image_url = user["profile_image_url"]
                tweet_json = tweet_json["retweeted_status"]
            # NOTE(review): both branches below are identical apart from an
            # intermediate variable — including the same date_format — so the
            # split by query type appears vestigial.
            if self.type == QUERY_TYPE_SEARCH:
                tweet.user_name = tweet_json['user']['screen_name']
                tweet.full_name = tweet_json['user']['name']
                tweet.profile_image_url = \
                    tweet_json['user']["profile_image_url"]
                date_format = "%a %b %d %H:%M:%S +0000 %Y"
            else:
                user = tweet_json["user"]
                tweet.user_name = user["screen_name"]
                tweet.full_name = user["name"]
                tweet.profile_image_url = user["profile_image_url"]
                date_format = "%a %b %d %H:%M:%S +0000 %Y"
            # Linkify URLs, then @usernames and #hashtags.
            tweet.text = urlize(tweet_json["text"])
            tweet.text = re_usernames.sub(replace_usernames, tweet.text)
            tweet.text = re_hashtags.sub(replace_hashtags, tweet.text)
            if getattr(settings, 'TWITTER_STRIP_HIGH_MULTIBYTE', False):
                # Drop characters outside the 2-byte UTF-8 range (e.g. for
                # MySQL utf8 columns that cannot store 4-byte chars).
                chars = [ch for ch in tweet.text if ord(ch) < 0x800]
                tweet.text = ''.join(chars)
            d = datetime.strptime(tweet_json["created_at"], date_format)
            tweet.created_at = make_aware(d, utc)
            # NOTE(review): the tweet is saved twice — once with MySQL
            # warnings suppressed, then unconditionally; confirm the second
            # save is intentional.
            try:
                tweet.save()
            except Warning:
                pass
            tweet.save()
        self.interested = False
        self.save()
class Tweet(models.Model):
    """A tweet fetched by a :class:`Query`, stored with pre-rendered HTML text."""

    # Twitter's tweet id as a string. NOTE(review): not unique=True; the
    # related get_or_create in Query.run is scoped per query, so the same
    # remote tweet can be stored once per query.
    remote_id = models.CharField(_("Twitter ID"), max_length=50)
    created_at = models.DateTimeField(_("Date/time"), null=True)
    # HTML (linkified) tweet body produced in Query.run.
    text = models.TextField(_("Message"), null=True)
    profile_image_url = models.URLField(_("Profile image URL"), null=True)
    user_name = models.CharField(_("User name"), max_length=100, null=True)
    full_name = models.CharField(_("Full name"), max_length=100, null=True)
    # Retweeter fields are populated only when the tweet is a retweet.
    retweeter_profile_image_url = models.URLField(
        _("Profile image URL (Retweeted by)"), null=True)
    retweeter_user_name = models.CharField(
        _("User name (Retweeted by)"), max_length=100, null=True)
    retweeter_full_name = models.CharField(
        _("Full name (Retweeted by)"), max_length=100, null=True)
    # NOTE(review): ForeignKey without on_delete — required in Django >= 2.0.
    query = models.ForeignKey("Query", related_name="tweets")
    objects = TweetManager()

    class Meta:
        verbose_name = _("Tweet")
        verbose_name_plural = _("Tweets")
        ordering = ("-created_at",)

    def __str__(self):
        return "%s: %s" % (self.user_name, self.text)

    def is_retweet(self):
        # Retweeter fields are only set for retweets, so use one as a flag.
        return self.retweeter_user_name is not None
|
# NOTE: script/notebook fragment — relies on `plt` (matplotlib.pyplot) and
# `data` (a DataFrame indexed by datetime, per the '2012': slice) defined
# earlier, outside this excerpt.
fig, ax = plt.subplots()
# Mean concentration per column from 2012 onward, as a bar chart.
data['2012':].mean().plot(kind='bar', ax=ax, rot=0, color='C0')
ax.set_ylabel("NO$_2$ concentration (µg/m³)")
# Horizontal reference line for the yearly legal limit.
ax.axhline(y=40., color='darkorange')
ax.text(0.01, 0.48, 'Yearly limit is 40 µg/m³',
        horizontalalignment='left', fontsize=13,
        transform=ax.transAxes, color='darkorange');
|
"""
OOB configuration.
This module should be included in (or replace) the
default module set in settings.OOB_PLUGIN_MODULES
All functions defined in this module are made available
to be called by the OOB handler.
See src/server/oob_msdp.py for more information.
function execution - the oob protocol can execute a function directly on
the server. The available functions must be defined
as global functions via settings.OOB_PLUGIN_MODULES.
repeat func execution - the oob protocol can request a given function be
executed repeatedly at a regular interval. This
uses an internal script pool.
tracking - the oob protocol can request Evennia to track changes to
fields on objects, as well as changes in Attributes. This is
done by dynamically adding tracker-objects on entities. The
behaviour of those objects can be customized via
settings.OOB_PLUGIN_MODULES.
oob functions have the following call signature:
function(caller, session, *args, **kwargs)
oob trackers should inherit from the OOBTracker class in src/server/oob_msdp.py
and implement a minimum of the same functionality.
a global function oob_error will be used as optional error management.
"""
from src.server.oob_cmds import *
|
import os.path
import os
import random
def rename(src, dst):
    """Atomic rename, with a workaround for Windows' refusal to overwrite.

    On POSIX, os.rename replaces dst atomically. On Windows it fails when
    dst exists, so dst is first moved aside to a temporary name.

    Bug fixes: ``errno`` was referenced below but never imported (NameError
    on the fallback path); the Py2-only ``except OSError, err`` and
    ``raise IOError, (...)`` syntax is replaced with forms valid in both
    Python 2.6+ and Python 3.
    """
    # This is taken from mercurial
    try:
        os.rename(src, dst)
    except OSError:
        import errno  # local import: only needed on the fallback path

        # If dst exists, rename will fail on windows, and we cannot
        # unlink an opened file. Instead, the destination is moved to
        # a temporary location if it already exists.
        def tempname(prefix):
            # Try a handful of random suffixes before giving up.
            for i in range(5):
                fn = '%s-%08x' % (prefix, random.randint(0, 0xffffffff))
                if not os.path.exists(fn):
                    return fn
            raise IOError(errno.EEXIST, "No usable temporary filename found")
        temp = tempname(dst)
        os.rename(dst, temp)
        try:
            os.unlink(temp)
        except:
            # Some rude AV-scanners on Windows may cause the unlink to
            # fail. Not aborting here just leaks the temp file, whereas
            # aborting at this point may leave serious inconsistencies.
            # Ideally, we would notify the user here.
            pass
        os.rename(src, dst)
|
from dateutil.relativedelta import relativedelta
from script.models import Script, ScriptProgress
from rapidsms.models import Connection
import datetime
from rapidsms.models import Contact
from rapidsms.contrib.locations.models import Location
from poll.models import Poll
from script.models import ScriptStep
from django.db.models import Count
from django.conf import settings
from education.scheduling import schedule_at, at
def is_holiday(date1, holidays = getattr(settings, 'SCHOOL_HOLIDAYS', [])):
    """Return True when date1 falls inside any (start, end) holiday span.

    NOTE: the default is bound once at definition time from settings.
    When an entry's end value is a string, only the start date is compared
    (single-day holiday convention — confirm against callers).
    """
    for span_start, span_end in holidays:
        if isinstance(span_end, str):
            if date1.date() == span_start.date():
                return True
        elif span_start.date() <= date1.date() <= span_end.date():
            return True
    return False
def is_empty(arg):
    """
    Generalizes 'empty' checks on Strings, sequences, and dicts.
    Returns 'True' for None, empty strings, strings with just white-space,
    and sequences with len == 0
    """
    if arg is None:
        return True
    if isinstance(arg, basestring):
        arg = arg.strip()
    try:
        return len(arg) == 0
    except TypeError:
        # not a sized object
        return False
def previous_calendar_week(t=None):
    """
    To education monitoring, a week runs between Thursdays,
    Thursday marks the beginning of a new week of data submission
    Data for a new week is accepted until Wednesday evening of the following week
    """
    d = t or datetime.datetime.now()
    if d.weekday() == 3:
        # Already a Thursday: the week starts today.
        last_thursday = d
    else:
        # Next Thursday minus seven days gives the most recent Thursday.
        last_thursday = (d + datetime.timedelta((3 - d.weekday()) % 7)
                         - datetime.timedelta(days=7))
    end_date = last_thursday + datetime.timedelta(days=6)
    # Note: the start is a date while the end stays a datetime.
    return (last_thursday.date(), end_date)
def _this_thursday(sp=None, get_time=datetime.datetime.now, time_set=None, holidays=getattr(settings, 'SCHOOL_HOLIDAYS', [])):
    """
    This Thursday of the week which is not a school holiday.
    """
    schedule = time_set or get_time()
    day = sp.time if sp else schedule
    # Advance to this week's Thursday (0 days if already Thursday).
    day = day + datetime.timedelta((3 - day.weekday()) % 7)
    # Slide forward one day at a time past any school holiday.
    while is_holiday(day, holidays):
        day = day + datetime.timedelta(1)
    return at(day.date(), 10)
def get_polls(**kwargs):
    """Return polls not referenced by any script step, annotated with a
    responses count.

    NOTE(review): **kwargs is accepted but never used.
    """
    script_polls = ScriptStep.objects.values_list('poll', flat=True).exclude(poll=None)
    return Poll.objects.exclude(pk__in=script_polls).annotate(Count('responses'))
def compute_average_percentage(list_of_percentages):
    """
    Average percentage
    -> this is also a handy tool to compute averages generally while sanitizing

    Bug fix: the try previously wrapped the whole loop, so one bad value
    aborted sanitization of every remaining entry; each value is now
    converted individually and bad ones are skipped. The redundant
    `isinstance(float(i), float)` check (always True) is dropped.
    """
    sanitize = []
    for i in list_of_percentages:
        try:
            sanitize.append(float(i))
        except (TypeError, ValueError):
            # Non-numeric entry: report and skip it.
            print("non-numeric characters used")
    if not sanitize:
        return 0
    return sum(sanitize) / float(len(sanitize))
def list_poll_responses(poll, **kwargs):
    """
    pass a poll queryset and you get yourself a dict with locations vs responses (quite handy for the charts)
    dependencies: Contact and Location must be in your module; this lists all Poll responses by district
    """
    #forceful import
    from poll.models import Poll
    to_ret = {}
    """
    narrowed down to 3 districts (and up to 14 districts)
    """
    DISTRICT = ['Kaabong', 'Kabarole', 'Kyegegwa', 'Kotido']
    if not kwargs:
        # if no other arguments are provided
        for location in Location.objects.filter(name__in=DISTRICT):
            to_ret[location.__unicode__()] = compute_average_percentage([msg.message.text for msg in poll.responses.filter(contact__in=Contact.objects.filter(reporting_location=location))])
        return to_ret
    else:
        # filter by number of weeks
        #TODO more elegant solution to coincide with actual school term weeks
        date_filter = kwargs['weeks'] #give the date in weeks
        date_now = datetime.datetime.now()
        date_diff = date_now - datetime.timedelta(weeks=date_filter)
        # NOTE(review): EmisReporter and Locations are not imported or defined
        # in this module, so this line raises NameError if this branch is ever
        # taken ("Locations" also looks like a typo for "Location"); the
        # queryset below is additionally unused.
        all_emis_reports = EmisReporter.objects.filter(reporting_location__in=[loc for loc in Locations.objects.filter(name__in=DISTRICT)])
        for location in Location.objects.filter(name__in=DISTRICT):
            to_ret[location.__unicode__()] = compute_average_percentage([msg.message.text for msg in poll.responses.filter(date__gte=date_diff, contact__in=Contact.objects.filter(reporting_location=location))])
        return to_ret
# Curriculum themes keyed by "unit.topic" float literals (e.g. 10.1).
# NOTE: lookups must use the same literal form used here; the keys are
# floats, not strings.
themes = {
    1.1 : "Name and location of our Sub-county/Division",
    1.2 : 'Physical features of our Sub-County/Division',
    1.3 : 'People in our Sub-county/Division',
    2.1 : 'Occupations of people in our Sub-county/Division and their importance',
    2.2 : 'Social Services and their importance',
    2.3 : 'Challenges in social services and their possible solutions',
    3.1 : 'Soil',
    3.2 : 'Natural causes of changes in the environment',
    3.3 : 'Changes in the environment through human activities',
    4.1 : 'Air and the Sun',
    4.2 : 'Water',
    4.3 : 'Managing Water',
    5.1 : 'Living things',
    5.2 : 'Birds and Insects',
    5.3 : 'Care for insects, birds and animals',
    6.1 : 'Plants and their habitat',
    6.2 : 'Parts of a flowering plant and their uses',
    6.3 : 'Crop-growing practices',
    7.1 : 'Saving resources',
    7.2 : 'Spending resources',
    7.3 : 'Projects',
    8.1 : 'Living in peace with others',
    8.2 : 'Child rights, needs and their importance',
    8.3 : 'Child responsibility',
    9.1 : 'Customs in our sub-county/division',
    9.2 : 'Gender',
    9.3 : 'Ways of promoting and preserving culture',
    10.1: 'Disease vectors',
    10.2: 'Diseases spread by vectors',
    10.3: 'HIV/AIDS',
    11.1: 'Concept of technology',
    11.2: 'Processing and making things from natural materials',
    11.3: 'Making things from artificial materials',
    12.1: 'Sources of energy',
    12.2: 'Ways of saving energy',
    12.3: 'Dangers of energy and ways of avoiding them'
}
"""
Descriptive statistical analysis tool.
"""
class StatisticsException(Exception):
    """Raised for invalid input to the Statistics class (e.g. empty data)."""
class Statistics(object):
"""Class for descriptive statistical analysis.
Behavior:
Computes numerical statistics for a given data set.
Available public methods:
None
Available instance attributes:
N: total number of elements in the data set
sum: sum of all values (n) in the data set
min: smallest value of the data set
max: largest value of the data set
mode: value(s) that appear(s) most often in the data set
mean: arithmetic average of the data set
range: difference between the largest and smallest value in the data set
median: value which is in the exact middle of the data set
variance: measure of the spread of the data set about the mean
stddev: standard deviation - measure of the dispersion of the data set
based on variance
identification: Instance ID
Raised Exceptions:
StatisticsException
Bases Classes:
object (builtin)
Example Usage:
x = [ -1, 0, 1 ]
try:
stats = Statistics(x)
except StatisticsException, mesg:
<handle exception>
print "N: %s" % stats.N
print "SUM: %s" % stats.sum
print "MIN: %s" % stats.min
print "MAX: %s" % stats.max
print "MODE: %s" % stats.mode
print "MEAN: %0.2f" % stats.mean
print "RANGE: %s" % stats.range
print "MEDIAN: %0.2f" % stats.median
print "VARIANCE: %0.5f" % stats.variance
print "STDDEV: %0.5f" % stats.stddev
print "DATA LIST: %s" % stats.sample
"""
def __init__(self, sample=[], population=False):
"""Statistics class initializer method."""
# Raise an exception if the data set is empty.
if (not sample):
raise StatisticsException, "Empty data set!: %s" % sample
# The data set (a list).
self.sample = sample
# Sample/Population variance determination flag.
self.population = population
self.N = len(self.sample)
self.sum = float(sum(self.sample))
self.min = min(self.sample)
self.max = max(self.sample)
self.range = self.max - self.min
self.mean = self.sum/self.N
# Inplace sort (list is now in ascending order).
self.sample.sort()
self.__getMode()
# Instance identification attribute.
self.identification = id(self)
def __getMode(self):
"""Determine the most repeated value(s) in the data set."""
# Initialize a dictionary to store frequency data.
frequency = {}
# Build dictionary: key - data set values; item - data frequency.
for x in self.sample:
if (x in frequency):
frequency[x] += 1
else:
frequency[x] = 1
# Create a new list containing the values of the frequency dict. Convert
# the list, which may have duplicate elements, into a set. This will
# remove duplicate elements. Convert the set back into a sorted list
# (in descending order). The first element of the new list now contains
# the frequency of the most repeated values(s) in the data set.
# mode = sorted(list(set(frequency.values())), reverse=True)[0]
# Or use the builtin - max(), which returns the largest item of a
# non-empty sequence.
mode = max(frequency.values())
# If the value of mode is 1, there is no mode for the given data set.
if (mode == 1):
self.mode = []
return
# Step through the frequency dictionary, looking for values equaling
# the current value of mode. If found, append the value and its
# associated key to the self.mode list.
self.mode = [(x, mode) for x in frequency if (mode == frequency[x])]
def __getVariance(self):
"""Determine the measure of the spread of the data set about the mean.
Sample variance is determined by default; population variance can be
determined by setting population attribute to True.
"""
x = 0 # Summation variable.
# Subtract the mean from each data item and square the difference.
# Sum all the squared deviations.
for item in self.sample:
x += (item - self.mean)**2.0
try:
if (not self.population):
# Divide sum of squares by N-1 (sample variance).
self.variance = x/(self.N-1)
else:
# Divide sum of squares by N (population variance).
self.variance = x/self.N
except:
self.variance = 0
def __getStandardDeviation(self):
"""Determine the measure of the dispersion of the data set based on the
variance.
"""
from math import sqrt # Mathematical functions.
# Take the square root of the variance.
self.stddev = sqrt(self.variance)
def extract_key_count(list, key=None):
    """Return the 'value' entry of the first dict in *list* whose
    'category__name' equals *key* (e.g. rows of a categorized poll).

    Returns 0 when no entry matches, and None when *list* or *key* is
    falsy (preserved from the original contract).

    Note: the parameter name `list` shadows the builtin; it is kept for
    backward compatibility with keyword callers.
    """
    if list and key:
        for entry in list:
            if entry.get('category__name') == key:
                return entry.get('value')
        # BUG FIX: the original returned 0 from inside the loop on the
        # first non-matching entry, so matches after the first row were
        # never found. Return 0 only after scanning the whole list.
        return 0
def get_week_count(reference_date, d):
    """Count how many 7-day steps back from *d* stay on or after
    *reference_date* (dates compared at day granularity)."""
    total = 0
    while reference_date.date() <= d.date():
        d -= datetime.timedelta(days=7)
        total += 1
    return total
def get_months(start_date,end_date):
    """Split [start_date, end_date] into per-calendar-month [start, end] pairs.

    Each element of the returned list is a two-item list of naive datetimes:
    the first moment covered in that month and the last (23:59 of the month's
    final day), with the very last pair capped at end_date.
    Requires dateutil.relativedelta (imported at module level).
    """
    to_ret = []
    first_day = start_date
    while start_date < end_date:
        # Last minute of the month start_date falls in:
        # jump to day 1, forward one month, back one day, set 23:59.
        last_day = start_date + relativedelta(day=1, months=+1, days=-1,hour=23,minute=59)
        start_date += relativedelta(months=1)
        to_ret.append([
            datetime.datetime(first_day.year, first_day.month, first_day.day,first_day.hour,first_day.minute),
            datetime.datetime(last_day.year, last_day.month, last_day.day,last_day.hour,last_day.minute)])
        # Next window starts at 00:00 on day 1 of the following month.
        first_day = start_date + relativedelta(day=1,hour=00,minute=00)
    # Final partial month: from the last first_day up to end_date itself.
    # NOTE(review): this appends even when the loop never ran — presumably
    # callers guarantee start_date < end_date; confirm.
    to_ret.append([
        datetime.datetime(first_day.year, first_day.month, first_day.day,first_day.hour,first_day.minute),
        datetime.datetime(end_date.year, end_date.month, end_date.day,end_date.hour,end_date.minute)])
    return to_ret
|
import os
from example_builder import ExampleBuilder
# reST page template for each generated gallery example: sphinx anchor,
# example docstring, figure list, collapsible stdout, and a collapsible,
# downloadable source listing. %-style keys are filled by ExampleBuilder.
RST_TEMPLATE = """
.. _%(sphinx_tag)s:
%(docstring)s
%(image_list)s
.. raw:: html
<div class="toggle_trigger"><a href="#">
**Code output:**
.. raw:: html
</a></div>
<div class="toggle_container">
.. literalinclude:: %(stdout)s
.. raw:: html
</div>
<div class="toggle_trigger" id="start_open"><a href="#">
**Python source code:**
.. raw:: html
</a></div>
<div class="toggle_container">
.. literalinclude:: %(fname)s
:lines: %(end_line)s-
.. raw:: html
</div>
<div align="right">
:download:`[download source: %(fname)s] <%(fname)s>`
.. raw:: html
</div>
"""
def main(app):
    """Build the book-figures gallery pages from the examples directory.

    Connected to Sphinx's 'builder-inited' event in setup() below.
    """
    target_dir = os.path.join(app.builder.srcdir, 'book_figures')
    source_dir = os.path.abspath(os.path.join(app.builder.srcdir, '..', 'examples'))
    # plot_gallery may arrive as the string 'True'/'False' from the command
    # line; eval turns it into a bool. Non-strings raise TypeError instead.
    try:
        plot_gallery = eval(app.builder.config.plot_gallery)
    except TypeError:
        plot_gallery = bool(app.builder.config.plot_gallery)
    for directory in (source_dir, target_dir):
        if not os.path.exists(directory):
            os.makedirs(directory)
    builder = ExampleBuilder(source_dir, target_dir,
                             execute_files=plot_gallery,
                             contents_file='contents.txt',
                             dir_info_file='README.rst',
                             dir_footer_file='FOOTER.rst',
                             sphinx_tag_base='book_fig',
                             template_example=RST_TEMPLATE)
    builder.run()
def setup(app):
    """Sphinx extension entry point: run the gallery build at builder init."""
    app.connect('builder-inited', main)
    #app.add_config_value('plot_gallery', True, 'html')
|
from importlib import import_module
from django.core.urlresolvers import (RegexURLPattern,
RegexURLResolver, LocaleRegexURLResolver)
from django.core.exceptions import ImproperlyConfigured
from django.utils import six
__all__ = ['handler400', 'handler403', 'handler404', 'handler500', 'include', 'patterns', 'url']
# Dotted paths to the default error views; root URLconfs may override these.
handler400 = 'django.views.defaults.bad_request'
handler403 = 'django.views.defaults.permission_denied'
handler404 = 'django.views.defaults.page_not_found'
handler500 = 'django.views.defaults.server_error'
def include(arg, namespace=None, app_name=None):
    """Return a (urlconf_module, app_name, namespace) triple for url().

    *arg* may be a dotted module path, a module/list of patterns, or a
    3-tuple that already carries app_name and namespace.
    """
    if isinstance(arg, tuple):
        # A 3-tuple provides its own namespace hint; an explicit namespace
        # argument would conflict with it.
        if namespace:
            raise ImproperlyConfigured('Cannot override the namespace for a dynamic module that provides a namespace')
        urlconf_module, app_name, namespace = arg
    else:
        # No namespace hint - use manually provided namespace.
        urlconf_module = arg
    if isinstance(urlconf_module, six.string_types):
        urlconf_module = import_module(urlconf_module)
    url_patterns = getattr(urlconf_module, 'urlpatterns', urlconf_module)
    # Iterate the patterns (some testcases rely on this) and reject
    # i18n_patterns inside an include - only the root URLconf may use them.
    if isinstance(url_patterns, (list, tuple)):
        for entry in url_patterns:
            if isinstance(entry, LocaleRegexURLResolver):
                raise ImproperlyConfigured(
                    'Using i18n_patterns in an included URLconf is not allowed.')
    return (urlconf_module, app_name, namespace)
def patterns(prefix, *args):
    """Collect URL patterns into a list, applying *prefix* to each entry.

    Tuples/lists are expanded through url(); existing RegexURLPattern
    instances get the prefix added in place.
    """
    collected = []
    for entry in args:
        if isinstance(entry, (list, tuple)):
            entry = url(prefix=prefix, *entry)
        elif isinstance(entry, RegexURLPattern):
            entry.add_prefix(prefix)
        collected.append(entry)
    return collected
def url(regex, view, kwargs=None, name=None, prefix=''):
    """Build a URL pattern, or a resolver when *view* is an include() result."""
    if isinstance(view, (list, tuple)):
        # include(...) produced (urlconf_module, app_name, namespace).
        urlconf_module, app_name, namespace = view
        return RegexURLResolver(regex, urlconf_module, kwargs, app_name=app_name, namespace=namespace)
    # String views are dotted paths, optionally joined onto *prefix*.
    if isinstance(view, six.string_types):
        if not view:
            raise ImproperlyConfigured('Empty URL pattern view name not permitted (for pattern %r)' % regex)
        if prefix:
            view = prefix + '.' + view
    return RegexURLPattern(regex, view, kwargs, name)
|
from __future__ import absolute_import
import datetime
import jwt
import re
import logging
from six.moves.urllib.parse import parse_qs, urlparse, urlsplit
from sentry.integrations.atlassian_connect import get_query_hash
from sentry.shared_integrations.exceptions import ApiError
from sentry.integrations.client import ApiClient
from sentry.utils.http import absolute_uri
logger = logging.getLogger("sentry.integrations.jira")
# JWT issuer identifier sent as the `iss` claim: "<our hostname>.jira".
JIRA_KEY = "%s.jira" % (urlparse(absolute_uri()).hostname,)
# Matches Jira issue keys such as "PROJ-123".
ISSUE_KEY_RE = re.compile(r"^[A-Za-z][A-Za-z0-9]*-\d+$")
class JiraCloud(object):
    """Jira-Cloud specifics (JWT auth, GDPR user fields) that a JiraClient
    needs in order to communicate with jira."""

    def __init__(self, shared_secret):
        self.shared_secret = shared_secret

    @property
    def cache_prefix(self):
        return "sentry-jira-2:"

    def request_hook(self, method, path, data, params, **kwargs):
        """Attach Atlassian Connect JWT authentication to an outgoing request."""
        # Fold query args already embedded in `path` into the params dict.
        query_args = dict(parse_qs(urlsplit(path).query))
        if params:
            query_args.update(params)
        path = path.split("?")[0]
        claims = {
            "iss": JIRA_KEY,
            "iat": datetime.datetime.utcnow(),
            "exp": datetime.datetime.utcnow() + datetime.timedelta(seconds=5 * 60),
            "qsh": get_query_hash(path, method.upper(), query_args),
        }
        token = jwt.encode(claims, self.shared_secret)
        merged_params = dict(jwt=token, **(query_args or {}))
        spec = dict(kwargs, method=method, path=path, data=data, params=merged_params)
        return spec

    def user_id_field(self):
        """Jira-Cloud requires GDPR compliant API usage, so use accountId."""
        return "accountId"

    def user_query_param(self):
        """Jira-Cloud requires GDPR compliant API usage, so use query."""
        return "query"

    def user_id_get_param(self):
        """Jira-Cloud requires GDPR compliant API usage, so use accountId."""
        return "accountId"
class JiraApiClient(ApiClient):
    """Client for the Jira REST API v2, parameterised by a `jira_style`
    object that encapsulates server-vs-cloud auth and user-field details."""
    # TODO: Update to v3 endpoints
    COMMENTS_URL = "/rest/api/2/issue/%s/comment"
    COMMENT_URL = "/rest/api/2/issue/%s/comment/%s"
    STATUS_URL = "/rest/api/2/status"
    CREATE_URL = "/rest/api/2/issue"
    ISSUE_URL = "/rest/api/2/issue/%s"
    META_URL = "/rest/api/2/issue/createmeta"
    PRIORITIES_URL = "/rest/api/2/priority"
    PROJECT_URL = "/rest/api/2/project"
    SEARCH_URL = "/rest/api/2/search/"
    VERSIONS_URL = "/rest/api/2/project/%s/versions"
    USERS_URL = "/rest/api/2/user/assignable/search"
    USER_URL = "/rest/api/2/user"
    SERVER_INFO_URL = "/rest/api/2/serverInfo"
    ASSIGN_URL = "/rest/api/2/issue/%s/assignee"
    TRANSITION_URL = "/rest/api/2/issue/%s/transitions"
    EMAIL_URL = "/rest/api/3/user/email"

    integration_name = "jira"

    # This timeout is completely arbitrary. Jira doesn't give us any
    # caching headers to work with. Ideally we want a duration that
    # lets the user make their second jira issue with cached data.
    cache_time = 240

    def __init__(self, base_url, jira_style, verify_ssl, logging_context=None):
        self.base_url = base_url
        # `jira_style` encapsulates differences between jira server & jira cloud.
        # We only support one API version for Jira, but server/cloud require different
        # authentication mechanisms and caching.
        self.jira_style = jira_style
        super(JiraApiClient, self).__init__(verify_ssl, logging_context)

    def get_cache_prefix(self):
        return self.jira_style.cache_prefix

    def request(self, method, path, data=None, params=None, **kwargs):
        """
        Use the request_hook method for our specific style of Jira to
        add authentication data and transform parameters.
        """
        request_spec = self.jira_style.request_hook(method, path, data, params, **kwargs)
        if "headers" not in request_spec:
            request_spec["headers"] = {}
        # Force adherence to the GDPR compliant API conventions.
        # See
        # https://developer.atlassian.com/cloud/jira/platform/deprecation-notice-user-privacy-api-migration-guide
        request_spec["headers"]["x-atlassian-force-account-id"] = "true"
        return self._request(**request_spec)

    # The three user_* methods below just delegate to the style object,
    # which knows the GDPR-compliant field/param names for its deployment.
    def user_id_get_param(self):
        return self.jira_style.user_id_get_param()

    def user_id_field(self):
        return self.jira_style.user_id_field()

    def user_query_param(self):
        return self.jira_style.user_query_param()

    def get_issue(self, issue_id):
        return self.get(self.ISSUE_URL % (issue_id,))

    def search_issues(self, query):
        """Search issues: exact id match when `query` looks like an issue key,
        otherwise a full-text JQL search. Quotes in `query` are escaped."""
        # check if it looks like an issue id
        if ISSUE_KEY_RE.match(query):
            jql = 'id="%s"' % query.replace('"', '\\"')
        else:
            jql = 'text ~ "%s"' % query.replace('"', '\\"')
        return self.get(self.SEARCH_URL, params={"jql": jql})

    def create_comment(self, issue_key, comment):
        return self.post(self.COMMENTS_URL % issue_key, data={"body": comment})

    def update_comment(self, issue_key, comment_id, comment):
        return self.put(self.COMMENT_URL % (issue_key, comment_id), data={"body": comment})

    def get_projects_list(self):
        return self.get_cached(self.PROJECT_URL)

    def get_project_key_for_id(self, project_id):
        """Map a numeric project id to its key; '' when unknown or falsy."""
        if not project_id:
            return ""
        projects = self.get_projects_list()
        for project in projects:
            if project["id"] == project_id:
                return project["key"].encode("utf-8")
        return ""

    def get_create_meta_for_project(self, project):
        """Fetch createmeta (issue types + fields) for one project, or None
        when the API answers empty or without a matching project."""
        params = {"expand": "projects.issuetypes.fields", "projectIds": project}
        metas = self.get_cached(self.META_URL, params=params)
        # We saw an empty JSON response come back from the API :(
        if not metas:
            logger.info(
                "jira.get-create-meta.empty-response",
                extra={"base_url": self.base_url, "project": project},
            )
            return None
        # XXX(dcramer): document how this is possible, if it even is
        if len(metas["projects"]) > 1:
            raise ApiError(u"More than one project found matching {}.".format(project))
        try:
            return metas["projects"][0]
        except IndexError:
            logger.info(
                "jira.get-create-meta.key-error",
                extra={"base_url": self.base_url, "project": project},
            )
            return None

    def get_versions(self, project):
        return self.get_cached(self.VERSIONS_URL % project)

    def get_priorities(self):
        return self.get_cached(self.PRIORITIES_URL)

    def get_users_for_project(self, project):
        # Jira Server wants a project key, while cloud is indifferent.
        project_key = self.get_project_key_for_id(project)
        return self.get_cached(self.USERS_URL, params={"project": project_key})

    def search_users_for_project(self, project, username):
        # Jira Server wants a project key, while cloud is indifferent.
        project_key = self.get_project_key_for_id(project)
        return self.get_cached(
            self.USERS_URL, params={"project": project_key, self.user_query_param(): username}
        )

    def search_users_for_issue(self, issue_key, email):
        return self.get_cached(
            self.USERS_URL, params={"issueKey": issue_key, self.user_query_param(): email}
        )

    def get_user(self, user_id):
        user_id_get_param = self.user_id_get_param()
        return self.get_cached(self.USER_URL, params={user_id_get_param: user_id})

    def create_issue(self, raw_form_data):
        data = {"fields": raw_form_data}
        return self.post(self.CREATE_URL, data=data)

    def get_server_info(self):
        return self.get(self.SERVER_INFO_URL)

    def get_valid_statuses(self):
        return self.get_cached(self.STATUS_URL)

    def get_transitions(self, issue_key):
        return self.get_cached(self.TRANSITION_URL % issue_key)["transitions"]

    def transition_issue(self, issue_key, transition_id):
        return self.post(self.TRANSITION_URL % issue_key, {"transition": {"id": transition_id}})

    def assign_issue(self, key, name_or_account_id):
        user_id_field = self.user_id_field()
        return self.put(self.ASSIGN_URL % key, data={user_id_field: name_or_account_id})

    def get_email(self, account_id):
        # Note: uses the v3 email endpoint (EMAIL_URL) unlike the rest.
        user = self.get_cached(self.EMAIL_URL, params={"accountId": account_id})
        return user.get("email")
|
import fbchat
from fbchat import PageData
def test_page_from_graphql(session):
    """PageData._from_graphql should parse a raw GraphQL page payload."""
    raw = {
        "id": "123456",
        "name": "Some school",
        "profile_picture": {"uri": "https://scontent-arn2-1.xx.fbcdn.net/v/..."},
        "url": "https://www.facebook.com/some-school/",
        "category_type": "SCHOOL",
        "city": None,
    }
    expected = PageData(
        session=session,
        id="123456",
        photo=fbchat.Image(url="https://scontent-arn2-1.xx.fbcdn.net/v/..."),
        name="Some school",
        url="https://www.facebook.com/some-school/",
        city=None,
        category="SCHOOL",
    )
    assert expected == PageData._from_graphql(session, raw)
|
import tempfile
import shutil
from voltgrid import GitManager
def git_checkout(git_url, git_branch=None, git_tag=None, git_hash=None):
    """Clone *git_url* at the given branch/tag/hash into a temp dir, then
    remove the dir. (The dir is deliberately left behind on failure so the
    checkout can be inspected.)"""
    workdir = tempfile.mkdtemp()
    manager = GitManager(url=git_url, git_dst=workdir, git_branch=git_branch, git_tag=git_tag, git_hash=git_hash)
    manager.run()
    shutil.rmtree(workdir)
def test_git_tag():
    """Checkout pinned to a tag."""
    git_checkout('https://github.com/voltgrid/voltgrid-pie.git', git_tag='v0.1.0')
def test_git_branch():
    """Checkout pinned to a branch."""
    git_checkout('https://github.com/voltgrid/voltgrid-pie.git', git_branch='master')
def test_git_hash():
    """Checkout pinned to a commit hash."""
    git_checkout('https://github.com/voltgrid/voltgrid-pie.git', git_hash='ab052369c675057dccc90a75fb92317e9b689a56')
|
"""
===========================================
Main Components (:mod:`artview.components`)
===========================================
.. currentmodule:: artview.components
ARTview offers some basic Components for visualization
of weather radar data using Py-ART and
ARTview functions.
.. autosummary::
:toctree: generated/
RadarDisplay
GridDisplay
Menu
LevelButtonWindow
FieldButtonWindow
LinkPlugins
SelectRegion
PlotDisplay
"""
import pyart
from pkg_resources import parse_version
from .plot_radar import RadarDisplay
# GridDisplay needs Py-ART >= 1.6.0; older installs get the legacy module
# under the same public name.
if parse_version(pyart.__version__) >= parse_version('1.6.0'):
    from .plot_grid import GridDisplay
else:
    from .plot_grid_legacy import GridDisplay
from .plot_points import PointsDisplay
from .menu import Menu
from .level import LevelButtonWindow
from .field import FieldButtonWindow
from .component_control import LinkPlugins
from .select_region import SelectRegion as SelectRegion_dev
from .select_region_old import SelectRegion
from .plot_simple import PlotDisplay
# Keep the package namespace clean: these were only needed for the
# version check above.
del pyart
del parse_version
|
import scrapy
class TiebaItem(scrapy.Item):
    """Placeholder item for the tieba spider; no fields defined yet."""
    # define the fields for your item here like:
    # name = scrapy.Field()
    pass
class ThreadItem(scrapy.Item):
    """A scraped forum thread: link, title text, snippet and metadata."""
    url = scrapy.Field()
    title = scrapy.Field()
    preview = scrapy.Field()
    author = scrapy.Field()
    tieba = scrapy.Field()
    date = scrapy.Field()
    keywords = scrapy.Field()
class NoneItem(scrapy.Item):
    """Same field set as ThreadItem.

    NOTE(review): duplicates ThreadItem field-for-field — presumably used to
    tag "no result" rows; could subclass ThreadItem instead. Confirm intent.
    """
    url = scrapy.Field()
    title = scrapy.Field()
    preview = scrapy.Field()
    author = scrapy.Field()
    tieba = scrapy.Field()
    date = scrapy.Field()
    keywords = scrapy.Field()
|
__version__=''' $Id$ '''
# NOTE(review): triple leading underscore looks like a typo for `__doc__`
# (as written it just creates an unused module global) — confirm.
___doc__=''
from reportlab.pdfgen import canvas
import time, os, sys
# Optional C accelerator; ACCEL records whether it is available so the
# benchmark output below can say which mode was measured.
try:
    import _rl_accel
    ACCEL = 1
except ImportError:
    ACCEL = 0
from reportlab.lib.units import inch, cm
from reportlab.lib.pagesizes import A4
# Page geometry: one-inch margins on an A4 page (units are points).
top_margin = A4[1] - inch
bottom_margin = inch
left_margin = inch
right_margin = A4[0] - inch
frame_width = right_margin - left_margin
def drawPageFrame(canv):
    """Draw the running page frame: header rule with title, footer rule,
    and a centred page number."""
    # Header: rule across the top, running title just above it.
    # (The original drew this identical top rule a second time after
    # drawString — redundant, so the duplicate call was removed.)
    canv.line(left_margin, top_margin, right_margin, top_margin)
    canv.setFont('Times-Italic',12)
    canv.drawString(left_margin, top_margin + 2, "Homer's Odyssey")
    # Footer rule and centred page number.
    canv.line(left_margin, bottom_margin, right_margin, bottom_margin)
    canv.drawCentredString(0.5*A4[0], 0.5 * inch,
                           "Page %d" % canv.getPageNumber())
def run(verbose=1):
    """Render Homer's Odyssey to odyssey.pdf and print a pages/second
    benchmark plus the output file's digest. (Python 2 code.)"""
    if sys.platform[0:4] == 'java':
        impl = 'Jython'
    else:
        impl = 'Python'
    verStr = '%d.%d' % (sys.version_info[0:2])
    if ACCEL:
        accelStr = 'with _rl_accel'
    else:
        accelStr = 'without _rl_accel'
    print 'Benchmark of %s %s %s' % (impl, verStr, accelStr)
    started = time.time()
    # invariant=1 keeps the PDF byte-identical between runs, so the md5
    # digest printed at the end is stable.
    canv = canvas.Canvas('odyssey.pdf', invariant=1)
    canv.setPageCompression(1)
    drawPageFrame(canv)
    #do some title page stuff
    canv.setFont("Times-Bold", 36)
    canv.drawCentredString(0.5 * A4[0], 7 * inch, "Homer's Odyssey")
    canv.setFont("Times-Bold", 18)
    canv.drawCentredString(0.5 * A4[0], 5 * inch, "Translated by Samuel Burton")
    canv.setFont("Times-Bold", 12)
    tx = canv.beginText(left_margin, 3 * inch)
    tx.textLine("This is a demo-cum-benchmark for PDFgen. It renders the complete text of Homer's Odyssey")
    tx.textLine("from a text file. On my humble P266, it does 77 pages per secondwhile creating a 238 page")
    tx.textLine("document. If it is asked to computer text metrics, measuring the width of each word as ")
    tx.textLine("one would for paragraph wrapping, it still manages 22 pages per second.")
    tx.textLine("")
    tx.textLine("Andy Robinson, Robinson Analytics Ltd.")
    canv.drawText(tx)
    canv.showPage()
    #on with the text...
    drawPageFrame(canv)
    canv.setFont('Times-Roman', 12)
    tx = canv.beginText(left_margin, top_margin - 0.5*inch)
    # Prefer the full text when present; fall back to the short sample.
    for fn in ('odyssey.full.txt','odyssey.txt'):
        if os.path.isfile(fn):
            break
    data = open(fn,'r').readlines()
    for line in data:
        #this just does it the fast way...
        tx.textLine(line.rstrip())
        #page breaking
        y = tx.getY()   #get y coordinate
        if y < bottom_margin + 0.5*inch:
            # Flush the current text object and start a fresh framed page.
            canv.drawText(tx)
            canv.showPage()
            drawPageFrame(canv)
            canv.setFont('Times-Roman', 12)
            tx = canv.beginText(left_margin, top_margin - 0.5*inch)
            #page
            pg = canv.getPageNumber()
            if verbose and pg % 10 == 0:
                print 'formatted page %d' % canv.getPageNumber()
    # Flush whatever text remains on the final partial page.
    if tx:
        canv.drawText(tx)
        canv.showPage()
        drawPageFrame(canv)
    if verbose:
        print 'about to write to disk...'
    canv.save()
    finished = time.time()
    elapsed = finished - started
    # The last showPage() opened an empty page, hence the -1.
    pages = canv.getPageNumber()-1
    speed = pages / elapsed
    # os.stat index 6 is st_size (bytes).
    fileSize = os.stat('odyssey.pdf')[6] / 1024
    print '%d pages in %0.2f seconds = %0.2f pages per second, file size %d kb' % (
        pages, elapsed, speed, fileSize)
    import md5
    print 'file digest: %s' % md5.md5(open('odyssey.pdf','rb').read()).hexdigest()
# Run the benchmark when executed directly; -q suppresses progress output.
if __name__=='__main__':
    quiet = ('-q' in sys.argv)
    run(verbose = not quiet)
|
import os
import sys
from distutils.core import setup
from distutils.sysconfig import get_python_lib
def fullsplit(path, result=None):
    """
    Split a pathname into components (the opposite of os.path.join)
    in a platform-neutral way.
    """
    parts = [] if result is None else result
    # Peel one component per iteration instead of recursing.
    while True:
        head, tail = os.path.split(path)
        if head == '':
            return [tail] + parts
        if head == path:
            # Reached an irreducible root such as '/'.
            return parts
        parts = [tail] + parts
        path = head
packages, package_data = [], {}
root_dir = os.path.dirname(__file__)
if root_dir != '':
    os.chdir(root_dir)
# Walk the package tree: directories with __init__.py become packages,
# other non-empty directories contribute package_data files.
for dirpath, dirnames, filenames in os.walk('bibletext'):
    # Ignore PEP 3147 cache dirs and those whose names start with '.'
    dirnames[:] = [d for d in dirnames if not d.startswith('.') and d != '__pycache__']
    parts = fullsplit(dirpath)
    package_name = '.'.join(parts)
    if '__init__.py' in filenames:
        packages.append(package_name)
    elif filenames:
        # Data directory: pop components until we reach the nearest
        # enclosing real package, keeping the popped tail as the
        # data files' relative path inside it.
        relative_path = []
        while '.'.join(parts) not in packages:
            relative_path.append(parts.pop())
        relative_path.reverse()
        path = os.path.join(*relative_path)
        package_files = package_data.setdefault('.'.join(parts), [])
        package_files.extend([os.path.join(path, f) for f in filenames])
version = '0.1.0'

# Package metadata; packages/package_data were collected by the walk above.
setup(
    name='Django-Bibletext',
    version=version,
    url='https://github.com/richardbolt/django-bibletext',
    author='Richard Bolt',
    author_email='richard@richardbolt.com',
    description=('A Django app that has a full Bible viewer, '
                 'including the KJV text by default.'),
    license='BSD',
    packages=packages,
    package_data=package_data,
    classifiers=[
        # Fixed: '3 - Beta' is not a valid trove classifier
        # (3 is Alpha, 4 is Beta); PyPI rejects unknown classifiers.
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries :: Python Modules',
    ],
)
|
"""
CharNullField from
http://stackoverflow.com/questions/454436/unique-fields-that-allow-nulls-in-django/1934764#1934764
"""
from django.db import models
class CharNullField(models.CharField):
    """CharField that stores NULL in the database but presents '' in Python,
    so unique-but-optional text columns work (NULLs don't collide)."""
    description = "CharField that stores NULL but returns ''"

    def to_python(self, value):
        """return django-friendly '' if NULL in DB"""
        # NOTE(review): passing through CharField instances mirrors the
        # original recipe; confirm this branch is ever hit in practice.
        if isinstance(value, models.CharField):
            return value
        # Fixed: identity comparison for None (was `value == None`, which
        # invokes __eq__ and is non-idiomatic).
        if value is None:
            return ""
        return value

    def get_db_prep_value(self, value):
        """Save NULL in DB if field is empty"""
        if value == "":
            return None
        return value  # otherwise, just pass the value
|
"""Base material for signature backends."""
from django.urls import reverse
class SignatureBackend(object):
    """Encapsulate signature workflow and integration with vendor backend.

    Here is a typical workflow:

    * :class:`~django_anysign.models.SignatureType` instance is created. It
      encapsulates the backend type and its configuration.
    * A :class:`~django_anysign.models.Signature` instance is created.
      The signature instance has a signature type attribute, hence a backend.
    * Signers are notified, by email, text or whatever. They get an hyperlink
      to the "signer view". The URL may vary depending on the signature
      backend.
    * A signer goes to the backend's "signer view" entry point: typically a
      view that integrates backend specific form to sign a document.
    * Most backends have a "notification view", for the third-party service to
      signal updates.
    * Most backends have a "signer return view", where the signer is redirected
      when he ends the signature process (whatever signature status).
    * The backend's specific workflow can be made of several views. At the
      beginning, there is a Signature instance which carries data (typically a
      document). At the end, Signature is done.
    """
    def __init__(self, name, code, url_namespace='anysign', **kwargs):
        """Configure backend.

        ``name`` is the human-readable label, ``code`` the machine name,
        ``url_namespace`` the prefix used by every URL-name helper below.
        Extra ``kwargs`` are accepted (and ignored here) for subclasses.
        """
        #: Human-readable name.
        self.name = name
        #: Machine-readable name. Should be lowercase alphanumeric only, i.e.
        #: PEP-8 compliant.
        self.code = code
        #: Namespace for URL resolution.
        self.url_namespace = url_namespace

    def send_signature(self, signature):
        """Initiate the signature process.

        At this state, the signature object has been configured.

        Typical implementation consists in sending signer URL to first signer.

        Raise ``NotImplementedError`` if the backend does not support such a
        feature.
        """
        raise NotImplementedError()

    def get_signer_url(self, signer):
        """Return URL where signer signs document.

        Raise ``NotImplementedError`` in case the backend does not support
        "signer view" feature.

        Default implementation reverses :meth:`get_signer_url_name` with
        ``signer.pk`` as argument.
        """
        return reverse(self.get_signer_url_name(), args=[signer.pk])

    def get_signer_url_name(self):
        """Return URL name where signer signs document.

        Raise ``NotImplementedError`` in case the backend does not support
        "signer view" feature.

        Default implementation returns ``anysign:signer``.
        """
        return '{ns}:signer'.format(ns=self.url_namespace)

    def get_signer_return_url(self, signer):
        """Return absolute URL where signer is redirected after signing.

        The URL must be **absolute** because it is typically used by external
        signature service: the signer uses external web UI to sign the
        document(s) and then the signature service redirects the signer to
        (this) `Django` website.

        Raise ``NotImplementedError`` in case the backend does not support
        "signer return view" feature.

        Default implementation reverses :meth:`get_signer_return_url_name`
        with ``signer.pk`` as argument.
        """
        return reverse(
            self.get_signer_return_url_name(),
            args=[signer.pk])

    def get_signer_return_url_name(self):
        """Return URL name where signer is redirected once document has been
        signed.

        Raise ``NotImplementedError`` in case the backend does not support
        "signer return view" feature.

        Default implementation returns ``anysign:signer_return``.
        """
        return '{ns}:signer_return'.format(ns=self.url_namespace)

    def get_signature_callback_url(self, signature):
        """Return URL where backend can post signature notifications.

        Raise ``NotImplementedError`` in case the backend does not support
        "signature callback url" feature.

        Default implementation reverses :meth:`get_signature_callback_url_name`
        with ``signature.pk`` as argument.
        """
        return reverse(
            self.get_signature_callback_url_name(),
            args=[signature.pk])

    def get_signature_callback_url_name(self):
        """Return URL name where backend can post signature notifications.

        Raise ``NotImplementedError`` in case the backend does not support
        "signer return view" feature.

        Default implementation returns ``anysign:signature_callback``.
        """
        return '{ns}:signature_callback'.format(ns=self.url_namespace)

    def create_signature(self, signature):
        """Register ``signature`` in backend, return updated object.

        This method is typically called by views which create
        :class:`~django_anysign.models.Signature` instances.

        If backend stores a signature object, then implementation should update
        :attr:`~django_anysign.models.Signature.signature_backend_id`.

        Base implementation does nothing: override this method in backends.
        """
        return signature
|
from wrapper import get, run
import logging
import requests
@get('/')
def f(*args, **kwargs):
    """Root route: serve a minimal static greeting page."""
    page = '<html><head></head><body><h1>Hello!</h1></body></html>'
    return page
@get('/test', ['php'])
def test_f(*args, **kwargs):
    """Write a header chunk, then return 'Test' plus the first 'php' query arg."""
    php = kwargs['arguments']['php'][0]
    handler = args[0]
    handler.write("Head")
    return 'Test{}'.format(php)
def test():
    """Start the wrapper's HTTP server on port 8888 (blocks)."""
    run(8888)
def main():
    """Placeholder entry point; currently unused."""
    pass
# Launch the demo server when executed directly.
if __name__ == '__main__':
    test()
|
def extract17LiterarycornerWordpressCom(item):
    '''
    Parser for '17literarycorner.wordpress.com'
    '''
    vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title'])
    # No chapter/volume info, or a preview post: nothing to release.
    if not (chp or vol) or "preview" in item['title'].lower():
        return None
    # (post tag, series name, translation type) triples.
    tag_rules = [
        ("King Of Hell's Genius Pampered Wife", "King Of Hell's Genius Pampered Wife", 'translated'),
        ('KOH', "King Of Hell's Genius Pampered Wife", 'translated'),
        ('Addicted to Boundlessly Pampering You', 'Addicted to Boundlessly Pampering You', 'translated'),
        ('ATBPY', 'Addicted to Boundlessly Pampering You', 'translated'),
        ('PRC', 'PRC', 'translated'),
        ('Loiterous', 'Loiterous', 'oel'),
    ]
    for tag, series, tl_type in tag_rules:
        if tag in item['tags']:
            return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    # Untagged posts: fall back to substring matches on the title.
    if item['tags'] == ['Uncategorized']:
        title_rules = [
            ('KOH Chapter ', "King Of Hell's Genius Pampered Wife", 'translated'),
            ('Tensei Shoujo no Rirekisho', 'Tensei Shoujo no Rirekisho', 'translated'),
            ('Master of Dungeon', 'Master of Dungeon', 'oel'),
        ]
        for needle, series, tl_type in title_rules:
            if needle.lower() in item['title'].lower():
                return buildReleaseMessageWithType(item, series, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type)
    return False
|
class SqliteMixin:
    """Test mixin selecting a SQLite datastore via config params."""
    # DSN for the test datastore; `sqlite://` with no path — presumably an
    # in-memory database, confirm against the framework's DSN parsing.
    config_params = {'DATASTORE': 'sqlite://'}
class OdmUtils:
    """Shared async helpers for ODM REST tests: create/fetch/update/delete
    tasks and people through self.client, asserting response shapes."""
    config_file = 'tests.odm'

    async def _create_task(self, token, subject='This is a task', person=None,
                           **data):
        """POST a task; assert the 201 payload and request-cache bookkeeping."""
        data['subject'] = subject
        if person:
            data['assigned'] = person['id']
        request = await self.client.post(self.api_url('tasks'),
                                         json=data,
                                         token=token)
        data = self.json(request.response, 201)
        self.assertIsInstance(data, dict)
        self.assertTrue('id' in data)
        self.assertEqual(data['subject'], subject)
        self.assertTrue('created' in data)
        # Exactly one new item should be recorded, matching the created id.
        self.assertEqual(len(request.cache.new_items), 1)
        self.assertEqual(request.cache.new_items[0]['id'], data['id'])
        self.assertFalse(request.cache.new_items_before_commit)
        return data

    async def _get_task(self, token, id):
        """GET a task by id; assert 200 and a dict payload carrying 'id'."""
        request = await self.client.get(
            '/tasks/{}'.format(id),
            token=token)
        response = request.response
        self.assertEqual(response.status_code, 200)
        data = self.json(response)
        self.assertIsInstance(data, dict)
        self.assertTrue('id' in data)
        return data

    async def _delete_task(self, token, id):
        """DELETE a task by id; assert 204 (no content)."""
        request = await self.client.delete(
            '/tasks/{}'.format(id),
            token=token)
        response = request.response
        self.assertEqual(response.status_code, 204)

    async def _create_person(self, token, username, name=None):
        """POST a person (name defaults to username); assert the 201 payload."""
        name = name or username
        request = await self.client.post(
            '/people',
            json={'username': username, 'name': name},
            token=token)
        data = self.json(request.response, 201)
        self.assertIsInstance(data, dict)
        self.assertTrue('id' in data)
        self.assertEqual(data['name'], name)
        return data

    async def _update_person(self, token, id, username=None, name=None):
        """PATCH a person; assert 200 and, when given, the updated name."""
        request = await self.client.patch(
            self.api_url('people/%s' % id),
            json={'username': username, 'name': name},
            token=token
        )
        data = self.json(request.response, 200)
        self.assertIsInstance(data, dict)
        self.assertTrue('id' in data)
        if name:
            self.assertEqual(data['name'], name)
        return data
|
from django.conf.urls import patterns, include, url
from django.utils.translation import ugettext_lazy as _
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.views.generic.base import TemplateView
from django.views.generic.detail import DetailView
from django.views.generic.base import RedirectView
from preferences import preferences
from jmbo.urls import v1_api
from foundry.models import Page
from foundry import views, forms
from foundry.api import ListingResource, LinkResource, NavbarResource, \
MenuResource, PageResource, BlogPostResource
admin.autodiscover()
# object_tools is optional; register its admin tools only when installed.
# NOTE(review): the urlpatterns below reference object_tools.tools.urls
# unconditionally — confirm behaviour when the import fails here.
try:
    import object_tools
    object_tools.autodiscover()
except ImportError:
    pass
# Expose the REST resources on the shared jmbo v1 API.
v1_api.register(ListingResource())
v1_api.register(LinkResource())
v1_api.register(NavbarResource())
v1_api.register(MenuResource())
v1_api.register(PageResource())
v1_api.register(BlogPostResource())
urlpatterns = patterns('',
# Pre-empt url call for comment post
url(
r'^comments/post/$',
'foundry.views.post_comment',
{},
name='comments-post-comment'
),
(r'^favicon\.ico$', RedirectView.as_view(url='/static/images/favicon.ico', permanent=False)),
(r'^googlesearch/', include('googlesearch.urls')),
(r'^jmbo/', include('jmbo.urls')),
(r'^comments/', include('django.contrib.comments.urls')),
(r'^likes/', include('likes.urls')),
(r'^object-tools/', include(object_tools.tools.urls)),
(r'^ckeditor/', include('ckeditor.urls')),
(r'^contact/', include('contact.urls')),
(r'^post/', include('post.urls')),
(r'^simple-autocomplete/', include('simple_autocomplete.urls')),
(r'^jmbo-analytics/', include('jmbo_analytics.urls')),
url(r'social-auth', include('social_auth.urls')),
(r'^admin/', include(admin.site.urls)),
url(
r'^$',
TemplateView.as_view(template_name='base.html'),
name='home'
),
url(
r'^logo/$',
TemplateView.as_view(template_name='foundry/logo.html'),
name='logo'
),
url(
r'^header/$',
TemplateView.as_view(template_name='foundry/inclusion_tags/header.html'),
name='header'
),
url(
r'^footer/$',
TemplateView.as_view(template_name='foundry/inclusion_tags/footer.html'),
name='footer'
),
# Join, login, password reset
url(
r'^join/$',
'foundry.views.join',
{},
name='join',
),
url(
r'^join-finish/$',
'foundry.views.join_finish',
{},
name='join-finish',
),
(r'^auth/', include('django.contrib.auth.urls')),
url(
r'^login/$',
'django.contrib.auth.views.login',
{'authentication_form': forms.LoginForm},
name='login',
),
url(
r'^logout/$',
'django.contrib.auth.views.logout',
{'next_page':'/'},
name='logout',
),
# Password reset with custom form
url(
r'^password_reset/$',
'django.contrib.auth.views.password_reset',
{
'password_reset_form': forms.PasswordResetForm,
},
name='password_reset',
),
# Pages defined in preferences
url(
r'^about-us/$',
views.StaticView.as_view(
content=lambda:preferences.GeneralPreferences.about_us,
title=_("About us")
),
name='about-us'
),
url(
r'^terms-and-conditions/$',
views.StaticView.as_view(
content=lambda:preferences.GeneralPreferences.terms_and_conditions,
title=_("Terms and conditions")
),
name='terms-and-conditions'
),
url(
r'^privacy-policy/$',
views.StaticView.as_view(
content=lambda:preferences.GeneralPreferences.privacy_policy,
title=_("Privacy policy")
),
name='privacy-policy'
),
# Age gateway
url(
r'^age-gateway/$',
'foundry.views.age_gateway',
{},
name='age-gateway',
),
# Listing
url(
r'^listing/(?P<slug>[\w-]+)/$',
'foundry.views.listing_detail',
{},
name='listing-detail'
),
# Listing feed
url(
r'^listing/(?P<slug>[\w-]+)/feed/$',
'foundry.feeds.listing_feed',
{},
name='listing-feed'
),
# Edit profile
url(r'^edit-profile/$',
login_required(
views.EditProfile.as_view(
form_class=forms.EditProfileForm,
template_name='foundry/edit_profile.html'
)
),
name='edit-profile'
),
# Complete profile
url(r'^complete-profile/$',
login_required(
views.EditProfile.as_view(
form_class=forms.EditProfileForm,
template_name='foundry/complete_profile.html'
)
),
name='complete-profile'
),
# Page detail
url(
r'^page/(?P<slug>[\w-]+)/$',
'foundry.views.page_detail',
{},
name='page-detail'
),
# Lorem ipsum
url(
r'^lorem-ipsum/$',
TemplateView.as_view(template_name='foundry/lorem_ipsum.html'),
name='lorem-ipsum'
),
# Search
url(
r'^search/$',
'foundry.views.search',
{},
name='search'
),
# Search results
url(
r'^search-results/$',
'foundry.views.search_results',
{},
name='search-results'
),
# Comment reply form in case of no javascript
url(
r'^comment-reply-form/$',
'foundry.views.comment_reply_form',
{},
name='comment-reply-form'
),
# Report comment
url(
r'^report-comment/(?P<comment_id>\d+)/$',
'foundry.views.report_comment',
{},
name='report-comment'
),
# Chatroom detail
url(
r'^chatroom/(?P<slug>[\w-]+)/$',
'foundry.views.chatroom_detail',
{},
name='chatroom-detail'
),
# Create blogpost
url(
r'^create-blogpost/$',
'foundry.views.create_blogpost',
{},
name='create-blogpost',
),
# Blogpost list
url(
r'^blogposts/$',
views.BlogPostObjectList.as_view(),
{'limit': 300},
name='blogpost_object_list'
),
# Blogpost detail
url(
r'^blogpost/(?P<slug>[\w-]+)/$',
views.BlogPostObjectDetail.as_view(),
{},
name='blogpost_object_detail'
),
# Member notifications
url(
r'^member-notifications/$',
login_required(views.member_notifications),
{},
name='member-notifications'
),
# User detail page
url(
r'^users/(?P<username>[=@\.\w-]+)/$',
'foundry.views.user_detail',
{},
name='user-detail'
),
# Coming soon
url(
r'^coming-soon/$',
TemplateView.as_view(template_name='foundry/coming_soon.html'),
name='coming-soon'
),
# Load new comments
url(
r'^fetch-new-comments-ajax/(?P<content_type_id>\d+)/(?P<oid>\d+)/(?P<last_comment_id>\d+)/$',
'foundry.views.fetch_new_comments_ajax',
{},
name='fetch-new-comments-ajax'
),
# Test views
url(
r'^test-plain-response/$',
'foundry.views.test_plain_response',
{},
name='test-plain-response'
),
url(
r'^test-redirect/$',
'foundry.views.test_redirect',
{},
name='test-redirect'
),
url(
r'^pages/$',
DetailView.as_view(),
{'queryset':Page.permitted.all().order_by('title')},
'page-list'
),
# Member detail page
url(
r'^members/(?P<username>[\w-]+)/$',
'foundry.views.member_detail',
{},
name='member-detail'
),
# Admin
url(
r'^admin-row-create-ajax/$',
'foundry.admin_views.row_create_ajax',
{},
name='admin-row-create-ajax',
),
url(
r'^admin-column-create-ajax/$',
'foundry.admin_views.column_create_ajax',
{},
name='admin-column-create-ajax',
),
url(
r'^admin-tile-create-ajax/$',
'foundry.admin_views.tile_create_ajax',
{},
name='admin-tile-create-ajax',
),
url(
r'^admin-row-edit-ajax/$',
'foundry.admin_views.row_edit_ajax',
{},
name='admin-row-edit-ajax',
),
url(
r'^admin-column-edit-ajax/$',
'foundry.admin_views.column_edit_ajax',
{},
name='admin-column-edit-ajax',
),
url(
r'^admin-tile-edit-ajax/$',
'foundry.admin_views.tile_edit_ajax',
{},
name='admin-tile-edit-ajax',
),
url(
r'^admin-row-delete-ajax/$',
'foundry.admin_views.row_delete_ajax',
{},
name='admin-row-delete-ajax',
),
url(
r'^admin-column-delete-ajax/$',
'foundry.admin_views.column_delete_ajax',
{},
name='admin-column-delete-ajax',
),
url(
r'^admin-tile-delete-ajax/$',
'foundry.admin_views.tile_delete_ajax',
{},
name='admin-tile-delete-ajax',
),
url(
r'^admin-persist-sort-ajax/$',
'foundry.admin_views.persist_sort_ajax',
{},
name='admin-persist-sort-ajax',
),
url(
r'^admin-remove-comment/(?P<comment_id>\d+)/$',
'foundry.admin_views.remove_comment',
{},
name='admin-remove-comment'
),
url(
r'^admin-allow-comment/(?P<comment_id>\d+)/$',
'foundry.admin_views.allow_comment',
{},
name='admin-allow-comment'
),
)
# Conditionally mounted URLs: each optional app contributes its patterns only
# when it is enabled in INSTALLED_APPS, so a project can run with any subset
# of the jmbo apps installed.
if "banner" in settings.INSTALLED_APPS:
    urlpatterns += patterns('', (r'^banner/', include('banner.urls')))
if "chart" in settings.INSTALLED_APPS:
    urlpatterns += patterns('', (r'^chart/', include('chart.urls')))
if "competition" in settings.INSTALLED_APPS:
    urlpatterns += patterns('', (r'^competition/', include('competition.urls')))
if "downloads" in settings.INSTALLED_APPS:
    urlpatterns += patterns('', (r'^downloads/', include('downloads.urls')))
if "friends" in settings.INSTALLED_APPS:
    # Friends has a fancy member detail page and needs to resolve first
    urlpatterns.insert(1, url(r'^friends/', include('friends.urls')))
if "gallery" in settings.INSTALLED_APPS:
    urlpatterns += patterns('',
        (r'^gallery/', include('gallery.urls')),
        (r'^admin/', include('gallery.admin_urls')),
    )
if "jmbo_calendar" in settings.INSTALLED_APPS:
    urlpatterns += patterns('', (r'^calendar/', include('jmbo_calendar.urls')))
if "jmbo_twitter" in settings.INSTALLED_APPS:
    urlpatterns += patterns('',
        # NOTE(review): no trailing slash here, unlike every other prefix --
        # confirm whether r'^jmbo_twitter/' was intended.
        (r'^jmbo_twitter', include('jmbo_twitter.urls')),
        (r'^admin/', include('jmbo_twitter.admin_urls')),
    )
if "music" in settings.INSTALLED_APPS:
    urlpatterns += patterns('', (r'^music/', include('music.urls')))
if "poll" in settings.INSTALLED_APPS:
    urlpatterns += patterns('', (r'^poll/', include('poll.urls')))
if "show" in settings.INSTALLED_APPS:
    urlpatterns += patterns('', (r'^show/', include('show.urls')))
if "video" in settings.INSTALLED_APPS:
    urlpatterns += patterns('', (r'^video/', include('video.urls')))
if "jmbo_sitemap" in settings.INSTALLED_APPS:
    from jmbo_sitemap import sitemaps
    from jmbo_sitemap.views import sitemap, SitemapHTMLView
    urlpatterns += patterns(
        '',
        # Unidentified issue with Jmbo URLPatternItem class means
        # (r'^', include('jmbo_sitemap.urls')) causes error. Use a workaround.
        url(
            r'^sitemap\.xml$',
            sitemap,
            {'sitemaps': sitemaps},
            name='sitemap'
        ),
        url(
            r'^sitemap/$',
            SitemapHTMLView.as_view(),
            name='html-sitemap'
        ),
    )

urlpatterns += patterns('', (r'^api/', include(v1_api.urls)))
urlpatterns += staticfiles_urlpatterns()
# BUG FIX: the raw-string prefix was inside the quotes ('r^/'), producing a
# pattern that tried to match a literal "r" followed by a mid-pattern anchor
# and so could never match. Corrected to a raw string.
# NOTE(review): Django strips the leading slash before resolving, so even
# r'^/' may never match -- confirm whether r'^' was the real intent.
urlpatterns += patterns('', (r'^/', include('django.contrib.flatpages.urls')))

# Custom server-error handler (module path string, per old-style Django).
handler500 = 'foundry.views.server_error'

if settings.DEBUG:
    # Serve uploaded media directly -- development only; production should
    # serve MEDIA_ROOT from the web server.
    urlpatterns += patterns('',
        (r'^media/(?P<path>.*)$', 'django.views.static.serve',
            {'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
    )
|
"""An implementation of the ReplicationConfig proto interface."""
from __future__ import print_function
import json
import os
import shutil
import sys
from chromite.api.gen.config import replication_config_pb2
from chromite.lib import constants
from chromite.lib import cros_logging as logging
from chromite.lib import osutils
from chromite.utils import field_mask_util
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
def _ValidateFileReplicationRule(rule):
  """Raises an error if a FileReplicationRule is invalid.

  Checks that the file type and replication type are a legal combination
  (JSON files must be filtered, other files must be copied verbatim), that
  destination_fields is present exactly when filtering, and that both paths
  are relative.

  Args:
    rule: (FileReplicationRule) The rule to validate.
  """
  pb = replication_config_pb2

  # A JSON source must be replicated by filtering; any other known file type
  # must be replicated by a plain copy.
  if rule.file_type == pb.FILE_TYPE_JSON:
    if rule.replication_type != pb.REPLICATION_TYPE_FILTER:
      raise ValueError(
          'Rule for JSON source %s must use REPLICATION_TYPE_FILTER.' %
          rule.source_path)
  elif rule.file_type == pb.FILE_TYPE_OTHER:
    if rule.replication_type != pb.REPLICATION_TYPE_COPY:
      raise ValueError('Rule for source %s must use REPLICATION_TYPE_COPY.' %
                       rule.source_path)
  else:
    raise NotImplementedError('Replicate not implemented for file type %s' %
                              rule.file_type)

  # destination_fields only makes sense when filtering: required for
  # REPLICATION_TYPE_FILTER, forbidden for REPLICATION_TYPE_COPY.
  has_destination_fields = bool(rule.destination_fields.paths)
  if rule.replication_type == pb.REPLICATION_TYPE_COPY:
    if has_destination_fields:
      raise ValueError(
          'Rule with REPLICATION_TYPE_COPY cannot use destination_fields.')
  elif rule.replication_type == pb.REPLICATION_TYPE_FILTER:
    if not has_destination_fields:
      raise ValueError(
          'Rule with REPLICATION_TYPE_FILTER must use destination_fields.')
  else:
    raise NotImplementedError(
        'Replicate not implemented for replication type %s' %
        rule.replication_type)

  # Both endpoints are interpreted relative to the source root.
  if os.path.isabs(rule.source_path) or os.path.isabs(rule.destination_path):
    raise ValueError(
        'Only paths relative to the source root are allowed. In rule: %s' %
        rule)
def _ApplyStringReplacementRules(destination_path, rules):
"""Read the file at destination path, apply rules, and write a new file.
Args:
destination_path: (str) Path to the destination file to read. The new file
will also be written at this path.
rules: (list[StringReplacementRule]) Rules to apply. Must not be empty.
"""
assert rules
with open(destination_path, 'r') as f:
dst_data = f.read()
for string_replacement_rule in rules:
dst_data = dst_data.replace(string_replacement_rule.before,
string_replacement_rule.after)
with open(destination_path, 'w') as f:
f.write(dst_data)
def Replicate(replication_config):
  """Run the replication described in replication_config.

  Validates every rule up front, then processes each rule in order: JSON
  sources are filtered through their field mask and rewritten; all other
  sources are copied verbatim. Optional string replacements are applied to
  the destination file afterwards.

  Args:
    replication_config: (ReplicationConfig) Describes the replication to run.
  """
  # Validate all rules before any of them are run, to decrease chance of ending
  # with a partial replication.
  for rule in replication_config.file_replication_rules:
    _ValidateFileReplicationRule(rule)
  for rule in replication_config.file_replication_rules:
    logging.info('Processing FileReplicationRule: %s', rule)
    # Rule paths are relative (enforced by validation above); resolve them
    # against the checkout root.
    src = os.path.join(constants.SOURCE_ROOT, rule.source_path)
    dst = os.path.join(constants.SOURCE_ROOT, rule.destination_path)
    # Create the destination directory tree if it does not exist yet.
    osutils.SafeMakedirs(os.path.dirname(dst))
    if rule.file_type == replication_config_pb2.FILE_TYPE_JSON:
      # Validation guarantees JSON rules are FILTER rules with a mask.
      assert (rule.replication_type ==
              replication_config_pb2.REPLICATION_TYPE_FILTER)
      assert rule.destination_fields.paths
      with open(src, 'r') as f:
        source_json = json.load(f)
      try:
        source_device_configs = source_json['chromeos']['configs']
      except KeyError:
        raise NotImplementedError(
            ('Currently only ChromeOS Configs are supported (expected file %s '
             'to have a list at "$.chromeos.configs")') % src)
      # Filter each device config down to the fields named in the mask.
      destination_device_configs = []
      for source_device_config in source_device_configs:
        destination_device_configs.append(
            field_mask_util.CreateFilteredDict(rule.destination_fields,
                                               source_device_config))
      destination_json = {'chromeos': {'configs': destination_device_configs}}
      logging.info('Writing filtered JSON source to %s', dst)
      with open(dst, 'w') as f:
        # Use the print function, so the file ends in a newline.
        print(
            json.dumps(
                destination_json,
                sort_keys=True,
                indent=2,
                separators=(',', ': ')),
            file=f)
    else:
      # Validation guarantees the only other case is OTHER + COPY.
      assert rule.file_type == replication_config_pb2.FILE_TYPE_OTHER
      assert (
          rule.replication_type == replication_config_pb2.REPLICATION_TYPE_COPY)
      assert not rule.destination_fields.paths
      logging.info('Copying full file from %s to %s', src, dst)
      # copy2 preserves file metadata (timestamps, permissions).
      shutil.copy2(src, dst)
    # Post-process the written destination file, regardless of file type.
    if rule.string_replacement_rules:
      _ApplyStringReplacementRules(dst, rule.string_replacement_rules)
|
import math
from sympy.mpmath import *
def test_bessel():
    """Spot-check Bessel functions J, Y, I, K against reference values."""
    mp.dps = 15
    # besselj at real points (j0/j1 shortcuts included).
    assert j0(1).ae(0.765197686557966551)
    assert j0(pi).ae(-0.304242177644093864)
    assert j0(1000).ae(0.0247866861524201746)
    assert j0(-25).ae(0.0962667832759581162)
    assert j1(1).ae(0.440050585744933516)
    assert j1(pi).ae(0.284615343179752757)
    assert j1(1000).ae(0.00472831190708952392)
    assert j1(-25).ae(0.125350249580289905)
    assert besselj(5,1).ae(0.000249757730211234431)
    assert besselj(5,pi).ae(0.0521411843671184747)
    assert besselj(5,1000).ae(0.00502540694523318607)
    assert besselj(5,-25).ae(0.0660079953984229934)
    assert besselj(-3,2).ae(-0.128943249474402051)
    assert besselj(-4,2).ae(0.0339957198075684341)
    # Complex and fractional orders/arguments.
    assert besselj(3,3+2j).ae(0.424718794929639595942 + 0.625665327745785804812j)
    assert besselj(0.25,4).ae(-0.374760630804249715)
    assert besselj(1+2j,3+4j).ae(0.319247428741872131 - 0.669557748880365678j)
    # Very large argument (scaled to keep the reference value readable).
    assert (besselj(3, 10**10) * 10**5).ae(0.76765081748139204023)
    # bessely at zero: the sign of the limit depends on the order.
    assert bessely(-0.5, 0) == 0
    assert bessely(0.5, 0) == -inf
    assert bessely(1.5, 0) == -inf
    assert bessely(0,0) == -inf
    assert bessely(-0.4, 0) == -inf
    assert bessely(-0.6, 0) == inf
    assert bessely(-1, 0) == inf
    assert bessely(-1.4, 0) == inf
    assert bessely(-1.6, 0) == -inf
    assert bessely(-1, 0) == inf  # NOTE(review): duplicate of an assertion above
    assert bessely(-2, 0) == -inf
    assert bessely(-3, 0) == inf
    assert bessely(0.5, 0) == -inf  # NOTE(review): duplicate of an assertion above
    assert bessely(1, 0) == -inf
    assert bessely(1.5, 0) == -inf  # NOTE(review): duplicate of an assertion above
    assert bessely(2, 0) == -inf
    assert bessely(2.5, 0) == -inf
    assert bessely(3, 0) == -inf
    # bessely at finite real and complex points.
    assert bessely(0,0.5).ae(-0.44451873350670655715)
    assert bessely(1,0.5).ae(-1.4714723926702430692)
    assert bessely(-1,0.5).ae(1.4714723926702430692)
    assert bessely(3.5,0.5).ae(-138.86400867242488443)
    assert bessely(0,3+4j).ae(4.6047596915010138655-8.8110771408232264208j)
    assert bessely(0,j).ae(-0.26803248203398854876+1.26606587775200833560j)
    assert (bessely(3, 10**10) * 10**5).ae(0.21755917537013204058)
    # besseli at zero and at finite points.
    assert besseli(0,0) == 1
    assert besseli(1,0) == 0
    assert besseli(2,0) == 0
    assert besseli(-1,0) == 0
    assert besseli(-2,0) == 0
    assert besseli(0,0.5).ae(1.0634833707413235193)
    assert besseli(1,0.5).ae(0.25789430539089631636)
    assert besseli(-1,0.5).ae(0.25789430539089631636)
    assert besseli(3.5,0.5).ae(0.00068103597085793815863)
    assert besseli(0,3+4j).ae(-3.3924877882755196097-1.3239458916287264815j)
    assert besseli(0,j).ae(besselj(0,1))
    # Huge argument: the explicit power of 10 rescales the result.
    assert (besseli(3, 10**10) * mpf(10)**(-4342944813)).ae(4.2996028505491271875)
    # besselk diverges at zero for every order.
    assert besselk(0,0) == inf
    assert besselk(1,0) == inf
    assert besselk(2,0) == inf
    assert besselk(-1,0) == inf
    assert besselk(-2,0) == inf
    assert besselk(0,0.5).ae(0.92441907122766586178)
    assert besselk(1,0.5).ae(1.6564411200033008937)
    assert besselk(-1,0.5).ae(1.6564411200033008937)
    assert besselk(3.5,0.5).ae(207.48418747548460607)
    assert besselk(0,3+4j).ae(-0.007239051213570155013+0.026510418350267677215j)
    assert besselk(0,j).ae(-0.13863371520405399968-1.20196971531720649914j)
    assert (besselk(3, 10**10) * mpf(10)**4342944824).ae(1.1628981033356187851)
def test_hankel():
    """Spot-check Hankel functions H1 and H2 against reference values."""
    mp.dps = 15
    # (order, argument, expected) triples for hankel1.
    for order, arg, expected in [
        (0, 0.5, 0.93846980724081290423-0.44451873350670655715j),
        (1, 0.5, 0.2422684576748738864-1.4714723926702430692j),
        (-1, 0.5, -0.2422684576748738864+1.4714723926702430692j),
        (1.5, 0.5, 0.0917016996256513026-2.5214655504213378514j),
        (1.5, 3+4j, 0.0066806866476728165382-0.0036684231610839127106j),
    ]:
        assert hankel1(order, arg).ae(expected)
    # (order, argument, expected) triples for hankel2.
    for order, arg, expected in [
        (0, 0.5, 0.93846980724081290423+0.44451873350670655715j),
        (1, 0.5, 0.2422684576748738864+1.4714723926702430692j),
        (-1, 0.5, -0.2422684576748738864-1.4714723926702430692j),
        (1.5, 0.5, 0.0917016996256513026+2.5214655504213378514j),
        (1.5, 3+4j, 14.783528526098567526-7.397390270853446512j),
    ]:
        assert hankel2(order, arg).ae(expected)
def test_struve():
    """Spot-check Struve functions H and L against reference values."""
    mp.dps = 15
    # (function, order, argument, expected) tuples.
    for fn, order, arg, expected in [
        (struveh, 2, 3, 0.74238666967748318564),
        (struveh, -2.5, 3, 0.41271003220971599344),
        (struvel, 2, 3, 1.7476573277362782744),
        (struvel, -2.5, 3, 1.5153394466819651377),
    ]:
        assert fn(order, arg).ae(expected)
def test_whittaker():
    """Spot-check Whittaker functions M and W against reference values."""
    mp.dps = 15
    # Both functions evaluated at the same point (k=2, m=3, z=4).
    for fn, expected in [(whitm, 49.753745589025246591),
                         (whitw, 14.111656223052932215)]:
        assert fn(2, 3, 4).ae(expected)
def test_kelvin():
    """Spot-check Kelvin functions ber/bei/ker/kei against references."""
    mp.dps = 15
    # (function, order, argument, expected) tuples.
    for fn, order, arg, expected in [
        (ber, 2, 3, 0.80836846563726819091),
        (ber, 3, 4, -0.28262680167242600233),
        (ber, -3, 2, -0.085611448496796363669),
        (bei, 2, 3, -0.89102236377977331571),
        (bei, -3, 2, -0.14420994155731828415),
        (ker, 2, 3, 0.12839126695733458928),
        (ker, -3, 2, -0.29802153400559142783),
        (ker, 0.5, 3, -0.085662378535217097524),
        (kei, 2, 3, 0.036804426134164634000),
        (kei, -3, 2, 0.88682069845786731114),
        (kei, 0.5, 3, 0.013633041571314302948),
    ]:
        assert fn(order, arg).ae(expected)
def test_hyper_misc():
    """Miscellaneous hypergeometric evaluations, including degenerate cases."""
    mp.dps = 15
    # pFq evaluated at argument 0 gives 1.
    assert hyp0f1(1,0) == 1
    assert hyp1f1(1,2,0) == 1
    assert hyp1f2(1,2,3,0) == 1
    assert hyp2f1(1,2,3,0) == 1
    assert hyp2f2(1,2,3,4,0) == 1
    assert hyp2f3(1,2,3,4,5,0) == 1
    # Degenerate case: 0F0
    assert hyper([],[],0) == 1
    assert hyper([],[],-2).ae(exp(-2))
    # Degenerate case: 1F0
    assert hyper([2],[],1.5) == 4
    # Rational-parameter 2F1 cases with simple exact values.
    assert hyp2f1((1,3),(2,3),(5,6),mpf(27)/32).ae(1.6)
    assert hyp2f1((1,4),(1,2),(3,4),mpf(80)/81).ae(1.8)
    assert hyp2f1((2,3),(1,1),(3,2),(2+j)/3).ae(1.327531603558679093+0.439585080092769253j)
    # Higher-precision complex case; dps is restored afterwards.
    mp.dps = 25
    v = mpc('1.2282306665029814734863026', '-0.1225033830118305184672133')
    assert hyper([(3,4),2+j,1],[1,5,j/3],mpf(1)/5+j/8).ae(v)
    mp.dps = 15
def test_elliptic_integrals():
    """Test complete elliptic integrals K(m) and E(m) across the m-plane."""
    mp.dps = 15
    assert ellipk(0).ae(pi/2)
    assert ellipk(0.5).ae(gamma(0.25)**2/(4*sqrt(pi)))
    assert ellipk(1) == inf
    assert ellipk(1+0j) == inf
    assert ellipk(-1).ae('1.3110287771460599052')
    assert ellipk(-2).ae('1.1714200841467698589')
    # Real m < 1 must give a real (mpf) result, not a complex one.
    assert isinstance(ellipk(-2), mpf)
    assert isinstance(ellipe(-2), mpf)
    assert ellipk(-50).ae('0.47103424540873331679')
    # Arguments just below/above the singularity at m = 1; constructed at
    # higher precision so the perturbations are represented exactly.
    mp.dps = 30
    n1 = +fraction(99999,100000)
    n2 = +fraction(100001,100000)
    mp.dps = 15
    assert ellipk(n1).ae('7.1427724505817781901')
    assert ellipk(n2).ae(mpc('7.1427417367963090109', '-1.5707923998261688019'))
    assert ellipe(n1).ae('1.0000332138990829170')
    v = ellipe(n2)
    assert v.real.ae('0.999966786328145474069137')
    assert (v.imag*10**6).ae('7.853952181727432')
    assert ellipk(2).ae(mpc('1.3110287771460599052', '-1.3110287771460599052'))
    assert ellipk(50).ae(mpc('0.22326753950210985451', '-0.47434723226254522087'))
    # Conjugate complex arguments should give conjugate results.
    assert ellipk(3+4j).ae(mpc('0.91119556380496500866', '0.63133428324134524388'))
    assert ellipk(3-4j).ae(mpc('0.91119556380496500866', '-0.63133428324134524388'))
    assert ellipk(-3+4j).ae(mpc('0.95357894880405122483', '0.23093044503746114444'))
    assert ellipk(-3-4j).ae(mpc('0.95357894880405122483', '-0.23093044503746114444'))
    # Special values: nan propagation and limits at infinity.
    assert isnan(ellipk(nan))
    assert isnan(ellipe(nan))
    assert ellipk(inf) == 0
    assert isinstance(ellipk(inf), mpc)
    assert ellipk(-inf) == 0
    assert ellipk(1+0j) == inf  # NOTE(review): duplicate of an assertion above
    assert ellipe(0).ae(pi/2)
    assert ellipe(0.5).ae(pi**(mpf(3)/2)/gamma(0.25)**2 +gamma(0.25)**2/(8*sqrt(pi)))
    assert ellipe(1) == 1
    assert ellipe(1+0j) == 1
    assert ellipe(inf) == mpc(0,inf)
    assert ellipe(-inf) == inf
    assert ellipe(3+4j).ae(1.4995535209333469543-1.5778790079127582745j)
    assert ellipe(3-4j).ae(1.4995535209333469543+1.5778790079127582745j)
    assert ellipe(-3+4j).ae(2.5804237855343377803-0.8306096791000413778j)
    assert ellipe(-3-4j).ae(2.5804237855343377803+0.8306096791000413778j)
    assert ellipe(2).ae(0.59907011736779610372+0.59907011736779610372j)
    # Extremely tiny arguments must not lose the pi/2 limit.
    assert ellipe('1e-1000000000').ae(pi/2)
    assert ellipk('1e-1000000000').ae(pi/2)
    assert ellipe(-pi).ae(2.4535865983838923)
    # High-precision checks; dps is restored at the end.
    mp.dps = 50
    assert ellipk(1/pi).ae('1.724756270009501831744438120951614673874904182624739673')
    assert ellipe(1/pi).ae('1.437129808135123030101542922290970050337425479058225712')
    assert ellipk(-10*pi).ae('0.5519067523886233967683646782286965823151896970015484512')
    assert ellipe(-10*pi).ae('5.926192483740483797854383268707108012328213431657645509')
    v = ellipk(pi)
    assert v.real.ae('0.973089521698042334840454592642137667227167622330325225')
    assert v.imag.ae('-1.156151296372835303836814390793087600271609993858798016')
    v = ellipe(pi)
    assert v.real.ae('0.4632848917264710404078033487934663562998345622611263332')
    assert v.imag.ae('1.0637961621753130852473300451583414489944099504180510966')
    mp.dps = 15
def test_exp_integrals():
    """Test exponential/trig integrals, Fresnel and Airy functions."""
    mp.dps = 15
    x = +e
    z = e + sqrt(3)*j
    # The whole battery at a real point x = e.
    assert ei(x).ae(8.21168165538361560)
    assert li(x).ae(1.89511781635593676)
    assert si(x).ae(1.82104026914756705)
    assert ci(x).ae(0.213958001340379779)
    assert shi(x).ae(4.11520706247846193)
    assert chi(x).ae(4.09647459290515367)
    assert fresnels(x).ae(0.437189718149787643)
    assert fresnelc(x).ae(0.401777759590243012)
    assert airyai(x).ae(0.0108502401568586681)
    assert airybi(x).ae(8.98245748585468627)
    # The same battery at a complex point z = e + sqrt(3)*i.
    assert ei(z).ae(3.72597969491314951 + 7.34213212314224421j)
    assert li(z).ae(2.28662658112562502 + 1.50427225297269364j)
    assert si(z).ae(2.48122029237669054 + 0.12684703275254834j)
    assert ci(z).ae(0.169255590269456633 - 0.892020751420780353j)
    assert shi(z).ae(1.85810366559344468 + 3.66435842914920263j)
    assert chi(z).ae(1.86787602931970484 + 3.67777369399304159j)
    assert fresnels(z/3).ae(0.034534397197008182 + 0.754859844188218737j)
    assert fresnelc(z/3).ae(1.261581645990027372 + 0.417949198775061893j)
    assert airyai(z).ae(-0.0162552579839056062 - 0.0018045715700210556j)
    assert airybi(z).ae(-4.98856113282883371 + 2.08558537872180623j)
    # Special values and limits at 0 and +/-inf.
    assert li(0) == 0.0
    assert li(1) == -inf
    assert li(inf) == inf
    assert isinstance(li(0.7), mpf)
    assert si(inf).ae(pi/2)
    assert si(-inf).ae(-pi/2)
    assert ci(inf) == 0
    assert ci(0) == -inf
    assert isinstance(ei(-0.7), mpf)
    assert airyai(inf) == 0
    assert airybi(inf) == inf
    assert airyai(-inf) == 0
    assert airybi(-inf) == 0
    assert fresnels(inf) == 0.5
    assert fresnelc(inf) == 0.5
    assert fresnels(-inf) == -0.5
    assert fresnelc(-inf) == -0.5
    assert shi(0) == 0
    assert shi(inf) == inf
    assert shi(-inf) == -inf
    assert chi(0) == -inf
    assert chi(inf) == inf
def test_ei():
    """Test the exponential integral Ei, including asymptotic regimes."""
    mp.dps = 15
    # Limits at 0 and +/-inf.
    assert ei(0) == -inf
    assert ei(inf) == inf
    assert ei(-inf) == -0.0
    assert ei(20+70j).ae(6.1041351911152984397e6 - 2.7324109310519928872e6j)
    # tests for the asymptotic expansion
    # values checked with Mathematica ExpIntegralEi
    mp.dps = 50
    r = ei(20000)
    s = '3.8781962825045010930273870085501819470698476975019e+8681'
    # Exact string comparison pins all 50 digits.
    assert str(r) == s
    r = ei(-200)
    s = '-6.8852261063076355977108174824557929738368086933303e-90'
    assert str(r) == s
    r =ei(20000 + 10*j)
    sre = '-3.255138234032069402493850638874410725961401274106e+8681'
    sim = '-2.1081929993474403520785942429469187647767369645423e+8681'
    assert str(r.real) == sre and str(r.imag) == sim
    mp.dps = 15
    # More asymptotic expansions
    assert chi(-10**6+100j).ae('1.3077239389562548386e+434288 + 7.6808956999707408158e+434287j')
    assert shi(-10**6+100j).ae('-1.3077239389562548386e+434288 - 7.6808956999707408158e+434287j')
    mp.dps = 15
    # Purely imaginary and large-imaginary arguments; fmul(..., exact=True)
    # builds 10**20 * i without rounding.
    assert ei(10j).ae(-0.0454564330044553726+3.2291439210137706686j)
    assert ei(100j).ae(-0.0051488251426104921+3.1330217936839529126j)
    u = ei(fmul(10**20, j, exact=True))
    assert u.real.ae(-6.4525128526578084421345e-21, abs_eps=0, rel_eps=8*eps)
    assert u.imag.ae(pi)
    assert ei(-10j).ae(-0.0454564330044553726-3.2291439210137706686j)
    assert ei(-100j).ae(-0.0051488251426104921-3.1330217936839529126j)
    u = ei(fmul(-10**20, j, exact=True))
    assert u.real.ae(-6.4525128526578084421345e-21, abs_eps=0, rel_eps=8*eps)
    assert u.imag.ae(-pi)
    assert ei(10+10j).ae(-1576.1504265768517448+436.9192317011328140j)
    u = ei(-10+10j)
    assert u.real.ae(7.6698978415553488362543e-7, abs_eps=0, rel_eps=8*eps)
    assert u.imag.ae(3.141595611735621062025)
def test_e1():
    """Test the exponential integral E1 at limits and imaginary arguments."""
    mp.dps = 15
    # Limits at 0 and +/-inf; the -inf limit carries a -pi*i branch term.
    assert e1(0) == inf
    assert e1(inf) == 0
    assert e1(-inf) == mpc(-inf, -pi)
    # Conjugate imaginary arguments give conjugate results.
    assert e1(10j).ae(0.045456433004455372635 + 0.087551267423977430100j)
    assert e1(100j).ae(0.0051488251426104921444 - 0.0085708599058403258790j)
    assert e1(fmul(10**20, j, exact=True)).ae(6.4525128526578084421e-21 - 7.6397040444172830039e-21j, abs_eps=0, rel_eps=8*eps)
    assert e1(-10j).ae(0.045456433004455372635 - 0.087551267423977430100j)
    assert e1(-100j).ae(0.0051488251426104921444 + 0.0085708599058403258790j)
    assert e1(fmul(-10**20, j, exact=True)).ae(6.4525128526578084421e-21 + 7.6397040444172830039e-21j, abs_eps=0, rel_eps=8*eps)
def test_expint():
    """Test the generalized exponential integral E_n(z)."""
    mp.dps = 15
    assert expint(0,0) == inf
    assert expint(0,1).ae(1/e)
    assert expint(0,1.5).ae(2/exp(1.5)/3)
    # E_1 relates to Ei by reflection.
    assert expint(1,1).ae(-ei(-1))
    # E_n(0) = 1/(n-1) for n > 1.
    assert expint(2,0).ae(1)
    assert expint(3,0).ae(1/2.)
    assert expint(4,0).ae(1/3.)
    # Negative orders and negative arguments.
    assert expint(-2, 0.5).ae(26/sqrt(e))
    assert expint(-1,-1) == 0
    assert expint(-2,-1).ae(-e)
    assert expint(5.5, 0).ae(2/9.)
    assert expint(2.00000001,0).ae(100000000./100000001)
    assert expint(2+3j,4-j).ae(0.0023461179581675065414+0.0020395540604713669262j)
    # String arguments exercise exact decimal inputs near singular limits.
    assert expint('1.01', '1e-1000').ae(99.9999999899412802)
    assert expint('1.000000000001', 3.5).ae(0.00697013985754701819446)
    assert expint(2,3).ae(3*ei(-3)+exp(-3))
    assert (expint(10,20)*10**10).ae(0.694439055541231353)
    assert expint(3,inf) == 0
    assert expint(3.2,inf) == 0
    assert expint(3.2+2j,inf) == 0
    assert expint(1,3j).ae(-0.11962978600800032763 + 0.27785620120457163717j)
    assert expint(1,3).ae(0.013048381094197037413)
    # Negative real argument picks up a -pi*i branch term.
    assert expint(1,-3).ae(-ei(3)-pi*j)
    #assert expint(3) == expint(1,3)
    assert expint(1,-20).ae(-25615652.66405658882 - 3.1415926535897932385j)
    assert expint(1000000,0).ae(1./999999)
    assert expint(0,2+3j).ae(-0.025019798357114678171 + 0.027980439405104419040j)
    assert expint(-1,2+3j).ae(-0.022411973626262070419 + 0.038058922011377716932j)
    assert expint(-1.5,0) == inf
def test_trig_integrals():
    """Test Si/Ci/Shi/Chi in both the mp (arbitrary) and fp (double) contexts."""
    # High-precision checks near zero and for huge arguments.
    mp.dps = 30
    assert si(mpf(1)/1000000).ae('0.000000999999999999944444444444446111')
    assert ci(mpf(1)/1000000).ae('-13.2382948930629912435014366276')
    assert si(10**10).ae('1.5707963267075846569685111517747537')
    assert ci(10**10).ae('-4.87506025174822653785729773959e-11')
    assert si(10**100).ae(pi/2)
    assert (ci(10**100)*10**100).ae('-0.372376123661276688262086695553')
    # si is odd; ci picks up a pi*i term on the negative real axis.
    assert si(-3) == -si(3)
    assert ci(-3).ae(ci(3) + pi*j)
    # Test complex structure
    mp.dps = 15
    assert mp.ci(50).ae(-0.0056283863241163054402)
    assert mp.ci(50+2j).ae(-0.018378282946133067149+0.070352808023688336193j)
    assert mp.ci(20j).ae(1.28078263320282943611e7+1.5707963267949j)
    assert mp.ci(-2+20j).ae(-4.050116856873293505e6+1.207476188206989909e7j)
    assert mp.ci(-50+2j).ae(-0.0183782829461330671+3.0712398455661049023j)
    assert mp.ci(-50).ae(-0.0056283863241163054+3.1415926535897932385j)
    assert mp.ci(-50-2j).ae(-0.0183782829461330671-3.0712398455661049023j)
    assert mp.ci(-2-20j).ae(-4.050116856873293505e6-1.207476188206989909e7j)
    assert mp.ci(-20j).ae(1.28078263320282943611e7-1.5707963267949j)
    assert mp.ci(50-2j).ae(-0.018378282946133067149-0.070352808023688336193j)
    assert mp.si(50).ae(1.5516170724859358947)
    assert mp.si(50+2j).ae(1.497884414277228461-0.017515007378437448j)
    assert mp.si(20j).ae(1.2807826332028294459e7j)
    assert mp.si(-2+20j).ae(-1.20747603112735722103e7-4.050116856873293554e6j)
    assert mp.si(-50+2j).ae(-1.497884414277228461-0.017515007378437448j)
    assert mp.si(-50).ae(-1.5516170724859358947)
    assert mp.si(-50-2j).ae(-1.497884414277228461+0.017515007378437448j)
    assert mp.si(-2-20j).ae(-1.20747603112735722103e7+4.050116856873293554e6j)
    assert mp.si(-20j).ae(-1.2807826332028294459e7j)
    assert mp.si(50-2j).ae(1.497884414277228461+0.017515007378437448j)
    assert mp.chi(50j).ae(-0.0056283863241163054+1.5707963267948966192j)
    assert mp.chi(-2+50j).ae(-0.0183782829461330671+1.6411491348185849554j)
    assert mp.chi(-20).ae(1.28078263320282943611e7+3.1415926535898j)
    assert mp.chi(-20-2j).ae(-4.050116856873293505e6+1.20747571696809187053e7j)
    assert mp.chi(-2-50j).ae(-0.0183782829461330671-1.6411491348185849554j)
    assert mp.chi(-50j).ae(-0.0056283863241163054-1.5707963267948966192j)
    assert mp.chi(2-50j).ae(-0.0183782829461330671-1.500443518771208283j)
    assert mp.chi(20-2j).ae(-4.050116856873293505e6-1.20747603112735722951e7j)
    assert mp.chi(20).ae(1.2807826332028294361e7)
    assert mp.chi(2+50j).ae(-0.0183782829461330671+1.500443518771208283j)
    assert mp.shi(50j).ae(1.5516170724859358947j)
    assert mp.shi(-2+50j).ae(0.017515007378437448+1.497884414277228461j)
    assert mp.shi(-20).ae(-1.2807826332028294459e7)
    assert mp.shi(-20-2j).ae(4.050116856873293554e6-1.20747603112735722103e7j)
    assert mp.shi(-2-50j).ae(0.017515007378437448-1.497884414277228461j)
    assert mp.shi(-50j).ae(-1.5516170724859358947j)
    assert mp.shi(2-50j).ae(-0.017515007378437448-1.497884414277228461j)
    assert mp.shi(20-2j).ae(-4.050116856873293554e6-1.20747603112735722103e7j)
    assert mp.shi(20).ae(1.2807826332028294459e7)
    assert mp.shi(2+50j).ae(-0.017515007378437448+1.497884414277228461j)
    # Loose relative-error comparison helper for the fixed-precision (fp)
    # context below, where mpf's .ae() is not available.
    def ae(x,y,tol=1e-12):
        return abs(x-y) <= abs(y)*tol
    # Repeat the same battery of values in the fp (machine double) context.
    assert fp.ci(fp.inf) == 0
    assert ae(fp.ci(fp.ninf), fp.pi*1j)
    assert ae(fp.si(fp.inf), fp.pi/2)
    assert ae(fp.si(fp.ninf), -fp.pi/2)
    assert fp.si(0) == 0
    assert ae(fp.ci(50), -0.0056283863241163054402)
    assert ae(fp.ci(50+2j), -0.018378282946133067149+0.070352808023688336193j)
    assert ae(fp.ci(20j), 1.28078263320282943611e7+1.5707963267949j)
    assert ae(fp.ci(-2+20j), -4.050116856873293505e6+1.207476188206989909e7j)
    assert ae(fp.ci(-50+2j), -0.0183782829461330671+3.0712398455661049023j)
    assert ae(fp.ci(-50), -0.0056283863241163054+3.1415926535897932385j)
    assert ae(fp.ci(-50-2j), -0.0183782829461330671-3.0712398455661049023j)
    assert ae(fp.ci(-2-20j), -4.050116856873293505e6-1.207476188206989909e7j)
    assert ae(fp.ci(-20j), 1.28078263320282943611e7-1.5707963267949j)
    assert ae(fp.ci(50-2j), -0.018378282946133067149-0.070352808023688336193j)
    assert ae(fp.si(50), 1.5516170724859358947)
    assert ae(fp.si(50+2j), 1.497884414277228461-0.017515007378437448j)
    assert ae(fp.si(20j), 1.2807826332028294459e7j)
    assert ae(fp.si(-2+20j), -1.20747603112735722103e7-4.050116856873293554e6j)
    assert ae(fp.si(-50+2j), -1.497884414277228461-0.017515007378437448j)
    assert ae(fp.si(-50), -1.5516170724859358947)
    assert ae(fp.si(-50-2j), -1.497884414277228461+0.017515007378437448j)
    assert ae(fp.si(-2-20j), -1.20747603112735722103e7+4.050116856873293554e6j)
    assert ae(fp.si(-20j), -1.2807826332028294459e7j)
    assert ae(fp.si(50-2j), 1.497884414277228461+0.017515007378437448j)
    assert ae(fp.chi(50j), -0.0056283863241163054+1.5707963267948966192j)
    assert ae(fp.chi(-2+50j), -0.0183782829461330671+1.6411491348185849554j)
    assert ae(fp.chi(-20), 1.28078263320282943611e7+3.1415926535898j)
    assert ae(fp.chi(-20-2j), -4.050116856873293505e6+1.20747571696809187053e7j)
    assert ae(fp.chi(-2-50j), -0.0183782829461330671-1.6411491348185849554j)
    assert ae(fp.chi(-50j), -0.0056283863241163054-1.5707963267948966192j)
    assert ae(fp.chi(2-50j), -0.0183782829461330671-1.500443518771208283j)
    assert ae(fp.chi(20-2j), -4.050116856873293505e6-1.20747603112735722951e7j)
    assert ae(fp.chi(20), 1.2807826332028294361e7)
    assert ae(fp.chi(2+50j), -0.0183782829461330671+1.500443518771208283j)
    assert ae(fp.shi(50j), 1.5516170724859358947j)
    assert ae(fp.shi(-2+50j), 0.017515007378437448+1.497884414277228461j)
    assert ae(fp.shi(-20), -1.2807826332028294459e7)
    assert ae(fp.shi(-20-2j), 4.050116856873293554e6-1.20747603112735722103e7j)
    assert ae(fp.shi(-2-50j), 0.017515007378437448-1.497884414277228461j)
    assert ae(fp.shi(-50j), -1.5516170724859358947j)
    assert ae(fp.shi(2-50j), -0.017515007378437448-1.497884414277228461j)
    assert ae(fp.shi(20-2j), -4.050116856873293554e6-1.20747603112735722103e7j)
    assert ae(fp.shi(20), 1.2807826332028294459e7)
    assert ae(fp.shi(2+50j), -0.017515007378437448+1.497884414277228461j)
def test_airy():
    """Spot-check Airy functions Ai/Bi for large, negative and complex args."""
    mp.dps = 15
    # Explicit powers of 10 rescale the huge/tiny values into a readable range.
    assert (airyai(10)*10**10).ae(1.1047532552898687)
    assert (airybi(10)/10**9).ae(0.45564115354822515)
    assert (airyai(1000)*10**9158).ae(9.306933063179556004)
    assert (airybi(1000)/10**9154).ae(5.4077118391949465477)
    # Oscillatory regime on the negative axis.
    assert airyai(-1000).ae(0.055971895773019918842)
    assert airybi(-1000).ae(-0.083264574117080633012)
    # Large complex arguments.
    assert (airyai(100+100j)*10**188).ae(2.9099582462207032076 + 2.353013591706178756j)
    assert (airybi(100+100j)/10**185).ae(1.7086751714463652039 - 3.1416590020830804578j)
def test_hyper_0f1():
    """Test 0F1 via equivalent parameter spellings, plus asymptotics."""
    mp.dps = 15
    expected = 8.63911136507950465
    # The same value must come out however the parameter 1/3 is spelled
    # (rational tuple vs. float) and whichever entry point is used.
    for value in (hyper([], [(1, 3)], 1.5),
                  hyper([], [1 / 3.], 1.5),
                  hyp0f1(1 / 3., 1.5),
                  hyp0f1((1, 3), 1.5)):
        assert value.ae(expected)
    # Asymptotic expansion
    assert hyp0f1(3, 1e9).ae('4.9679055380347771271e+27455')
    assert hyp0f1(3, 1e9j).ae('-2.1222788784457702157e+19410 + 5.0840597555401854116e+19410j')
def test_hyper_1f1():
    """Test 1F1 with equivalent parameter spellings and special regimes."""
    mp.dps = 15
    v = 1.2917526488617656673
    # The same value must result from every mix of rational-tuple, float
    # and complex-typed parameters, via both hyper() and hyp1f1().
    assert hyper([(1,2)],[(3,2)],0.7).ae(v)
    assert hyper([(1,2)],[(3,2)],0.7+0j).ae(v)
    assert hyper([0.5],[(3,2)],0.7).ae(v)
    assert hyper([0.5],[1.5],0.7).ae(v)
    assert hyper([0.5],[(3,2)],0.7+0j).ae(v)
    assert hyper([0.5],[1.5],0.7+0j).ae(v)
    assert hyper([(1,2)],[1.5+0j],0.7).ae(v)
    assert hyper([0.5+0j],[1.5],0.7).ae(v)
    assert hyper([0.5+0j],[1.5+0j],0.7+0j).ae(v)
    assert hyp1f1(0.5,1.5,0.7).ae(v)
    assert hyp1f1((1,2),1.5,0.7).ae(v)
    # Asymptotic expansion
    assert hyp1f1(2,3,1e10).ae('2.1555012157015796988e+4342944809')
    assert (hyp1f1(2,3,1e10j)*10**10).ae(-0.97501205020039745852 - 1.7462392454512132074j)
    # Shouldn't use asymptotic expansion
    assert hyp1f1(-2, 1, 10000).ae(49980001)
    # Regression test for a previously reported bug.
    assert hyp1f1(1j,fraction(1,3),0.415-69.739j).ae(25.857588206024346592 + 15.738060264515292063j)
def test_hyper_2f1():
    """Check Gauss 2F1 for several fixed parameter sets, verifying that all
    equivalent spellings of the parameters (rational tuples, floats, values
    with a zero imaginary part) produce the same result, for both real and
    complex arguments."""
    mp.dps = 15
    v = 1.0652207633823291032
    assert hyper([(1,2), (3,4)], [2], 0.3).ae(v)
    assert hyper([(1,2), 0.75], [2], 0.3).ae(v)
    assert hyper([0.5, 0.75], [2.0], 0.3).ae(v)
    assert hyper([0.5, 0.75], [2.0], 0.3+0j).ae(v)
    assert hyper([0.5+0j, (3,4)], [2.0], 0.3+0j).ae(v)
    assert hyper([0.5+0j, (3,4)], [2.0], 0.3).ae(v)
    assert hyper([0.5, (3,4)], [2.0+0j], 0.3).ae(v)
    assert hyper([0.5+0j, 0.75+0j], [2.0+0j], 0.3+0j).ae(v)
    # Complex upper parameter
    v = 1.09234681096223231717 + 0.18104859169479360380j
    assert hyper([(1,2),0.75+j], [2], 0.5).ae(v)
    assert hyper([0.5,0.75+j], [2.0], 0.5).ae(v)
    assert hyper([0.5,0.75+j], [2.0], 0.5+0j).ae(v)
    assert hyper([0.5,0.75+j], [2.0+0j], 0.5+0j).ae(v)
    # Polynomial case (negative integer upper parameter), complex argument
    v = 0.9625 - 0.125j
    assert hyper([(3,2),-1],[4], 0.1+j/3).ae(v)
    assert hyper([1.5,-1.0],[4], 0.1+j/3).ae(v)
    assert hyper([1.5,-1.0],[4+0j], 0.1+j/3).ae(v)
    assert hyper([1.5+0j,-1.0+0j],[4+0j], 0.1+j/3).ae(v)
    # Argument outside the unit disk (|z| > 1)
    v = 1.02111069501693445001 - 0.50402252613466859521j
    assert hyper([(2,10),(3,10)],[(4,10)],1.5).ae(v)
    assert hyper([0.2,(3,10)],[0.4+0j],1.5).ae(v)
    assert hyper([0.2,(3,10)],[0.4+0j],1.5+0j).ae(v)
    v = 0.76922501362865848528 + 0.32640579593235886194j
    assert hyper([(2,10),(3,10)],[(4,10)],4+2j).ae(v)
    assert hyper([0.2,(3,10)],[0.4+0j],4+2j).ae(v)
    assert hyper([0.2,(3,10)],[(4,10)],4+2j).ae(v)
def test_hyper_2f1_hard():
    """Stress 2F1 in numerically delicate regimes: singular/degenerate
    parameter combinations (with and without eliminate=False), evaluation at
    and near z = 1, points on and close to the unit circle, branch-cut
    behavior for z > 1, and a severe-cancellation regression case."""
    mp.dps = 15
    # Singular cases
    assert hyp2f1(2,-1,-1,3).ae(0.25)
    assert hyp2f1(2,-2,-2,3).ae(0.25)
    # eliminate=False keeps the coincident negative-integer parameters
    # instead of cancelling them, giving the polynomial value
    assert hyp2f1(2,-1,-1,3,eliminate=False) == 7
    assert hyp2f1(2,-2,-2,3,eliminate=False) == 34
    assert hyp2f1(2,-2,-3,3) == 14
    assert hyp2f1(2,-3,-2,3) == inf
    assert hyp2f1(2,-1.5,-1.5,3) == 0.25
    assert hyp2f1(1,2,3,0) == 1
    assert hyp2f1(0,1,0,0) == 1
    assert hyp2f1(0,0,0,0) == 1
    assert isnan(hyp2f1(1,1,0,0))
    assert hyp2f1(2,-1,-5, 0.25+0.25j).ae(1.1+0.1j)
    assert hyp2f1(2,-5,-5, 0.25+0.25j, eliminate=False).ae(163./128 + 125./128*j)
    assert hyp2f1(0.7235, -1, -5, 0.3).ae(1.04341)
    assert hyp2f1(0.7235, -5, -5, 0.3, eliminate=False).ae(1.2939225017815903812)
    # Values at z = 1 (Gauss summation / divergent cases)
    assert hyp2f1(-1,-2,4,1) == 1.5
    assert hyp2f1(1,2,-3,1) == inf
    assert hyp2f1(-2,-2,1,1) == 6
    assert hyp2f1(1,-2,-4,1).ae(5./3)
    assert hyp2f1(0,-6,-4,1) == 1
    assert hyp2f1(0,-3,-4,1) == 1
    assert hyp2f1(0,0,0,1) == 1
    assert hyp2f1(1,0,0,1,eliminate=False) == 1
    assert hyp2f1(1,1,0,1) == inf
    assert hyp2f1(1,-6,-4,1) == inf
    assert hyp2f1(-7.2,-0.5,-4.5,1) == 0
    assert hyp2f1(-7.2,-1,-2,1).ae(-2.6)
    assert hyp2f1(1,-0.5,-4.5, 1) == inf
    assert hyp2f1(1,0.5,-4.5, 1) == -inf
    # Check evaluation on / close to unit circle
    z = exp(j*pi/3)
    w = (nthroot(2,3)+1)*exp(j*pi/12)/nthroot(3,4)**3
    assert hyp2f1('1/2','1/6','1/3', z).ae(w)
    assert hyp2f1('1/2','1/6','1/3', z.conjugate()).ae(w.conjugate())
    assert hyp2f1(0.25, (1,3), 2, '0.999').ae(1.06826449496030635)
    assert hyp2f1(0.25, (1,3), 2, '1.001').ae(1.06867299254830309446-0.00001446586793975874j)
    assert hyp2f1(0.25, (1,3), 2, -1).ae(0.96656584492524351673)
    assert hyp2f1(0.25, (1,3), 2, j).ae(0.99041766248982072266+0.03777135604180735522j)
    assert hyp2f1(2,3,5,'0.99').ae(27.699347904322690602)
    assert hyp2f1((3,2),-0.5,3,'0.99').ae(0.68403036843911661388)
    assert hyp2f1(2,3,5,1j).ae(0.37290667145974386127+0.59210004902748285917j)
    # Sums over rings of points just inside / outside / on the unit circle
    assert fsum([hyp2f1((7,10),(2,3),(-1,2), 0.95*exp(j*k)) for k in range(1,15)]).ae(52.851400204289452922+6.244285013912953225j)
    assert fsum([hyp2f1((7,10),(2,3),(-1,2), 1.05*exp(j*k)) for k in range(1,15)]).ae(54.506013786220655330-3.000118813413217097j)
    assert fsum([hyp2f1((7,10),(2,3),(-1,2), exp(j*k)) for k in range(1,15)]).ae(55.792077935955314887+1.731986485778500241j)
    assert hyp2f1(2,2.5,-3.25,0.999).ae(218373932801217082543180041.33)
    # Branches
    assert hyp2f1(1,1,2,1.01).ae(4.5595744415723676911-3.1104877758314784539j)
    assert hyp2f1(1,1,2,1.01+0.1j).ae(2.4149427480552782484+1.4148224796836938829j)
    assert hyp2f1(1,1,2,3+4j).ae(0.14576709331407297807+0.48379185417980360773j)
    assert hyp2f1(1,1,2,4).ae(-0.27465307216702742285 - 0.78539816339744830962j)
    assert hyp2f1(1,1,2,-4).ae(0.40235947810852509365)
    # Other:
    # Cancellation with a large parameter involved (bug reported on sage-devel)
    assert hyp2f1(112, (51,10), (-9,10), -0.99999).ae(-1.6241361047970862961e-24, abs_eps=0, rel_eps=eps*16)
def test_hyper_3f2_etc():
    """Check higher-order hypergeometric series 3F2, 4F3 and 5F4 at a few
    real and complex arguments, including evaluation at z = 1 (both finite
    and divergent cases)."""
    # Every other test in this file resets the precision on entry; without
    # this line the test silently inherits whatever mp.dps the previously
    # executed test left behind (e.g. 50 or 80), making it order-dependent.
    mp.dps = 15
    assert hyper([1,2,3],[1.5,8],-1).ae(0.67108992351533333030)
    assert hyper([1,2,3,4],[5,6,7], -1).ae(0.90232988035425506008)
    assert hyper([1,2,3],[1.25,5], 1).ae(28.924181329701905701)
    assert hyper([1,2,3,4],[5,6,7],5).ae(1.5192307344006649499-1.1529845225075537461j)
    assert hyper([1,2,3,4,5],[6,7,8,9],-1).ae(0.96288759462882357253)
    assert hyper([1,2,3,4,5],[6,7,8,9],1).ae(1.0428697385885855841)
    assert hyper([1,2,3,4,5],[6,7,8,9],5).ae(1.33980653631074769423-0.07143405251029226699j)
    assert hyper([1,2.79,3.08,4.37],[5.2,6.1,7.3],5).ae(1.0996321464692607231-1.7748052293979985001j)
    assert hyper([1,1,1],[1,2],1) == inf
    assert hyper([1,1,1],[2,(101,100)],1).ae(100.01621213528313220)
    # slow -- covered by doctests
    #assert hyper([1,1,1],[2,3],0.9999).ae(1.2897972005319693905)
def test_hyper_u():
    """Check Tricomi's confluent function U(a, b, z): exact values at z = 0,
    polynomial cases (negative integer a), generic real/complex evaluation,
    and large-|z| asymptotic cases."""
    mp.dps = 15
    assert hyperu(2,-3,0).ae(0.05)
    assert hyperu(2,-3.5,0).ae(4./99)
    assert hyperu(2,0,0) == 0.5
    assert hyperu(-5,1,0) == -120
    assert hyperu(-5,2,0) == inf
    assert hyperu(-5,-2,0) == 0
    assert hyperu(7,7,3).ae(0.00014681269365593503986) #exp(3)*gammainc(-6,3)
    assert hyperu(2,-3,4).ae(0.011836478100271995559)
    assert hyperu(3,4,5).ae(1./125)
    assert hyperu(2,3,0.0625) == 256
    assert hyperu(-1,2,0.25+0.5j) == -1.75+0.5j
    assert hyperu(0.5,1.5,7.25).ae(2/sqrt(29))
    assert hyperu(2,6,pi).ae(0.55804439825913399130)
    assert (hyperu((3,2),8,100+201j)*10**4).ae(-0.3797318333856738798 - 2.9974928453561707782j)
    assert (hyperu((5,2),(-1,2),-5000)*10**10).ae(-5.6681877926881664678j)
    # XXX: fails because of undetected cancellation in low level series code
    # Alternatively: could use asymptotic series here, if convergence test
    # tweaked back to recognize this one
    #assert (hyperu((5,2),(-1,2),-500)*10**7).ae(-1.82526906001593252847j)
def test_hyper_2f0():
    """Check the (generally divergent) 2F0 series: consistency between
    hyper() and hyp2f0(), exact polynomial cases with a negative integer
    parameter, a cancellation-sensitive case, and a precision sweep that
    regression-tests an old threshold bug that caused a hang."""
    mp.dps = 15
    assert hyper([1,2],[],3) == hyp2f0(1,2,3)
    assert hyp2f0(2,3,7).ae(0.0116108068639728714668 - 0.0073727413865865802130j)
    assert hyp2f0(2,3,0) == 1
    assert hyp2f0(0,0,0) == 1
    assert hyp2f0(-1,-1,1).ae(2)
    assert hyp2f0(-4,1,1.5).ae(62.5)
    assert hyp2f0(-4,1,50).ae(147029801)
    assert hyp2f0(-4,1,0.0001).ae(0.99960011997600240000)
    assert hyp2f0(0.5,0.25,0.001).ae(1.0001251174078538115)
    assert hyp2f0(0.5,0.25,3+4j).ae(0.85548875824755163518 + 0.21636041283392292973j)
    # Important: cancellation check
    assert hyp2f0((1,6),(5,6),-0.02371708245126284498).ae(0.996785723120804309)
    # Should be exact; polynomial case
    assert hyp2f0(-2,1,0.5+0.5j) == 0
    assert hyp2f0(1,-2,0.5+0.5j) == 0
    # There used to be a bug in thresholds that made one of the following hang
    for d in [15, 50, 80]:
        mp.dps = d
        assert hyp2f0(1.5, 0.5, 0.009).ae('1.006867007239309717945323585695344927904000945829843527398772456281301440034218290443367270629519483 + 1.238277162240704919639384945859073461954721356062919829456053965502443570466701567100438048602352623e-46j')
def test_hyper_1f2():
    """Check 1F2 over a wide sweep of real and purely imaginary arguments
    from 10 up to 10**20, covering the transition into the asymptotic
    regime; huge results are compared via string literals."""
    mp.dps = 15
    assert hyper([1],[2,3],4) == hyp1f2(1,2,3,4)
    # Fixed parameter set used for the whole magnitude sweep
    a1,b1,b2 = (1,10),(2,3),1./16
    # Real arguments, increasing magnitude
    assert hyp1f2(a1,b1,b2,10).ae(298.7482725554557568)
    assert hyp1f2(a1,b1,b2,100).ae(224128961.48602947604)
    assert hyp1f2(a1,b1,b2,1000).ae(1.1669528298622675109e+27)
    assert hyp1f2(a1,b1,b2,10000).ae(2.4780514622487212192e+86)
    assert hyp1f2(a1,b1,b2,100000).ae(1.3885391458871523997e+274)
    assert hyp1f2(a1,b1,b2,1000000).ae('9.8851796978960318255e+867')
    assert hyp1f2(a1,b1,b2,10**7).ae('1.1505659189516303646e+2746')
    assert hyp1f2(a1,b1,b2,10**8).ae('1.4672005404314334081e+8685')
    assert hyp1f2(a1,b1,b2,10**20).ae('3.6888217332150976493e+8685889636')
    # Imaginary arguments, increasing magnitude
    assert hyp1f2(a1,b1,b2,10*j).ae(-16.163252524618572878 - 44.321567896480184312j)
    assert hyp1f2(a1,b1,b2,100*j).ae(61938.155294517848171 + 637349.45215942348739j)
    assert hyp1f2(a1,b1,b2,1000*j).ae(8455057657257695958.7 + 6261969266997571510.6j)
    assert hyp1f2(a1,b1,b2,10000*j).ae(-8.9771211184008593089e+60 + 4.6550528111731631456e+59j)
    assert hyp1f2(a1,b1,b2,100000*j).ae(2.6398091437239324225e+193 + 4.1658080666870618332e+193j)
    assert hyp1f2(a1,b1,b2,1000000*j).ae('3.5999042951925965458e+613 + 1.5026014707128947992e+613j')
    assert hyp1f2(a1,b1,b2,10**7*j).ae('-8.3208715051623234801e+1939 - 3.6752883490851869429e+1941j')
    assert hyp1f2(a1,b1,b2,10**8*j).ae('2.0724195707891484454e+6140 - 1.3276619482724266387e+6141j')
    assert hyp1f2(a1,b1,b2,10**20*j).ae('-1.1734497974795488504e+6141851462 + 1.1498106965385471542e+6141851462j')
def test_hyper_2f3():
    """Check 2F3 over real and purely imaginary arguments from 10 up to
    10**20, exercising the asymptotic expansion; huge results are compared
    via string literals."""
    mp.dps = 15
    assert hyper([1,2],[3,4,5],6) == hyp2f3(1,2,3,4,5,6)
    # Fixed parameter set used for the whole magnitude sweep
    a1,a2,b1,b2,b3 = (1,10),(2,3),(3,10), 2, 1./16
    # Check asymptotic expansion
    assert hyp2f3(a1,a2,b1,b2,b3,10).ae(128.98207160698659976)
    assert hyp2f3(a1,a2,b1,b2,b3,1000).ae(6.6309632883131273141e25)
    assert hyp2f3(a1,a2,b1,b2,b3,10000).ae(4.6863639362713340539e84)
    assert hyp2f3(a1,a2,b1,b2,b3,100000).ae(8.6632451236103084119e271)
    assert hyp2f3(a1,a2,b1,b2,b3,10**6).ae('2.0291718386574980641e865')
    assert hyp2f3(a1,a2,b1,b2,b3,10**7).ae('7.7639836665710030977e2742')
    assert hyp2f3(a1,a2,b1,b2,b3,10**8).ae('3.2537462584071268759e8681')
    assert hyp2f3(a1,a2,b1,b2,b3,10**20).ae('1.2966030542911614163e+8685889627')
    # Imaginary arguments, increasing magnitude
    assert hyp2f3(a1,a2,b1,b2,b3,10*j).ae(-18.551602185587547854 - 13.348031097874113552j)
    assert hyp2f3(a1,a2,b1,b2,b3,100*j).ae(78634.359124504488695 + 74459.535945281973996j)
    assert hyp2f3(a1,a2,b1,b2,b3,1000*j).ae(597682550276527901.59 - 65136194809352613.078j)
    assert hyp2f3(a1,a2,b1,b2,b3,10000*j).ae(-1.1779696326238582496e+59 + 1.2297607505213133872e+59j)
    assert hyp2f3(a1,a2,b1,b2,b3,100000*j).ae(2.9844228969804380301e+191 + 7.5587163231490273296e+190j)
    assert hyp2f3(a1,a2,b1,b2,b3,1000000*j).ae('7.4859161049322370311e+610 - 2.8467477015940090189e+610j')
    assert hyp2f3(a1,a2,b1,b2,b3,10**7*j).ae('-1.7477645579418800826e+1938 - 1.7606522995808116405e+1938j')
    assert hyp2f3(a1,a2,b1,b2,b3,10**8*j).ae('-1.6932731942958401784e+6137 - 2.4521909113114629368e+6137j')
    assert hyp2f3(a1,a2,b1,b2,b3,10**20*j).ae('-2.0988815677627225449e+6141851451 + 5.7708223542739208681e+6141851452j')
def test_hyper_2f2():
    """Check 2F2 at moderate, huge positive, large negative, and extreme
    arguments; the extreme cases are compared via string literals."""
    mp.dps = 15
    assert hyper([1,2],[3,4],5) == hyp2f2(1,2,3,4,5)
    a1,a2,b1,b2 = (3,10),4,(1,2),1./16
    assert hyp2f2(a1,a2,b1,b2,10).ae(448225936.3377556696)
    assert hyp2f2(a1,a2,b1,b2,10000).ae('1.2012553712966636711e+4358')
    assert hyp2f2(a1,a2,b1,b2,-20000).ae(-0.04182343755661214626)
    assert hyp2f2(a1,a2,b1,b2,10**20).ae('1.1148680024303263661e+43429448190325182840')
def test_orthpoly():
    """Spot-check orthogonal polynomials (Jacobi, Legendre, Chebyshev,
    Laguerre) including non-integer and complex degrees/parameters, exact
    integer cases, and signed-infinity limits of legendre at x = -1."""
    mp.dps = 15
    assert jacobi(-4,2,3,0.7).ae(22800./4913)
    assert jacobi(3,2,4,5.5) == 4133.125
    assert jacobi(1.5,5/6.,4,0).ae(-1.0851951434075508417)
    assert jacobi(-2, 1, 2, 4).ae(-0.16)
    assert jacobi(2, -1, 2.5, 4).ae(34.59375)
    #assert jacobi(2, -1, 2, 4) == 28.5
    assert legendre(5, 7) == 129367
    assert legendre(0.5,0).ae(0.53935260118837935667)
    # Exact endpoint values P_n(+-1) = (+-1)^n
    assert legendre(-1,-1) == 1
    assert legendre(0,-1) == 1
    assert legendre(0, 1) == 1
    assert legendre(1, -1) == -1
    assert legendre(7, 1) == 1
    assert legendre(7, -1) == -1
    assert legendre(8,1.5).ae(15457523./32768)
    assert legendre(j,-j).ae(2.4448182735671431011 + 0.6928881737669934843j)
    assert chebyu(5,1) == 6
    assert chebyt(3,2) == 26
    # Non-integer degree diverges at x = -1, with alternating sign
    assert legendre(3.5,-1) == inf
    assert legendre(4.5,-1) == -inf
    assert legendre(3.5+1j,-1) == mpc(inf,inf)
    assert legendre(4.5+1j,-1) == mpc(-inf,-inf)
    assert laguerre(4, -2, 3).ae(-1.125)
    assert laguerre(3, 1+j, 0.5).ae(0.2291666666666666667 + 2.5416666666666666667j)
def test_hermite():
    """Spot-check Hermite polynomials/functions for integer, negative,
    fractional and complex degrees, including exact zeros at the origin for
    odd degree and symmetry checks at large real/imaginary arguments."""
    mp.dps = 15
    assert hermite(-2, 0).ae(0.5)
    assert hermite(-1, 0).ae(0.88622692545275801365)
    assert hermite(0, 0).ae(1)
    assert hermite(1, 0) == 0
    assert hermite(2, 0).ae(-2)
    assert hermite(0, 2).ae(1)
    assert hermite(1, 2).ae(4)
    assert hermite(1, -2).ae(-4)
    assert hermite(2, -2).ae(14)
    assert hermite(0.5, 0).ae(0.69136733903629335053)
    assert hermite(9, 0) == 0
    assert hermite(4,4).ae(3340)
    assert hermite(3,4).ae(464)
    assert hermite(-4,4).ae(0.00018623860287512396181)
    assert hermite(-3,4).ae(0.0016540169879668766270)
    # Odd degree: H_9 is odd, so values at +-z have opposite sign
    assert hermite(9, 2.5j).ae(13638725j)
    assert hermite(9, -2.5j).ae(-13638725j)
    assert hermite(9, 100).ae(511078883759363024000)
    assert hermite(9, -100).ae(-511078883759363024000)
    assert hermite(9, 100j).ae(512922083920643024000j)
    assert hermite(9, -100j).ae(-512922083920643024000j)
    # Negative fractional degree at large arguments
    assert hermite(-9.5, 2.5j).ae(-2.9004951258126778174e-6 + 1.7601372934039951100e-6j)
    assert hermite(-9.5, -2.5j).ae(-2.9004951258126778174e-6 - 1.7601372934039951100e-6j)
    assert hermite(-9.5, 100).ae(1.3776300722767084162e-22, abs_eps=0, rel_eps=eps)
    assert hermite(-9.5, -100).ae('1.3106082028470671626e4355')
    assert hermite(-9.5, 100j).ae(-9.7900218581864768430e-23 - 9.7900218581864768430e-23j, abs_eps=0, rel_eps=eps)
    assert hermite(-9.5, -100j).ae(-9.7900218581864768430e-23 + 9.7900218581864768430e-23j, abs_eps=0, rel_eps=eps)
    assert hermite(2+3j, -1-j).ae(851.3677063883687676 - 1496.4373467871007997j)
def test_gegenbauer():
    """Spot-check Gegenbauer (ultraspherical) polynomials, including
    negative degrees, negative parameters, and complex arguments."""
    mp.dps = 15
    assert gegenbauer(1,2,3).ae(12)
    assert gegenbauer(2,3,4).ae(381)
    assert gegenbauer(0,0,0) == 0
    assert gegenbauer(2,-1,3) == 0
    assert gegenbauer(-7, 0.5, 3).ae(8989)
    assert gegenbauer(1, -0.5, 3).ae(-3)
    assert gegenbauer(1, -1.5, 3).ae(-9)
    # NOTE(review): exact duplicate of the (1, -0.5, 3) assertion two lines
    # up -- possibly a different parameter set was intended; confirm upstream
    assert gegenbauer(1, -0.5, 3).ae(-3)
    assert gegenbauer(-0.5, -0.5, 3).ae(-2.6383553159023906245)
    assert gegenbauer(2+3j, 1-j, 3+4j).ae(14.880536623203696780 + 20.022029711598032898j)
    #assert gegenbauer(-2, -0.5, 3).ae(-12)
def test_legenp():
    """Check associated Legendre functions of the first kind P_n^m, for
    both type=2 (default, cut on the real axis) and type=3 conventions,
    with integer, negative, fractional and complex degrees/orders."""
    mp.dps = 15
    # Order 0 reduces to the Legendre polynomial
    assert legenp(2,0,4) == legendre(2,4)
    assert legenp(-2, -1, 0.5).ae(0.43301270189221932338)
    assert legenp(-2, -1, 0.5, type=3).ae(0.43301270189221932338j)
    assert legenp(-2, 1, 0.5).ae(-0.86602540378443864676)
    assert legenp(2+j, 3+4j, -j).ae(134742.98773236786148 + 429782.72924463851745j)
    assert legenp(2+j, 3+4j, -j, type=3).ae(802.59463394152268507 - 251.62481308942906447j)
    # Order exceeding degree gives zero (integer case)
    assert legenp(2,4,3).ae(0)
    assert legenp(2,4,3,type=3).ae(0)
    assert legenp(2,1,0.5).ae(-1.2990381056766579701)
    assert legenp(2,1,0.5,type=3).ae(1.2990381056766579701j)
    assert legenp(3,2,3).ae(-360)
    assert legenp(3,3,3).ae(240j*2**0.5)
    assert legenp(3,4,3).ae(0)
    assert legenp(0,0.5,2).ae(0.52503756790433198939 - 0.52503756790433198939j)
    assert legenp(-1,-0.5,2).ae(0.60626116232846498110 + 0.60626116232846498110j)
    assert legenp(-2,0.5,2).ae(1.5751127037129959682 - 1.5751127037129959682j)
    assert legenp(-2,0.5,-0.5).ae(-0.85738275810499171286)
def test_legenq():
    """Check associated Legendre functions of the second kind Q_n^m in both
    the type=2 and type=3 conventions: behavior at the poles x = +-1, at
    x = 0, the standard real-axis cases for degrees 0-2, and assorted
    fractional/complex degrees and orders."""
    mp.dps = 15
    # Short local alias to keep the long assertion lines readable
    f = legenq
    # Evaluation at poles
    assert isnan(f(3,2,1))
    assert isnan(f(3,2,-1))
    assert isnan(f(3,2,1,type=3))
    assert isnan(f(3,2,-1,type=3))
    # Evaluation at 0
    assert f(0,1,0,type=2).ae(-1)
    assert f(-2,2,0,type=2,zeroprec=200).ae(0)
    assert f(1.5,3,0,type=2).ae(-2.2239343475841951023)
    assert f(0,1,0,type=3).ae(j)
    assert f(-2,2,0,type=3,zeroprec=200).ae(0)
    assert f(1.5,3,0,type=3).ae(2.2239343475841951022*(1-1j))
    # Standard case, degree 0
    assert f(0,0,-1.5).ae(-0.8047189562170501873 + 1.5707963267948966192j)
    assert f(0,0,-0.5).ae(-0.54930614433405484570)
    assert f(0,0,0,zeroprec=200).ae(0)
    assert f(0,0,0.5).ae(0.54930614433405484570)
    assert f(0,0,1.5).ae(0.8047189562170501873 - 1.5707963267948966192j)
    assert f(0,0,-1.5,type=3).ae(-0.80471895621705018730)
    assert f(0,0,-0.5,type=3).ae(-0.5493061443340548457 - 1.5707963267948966192j)
    assert f(0,0,0,type=3).ae(-1.5707963267948966192j)
    assert f(0,0,0.5,type=3).ae(0.5493061443340548457 - 1.5707963267948966192j)
    assert f(0,0,1.5,type=3).ae(0.80471895621705018730)
    # Standard case, degree 1
    assert f(1,0,-1.5).ae(0.2070784343255752810 - 2.3561944901923449288j)
    assert f(1,0,-0.5).ae(-0.72534692783297257715)
    assert f(1,0,0).ae(-1)
    assert f(1,0,0.5).ae(-0.72534692783297257715)
    assert f(1,0,1.5).ae(0.2070784343255752810 - 2.3561944901923449288j)
    # Standard case, degree 2
    assert f(2,0,-1.5).ae(-0.0635669991240192885 + 4.5160394395353277803j)
    assert f(2,0,-0.5).ae(0.81866326804175685571)
    assert f(2,0,0,zeroprec=200).ae(0)
    assert f(2,0,0.5).ae(-0.81866326804175685571)
    assert f(2,0,1.5).ae(0.0635669991240192885 - 4.5160394395353277803j)
    # Misc orders and degrees
    assert f(2,3,1.5,type=2).ae(-5.7243340223994616228j)
    assert f(2,3,1.5,type=3).ae(-5.7243340223994616228)
    assert f(2,3,0.5,type=2).ae(-12.316805742712016310)
    assert f(2,3,0.5,type=3).ae(-12.316805742712016310j)
    assert f(2,3,-1.5,type=2).ae(-5.7243340223994616228j)
    assert f(2,3,-1.5,type=3).ae(5.7243340223994616228)
    assert f(2,3,-0.5,type=2).ae(-12.316805742712016310)
    assert f(2,3,-0.5,type=3).ae(-12.316805742712016310j)
    assert f(2+3j, 3+4j, 0.5, type=3).ae(0.0016119404873235186807 - 0.0005885900510718119836j)
    assert f(2+3j, 3+4j, -1.5, type=3).ae(0.008451400254138808670 + 0.020645193304593235298j)
    assert f(-2.5,1,-1.5).ae(3.9553395527435335749j)
    assert f(-2.5,1,-0.5).ae(1.9290561746445456908)
    assert f(-2.5,1,0).ae(1.2708196271909686299)
    assert f(-2.5,1,0.5).ae(-0.31584812990742202869)
    assert f(-2.5,1,1.5).ae(-3.9553395527435335742 + 0.2993235655044701706j)
    assert f(-2.5,1,-1.5,type=3).ae(0.29932356550447017254j)
    assert f(-2.5,1,-0.5,type=3).ae(-0.3158481299074220287 - 1.9290561746445456908j)
    assert f(-2.5,1,0,type=3).ae(1.2708196271909686292 - 1.2708196271909686299j)
    assert f(-2.5,1,0.5,type=3).ae(1.9290561746445456907 + 0.3158481299074220287j)
    assert f(-2.5,1,1.5,type=3).ae(-0.29932356550447017254)
def test_agm():
    """Check the arithmetic-geometric mean: fixed points, the one-argument
    form agm(b) == agm(1, b), Gauss's constant via agm(1, sqrt(2)), and
    complex/negative arguments."""
    mp.dps = 15
    assert agm(0,0) == 0
    assert agm(0,1) == 0
    assert agm(1,1) == 1
    assert agm(7,7) == 7
    assert agm(j,j) == j
    # 1/agm(1, sqrt(2)) is Gauss's constant
    assert (1/agm(1,sqrt(2))).ae(0.834626841674073186)
    assert agm(1,2).ae(1.4567910310469068692)
    assert agm(1,3).ae(1.8636167832448965424)
    assert agm(1,j).ae(0.599070117367796104+0.599070117367796104j)
    assert agm(2) == agm(1,2)
    assert agm(-3,4).ae(0.63468509766550907+1.3443087080896272j)
def test_gammainc():
    """Check the (generalized) incomplete gamma function gammainc(z, a, b):
    upper/lower/generalized forms, negative and complex parameters, the
    regularized variant including its zero and limiting cases, and huge
    parameter values compared via string literals."""
    mp.dps = 15
    assert gammainc(2,5).ae(6*exp(-5))
    assert gammainc(2,0,5).ae(1-6*exp(-5))
    assert gammainc(2,3,5).ae(-6*exp(-5)+4*exp(-3))
    assert gammainc(-2.5,-0.5).ae(-0.9453087204829418812-5.3164237738936178621j)
    assert gammainc(0,2,4).ae(0.045121158298212213088)
    assert gammainc(0,3).ae(0.013048381094197037413)
    assert gammainc(0,2+j,1-j).ae(0.00910653685850304839-0.22378752918074432574j)
    assert gammainc(0,1-j).ae(0.00028162445198141833+0.17932453503935894015j)
    assert gammainc(3,4,5,True).ae(0.11345128607046320253)
    # Full integration range recovers gamma()
    assert gammainc(3.5,0,inf).ae(gamma(3.5))
    assert gammainc(-150.5,500).ae('6.9825435345798951153e-627')
    assert gammainc(-150.5,800).ae('4.6885137549474089431e-788')
    assert gammainc(-3.5, -20.5).ae(0.27008820585226911 - 1310.31447140574997636j)
    assert gammainc(-3.5, -200.5).ae(0.27008820585226911 - 5.3264597096208368435e76j) # XXX real part
    assert gammainc(0,0,2) == inf
    assert gammainc(1,b=1).ae(0.6321205588285576784)
    assert gammainc(3,2,2) == 0
    assert gammainc(2,3+j,3-j).ae(-0.28135485191849314194j)
    assert gammainc(4+0j,1).ae(5.8860710587430771455)
    # Regularized upper gamma
    assert isnan(gammainc(0, 0, regularized=True))
    assert gammainc(-1, 0, regularized=True) == inf
    assert gammainc(1, 0, regularized=True) == 1
    # Division by gamma(z) at a pole makes the regularized value 0
    assert gammainc(0, 5, regularized=True) == 0
    assert gammainc(0, 2+3j, regularized=True) == 0
    assert gammainc(0, 5000, regularized=True) == 0
    assert gammainc(0, 10**30, regularized=True) == 0
    assert gammainc(-1, 5, regularized=True) == 0
    assert gammainc(-1, 5000, regularized=True) == 0
    assert gammainc(-1, 10**30, regularized=True) == 0
    assert gammainc(-1, -5, regularized=True) == 0
    assert gammainc(-1, -5000, regularized=True) == 0
    assert gammainc(-1, -10**30, regularized=True) == 0
    assert gammainc(-1, 3+4j, regularized=True) == 0
    assert gammainc(1, 5, regularized=True).ae(exp(-5))
    assert gammainc(1, 5000, regularized=True).ae(exp(-5000))
    assert gammainc(1, 10**30, regularized=True).ae(exp(-10**30))
    assert gammainc(1, 3+4j, regularized=True).ae(exp(-3-4j))
    assert gammainc(-1000000,2).ae('1.3669297209397347754e-301037', abs_eps=0, rel_eps=8*eps)
    assert gammainc(-1000000,2,regularized=True) == 0
    assert gammainc(-1000000,3+4j).ae('-1.322575609404222361e-698979 - 4.9274570591854533273e-698978j', abs_eps=0, rel_eps=8*eps)
    assert gammainc(-1000000,3+4j,regularized=True) == 0
    assert gammainc(2+3j, 4+5j, regularized=True).ae(0.085422013530993285774-0.052595379150390078503j)
    assert gammainc(1000j, 1000j, regularized=True).ae(0.49702647628921131761 + 0.00297355675013575341j)
    # Generalized
    assert gammainc(3,4,2) == -gammainc(3,2,4)
    assert gammainc(4, 2, 3).ae(1.2593494302978947396)
    assert gammainc(4, 2, 3, regularized=True).ae(0.20989157171631578993)
    assert gammainc(0, 2, 3).ae(0.035852129613864082155)
    assert gammainc(0, 2, 3, regularized=True) == 0
    assert gammainc(-1, 2, 3).ae(0.015219822548487616132)
    assert gammainc(-1, 2, 3, regularized=True) == 0
    # NOTE(review): the next two assertions exactly duplicate the
    # (0, 2, 3) pair four lines above
    assert gammainc(0, 2, 3).ae(0.035852129613864082155)
    assert gammainc(0, 2, 3, regularized=True) == 0
    # Should use upper gammas
    assert gammainc(5, 10000, 12000).ae('1.1359381951461801687e-4327', abs_eps=0, rel_eps=8*eps)
    # Should use lower gammas
    assert gammainc(10000, 2, 3).ae('8.1244514125995785934e4765')
def test_gammainc_expint_n():
    """Exhaustively exercise the low-level code paths shared by the upper
    incomplete gamma function and the generalized exponential integral
    expint(n, z) with small integer index n in [-3, 3]: small, large and
    astronomically large arguments of both signs, checking branch-cut
    imaginary parts where they arise for negative real arguments."""
    # These tests are intended to check all cases of the low-level code
    # for upper gamma and expint with small integer index.
    # Need to cover positive/negative arguments; small/large/huge arguments
    # for both positive and negative indices, as well as indices 0 and 1
    # which may be special-cased
    mp.dps = 15
    # expint, small positive argument
    assert expint(-3,3.5).ae(0.021456366563296693987)
    assert expint(-2,3.5).ae(0.014966633183073309405)
    assert expint(-1,3.5).ae(0.011092916359219041088)
    assert expint(0,3.5).ae(0.0086278238349481430685)
    assert expint(1,3.5).ae(0.0069701398575483929193)
    assert expint(2,3.5).ae(0.0058018939208991255223)
    assert expint(3,3.5).ae(0.0049453773495857807058)
    # expint, small negative argument (branch cut for n >= 1)
    assert expint(-3,-3.5).ae(-4.6618170604073311319)
    assert expint(-2,-3.5).ae(-5.5996974157555515963)
    assert expint(-1,-3.5).ae(-6.7582555017739415818)
    assert expint(0,-3.5).ae(-9.4615577024835182145)
    assert expint(1,-3.5).ae(-13.925353995152335292 - 3.1415926535897932385j)
    assert expint(2,-3.5).ae(-15.62328702434085977 - 10.995574287564276335j)
    assert expint(3,-3.5).ae(-10.783026313250347722 - 19.242255003237483586j)
    # expint, large positive argument
    assert expint(-3,350).ae(2.8614825451252838069e-155, abs_eps=0, rel_eps=8*eps)
    assert expint(-2,350).ae(2.8532837224504675901e-155, abs_eps=0, rel_eps=8*eps)
    assert expint(-1,350).ae(2.8451316155828634555e-155, abs_eps=0, rel_eps=8*eps)
    assert expint(0,350).ae(2.8370258275042797989e-155, abs_eps=0, rel_eps=8*eps)
    assert expint(1,350).ae(2.8289659656701459404e-155, abs_eps=0, rel_eps=8*eps)
    assert expint(2,350).ae(2.8209516419468505006e-155, abs_eps=0, rel_eps=8*eps)
    assert expint(3,350).ae(2.8129824725501272171e-155, abs_eps=0, rel_eps=8*eps)
    # expint, large negative argument
    assert expint(-3,-350).ae(-2.8528796154044839443e+149)
    assert expint(-2,-350).ae(-2.8610072121701264351e+149)
    assert expint(-1,-350).ae(-2.8691813842677537647e+149)
    assert expint(0,-350).ae(-2.8774025343659421709e+149)
    # For n >= 1 check real part and branch-cut imaginary part separately
    u = expint(1,-350)
    assert u.ae(-2.8856710698020863568e+149)
    assert u.imag.ae(-3.1415926535897932385)
    u = expint(2,-350)
    assert u.ae(-2.8939874026504650534e+149)
    assert u.imag.ae(-1099.5574287564276335)
    u = expint(3,-350)
    assert u.ae(-2.9023519497915044349e+149)
    assert u.imag.ae(-192422.55003237483586)
    # expint, huge positive argument (string comparison of exponent)
    assert expint(-3,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
    assert expint(-2,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
    assert expint(-1,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
    assert expint(0,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
    assert expint(1,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
    assert expint(2,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
    assert expint(3,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
    # expint, huge negative argument
    assert expint(-3,-350000000000000000000000).ae('-3.7805306852415755699e+152003068666138139677871')
    assert expint(-2,-350000000000000000000000).ae('-3.7805306852415755699e+152003068666138139677871')
    assert expint(-1,-350000000000000000000000).ae('-3.7805306852415755699e+152003068666138139677871')
    assert expint(0,-350000000000000000000000).ae('-3.7805306852415755699e+152003068666138139677871')
    u = expint(1,-350000000000000000000000)
    assert u.ae('-3.7805306852415755699e+152003068666138139677871')
    assert u.imag.ae(-3.1415926535897932385)
    u = expint(2,-350000000000000000000000)
    assert u.imag.ae(-1.0995574287564276335e+24)
    assert u.ae('-3.7805306852415755699e+152003068666138139677871')
    u = expint(3,-350000000000000000000000)
    assert u.imag.ae(-1.9242255003237483586e+47)
    assert u.ae('-3.7805306852415755699e+152003068666138139677871')
    # Small case; no branch cut
    assert gammainc(-3,3.5).ae(0.00010020262545203707109)
    assert gammainc(-2,3.5).ae(0.00040370427343557393517)
    assert gammainc(-1,3.5).ae(0.0016576839773997501492)
    assert gammainc(0,3.5).ae(0.0069701398575483929193)
    assert gammainc(1,3.5).ae(0.03019738342231850074)
    assert gammainc(2,3.5).ae(0.13588822540043325333)
    assert gammainc(3,3.5).ae(0.64169439772426814072)
    # Small case; with branch cut
    assert gammainc(-3,-3.5).ae(0.03595832954467563286 - 0.52359877559829887308j)
    assert gammainc(-2,-3.5).ae(-0.88024704597962022221 - 1.5707963267948966192j)
    assert gammainc(-1,-3.5).ae(4.4637962926688170771 - 3.1415926535897932385j)
    assert gammainc(0,-3.5).ae(-13.925353995152335292 - 3.1415926535897932385j)
    assert gammainc(1,-3.5).ae(33.115451958692313751)
    assert gammainc(2,-3.5).ae(-82.788629896730784377)
    assert gammainc(3,-3.5).ae(240.08702670051927469)
    # Asymptotic case; no branch cut
    assert gammainc(-3,350).ae(6.5424095113340358813e-163, abs_eps=0, rel_eps=8*eps)
    assert gammainc(-2,350).ae(2.296312222489899769e-160, abs_eps=0, rel_eps=8*eps)
    assert gammainc(-1,350).ae(8.059861834133858573e-158, abs_eps=0, rel_eps=8*eps)
    assert gammainc(0,350).ae(2.8289659656701459404e-155, abs_eps=0, rel_eps=8*eps)
    assert gammainc(1,350).ae(9.9295903962649792963e-153, abs_eps=0, rel_eps=8*eps)
    assert gammainc(2,350).ae(3.485286229089007733e-150, abs_eps=0, rel_eps=8*eps)
    assert gammainc(3,350).ae(1.2233453960006379793e-147, abs_eps=0, rel_eps=8*eps)
    # Asymptotic case; branch cut
    u = gammainc(-3,-350)
    assert u.ae(6.7889565783842895085e+141)
    assert u.imag.ae(-0.52359877559829887308)
    u = gammainc(-2,-350)
    assert u.ae(-2.3692668977889832121e+144)
    assert u.imag.ae(-1.5707963267948966192)
    u = gammainc(-1,-350)
    assert u.ae(8.2685354361441858669e+146)
    assert u.imag.ae(-3.1415926535897932385)
    u = gammainc(0,-350)
    assert u.ae(-2.8856710698020863568e+149)
    assert u.imag.ae(-3.1415926535897932385)
    u = gammainc(1,-350)
    assert u.ae(1.0070908870280797598e+152)
    assert u.imag == 0
    u = gammainc(2,-350)
    assert u.ae(-3.5147471957279983618e+154)
    assert u.imag == 0
    u = gammainc(3,-350)
    assert u.ae(1.2266568422179417091e+157)
    assert u.imag == 0
    # Extreme asymptotic case
    assert gammainc(-3,350000000000000000000000).ae('5.0362468738874738859e-152003068666138139677990', abs_eps=0, rel_eps=8*eps)
    assert gammainc(-2,350000000000000000000000).ae('1.7626864058606158601e-152003068666138139677966', abs_eps=0, rel_eps=8*eps)
    assert gammainc(-1,350000000000000000000000).ae('6.1694024205121555102e-152003068666138139677943', abs_eps=0, rel_eps=8*eps)
    assert gammainc(0,350000000000000000000000).ae('2.1592908471792544286e-152003068666138139677919', abs_eps=0, rel_eps=8*eps)
    assert gammainc(1,350000000000000000000000).ae('7.5575179651273905e-152003068666138139677896', abs_eps=0, rel_eps=8*eps)
    assert gammainc(2,350000000000000000000000).ae('2.645131287794586675e-152003068666138139677872', abs_eps=0, rel_eps=8*eps)
    assert gammainc(3,350000000000000000000000).ae('9.2579595072810533625e-152003068666138139677849', abs_eps=0, rel_eps=8*eps)
    u = gammainc(-3,-350000000000000000000000)
    assert u.ae('8.8175642804468234866e+152003068666138139677800')
    assert u.imag.ae(-0.52359877559829887308)
    u = gammainc(-2,-350000000000000000000000)
    assert u.ae('-3.0861474981563882203e+152003068666138139677824')
    assert u.imag.ae(-1.5707963267948966192)
    u = gammainc(-1,-350000000000000000000000)
    assert u.ae('1.0801516243547358771e+152003068666138139677848')
    assert u.imag.ae(-3.1415926535897932385)
    u = gammainc(0,-350000000000000000000000)
    assert u.ae('-3.7805306852415755699e+152003068666138139677871')
    assert u.imag.ae(-3.1415926535897932385)
    assert gammainc(1,-350000000000000000000000).ae('1.3231857398345514495e+152003068666138139677895')
    assert gammainc(2,-350000000000000000000000).ae('-4.6311500894209300731e+152003068666138139677918')
    assert gammainc(3,-350000000000000000000000).ae('1.6209025312973255256e+152003068666138139677942')
def test_incomplete_beta():
    """Spot-check the generalized incomplete beta function betainc with
    negative, fractional and complex parameters and nonstandard limits."""
    mp.dps = 15
    assert betainc(-2,-3,0.5,0.75).ae(63.4305673311255413583969)
    assert betainc(4.5,0.5+2j,2.5,6).ae(0.2628801146130621387903065 + 0.5162565234467020592855378j)
    assert betainc(4,5,0,6).ae(90747.77142857142857142857)
def test_erf():
    """Check the error-function family: erf/erfc/erfi on the real line, at
    infinities, and in the complex plane; return-type checks (mpf vs mpc);
    the erfc asymptotic series; and erfinv round-trips near +-1 at both 15
    and 50 digits of working precision."""
    mp.dps = 15
    assert erf(0) == 0
    assert erf(1).ae(0.84270079294971486934)
    assert erf(3+4j).ae(-120.186991395079444098 - 27.750337293623902498j)
    assert erf(-4-3j).ae(-0.99991066178539168236 + 0.00004972026054496604j)
    assert erf(pi).ae(0.99999112385363235839)
    assert erf(1j).ae(1.6504257587975428760j)
    assert erf(-1j).ae(-1.6504257587975428760j)
    # Real input must give mpf, complex input (even 0j) must give mpc
    assert isinstance(erf(1), mpf)
    assert isinstance(erf(-1), mpf)
    assert isinstance(erf(0), mpf)
    assert isinstance(erf(0j), mpc)
    assert erf(inf) == 1
    assert erf(-inf) == -1
    assert erfi(0) == 0
    assert erfi(1/pi).ae(0.371682698493894314)
    assert erfi(inf) == inf
    assert erfi(-inf) == -inf
    assert erf(1+0j) == erf(1)
    assert erfc(1+0j) == erfc(1)
    # Complementary identity erf(z) = 1 - erfc(z)
    assert erf(0.2+0.5j).ae(1 - erfc(0.2+0.5j))
    assert erfc(0) == 1
    assert erfc(1).ae(1-erf(1))
    assert erfc(-1).ae(1-erf(-1))
    assert erfc(1/pi).ae(1-erf(1/pi))
    assert erfc(-10) == 2
    assert erfc(-1000000) == 2
    assert erfc(-inf) == 2
    assert erfc(inf) == 0
    assert isnan(erfc(nan))
    assert (erfc(10**4)*mpf(10)**43429453).ae('3.63998738656420')
    assert erf(8+9j).ae(-1072004.2525062051158 + 364149.91954310255423j)
    assert erfc(8+9j).ae(1072005.2525062051158 - 364149.91954310255423j)
    assert erfc(-8-9j).ae(-1072003.2525062051158 + 364149.91954310255423j)
    mp.dps = 50
    # This one does not use the asymptotic series
    assert (erfc(10)*10**45).ae('2.0884875837625447570007862949577886115608181193212')
    # This one does
    assert (erfc(50)*10**1088).ae('2.0709207788416560484484478751657887929322509209954')
    mp.dps = 15
    assert str(erfc(10**50)) == '3.66744826532555e-4342944819032518276511289189166050822943970058036665661144537831658646492088707747292249493384317534'
    # Inverse error function and round-trips
    assert erfinv(0) == 0
    assert erfinv(0.5).ae(0.47693627620446987338)
    assert erfinv(-0.5).ae(-0.47693627620446987338)
    assert erfinv(1) == inf
    assert erfinv(-1) == -inf
    assert erf(erfinv(0.95)).ae(0.95)
    assert erf(erfinv(0.999999999995)).ae(0.999999999995)
    assert erf(erfinv(-0.999999999995)).ae(-0.999999999995)
    mp.dps = 50
    assert erf(erfinv('0.99999999999999999999999999999995')).ae('0.99999999999999999999999999999995')
    assert erf(erfinv('0.999999999999999999999999999999995')).ae('0.999999999999999999999999999999995')
    assert erf(erfinv('-0.999999999999999999999999999999995')).ae('-0.999999999999999999999999999999995')
    mp.dps = 15
    # Complex asymptotic expansions
    v = erfc(50j)
    assert v.real == 1
    assert v.imag.ae('-6.1481820666053078736e+1083')
    assert erfc(-100+5j).ae(2)
    assert (erfc(100+5j)*10**4335).ae(2.3973567853824133572 - 3.9339259530609420597j)
    assert erfc(100+100j).ae(0.00065234366376857698698 - 0.0039357263629214118437j)
def test_pdf():
    """Check the normal distribution helpers: npdf limits and translation
    invariance, unit total mass via tanh-sinh quadrature, and ncdf values
    at the center, infinities and far tails (including an accuracy check
    deep in the lower tail)."""
    mp.dps = 15
    assert npdf(-inf) == 0
    assert npdf(inf) == 0
    # Density depends only on (x - mu)/sigma
    assert npdf(5,0,2).ae(npdf(5+4,4,2))
    # Density integrates to exactly 1
    assert quadts(lambda x: npdf(x,-0.5,0.8), [-inf, inf]) == 1
    assert ncdf(0) == 0.5
    assert ncdf(3,3) == 0.5
    assert ncdf(-inf) == 0
    assert ncdf(inf) == 1
    assert ncdf(10) == 1
    # Verify that this is computed accurately
    assert (ncdf(-10)*10**24).ae(7.619853024160526)
def test_lambertw():
    """Spot checks for the Lambert W function on the principal branch and
    several non-principal branches, for real and complex arguments."""
    mp.dps = 15
    # Special values and limits on the principal branch.
    assert lambertw(0) == 0
    assert lambertw(0+0j) == 0
    assert lambertw(inf) == inf
    assert isnan(lambertw(nan))
    # Branch k=1 behavior at the infinities.
    assert lambertw(inf,1).real == inf
    assert lambertw(inf,1).imag.ae(2*pi)
    assert lambertw(-inf,1).real == inf
    assert lambertw(-inf,1).imag.ae(3*pi)
    # W_k(0) = -inf on every branch except k=0.
    assert lambertw(0,-1) == -inf
    assert lambertw(0,1) == -inf
    assert lambertw(0,3) == -inf
    # Classic identities: W(e) = 1, W(1) = omega constant,
    # W(-pi/2) = i*pi/2, W(-ln2/2) = -ln2.
    assert lambertw(e).ae(1)
    assert lambertw(1).ae(0.567143290409783873)
    assert lambertw(-pi/2).ae(j*pi/2)
    assert lambertw(-log(2)/2).ae(-log(2))
    # Real arguments on various branches.
    assert lambertw(0.25).ae(0.203888354702240164)
    assert lambertw(-0.25).ae(-0.357402956181388903)
    assert lambertw(-1./10000,0).ae(-0.000100010001500266719)
    assert lambertw(-0.25,-1).ae(-2.15329236411034965)
    assert lambertw(0.25,-1).ae(-3.00899800997004620-4.07652978899159763j)
    assert lambertw(-0.25,-1).ae(-2.15329236411034965)
    assert lambertw(0.25,1).ae(-3.00899800997004620+4.07652978899159763j)
    assert lambertw(-0.25,1).ae(-3.48973228422959210+7.41405453009603664j)
    assert lambertw(-4).ae(0.67881197132094523+1.91195078174339937j)
    assert lambertw(-4,1).ae(-0.66743107129800988+7.76827456802783084j)
    assert lambertw(-4,-1).ae(0.67881197132094523-1.91195078174339937j)
    assert lambertw(1000).ae(5.24960285240159623)
    assert lambertw(1000,1).ae(4.91492239981054535+5.44652615979447070j)
    assert lambertw(1000,-1).ae(4.91492239981054535-5.44652615979447070j)
    assert lambertw(1000,5).ae(3.5010625305312892+29.9614548941181328j)
    # Complex arguments on various branches.
    assert lambertw(3+4j).ae(1.281561806123775878+0.533095222020971071j)
    assert lambertw(-0.4+0.4j).ae(-0.10396515323290657+0.61899273315171632j)
    assert lambertw(3+4j,1).ae(-0.11691092896595324+5.61888039871282334j)
    assert lambertw(3+4j,-1).ae(0.25856740686699742-3.85211668616143559j)
    assert lambertw(-0.5,-1).ae(-0.794023632344689368-0.770111750510379110j)
    # Small arguments near the branch point at 0.
    assert lambertw(-1./10000,1).ae(-11.82350837248724344+6.80546081842002101j)
    assert lambertw(-1./10000,-1).ae(-11.6671145325663544)
    assert lambertw(-1./10000,-2).ae(-11.82350837248724344-6.80546081842002101j)
    assert lambertw(-1./100000,4).ae(-14.9186890769540539+26.1856750178782046j)
    assert lambertw(-1./100000,5).ae(-15.0931437726379218666+32.5525721210262290086j)
    assert lambertw((2+j)/10).ae(0.173704503762911669+0.071781336752835511j)
    assert lambertw((2+j)/10,1).ae(-3.21746028349820063+4.56175438896292539j)
    assert lambertw((2+j)/10,-1).ae(-3.03781405002993088-3.53946629633505737j)
    assert lambertw((2+j)/10,4).ae(-4.6878509692773249+23.8313630697683291j)
    assert lambertw(-(2+j)/10).ae(-0.226933772515757933-0.164986470020154580j)
    assert lambertw(-(2+j)/10,1).ae(-2.43569517046110001+0.76974067544756289j)
    assert lambertw(-(2+j)/10,-1).ae(-3.54858738151989450-6.91627921869943589j)
    assert lambertw(-(2+j)/10,4).ae(-4.5500846928118151+20.6672982215434637j)
    # High-precision evaluation.
    mp.dps = 50
    assert lambertw(pi).ae('1.073658194796149172092178407024821347547745350410314531')
    mp.dps = 15
    # Former bug in generated branch
    assert lambertw(-0.5+0.002j).ae(-0.78917138132659918344 + 0.76743539379990327749j)
    assert lambertw(-0.5-0.002j).ae(-0.78917138132659918344 - 0.76743539379990327749j)
    assert lambertw(-0.448+0.4j).ae(-0.11855133765652382241 + 0.66570534313583423116j)
    assert lambertw(-0.448-0.4j).ae(-0.11855133765652382241 - 0.66570534313583423116j)
def test_meijerg():
    """Spot checks for the Meijer G-function, including the inverted-argument
    (1/z) series and the representation of 1F1 in terms of Meijer G."""
    mp.dps = 15
    assert meijerg([[2,3],[1]],[[0.5,2],[3,4]], 2.5).ae(4.2181028074787439386)
    assert meijerg([[],[1+j]],[[1],[1]], 3+4j).ae(271.46290321152464592 - 703.03330399954820169j)
    assert meijerg([[0.25],[1]],[[0.5],[2]],0) == 0
    assert meijerg([[0],[]],[[0,0,'1/3','2/3'], []], '2/27').ae(2.2019391389653314120)
    # Verify 1/z series being used
    assert meijerg([[-3],[-0.5]], [[-1],[-2.5]], -0.5).ae(-1.338096165935754898687431)
    # Same value via the a_i -> 1-a_i, b_i -> 1-b_i, z -> 1/z transformation.
    assert meijerg([[1-(-1)],[1-(-2.5)]], [[1-(-3)],[1-(-0.5)]], -2.0).ae(-1.338096165935754898687431)
    assert meijerg([[-3],[-0.5]], [[-1],[-2.5]], -1).ae(-(pi+4)/(4*pi))
    # Cross-check hyp1f1(a,b,z) against its two Meijer G representations
    # (direct argument and inverted argument).
    a = 2.5
    b = 1.25
    for z in [mpf(0.25), mpf(2)]:
        x1 = hyp1f1(a,b,z)
        x2 = gamma(b)/gamma(a)*meijerg([[1-a],[]],[[0],[1-b]],-z)
        x3 = gamma(b)/gamma(a)*meijerg([[1-0],[1-(1-b)]],[[1-(1-a)],[]],-1/z)
        assert x1.ae(x2)
        assert x1.ae(x3)
def test_appellf1():
    """Spot checks for the Appell F1 two-variable hypergeometric function."""
    mp.dps = 15
    # Cases with a negative integer parameter terminate as polynomials
    # and have simple exact values.
    assert appellf1(2,-2,1,1,2,3).ae(-1.75)
    assert appellf1(2,1,-2,1,2,3).ae(-8)
    assert appellf1(2,1,-2,1,0.5,0.25).ae(1.5)
    assert appellf1(-2,1,3,2,3,3).ae(19)
    # Generic (non-terminating) case.
    assert appellf1(1,2,3,4,0.5,0.125).ae( 1.53843285792549786518)
def test_coulomb():
    """Regression test for coulombg with a complex-typed (but real-valued)
    order parameter."""
    # Note: most tests are doctests
    # Test for a bug:
    mp.dps = 15
    assert coulombg(mpc(-5,0),2,3).ae(20.087729487721430394)
def test_hyper_param_accuracy():
    """Stress-test hypergeometric-type functions with numerically delicate
    parameter combinations (near-negative-integer parameters, very large
    parameters, heavy cancellation), checking that the working precision
    is raised enough internally to return accurate results."""
    mp.dps = 15
    # Parameters perturbed slightly off negative integers.
    As = [n+1e-10 for n in range(-5,-1)]
    Bs = [n+1e-10 for n in range(-12,-5)]
    assert hyper(As,Bs,10).ae(-381757055858.652671927)
    assert legenp(0.5, 100, 0.25).ae(-2.4124576567211311755e+144)
    assert (hyp1f1(1000,1,-100)*10**24).ae(5.2589445437370169113)
    assert (hyp2f1(10, -900, 10.5, 0.99)*10**24).ae(1.9185370579660768203)
    assert (hyp2f1(1000,1.5,-3.5,-1.5)*10**385).ae(-2.7367529051334000764)
    assert hyp2f1(-5, 10, 3, 0.5, zeroprec=500) == 0
    assert (hyp1f1(-10000, 1000, 100)*10**424).ae(-3.1046080515824859974)
    assert (hyp2f1(1000,1.5,-3.5,-0.75,maxterms=100000)*10**231).ae(-4.0534790813913998643)
    assert legenp(2, 3, 0.25) == 0
    # hypercomb must raise rather than return a spurious finite value when
    # the combination diverges and no infprec override is supplied ...
    try:
        hypercomb(lambda a: [([],[],[],[],[a],[-a],0.5)], [3])
        assert 0
    except ValueError:
        pass
    # ... but with an explicit infprec it should detect the infinity.
    assert hypercomb(lambda a: [([],[],[],[],[a],[-a],0.5)], [3], infprec=200) == inf
    assert meijerg([[],[]],[[0,0,0,0],[]],0.1).ae(1.5680822343832351418)
    assert (besselk(400,400)*10**94).ae(1.4387057277018550583)
    # BUG FIX: the following four checks previously discarded the result of
    # .ae() (no assert), so they never actually verified anything.
    mp.dps = 5
    assert (hyp1f1(-5000.5, 1500, 100)*10**185).ae(8.5185229673381935522)
    assert (hyp1f1(-5000, 1500, 100)*10**185).ae(9.1501213424563944311)
    mp.dps = 15
    assert (hyp1f1(-5000.5, 1500, 100)*10**185).ae(8.5185229673381935522)
    assert (hyp1f1(-5000, 1500, 100)*10**185).ae(9.1501213424563944311)
    # Exact rational parameter extremely close to a negative integer,
    # given both as an exact mpf sum and as a rational tuple.
    assert hyp0f1(fadd(-20,'1e-100',exact=True), 0.25).ae(1.85014429040102783e+49)
    assert hyp0f1((-20*10**100+1, 10**100), 0.25).ae(1.85014429040102783e+49)
def test_hypercomb_zero_pow():
    """Verify that hypercomb evaluates 0^0 as 1 in its weight factors."""
    # check that 0^0 = 1
    assert hypercomb(lambda a: (([0],[a],[],[],[],[],0),), [0]) == 1
    assert meijerg([[-1.5],[]],[[0],[-0.75]],0).ae(1.4464090846320771425)
def test_spherharm():
    """Spot checks for spherical harmonics Y_l^m(theta, phi): small integer
    degrees/orders, the |m| > l zero cases, and non-integer/negative
    degree and order."""
    mp.dps = 15
    t = 0.5; r = 0.25
    # Integer degrees l = 0..3 with all orders -l..l.
    assert spherharm(0,0,t,r).ae(0.28209479177387814347)
    assert spherharm(1,-1,t,r).ae(0.16048941205971996369 - 0.04097967481096344271j)
    assert spherharm(1,0,t,r).ae(0.42878904414183579379)
    assert spherharm(1,1,t,r).ae(-0.16048941205971996369 - 0.04097967481096344271j)
    assert spherharm(2,-2,t,r).ae(0.077915886919031181734 - 0.042565643022253962264j)
    assert spherharm(2,-1,t,r).ae(0.31493387233497459884 - 0.08041582001959297689j)
    assert spherharm(2,0,t,r).ae(0.41330596756220761898)
    assert spherharm(2,1,t,r).ae(-0.31493387233497459884 - 0.08041582001959297689j)
    assert spherharm(2,2,t,r).ae(0.077915886919031181734 + 0.042565643022253962264j)
    assert spherharm(3,-3,t,r).ae(0.033640236589690881646 - 0.031339125318637082197j)
    assert spherharm(3,-2,t,r).ae(0.18091018743101461963 - 0.09883168583167010241j)
    assert spherharm(3,-1,t,r).ae(0.42796713930907320351 - 0.10927795157064962317j)
    assert spherharm(3,0,t,r).ae(0.27861659336351639787)
    assert spherharm(3,1,t,r).ae(-0.42796713930907320351 - 0.10927795157064962317j)
    assert spherharm(3,2,t,r).ae(0.18091018743101461963 + 0.09883168583167010241j)
    assert spherharm(3,3,t,r).ae(-0.033640236589690881646 - 0.031339125318637082197j)
    # Y_l^m vanishes identically for integer |m| > l.
    assert spherharm(0,-1,t,r) == 0
    assert spherharm(0,-2,t,r) == 0
    assert spherharm(0,1,t,r) == 0
    assert spherharm(0,2,t,r) == 0
    assert spherharm(1,2,t,r) == 0
    assert spherharm(1,3,t,r) == 0
    assert spherharm(1,-2,t,r) == 0
    assert spherharm(1,-3,t,r) == 0
    assert spherharm(2,3,t,r) == 0
    assert spherharm(2,4,t,r) == 0
    assert spherharm(2,-3,t,r) == 0
    assert spherharm(2,-4,t,r) == 0
    # Non-integer order and fully complex parameters.
    assert spherharm(3,4.5,0.5,0.25).ae(-22.831053442240790148 + 10.910526059510013757j)
    assert spherharm(2+3j, 1-j, 1+j, 3+4j).ae(-2.6582752037810116935 - 1.0909214905642160211j)
    # Negative and non-integer degrees.
    assert spherharm(-6,2.5,t,r).ae(0.39383644983851448178 + 0.28414687085358299021j)
    assert spherharm(-3.5, 3, 0.5, 0.25).ae(0.014516852987544698924 - 0.015582769591477628495j)
    assert spherharm(-3, 3, 0.5, 0.25) == 0
    assert spherharm(-6, 3, 0.5, 0.25).ae(-0.16544349818782275459 - 0.15412657723253924562j)
    assert spherharm(-6, 1.5, 0.5, 0.25).ae(0.032208193499767402477 + 0.012678000924063664921j)
    # theta = 0 (pole) cases.
    assert spherharm(3,0,0,1).ae(0.74635266518023078283)
    assert spherharm(3,-2,0,1) == 0
    assert spherharm(3,-2,1,1).ae(-0.16270707338254028971 - 0.35552144137546777097j)
def test_qfunctions():
mp.dps = 15
assert qp(2,3,100).ae('2.7291482267247332183e2391')
|
from fnmatch import fnmatch
from itertools import groupby
import os
from trac.core import *
from trac.config import Option
from trac.perm import PermissionSystem, IPermissionPolicy
ConfigObj = None
try:
from configobj import ConfigObj
except ImportError:
pass
class AuthzPolicy(Component):
    """Permission policy using an authz-like configuration file.

    Refer to SVN documentation for syntax of the authz file. Groups are
    supported.

    As the fine-grained permissions brought by this permission policy are
    often used in complement of the other permission policies (like the
    `DefaultPermissionPolicy`), there's no need to redefine all the
    permissions here. Only additional rights or restrictions should be added.

    === Installation ===
    Note that this plugin requires the `configobj` package:

        http://www.voidspace.org.uk/python/configobj.html

    You should be able to install it by doing a simple `easy_install configobj`

    Enabling this policy requires listing it in `trac.ini`:
    {{{
    [trac]
    permission_policies = AuthzPolicy, DefaultPermissionPolicy

    [authz_policy]
    authz_file = conf/authzpolicy.conf
    }}}

    This means that the `AuthzPolicy` permissions will be checked first, and
    only if no rule is found will the `DefaultPermissionPolicy` be used.

    === Configuration ===
    The `authzpolicy.conf` file is a `.ini` style configuration file.

    - Each section of the config is a glob pattern used to match against a
      Trac resource descriptor. These descriptors are in the form:
      {{{
      <realm>:<id>@<version>[/<realm>:<id>@<version> ...]
      }}}
      Resources are ordered left to right, from parent to child. If any
      component is inapplicable, `*` is substituted. If the version pattern
      is not specified explicitly, all versions (`@*`) is added implicitly

      Example: Match the WikiStart page
      {{{
      [wiki:*]
      [wiki:WikiStart*]
      [wiki:WikiStart@*]
      [wiki:WikiStart]
      }}}

      Example: Match the attachment `wiki:WikiStart@117/attachment/FOO.JPG@*`
      on WikiStart
      {{{
      [wiki:*]
      [wiki:WikiStart*]
      [wiki:WikiStart@*]
      [wiki:WikiStart@*/attachment/*]
      [wiki:WikiStart@117/attachment/FOO.JPG]
      }}}

    - Sections are checked against the current Trac resource '''IN ORDER''' of
      appearance in the configuration file. '''ORDER IS CRITICAL'''.

    - Once a section matches, the current username is matched, '''IN ORDER''',
      against the keys of the section. If a key is prefixed with a `@`, it is
      treated as a group. If a key is prefixed with a `!`, the permission is
      denied rather than granted. The username will match any of 'anonymous',
      'authenticated', <username> or '*', using normal Trac permission rules.

    Example configuration:
    {{{
    [groups]
    administrators = athomas

    [*/attachment:*]
    * = WIKI_VIEW, TICKET_VIEW

    [wiki:WikiStart@*]
    @administrators = WIKI_ADMIN
    anonymous = WIKI_VIEW
    * = WIKI_VIEW

    # Deny access to page templates
    [wiki:PageTemplates/*]
    * =

    # Match everything else
    [*]
    @administrators = TRAC_ADMIN
    anonymous = BROWSER_VIEW, CHANGESET_VIEW, FILE_VIEW, LOG_VIEW,
    MILESTONE_VIEW, POLL_VIEW, REPORT_SQL_VIEW, REPORT_VIEW, ROADMAP_VIEW,
    SEARCH_VIEW, TICKET_CREATE, TICKET_MODIFY, TICKET_VIEW, TIMELINE_VIEW,
    WIKI_CREATE, WIKI_MODIFY, WIKI_VIEW
    # Give authenticated users some extra permissions
    authenticated = REPO_SEARCH, XML_RPC
    }}}
    """
    implements(IPermissionPolicy)

    authz_file = Option('authz_policy', 'authz_file', None,
                        'Location of authz policy configuration file.')

    # Parsed ConfigObj instance and the file mtime at parse time, used to
    # lazily (re)load the policy when the file changes on disk.
    authz = None
    authz_mtime = None

    # IPermissionPolicy methods

    def check_permission(self, action, username, resource, perm):
        """Check `action` for `username` on `resource` against the authz
        file.  Return True (granted), False (denied) or None (no opinion).
        """
        if ConfigObj is None:
            self.log.error('configobj package not found')
            return None
        # (Re)parse the configuration when it has never been loaded or the
        # file changed on disk.  BUG FIX: the original condition lacked
        # parentheses, so `and` bound tighter than `or` and
        # os.path.getmtime() could be evaluated even when no authz_file
        # was configured at all.
        if self.authz_file and (not self.authz_mtime or
                os.path.getmtime(self.get_authz_file()) > self.authz_mtime):
            self.parse_authz()
        resource_key = self.normalise_resource(resource)
        self.log.debug('Checking %s on %s', action, resource_key)
        permissions = self.authz_permissions(resource_key, username)
        if permissions is None:
            return None                 # no match, can't decide
        elif permissions == ['']:
            return False                # all actions are denied
        # FIXME: expand all permissions once for all
        ps = PermissionSystem(self.env)
        for deny, perms in groupby(permissions,
                                   key=lambda p: p.startswith('!')):
            if deny and action in ps.expand_actions([p[1:] for p in perms]):
                return False            # action is explicitly denied
            elif action in ps.expand_actions(perms):
                return True             # action is explicitly granted
        return None                     # no match for action, can't decide

    # Internal methods

    def get_authz_file(self):
        """Return the absolute path of the configured authz file."""
        f = self.authz_file
        # `and/or` conditional idiom kept for old-Python compatibility,
        # matching the style of the rest of the file.
        return os.path.isabs(f) and f or os.path.join(self.env.path, f)

    def parse_authz(self):
        """Parse the authz file and rebuild the user -> groups mapping."""
        self.env.log.debug('Parsing authz security policy %s' %
                           self.get_authz_file())
        self.authz = ConfigObj(self.get_authz_file())
        self.groups_by_user = {}
        for group, users in self.authz.get('groups', {}).iteritems():
            # A group with a single member parses as a plain string.
            if isinstance(users, basestring):
                users = [users]
            for user in users:
                self.groups_by_user.setdefault(user, set()).add('@' + group)
        self.authz_mtime = os.path.getmtime(self.get_authz_file())

    def normalise_resource(self, resource):
        """Flatten a resource hierarchy into a `realm:id@version/...` key."""
        def flatten(resource):
            if not resource or not (resource.realm or resource.id):
                return []
            # XXX Due to the mixed functionality in resource we can end up
            # with ticket, ticket:1, ticket:1@10. This code naively
            # collapses all subsets of the parent resource into one.
            # eg. ticket:1@10
            parent = resource.parent
            # Simplified from `A or (A and B)`, which is logically just A.
            while parent and resource.realm == parent.realm:
                parent = parent.parent
            if parent:
                parent = flatten(parent)
            else:
                parent = []
            return parent + ['%s:%s@%s' % (resource.realm or '*',
                                           resource.id or '*',
                                           resource.version or '*')]
        return '/'.join(flatten(resource))

    def authz_permissions(self, resource_key, username):
        """Return the permission list of the first section matching both
        `resource_key` and `username`, or None when nothing matches.
        """
        # TODO: Handle permission negation in sections. eg. "if in this
        # ticket, remove TICKET_MODIFY"
        valid_users = ['*', 'anonymous']
        if username and username != 'anonymous':
            valid_users = ['*', 'authenticated', username]
        for resource_section in [a for a in self.authz.sections
                                 if a != 'groups']:
            resource_glob = resource_section
            if '@' not in resource_glob:
                # No version specified: match all versions implicitly.
                resource_glob += '@*'
            if fnmatch(resource_key, resource_glob):
                section = self.authz[resource_section]
                for who, permissions in section.iteritems():
                    if who in valid_users or \
                            who in self.groups_by_user.get(username, []):
                        self.env.log.debug('%s matched section %s for user %s'
                                           % (resource_key, resource_glob, username))
                        # A single permission parses as a plain string.
                        if isinstance(permissions, basestring):
                            return [permissions]
                        else:
                            return permissions
        return None
|
from __future__ import unicode_literals
from datetime import timedelta
import logging
import os
import re
import time
from django.conf import settings
from django.db import models
from django.db.models import Q
from django.utils.crypto import get_random_string
from django.utils.encoding import python_2_unicode_compatible
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
import requests
from mama_cas.compat import Session
from mama_cas.exceptions import InvalidProxyCallback
from mama_cas.exceptions import InvalidRequest
from mama_cas.exceptions import InvalidService
from mama_cas.exceptions import InvalidTicket
from mama_cas.exceptions import UnauthorizedServiceProxy
from mama_cas.exceptions import ValidationError
from mama_cas.request import SingleSignOutRequest
from mama_cas.services import get_logout_url
from mama_cas.services import logout_allowed
from mama_cas.services import service_allowed
from mama_cas.services import proxy_allowed
from mama_cas.services import proxy_callback_allowed
from mama_cas.utils import add_query_params
from mama_cas.utils import clean_service_url
from mama_cas.utils import is_scheme_https
from mama_cas.utils import match_service
logger = logging.getLogger(__name__)
class TicketManager(models.Manager):
    """Manager providing the shared create/validate/cleanup logic for
    all CAS ticket models."""

    def create_ticket(self, ticket=None, **kwargs):
        """
        Create a new ``Ticket``. Additional arguments are passed to the
        ``create()`` function. Return the newly created ``Ticket``.
        """
        if not ticket:
            ticket = self.create_ticket_str()
        # Normalize the service URL before storing it so later comparisons
        # against incoming service identifiers are consistent.
        if 'service' in kwargs:
            kwargs['service'] = clean_service_url(kwargs['service'])
        # Default expiration is TICKET_EXPIRE seconds from now.
        if 'expires' not in kwargs:
            expires = now() + timedelta(seconds=self.model.TICKET_EXPIRE)
            kwargs['expires'] = expires
        t = self.create(ticket=ticket, **kwargs)
        logger.debug("Created %s %s" % (t.name, t.ticket))
        return t

    def create_ticket_str(self, prefix=None):
        """
        Generate a sufficiently opaque ticket string to ensure the ticket is
        not guessable. If a prefix is provided, prepend it to the string.
        """
        if not prefix:
            prefix = self.model.TICKET_PREFIX
        # Format: <prefix>-<unix timestamp>-<random part>, which is the
        # shape that the model's TICKET_RE expects to match.
        return "%s-%d-%s" % (prefix, int(time.time()),
                             get_random_string(length=self.model.TICKET_RAND_LEN))

    def validate_ticket(self, ticket, service, renew=False, require_https=False):
        """
        Given a ticket string and service identifier, validate the
        corresponding ``Ticket``. If validation succeeds, return the
        ``Ticket``. If validation fails, raise an appropriate error.

        If ``renew`` is ``True``, ``ServiceTicket`` validation will
        only succeed if the ticket was issued from the presentation
        of the user's primary credentials.

        If ``require_https`` is ``True``, ``ServiceTicket`` validation
        will only succeed if the service URL scheme is HTTPS.

        Raises ``InvalidRequest``, ``InvalidTicket`` or ``InvalidService``.
        """
        if not ticket:
            raise InvalidRequest("No ticket string provided")
        # Cheap format check before hitting the database.
        if not self.model.TICKET_RE.match(ticket):
            raise InvalidTicket("Ticket string %s is invalid" % ticket)
        try:
            t = self.get(ticket=ticket)
        except self.model.DoesNotExist:
            raise InvalidTicket("Ticket %s does not exist" % ticket)
        # NOTE: is_consumed() consumes a previously unconsumed ticket as a
        # side effect, enforcing single use.
        if t.is_consumed():
            raise InvalidTicket("%s %s has already been used" %
                                (t.name, ticket))
        if t.is_expired():
            raise InvalidTicket("%s %s has expired" % (t.name, ticket))
        if not service:
            raise InvalidRequest("No service identifier provided")
        if require_https and not is_scheme_https(service):
            raise InvalidService("Service %s is not HTTPS" % service)
        if not service_allowed(service):
            raise InvalidService("Service %s is not a valid %s URL" %
                                 (service, t.name))
        # Ticket models without a ``service`` field (e.g. proxy-granting
        # tickets) skip the service comparison via the AttributeError.
        try:
            if not match_service(t.service, service):
                raise InvalidService("%s %s for service %s is invalid for "
                                     "service %s" % (t.name, ticket, t.service, service))
        except AttributeError:
            pass
        # Only ``ServiceTicket`` defines ``is_primary()``; other models
        # skip the renew check via the AttributeError.
        try:
            if renew and not t.is_primary():
                raise InvalidTicket("%s %s was not issued via primary "
                                    "credentials" % (t.name, ticket))
        except AttributeError:
            pass
        logger.debug("Validated %s %s" % (t.name, ticket))
        return t

    def delete_invalid_tickets(self):
        """
        Delete consumed or expired ``Ticket``s that are not referenced
        by other ``Ticket``s. Invalid tickets are no longer valid for
        authentication and can be safely deleted.

        A custom management command is provided that executes this method
        on all applicable models by running ``manage.py cleanupcas``.
        """
        for ticket in self.filter(Q(consumed__isnull=False) |
                                  Q(expires__lte=now())).order_by('-expires'):
            try:
                ticket.delete()
            except models.ProtectedError:
                # Still referenced through an on_delete=PROTECT foreign key
                # (e.g. a proxy-granting ticket granted by this ticket).
                pass

    def consume_tickets(self, user):
        """
        Consume all valid ``Ticket``s for a specified user. This is run
        when the user logs out to ensure all issued tickets are no longer
        valid for future authentication attempts.
        """
        for ticket in self.filter(user=user, consumed__isnull=True,
                                  expires__gt=now()):
            ticket.consume()
@python_2_unicode_compatible
class Ticket(models.Model):
    """
    ``Ticket`` is an abstract base class implementing common methods
    and fields for CAS tickets.
    """
    # Ticket lifetime in seconds.
    TICKET_EXPIRE = getattr(settings, 'MAMA_CAS_TICKET_EXPIRE', 90)
    # Length of the random portion of a ticket string.
    TICKET_RAND_LEN = getattr(settings, 'MAMA_CAS_TICKET_RAND_LEN', 32)
    # Matches <PREFIX>-<unix timestamp>-<random part>, the format produced
    # by TicketManager.create_ticket_str().
    TICKET_RE = re.compile("^[A-Z]{2,3}-[0-9]{10,}-[a-zA-Z0-9]{%d}$" % TICKET_RAND_LEN)

    ticket = models.CharField(_('ticket'), max_length=255, unique=True)
    user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=_('user'),
                             on_delete=models.CASCADE)
    expires = models.DateTimeField(_('expires'))
    consumed = models.DateTimeField(_('consumed'), null=True)

    objects = TicketManager()

    class Meta:
        abstract = True

    def __str__(self):
        return self.ticket

    @property
    def name(self):
        # Human-readable ticket type taken from the concrete model's
        # verbose_name, e.g. "service ticket".
        return self._meta.verbose_name

    def consume(self):
        """
        Consume a ``Ticket`` by populating the ``consumed`` field with
        the current datetime. A consumed ``Ticket`` is invalid for future
        authentication attempts.
        """
        self.consumed = now()
        self.save()

    def is_consumed(self):
        """
        Check a ``Ticket``s consumed state, consuming it in the process.
        """
        # NOTE: deliberate side effect -- a previously unconsumed ticket is
        # consumed by the act of checking it, enforcing single use.
        if self.consumed is None:
            self.consume()
            return False
        return True

    def is_expired(self):
        """
        Check a ``Ticket``s expired state. Return ``True`` if the ticket is
        expired, and ``False`` otherwise.
        """
        return self.expires <= now()
class ServiceTicketManager(TicketManager):
    def request_sign_out(self, user):
        """
        Send a single logout request to each service accessed by a
        specified user. This is called at logout when single logout
        is enabled.

        If requests-futures is installed, asynchronous requests will
        be sent. Otherwise, synchronous requests will be sent.
        """
        # One shared session for all sign-out requests. Only tickets
        # consumed during the current login session (consumed on or after
        # last_login) are notified.
        session = Session()
        for ticket in self.filter(user=user, consumed__gte=user.last_login):
            ticket.request_sign_out(session=session)
class ServiceTicket(Ticket):
    """
    (3.1) A ``ServiceTicket`` is used by the client as a credential to
    obtain access to a service. It is obtained upon a client's presentation
    of credentials and a service identifier to /login.
    """
    TICKET_PREFIX = 'ST'

    # URL of the service the ticket was issued for.
    service = models.CharField(_('service'), max_length=255)
    # Whether the ticket was issued from the presentation of the user's
    # primary credentials.
    primary = models.BooleanField(_('primary'), default=False)

    objects = ServiceTicketManager()

    class Meta:
        verbose_name = _('service ticket')
        verbose_name_plural = _('service tickets')

    def is_primary(self):
        """
        Check the credential origin for a ``ServiceTicket``. If the ticket was
        issued from the presentation of the user's primary credentials,
        return ``True``, otherwise return ``False``.
        """
        # Simplified from an `if ...: return True / return False` chain;
        # ``primary`` is a BooleanField, so it already holds a bool.
        return self.primary

    def request_sign_out(self, session=requests):
        """
        Send a POST request to the ``ServiceTicket``s logout URL to
        request sign-out.
        """
        if logout_allowed(self.service):
            request = SingleSignOutRequest(context={'ticket': self})
            # Fall back to the service URL itself when no dedicated logout
            # URL is configured.
            url = get_logout_url(self.service) or self.service
            session.post(url, data={'logoutRequest': request.render_content()})
            logger.info("Single sign-out request sent to %s" % url)
class ProxyTicket(Ticket):
    """
    (3.2) A ``ProxyTicket`` is used by a service as a credential to obtain
    access to a back-end service on behalf of a client. It is obtained upon
    a service's presentation of a ``ProxyGrantingTicket`` and a service
    identifier.
    """
    TICKET_PREFIX = 'PT'

    # URL of the back-end service the ticket grants access to.
    service = models.CharField(_('service'), max_length=255)
    # The proxy-granting ticket this ticket was issued from; deleting the
    # PGT cascades to its proxy tickets.
    granted_by_pgt = models.ForeignKey('ProxyGrantingTicket',
                                       verbose_name=_('granted by proxy-granting ticket'),
                                       on_delete=models.CASCADE)

    class Meta:
        verbose_name = _('proxy ticket')
        verbose_name_plural = _('proxy tickets')
class ProxyGrantingTicketManager(TicketManager):
    def create_ticket(self, service, pgturl, **kwargs):
        """
        When a ``pgtUrl`` parameter is provided to ``/serviceValidate`` or
        ``/proxyValidate``, attempt to create a new ``ProxyGrantingTicket``.
        If validation succeeds, create and return the ``ProxyGrantingTicket``.
        If validation fails, return ``None``.
        """
        # Generate both ticket strings up front: they are delivered to the
        # callback during validation, before the PGT is persisted.
        pgtid = self.create_ticket_str()
        pgtiou = self.create_ticket_str(prefix=self.model.IOU_PREFIX)
        try:
            self.validate_callback(service, pgturl, pgtid, pgtiou)
        except ValidationError as e:
            logger.warning("%s %s" % (e.code, e))
            return None
        else:
            # pgtUrl validation succeeded, so create a new PGT with the
            # previously generated ticket strings
            return super(ProxyGrantingTicketManager, self).create_ticket(ticket=pgtid, iou=pgtiou, **kwargs)

    def validate_callback(self, service, pgturl, pgtid, pgtiou):
        """Verify the provided proxy callback URL.

        Raises ``UnauthorizedServiceProxy`` or ``InvalidProxyCallback`` on
        failure; returns ``None`` on success.
        """
        if not proxy_allowed(service):
            raise UnauthorizedServiceProxy("%s is not authorized to use proxy authentication" % service)
        # The callback must be an authorized HTTPS URL.
        if not is_scheme_https(pgturl):
            raise InvalidProxyCallback("Proxy callback %s is not HTTPS" % pgturl)
        if not proxy_callback_allowed(service, pgturl):
            raise InvalidProxyCallback("%s is not an authorized proxy callback URL" % pgturl)
        # Verify that the SSL certificate is valid
        # (honor a custom CA bundle when one is configured).
        verify = os.environ.get('REQUESTS_CA_BUNDLE', True)
        try:
            requests.get(pgturl, verify=verify, timeout=5)
        except requests.exceptions.SSLError:
            raise InvalidProxyCallback("SSL certificate validation failed for proxy callback %s" % pgturl)
        except requests.exceptions.RequestException as e:
            raise InvalidProxyCallback(e)
        # Callback certificate appears valid, so send the ticket strings
        pgturl = add_query_params(pgturl, {'pgtId': pgtid, 'pgtIou': pgtiou})
        try:
            response = requests.get(pgturl, verify=verify, timeout=5)
        except requests.exceptions.RequestException as e:
            raise InvalidProxyCallback(e)
        # The callback must answer with a non-error HTTP status.
        try:
            response.raise_for_status()
        except requests.exceptions.HTTPError as e:
            raise InvalidProxyCallback("Proxy callback %s returned %s" % (pgturl, e))
class ProxyGrantingTicket(Ticket):
    """
    (3.3) A ``ProxyGrantingTicket`` is used by a service to obtain proxy
    tickets for obtaining access to a back-end service on behalf of a
    client. It is obtained upon validation of a ``ServiceTicket`` or a
    ``ProxyTicket``.
    """
    TICKET_PREFIX = 'PGT'
    IOU_PREFIX = 'PGTIOU'
    # PGT lifetime is tied to Django's session length rather than the
    # short default ticket lifetime.
    TICKET_EXPIRE = getattr(settings, 'SESSION_COOKIE_AGE')

    iou = models.CharField(_('iou'), max_length=255, unique=True)
    # PROTECT keeps the granting ticket from being deleted while this
    # PGT still references it (see TicketManager.delete_invalid_tickets).
    granted_by_st = models.ForeignKey(ServiceTicket, null=True, blank=True,
                                      on_delete=models.PROTECT,
                                      verbose_name=_('granted by service ticket'))
    granted_by_pt = models.ForeignKey(ProxyTicket, null=True, blank=True,
                                      on_delete=models.PROTECT,
                                      verbose_name=_('granted by proxy ticket'))

    objects = ProxyGrantingTicketManager()

    class Meta:
        verbose_name = _('proxy-granting ticket')
        verbose_name_plural = _('proxy-granting tickets')

    def is_consumed(self):
        """Check a ``ProxyGrantingTicket``s consumed state."""
        # Unlike the base class, checking does NOT consume the ticket:
        # a PGT may be used repeatedly to obtain proxy tickets.
        return self.consumed is not None
|
"""
Model and manager used by the two-step (sign up, then activate)
workflow. If you're not using that workflow, you don't need to have
'registration' in your INSTALLED_APPS.
This is provided primarily for backwards-compatibility with existing
installations; new installs of django-registration should look into
the HMAC activation workflow in registration.backends.hmac, which
provides a two-step process but requires no models or storage of the
activation key.
"""
import datetime
import hashlib
import re
from django.conf import settings
from django.contrib.auth import get_user_model
from django.db import models
from django.db import transaction
from django.template.loader import render_to_string
from django.utils.crypto import get_random_string
from django.utils.encoding import python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
from django.utils import timezone
SHA1_RE = re.compile('^[a-f0-9]{40}$')
class RegistrationManager(models.Manager):
    """
    Custom manager for the ``RegistrationProfile`` model.

    The methods defined here provide shortcuts for account creation
    and activation (including generation and emailing of activation
    keys), and for cleaning out expired inactive accounts.
    """
    def activate_user(self, activation_key):
        """
        Validate an activation key and activate the corresponding
        ``User`` if valid.

        If the key is valid and has not expired, return the ``User``
        after activating.

        If the key is not valid or has expired, return ``False``.

        If the key is valid but the ``User`` is already active,
        return ``False``.

        To prevent reactivation of an account which has been
        deactivated by site administrators, the activation key is
        reset to the string constant ``RegistrationProfile.ACTIVATED``
        after successful activation.
        """
        # Make sure the key we're trying conforms to the pattern of a
        # SHA1 hash; if it doesn't, no point trying to look it up in
        # the database.
        if SHA1_RE.search(activation_key):
            try:
                profile = self.get(activation_key=activation_key)
            except self.model.DoesNotExist:
                return False
            # An already-activated profile has its key set to ACTIVATED,
            # which never matches SHA1_RE, so it cannot reach this point;
            # only unexpired pending profiles are activated here.
            if not profile.activation_key_expired():
                user = profile.user
                user.is_active = True
                user.save()
                profile.activation_key = self.model.ACTIVATED
                profile.save()
                return user
        return False

    def create_inactive_user(self, username, email, password,
                             site, send_email=True):
        """
        Create a new, inactive ``User``, generate a
        ``RegistrationProfile`` and email its activation key to the
        ``User``, returning the new ``User``.

        By default, an activation email will be sent to the new
        user. To disable this, pass ``send_email=False``.
        """
        User = get_user_model()
        # Build the kwargs dynamically so a custom USERNAME_FIELD works.
        user_kwargs = {
            User.USERNAME_FIELD: username,
            'email': email,
            'password': password,
        }
        new_user = User.objects.create_user(**user_kwargs)
        # The account stays inactive until the activation key is used.
        new_user.is_active = False
        new_user.save()
        registration_profile = self.create_profile(new_user)
        if send_email:
            registration_profile.send_activation_email(site)
        return new_user
    # Applied as a call rather than @decorator syntax (legacy style);
    # wraps user + profile creation in a single transaction.
    create_inactive_user = transaction.atomic(create_inactive_user)

    def create_profile(self, user):
        """
        Create a ``RegistrationProfile`` for a given
        ``User``, and return the ``RegistrationProfile``.

        The activation key for the ``RegistrationProfile`` will be a
        SHA1 hash, generated from a combination of the ``User``'s
        username and a random salt.
        """
        User = get_user_model()
        username = str(getattr(user, User.USERNAME_FIELD))
        # 5-character random salt + username, hashed to a 40-char SHA1 hex
        # digest (the format SHA1_RE validates).
        hash_input = (get_random_string(5) + username).encode('utf-8')
        activation_key = hashlib.sha1(hash_input).hexdigest()
        return self.create(user=user,
                           activation_key=activation_key)

    @transaction.atomic
    def delete_expired_users(self):
        """
        Remove expired instances of ``RegistrationProfile`` and their
        associated ``User``s.

        Accounts to be deleted are identified by searching for
        instances of ``RegistrationProfile`` with expired activation
        keys, and then checking to see if their associated ``User``
        instances have the field ``is_active`` set to ``False``; any
        ``User`` who is both inactive and has an expired activation
        key will be deleted.

        It is recommended that this method be executed regularly as
        part of your routine site maintenance; this application
        provides a custom management command which will call this
        method, accessible as ``manage.py cleanupregistration``.

        Regularly clearing out accounts which have never been
        activated serves two useful purposes:

        1. It alleviates the occasional need to reset a
           ``RegistrationProfile`` and/or re-send an activation email
           when a user does not receive or does not act upon the
           initial activation email; since the account will be
           deleted, the user will be able to simply re-register and
           receive a new activation key.

        2. It prevents the possibility of a malicious user registering
           one or more accounts and never activating them (thus
           denying the use of those usernames to anyone else); since
           those accounts will be deleted, the usernames will become
           available for use again.

        If you have a troublesome ``User`` and wish to disable their
        account while keeping it in the database, simply delete the
        associated ``RegistrationProfile``; an inactive ``User`` which
        does not have an associated ``RegistrationProfile`` will not
        be deleted.
        """
        for profile in self.all():
            if profile.activation_key_expired():
                user = profile.user
                # Only delete accounts that were never activated.
                if not user.is_active:
                    profile.delete()
                    user.delete()
@python_2_unicode_compatible
class RegistrationProfile(models.Model):
    """
    A simple profile which stores an activation key for use during
    user account registration.
    Generally, you will not want to interact directly with instances
    of this model; the provided manager includes methods
    for creating and activating new accounts, as well as for cleaning
    out accounts which have never been activated.
    While it is possible to use this model as the value of the
    ``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do
    so. This model's sole purpose is to store data temporarily during
    account registration and activation.
    """
    # Sentinel stored in ``activation_key`` after successful activation;
    # since it can never equal a real 40-char key, it also guarantees the
    # key cannot validate a second time (see activation_key_expired()).
    ACTIVATED = u"ALREADY_ACTIVATED"
    user = models.OneToOneField(settings.AUTH_USER_MODEL,
                                verbose_name=_('user'))
    # Either a 40-character hex digest or the ACTIVATED sentinel above.
    activation_key = models.CharField(_('activation key'), max_length=40)
    objects = RegistrationManager()

    class Meta:
        verbose_name = _('registration profile')
        verbose_name_plural = _('registration profiles')

    def __str__(self):
        return "Registration information for %s" % self.user

    def activation_key_expired(self):
        """
        Determine whether this ``RegistrationProfile``'s activation
        key has expired, returning a boolean -- ``True`` if the key
        has expired.
        Key expiration is determined by a two-step process:
        1. If the user has already activated, the key will have been
           reset to the string constant ``ACTIVATED``. Re-activating
           is not permitted, and so this method returns ``True`` in
           this case.
        2. Otherwise, the date the user signed up is incremented by
           the number of days specified in the setting
           ``ACCOUNT_ACTIVATION_DAYS`` (which should be the number of
           days after signup during which a user is allowed to
           activate their account); if the result is less than or
           equal to the current date, the key has expired and this
           method returns ``True``.
        """
        expiration_date = datetime.timedelta(
            days=settings.ACCOUNT_ACTIVATION_DAYS
        )
        return self.activation_key == self.ACTIVATED or \
            (self.user.date_joined + expiration_date <= timezone.now())
    # Render as a boolean icon in the Django admin changelist.
    activation_key_expired.boolean = True

    def send_activation_email(self, site):
        """
        Send an activation email to the user associated with this
        ``RegistrationProfile``.
        The activation email will make use of two templates:
        ``registration/activation_email_subject.txt``
            This template will be used for the subject line of the
            email. Because it is used as the subject line of an email,
            this template's output **must** be only a single line of
            text; output longer than one line will be forcibly joined
            into only a single line.
        ``registration/activation_email.txt``
            This template will be used for the body of the email.
        These templates will each receive the following context
        variables:
        ``activation_key``
            The activation key for the new account.
        ``expiration_days``
            The number of days remaining during which the account may
            be activated.
        ``site``
            An object representing the site on which the user
            registered; depending on whether ``django.contrib.sites``
            is installed, this may be an instance of either
            ``django.contrib.sites.models.Site`` (if the sites
            application is installed) or
            ``django.contrib.sites.models.RequestSite`` (if
            not). Consult the documentation for the Django sites
            framework for details regarding these objects' interfaces.
        """
        ctx_dict = {'activation_key': self.activation_key,
                    'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,
                    'site': site}
        subject = render_to_string('registration/activation_email_subject.txt',
                                   ctx_dict)
        # Email subject *must not* contain newlines
        subject = ''.join(subject.splitlines())
        message = render_to_string('registration/activation_email.txt',
                                   ctx_dict)
        self.user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
|
from flask import Flask
from . import config
from . import ElaborateCharts
# Application bootstrap: build the Flask app, load the secret key from the
# package-local config module, and attach the charts extension.
# NOTE(review): the relative imports above mean this module must be executed
# as part of its package (e.g. ``python -m pkg``), not as a bare script.
app = Flask(__name__)
app.config['SECRET_KEY'] = config.SECRET_KEY  # used for session signing
charts = ElaborateCharts(app)

if __name__ == '__main__':
    # Development server only: bind to localhost with the debugger enabled.
    app.run(host='127.0.0.1', debug=True)
|
import glob
import os
from .. import *
@skip_if('java' not in test_features, 'skipping java tests')
@skip_if_backend('msbuild')
class TestJava(IntegrationTest):
    """Integration tests: build, run and install a simple Java program."""

    def __init__(self, *args, **kwargs):
        super().__init__(os.path.join('languages', 'java'), install=True,
                         *args, **kwargs)

    def test_build(self):
        """Build the jar, discard intermediate .class files, then run it."""
        self.build('program.jar')
        for class_file in glob.glob('*.class*'):
            os.remove(class_file)
        self.assertOutput(['java', '-jar', 'program.jar'],
                          'hello from java!\n')

    def test_install(self):
        """Install the jar and verify it still runs from the install tree."""
        self.build('install')
        self.assertDirectory(self.installdir, [
            os.path.join(self.libdir, 'program.jar'),
        ])

        # Wipe the build tree so the installed artifact is the only copy.
        os.chdir(self.srcdir)
        cleandir(self.builddir)
        installed_jar = os.path.join(self.libdir, 'program.jar')
        self.assertOutput(['java', '-jar', installed_jar],
                          'hello from java!\n')
@skip_if('gcj' not in test_features, 'skipping gcj tests')
class TestGcj(IntegrationTest):
    """Integration test for compiling the Java sample with gcj.

    Reuses the 'languages/java' sources but forces the JAVAC tool to gcj
    (overridable via the GCJ environment variable).
    """

    def __init__(self, *args, **kwargs):
        super().__init__(os.path.join('languages', 'java'),
                         extra_env={'JAVAC': os.getenv('GCJ', 'gcj')},
                         *args, **kwargs)

    def test_build(self):
        # gcj compiles ahead-of-time, so the output is a native executable
        # rather than a jar.
        self.build('program')
        self.assertOutput([executable('program')], 'hello from java!\n')
@skip_if('java' not in test_features, 'skipping java tests')
@skip_if_backend('msbuild')
class TestJavaLibrary(IntegrationTest):
    """Integration tests for a Java program linked against a library jar."""

    def __init__(self, *args, **kwargs):
        super().__init__(os.path.join('languages', 'java_library'),
                         install=True, *args, **kwargs)

    def test_build(self):
        """Build the program jar, drop .class intermediates, and run it."""
        self.build('program.jar')
        for class_file in glob.glob('*.class*'):
            os.remove(class_file)
        self.assertOutput(['java', '-jar', 'program.jar'],
                          'hello from library!\n')

    def test_install(self):
        """Install both jars and run the program from the install tree."""
        self.build('install')
        self.assertDirectory(self.installdir, [
            os.path.join(self.libdir, 'lib.jar'),
            os.path.join(self.libdir, 'program.jar'),
        ])

        # Remove the build tree so only the installed jars can be used.
        os.chdir(self.srcdir)
        cleandir(self.builddir)
        installed_jar = os.path.join(self.libdir, 'program.jar')
        self.assertOutput(['java', '-jar', installed_jar],
                          'hello from library!\n')

    def test_package(self):
        """Consume the installed library jar from a separate package build."""
        self.build('install')
        self.configure(
            srcdir=os.path.join('languages', 'java_package'), installdir=None,
            extra_env={'CLASSPATH': os.path.join(self.libdir, '*')}
        )
        self.build()
        self.assertOutput(['java', '-jar', 'program.jar'],
                          'hello from library!\n')
|
from __future__ import print_function
import re
import ast
import subprocess
import sys
from optparse import OptionParser
# Module-level switches; CONFIRM_STEPS and DRY_RUN are overwritten from the
# command-line options in the __main__ block below.
DEBUG = False           # when True, subprocess calls are stubbed out
CONFIRM_STEPS = False   # when True, prompt before running each step
DRY_RUN = False         # when True, print steps without executing them
def skip_step():
    """
    Asks for user's response whether to run a step. Default is yes.

    :return: boolean -- True when the user answered 'n' (skip the step),
        False otherwise.
    """
    # Only *read* the module-level flag; ``global`` is unnecessary for reads.
    if CONFIRM_STEPS:
        # The file carries ``from __future__ import print_function`` (py3
        # intent), but raw_input() does not exist on Python 3 -- fall back.
        try:
            prompt = raw_input  # Python 2
        except NameError:
            prompt = input      # Python 3
        choice = prompt("--- Confirm step? (y/N) [y] ")
        if choice.lower() == 'n':
            return True
    return False
def run_step(*args):
    """
    Prints out the command and asks if it should be run.
    If yes (default), runs it.

    :param args: list of strings (command and args)
    """
    print(' '.join(args))
    if skip_step():
        print('--- Skipping...')
        return
    if DRY_RUN:
        print('--- Pretending to run...')
        return
    subprocess.check_output(args)
def version(version_file):
    """Extract the ``__version__`` string from *version_file*.

    The file is read as UTF-8 and scanned for a ``__version__ = <literal>``
    assignment; the literal is evaluated safely with ``ast.literal_eval``.

    :param version_file: path to the file containing ``__version__``
    :return: the version as a string
    :raises ValueError: if no ``__version__`` assignment is found (the
        original code raised an opaque AttributeError on ``None.group``).
    """
    version_re = re.compile(r'__version__\s+=\s+(.*)')
    with open(version_file, 'rb') as f:
        match = version_re.search(f.read().decode('utf-8'))
    if match is None:
        raise ValueError('No __version__ found in %s' % version_file)
    return str(ast.literal_eval(match.group(1)))
def commit_for_release(version_file, ver):
    """Unstage everything, stage only the version file, and commit it."""
    run_step('git', 'reset')
    run_step('git', 'add', version_file)
    run_step('git', 'commit', '--message', 'Releasing version %s' % ver)


def create_git_tag(tag_name):
    """Create a GPG-signed annotated tag named *tag_name*."""
    run_step('git', 'tag', '-s', '-m', tag_name, tag_name)


def create_source_tarball():
    """Build the sdist archive under dist/."""
    run_step('python', 'setup.py', 'sdist')


def upload_source_tarball():
    """Build the sdist and upload it to the package index."""
    run_step('python', 'setup.py', 'sdist', 'upload')


def push_to_github():
    """Push the master branch to the origin remote."""
    run_step('git', 'push', 'origin', 'master')


def push_tags_to_github():
    """Push all tags to the origin remote."""
    run_step('git', 'push', '--tags', 'origin')
if __name__ == '__main__':
    if DEBUG:
        # Stub subprocess so every "run" just echoes its argument list back.
        subprocess.check_output = lambda x: x

    # raw_input() was renamed to input() on Python 3; the original call to
    # raw_input() crashed there with NameError despite the print_function
    # future import signalling py3 intent.
    try:
        prompt = raw_input  # Python 2
    except NameError:
        prompt = input      # Python 3

    ver = version('pgcli/__init__.py')
    print('Releasing Version:', ver)

    parser = OptionParser()
    parser.add_option(
        "-c", "--confirm-steps", action="store_true", dest="confirm_steps",
        default=False, help=("Confirm every step. If the step is not "
                             "confirmed, it will be skipped.")
    )
    parser.add_option(
        "-d", "--dry-run", action="store_true", dest="dry_run",
        default=False, help="Print out, but not actually run any steps."
    )

    popts, pargs = parser.parse_args()
    CONFIRM_STEPS = popts.confirm_steps
    DRY_RUN = popts.dry_run

    choice = prompt('Are you sure? (y/N) [n] ')
    if choice.lower() != 'y':
        sys.exit(1)

    # Release pipeline: commit, tag, build, push, then upload last so a
    # failed push never leaves an unpushed release on the index.
    commit_for_release('pgcli/__init__.py', ver)
    create_git_tag('v%s' % ver)
    create_source_tarball()
    push_to_github()
    push_tags_to_github()
    upload_source_tarball()
|
from configurations import values
from . import common, databases, email
from .. import __version__
class Raven(object):
    """Report uncaught exceptions to the Sentry server."""

    # Append to (not replace) the base app list so project apps survive.
    INSTALLED_APPS = common.Common.INSTALLED_APPS + ('raven.contrib.django.raven_compat',)

    RAVEN_CONFIG = {
        # DSN is pulled from the RAVEN_CONFIG_DSN environment variable.
        'dsn': values.URLValue(environ_name='RAVEN_CONFIG_DSN'),
        # Tag every reported event with the current package version.
        'release': __version__,
    }
class Sentry404(Raven):
    """Log 404 events to the Sentry server."""

    # The catch middleware is prepended to the base middleware list.
    # NOTE(review): Sentry's docs have specific ordering requirements for
    # Sentry404CatchMiddleware -- confirm first position is intended.
    MIDDLEWARE_CLASSES = (
        'raven.contrib.django.raven_compat.middleware.Sentry404CatchMiddleware',
    ) + common.Common.MIDDLEWARE_CLASSES
class Public(email.Email, databases.Databases, common.Common):
    """General settings for public projects."""

    # Must be supplied via the environment; never hard-code a secret key.
    SECRET_KEY = values.SecretValue()

    # Browser-side hardening: HttpOnly CSRF cookie, XSS filter,
    # nosniff content typing and clickjacking denial.
    CSRF_COOKIE_HTTPONLY = True
    SECURE_BROWSER_XSS_FILTER = True
    SECURE_CONTENT_TYPE_NOSNIFF = True
    X_FRAME_OPTIONS = 'DENY'

    # System-check IDs to silence, configurable per deployment.
    SILENCED_SYSTEM_CHECKS = values.ListValue([])
class Stage(Public):
    """Settings for staging server."""
    # Identical to Public; exists so deployments can target "Stage" by name.
    pass
class SSL(object):
    """Settings for SSL."""

    # Canonical HTTPS host; redirect all plain-HTTP traffic to it.
    SECURE_SSL_HOST = values.Value('www.example.com')
    SECURE_SSL_REDIRECT = True
class Prod(Public, SSL):
    """Settings for production server."""
    # Combines the public hardening settings with the SSL redirect mixin.
    pass
|
import numpy as np
import OpenGL.GL as gl
import texture, shader, colormap, color
class Image(object):
    ''' A 2D numpy array rendered as an OpenGL texture, with optional
    colormap lookup, interpolation shader, lighting and grid overlay. '''

    def __init__(self, Z, format=None, cmap=colormap.IceAndFire, vmin=None, vmax=None,
                 interpolation='nearest', origin='lower', lighted=False,
                 gridsize=(0.0,0.0,0.0), elevation = 0.0):
        ''' Creates a texture from numpy array.

        Parameters:
        -----------
        Z : numpy array
            Z may be a float32 or uint8 array with following shapes:
                * M
                * MxN
                * MxNx[1,2,3,4]
        format: [None | 'A' | 'LA' | 'RGB' | 'RGBA']
            Specify the texture format to use. Most of times it is possible to
            find it automatically but there are a few cases where it not
            possible to decide. For example an array with shape (M,3) can be
            considered as 2D alpha texture of size (M,3) or a 1D RGB texture of
            size (M,).
        interpolation: 'nearest', 'bilinear' or 'bicubic'
            Interpolation method.
        vmin: scalar
            Minimal representable value.
        vmax: scalar
            Maximal representable value.
        origin: 'lower' or 'upper'
            Place the [0,0] index of the array in the upper left or lower left
            corner.
        '''
        # NOTE(review): ``format`` is accepted but never used below -- format
        # detection is left to texture.Texture. Confirm whether it should be
        # forwarded.
        self._lut = None
        self._interpolation = interpolation
        self._lighted = lighted
        self._gridsize = gridsize
        self._elevation = elevation
        self._texture = texture.Texture(Z)
        self._origin = origin
        self._vmin = vmin
        self._vmax = vmax
        self._data = Z
        # Property setter: builds the colormap LUT texture (not the shader).
        self.cmap = cmap # This takes care of actual build
        self._shader = None
        self.build()

    def build(self):
        ''' Build shader

        Chooses (or skips) a shader based on source format, interpolation
        mode, colormap presence, lighting and grid settings, then refreshes
        the texture scale/bias via update().
        '''
        interpolation = self._interpolation
        gridsize = self._gridsize
        elevation = self._elevation
        lighted = self._lighted
        cmap = self._cmap
        self._shader = None

        # Source format is RGB or RGBA, no need of a colormap
        if self._texture.src_format in [gl.GL_RGB,gl.GL_RGBA]:
            if interpolation == 'bicubic':
                self._shader = shader.Bicubic(False, lighted=lighted, gridsize=gridsize, elevation=elevation)
            elif interpolation == 'bilinear':
                self._shader = shader.Bilinear(False, lighted=lighted, gridsize=gridsize, elevation=elevation)
            else:
                # 'nearest' RGB(A) needs no shader at all.
                self._shader = None
        # Source format is not RGB or RGBA
        else:
            if cmap:
                # First boolean argument enables the colormap LUT lookup.
                if interpolation == 'bicubic':
                    self._shader = shader.Bicubic(True, lighted=lighted, gridsize=gridsize, elevation=elevation)
                elif interpolation == 'bilinear':
                    self._shader = shader.Bilinear(True, lighted=lighted, gridsize=gridsize, elevation=elevation)
                else:
                    self._shader = shader.Nearest(True, lighted=lighted, gridsize=gridsize, elevation=elevation)
            else:
                if interpolation == 'bicubic':
                    self._shader = shader.Bicubic(False, lighted=lighted, gridsize=gridsize, elevation=elevation)
                elif interpolation == 'bilinear':
                    self._shader = shader.Bilinear(False, lighted=lighted, gridsize=gridsize, elevation=elevation)
                else:
                    self._shader = None
        self.update()

    @property
    def shape(self):
        ''' Underlying array shape. '''
        return self._data.shape

    @property
    def data(self):
        ''' Underlying array '''
        return self._data

    @property
    def texture(self):
        ''' Underlying texture '''
        return self._texture

    @property
    def shader(self):
        ''' Currently active shader '''
        return self._shader

    @property
    def format(self):
        ''' Array representation format (string). '''
        format = self._texture.src_format
        if format == gl.GL_ALPHA:
            return 'A'
        elif format == gl.GL_LUMINANCE_ALPHA:
            return 'LA'
        elif format == gl.GL_RGB:
            return 'RGB'
        elif format == gl.GL_RGBA:
            return 'RGBA'

    def _get_cmap(self):
        return self._cmap
    def _set_cmap(self, cmap):
        # Rebuild the 1D lookup-table texture from the colormap's RGB LUT.
        self._cmap = cmap
        colors = self.cmap.LUT['rgb'][1:].flatten().view((np.float32,3))
        self._lut = texture.Texture(colors)
    cmap = property(_get_cmap, _set_cmap,
                    doc=''' Colormap to be used to represent the array. ''')

    def _get_elevation(self):
        return self._elevation
    def _set_elevation(self, elevation):
        # Do we need to re-build shader ?
        # Product is zero when either value is zero, i.e. when elevation is
        # being switched on or off -- then the shader must be rebuilt.
        if not (elevation*self._elevation):
            self._elevation = elevation
            self.build()
        elif self._shader:
            # Elevation stays non-zero: just update the existing shader.
            self._elevation = elevation
            self._shader._elevation = elevation
    elevation = property(_get_elevation, _set_elevation,
                         doc=''' Image elevation. ''')

    def _get_origin(self):
        return self._origin
    def _set_origin(self, origin):
        self._origin = origin
    origin = property(_get_origin, _set_origin,
                      doc=''' Place the [0,0] index of the array in the upper
                      left or lower left corner. ''')

    def _get_lighted(self):
        return self._lighted
    def _set_lighted(self, lighted):
        # Lighting is baked into the shader, so toggling forces a rebuild.
        self._lighted = lighted
        self.build()
    lighted = property(_get_lighted, _set_lighted,
                       doc=''' Indicate whether image is ligthed. ''')

    def _get_interpolation(self):
        return self._interpolation
    def _set_interpolation(self, interpolation):
        # Interpolation selects the shader class, so a rebuild is required.
        self._interpolation = interpolation
        self.build()
    interpolation = property(_get_interpolation, _set_interpolation,
                             doc=''' Interpolation method. ''')

    def _get_vmin(self):
        return self._vmin
    def _set_vmin(self, vmin):
        # NOTE(review): does not call update(); new bounds take effect on the
        # next explicit update()/build() -- confirm this is intended.
        self._vmin = vmin
    vmin = property(_get_vmin, _set_vmin,
                    doc=''' Minimal representable value. ''')

    def _get_vmax(self):
        return self._vmax
    def _set_vmax(self, vmax):
        self._vmax = vmax
    vmax = property(_get_vmax, _set_vmax,
                    doc=''' Maximal representable value. ''')

    def _get_gridsize(self):
        return self._gridsize
    def _get_gridsize_x(self):
        return self._gridsize[0]
    def _get_gridsize_y(self):
        return self._gridsize[1]
    def _get_gridsize_z(self):
        return self._gridsize[2]
    def _set_gridsize(self, gridsize):
        # Do we need to re-build shader ?
        x,y,z = gridsize
        x,y,z = max(0,x),max(0,y),max(0,z)
        _x,_y,_z = self._gridsize
        self._gridsize = x,y,z
        # Rebuild only when the grid is being switched on or off as a whole
        # (one of old/new sums is zero while the other is not); otherwise the
        # existing shader can simply be updated in place.
        if not (x+y+z)*(_x+_y+_z) and (x+y+z)+(_x+_y+_z):
            self.build()
        elif self._shader:
            self._shader._gridsize = x,y,z
    def _set_gridsize_x(self, x):
        self.gridsize = (max(0,x), self._gridsize[1], self._gridsize[2])
    def _set_gridsize_y(self, y):
        self.gridsize = (self._gridsize[0], max(0,y), self._gridsize[2])
    def _set_gridsize_z(self, z):
        self.gridsize = (self._gridsize[0], self._gridsize[1], max(0,z))
    gridsize = property(_get_gridsize, _set_gridsize,
                        doc=''' Image grid (x,y,z). ''')

    def update(self):
        ''' Data update.

        Recomputes the texture scale/bias that map [vmin, vmax] onto the
        usable texel range, falling back to the data min/max when the
        bounds were not given explicitly.
        '''
        if self.vmin is None:
            vmin = self.data.min()
        else:
            vmin = self.vmin
        if self.vmax is None:
            vmax = self._data.max()
        else:
            vmax = self.vmax
        # Degenerate (constant) data: avoid a divide-by-zero below.
        if vmin == vmax:
            vmin, vmax = 0, 1
        if self._lut:
            # NOTE(review): the 3.1 constant presumably keeps lookups away
            # from the LUT's edge texels during interpolation -- confirm.
            s = self._lut.width
            self._texture.update(bias = 1.0/(s-1)-vmin*((s-3.1)/(s-1))/(vmax-vmin),
                                 scale = ((s-3.1)/(s-1))/(vmax-vmin))
        else:
            self._texture.update(bias=-vmin/(vmax-vmin),scale=1.0/(vmax-vmin))

    def blit(self, x, y, w, h):
        ''' Blit array onto active framebuffer. '''
        if self._shader:
            self._shader.bind(self.texture,self._lut)
        # Flip texture coordinates vertically depending on origin.
        if self.origin == 'lower':
            t=0,1
        else:
            t=1,0
        gl.glColor(1,1,1,1)
        self._texture.blit(x,y,w,h,t=t)
        if self._shader:
            self._shader.unbind()
|
import pytest
from prism.grep import pattern, search
def log_lines():
    """Return sample Apache error-log lines used as grep fixtures."""
    notice_line = "[Sun Apr 08 12:51:52 2012] [notice] Digest: done"
    error_line = "[Mon Jul 11 09:26:13 2011] Error: [client ::1] File does not exist: /Library/WebServer/Documents/favicon.ico"
    return [notice_line, error_line]
def test_search():
    """Every fixture line must be matched by the grep pattern."""
    for entry in log_lines():
        failure_msg = "Regexp pattern '{0}' didn't match line '{1}'".format(pattern, entry)
        assert search(entry), failure_msg
|
"""
Контролер веб интерфейса бота
:copyright: (c) 2013 by Pavel Lyashkov.
:license: BSD, see LICENSE for more details.
"""
import re
import os
from flask import Flask, Blueprint, abort, request, make_response, url_for, render_template
from web import app
from web import cache
api = Blueprint('api', __name__)


@api.route('/index', methods=['GET'])
def index():
    """Trivial liveness endpoint for the bot web interface.

    Returns the body as a string: Flask view functions may not return a
    bare ``int`` (the original ``return 1`` raised ``TypeError: The view
    function did not return a valid response`` at request time).
    """
    return '1'
|
"""
This module contains a class, :class:`Query`, that was implemented to provide
users with means to programmatically query the
`ACS Zeropoints Calculator <https://acszeropoints.stsci.edu>`_.
The API works by submitting requests to the
ACS Zeropoints Calculator referenced above and hence, it is only valid for ACS
specific instruments (HRC, SBC, or WFC).
The API can be used in two ways by specifying either a
``(date, detector, filter)`` combination or just a ``(date, detector)``
combination. In the first case, the query
will return the zeropoint information for the specific filter and detector at
specified date. In the second case, the query will return the zeropoint
information for all the filters for the desired detector at the specified date.
In either case, the result will be an ``astropy.table.QTable`` where each column
is an ``astropy.units.quantity.Quantity`` object with the appropriate units attached.
Examples
--------
Retrieve the zeropoint information for all the filters on 2016-04-01 for WFC:
>>> from acstools import acszpt
>>> date = '2016-04-01'
>>> detector = 'WFC'
>>> q = acszpt.Query(date=date, detector=detector)
>>> zpt_table = q.fetch()
>>> print(zpt_table)
FILTER PHOTPLAM PHOTFLAM STmag VEGAmag ABmag
Angstrom erg / (Angstrom cm2 s) mag(ST) mag mag(AB)
str6 float64 float64 float64 float64 float64
------ -------- ---------------------- ------- ------- -------
F435W 4329.2 3.148e-19 25.155 25.763 25.665
F475W 4746.2 1.827e-19 25.746 26.149 26.056
F502N 5023.0 5.259e-18 22.098 22.365 22.285
F550M 5581.5 3.99e-19 24.898 24.825 24.856
F555W 5360.9 1.963e-19 25.667 25.713 25.713
F606W 5922.0 7.811e-20 26.668 26.405 26.498
F625W 6312.0 1.188e-19 26.213 25.735 25.904
F658N 6584.0 1.97e-18 23.164 22.381 22.763
F660N 6599.4 5.156e-18 22.119 21.428 21.714
F775W 7693.2 9.954e-20 26.405 25.272 25.667
F814W 8045.0 7.046e-20 26.78 25.517 25.944
F850LP 9033.2 1.52e-19 25.945 24.332 24.858
F892N 8914.8 1.502e-18 23.458 21.905 22.4
Retrieve the zeropoint information for the F435W filter on 2016-04-01 for WFC:
>>> from acstools import acszpt
>>> date = '2016-04-01'
>>> detector = 'WFC'
>>> filt = 'F435W'
>>> q = acszpt.Query(date=date, detector=detector, filter=filt)
>>> zpt_table = q.fetch()
>>> print(zpt_table)
FILTER PHOTPLAM PHOTFLAM STmag VEGAmag ABmag
Angstrom erg / (Angstrom cm2 s) mag(ST) mag mag(AB)
------ -------- ---------------------- ------- ------- -------
F435W 4329.2 3.148e-19 25.155 25.763 25.665
Retrieve the zeropoint information for the F435W filter for WFC at multiple dates:
>>> from acstools import acszpt
>>> dates = ['2004-10-13', '2011-04-01', '2014-01-17', '2018-05-23']
>>> queries = []
>>> for date in dates:
... q = acszpt.Query(date=date, detector='WFC', filt='F435W')
... zpt_table = q.fetch()
... # Each object has a zpt_table attribute, so we save the instance
... queries.append(q)
>>> for q in queries:
... print(q.date, q.zpt_table['PHOTFLAM'][0], q.zpt_table['STmag'][0])
2004-10-13 3.074e-19 erg / (Angstrom cm2 s) 25.181 mag(ST)
2011-04-01 3.138e-19 erg / (Angstrom cm2 s) 25.158 mag(ST)
2014-01-17 3.144e-19 erg / (Angstrom cm2 s) 25.156 mag(ST)
2018-05-23 3.152e-19 erg / (Angstrom cm2 s) 25.154 mag(ST)
>>> type(queries[0].zpt_table['PHOTFLAM'])
astropy.units.quantity.Quantity
"""
import datetime as dt
import logging
import os
from urllib.request import urlopen
from urllib.error import URLError
import astropy.units as u
from astropy.table import QTable
from bs4 import BeautifulSoup
import numpy as np
__taskname__ = "acszpt"
__author__ = "Nathan Miles"
__version__ = "1.0"
__vdate__ = "22-Jan-2019"
__all__ = ['Query']
logging.basicConfig()
LOG = logging.getLogger(f'{__taskname__}.Query')
LOG.setLevel(logging.INFO)
class Query:
    """Class used to interface with the ACS Zeropoints Calculator API.

    Parameters
    ----------
    date : str
        Input date in the following ISO format, YYYY-MM-DD.

    detector : {'HRC', 'SBC', 'WFC'}
        One of the three channels on ACS: HRC, SBC, or WFC.

    filt : str or `None`, optional
        One of valid filters for the chosen detector. If no filter is supplied,
        all of the filters for the chosen detector will be used:

            * HRC:
                 F220W, F250W, F330W,
                 F344N, F435W, F475W,
                 F502N, F550M, F555W,
                 F606W, F625W, F658N, F660N,
                 F775W, F814W, F850LP, F892N
            * WFC:
                 F435W, F475W,
                 F502N, F550M, F555W,
                 F606W, F625W, F658N, F660N,
                 F775W, F814W, F850LP, F892N
            * SBC:
                 F115LP, F122M, F125LP,
                 F140LP, F150LP, F165LP
    """

    def __init__(self, date, detector, filt=None):
        # Set the attributes
        self._date = date
        self._detector = detector.upper()
        self._filt = filt
        self.valid_filters = {
            'WFC': ['F435W', 'F475W', 'F502N', 'F550M',
                    'F555W', 'F606W', 'F625W', 'F658N',
                    'F660N', 'F775W', 'F814W', 'F850LP', 'F892N'],

            'HRC': ['F220W', 'F250W', 'F330W', 'F344N',
                    'F435W', 'F475W', 'F502N', 'F550M',
                    'F555W', 'F606W', 'F625W', 'F658N',
                    'F660N', 'F775W', 'F814W', 'F850LP', 'F892N'],

            'SBC': ['F115LP', 'F122M', 'F125LP',
                    'F140LP', 'F150LP', 'F165LP']
        }
        self._zpt_table = None

        # Set the private attributes
        if filt is None:
            self._url = ('https://acszeropoints.stsci.edu/results_all/?'
                         f'date={self.date}&detector={self.detector}')
        else:
            self._filt = filt.upper()
            self._url = ('https://acszeropoints.stsci.edu/results_single/?'
                         f'date1={self.date}&detector={self.detector}'
                         f'&{self.detector}_filter={self.filt}')

        # ACS Launch Date
        self._acs_installation_date = dt.datetime(2002, 3, 7)
        # The farthest date in future that the component and throughput files
        # are valid for. If input date is larger, extrapolation is not valid.
        self._extrapolation_date = dt.datetime(2021, 12, 31)
        self._msg_div = '-' * 79
        self._valid_detectors = ['HRC', 'SBC', 'WFC']
        self._response = None
        self._failed = False
        self._data_units = {
            'FILTER': u.dimensionless_unscaled,
            'PHOTPLAM': u.angstrom,
            'PHOTFLAM': u.erg / u.cm ** 2 / u.second / u.angstrom,
            'STmag': u.STmag,
            'VEGAmag': u.mag,
            'ABmag': u.ABmag
        }
        # Number of columns per row in the HTML table returned by the
        # calculator; used to reshape the flat list of <td> cells.
        self._block_size = len(self._data_units)

    @property
    def date(self):
        """The user supplied date. (str)"""
        return self._date

    @property
    def detector(self):
        """The user supplied detector. (str)"""
        return self._detector

    @property
    def filt(self):
        """The user supplied filter, if one was given. (str or `None`)"""
        return self._filt

    @property
    def zpt_table(self):
        """The results returned by the ACS Zeropoint Calculator. (`astropy.table.QTable`)"""
        return self._zpt_table

    def _check_inputs(self):
        """Check the inputs to ensure they are valid.

        Returns
        -------
        status : bool
            True if all inputs are valid, False if one is not.
        """
        valid_detector = True
        valid_filter = True
        valid_date = True

        # Determine the submitted detector is valid
        if self.detector not in self._valid_detectors:
            msg = (f'{self.detector} is not a valid detector option.\n'
                   'Please choose one of the following:\n'
                   f'{os.linesep.join(self._valid_detectors)}\n'
                   f'{self._msg_div}')
            LOG.error(msg)
            valid_detector = False

        # Determine if the submitted filter is valid
        if (self.filt is not None and valid_detector and
                self.filt not in self.valid_filters[self.detector]):
            msg = (f'{self.filt} is not a valid filter for {self.detector}\n'
                   'Please choose one of the following:\n'
                   f'{os.linesep.join(self.valid_filters[self.detector])}\n'
                   f'{self._msg_div}')
            LOG.error(msg)
            valid_filter = False

        # Determine if the submitted date is valid
        date_check = self._check_date()
        if date_check is not None:
            LOG.error(f'{date_check}\n{self._msg_div}')
            valid_date = False

        if not valid_detector or not valid_filter or not valid_date:
            return False
        return True

    def _check_date(self, fmt='%Y-%m-%d'):
        """Convenience method for determining if the input date is valid.

        Parameters
        ----------
        fmt : str
            The format of the date string. The default is ``%Y-%m-%d``, which
            corresponds to ``YYYY-MM-DD``.

        Returns
        -------
        status : str or `None`
            If the date is valid, returns `None`. If the date is invalid,
            returns a message explaining the issue.
        """
        result = None
        try:
            dt_obj = dt.datetime.strptime(self.date, fmt)
        except ValueError:
            result = f'{self.date} does not match YYYY-MM-DD format'
        else:
            if dt_obj < self._acs_installation_date:
                result = ('The observation date cannot occur '
                          'before ACS was installed '
                          f'({self._acs_installation_date.strftime(fmt)})')
            elif dt_obj > self._extrapolation_date:
                result = ('The observation date cannot occur after the '
                          'maximum allowable date, '
                          f'{self._extrapolation_date.strftime(fmt)}. '
                          'Extrapolations of the '
                          'instrument throughput after this date lead to '
                          'high uncertainties and are therefore invalid.')
        # Originally this return sat inside a ``finally`` block; a plain
        # return is equivalent here and avoids masking future exceptions.
        return result

    def _submit_request(self):
        """Submit a request to the ACS Zeropoint Calculator.

        If an exception is raised during the request, an error message is
        given. Otherwise, the response is saved in the corresponding
        attribute.
        """
        if not self._url.startswith('http'):
            raise ValueError(f'Invalid URL {self._url}')
        try:
            self._response = urlopen(self._url)  # nosec
        except URLError as e:
            msg = (f'{repr(e)}\n{self._msg_div}\nThe query failed! '
                   'Please check your inputs. '
                   'If the error persists, submit a ticket to the '
                   'ACS Help Desk at hsthelp.stsci.edu with the error message '
                   'displayed above.')
            LOG.error(msg)
            self._failed = True
        else:
            self._failed = False

    def _parse_and_format(self):
        """ Parse and format the results returned by the ACS Zeropoint Calculator.

        Using ``beautifulsoup4``, find all the ``<tb> </tb>`` tags present in
        the response. Format the results into an astropy.table.QTable with
        corresponding units and assign it to the zpt_table attribute.
        """
        soup = BeautifulSoup(self._response.read(), 'html.parser')
        # The response has been fully read; release the connection.
        self._response.close()

        # Grab all elements in the table returned by the ZPT calc.
        td = soup.find_all('td')

        # Remove the units attached to PHOTFLAM and PHOTPLAM column names.
        td = [val.text.split(' ')[0] for val in td]

        # Turn the single list into a 2-D numpy array
        data = np.reshape(td,
                          (int(len(td) / self._block_size), self._block_size))

        # Create the QTable, note that sometimes self._response will be empty
        # even though the return was successful; hence the try/except to catch
        # any potential index errors. Provide the user with a message and
        # set the zpt_table to None.
        try:
            tab = QTable(data[1:, :],
                         names=data[0],
                         dtype=[str, float, float, float, float, float])
        except IndexError as e:
            # BUGFIX: the original message concatenated to "Desk athttps://";
            # a trailing space restores "Help Desk at https://...".
            msg = (f'{repr(e)}\n{self._msg_div}\n'
                   'There was an issue parsing the request. '
                   'Try resubmitting the query. If this issue persists, please '
                   'submit a ticket to the Help Desk at '
                   'https://stsci.service-now.com/hst')
            LOG.info(msg)
            self._zpt_table = None
        else:
            # If and only if no exception was raised, attach the units to each
            # column of the QTable. Note we skip the FILTER column because
            # Quantity objects in astropy must be numerical (i.e. not str)
            for col in tab.colnames:
                if col.lower() == 'filter':
                    continue
                tab[col].unit = self._data_units[col]
            self._zpt_table = tab

    def fetch(self):
        """Submit the request to the ACS Zeropoints Calculator.

        This method will:

        * submit the request
        * parse the response
        * format the results into a table with the correct units

        Returns
        -------
        tab : `astropy.table.QTable` or `None`
            If the request was successful, returns a table; otherwise, `None`.
        """
        LOG.info('Checking inputs...')
        valid_inputs = self._check_inputs()

        if valid_inputs:
            LOG.info(f'Submitting request to {self._url}')
            self._submit_request()
            if self._failed:
                return

            LOG.info('Parsing the response and formatting the results...')
            self._parse_and_format()
            return self.zpt_table

        LOG.error('Please fix the incorrect input(s)')
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from django.utils.translation import gettext_lazy as _
from churchill.apps.core.models import BaseModel
from churchill.apps.currencies.services import get_default_currency_id
class StatsCalculationStrategy(models.TextChoices):
    """Window used when computing a user's statistics (see Profile)."""
    LAST_SHOT = "LAST_SHOT", _("From the last shot")
    WEEKLY = "WEEKLY", _("Weekly")
    MONTHLY = "MONTHLY", _("Monthly")
    ALL_TIME = "ALL_TIME", _("For the all time")
class Profile(BaseModel):
    """Per-user preferences, keyed one-to-one to the auth user."""

    user = models.OneToOneField(
        User,
        on_delete=models.CASCADE,
        primary_key=True,  # the profile shares its primary key with the user
        related_name="profile",
    )
    image = models.FileField(
        upload_to=settings.PROFILE_IMAGE_DIRECTORY, null=True, blank=True
    )
    language = models.CharField(
        max_length=5,
        blank=True,
        default=settings.LANGUAGE_CODE,
        choices=settings.LANGUAGES,
    )
    currency = models.ForeignKey(
        "currencies.Currency",
        related_name="profiles",
        on_delete=models.DO_NOTHING,
        blank=True,
        # Callable default: resolved per-row at creation time.
        default=get_default_currency_id,
    )
    next_day_offset = models.IntegerField(
        blank=True,
        default=settings.NEXT_DAY_OFFSET,
        help_text=_("Offset in hours for the next day"),
    )
    avg_consumption = models.IntegerField(
        blank=True,
        default=settings.AVG_ALCOHOL_CONSUMPTION,
        help_text=_("Average alcohol consumption in ml per year"),
    )
    avg_price = models.DecimalField(
        max_digits=5,
        decimal_places=2,
        blank=True,
        default=settings.AVG_ALCOHOL_PRICE,
        help_text=_("Average alcohol price for 1000 ml"),
    )
    stats_calculation_strategy = models.CharField(
        max_length=20,
        choices=StatsCalculationStrategy.choices,
        default=StatsCalculationStrategy.MONTHLY,
    )
    # NOTE(review): presumably a token used for account/e-mail verification;
    # null until one is issued -- confirm against the issuing service.
    verification_token = models.CharField(max_length=16, null=True, blank=True)

    def __str__(self):
        return self.user.email
|
__authors__ = ""
__copyright__ = "(c) 2014, pymal"
__license__ = "BSD License"
__contact__ = "Name Of Current Guardian of this file <email@address>"
USER_AGENT = 'api-indiv-0829BA2B33942A4A5E6338FE05EFB8A1'
HOST_NAME = "http://myanimelist.net"
DEBUG = False
RETRY_NUMBER = 4
RETRY_SLEEP = 1
SHORT_SITE_FORMAT_TIME = '%b %Y'
LONG_SITE_FORMAT_TIME = '%b %d, %Y'
MALAPPINFO_FORMAT_TIME = "%Y-%m-%d"
MALAPPINFO_NONE_TIME = "0000-00-00"
MALAPI_FORMAT_TIME = "%Y%m%d"
MALAPI_NONE_TIME = "00000000"
|
import sys
import struct
import dpkt
from sc_warts import *
# NOTE: this module is Python 2 (print statements throughout).
# Reject dpkt 1.8 outright; only that exact version string is checked.
if dpkt.__version__ == '1.8':
    print "Upgrade dpkt"
    sys.exit(-1)

# Warts object type code for tracebox records (not part of sc_warts' table).
TRACEBOXTYPE = 0x0c
def dict_diff(a, b):
    """Return {key: (a_value, b_value)} for keys present in BOTH dicts
    whose values differ.  Keys found in only one dict are ignored."""
    return {key: (a[key], b[key])
            for key in a
            if key in b and a[key] != b[key]}
class WartsTraceBoxReader(WartsReader):
    """Reads tracebox records out of a warts capture file, skipping over
    list/cycle bookkeeping objects."""

    def __init__(self, wartsfile, verbose=False):
        super(WartsTraceBoxReader, self).__init__(wartsfile, verbose)

    def next(self):
        # Returns (flags, pkts) for the next tracebox record, or
        # (False, False) once the file is exhausted.
        while True:
            obj = self.next_object()
            if not obj:
                return (False, False)
            if (obj.typ == TRACEBOXTYPE):
                return (obj.flags, obj.pkts)

    def next_object(self):
        # read warts object header
        self.header = self.fd.read(8)
        # sanity check: a short read means end of file
        if len(self.header) != 8:
            return None
        # Header layout: 2-byte magic, 2-byte type, 4-byte payload length.
        (magic, typ, length) = struct.unpack('!HHI', self.header)
        if self.verbose:
            print "Magic: %02X Obj: %02X Len: %02x" % (magic, typ, length)
        assert(magic == obj_type['MAGIC'])
        # read remainder of object
        data = self.fd.read(length)
        if typ == obj_type['LIST']:
            return WartsList(data, verbose=self.verbose)
        elif typ == obj_type['CYCLESTART']:
            return WartsCycle(data, verbose=self.verbose)
        elif typ == obj_type['CYCLE']:
            return WartsCycle(data, verbose=self.verbose)
        elif typ == obj_type['CYCLE_STOP']:
            return WartsCycleStop(data, verbose=self.verbose)
        elif typ == TRACEBOXTYPE:
            return WartsTraceBox(data, verbose=self.verbose)
        else:
            print "Unsupported object: %02x Len: %d" % (typ, length)
            assert False
class WartsTraceBox(WartsBaseObject):
    """Parses one tracebox record: a flag block describing the measurement
    followed by ``pktc`` back-to-back per-packet entries."""

    def __init__(self, data, verbose=False):
        super(WartsTraceBox, self).__init__(TRACEBOXTYPE, verbose)
        self.data = data
        self.flagdata = data
        self.pkts = []
        # Flag definitions in wire order: (field name, unpack callable).
        self.flag_defines = [
            ('listid', unpack_uint32_t),
            ('cycleid', unpack_uint32_t),
            ('userid', unpack_uint32_t),
            ('srcaddr', self.unpack_address),
            ('dstaddr', self.unpack_address),
            ('sport', unpack_uint16_t),
            ('dport', unpack_uint16_t),
            ('start', read_timeval),
            ('result', unpack_uint16_t),
            ('rtt', unpack_uint8_t),
            ('qtype', unpack_uint8_t),
            ('udp', unpack_uint8_t),
            ('printmode', unpack_uint8_t),
            ('pktc16', unpack_uint16_t),
            ('pktc', unpack_uint32_t),
        ]
        flag_bytes = self.read_flags()
        if self.verbose:
            print "TB Params:", self.flags
        # Packet entries immediately follow the flag block; each parse
        # reports its own byte length so we can walk the buffer.
        offset = flag_bytes
        for i in range(self.flags['pktc']):
            pkt = WartsTraceBoxPkt(data[offset:], self.referenced_address, self.verbose)
            self.pkts.append(pkt.flags)
            offset += pkt.flag_bytes
            if self.verbose: print "Pkt %d: %s" % (i+1, pkt.flags)
class WartsTraceBoxPkt(WartsBaseObject):
    """A single captured packet inside a tracebox record.

    Reads the per-packet flag block (direction, timestamp, length) and
    then dissects the raw packet bytes with dpkt into a flat dict of
    "Layer::Field" entries stored under ``self.flags['data']``.
    """
    def __init__(self, data, refs, verbose=False):
        super(WartsTraceBoxPkt, self).__init__(TRACEBOXTYPE, verbose)
        self.update_ref(refs)
        self.flagdata = data
        self.flag_defines = [
            ('dir', unpack_uint8_t),
            ('time', read_timeval),
            ('len', unpack_uint16_t),
            # 'data' is parsed separately below; read_pass is a no-op
            # placeholder so read_flags() skips over it.
            ('data', self.read_pass),
        ]
        self.flag_bytes = self.read_flags()
        # The raw packet bytes follow the flag block; dissect them now.
        datalen = self.flags['len']
        self.flags['data'] = self.read_tracebox_pkt(data[self.flag_bytes:self.flag_bytes+datalen])
        self.flag_bytes += self.flags['len']
    def read_pass(self, b):
        # Placeholder decoder: consumes nothing, real parsing happens in
        # read_tracebox_pkt().
        return ("pass", 0)
    def read_tracebox_pkt(self, data):
        """Dissect one packet (outer IP, optional ICMP quote, TCP) into a
        flat field dict. 'hop' is the source address of the replying hop."""
        fields = dict()
        ip = dpkt.ip.IP(data)
        fields['hop'] = socket.inet_ntoa(ip.src)
        if ip.p == dpkt.ip.IP_PROTO_ICMP:
            # This is a reply from a hop
            fields['hop'] = socket.inet_ntoa(ip.src)
            icmp = ip.data
            #print "ICMP quote:", icmp.type, icmp.code, "LEN:", len(icmp.data.data)
            # icmp.data is type dpkt.icmp.TimeExceed
            # so, icmp.data.data is a dpkt.ip.IP
            # From here on, 'ip' refers to the quoted (original) packet.
            ip = icmp.data.data
        fields['IP::Version'] = ip.v
        fields['IP::IHL'] = ip.hl
        # tos byte = 6-bit DSCP + 2-bit ECN.
        dscp = (ip.tos & 0xFC) >> 2
        ecn = (ip.tos & 0x03)
        fields['IP::DiffServicesCP'] = hex(dscp)
        fields['IP::ECN'] = hex(ecn)
        # NOTE(review): the next four keys use a single colon ('IP:...'),
        # unlike the 'IP::...' keys around them -- looks inconsistent, but
        # consumers may rely on these exact names, so they are preserved.
        fields['IP:Length'] = hex(ip.len)
        fields['IP:ID'] = ip.id
        flags = (ip.df >> 1) + ip.mf
        fields['IP:Flags'] = hex(flags)
        fields['IP:FragmentOffset'] = ip.offset
        fields['IP:TTL'] = ip.ttl
        fields['IP::Protocol'] = ip.p
        fields['IP::Checksum'] = hex(ip.sum)
        fields['IP::SourceAddr'] = socket.inet_ntoa(ip.src)
        fields['IP::DestAddr'] = socket.inet_ntoa(ip.dst)
        if ip.p == dpkt.ip.IP_PROTO_TCP:
            tcp = ip.data
            if not isinstance(tcp, dpkt.tcp.TCP):
                # Partial ICMP quote: pad the truncated TCP header (data
                # offset byte 0x50 = header length 5 words, rest zeroed)
                # so dpkt can still parse the leading fields.
                z = struct.pack('12sB',ip.data,0x50) + struct.pack('7B',*([0]*7))
                tcp = dpkt.tcp.TCP(z)
                #print type(tcp)
            # Only emit TCP fields the quote actually contained; quotes may
            # truncate the header at 4/8/12/16/20 bytes.
            if len(ip.data) >= 4:
                fields['TCP::SPort'] = hex(tcp.sport)
                fields['TCP::DPort'] = hex(tcp.dport)
            if len(ip.data) >= 8:
                fields['TCP::SeqNumber'] = hex(tcp.seq)
            if len(ip.data) >= 12:
                fields['TCP::AckNumber'] = hex(tcp.ack)
            if len(ip.data) >= 16:
                fields['TCP::Offset'] = hex(tcp.off)
                fields['TCP::Flags'] = hex(tcp.flags)
                fields['TCP::Window'] = hex(tcp.win)
            if len(ip.data) == 20:
                fields['TCP::Checksum'] = hex(tcp.sum)
                fields['TCP::UrgentPtr'] = hex(tcp.urp)
            if len(ip.data) >= 20:
                if len(tcp.opts) > 0:
                    opts = dpkt.tcp.parse_opts(tcp.opts)
                    for o,d in opts:
                        if o == dpkt.tcp.TCP_OPT_EOL:
                            fields['TCP::OPT_EOL'] = d
                        elif o == dpkt.tcp.TCP_OPT_NOP:
                            fields['TCP::OPT_NOP'] = d
                        elif o == dpkt.tcp.TCP_OPT_MSS:
                            fields['TCP::OPT_MSS'] = d
                        elif o == dpkt.tcp.TCP_OPT_WSCALE:
                            fields['TCP::OPT_WSCALE'] = d
                        elif o == dpkt.tcp.TCP_OPT_SACKOK:
                            fields['TCP::OPT_SACKOK'] = d
                        elif o == dpkt.tcp.TCP_OPT_SACK:
                            fields['TCP::OPT_SACK'] = d
                        elif o == dpkt.tcp.TCP_OPT_TIMESTAMP:
                            fields['TCP::OPT_TIMESTAMP'] = d
        return fields
if __name__ == "__main__":
    # Usage: <script> <warts-file>
    # Prints one line per tracebox measurement and one line per hop,
    # showing the fields that changed between the sent and quoted packet.
    assert len(sys.argv) == 2
    w = WartsTraceBoxReader(sys.argv[1], verbose=False)
    while True:
        (flags, pkts) = w.next()
        # next() returns flags == False at end of file.
        if flags == False: break
        print "tracebox from %s to %s (result: %d)" % (flags['srcaddr'], flags['dstaddr'], flags['result'])
        last_tx = None
        last_tx_ts = 0
        i = 0
        for pkt in pkts:
            ts = pkt['time'] - flags['start']
            if pkt['dir'] == 1: #TX
                #print " TX at %1.3f:" % (ts)
                # Two TX in a row means the previous probe got no reply:
                # print a '*' for the unanswered hop.
                if last_tx != None:
                    i+=1
                    print " %d: *" % (i)
                last_tx = pkt['data']
                last_tx_ts = pkt['time']
            else: #RX
                #print " RX at %1.3f:" % (ts)
                i+=1
                rtt = (pkt['time'] - last_tx_ts)*1000.0
                if last_tx:
                    # dict_diff (defined elsewhere in this file) yields the
                    # header fields modified in flight.
                    diff = dict_diff(last_tx, pkt['data'])
                    print " %d: %s RTT:%1.3f: %s" % (i, pkt['data']['hop'], rtt, " ".join(diff.keys()))
                last_tx = None
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art

# Exercise one artificial-dataset configuration: 32 daily points with a
# constant trend, a 12-period cycle, no noise, AR order 12 and 100
# exogenous variables.
art.process_dataset(
    N=32,
    FREQ='D',
    seed=0,
    trendtype="ConstantTrend",
    cycle_length=12,
    transform="None",
    sigma=0.0,
    exog_count=100,
    ar_order=12,
)
|
from django.db import models
from django.db.models import Q
from django.core.exceptions import ObjectDoesNotExist
from django.contrib.auth import get_user_model
from django.contrib.auth.models import UserManager, Permission, AnonymousUser
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import smart_text
from django.utils.translation import ugettext as _
from django.conf import settings
from django.utils.six import text_type
from userena import settings as userena_settings
from userena.utils import generate_sha1, get_profile_model, get_datetime_now, \
get_user_profile
from userena import signals as userena_signals
from guardian.shortcuts import assign_perm, get_perms
import re
# A hex-encoded SHA1 digest: exactly 40 lowercase hex characters.
SHA1_RE = re.compile('^[a-f0-9]{40}$')

# Object-level permissions granted to every new user at signup (via
# django-guardian's assign_perm in UserenaManager.create_user): users may
# view/change/delete their own profile and change/delete their own user.
ASSIGNED_PERMISSIONS = {
    'profile':
        (('view_profile', 'Can view profile'),
        ('change_profile', 'Can change profile'),
        ('delete_profile', 'Can delete profile')),
    'user':
        (('change_user', 'Can change user'),
        ('delete_user', 'Can delete user'))
}
class UserenaManager(UserManager):
    """ Extra functionality for the Userena model. """

    def create_user(self, username, email, password, active=False,
                    send_email=True, pending_activation=False, first_name="", last_name="", organization=""):
        """
        A simple wrapper that creates a new :class:`User`.

        :param username:
            String containing the username of the new user.

        :param email:
            String containing the email address of the new user.

        :param password:
            String containing the password for the new user.

        :param active:
            Boolean that defines if the user requires activation by clicking
            on a link in an e-mail. Defaults to ``False``.

        :param send_email:
            Boolean that defines if the user should be sent an email. You could
            set this to ``False`` when you want to create a user in your own
            code, but don't want the user to activate through email.

        :param pending_activation:
            Boolean; when ``True`` (and ``send_email``) a "pending
            activation" email is sent instead of the normal activation email.

        :param first_name:
            Optional first name stored on the new user.

        :param last_name:
            Optional last name stored on the new user.

        :param organization:
            Passed through to ``send_pending_activation_email``.

        :return: :class:`User` instance representing the new user.

        """
        new_user = get_user_model().objects.create_user(
            username, email, password)
        new_user.is_active = active
        new_user.first_name = first_name
        new_user.last_name = last_name
        new_user.save()

        # Signup profile holding the activation key / email machinery.
        userena_profile = self.create_userena_profile(new_user)

        # All users have an empty profile
        profile_model = get_profile_model()
        try:
            new_profile = new_user.emif_profile
        except profile_model.DoesNotExist:
            new_profile = profile_model(user=new_user)
            new_profile.save(using=self._db)

        # Give permissions to view and change profile
        for perm in ASSIGNED_PERMISSIONS['profile']:
            assign_perm(perm[0], new_user, get_user_profile(user=new_user))

        # Give permissions to view and change itself
        for perm in ASSIGNED_PERMISSIONS['user']:
            assign_perm(perm[0], new_user, new_user)

        # NOTE: a second, redundant create_userena_profile() call used to
        # happen here; create_userena_profile() returns the already-existing
        # profile on a second call, so the duplicate was removed.
        if send_email:
            if pending_activation:
                userena_profile.send_pending_activation_email(organization=organization)
            else:
                userena_profile.send_activation_email()

        return new_user

    def create_userena_profile(self, user):
        """
        Creates an :class:`UserenaSignup` instance for this user.

        Idempotent: if a signup profile already exists for ``user`` it is
        returned unchanged, otherwise one is created with a fresh
        activation key.

        :param user:
            Django :class:`User` instance.

        :return: The newly created :class:`UserenaSignup` instance.

        """
        if isinstance(user.username, text_type):
            user.username = smart_text(user.username)

        salt, activation_key = generate_sha1(user.username)

        try:
            profile = self.get(user=user)
        except self.model.DoesNotExist:
            profile = self.create(user=user,
                                  activation_key=activation_key)
        return profile

    def reissue_activation(self, activation_key):
        """
        Creates a new ``activation_key`` resetting activation timeframe when
        users let the previous key expire.

        :param activation_key:
            String containing the secret SHA1 activation key.

        :return: ``True`` on success, ``False`` when the key is unknown or
            re-issuing failed.

        """
        try:
            userena = self.get(activation_key=activation_key)
        except self.model.DoesNotExist:
            return False
        try:
            salt, new_activation_key = generate_sha1(userena.user.username)
            userena.activation_key = new_activation_key
            userena.save(using=self._db)
            # Resetting date_joined restarts the activation window.
            userena.user.date_joined = get_datetime_now()
            userena.user.save(using=self._db)
            userena.send_activation_email()
            return True
        except Exception:
            return False

    def activate_user(self, activation_key):
        """
        Activate an :class:`User` by supplying a valid ``activation_key``.

        If the key is valid and an user is found, activates the user and
        return it. Also sends the ``activation_complete`` signal.

        :param activation_key:
            String containing the secret SHA1 for a valid activation.

        :return:
            The newly activated :class:`User` or ``False`` if not successful.

        """
        if SHA1_RE.search(activation_key):
            try:
                userena = self.get(activation_key=activation_key)
            except self.model.DoesNotExist:
                return False
            if not userena.user.is_active:
                if not userena.activation_key_expired():
                    is_active = True
                    user = userena.user
                    user.is_active = is_active
                    # Mark the key as used so it cannot activate twice.
                    userena.activation_key = userena_settings.USERENA_ACTIVATED
                    userena.save(using=self._db)
                    user.save(using=self._db)

                    # Send the activation_complete signal
                    userena_signals.activation_complete.send(sender=None,
                                                             user=user)
                    return user
        return False

    def check_expired_activation(self, activation_key):
        """
        Check if ``activation_key`` is still valid.

        Raises a ``self.model.DoesNotExist`` exception if key is not present or
        ``activation_key`` is not a valid string

        :param activation_key:
            String containing the secret SHA1 for a valid activation.

        :return:
            True if the key has expired, False if still valid.

        """
        if SHA1_RE.search(activation_key):
            userena = self.get(activation_key=activation_key)
            return userena.activation_key_expired()
        raise self.model.DoesNotExist

    def reject_user(self, activation_key):
        """
        Reject a not-yet-active user identified by ``activation_key``:
        marks the key as rejected, emails the user, and keeps the account
        inactive.

        :return: ``True`` on success, ``False`` otherwise.
        """
        if SHA1_RE.search(activation_key):
            try:
                userena = self.get(activation_key=activation_key)
            except self.model.DoesNotExist:
                return False
            if not userena.user.is_active and not userena.activation_key_expired():
                user = userena.user
                user.userena_signup.activation_key = userena_settings.USERENA_ACTIVATION_REJECTED
                user.userena_signup.send_rejection_email()
                user.is_active = False
                user.userena_signup.save()
                user.save()
                return True
        return False

    def confirm_email(self, confirmation_key):
        """
        Confirm an email address by checking a ``confirmation_key``.

        A valid ``confirmation_key`` will set the newly wanted e-mail
        address as the current e-mail address. Returns the user after
        success or ``False`` when the confirmation key is
        invalid. Also sends the ``confirmation_complete`` signal.

        :param confirmation_key:
            String containing the secret SHA1 that is used for verification.

        :return:
            The verified :class:`User` or ``False`` if not successful.

        """
        if SHA1_RE.search(confirmation_key):
            try:
                userena = self.get(email_confirmation_key=confirmation_key,
                                   email_unconfirmed__isnull=False)
            except self.model.DoesNotExist:
                return False
            else:
                user = userena.user
                old_email = user.email
                user.email = userena.email_unconfirmed
                # Clear the pending address and the used key.
                userena.email_unconfirmed, userena.email_confirmation_key = '',''
                userena.save(using=self._db)
                user.save(using=self._db)

                # Send the confirmation_complete signal
                userena_signals.confirmation_complete.send(sender=None,
                                                           user=user,
                                                           old_email=old_email)

                return user
        return False

    def delete_expired_users(self):
        """
        Checks for expired users and deletes the ``User`` associated with
        it. Skips if the user ``is_staff``.

        :return: A list containing the deleted users.

        """
        deleted_users = []
        for user in get_user_model().objects.filter(is_staff=False,
                                                    is_active=False):
            if user.userena_signup.activation_key_expired():
                deleted_users.append(user)
                user.delete()
        return deleted_users

    def check_permissions(self):
        """
        Checks that all permissions are set correctly for the users.

        :return: A set of users whose permissions was wrong.

        """
        # Variable to supply some feedback
        changed_permissions = []
        changed_users = []
        warnings = []

        # Check that all the permissions are available.
        for model, perms in ASSIGNED_PERMISSIONS.items():
            if model == 'profile':
                model_obj = get_profile_model()
            else: model_obj = get_user_model()

            model_content_type = ContentType.objects.get_for_model(model_obj)

            for perm in perms:
                try:
                    Permission.objects.get(codename=perm[0],
                                           content_type=model_content_type)
                except Permission.DoesNotExist:
                    changed_permissions.append(perm[1])
                    Permission.objects.create(name=perm[1],
                                              codename=perm[0],
                                              content_type=model_content_type)

        # it is safe to rely on settings.ANONYMOUS_USER_NAME since it is a
        # requirement of django-guardian
        for user in get_user_model().objects.exclude(username=settings.ANONYMOUS_USER_NAME):
            try:
                user_profile = get_user_profile(user=user)
            except ObjectDoesNotExist:
                warnings.append(_("No profile found for %(username)s") \
                                    % {'username': user.username})
            else:
                all_permissions = get_perms(user, user_profile) + get_perms(user, user)

                for model, perms in ASSIGNED_PERMISSIONS.items():
                    if model == 'profile':
                        perm_object = get_user_profile(user=user)
                    else: perm_object = user

                    for perm in perms:
                        if perm[0] not in all_permissions:
                            assign_perm(perm[0], user, perm_object)
                            changed_users.append(user)

        return (changed_permissions, changed_users, warnings)
class UserenaBaseProfileManager(models.Manager):
    """ Manager for :class:`UserenaProfile` """
    def get_visible_profiles(self, user=None):
        """
        Returns all the visible profiles available to this user.

        For now keeps it simple by just applying the cases when a user is not
        active, a user has it's profile closed to everyone or a user only
        allows registered users to view their profile.

        :param user:
            A Django :class:`User` instance.

        :return:
            All profiles that are visible to this user.

        """
        # Start from active users only.
        visible = self.all().filter(user__is_active=True)

        # Closed profiles are never visible; anonymous visitors also
        # cannot see registered-only profiles.
        hidden = Q(privacy='closed')
        if user and isinstance(user, AnonymousUser):
            hidden = hidden | Q(privacy='registered')
        return visible.exclude(hidden)
|
from django import forms
from django.core import validators
from comperio.accounts.models import cUser, Settings, cGroup
from django.core.validators import email_re
import random, datetime, sha
# Minimum number of characters required for account passwords.
MIN_PASSWORD_LENGTH = 6
class LoginForm(forms.Form):
    """account login form"""
    # 'username' accepts either a username or an email address (see help_text).
    username = forms.CharField(widget=forms.TextInput(attrs={'class':'span-5', 'placeholder':'username', 'tabindex':'1'}), help_text="username or email")
    password = forms.CharField(widget=forms.PasswordInput(attrs={'class':'span-5 char_count','placeholder':'password', 'tabindex':'2'}))
class RegistrationForm(forms.Form):
    """user registration form

    Validation is split across isValid* helpers that the view is expected
    to call explicitly; save() creates an inactive user with an
    activation key and expiry.
    """
    def check_consent(val):
        """check if the user has agreed to the consent form"""
        # NOTE(review): defined without `self` and not wired to any field
        # validator here -- appears unused; confirm before removing.
        return val

    username = forms.CharField(widget=forms.TextInput(attrs={'class':'span-5', 'placeholder':'username'}), max_length=30)
    email = forms.CharField(widget=forms.TextInput(attrs={'class':'span-5','placeholder':'email'}), max_length=60, validators=[validators.validate_email])
    password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class':'span-5 char_count','placeholder':'password'}), max_length=60, validators=[validators.MinLengthValidator(MIN_PASSWORD_LENGTH)])
    password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class':'span-5 char_count','placeholder':'verify password'}), max_length=60, validators=[validators.MinLengthValidator(MIN_PASSWORD_LENGTH)])
    consent = forms.BooleanField(widget=forms.CheckboxInput() , label="I have read and understood the above consent form")
    # Anti-spam honeypot: hidden field that humans leave empty.
    honeypot = forms.CharField(widget=forms.HiddenInput(), required=False)

    def isValidHuman(self, new_data):
        """check if the user is human"""
        # Bots tend to fill every field, including the hidden honeypot.
        return new_data['honeypot'] == ""

    def isValidUsername(self, new_data):
        """check if the username is valid"""
        if not cUser.objects.filter(username=new_data['username']):
            return True
        return False

    def isValidEmail(self, new_data):
        """check if the email is unique"""
        # TODO: email is ok if same
        if not cUser.objects.filter(email=new_data['email']):
            return True
        return False

    # TODO: display specific error messages on the form
    # TODO: form is not passing field errors.
    def isValidPassword(self, new_data):
        """
        check if the passwords match
        """
        if len(new_data['password1']) < MIN_PASSWORD_LENGTH or len(new_data['password2']) < MIN_PASSWORD_LENGTH:
            return False
        return True

    def PasswordsMatch(self, new_data):
        """check if the passwords match"""
        if new_data['password1'] == new_data['password2']:
            return True
        return False

    def save(self, new_data):
        """create a new inactive user from the form data

        Raises forms.ValidationError on missing consent, invalid email,
        or duplicate email; returns the new (inactive) user otherwise.
        """
        # make sure email is unique
        if new_data['consent'] == False:
            raise forms.ValidationError(u'You must agree to the consent form')

        try:
            duplicate = cUser.objects.get(email=new_data['email'])
        except cUser.DoesNotExist:
            # make sure we have a valid email
            # NOTE(review): email_re and the `sha` module are Python-2-era
            # APIs (removed/deprecated in later Django/Python).
            if email_re.search(new_data['email']):
                # Build the activation key for their account
                salt = sha.new(str(random.random())).hexdigest()[:5]
                activation_key = sha.new(salt+new_data['username']).hexdigest()
                # Activation link is valid for two days.
                key_expires = datetime.datetime.today() + datetime.timedelta(2)

                u = cUser.objects.create(username=new_data['username'],
                                    email=new_data['email'],
                                    activation_key=activation_key,
                                    key_expires=key_expires,
                                    )
                u.set_password(new_data['password1'])
                u.is_active=False
                u.save()
                return u
            # invalid email
            raise forms.ValidationError(u'invalid email')

        # duplciate user or bad email (only reached when the .get() above
        # found an existing user with this email)
        raise forms.ValidationError(u'email already in use')
        return None
class EditAccountForm(forms.Form):
    """user registration form

    Edit variant: passwords are optional (blank means "keep current").
    """
    username = forms.CharField(widget=forms.TextInput(attrs={'class':'span-5', 'placeholder':'username'}), max_length=30)
    email = forms.CharField(widget=forms.TextInput(attrs={'class':'span-5','placeholder':'email'}), max_length=60, validators=[validators.validate_email])
    password1 = forms.CharField(widget=forms.PasswordInput(attrs={'class':'span-5 char_count','placeholder':'password'}), max_length=60, validators=[validators.MinLengthValidator(MIN_PASSWORD_LENGTH)], required=False)
    password2 = forms.CharField(widget=forms.PasswordInput(attrs={'class':'span-5 char_count','placeholder':'verify password'}), max_length=60, validators=[validators.MinLengthValidator(MIN_PASSWORD_LENGTH)], required=False)

    def isValidUsername(self, new_data):
        """check if the username is valid"""
        # TODO: username ok if same
        if not cUser.objects.filter(username=new_data['username']):
            return True
        return False

    # TODO: display specific error messages on the form
    # TODO: form is not passing field errors.
    def isValidPassword(self, new_data):
        """
        check if the passwords match
        """
        # Empty password1 means "no password change" and is always valid.
        if new_data['password1'] != '':
            if len(new_data['password1']) < MIN_PASSWORD_LENGTH or len(new_data['password2']) < MIN_PASSWORD_LENGTH:
                return False
        return True

    def isValidEmail(self, new_data):
        """check if the email is unique"""
        # TODO: email is ok if same
        if not cUser.objects.filter(email=new_data['email']):
            return True
        return False

    def PasswordsMatch(self, new_data):
        """check if the passwords match"""
        if new_data['password1'] == new_data['password2']:
            return True
        return False

    def update(self, request, u):
        """update an existing user from the form data

        Raises forms.ValidationError if the new email or username is
        already taken by another account.
        """
        # make sure email is unique
        new_data = request.POST.copy()
        if u.email != new_data['email']:
            try:
                # The ValidationError raised here is NOT caught by the
                # DoesNotExist handler below; it propagates to the caller.
                duplicate = cUser.objects.get(email=new_data['email'])
                raise forms.ValidationError(u'email is not available')
            except cUser.DoesNotExist:
                u.email = new_data['email']

        if u.username != new_data['username']:
            try:
                duplicate = cUser.objects.get(username=new_data['username'])
                raise forms.ValidationError(u'username is not available')
            except cUser.DoesNotExist:
                u.username = new_data['username']

        # Blank password means keep the existing one.
        if new_data['password1'] != '':
            u.set_password(new_data['password1'])
        u.save()
class CreateGroupForm(forms.Form):
    """create a new user group"""
    title = forms.CharField(widget=forms.TextInput(attrs={'class':'span-10 title',}), max_length=100)
    description = forms.CharField(widget=forms.Textarea(attrs={'class':'span-10 description-textarea',}), max_length=1000, required=False)
    # Choice lists come from the cGroup model.
    type = forms.CharField(widget=forms.Select(choices=cGroup.types), required=False)
    visibility = forms.CharField(widget=forms.Select(choices=cGroup.visibility_types), required=False)
    open_registration = forms.CharField(widget=forms.CheckboxInput(), help_text="Open registration allows anyone to request group membership")
class SettingsForm(forms.ModelForm):
    """profile settings form"""
    class Meta:
        model = Settings
        # The owning user is set by the view, not edited on the form.
        exclude = ("user",)
|
from classytags.arguments import Argument, MultiValueArgument
from classytags.core import Options, Tag
from classytags.helpers import InclusionTag
from classytags.parser import Parser
from cms.models import Page, Placeholder as PlaceholderModel
from cms.plugin_rendering import render_plugins, render_placeholder
from cms.plugins.utils import get_plugins
from cms.utils import get_language_from_request
from cms.utils.moderator import get_cmsplugin_queryset, get_page_queryset
from cms.utils.placeholder import validate_placeholder_name
from django import template
from django.conf import settings
from django.contrib.sites.models import Site
from django.core.cache import cache
from django.core.mail import mail_managers
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from itertools import chain
import re
# Tag/filter registry for this template-tag module.
register = template.Library()
def get_site_id(site):
    """Resolve *site* to a numeric site id.

    Accepts a ``Site`` instance, an int, or a digit string; anything else
    (including a falsy value) falls back to ``settings.SITE_ID``.
    """
    if not site:
        return settings.SITE_ID
    if isinstance(site, Site):
        return site.id
    if isinstance(site, int) or (isinstance(site, basestring) and site.isdigit()):
        return int(site)
    return settings.SITE_ID
def has_permission(page, request):
    # Template filter: True when the current request may change *page*.
    return page.has_change_permission(request)
register.filter(has_permission)
CLEAN_KEY_PATTERN = re.compile(r'[^a-zA-Z0-9_-]')
def _clean_key(key):
return CLEAN_KEY_PATTERN.sub('-', key)
def _get_cache_key(name, page_lookup, lang, site_id):
    """Build a cache key for *name*, scoped by page lookup, site and language."""
    if isinstance(page_lookup, Page):
        raw = str(page_lookup.pk)
    else:
        raw = str(page_lookup)
    return '%s__page_lookup:%s_site:%s_lang:%s' % (
        name, _clean_key(raw), str(site_id), str(lang))
def _get_page_by_untyped_arg(page_lookup, request, site_id):
    """
    The `page_lookup` argument can be of any of the following types:
    - Integer: interpreted as `pk` of the desired page
    - String: interpreted as `reverse_id` of the desired page
    - `dict`: a dictionary containing keyword arguments to find the desired page
    (for instance: `{'pk': 1}`)
    - `Page`: you can also pass a Page object directly, in which case there will be no database lookup.
    - `None`: the current page will be used
    """
    if page_lookup is None:
        return request.current_page
    if isinstance(page_lookup, Page):
        return page_lookup
    # Normalize scalar lookups into queryset filter kwargs.
    if isinstance(page_lookup, basestring):
        page_lookup = {'reverse_id': page_lookup}
    elif isinstance(page_lookup, (int, long)):
        page_lookup = {'pk': page_lookup}
    elif not isinstance(page_lookup, dict):
        raise TypeError('The page_lookup argument can be either a Dictionary, Integer, Page, or String.')
    page_lookup.update({'site': site_id})
    try:
        return get_page_queryset(request).get(**page_lookup)
    except Page.DoesNotExist:
        site = Site.objects.get_current()
        subject = _('Page not found on %(domain)s') % {'domain':site.domain}
        body = _("A template tag couldn't find the page with lookup arguments `%(page_lookup)s\n`. "
            "The URL of the request was: http://%(host)s%(path)s") \
            % {'page_lookup': repr(page_lookup), 'host': site.domain, 'path': request.path}
        if settings.DEBUG:
            # Fail loudly in development so broken lookups are noticed.
            raise Page.DoesNotExist(body)
        else:
            # In production, optionally notify site managers and render nothing.
            if settings.SEND_BROKEN_LINK_EMAILS:
                mail_managers(subject, body, fail_silently=True)
            return None
class PageUrl(InclusionTag):
    """{% page_url page_lookup [lang] [site] %} -- render the absolute URL
    of the looked-up page (cached), or the empty string when unavailable."""
    template = 'cms/content.html'
    name = 'page_url'

    options = Options(
        Argument('page_lookup'),
        Argument('lang', required=False, default=None),
        Argument('site', required=False, default=None),
    )

    def get_context(self, context, page_lookup, lang, site):
        site_id = get_site_id(site)
        request = context.get('request', False)
        if not request:
            return {'content': ''}
        # "dummy" is the placeholder current_page used in some admin/preview
        # contexts; emit nothing there.
        if request.current_page == "dummy":
            return {'content': ''}
        if lang is None:
            lang = get_language_from_request(request)
        # URL is cached per (page, site, lang).
        cache_key = _get_cache_key('page_url', page_lookup, lang, site_id)+'_type:absolute_url'
        url = cache.get(cache_key)
        if not url:
            page = _get_page_by_untyped_arg(page_lookup, request, site_id)
            if page:
                url = page.get_absolute_url(language=lang)
                cache.set(cache_key, url, settings.CMS_CACHE_DURATIONS['content'])
        if url:
            return {'content': url}
        return {'content': ''}
register.tag(PageUrl)
# Legacy alias kept for backwards compatibility.
register.tag('page_id_url', PageUrl)
def _get_placeholder(current_page, page, context, name):
    """Return *page*'s placeholder named *name*, memoizing the slot->placeholder
    map per page on ``current_page._tmp_placeholders_cache``."""
    cache_map = getattr(current_page, '_tmp_placeholders_cache', {})
    if page.pk in cache_map:
        return cache_map[page.pk].get(name, None)
    # First lookup for this page: fetch all placeholders once and index by slot.
    cache_map[page.pk] = dict((ph.slot, ph) for ph in page.placeholders.all())
    current_page._tmp_placeholders_cache = cache_map
    return cache_map[page.pk].get(name, None)
def get_placeholder_content(context, request, current_page, name, inherit):
    # Resolve and render the placeholder *name* for *current_page*.
    # With inherit=True, walk up the cached ancestors and use the first
    # page whose same-named placeholder has plugins and renders non-empty.
    pages = [current_page]
    if inherit:
        pages = chain([current_page], current_page.get_cached_ancestors(ascending=True))
    for page in pages:
        placeholder = _get_placeholder(current_page, page, context, name)
        if placeholder is None:
            continue
        if not get_plugins(request, placeholder):
            continue
        content = render_placeholder(placeholder, context, name)
        if content:
            return content
    # Fallback: render the current page's own placeholder even if empty,
    # so e.g. edit-mode frontends still get the placeholder markup.
    placeholder = _get_placeholder(current_page, current_page, context, name)
    return render_placeholder(placeholder, context, name)
class PlaceholderParser(Parser):
    # Only parse the {% placeholder %}...{% endplaceholder %} block form
    # when the literal token 'or' appears among the extra bits; otherwise
    # the tag is self-closing and no block is consumed.
    def parse_blocks(self):
        for bit in getattr(self.kwargs['extra_bits'], 'value', self.kwargs['extra_bits']):
            # Each bit may be a wrapped token or a raw variable; unwrap either way.
            if getattr(bit, 'value', bit.var.value) == 'or':
                return super(PlaceholderParser, self).parse_blocks()
        return
class PlaceholderOptions(Options):
    # Options subclass that plugs in the conditional block parser above.
    def get_parser_class(self):
        return PlaceholderParser
class Placeholder(Tag):
    """
    This template node is used to output page content and
    is also used in the admin to dynamically generate input fields.

    eg: {% placeholder "placeholder_name" %}

    {% placeholder "sidebar" inherit %}

    {% placeholder "footer" inherit or %}
        <a href="/about/">About us</a>
    {% endplaceholder %}

    Keyword arguments:
    name -- the name of the placeholder
    width -- additional width attribute (integer) which gets added to the plugin context
    (deprecated, use `{% with 320 as width %}{% placeholder "foo"}{% endwith %}`)
    inherit -- optional argument which if given will result in inheriting
    the content of the placeholder with the same name on parent pages
    or -- optional argument which if given will make the template tag a block
    tag whose content is shown if the placeholder is empty
    """
    name = 'placeholder'
    options = PlaceholderOptions(
        Argument('name', resolve=False),
        MultiValueArgument('extra_bits', required=False, resolve=False),
        blocks=[
            ('endplaceholder', 'nodelist'),
        ]
    )

    def render_tag(self, context, name, extra_bits, nodelist=None):
        validate_placeholder_name(name)
        width = None
        inherit = False
        # extra_bits may contain 'inherit' and/or a deprecated numeric width.
        for bit in extra_bits:
            if bit == 'inherit':
                inherit = True
            elif bit.isdigit():
                width = int(bit)
                import warnings
                warnings.warn(
                    "The width parameter for the placeholder tag is deprecated.",
                    DeprecationWarning
                )
        if not 'request' in context:
            return ''
        request = context['request']
        if width:
            context.update({'width': width})

        page = request.current_page
        if not page or page == 'dummy':
            return ''

        content = get_placeholder_content(context, request, page, name, inherit)
        # Empty placeholder + an {% ... or %} block: render the fallback.
        if not content and nodelist:
            return nodelist.render(context)
        return content

    def get_name(self):
        # The name argument arrives unresolved; strip surrounding quotes.
        return self.kwargs['name'].var.value.strip('"').strip("'")
register.tag(Placeholder)
class PageAttribute(Tag):
    """
    This template node is used to output attribute from a page such
    as its title or slug.

    Synopsis
        {% page_attribute "field-name" %}
        {% page_attribute "field-name" page_lookup %}

    Example
        {# Output current page's page_title attribute: #}
        {% page_attribute "page_title" %}
        {# Output page_title attribute of the page with reverse_id "the_page": #}
        {% page_attribute "page_title" "the_page" %}
        {# Output slug attribute of the page with pk 10: #}
        {% page_attribute "slug" 10 %}

    Keyword arguments:
    field-name -- the name of the field to output. Use one of:
    - title
    - menu_title
    - page_title
    - slug
    - meta_description
    - meta_keywords

    page_lookup -- lookup argument for Page, if omitted field-name of current page is returned.
    See _get_page_by_untyped_arg() for detailed information on the allowed types and their interpretation
    for the page_lookup argument.

    """
    name = 'page_attribute'
    options = Options(
        Argument('name', resolve=False),
        Argument('page_lookup', required=False, default=None)
    )

    # Whitelist of page attributes exposed to templates.
    valid_attributes = [
        "title",
        "slug",
        "meta_description",
        "meta_keywords",
        "page_title",
        "menu_title"
    ]

    def render_tag(self, context, name, page_lookup):
        if not 'request' in context:
            return ''
        name = name.lower()
        request = context['request']
        lang = get_language_from_request(request)
        page = _get_page_by_untyped_arg(page_lookup, request, get_site_id(None))
        if page == "dummy":
            return ''
        if page and name in self.valid_attributes:
            # Dispatch to the page's get_<attr>() accessor.
            f = getattr(page, "get_%s" % name)
            return f(language=lang, fallback=True)
        return ''
register.tag(PageAttribute)
class CleanAdminListFilter(InclusionTag):
    """Admin list filter that collapses choices sharing a query string."""
    template = 'admin/filter.html'
    name = 'clean_admin_list_filter'

    options = Options(
        Argument('cl'),
        Argument('spec'),
    )

    def get_context(self, context, cl, spec):
        # Sort by query string so duplicates become adjacent, then keep
        # only the first choice of each run.
        previous = None
        deduped = []
        for entry in sorted(list(spec.choices(cl)), key=lambda c: c['query_string']):
            if entry['query_string'] != previous:
                deduped.append(entry)
                previous = entry['query_string']
        return {'title': spec.title(), 'choices': deduped}
def _show_placeholder_for_page(context, placeholder_name, page_lookup, lang=None,
        site=None, cache_result=True):
    """
    Shows the content of a page with a placeholder name and given lookup
    arguments in the given language.
    This is useful if you want to have some more or less static content that is
    shared among many pages, such as a footer.

    See _get_page_by_untyped_arg() for detailed information on the allowed types
    and their interpretation for the page_lookup argument.
    """
    validate_placeholder_name(placeholder_name)

    request = context.get('request', False)
    site_id = get_site_id(site)

    if not request:
        return {'content': ''}
    if lang is None:
        lang = get_language_from_request(request)

    content = None

    # Rendered output is cached per (page, site, lang, placeholder).
    if cache_result:
        base_key = _get_cache_key('_show_placeholder_for_page', page_lookup, lang, site_id)
        cache_key = _clean_key('%s_placeholder:%s' % (base_key, placeholder_name))
        content = cache.get(cache_key)

    if not content:
        page = _get_page_by_untyped_arg(page_lookup, request, site_id)
        if not page:
            return {'content': ''}
        try:
            placeholder = page.placeholders.get(slot=placeholder_name)
        except PlaceholderModel.DoesNotExist:
            # Missing slot: blow up in DEBUG, render nothing in production.
            if settings.DEBUG:
                raise
            return {'content': ''}
        # Render only top-level plugins for this placeholder/language.
        baseqs = get_cmsplugin_queryset(request)
        plugins = baseqs.filter(
            placeholder=placeholder,
            language=lang,
            placeholder__slot__iexact=placeholder_name,
            parent__isnull=True
        ).order_by('position').select_related()
        c = render_plugins(plugins, context, placeholder)
        content = "".join(c)
        if cache_result:
            cache.set(cache_key, content, settings.CMS_CACHE_DURATIONS['content'])

    if content:
        return {'content': mark_safe(content)}
    return {'content': ''}
class ShowPlaceholderById(InclusionTag):
    """{% show_placeholder_by_id name reverse_id [lang] [site] %} -- render a
    named placeholder from the page identified by reverse_id (cached)."""
    template = 'cms/content.html'
    name = 'show_placeholder_by_id'

    options = Options(
        Argument('placeholder_name'),
        Argument('reverse_id'),
        Argument('lang', required=False, default=None),
        Argument('site', required=False, default=None),
    )

    def get_context(self, *args, **kwargs):
        return _show_placeholder_for_page(**self.get_kwargs(*args, **kwargs))

    def get_kwargs(self, context, placeholder_name, reverse_id, lang, site):
        # Mapped to _show_placeholder_for_page's signature; subclasses
        # override this to tweak the call (e.g. disable caching).
        return {
            'context': context,
            'placeholder_name': placeholder_name,
            'page_lookup': reverse_id,
            'lang': lang,
            'site': site
        }
register.tag(ShowPlaceholderById)
# Legacy alias kept for backwards compatibility.
register.tag('show_placeholder', ShowPlaceholderById)
class ShowUncachedPlaceholderById(ShowPlaceholderById):
    """Same as show_placeholder_by_id but bypasses the render cache."""
    name = 'show_uncached_placeholder_by_id'
    def get_kwargs(self, *args, **kwargs):
        kwargs = super(ShowUncachedPlaceholderById, self).get_kwargs(*args, **kwargs)
        kwargs['cache_result'] = False
        return kwargs
register.tag(ShowUncachedPlaceholderById)
# Legacy alias kept for backwards compatibility.
register.tag('show_uncached_placeholder', ShowUncachedPlaceholderById)
class CMSToolbar(InclusionTag):
    """{% cms_toolbar %} -- render the CMS toolbar when the request carries
    a toolbar object that wants to be shown; otherwise render nothing."""
    template = 'cms/toolbar/toolbar.html'
    name = 'cms_toolbar'

    def render(self, context):
        # Guard clauses: no request, no toolbar, or toolbar hidden.
        request = context.get('request', None)
        if not request:
            return ''
        toolbar = getattr(request, 'toolbar', None)
        if not toolbar:
            return ''
        if not toolbar.show_toolbar:
            return ''
        return super(CMSToolbar, self).render(context)

    def get_context(self, context):
        context['CMS_TOOLBAR_CONFIG'] = context['request'].toolbar.as_json(context)
        return context
register.tag(CMSToolbar)
|
import hashlib
import json
import os
import uuid
from django import forms
from django.conf import settings
from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.core.serializers.json import DjangoJSONEncoder
from django.db import models
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from modelcluster.contrib.taggit import ClusterTaggableManager
from modelcluster.fields import ParentalKey, ParentalManyToManyField
from modelcluster.models import ClusterableModel
from taggit.managers import TaggableManager
from taggit.models import ItemBase, TagBase, TaggedItemBase
from wagtail.admin.edit_handlers import (
FieldPanel, InlinePanel, MultiFieldPanel, ObjectList, PageChooserPanel, StreamFieldPanel,
TabbedInterface)
from wagtail.admin.forms import WagtailAdminPageForm
from wagtail.admin.mail import send_mail
from wagtail.contrib.forms.forms import FormBuilder
from wagtail.contrib.forms.models import (
FORM_FIELD_CHOICES, AbstractEmailForm, AbstractFormField, AbstractFormSubmission)
from wagtail.contrib.forms.views import SubmissionsListView
from wagtail.contrib.settings.models import BaseSetting, register_setting
from wagtail.contrib.sitemaps import Sitemap
from wagtail.contrib.table_block.blocks import TableBlock
from wagtail.core.blocks import CharBlock, RawHTMLBlock, RichTextBlock, StructBlock
from wagtail.core.fields import RichTextField, StreamField
from wagtail.core.models import Orderable, Page, PageManager, PageQuerySet, Task
from wagtail.documents.edit_handlers import DocumentChooserPanel
from wagtail.documents.models import AbstractDocument, Document
from wagtail.images.blocks import ImageChooserBlock
from wagtail.images.edit_handlers import ImageChooserPanel
from wagtail.images.models import AbstractImage, AbstractRendition, Image
from wagtail.search import index
from wagtail.snippets.edit_handlers import SnippetChooserPanel
from wagtail.snippets.models import register_snippet
from wagtail.utils.decorators import cached_classmethod
from .forms import FormClassAdditionalFieldPageForm, ValidatedPageForm
# Choices for EventPage.audience (stored value, human-readable label).
EVENT_AUDIENCE_CHOICES = (
    ('public', "Public"),
    ('private', "Private"),
)
# Promote-tab panels shared by several page types below.
COMMON_PANELS = (
    FieldPanel('slug'),
    FieldPanel('seo_title'),
    FieldPanel('show_in_menus'),
    FieldPanel('search_description'),
)
class LinkFields(models.Model):
    """Abstract mixin providing a link that may target an external URL,
    an internal page, or a document.
    """
    link_external = models.URLField("External link", blank=True)
    link_page = models.ForeignKey(
        'wagtailcore.Page',
        null=True,
        blank=True,
        related_name='+',
        on_delete=models.CASCADE
    )
    link_document = models.ForeignKey(
        'wagtaildocs.Document',
        null=True,
        blank=True,
        related_name='+',
        on_delete=models.CASCADE
    )
    @property
    def link(self):
        """Return the effective URL: page first, then document, then external."""
        if self.link_page:
            return self.link_page.url
        elif self.link_document:
            return self.link_document.url
        else:
            return self.link_external
    panels = [
        FieldPanel('link_external'),
        PageChooserPanel('link_page'),
        DocumentChooserPanel('link_document'),
    ]
    class Meta:
        abstract = True
class CarouselItem(LinkFields):
    """Abstract carousel entry: an image or embed with a caption and a link."""
    image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+'
    )
    embed_url = models.URLField("Embed URL", blank=True)
    caption = models.CharField(max_length=255, blank=True)
    panels = [
        ImageChooserPanel('image'),
        FieldPanel('embed_url'),
        FieldPanel('caption'),
        MultiFieldPanel(LinkFields.panels, "Link"),
    ]
    class Meta:
        abstract = True
class RelatedLink(LinkFields):
    """Abstract titled link; concrete subclasses add the owning ParentalKey."""
    title = models.CharField(max_length=255, help_text="Link title")
    panels = [
        FieldPanel('title'),
        MultiFieldPanel(LinkFields.panels, "Link"),
    ]
    class Meta:
        abstract = True
class SimplePage(Page):
    """Minimal page type with a single free-text body field."""
    content = models.TextField()
    content_panels = [
        FieldPanel('title', classname="full title"),
        FieldPanel('content'),
    ]
    def get_admin_display_title(self):
        # Append a marker so listings can distinguish this page type.
        return "%s (simple page)" % super().get_admin_display_title()
class PageWithExcludedCopyField(Page):
    """Page type demonstrating ``exclude_fields_in_copy``: ``special_field``
    reverts to its default when the page is copied.
    """
    content = models.TextField()
    # Exclude this field from being copied
    special_field = models.CharField(
        blank=True, max_length=255, default='Very Special')
    exclude_fields_in_copy = ['special_field']
    content_panels = [
        FieldPanel('title', classname="full title"),
        FieldPanel('special_field'),
        FieldPanel('content'),
    ]
class PageWithOldStyleRouteMethod(Page):
    """
    Prior to Wagtail 0.4, the route() method on Page returned an HttpResponse
    rather than a Page instance. As subclasses of Page may override route,
    we need to continue accepting this convention (albeit as a deprecated API).
    """
    content = models.TextField()
    template = 'tests/simple_page.html'
    def route(self, request, path_components):
        # Old-style: return the response directly, ignoring sub-paths.
        return self.serve(request)
class FilePage(Page):
    """Page type exercising FileField handling in the admin."""
    file_field = models.FileField()
# Panels are attached after the class body (test-app convention).
FilePage.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('file_field'),
]
class EventPageCarouselItem(Orderable, CarouselItem):
page = ParentalKey('tests.EventPage', related_name='carousel_items', on_delete=models.CASCADE)
class EventPageRelatedLink(Orderable, RelatedLink):
page = ParentalKey('tests.EventPage', related_name='related_links', on_delete=models.CASCADE)
class EventPageSpeakerAward(Orderable, models.Model):
    """An award held by a speaker; nested inline under EventPageSpeaker."""
    speaker = ParentalKey('tests.EventPageSpeaker', related_name='awards', on_delete=models.CASCADE)
    name = models.CharField("Award name", max_length=255)
    date_awarded = models.DateField(null=True, blank=True)
    panels = [
        FieldPanel('name'),
        FieldPanel('date_awarded'),
    ]
class EventPageSpeaker(Orderable, LinkFields, ClusterableModel):
    """A speaker at an event; clusterable so its own 'awards' inline works."""
    page = ParentalKey('tests.EventPage', related_name='speakers', related_query_name='speaker', on_delete=models.CASCADE)
    first_name = models.CharField("Name", max_length=255, blank=True)
    last_name = models.CharField("Surname", max_length=255, blank=True)
    image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+'
    )
    @property
    def name_display(self):
        """Full display name; both parts may be blank."""
        return self.first_name + " " + self.last_name
    panels = [
        FieldPanel('first_name'),
        FieldPanel('last_name'),
        ImageChooserPanel('image'),
        MultiFieldPanel(LinkFields.panels, "Link"),
        InlinePanel('awards', label="Awards"),
    ]
class EventCategory(models.Model):
name = models.CharField("Name", max_length=255)
def __str__(self):
return self.name
class EventPageForm(WagtailAdminPageForm):
    """Admin form for EventPage that validates the event's date range."""
    def clean(self):
        cleaned_data = super().clean()
        # Use .get(): a field that failed its own validation is absent from
        # cleaned_data, and direct indexing would raise KeyError instead of
        # reporting the real validation errors.
        start_date = cleaned_data.get('date_from')
        end_date = cleaned_data.get('date_to')
        # Make sure that the event starts before it ends
        if start_date and end_date and start_date > end_date:
            raise ValidationError('The end date must be after the start date')
        return cleaned_data
class EventPage(Page):
    """The main event page type used throughout the test suite."""
    date_from = models.DateField("Start date", null=True)
    date_to = models.DateField(
        "End date",
        null=True,
        blank=True,
        help_text="Not required if event is on a single day"
    )
    time_from = models.TimeField("Start time", null=True, blank=True)
    time_to = models.TimeField("End time", null=True, blank=True)
    audience = models.CharField(max_length=255, choices=EVENT_AUDIENCE_CHOICES)
    location = models.CharField(max_length=255)
    body = RichTextField(blank=True)
    cost = models.CharField(max_length=255)
    signup_link = models.URLField(blank=True)
    feed_image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+'
    )
    categories = ParentalManyToManyField(EventCategory, blank=True)
    search_fields = [
        # Index the display label of the audience choice, not the raw value.
        index.SearchField('get_audience_display'),
        index.SearchField('location'),
        index.SearchField('body'),
        index.FilterField('url_path'),
    ]
    password_required_template = 'tests/event_page_password_required.html'
    # Custom form enforcing date_from <= date_to (see EventPageForm.clean).
    base_form_class = EventPageForm
# Panels are attached after the class body (test-app convention).
EventPage.content_panels = [
    FieldPanel('title', classname="full title"),
    FieldPanel('date_from'),
    FieldPanel('date_to'),
    FieldPanel('time_from'),
    FieldPanel('time_to'),
    FieldPanel('location'),
    FieldPanel('audience'),
    FieldPanel('cost'),
    FieldPanel('signup_link'),
    InlinePanel('carousel_items', label="Carousel items"),
    FieldPanel('body', classname="full"),
    InlinePanel('speakers', label="Speakers", heading="Speaker lineup"),
    InlinePanel('related_links', label="Related links"),
    FieldPanel('categories'),
    # InlinePanel related model uses `pk` not `id`
    InlinePanel('head_counts', label='Head Counts'),
]
EventPage.promote_panels = [
    MultiFieldPanel(COMMON_PANELS, "Common page configuration"),
    ImageChooserPanel('feed_image'),
]
class HeadCountRelatedModelUsingPK(models.Model):
    """Related model that uses a custom primary key (pk) not id"""
    custom_id = models.AutoField(primary_key=True)
    event_page = ParentalKey(
        EventPage,
        on_delete=models.CASCADE,
        related_name='head_counts'
    )
    head_count = models.IntegerField()
    panels = [FieldPanel('head_count')]
class FormClassAdditionalFieldPage(Page):
location = models.CharField(max_length=255)
body = RichTextField(blank=True)
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('location'),
FieldPanel('body'),
FieldPanel('code'), # not in model, see set base_form_class
]
base_form_class = FormClassAdditionalFieldPageForm
class SingleEventPage(EventPage):
    """EventPage subclass with a custom URL scheme: its canonical URL carries
    a 'pointless-suffix/' path component, which route() maps back to the page.
    """
    excerpt = models.TextField(
        max_length=255,
        blank=True,
        null=True,
        help_text="Short text to describe what is this action about"
    )
    # Give this page model a custom URL routing scheme
    def get_url_parts(self, request=None):
        url_parts = super().get_url_parts(request=request)
        if url_parts is None:
            return None
        else:
            site_id, root_url, page_path = url_parts
            return (site_id, root_url, page_path + 'pointless-suffix/')
    def route(self, request, path_components):
        if path_components == ['pointless-suffix']:
            # treat this as equivalent to a request for this page
            return super().route(request, [])
        else:
            # fall back to default routing rules
            return super().route(request, path_components)
    def get_admin_display_title(self):
        # Marker so admin listings can distinguish this page type.
        return "%s (single event)" % super().get_admin_display_title()
SingleEventPage.content_panels = [FieldPanel('excerpt')] + EventPage.content_panels
class EventSitemap(Sitemap):
pass
class EventIndex(Page):
    """Listing page for events, with pagination, numeric sub-page routing,
    and extra sitemap/cache entries for a 'past events' view.
    """
    intro = RichTextField(blank=True)
    ajax_template = 'tests/includes/event_listing.html'
    def get_events(self):
        """Return the live EventPage children of this index."""
        return self.get_children().live().type(EventPage)
    def get_paginator(self):
        # Four events per listing page.
        return Paginator(self.get_events(), 4)
    def get_context(self, request, page=1):
        # Pagination: fall back to the first page for non-integers and to
        # the last page when the requested number is out of range.
        paginator = self.get_paginator()
        try:
            events = paginator.page(page)
        except PageNotAnInteger:
            events = paginator.page(1)
        except EmptyPage:
            events = paginator.page(paginator.num_pages)
        # Update context
        context = super().get_context(request)
        context['events'] = events
        return context
    def route(self, request, path_components):
        # Route URLs of the form /<page-number>/ to the paginated listing.
        if self.live and len(path_components) == 1:
            try:
                return self.serve(request, page=int(path_components[0]))
            except (TypeError, ValueError):
                pass
        return super().route(request, path_components)
    def get_static_site_paths(self):
        # Get page count
        page_count = self.get_paginator().num_pages
        # Yield a path for each page
        for page in range(page_count):
            yield '/%d/' % (page + 1)
        # Delegate to the superclass directly instead of re-yielding in a loop.
        yield from super().get_static_site_paths()
    def get_sitemap_urls(self, request=None):
        # Add past events url to sitemap
        return super().get_sitemap_urls(request=request) + [
            {
                'location': self.full_url + 'past/',
                'lastmod': self.latest_revision_created_at
            }
        ]
    def get_cached_paths(self):
        # Also invalidate/cache the 'past events' sub-path.
        return super().get_cached_paths() + [
            '/past/'
        ]
EventIndex.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('intro', classname="full"),
]
class FormField(AbstractFormField):
page = ParentalKey('FormPage', related_name='form_fields', on_delete=models.CASCADE)
class FormPage(AbstractEmailForm):
    """Basic form-builder page used by the forms test suite."""
    def get_context(self, request):
        # Add a known extra value so templates/tests can assert on it.
        context = super().get_context(request)
        context['greeting'] = "hello world"
        return context
    # This is redundant (SubmissionsListView is the default view class), but importing
    # SubmissionsListView in this models.py helps us to confirm that this recipe
    # https://docs.wagtail.io/en/stable/reference/contrib/forms/customisation.html#customise-form-submissions-listing-in-wagtail-admin
    # works without triggering circular dependency issues -
    # see https://github.com/wagtail/wagtail/issues/6265
    submissions_list_view_class = SubmissionsListView
FormPage.content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('form_fields', label="Form fields"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
class JadeFormField(AbstractFormField):
page = ParentalKey('JadeFormPage', related_name='form_fields', on_delete=models.CASCADE)
class JadeFormPage(AbstractEmailForm):
template = "tests/form_page.jade"
JadeFormPage.content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('form_fields', label="Form fields"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
class RedirectFormField(AbstractFormField):
page = ParentalKey('FormPageWithRedirect', related_name='form_fields', on_delete=models.CASCADE)
class FormPageWithRedirect(AbstractEmailForm):
    """Form page that redirects to a configured "thank you" page after
    submission; falls back to the default landing page when unset.
    """
    thank_you_redirect_page = models.ForeignKey(
        'wagtailcore.Page',
        null=True,
        blank=True,
        on_delete=models.SET_NULL,
        related_name='+',
    )
    def get_context(self, request):
        # Zero-argument super(), consistent with the rest of this module.
        context = super().get_context(request)
        context['greeting'] = "hello world"
        return context
    def render_landing_page(self, request, form_submission=None, *args, **kwargs):
        """
        Renders the landing page OR if a receipt_page_redirect is chosen redirects to this page.
        """
        if self.thank_you_redirect_page:
            return redirect(self.thank_you_redirect_page.url, permanent=False)
        return super().render_landing_page(request, form_submission, *args, **kwargs)
FormPageWithRedirect.content_panels = [
FieldPanel('title', classname="full title"),
PageChooserPanel('thank_you_redirect_page'),
InlinePanel('form_fields', label="Form fields"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
class FormPageWithCustomSubmission(AbstractEmailForm):
    """
    This Form page:
        * Have custom submission model
        * Have custom related_name (see `FormFieldWithCustomSubmission.page`)
        * Saves reference to a user
        * Doesn't render html form, if submission for current user is present
    """
    intro = RichTextField(blank=True)
    thank_you_text = RichTextField(blank=True)
    def get_context(self, request, *args, **kwargs):
        context = super().get_context(request)
        context['greeting'] = "hello world"
        return context
    def get_form_fields(self):
        # Fields live under the custom related_name, not the default one.
        return self.custom_form_fields.all()
    def get_data_fields(self):
        # Prepend the submitter's email to the exported data columns.
        data_fields = [
            ('useremail', 'User email'),
        ]
        data_fields += super().get_data_fields()
        return data_fields
    def get_submission_class(self):
        return CustomFormPageSubmission
    def process_form_submission(self, form):
        # Store the submission together with the submitting user.
        form_submission = self.get_submission_class().objects.create(
            form_data=json.dumps(form.cleaned_data, cls=DjangoJSONEncoder),
            page=self, user=form.user
        )
        if self.to_address:
            # Build a plain-text "label: value" summary and email it out.
            addresses = [x.strip() for x in self.to_address.split(',')]
            content = '\n'.join([x[1].label + ': ' + str(form.data.get(x[0])) for x in form.fields.items()])
            send_mail(self.subject, content, addresses, self.from_address,)
        # process_form_submission should now return the created form_submission
        return form_submission
    def serve(self, request, *args, **kwargs):
        # Skip the form entirely if this user has already submitted.
        if self.get_submission_class().objects.filter(page=self, user__pk=request.user.pk).exists():
            return TemplateResponse(
                request,
                self.template,
                self.get_context(request)
            )
        return super().serve(request, *args, **kwargs)
FormPageWithCustomSubmission.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('intro', classname="full"),
InlinePanel('custom_form_fields', label="Form fields"),
FieldPanel('thank_you_text', classname="full"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
class FormFieldWithCustomSubmission(AbstractFormField):
page = ParentalKey(FormPageWithCustomSubmission, on_delete=models.CASCADE, related_name='custom_form_fields')
class CustomFormPageSubmission(AbstractFormSubmission):
    """Form submission that also records which user submitted it."""
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)

    def get_data(self):
        # Augment the base data dict with the submitter's email address.
        data = super().get_data()
        data['useremail'] = self.user.email
        return data
class FormFieldForCustomListViewPage(AbstractFormField):
page = ParentalKey(
'FormPageWithCustomSubmissionListView',
related_name='form_fields',
on_delete=models.CASCADE
)
class FormPageWithCustomSubmissionListView(AbstractEmailForm):
    """Form Page with customised submissions listing view"""
    intro = RichTextField(blank=True)
    thank_you_text = RichTextField(blank=True)
    def get_submissions_list_view_class(self):
        # Imported lazily to avoid an import cycle with .views.
        from .views import CustomSubmissionsListView
        return CustomSubmissionsListView
    def get_submission_class(self):
        return CustomFormPageSubmission
    def get_data_fields(self):
        # Prepend the submitter's email to the exported data columns.
        data_fields = [
            ('useremail', 'User email'),
        ]
        data_fields += super().get_data_fields()
        return data_fields
    content_panels = [
        FieldPanel('title', classname="full title"),
        FieldPanel('intro', classname="full"),
        InlinePanel('form_fields', label="Form fields"),
        FieldPanel('thank_you_text', classname="full"),
        MultiFieldPanel([
            FieldPanel('to_address', classname="full"),
            FieldPanel('from_address', classname="full"),
            FieldPanel('subject', classname="full"),
        ], "Email")
    ]
EXTENDED_CHOICES = FORM_FIELD_CHOICES + (('ipaddress', 'IP Address'),)
class ExtendedFormField(AbstractFormField):
"""Override the field_type field with extended choices."""
page = ParentalKey(
'FormPageWithCustomFormBuilder',
related_name='form_fields',
on_delete=models.CASCADE)
field_type = models.CharField(
verbose_name='field type', max_length=16, choices=EXTENDED_CHOICES)
class CustomFormBuilder(FormBuilder):
    """
    A custom FormBuilder that has an 'ipaddress' field with
    customised create_singleline_field with shorter max_length
    """
    def create_singleline_field(self, field, options):
        # NOTE: mutates the passed-in options dict before building the field.
        options['max_length'] = 120  # usual default is 255
        return forms.CharField(**options)
    def create_ipaddress_field(self, field, options):
        # Backs the extra 'ipaddress' choice on ExtendedFormField.
        return forms.GenericIPAddressField(**options)
class FormPageWithCustomFormBuilder(AbstractEmailForm):
"""
A Form page that has a custom form builder and uses a custom
form field model with additional field_type choices.
"""
form_builder = CustomFormBuilder
content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('form_fields', label="Form fields"),
MultiFieldPanel([
FieldPanel('to_address', classname="full"),
FieldPanel('from_address', classname="full"),
FieldPanel('subject', classname="full"),
], "Email")
]
class AdvertPlacement(models.Model):
page = ParentalKey('wagtailcore.Page', related_name='advert_placements', on_delete=models.CASCADE)
advert = models.ForeignKey('tests.Advert', related_name='+', on_delete=models.CASCADE)
colour = models.CharField(max_length=255)
class AdvertTag(TaggedItemBase):
content_object = ParentalKey('Advert', related_name='tagged_items', on_delete=models.CASCADE)
class Advert(ClusterableModel):
    """The canonical snippet model used across the snippet tests."""
    url = models.URLField(null=True, blank=True)
    text = models.CharField(max_length=255)
    tags = TaggableManager(through=AdvertTag, blank=True)
    panels = [
        FieldPanel('url'),
        FieldPanel('text'),
        FieldPanel('tags'),
    ]
    def __str__(self):
        return self.text
register_snippet(Advert)
class AdvertWithCustomPrimaryKey(ClusterableModel):
advert_id = models.CharField(max_length=255, primary_key=True)
url = models.URLField(null=True, blank=True)
text = models.CharField(max_length=255)
panels = [
FieldPanel('url'),
FieldPanel('text'),
]
def __str__(self):
return self.text
register_snippet(AdvertWithCustomPrimaryKey)
class AdvertWithCustomUUIDPrimaryKey(ClusterableModel):
advert_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
url = models.URLField(null=True, blank=True)
text = models.CharField(max_length=255)
panels = [
FieldPanel('url'),
FieldPanel('text'),
]
def __str__(self):
return self.text
register_snippet(AdvertWithCustomUUIDPrimaryKey)
class AdvertWithTabbedInterface(models.Model):
    """Snippet with an explicit tabbed edit_handler instead of flat panels."""
    url = models.URLField(null=True, blank=True)
    text = models.CharField(max_length=255)
    something_else = models.CharField(max_length=255)
    advert_panels = [
        FieldPanel('url'),
        FieldPanel('text'),
    ]
    other_panels = [
        FieldPanel('something_else'),
    ]
    edit_handler = TabbedInterface([
        ObjectList(advert_panels, heading='Advert'),
        ObjectList(other_panels, heading='Other'),
    ])
    def __str__(self):
        return self.text
    class Meta:
        ordering = ('text',)
register_snippet(AdvertWithTabbedInterface)
class StandardIndex(Page):
""" Index for the site """
parent_page_types = [Page]
StandardIndex.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('seo_title'),
FieldPanel('slug'),
InlinePanel('advert_placements', label="Adverts"),
]
StandardIndex.promote_panels = []
class StandardChild(Page):
    """Plain page whose edit handler is replaced wholesale below."""
    pass
# Attach a custom tabbed interface, including an intentionally empty tab.
StandardChild.edit_handler = TabbedInterface([
    ObjectList(StandardChild.content_panels, heading='Content'),
    ObjectList(StandardChild.promote_panels, heading='Promote'),
    ObjectList(StandardChild.settings_panels, heading='Settings', classname='settings'),
    ObjectList([], heading='Dinosaurs'),
], base_form_class=WagtailAdminPageForm)
class BusinessIndex(Page):
""" Can be placed anywhere, can only have Business children """
subpage_types = ['tests.BusinessChild', 'tests.BusinessSubIndex']
class BusinessSubIndex(Page):
    """ Can be placed under BusinessIndex, and have BusinessChild children """
    # BusinessNowherePage is 'incorrectly' added here as a possible child.
    # The rules on BusinessNowherePage prevent it from being a child here though.
    subpage_types = ['tests.BusinessChild', 'tests.BusinessNowherePage']
    parent_page_types = ['tests.BusinessIndex', 'tests.BusinessChild']
class BusinessChild(Page):
""" Can only be placed under Business indexes, no children allowed """
subpage_types = []
parent_page_types = ['tests.BusinessIndex', BusinessSubIndex]
class BusinessNowherePage(Page):
""" Not allowed to be placed anywhere """
parent_page_types = []
class TaggedPageTag(TaggedItemBase):
content_object = ParentalKey('tests.TaggedPage', related_name='tagged_items', on_delete=models.CASCADE)
class TaggedPage(Page):
tags = ClusterTaggableManager(through=TaggedPageTag, blank=True)
TaggedPage.content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('tags'),
]
class SingletonPage(Page):
    """Page type of which at most one instance may ever be created."""
    @classmethod
    def can_create_at(cls, parent):
        # You can only create one of these!
        # Zero-argument super(), consistent with the rest of this module.
        return super().can_create_at(parent) and not cls.objects.exists()
class SingletonPageViaMaxCount(Page):
max_count = 1
class PageChooserModel(models.Model):
page = models.ForeignKey('wagtailcore.Page', help_text='help text', on_delete=models.CASCADE)
class EventPageChooserModel(models.Model):
page = models.ForeignKey('tests.EventPage', help_text='more help text', on_delete=models.CASCADE)
class SnippetChooserModel(models.Model):
advert = models.ForeignKey(Advert, help_text='help text', on_delete=models.CASCADE)
panels = [
SnippetChooserPanel('advert'),
]
class SnippetChooserModelWithCustomPrimaryKey(models.Model):
advertwithcustomprimarykey = models.ForeignKey(AdvertWithCustomPrimaryKey, help_text='help text', on_delete=models.CASCADE)
panels = [
SnippetChooserPanel('advertwithcustomprimarykey'),
]
class CustomImage(AbstractImage):
    """Custom image model with extra editable fields and one admin-hidden field."""
    caption = models.CharField(max_length=255, blank=True)
    fancy_caption = RichTextField(blank=True)
    # Deliberately omitted from admin_form_fields below.
    not_editable_field = models.CharField(max_length=255, blank=True)
    admin_form_fields = Image.admin_form_fields + (
        'caption',
        'fancy_caption',
    )
    class Meta:
        unique_together = [
            ('title', 'collection')
        ]
class CustomRendition(AbstractRendition):
image = models.ForeignKey(CustomImage, related_name='renditions', on_delete=models.CASCADE)
class Meta:
unique_together = (
('image', 'filter_spec', 'focal_point_key'),
)
class CustomImageWithAuthor(AbstractImage):
author = models.CharField(max_length=255)
admin_form_fields = Image.admin_form_fields + (
'author',
)
class CustomRenditionWithAuthor(AbstractRendition):
image = models.ForeignKey(CustomImageWithAuthor, related_name='renditions', on_delete=models.CASCADE)
class Meta:
unique_together = (
('image', 'filter_spec', 'focal_point_key'),
)
class CustomDocument(AbstractDocument):
    """Custom document model with extra description fields."""
    description = models.TextField(blank=True)
    fancy_description = RichTextField(blank=True)
    admin_form_fields = Document.admin_form_fields + (
        'description',
        'fancy_description'
    )
    class Meta:
        unique_together = [
            ('title', 'collection')
        ]
class StreamModel(models.Model):
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ImageChooserBlock()),
])
class ExtendedImageChooserBlock(ImageChooserBlock):
    """
    Example of Block with custom get_api_representation method.
    If the request has an 'extended' query param, it returns a dict of id and title,
    otherwise, it returns the default value.
    """
    def get_api_representation(self, value, context=None):
        image_id = super().get_api_representation(value, context=context)
        # context defaults to None; guard before the membership test so we
        # don't raise TypeError when no context is supplied.
        if context and 'request' in context and context['request'].query_params.get('extended', False):
            return {
                'id': image_id,
                'title': value.title
            }
        return image_id
class StreamPage(Page):
    """Page exercising StreamField with several block types, including the
    API-extended image block, and with previews disabled.
    """
    body = StreamField([
        ('text', CharBlock()),
        ('rich_text', RichTextBlock()),
        ('image', ExtendedImageChooserBlock()),
        ('product', StructBlock([
            ('name', CharBlock()),
            ('price', CharBlock()),
        ])),
        ('raw_html', RawHTMLBlock()),
    ])
    api_fields = ('body',)
    content_panels = [
        FieldPanel('title'),
        StreamFieldPanel('body'),
    ]
    # Disable the preview button for this page type.
    preview_modes = []
class DefaultStreamPage(Page):
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ImageChooserBlock()),
], default='')
content_panels = [
FieldPanel('title'),
StreamFieldPanel('body'),
]
class MTIBasePage(Page):
is_creatable = False
class Meta:
verbose_name = "MTI Base page"
class MTIChildPage(MTIBasePage):
# Should be creatable by default, no need to set anything
pass
class AbstractPage(Page):
class Meta:
abstract = True
@register_setting
class TestSetting(BaseSetting):
title = models.CharField(max_length=100)
email = models.EmailField(max_length=50)
@register_setting
class ImportantPages(BaseSetting):
sign_up_page = models.ForeignKey(
'wagtailcore.Page', related_name="+", null=True, on_delete=models.SET_NULL)
general_terms_page = models.ForeignKey(
'wagtailcore.Page', related_name="+", null=True, on_delete=models.SET_NULL)
privacy_policy_page = models.ForeignKey(
'wagtailcore.Page', related_name="+", null=True, on_delete=models.SET_NULL)
@register_setting(icon="tag")
class IconSetting(BaseSetting):
pass
class NotYetRegisteredSetting(BaseSetting):
pass
@register_setting
class FileUploadSetting(BaseSetting):
file = models.FileField()
class BlogCategory(models.Model):
name = models.CharField(unique=True, max_length=80)
class BlogCategoryBlogPage(models.Model):
category = models.ForeignKey(BlogCategory, related_name="+", on_delete=models.CASCADE)
page = ParentalKey('ManyToManyBlogPage', related_name='categories', on_delete=models.CASCADE)
panels = [
FieldPanel('category'),
]
class ManyToManyBlogPage(Page):
    """
    A page type with two different kinds of M2M relation.
    We don't formally support these, but we don't want them to cause
    hard breakages either.
    """
    body = RichTextField(blank=True)
    # Plain M2M (no 'through' model).
    adverts = models.ManyToManyField(Advert, blank=True)
    # M2M with an explicit through model.
    blog_categories = models.ManyToManyField(
        BlogCategory, through=BlogCategoryBlogPage, blank=True)
    # make first_published_at editable on this page model
    settings_panels = Page.settings_panels + [
        FieldPanel('first_published_at'),
    ]
class OneToOnePage(Page):
    """
    A Page containing a O2O relation.
    """
    # NOTE(review): RichTextBlock is a StreamField block, not a model field,
    # so as written 'body' is just a class attribute with no DB column —
    # confirm whether RichTextField was intended.
    body = RichTextBlock(blank=True)
    # Explicit parent link back to Page (normally implicit for page subclasses).
    page_ptr = models.OneToOneField(Page, parent_link=True,
                                    related_name='+', on_delete=models.CASCADE)
class GenericSnippetPage(Page):
    """
    A page containing a reference to an arbitrary snippet (or any model for that matter)
    linked by a GenericForeignKey
    """
    snippet_content_type = models.ForeignKey(ContentType, on_delete=models.SET_NULL, null=True)
    snippet_object_id = models.PositiveIntegerField(null=True)
    # Resolved from the (content_type, object_id) pair above.
    snippet_content_object = GenericForeignKey('snippet_content_type', 'snippet_object_id')
class CustomImageFilePath(AbstractImage):
    def get_upload_to(self, filename):
        """Create a path that's file-system friendly.
        By hashing the file's contents we guarantee an equal distribution
        of files within our root directories. This also gives us a
        better chance of uploading images with the same filename, but
        different contents - this isn't guaranteed as we're only using
        the first three characters of the checksum.
        """
        original_filepath = super().get_upload_to(filename)
        folder_name, filename = original_filepath.split(os.path.sep)
        # Ensure that we consume the entire file, we can't guarantee that
        # the stream has not been partially (or entirely) consumed by
        # another process
        original_position = self.file.tell()
        hash256 = hashlib.sha256()
        try:
            self.file.seek(0)
            while True:
                # 64 KiB chunks give the same digest as the previous
                # 256-byte reads with far fewer read() calls.
                data = self.file.read(65536)
                if not data:
                    break
                hash256.update(data)
        finally:
            # Restore the caller's stream position even if a read fails.
            self.file.seek(original_position)
        checksum = hash256.hexdigest()
        return os.path.join(folder_name, checksum[:3], filename)
class CustomPageQuerySet(PageQuerySet):
def about_spam(self):
return self.filter(title__contains='spam')
CustomManager = PageManager.from_queryset(CustomPageQuerySet)
class CustomManagerPage(Page):
objects = CustomManager()
class MyBasePage(Page):
"""
A base Page model, used to set site-wide defaults and overrides.
"""
objects = CustomManager()
class Meta:
abstract = True
class MyCustomPage(MyBasePage):
pass
class ValidatedPage(Page):
foo = models.CharField(max_length=255)
base_form_class = ValidatedPageForm
content_panels = Page.content_panels + [
FieldPanel('foo'),
]
class DefaultRichTextFieldPage(Page):
body = RichTextField()
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body'),
]
class DefaultRichBlockFieldPage(Page):
body = StreamField([
('rich_text', RichTextBlock()),
])
content_panels = Page.content_panels + [
StreamFieldPanel('body')
]
class CustomRichTextFieldPage(Page):
body = RichTextField(editor='custom')
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body'),
]
class CustomRichBlockFieldPage(Page):
body = StreamField([
('rich_text', RichTextBlock(editor='custom')),
])
content_panels = [
FieldPanel('title', classname="full title"),
StreamFieldPanel('body'),
]
class RichTextFieldWithFeaturesPage(Page):
body = RichTextField(features=['quotation', 'embed', 'made-up-feature'])
content_panels = [
FieldPanel('title', classname="full title"),
FieldPanel('body'),
]
class SectionedRichTextPageSection(Orderable):
page = ParentalKey('tests.SectionedRichTextPage', related_name='sections', on_delete=models.CASCADE)
body = RichTextField()
panels = [
FieldPanel('body')
]
class SectionedRichTextPage(Page):
content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('sections')
]
class InlineStreamPageSection(Orderable):
page = ParentalKey('tests.InlineStreamPage', related_name='sections', on_delete=models.CASCADE)
body = StreamField([
('text', CharBlock()),
('rich_text', RichTextBlock()),
('image', ImageChooserBlock()),
])
panels = [
StreamFieldPanel('body')
]
class InlineStreamPage(Page):
content_panels = [
FieldPanel('title', classname="full title"),
InlinePanel('sections')
]
class TableBlockStreamPage(Page):
table = StreamField([('table', TableBlock())])
content_panels = [StreamFieldPanel('table')]
class UserProfile(models.Model):
# Wagtail's schema must be able to coexist alongside a custom UserProfile model
user = models.OneToOneField(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)
favourite_colour = models.CharField(max_length=255)
class PanelSettings(TestSetting):
panels = [
FieldPanel('title')
]
class TabbedSettings(TestSetting):
edit_handler = TabbedInterface([
ObjectList([
FieldPanel('title')
], heading='First tab'),
ObjectList([
FieldPanel('email')
], heading='Second tab'),
])
class AlwaysShowInMenusPage(Page):
show_in_menus_default = True
class AddedStreamFieldWithoutDefaultPage(Page):
body = StreamField([
('title', CharBlock())
])
class AddedStreamFieldWithEmptyStringDefaultPage(Page):
body = StreamField([
('title', CharBlock())
], default='')
class AddedStreamFieldWithEmptyListDefaultPage(Page):
    # StreamField added with an empty-list default.
    body = StreamField([
        ('title', CharBlock())
    ], default=[])
class PerUserContentPanels(ObjectList):
    # Edit handler that swaps its children depending on the requesting user:
    # superusers get the instance's superuser_content_panels, everyone else
    # the basic_content_panels.
    def _replace_children_with_per_user_config(self):
        self.children = self.instance.basic_content_panels
        if self.request.user.is_superuser:
            self.children = self.instance.superuser_content_panels
        # re-bind the replacement panels to the current model/instance/
        # request/form so they behave like normally declared children
        self.children = [
            child.bind_to(model=self.model, instance=self.instance,
                          request=self.request, form=self.form)
            for child in self.children]

    def on_instance_bound(self):
        # replace list of children when both instance and request are available
        if self.request:
            self._replace_children_with_per_user_config()
        else:
            super().on_instance_bound()

    def on_request_bound(self):
        # replace list of children when both instance and request are available
        if self.instance:
            self._replace_children_with_per_user_config()
        else:
            super().on_request_bound()
class PerUserPageMixin:
    # Mixin that builds a per-user edit handler: the Content tab is a
    # PerUserContentPanels only when both panel sets are declared.
    basic_content_panels = []
    superuser_content_panels = []

    @cached_classmethod
    def get_edit_handler(cls):
        # assemble the tabbed interface; tabs with no panels are omitted
        tabs = []
        if cls.basic_content_panels and cls.superuser_content_panels:
            tabs.append(PerUserContentPanels(heading='Content'))
        if cls.promote_panels:
            tabs.append(ObjectList(cls.promote_panels,
                                   heading='Promote'))
        if cls.settings_panels:
            tabs.append(ObjectList(cls.settings_panels,
                                   heading='Settings',
                                   classname='settings'))
        edit_handler = TabbedInterface(tabs,
                                       base_form_class=cls.base_form_class)
        return edit_handler.bind_to(model=cls)
class SecretPage(PerUserPageMixin, Page):
    # Page whose 'secret_data' field is only editable by superusers
    # (via PerUserPageMixin's per-user panel selection).
    boring_data = models.TextField()
    secret_data = models.TextField()

    basic_content_panels = Page.content_panels + [
        FieldPanel('boring_data'),
    ]
    superuser_content_panels = basic_content_panels + [
        FieldPanel('secret_data'),
    ]
class SimpleParentPage(Page):
    # `BusinessIndex` has been added to bring it in line with other tests
    subpage_types = ['tests.SimpleChildPage', BusinessIndex]
class SimpleChildPage(Page):
    # `Page` has been added to bring it in line with other tests
    parent_page_types = ['tests.SimpleParentPage', Page]

    # at most one SimpleChildPage may exist under any single parent
    max_count_per_parent = 1
class PersonPage(Page):
    # Page with grouped name fields and inline, orderable addresses.
    first_name = models.CharField(
        max_length=255,
        verbose_name='First Name',
    )
    last_name = models.CharField(
        max_length=255,
        verbose_name='Last Name',
    )

    content_panels = Page.content_panels + [
        MultiFieldPanel([
            FieldPanel('first_name'),
            FieldPanel('last_name'),
        ], 'Person'),
        InlinePanel('addresses', label='Address'),
    ]

    class Meta:
        verbose_name = 'Person'
        verbose_name_plural = 'Persons'
class Address(index.Indexed, ClusterableModel, Orderable):
    # Orderable, searchable, taggable address attached to a PersonPage.
    address = models.CharField(
        max_length=255,
        verbose_name='Address',
    )
    tags = ClusterTaggableManager(
        through='tests.AddressTag',
        blank=True,
    )
    person = ParentalKey(
        to='tests.PersonPage',
        related_name='addresses',
        verbose_name='Person'
    )

    panels = [
        FieldPanel('address'),
        FieldPanel('tags'),
    ]

    class Meta:
        verbose_name = 'Address'
        verbose_name_plural = 'Addresses'
class AddressTag(TaggedItemBase):
    # Through-model linking tags to Address records.
    content_object = ParentalKey(
        to='tests.Address',
        on_delete=models.CASCADE,
        related_name='tagged_items'
    )
class RestaurantPage(Page):
    # Page tagged through a custom (non-free) tag model.
    tags = ClusterTaggableManager(through='tests.TaggedRestaurant', blank=True)

    content_panels = Page.content_panels + [
        FieldPanel('tags'),
    ]
class RestaurantTag(TagBase):
    # Closed tag vocabulary: users may not create new tags on the fly.
    free_tagging = False

    class Meta:
        verbose_name = "Tag"
        verbose_name_plural = "Tags"
class TaggedRestaurant(ItemBase):
    # Through-model linking RestaurantTag to RestaurantPage.
    tag = models.ForeignKey(
        RestaurantTag, related_name="tagged_restaurants", on_delete=models.CASCADE
    )
    content_object = ParentalKey(
        to='tests.RestaurantPage',
        on_delete=models.CASCADE,
        related_name='tagged_items'
    )
class SimpleTask(Task):
    # Minimal concrete workflow Task with no extra behaviour.
    pass
|
from __future__ import unicode_literals
import base
from misc import GetPageInfo
from models import PageIdentifier
from category import GetSubcategoryInfos
from revisions import GetCurrentContent, GetPageRevisionInfos
from meta import GetSourceInfo
def test_unicode_title():
    """Operations should accept titles containing non-ASCII characters."""
    op = GetCurrentContent("Beyoncé Knowles")
    assert op()
def test_coercion_basic():
    """A PageIdentifier should be coerced into a 'Category:'-prefixed input param."""
    identifier = PageIdentifier(title='Africa', page_id=123, ns=4, source='enwp')
    subcat_op = GetSubcategoryInfos(identifier)
    assert subcat_op.input_param == 'Category:Africa'
def test_web_request():
    """WebRequestOperation should fetch a known image of a known size."""
    url = 'http://upload.wikimedia.org/wikipedia/commons/d/d2/Mcgregor.jpg'
    op = base.WebRequestOperation(url)
    content = op()[0]
    assert len(content) == 16408
def test_get_html():
    """GetPageHTML should return a large rendered HTML document."""
    op = base.GetPageHTML('Africa')
    html = op()[0]
    assert len(html) > 350000
def test_missing_revisions():
    """Requesting revisions of a nonexistent page yields an empty list.

    The API reports such pages as 'missing' with a negative pageid; the
    operation should return no revision infos rather than raising.
    """
    # FIX: the explanatory triple-quoted string previously sat in the middle
    # of the body as a dead no-op expression; it is now the docstring.
    get_revs = GetPageRevisionInfos('Coffee_lololololol')
    rev_list = get_revs()
    assert len(rev_list) == 0
def test_get_meta():
    """GetSourceInfo should return truthy source metadata."""
    source_info_op = GetSourceInfo()
    meta = source_info_op()
    assert meta
def test_client_passed_to_subops():
    """The client object given to the initial operation must be passed
    through (as the same instance) to its sub-operations.

    Just enough titles are used to force multiplexing, so that sub-ops
    actually get created.
    """
    titles = ['a'] * (base.DEFAULT_QUERY_LIMIT.get_limit() + 1)
    client = base.MockClient()
    op = GetPageInfo(titles, client=client)
    # FIX: identity is expressed with `is` rather than comparing id()s.
    assert op.subop_queues[0].peek().client is client
|
import math
from numba import njit
from tardis.montecarlo.montecarlo_numba import (
njit_dict_no_parallel,
)
import tardis.montecarlo.montecarlo_numba.numba_config as nc
from tardis.montecarlo.montecarlo_numba.numba_config import (
C_SPEED_OF_LIGHT,
MISS_DISTANCE,
SIGMA_THOMSON,
CLOSE_LINE_THRESHOLD,
)
from tardis.montecarlo.montecarlo_numba.utils import MonteCarloException
from tardis.montecarlo.montecarlo_numba.r_packet import print_r_packet_properties
@njit(**njit_dict_no_parallel)
def calculate_distance_boundary(r, mu, r_inner, r_outer):
    """
    Calculate distance to shell boundary in cm.

    Parameters
    ----------
    r : float
        radial coordinate of the RPacket
    mu : float
        cosine of the direction of movement
    r_inner : float
        inner radius of current shell
    r_outer : float
        outer radius of current shell

    Returns
    -------
    (float, int)
        distance to the boundary that will be hit, and the shell index
        change (+1 when crossing outward, -1 when crossing inward)
    """
    # common term (mu^2 - 1) * r^2 appears in both boundary equations
    mu2m1_r2 = (mu * mu - 1.0) * r * r

    if mu > 0.0:
        # moving outward: always reaches the outer boundary
        return math.sqrt(r_outer * r_outer + mu2m1_r2) - r * mu, 1

    # moving inward: may hit the inner boundary or miss it
    inner_check = r_inner * r_inner + mu2m1_r2
    if inner_check >= 0.0:
        # trajectory intersects the inner boundary
        return -r * mu - math.sqrt(inner_check), -1

    # misses the inner shell; crosses over to the outer boundary instead
    return math.sqrt(r_outer * r_outer + mu2m1_r2) - r * mu, 1
@njit(**njit_dict_no_parallel)
def calculate_distance_line(
    r_packet, comov_nu, is_last_line, nu_line, time_explosion
):
    """
    Calculate distance until RPacket is in resonance with the next line

    Parameters
    ----------
    r_packet : tardis.montecarlo.montecarlo_numba.r_packet.RPacket
    comov_nu : float
        comoving frequency at the CURRENT position of the RPacket
    is_last_line : bool
        return MISS_DISTANCE if at the end of the line list
    nu_line : float
        line to check the distance to
    time_explosion : float
        time since explosion in seconds

    Returns
    -------
    float
        distance in cm until resonance with nu_line (MISS_DISTANCE when
        past the last line)

    Raises
    ------
    MonteCarloException
        if the comoving frequency is already below the line frequency
    """
    nu = r_packet.nu

    if is_last_line:
        return MISS_DISTANCE

    nu_diff = comov_nu - nu_line

    # for numerical reasons, if line is too close, we set the distance to 0.
    if abs(nu_diff / nu) < CLOSE_LINE_THRESHOLD:
        nu_diff = 0.0

    if nu_diff >= 0:
        # distance travelled before the comoving frequency drops to nu_line
        distance = (nu_diff / nu) * C_SPEED_OF_LIGHT * time_explosion
    else:
        raise MonteCarloException(
            "nu difference is less than 0.0"
        )

    if nc.ENABLE_FULL_RELATIVITY:
        # full-relativity mode recomputes the distance with the exact formula
        return calculate_distance_line_full_relativity(
            nu_line, nu, time_explosion, r_packet
        )
    return distance
@njit(**njit_dict_no_parallel)
def calculate_distance_line_full_relativity(
    nu_line, nu, time_explosion, r_packet
):
    """Distance to line resonance using the full-relativity formula.

    Parameters
    ----------
    nu_line : float
        frequency of the line to reach
    nu : float
        current frequency of the packet
    time_explosion : float
        time since explosion in seconds
    r_packet : tardis.montecarlo.montecarlo_numba.r_packet.RPacket

    Returns
    -------
    float
        distance in cm
    """
    # distance = - mu * r + (ct - nu_r * nu_r * sqrt(ct * ct - (1 + r * r * (1 - mu * mu) * (1 + pow(nu_r, -2))))) / (1 + nu_r * nu_r);
    nu_r = nu_line / nu
    ct = C_SPEED_OF_LIGHT * time_explosion
    distance = -r_packet.mu * r_packet.r + (
        ct
        - nu_r
        * nu_r
        * math.sqrt(
            ct * ct
            - (
                1
                + r_packet.r
                * r_packet.r
                * (1 - r_packet.mu * r_packet.mu)
                * (1 + 1.0 / (nu_r * nu_r))
            )
        )
    ) / (1 + nu_r * nu_r)
    return distance
@njit(**njit_dict_no_parallel)
def calculate_distance_electron(electron_density, tau_event):
    """
    Calculate distance to Thomson Scattering

    Parameters
    ----------
    electron_density : float
    tau_event : float

    Returns
    -------
    float
        distance in cm at which the electron-scattering optical depth
        reaches tau_event
    """
    # add full_relativity here
    return tau_event / (electron_density * SIGMA_THOMSON)
|
"""URI API
This file contains the part of the blaze API dealing with URIs. The
"URI API". In Blaze persistence is provided by the means of this URI
API, that allows specifying a "location" for an array as an URI.
The URI API allows:
- saving existing arrays to an URI.
- loading an array into memory from an URI.
- opening an URI as an array.
- dropping the contents of a given URI.
"""
from __future__ import absolute_import, division, print_function
import os
import warnings
from datashape import to_numpy, to_numpy_dtype
import blz
from ..py2help import urlparse
from ..datadescriptor import (BLZDataDescriptor, CSVDataDescriptor,
JSONDataDescriptor, HDF5DataDescriptor)
from ..objects.array import Array
def _to_numpy(ds):
    """Convert a datashape into a (shape, dtype) pair.

    ``to_numpy`` already returns a tuple for array-like datashapes; for
    scalar datashapes fall back to an empty shape and the plain dtype.
    """
    converted = to_numpy(ds)
    if type(converted) is tuple:
        return converted
    return (), to_numpy_dtype(ds)
class Storage(object):
    """
    Storage(uri, mode='a', permanent=True)

    Class to host parameters for persistence properties.

    Parameters
    ----------
    uri : string
        The URI where the data set will be stored.
    mode : string ('r'ead, 'a'ppend)
        The mode for creating/opening the storage.
    permanent : bool
        Whether this file should be permanent or not.
    format : string, optional
        One of SUPPORTED_FORMATS; when omitted it is derived from the
        (deprecated) URI scheme or the file extension.

    Examples
    --------
    >>> store = Storage('blz-store.blz')
    """

    SUPPORTED_FORMATS = ('json', 'csv', 'blz', 'hdf5')

    @property
    def uri(self):
        """The URI for the data set."""
        return self._uri

    @property
    def mode(self):
        """The mode for opening the storage."""
        return self._mode

    @property
    def format(self):
        """The format used for storage."""
        return self._format

    @property
    def permanent(self):
        """Whether this file should be permanent or not."""
        return self._permanent

    @property
    def path(self):
        """Returns a blz path for a given uri."""
        return self._path

    def __init__(self, uri, mode='a', permanent=True, format=None):
        if not isinstance(uri, str):
            raise ValueError("`uri` must be a string.")
        self._uri = uri
        self._format = self._path = ""
        self._set_format_and_path_from_uri(uri, format)
        self._mode = mode
        if not permanent:
            raise ValueError(
                "`permanent` set to False is not supported yet.")
        self._permanent = permanent

    def __repr__(self):
        args = ["uri=%s" % self._uri, "mode=%s" % self._mode]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(args))

    def _set_format_and_path_from_uri(self, uri, format=None):
        """Parse the uri into the format and path"""
        up = urlparse.urlparse(self._uri)
        if up.scheme in self.SUPPORTED_FORMATS:
            warnings.warn("Blaze no longer uses file type in network protocol field of the uri. "
                          "Please use format kwarg.", DeprecationWarning)
        self._path = up.netloc + up.path
        if os.name == 'nt' and len(up.scheme) == 1:
            # This is a workaround for raw windows paths like
            # 'C:/x/y/z.csv', for which urlparse parses 'C' as
            # the scheme and '/x/y/z.csv' as the path.
            self._path = uri
        if not self._path:
            # BUG FIX: the uri was passed as a second ValueError argument
            # instead of being %-formatted into the message.
            raise ValueError("Unable to extract path from uri: %s" % uri)
        _, extension = os.path.splitext(self._path)
        extension = extension.strip('.')
        # Support for deprecated format in url network scheme
        format_from_up = None
        if up.scheme in self.SUPPORTED_FORMATS:
            format_from_up = up.scheme
        # BUG FIX: the original compared format_from_up with itself (always
        # False), so an explicit `format` conflicting with the scheme-derived
        # format was never rejected.
        if format and format_from_up and format != format_from_up:
            raise ValueError("URI scheme and file format do not match. Given uri: %s, format: %s" %
                             (up.geturl(), format))
        # find actual format: explicit kwarg wins, then deprecated scheme,
        # then the file extension
        if format:
            self._format = format
        elif format_from_up:
            self._format = format_from_up
        elif extension:
            self._format = extension
        else:
            raise ValueError("Cannot determine format from: %s" % uri)
        if self._format not in self.SUPPORTED_FORMATS:
            raise ValueError("`format` '%s' is not supported." % self._format)
def _persist_convert(persist):
    """Normalize *persist* to a Storage instance.

    A plain URI string is wrapped in a Storage; anything else that is not
    already a Storage raises ValueError.
    """
    if not isinstance(persist, Storage):
        if isinstance(persist, str):
            persist = Storage(persist)
        else:
            # BUG FIX: the two implicitly concatenated fragments previously
            # joined without a space, producing "...either aURI string...".
            raise ValueError('persist argument must be either a '
                             'URI string or Storage object')
    return persist
def from_blz(persist, **kwargs):
    """Open an existing persistent BLZ array.

    Parameters
    ----------
    persist : a Storage instance
        The Storage instance specifies, among other things, path of
        where the array is stored.
    kwargs : a dictionary
        Put here different parameters depending on the format.

    Returns
    -------
    out: a concrete blaze array.
    """
    storage = _persist_convert(persist)
    barray = blz.barray(rootdir=storage.path, **kwargs)
    return Array(BLZDataDescriptor(barray))
def from_csv(persist, **kwargs):
    """Open an existing persistent CSV array.

    Parameters
    ----------
    persist : a Storage instance
        The Storage instance specifies, among other things, path of
        where the array is stored.
    kwargs : a dictionary
        Put here different parameters depending on the format.

    Returns
    -------
    out: a concrete blaze array.
    """
    storage = _persist_convert(persist)
    return Array(CSVDataDescriptor(storage.path, **kwargs))
def from_json(persist, **kwargs):
    """Open an existing persistent JSON array.

    Parameters
    ----------
    persist : a Storage instance
        The Storage instance specifies, among other things, path of
        where the array is stored.
    kwargs : a dictionary
        Put here different parameters depending on the format.

    Returns
    -------
    out: a concrete blaze array.
    """
    storage = _persist_convert(persist)
    return Array(JSONDataDescriptor(storage.path, **kwargs))
def from_hdf5(persist, **kwargs):
    """Open an existing persistent HDF5 array.

    Parameters
    ----------
    persist : a Storage instance
        The Storage instance specifies, among other things, path of
        where the array is stored.
    kwargs : a dictionary
        Put here different parameters depending on the format.

    Returns
    -------
    out: a concrete blaze array.
    """
    storage = _persist_convert(persist)
    return Array(HDF5DataDescriptor(storage.path, **kwargs))
def drop(persist):
    """Remove a persistent storage.

    BLZ stores are directories and are removed recursively; the other
    supported formats are single files and are unlinked.
    """
    persist = _persist_convert(persist)
    if persist.format == 'blz':
        from shutil import rmtree
        rmtree(persist.path)
    elif persist.format in ('csv', 'json', 'hdf5'):
        # FIX: removed the redundant local `import os` — the module
        # already imports os at the top.
        os.unlink(persist.path)
|
'''
'''
from __future__ import with_statement
__docformat__ = 'restructuredtext'
__version__ = '$Id: $'
from pyglet.app.base import PlatformEventLoop
from pyglet.libs.darwin import *
class CocoaEventLoop(PlatformEventLoop):
    """Cocoa implementation of the pyglet platform event loop.

    Drives the NSApplication event queue manually: each call to step()
    dequeues at most one NSEvent, dispatches it, and re-sends key events
    as pyglet-specific actions (see step() for why).
    """

    def __init__(self):
        super(CocoaEventLoop, self).__init__()
        # Prepare the default application.
        NSApplication.sharedApplication()
        # Create an autorelease pool for menu creation and finishLaunching
        pool = NSAutoreleasePool.alloc().init()
        self._create_application_menu()
        # The value for the ApplicationPolicy is 0 as opposed to the
        # constant name NSApplicationActivationPolicyRegular, as it
        # doesn't appear to be in the bridge support in Apple's pyObjC
        # as of OS X 10.6.7
        NSApp().setActivationPolicy_(0)
        NSApp().finishLaunching()
        NSApp().activateIgnoringOtherApps_(True)
        # Then get rid of the pool when we're done.
        del pool

    def _create_application_menu(self):
        # Sets up a menu and installs a "quit" item so that we can use
        # Command-Q to exit the application.
        # See http://cocoawithlove.com/2010/09/minimalist-cocoa-programming.html
        # This could also be done much more easily with a NIB.
        menubar = NSMenu.alloc().init()
        appMenuItem = NSMenuItem.alloc().init()
        menubar.addItem_(appMenuItem)
        NSApp().setMainMenu_(menubar)
        appMenu = NSMenu.alloc().init()
        processName = NSProcessInfo.processInfo().processName()
        hideItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(
            "Hide " + processName, "hide:", "h")
        appMenu.addItem_(hideItem)
        appMenu.addItem_(NSMenuItem.separatorItem())
        quitItem = NSMenuItem.alloc().initWithTitle_action_keyEquivalent_(
            "Quit " + processName, "terminate:", "q")
        appMenu.addItem_(quitItem)
        appMenuItem.setSubmenu_(appMenu)

    def start(self):
        # No startup work needed; stepping drives the loop.
        pass

    def step(self, timeout=None):
        """Process at most one pending NSEvent; returns True on timeout."""
        # Create an autorelease pool for this iteration.
        pool = NSAutoreleasePool.alloc().init()

        # Determine the timeout date.
        if timeout is None:
            # Using distantFuture as untilDate means that nextEventMatchingMask
            # will wait until the next event comes along.
            timeout_date = NSDate.distantFuture()
        else:
            timeout_date = NSDate.dateWithTimeIntervalSinceNow_(timeout)

        # Retrieve the next event (if any).  We wait for an event to show up
        # and then process it, or if timeout_date expires we simply return.
        # We only process one event per call of step().
        self._is_running.set()
        event = NSApp().nextEventMatchingMask_untilDate_inMode_dequeue_(
            NSAnyEventMask, timeout_date, NSDefaultRunLoopMode, True)

        # Dispatch the event (if any).
        if event is not None:
            event_type = event.type()
            if event_type != NSApplicationDefined:
                # Send out event as normal.  Responders will still receive
                # keyUp:, keyDown:, and flagsChanged: events.
                NSApp().sendEvent_(event)

                # Resend key events as special pyglet-specific messages
                # which supplant the keyDown:, keyUp:, and flagsChanged: messages
                # because NSApplication translates multiple key presses into key
                # equivalents before sending them on, which means that some keyUp:
                # messages are never sent for individual keys.  Our pyglet-specific
                # replacements ensure that we see all the raw key presses & releases.
                # We also filter out key-down repeats since pyglet only sends one
                # on_key_press event per key press.
                if event_type == NSKeyDown and not event.isARepeat():
                    NSApp().sendAction_to_from_("pygletKeyDown:", None, event)
                elif event_type == NSKeyUp:
                    NSApp().sendAction_to_from_("pygletKeyUp:", None, event)
                elif event_type == NSFlagsChanged:
                    NSApp().sendAction_to_from_("pygletFlagsChanged:", None, event)

            NSApp().updateWindows()
            did_time_out = False
        else:
            did_time_out = True

        self._is_running.clear()

        # Destroy the autorelease pool used for this step.
        del pool
        return did_time_out

    def stop(self):
        # No teardown work needed.
        pass

    def notify(self):
        # Wake up a blocked step() by posting a do-nothing
        # application-defined event to the queue.
        pool = NSAutoreleasePool.alloc().init()
        notifyEvent = NSEvent.otherEventWithType_location_modifierFlags_timestamp_windowNumber_context_subtype_data1_data2_(
            NSApplicationDefined, # type
            NSPoint(0.0, 0.0),    # location
            0,                    # modifierFlags
            0,                    # timestamp
            0,                    # windowNumber
            None,                 # graphicsContext
            0,                    # subtype
            0,                    # data1
            0,                    # data2
        )
        NSApp().postEvent_atStart_(notifyEvent, False)
        del pool
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.