repo_name stringlengths 4 116 | path stringlengths 4 379 | size stringlengths 1 7 | content stringlengths 3 1.05M | license stringclasses 15
values |
|---|---|---|---|---|
TheDurtch/TheDurtch.github.io | paste/application/libraries/geshi/geshi/nsis.php | 20643 | <?php
/*************************************************************************************
* nsis.php
* --------
* Author: deguix (cevo_deguix@yahoo.com.br), Tux (http://tux.a4.cz/), Jan T. Sott (http://github.com/idleberg)
* Copyright: (c) 2005 deguix, 2004 Tux (http://tux.a4.cz/), Nigel McNie (http://qbnz.com/highlighter)
* Release Version: 1.0.8.12
* Date Started: 2005/12/03
*
* Nullsoft Scriptable Install System language file for GeSHi.
*
* CHANGES
* -------
* 2014/04/23 (2.0.3)
* - Updated to NSIS 3.0a2
* - Removed deprecated commands
* 2005/12/03 (2.0.2)
* - Updated to NSIS 2.11.
* 2005/06/17 (2.0.1)
* - Updated to NSIS 2.07b0.
* 2005/04/05 (2.0.0)
* - Updated to NSIS 2.06.
* 2004/11/27 (1.0.2)
* - Added support for multiple object splitters
* 2004/10/27 (1.0.1)
* - Added support for URLs
* 2004/08/05 (1.0.0)
* - First Release
*
* TODO (updated 2004/11/27)
* -------------------------
*
*************************************************************************************
*
* This file is part of GeSHi.
*
* GeSHi is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* GeSHi is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with GeSHi; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
************************************************************************************/
$language_data = array(
'LANG_NAME' => 'NSIS',
'COMMENT_SINGLE' => array(1 => ';', 2 => '#'),
'COMMENT_MULTI' => array('/*' => '*/'),
'CASE_KEYWORDS' => GESHI_CAPS_NO_CHANGE,
'QUOTEMARKS' => array("'",'"','`'),
'ESCAPE_CHAR' => '',
'KEYWORDS' => array(
1 => array(
'!addincludedir', '!addplugindir', '!appendfile', '!cd', '!define', '!delfile', '!echo', '!error',
'!execute', '!finalize', '!getdllversion', '!include', '!insertmacro', '!macro', '!macroend', '!makensis', '!packhdr',
'!searchparse', '!searchreplace', '!system', '!tempfile', '!undef', '!verbose', '!warning'
),
2 => array(
'AddBrandingImage', 'AllowRootDirInstall', 'AutoCloseWindow', 'BGFont',
'BGGradient', 'BrandingText', 'Caption', 'ChangeUI', 'CheckBitmap', 'CompletedText', 'ComponentText',
'CRCCheck', 'DetailsButtonText', 'DirText', 'DirVar', 'DirVerify', 'FileErrorText',
'Function', 'FunctionEnd', 'Icon', 'InstallButtonText', 'InstallColors', 'InstallDir',
'InstallDirRegKey', 'InstProgressFlags', 'InstType', 'LangString', 'LicenseBkColor',
'LicenseData', 'LicenseForceSelection', 'LicenseLangString', 'LicenseText', 'LoadLanguageFile', 'ManifestDPIAware', 'ManifestSupportedOS',
'MiscButtonText', 'Name', 'OutFile', 'Page', 'PageEx', 'PageExEnd', 'RequestExecutionLevel', 'Section',
'SectionEnd', 'SectionGroup', 'SectionGroupEnd', 'SetCompressor', 'SetFont', 'ShowInstDetails',
'ShowUninstDetails', 'SilentInstall', 'SilentUnInstall', 'SpaceTexts', 'SubCaption', 'SubSection',
'SubSectionEnd', 'Unicode', 'UninstallButtonText', 'UninstallCaption', 'UninstallIcon', 'UninstallSubCaption',
'UninstallText', 'UninstPage', 'Var', 'VIAddVersionKey', 'VIFileVersion', 'VIProductVersion', 'WindowIcon', 'XPStyle'
),
3 => array(
'AddSize', 'AllowSkipFiles', 'FileBufSize', 'GetInstDirError', 'PageCallbacks',
'SectionIn', 'SetCompress', 'SetCompressorDictSize',
'SetDatablockOptimize', 'SetDateSave', 'SetOverwrite', 'SetPluginUnload'
),
4 => array(
'Abort', 'BringToFront', 'Call', 'CallInstDLL', 'ClearErrors', 'CopyFiles','CreateDirectory',
'CreateFont', 'CreateShortCut', 'Delete', 'DeleteINISec', 'DeleteINIStr', 'DeleteRegKey',
'DeleteRegValue', 'DetailPrint', 'EnableWindow', 'EnumRegKey', 'EnumRegValue', 'Exch', 'Exec',
'ExecShell', 'ExecWait', 'ExpandEnvStrings', 'File', 'FileClose', 'FileOpen', 'FileRead',
'FileReadByte', 'FileReadUTF16LE', 'FileReadWord', 'FileSeek', 'FileWrite', 'FileWriteByte', 'FileWriteUTF16LE', 'FileWriteWord', 'FindClose', 'FindFirst', 'FindNext',
'FindWindow', 'FlushINI', 'GetCurInstType', 'GetCurrentAddress', 'GetDlgItem', 'GetDLLVersion',
'GetDLLVersionLocal', 'GetErrorLevel', 'GetFileTime', 'GetFileTimeLocal', 'GetFullPathName',
'GetFunctionAddress', 'GetLabelAddress', 'GetTempFileName', 'Goto', 'HideWindow',
'IfAbort', 'IfErrors', 'IfFileExists', 'IfRebootFlag', 'IfSilent', 'InitPluginsDir', 'InstTypeGetText',
'InstTypeSetText', 'IntCmp', 'IntCmpU', 'IntFmt', 'IntOp', 'IsWindow', 'LockWindow', 'LogSet', 'LogText',
'MessageBox', 'Nop', 'Pop', 'Push', 'Quit', 'ReadEnvStr', 'ReadINIStr', 'ReadRegDWORD', 'ReadRegStr',
'Reboot', 'RegDLL', 'Rename', 'ReserveFile', 'Return', 'RMDir', 'SearchPath', 'SectionGetFlags',
'SectionGetInstTypes', 'SectionGetSize', 'SectionGetText', 'SectionSetFlags', 'SectionSetInstTypes',
'SectionSetSize', 'SectionSetText', 'SendMessage', 'SetAutoClose', 'SetBrandingImage', 'SetCtlColors',
'SetCurInstType', 'SetDetailsPrint', 'SetDetailsView', 'SetErrorLevel', 'SetErrors', 'SetFileAttributes',
'SetOutPath', 'SetRebootFlag', 'SetRegView', 'SetShellVarContext', 'SetSilent', 'ShowWindow', 'Sleep', 'StrCmp', 'StrCmpS',
'StrCpy', 'StrLen', 'UnRegDLL', 'WriteINIStr', 'WriteRegBin', 'WriteRegDWORD', 'WriteRegExpandStr',
'WriteRegStr', 'WriteUninstaller'
),
5 => array(
'all', 'alwaysoff', 'ARCHIVE', 'auto', 'both', 'bzip2', 'checkbox', 'components', 'current',
'custom', 'directory', 'false', 'FILE_ATTRIBUTE_ARCHIVE', 'FILE_ATTRIBUTE_HIDDEN', 'FILE_ATTRIBUTE_NORMAL',
'FILE_ATTRIBUTE_OFFLINE', 'FILE_ATTRIBUTE_READONLY', 'FILE_ATTRIBUTE_SYSTEM,TEMPORARY',
'FILE_ATTRIBUTE_TEMPORARY', 'force', 'HIDDEN', 'hide', 'HKCC', 'HKCR', 'HKCU', 'HKDD', 'HKEY_CLASSES_ROOT',
'HKEY_CURRENT_CONFIG', 'HKEY_CURRENT_USER', 'HKEY_DYN_DATA', 'HKEY_LOCAL_MACHINE', 'HKEY_PERFORMANCE_DATA',
'HKEY_USERS', 'HKLM', 'HKPD', 'HKU', 'IDABORT', 'IDCANCEL', 'IDIGNORE', 'IDNO', 'IDOK', 'IDRETRY', 'IDYES',
'ifdiff', 'ifnewer', 'instfiles', 'lastused', 'leave', 'license', 'listonly', 'lzma', 'manual',
'MB_ABORTRETRYIGNORE', 'MB_DEFBUTTON1', 'MB_DEFBUTTON2', 'MB_DEFBUTTON3', 'MB_DEFBUTTON4',
'MB_ICONEXCLAMATION', 'MB_ICONINFORMATION', 'MB_ICONQUESTION', 'MB_ICONSTOP', 'MB_OK', 'MB_OKCANCEL',
'MB_RETRYCANCEL', 'MB_RIGHT', 'MB_SETFOREGROUND', 'MB_TOPMOST', 'MB_YESNO', 'MB_YESNOCANCEL', 'nevershow',
'none', 'normal', 'off', 'OFFLINE', 'on', 'radiobuttons', 'READONLY', 'RO', 'SHCTX', 'SHELL_CONTEXT', 'show',
'silent', 'silentlog', 'SW_HIDE', 'SW_SHOWMAXIMIZED', 'SW_SHOWMINIMIZED', 'SW_SHOWNORMAL', 'SYSTEM',
'textonly', 'true', 'try', 'uninstConfirm', 'zlib'
),
6 => array(
'/a', '/components', '/COMPONENTSONLYONCUSTOM', '/CUSTOMSTRING', '/e', '/FILESONLY', '/FINAL', '/gray', '/GLOBAL',
'/ifempty', '/IMGID', '/ITALIC', '/lang', '/NOCUSTOM', '/nonfatal', '/NOUNLOAD', '/oname', '/r', '/REBOOTOK',
'/RESIZETOFIT', '/SOLID', '/SD', '/SHORT', '/silent', '/STRIKE', '/TIMEOUT', '/TRIMCENTER', '/TRIMLEFT',
'/TRIMRIGHT', '/UNDERLINE', '/windows', '/x'
),
7 => array(
'.onGUIEnd', '.onGUIInit', '.onInit', '.onInstFailed', '.onInstSuccess', '.onMouseOverSection',
'.onRebootFailed', '.onSelChange', '.onUserAbort', '.onVerifyInstDir', 'un.onGUIEnd', 'un.onGUIInit',
'un.onInit', 'un.onRebootFailed', 'un.onUninstFailed', 'un.onUninstSuccess', 'un.onUserAbort'
),
8 => array(
'MUI.nsh', '"${NSISDIR}\Contrib\Modern UI\System.nsh"', 'MUI_SYSVERSION', 'MUI_ICON', 'MUI_UNICON',
'MUI_HEADERIMAGE', 'MUI_HEADERIMAGE_BITMAP', 'MUI_HEADERIMAGE_BITMAP_NOSTRETCH', 'MUI_HEADERIMAGE_BITMAP_RTL',
'MUI_HEADERIMAGE_BITMAP_RTL_NOSTRETCH', 'MUI_HEADERIMAGE_UNBITMAP', 'MUI_HEADERIMAGE_UNBITMAP_NOSTRETCH',
'MUI_HEADERIMAGE_UNBITMAP_RTL', 'MUI_HEADERIMAGE_UNBITMAP_RTL_NOSTRETCH', 'MUI_HEADERIMAGE_RIGHT', 'MUI_BGCOLOR',
'MUI_UI', 'MUI_UI_HEADERIMAGE', 'MUI_UI_HEADERIMAGE_RIGHT', 'MUI_UI_COMPONENTSPAGE_SMALLDESC',
'MUI_UI_COMPONENTSPAGE_NODESC', 'MUI_WELCOMEFINISHPAGE_BITMAP', 'MUI_WELCOMEFINISHPAGE_BITMAP_NOSTRETCH',
'MUI_WELCOMEFINISHPAGE_INI', 'MUI_UNWELCOMEFINISHPAGE_BITMAP', 'MUI_UNWELCOMEFINISHPAGE_BITMAP_NOSTRETCH',
'MUI_UNWELCOMEFINISHPAGE_INI', 'MUI_LICENSEPAGE_BGCOLOR', 'MUI_COMPONENTSPAGE_CHECKBITMAP',
'MUI_COMPONENTSPAGE_SMALLDESC', 'MUI_COMPONENTSPAGE_NODESC', 'MUI_INSTFILESPAGE_COLORS',
'MUI_INSTFILESPAGE_PROGRESSBAR', 'MUI_FINISHPAGE_NOAUTOCLOSE', 'MUI_UNFINISHPAGE_NOAUTOCLOSE',
'MUI_ABORTWARNING', 'MUI_ABORTWARNING_TEXT', 'MUI_UNABORTWARNING', 'MUI_UNABORTWARNING_TEXT',
'MUI_PAGE_WELCOME', 'MUI_PAGE_LICENSE', 'MUI_PAGE_COMPONENTS', 'MUI_PAGE_DIRECTORY',
'MUI_PAGE_STARTMENU', 'MUI_PAGE_INSTFILES', 'MUI_PAGE_FINISH', 'MUI_UNPAGE_WELCOME',
'MUI_UNPAGE_CONFIRM', 'MUI_UNPAGE_LICENSE', 'MUI_UNPAGE_COMPONENTS', 'MUI_UNPAGE_DIRECTORY',
'MUI_UNPAGE_INSTFILES', 'MUI_UNPAGE_FINISH', 'MUI_PAGE_HEADER_TEXT', 'MUI_PAGE_HEADER_SUBTEXT',
'MUI_WELCOMEPAGE_TITLE', 'MUI_WELCOMEPAGE_TITLE_3LINES', 'MUI_WELCOMEPAGE_TEXT',
'MUI_LICENSEPAGE_TEXT_TOP', 'MUI_LICENSEPAGE_TEXT_BOTTOM', 'MUI_LICENSEPAGE_BUTTON',
'MUI_LICENSEPAGE_CHECKBOX', 'MUI_LICENSEPAGE_CHECKBOX_TEXT', 'MUI_LICENSEPAGE_RADIOBUTTONS',
'MUI_LICENSEPAGE_RADIOBUTTONS_TEXT_ACCEPT', 'MUI_LICENSEPAGE_RADIOBUTTONS_TEXT_DECLINE',
'MUI_COMPONENTSPAGE_TEXT_TOP', 'MUI_COMPONENTSPAGE_TEXT_COMPLIST', 'MUI_COMPONENTSPAGE_TEXT_INSTTYPE',
'MUI_COMPONENTSPAGE_TEXT_DESCRIPTION_TITLE', 'MUI_COMPONENTSPAGE_TEXT_DESCRIPTION_INFO',
'MUI_DIRECTORYPAGE_TEXT_TOP', 'MUI_DIRECTORYPAGE_TEXT_DESTINATION', 'MUI_DIRECTORYPAGE_VARIABLE',
'MUI_DIRECTORYPAGE_VERIFYONLEAVE', 'MUI_STARTMENU_WRITE_BEGIN', 'MUI_STARTMENU_WRITE_END',
'MUI_STARTMENUPAGE_TEXT_TOP', 'MUI_STARTMENUPAGE_TEXT_CHECKBOX', 'MUI_STARTMENUPAGE_DEFAULTFOLDER',
'MUI_STARTMENUPAGE_NODISABLE', 'MUI_STARTMENUPAGE_REGISTRY_ROOT', 'MUI_STARTMENUPAGE_REGISTRY_KEY',
'MUI_STARTMENUPAGE_REGISTRY_VALUENAME', 'MUI_INSTFILESPAGE_FINISHHEADER_TEXT',
'MUI_INSTFILESPAGE_FINISHHEADER_SUBTEXT', 'MUI_INSTFILESPAGE_ABORTHEADER_TEXT',
'MUI_INSTFILESPAGE_ABORTHEADER_SUBTEXT', 'MUI_FINISHPAGE_TITLE', 'MUI_FINISHPAGE_TITLE_3LINES',
'MUI_FINISHPAGE_TEXT', 'MUI_FINISHPAGE_TEXT_LARGE', 'MUI_FINISHPAGE_BUTTON',
'MUI_FINISHPAGE_TEXT_REBOOT', 'MUI_FINISHPAGE_TEXT_REBOOTNOW', 'MUI_FINISHPAGE_TEXT_REBOOTLATER',
'MUI_FINISHPAGE_RUN', 'MUI_FINISHPAGE_RUN_TEXT', 'MUI_FINISHPAGE_RUN_PARAMETERS',
'MUI_FINISHPAGE_RUN_NOTCHECKED', 'MUI_FINISHPAGE_RUN_FUNCTION', 'MUI_FINISHPAGE_SHOWREADME',
'MUI_FINISHPAGE_SHOWREADME_TEXT', 'MUI_FINISHPAGE_SHOWREADME_NOTCHECKED',
'MUI_FINISHPAGE_SHOWREADME_FUNCTION', 'MUI_FINISHPAGE_LINK', 'MUI_FINISHPAGE_LINK_LOCATION',
'MUI_FINISHPAGE_LINK_COLOR', 'MUI_FINISHPAGE_NOREBOOTSUPPORT', 'MUI_UNCONFIRMPAGE_TEXT_TOP',
'MUI_UNCONFIRMPAGE_TEXT_LOCATION', 'MUI_LANGUAGE', 'MUI_LANGDLL_DISPLAY',
'MUI_LANGDLL_REGISTRY_ROOT', 'MUI_LANGDLL_REGISTRY_KEY', 'MUI_LANGDLL_REGISTRY_VALUENAME',
'MUI_LANGDLL_WINDOWTITLE', 'MUI_LANGDLL_INFO', 'MUI_LANGDLL_ALWAYSSHOW',
'MUI_RESERVEFILE_INSTALLOPTIONS', 'MUI_RESERVEFILE_LANGDLL', 'MUI_FUNCTION_DESCRIPTION_BEGIN',
'MUI_DESCRIPTION_TEXT', 'MUI_FUNCTION_DESCRIPTION_END', 'MUI_INSTALLOPTIONS_EXTRACT',
'MUI_INSTALLOPTIONS_EXTRACT_AS', 'MUI_HEADER_TEXT', 'MUI_INSTALLOPTIONS_DISPLAY',
'MUI_INSTALLOPTIONS_INITDIALOG', 'MUI_INSTALLOPTIONS_SHOW',
'MUI_INSTALLOPTIONS_DISPLAY_RETURN', 'MUI_INSTALLOPTIONS_SHOW_RETURN',
'MUI_INSTALLOPTIONS_READ', 'MUI_INSTALLOPTIONS_WRITE',
'MUI_CUSTOMFUNCTION_GUIINIT', 'MUI_CUSTOMFUNCTION_UNGUIINIT',
'MUI_CUSTOMFUNCTION_ABORT', 'MUI_CUSTOMFUNCTION_UNABORT',
'MUI_PAGE_CUSTOMFUNCTION_PRE', 'MUI_PAGE_CUSTOMFUNCTION_SHOW', 'MUI_PAGE_CUSTOMFUNCTION_LEAVE',
'MUI_WELCOMEFINISHPAGE_CUSTOMFUNCTION_INIT'
),
9 => array(
'LogicLib.nsh', '${LOGICLIB}', 'LOGICLIB_STRCMP', 'LOGICLIB_INT64CMP', 'LOGICLIB_SECTIONCMP', '${If}', '${Unless}',
'${ElseIf}', '${ElseUnless}', '${Else}', '${EndIf}', '${EndUnless}', '${AndIf}', '${AndUnless}',
'${OrIf}', '${OrUnless}', '${IfThen}', '${IfCmd}', '${Select}', '${Case2}', '${Case3}',
'${Case4}', '${Case5}', '${CaseElse}', '${Default}', '${EndSelect}', '${Switch}',
'${Case}', '${EndSwitch}', '${Do}', '${DoWhile}', '${UntilWhile}', '${Continue}', '${Break}',
'${Loop}', '${LoopWhile}', '${LoopUntil}', '${While}', '${ExitWhile}', '${EndWhile}', '${For}',
'${ForEach}', '${ExitFor}', '${Next}', '${Abort}', '${Errors}', '${RebootFlag}', '${Silent}',
'${FileExists}', '${Cmd}', '${SectionIsSelected}', '${SectionIsSectionGroup}',
'${SectionIsSectionGroupEnd}', '${SectionIsBold}', '${SectionIsReadOnly}',
'${SectionIsExpanded}', '${SectionIsPartiallySelected}'
),
10 => array(
'StrFunc.nsh', '${STRFUNC}', '${StrCase}', '${StrClb}', '${StrIOToNSIS}', '${StrLoc}', '${StrNSISToIO}', '${StrRep}',
'${StrSort}', '${StrStr}', '${StrStrAdv}', '${StrTok}', '${StrTrimNewLines}'
),
11 => array(
'UpgradeDLL.nsh', 'UPGRADEDLL_INCLUDED', 'UpgradeDLL'
),
12 => array(
'Sections.nsh', 'SECTIONS_INCLUDED', '${SF_SELECTED}', '${SF_SECGRP}', '${SF_SUBSEC}', '${SF_SECGRPEND}',
'${SF_SUBSECEND}', '${SF_BOLD}', '${SF_RO}', '${SF_EXPAND}', '${SF_PSELECTED}', '${SF_TOGGLED}',
'${SF_NAMECHG}', '${SECTION_OFF}', 'SelectSection', 'UnselectSection', 'ReverseSection',
'StartRadioButtons', 'RadioButton', 'EndRadioButtons', '${INSTTYPE_0}', '${INSTTYPE_1}', '${INSTTYPE_2}',
'${INSTTYPE_3}', '${INSTTYPE_4}', '${INSTTYPE_5}', '${INSTTYPE_6}', '${INSTTYPE_7}', '${INSTTYPE_8}',
'${INSTTYPE_9}', '${INSTTYPE_10}', '${INSTTYPE_11}', '${INSTTYPE_12}', '${INSTTYPE_13}', '${INSTTYPE_14}',
'${INSTTYPE_15}', '${INSTTYPE_16}', '${INSTTYPE_17}', '${INSTTYPE_18}', '${INSTTYPE_19}', '${INSTTYPE_20}',
'${INSTTYPE_21}', '${INSTTYPE_22}', '${INSTTYPE_23}', '${INSTTYPE_24}', '${INSTTYPE_25}', '${INSTTYPE_26}',
'${INSTTYPE_27}', '${INSTTYPE_28}', '${INSTTYPE_29}', '${INSTTYPE_30}', '${INSTTYPE_31}', '${INSTTYPE_32}',
'SetSectionInInstType', 'ClearSectionInInstType', 'SetSectionFlag', 'ClearSectionFlag', 'SectionFlagIsSet'
),
13 => array(
'Colors.nsh', 'WHITE', 'BLACK', 'YELLOW', 'RED', 'GREEN', 'BLUE', 'MAGENTA', 'CYAN', 'rgb2hex'
),
14 => array(
'FileFunc.nsh', '${Locate}', '${GetSize}', '${DriveSpace}', '${GetDrives}', '${GetTime}', '${GetFileAttributes}', '${GetFileVersion}', '${GetExeName}', '${GetExePath}', '${GetParameters}', '${GetOptions}', '${GetRoot}', '${GetParent}', '${GetFileName}', '${GetBaseName}', '${GetFileExt}', '${BannerTrimPath}', '${DirState}', '${RefreshShellIcons}'
),
15 => array(
'TextFunc.nsh', '${LineFind}', '${LineRead}', '${FileReadFromEnd}', '${LineSum}', '${FileJoin}', '${TextCompare}', '${ConfigRead}', '${ConfigWrite}', '${FileRecode}', '${TrimNewLines}'
),
16 => array(
'WordFunc.nsh', '${WordFind}', '${WordFind2X}', '${WordFind3X}', '${WordReplace}', '${WordAdd}', '${WordInsert}', '${StrFilter}', '${VersionCompare}', '${VersionConvert}'
)
),
'SYMBOLS' => array(
),
'CASE_SENSITIVE' => array(
GESHI_COMMENTS => false,
1 => false,
2 => false,
3 => false,
4 => false,
5 => false,
6 => false,
7 => false,
8 => false,
9 => false,
10 => false,
11 => false,
12 => false,
13 => false,
14 => false,
15 => false,
16 => false
),
'STYLES' => array(
'KEYWORDS' => array(
1 => 'color: #000066; font-weight:bold;',
2 => 'color: #000066;',
3 => 'color: #003366;',
4 => 'color: #000099;',
5 => 'color: #ff6600;',
6 => 'color: #ff6600;',
7 => 'color: #006600;',
8 => 'color: #006600;',
9 => 'color: #006600;',
10 => 'color: #006600;',
11 => 'color: #006600;',
12 => 'color: #006600;',
13 => 'color: #006600;',
14 => 'color: #006600;',
15 => 'color: #006600;',
16 => 'color: #006600;'
),
'COMMENTS' => array(
1 => 'color: #666666; font-style: italic;',
2 => 'color: #666666; font-style: italic;',
'MULTI' => 'color: #666666; font-style: italic;'
),
'ESCAPE_CHAR' => array(
0 => 'color: #660066; font-weight: bold;'
),
'BRACKETS' => array(
0 => ''
),
'STRINGS' => array(
0 => 'color: #660066;'
),
'NUMBERS' => array(
0 => ''
),
'METHODS' => array(
0 => ''
),
'SYMBOLS' => array(
0 => ''
),
'REGEXPS' => array(
0 => 'color: #660000;',
1 => 'color: #660000;',
2 => 'color: #660000;',
3 => 'color: #660000;',
4 => 'color: #660000;',
5 => 'color: #660000;',
6 => 'color: #660000;',
7 => 'color: #000099;',
8 => 'color: #003399;'
),
'SCRIPT' => array(
0 => ''
)
),
'URLS' => array(
1 => '',
2 => '',
3 => '',
4 => '',
5 => '',
6 => '',
7 => '',
8 => '',
9 => '',
10 => '',
11 => '',
12 => '',
13 => '',
14 => '',
15 => '',
16 => ''
),
'OOLANG' => false,
'OBJECT_SPLITTERS' => array(
),
'REGEXPS' => array(
0 => '\$\$',
1 => '\$\\r',
2 => '\$\\n',
3 => '\$\\t',
4 => '\$[a-zA-Z0-9_]+',
5 => '\$\{.{1,256}\}',
6 => '\$\\\(.{1,256}\\\)',
7 => array(
GESHI_SEARCH => '([^:\/\\\*\?\"\<\>(?:<PIPE>)\s]*?)(::)([^:\/\\\*\?\"\<\>(?:<PIPE>)\s]*?)',
GESHI_REPLACE => '\\1',
GESHI_MODIFIERS => '',
GESHI_BEFORE => '',
GESHI_AFTER => '\\2\\3'
),
8 => array(
GESHI_SEARCH => '([^:\/\\\*\?\"\<\>(?:<PIPE>)\s]*?)(::)([^:\/\\\*\?\"\<\>(?:<PIPE>)]*?\s)',
GESHI_REPLACE => '\\3',
GESHI_MODIFIERS => '',
GESHI_BEFORE => '\\1\\2',
GESHI_AFTER => ''
)
),
'STRICT_MODE_APPLIES' => GESHI_NEVER,
'SCRIPT_DELIMITERS' => array(
),
'HIGHLIGHT_STRICT_BLOCK' => array(
)
);
| mit |
rlishtaba/py-algorithms | py_algorithms/trees/__init__.py | 1491 | __all__ = [
'level_order_traversal',
'horizontal_level_order_traversal',
'new_bst_from_list',
]
from typing import List, Any
from ..data_structures import TreeNode
from .level_order_traversal import LevelOrderTraversal
from .horizontal_level_order_traversal import HorizontalLevelOrderTraversal
from .bst_from_list import BstFromList
from .in_order_traversal import InOrderTraversal
from .find_range_in_bst import FindRangeInBst
def in_order_traversal(root: TreeNode) -> List[List[Any]]:
    """Factory function: run an in-order traversal of *root*.

    :param root: TreeNode to traverse.
    :return: result of the traversal.
    """
    result = InOrderTraversal.apply(root)
    return result
def level_order_traversal(root: TreeNode) -> List[List[Any]]:
    """Factory function: traverse *root* level by level.

    :param root: TreeNode to traverse.
    :return: list of levels.
    """
    levels = LevelOrderTraversal.apply(root)
    return levels
def horizontal_level_order_traversal(root: TreeNode) -> List[List[Any]]:
    """Factory function: traverse *root* in horizontal (column) level order.

    :param root: TreeNode to traverse.
    :return: list of levels.
    """
    levels = HorizontalLevelOrderTraversal.apply(root)
    return levels
def new_bst_from_list(xs: List[Any]) -> TreeNode:
    """Factory function: build a binary search tree from *xs*.

    :param xs: list of comparable objects.
    :return: root node of the resulting tree.
    """
    tree_root = BstFromList.apply(xs)
    return tree_root
def find_range_in_bst(bst, k1, k2) -> List[Any]:
    """Factory function: collect the values of *bst* that fall in [k1, k2].

    :param bst: TreeNode (root of the search tree).
    :param k1: start of the range.
    :param k2: end of the range.
    :return: list of values.
    """
    found = FindRangeInBst.apply(root=bst, k1=k1, k2=k2)
    return found
| mit |
StereoFlo/simple-mvc | src/Core/Session.php | 7432 | <?php
namespace Core;
use App\Utils;
class Session
{
/**
* @var string The name used for the session
*/
private static $SESSION_NAME = 'f7eac143c2e6c95e84a3e128e9ddcee6';
/**
* Session Age.
* The number of seconds of inactivity before a session expires.
*
* @var integer
*/
private static $SESSION_AGE = 1800;
/**
* Writes a value to the current session data.
*
* @param string $key String identifier.
* @param mixed $value Single value or array of values to be written.
*
* @return mixed Value or array of values written.
* @throws \Exception
*/
public static function write(string $key, $value)
{
if (!\is_string($key)) {
throw new \Exception('Session key must be string value');
}
self::init();
$_SESSION[$key] = $value;
self::age();
return $value;
}
/**
* Reads a specific value from the current session data.
*
* @param string $key String identifier.
* @param boolean $child Optional child identifier for accessing array elements.
*
* @return mixed Returns a string value upon success. Returns false upon failure.
* @throws \Exception
*/
public static function read(string $key, bool $child = false)
{
if (!is_string($key)) {
throw new \Exception('Session key must be string value');
}
self::init();
$keyValue = Utils::getProperty($_SESSION, $key);
if (!$keyValue) {
return false;
}
self::age();
if (false === $child) {
return $keyValue;
}
if (Utils::getProperty($keyValue, $child)) {
return $_SESSION[$key][$child];
}
return false;
}
/**
* Deletes a value from the current session data.
*
* @param string $key String identifying the array key to delete.
*
* @return bool
* @throws \Exception
*/
public static function delete(string $key): bool
{
if (!is_string($key)) {
throw new \Exception('Session key must be string value');
}
self::init();
unset($_SESSION[$key]);
self::age();
return true;
}
/**
* Echos current session data.
*
* @return void
*/
public static function dump()
{
self::init();
echo nl2br(print_r($_SESSION));
}
    /**
     * Starts or resumes a session by delegating to {@link Session::init()}.
     *
     * @see Session::init()
     *
     * @param bool        $regenerate_session_id Whether to rotate the session id on start.
     * @param int         $limit                 Cookie lifetime in seconds (0 = session cookie).
     * @param string      $path                  Cookie path.
     * @param null|string $domain                Cookie domain; defaults to the server name inside init().
     * @param null|bool   $secure_cookies_only   Restrict the cookie to HTTPS when true.
     *
     * @return bool Returns true upon success and false upon failure.
     */
    public static function start(bool $regenerate_session_id = true, int $limit = 0, $path = '/', string $domain = null, $secure_cookies_only = null)
    {
        return self::init($regenerate_session_id, $limit, $path, $domain, $secure_cookies_only);
    }
    /**
     * Replaces the current session id with a fresh random one while keeping
     * all session data: copy $_SESSION, destroy the session, set a new
     * 32-hex-char id, restart, then restore the copied data.
     *
     * @return bool Always true.
     */
    public static function regenerate_session_id(): bool
    {
        // Snapshot current session data before destroying the session.
        $session = [];
        foreach ($_SESSION as $k => $v) {
            $session[$k] = $v;
        }
        session_destroy();
        // 16 random bytes -> 32 hex characters for the new id.
        session_id(bin2hex(openssl_random_pseudo_bytes(16)));
        session_start();
        // Restore the snapshot into the fresh session.
        foreach ($session as $k => $v) {
            $_SESSION[$k] = $v;
        }
        return true;
    }
    /**
     * Returns current session cookie parameters or an empty array when no
     * session is active.
     *
     * @return array Associative array of session cookie parameters.
     */
    public static function params(): array
    {
        $currentSessionData = [];
        // session_id() returns '' when no session has been started.
        if ('' !== session_id()) {
            $currentSessionData = session_get_cookie_params();
        }
        if (empty($currentSessionData)) {
            return [];
        }
        return $currentSessionData;
    }
    /**
     * Closes the current session and releases the session file lock.
     *
     * @return boolean Always true (a missing session is treated as success).
     */
    public static function close(): bool
    {
        if ('' !== session_id()) {
            session_write_close();
            return true;
        }
        return true;
    }
    /**
     * Alias for {@link Session::close()}.
     *
     * @see Session::close()
     * @return boolean Always true (see close()).
     */
    public static function commit(): bool
    {
        return self::close();
    }
    /**
     * Removes session data, expires the session cookie and destroys the
     * current session.
     *
     * @return bool True on success; false when no session is active.
     */
    public static function destroy(): bool
    {
        if (empty(session_id())) {
            return false;
        }
        $_SESSION = [];
        // Expire the session cookie client-side by setting it in the past.
        if (ini_get("session.use_cookies")) {
            $params = session_get_cookie_params();
            setcookie(session_name(), '', time() - 42000, $params["path"], $params["domain"], $params["secure"], $params["httponly"]);
        }
        return session_destroy();
    }
    /**
     * Expires the session if it has been inactive for more than
     * self::$SESSION_AGE seconds; otherwise refreshes the activity timestamp.
     *
     * NOTE(review): on expiry this destroys the session and then throws, so
     * every public read/write/delete can surface this exception — confirm
     * callers expect that.
     *
     * @return void
     * @throws \Exception When the session has expired.
     */
    private static function age()
    {
        $last = $_SESSION['LAST_ACTIVE'] ?? false;
        if (false !== $last && (time() - $last > self::$SESSION_AGE)) {
            self::destroy();
            throw new \Exception('Something is wrong with a session');
        }
        $_SESSION['LAST_ACTIVE'] = time();
    }
    /**
     * Initializes a new session or resumes an existing session.
     *
     * On first call (no session id yet) it configures the save path (from the
     * project-local Config helper), name, and cookie parameters, then starts
     * the session. On subsequent calls it ages the session and, when
     * $regenerate_session_id is set, rotates the id probabilistically
     * (rand(1,100) <= 5, i.e. ~5% of calls).
     *
     * @param bool        $regenerate_session_id Whether id rotation is enabled.
     * @param int         $limit                 Cookie lifetime in seconds.
     * @param string      $path                  Cookie path.
     * @param null|string $domain                Cookie domain; defaults to $_SERVER['SERVER_NAME'].
     * @param bool|null   $secure_cookies_only   Restrict cookie to HTTPS when true.
     * @param string      $baseUrl               Prefix for the session save path.
     *
     * @return bool Returns true upon success and false upon failure.
     * @throws \Exception When sessions are disabled or startup fails.
     */
    private static function init(bool $regenerate_session_id = false, int $limit = 0, string $path = '/', string $domain = null, bool $secure_cookies_only = null, string $baseUrl = ''): bool
    {
        // session_status() exists since PHP 5.4; guard for older runtimes.
        if (function_exists('session_status')) {
            if (session_status() == PHP_SESSION_DISABLED) {
                throw new \Exception('Session is disabled');
            }
        }
        if (empty(session_id())) {
            try {
                $site_root = $baseUrl;
                // NOTE(review): Config::getConfig is a project helper —
                // presumably returns the configured session directory; verify.
                $session_save_path = $site_root . Config::getConfig('session', 'session_dir');
                session_save_path($session_save_path);
                session_name(self::$SESSION_NAME);
                $domain = $domain ?? $_SERVER['SERVER_NAME'];
                // Final `true` marks the cookie HttpOnly.
                session_set_cookie_params($limit, $path, $domain, $secure_cookies_only, true);
                session_start();
                if ($regenerate_session_id) {
                    self::regenerate_session_id();
                }
                return true;
            } catch (\Exception $exception) {
                // Re-wrapped rather than rethrown; the original stack trace is lost.
                throw new \Exception($exception->getMessage());
            }
        }
        self::age();
        // Probabilistic rotation: regenerate on roughly 5% of requests.
        if ($regenerate_session_id && rand(1, 100) <= 5) {
            self::regenerate_session_id();
            $_SESSION['regenerated_id'] = session_id();
        }
        return true;
    }
} | mit |
facekapow/hypertunes | views/index/js/loaders/chromecast/loader.js | 6156 | 'use strict';
const {PlayerStatus} = require('../../player-classes');
const backend = require('./backend');
const cast = document.getElementById('player-cast');
const castv2 = require('castv2-client');
const Client = castv2.Client;
const DefaultMediaReceiver = castv2.DefaultMediaReceiver;
const mdns = require('mdns-js');
const {Popup} = require('../../managers/popups');
/**
 * Tiny DOM element factory.
 *
 *   create('div')          -> bare <div>
 *   create('a', { ... })   -> <a> with the descriptor applied:
 *     - obj.css:     map of inline style properties (applied onto elm.style)
 *     - obj.style:   map applied onto elm.style
 *     - obj.dataset: map applied onto elm.dataset
 *     - any other key is assigned directly as an element property.
 *
 * Returns undefined when `opt` is not a string or `obj` is neither
 * undefined nor an object (matching the original's fallthrough).
 *
 * Fix: `css` was missing from the skip list, so the raw descriptor object
 * was also assigned onto the element (elm.css = {...}); it is now treated
 * purely as a style descriptor.
 */
const create = (opt, obj) => {
  if (typeof opt !== 'string') return undefined;
  if (typeof obj === 'undefined') return document.createElement(opt);
  if (typeof obj !== 'object') return undefined;

  const elm = document.createElement(opt);
  // Keys that receive special handling and must not be copied verbatim.
  const special = { css: true, style: true, dataset: true };

  if (typeof obj.css !== 'undefined') {
    for (const name in obj.css) {
      elm.style[name] = obj.css[name];
    }
  }
  // `style` and `dataset` are live DOM objects: copy key-by-key.
  for (const bag of ['style', 'dataset']) {
    if (typeof obj[bag] !== 'undefined' && obj[bag] !== null) {
      for (const name in obj[bag]) {
        elm[bag][name] = obj[bag][name];
      }
    }
  }
  for (const prop in obj) {
    if (special[prop]) continue;
    elm[prop] = obj[prop];
  }
  return elm;
};
// Chromecast loader: discovers googlecast devices via mDNS, shows a device
// picker popup, and on selection proxies playback through a cast backend.
module.exports = () => {
  const errPop = new Popup({
    message: 'Oh no!',
    ok: true,
    hideOnAction: true
  });
  const devPop = new Popup({
    message: 'Choose a device'
  });
  // True until the first mDNS result arrives; used to wire up the cast icon once.
  let first = true;
  const browser = mdns.createBrowser(mdns.tcp('googlecast'));
  // "Connecting" animation state: frame index 0..2, ping-ponged by upOrDown.
  let animationI = 0;
  let animationInterval = null;
  let upOrDown = 0;
  const animateCast = () => {
    animationI = 0;
    animationInterval = setInterval(() => {
      cast.src = `resources/images/cast_icons/${animationI}.png`;
      if (upOrDown === 0) {
        animationI++;
      } else {
        animationI--;
      }
      if (animationI > 2) {
        upOrDown = 1;
        animationI = 2;
      } else if (animationI < 0) {
        upOrDown = 0;
        animationI = 0;
      }
    }, 500);
  }
  // Stops the icon animation if it is running (no-op otherwise).
  const stopAnimatingCast = () => (animationInterval) && (clearInterval(animationInterval), animationInterval = null);
  // Connects to the chosen device at `host` and hands playback to it.
  const wantsCast = (host) => {
    browser.stop();
    // Last playback position reported by the cast backend, used to resume
    // locally when the cast session closes.
    let lastTime = 0;
    const onclose = () => {
      // Restore the icon/click handler, then move playback back to a local backend.
      cast.onclick = () => window.managers.popups.add(devPop);
      cast.src = 'resources/images/cast_icons/cast.png';
      if (!(playerClient.status === PlayerStatus.finished || playerClient.status === PlayerStatus.idle)) {
        playerClient.deinit();
        app.events.once('returnCastAudioPath', (pathname) => {
          app.events.emit('closeCastServer');
          const cb = () => {
            playerClient.load(pathname);
            playerClient.seek(lastTime);
            if (playerClient.status !== PlayerStatus.playing) {
              playerClient.pause();
            } else {
              playerClient.play();
            }
          }
          if (pathname) {
            if (playerClient.backend) {
              cb();
            } else {
              playerClient.once('init', () => cb()); // we don't care which backend it's switched to, just load the song to it
            }
          }
        });
        app.events.emit('getCastAudioPath');
      }
    }
    castClient.on('error', (err) => {
      console.log(`error emitted: ${err.stack}`);
      errPop.html = `There was an error casting.<br>Try that again (disconnect and reconnect).<br>Details: ${err.message}`;
      window.managers.popups.add(errPop);
      // These two messages indicate a dead connection: clean up as a close.
      if (err.message === 'This socket is closed' || err.message === 'Device timeout') {
        onclose();
      } else {
        castClient.close();
      }
    });
    castClient.connect(host, () => {
      castClient.launch(DefaultMediaReceiver, (err, player) => {
        const cb = () => player.close();
        player.once('close', onclose);
        // While connected, clicking the icon disconnects.
        cast.onclick = cb;
        app.events.on('appWillQuit', cb);
        stopAnimatingCast();
        cast.src = 'resources/images/cast_icons/connected.png';
        // request success
        app.events.once('castServerStarted', (addr) => {
          const castBackend = backend(addr, player);
          const onupdate = (e, time) => {
            if (e === 'timeupdate') lastTime = time;
          }
          // Snapshot current playback so it can be resumed on the cast backend.
          let src = (() => playerClient.source)();
          let meta = (() => playerClient.metadata)();
          let time = (() => playerClient.time)();
          if (src) playerClient.pause();
          playerClient.once('init', (backend) => {
            if (backend === castBackend) {
              backend.on('update', onupdate);
              playerClient.once('deinit', (backend) => {
                if (backend === castBackend) backend.removeListener('update', onupdate);
              });
              if (src) {
                // Resume at the snapshotted position once playback starts.
                playerClient.once('status-playing', () => {
                  playerClient.pause();
                  playerClient.seek(time);
                  playerClient.play();
                });
                playerClient.load(src, meta);
              }
            }
          });
          playerClient.switchBackend(castBackend);
        });
        app.events.emit('startCastServer');
      });
    });
  }
  const castClient = new Client();
  // Deduplicates mDNS results by device address.
  let hostsFound = {};
  browser.on('update', (service) => {
    if (first) {
      // First device found: reveal the cast icon and wire the picker popup.
      cast.onclick = () => {
        if (playerClient.backend && !playerClient.backend.supportsCast) return toastify.error('The current backend doesn\'t support casting.');
        window.managers.popups.add(devPop);
      }
      cast.classList.add('shown');
      first = false;
    }
    const addr = service.addresses[0];
    let hostname = service.host;
    if (!addr || !hostname || hostsFound[addr]) return;
    // Strip the trailing ".local" mDNS suffix for display.
    if (hostname.substr(hostname.length-6) === '.local') hostname = hostname.substr(0, hostname.length-6);
    devPop.add(create('a', {
      href: '#',
      className: 'devCont--device',
      innerHTML: hostname,
      dataset: {
        hostaddr: addr
      },
      onclick: (e) => {
        e.preventDefault();
        animateCast();
        wantsCast(addr);
        devPop.shown = false;
        return false;
      }
    }));
    hostsFound[addr] = true;
  });
  browser.on('ready', () => browser.discover());
}
| mit |
durub/stony | src/test/java/com/github/stony/interpreter/InterpreterTest.java | 3351 | package com.github.stony.interpreter;
import org.junit.Assert;
import java.io.*;
import java.net.URISyntaxException;
import java.net.URL;
/**
 * Barebones framework to test story files. Testing classes should extend this.
 *
 * The idea is to run a story to completion and compare its byte output
 * against an expected string.
 */
class InterpreterTest {
    /**
     * InterpreterTest should not be instantiated directly, only extended.
     */
    protected InterpreterTest() {
    }

    /**
     * Asserts that the output of the finished story equals the output parameter.
     *
     * @param filePath resource file path.
     * @param output expected output.
     *
     * @throws FileNotFoundException when the file doesn't exist.
     * @throws IOException when a read error occurred.
     * @throws URISyntaxException when the file path doesn't convert to a valid URI.
     */
    protected void assertOutputEquals(String filePath, String output) throws IOException, URISyntaxException {
        // Empty stdin; the story must not require interactive input.
        final InputStream inputStream = new ByteArrayInputStream(new byte[0]);
        final OutputStream outputStream = new ByteArrayOutputStream();
        final Interpreter interpreter = new Interpreter(readFileData(filePath), inputStream, outputStream);
        assertOutputEquals(interpreter, output);
    }

    /**
     * Runs the interpreter to completion and asserts that its captured output
     * equals the output parameter.
     *
     * @param interpreter interpreter with a ByteArrayOutputStream as the output stream.
     * @param output expected output.
     * @throws IOException if converting the output stream to a UTF-8 string failed.
     * @throws IllegalArgumentException when the interpreter output stream is not a ByteArrayOutputStream.
     */
    protected void assertOutputEquals(Interpreter interpreter, String output) throws IOException {
        final OutputStream outputStream = interpreter.getOutputStream();
        if (!(outputStream instanceof ByteArrayOutputStream)) {
            throw new IllegalArgumentException("Interpreter output stream must be a ByteArrayOutputStream.");
        }
        while (!interpreter.isFinished()) {
            interpreter.executeInstruction();
        }
        Assert.assertEquals(output, ((ByteArrayOutputStream) outputStream).toString("UTF-8"));
    }

    /**
     * Returns the content of a resource file as a byte array.
     *
     * @param filePath path to the resource file.
     * @return content of the resource file.
     *
     * @throws FileNotFoundException when the file doesn't exist.
     * @throws IOException when a read error occurred.
     * @throws URISyntaxException when the file path doesn't convert to a valid URI -- should not happen.
     */
    protected byte[] readFileData(String filePath) throws IOException, URISyntaxException {
        final URL url = getClass().getResource(filePath);
        if (url == null) {
            throw new FileNotFoundException("Invalid file path: " + filePath);
        }
        final File file = new File(url.toURI());
        // try-with-resources replaces the original manual try/finally close.
        try (RandomAccessFile randomAccessFile = new RandomAccessFile(file, "r")) {
            byte[] bytes = new byte[(int) randomAccessFile.length()];
            randomAccessFile.readFully(bytes);
            return bytes;
        }
    }
}
| mit |
ehough/epilog | src/test/php/ehough/epilog/handler/BrowserConsoleHandlerTest.php | 4584 | <?php
/*
* This file is part of the Monolog package.
*
* (c) Jordi Boggiano <j.boggiano@seld.be>
*
* For the full copyright and license information, please view the LICENSE
* file that was distributed with this source code.
*/
/**
* @covers ehough_epilog_handler_BrowserConsoleHandler
*/
class BrowserConsoleHandlerTest extends ehough_epilog_TestCase
{
// Clears the handler's static record buffer before each test so console
// output queued by one test cannot leak into the script of the next.
protected function setUp()
{
if (version_compare(PHP_VERSION, '5.3') < 0) {
$this->markTestSkipped('PHP < 5.3');
return;
}
ehough_epilog_handler_BrowserConsoleHandler::reset();
}
// Invokes the private static generateScript() via reflection and returns
// the JavaScript the handler would emit for the browser console.
protected function generateScript()
{
$reflMethod = new ReflectionMethod('ehough_epilog_handler_BrowserConsoleHandler', 'generateScript');
$reflMethod->setAccessible(true);
return $reflMethod->invoke(null);
}
// [[text]]{style} markup is translated into %c console format directives.
public function testStyling()
{
$handler = new ehough_epilog_handler_BrowserConsoleHandler();
$handler->setFormatter($this->getIdentityFormatter());
$handler->handle($this->getRecord(ehough_epilog_Logger::DEBUG, 'foo[[bar]]{color: red}'));
$expected = <<<EOF
(function (c) {if (c && c.groupCollapsed) {
c.log("%cfoo%cbar%c", "font-weight: normal", "color: red", "font-weight: normal");
}})(console);
EOF;
$this->assertEquals($expected, $this->generateScript());
}
// Quotes, newlines and literal brackets in the message must be escaped so
// the generated JavaScript stays syntactically valid.
public function testEscaping()
{
$handler = new ehough_epilog_handler_BrowserConsoleHandler();
$handler->setFormatter($this->getIdentityFormatter());
$handler->handle($this->getRecord(ehough_epilog_Logger::DEBUG, "[foo] [[\"bar\n[baz]\"]]{color: red}"));
$expected = <<<EOF
(function (c) {if (c && c.groupCollapsed) {
c.log("%c[foo] %c\"bar\\n[baz]\"%c", "font-weight: normal", "color: red", "font-weight: normal");
}})(console);
EOF;
$this->assertEquals($expected, $this->generateScript());
}
// The autolabel macro assigns a badge color per label and reuses the same
// color when a label ("foo") appears again.
public function testAutolabel()
{
$handler = new ehough_epilog_handler_BrowserConsoleHandler();
$handler->setFormatter($this->getIdentityFormatter());
$handler->handle($this->getRecord(ehough_epilog_Logger::DEBUG, '[[foo]]{macro: autolabel}'));
$handler->handle($this->getRecord(ehough_epilog_Logger::DEBUG, '[[bar]]{macro: autolabel}'));
$handler->handle($this->getRecord(ehough_epilog_Logger::DEBUG, '[[foo]]{macro: autolabel}'));
$expected = <<<EOF
(function (c) {if (c && c.groupCollapsed) {
c.log("%c%cfoo%c", "font-weight: normal", "background-color: blue; color: white; border-radius: 3px; padding: 0 2px 0 2px", "font-weight: normal");
c.log("%c%cbar%c", "font-weight: normal", "background-color: green; color: white; border-radius: 3px; padding: 0 2px 0 2px", "font-weight: normal");
c.log("%c%cfoo%c", "font-weight: normal", "background-color: blue; color: white; border-radius: 3px; padding: 0 2px 0 2px", "font-weight: normal");
}})(console);
EOF;
$this->assertEquals($expected, $this->generateScript());
}
// A record with context data is rendered as a collapsed console group
// listing each context key/value pair.
public function testContext()
{
$handler = new ehough_epilog_handler_BrowserConsoleHandler();
$handler->setFormatter($this->getIdentityFormatter());
$handler->handle($this->getRecord(ehough_epilog_Logger::DEBUG, 'test', array('foo' => 'bar')));
$expected = <<<EOF
(function (c) {if (c && c.groupCollapsed) {
c.groupCollapsed("%ctest", "font-weight: normal");
c.log("%c%s", "font-weight: bold", "Context");
c.log("%s: %o", "foo", "bar");
c.groupEnd();
}})(console);
EOF;
$this->assertEquals($expected, $this->generateScript());
}
// Records are buffered statically, so two handler instances contribute to
// one combined script in the order the records were handled.
public function testConcurrentHandlers()
{
$handler1 = new ehough_epilog_handler_BrowserConsoleHandler();
$handler1->setFormatter($this->getIdentityFormatter());
$handler2 = new ehough_epilog_handler_BrowserConsoleHandler();
$handler2->setFormatter($this->getIdentityFormatter());
$handler1->handle($this->getRecord(ehough_epilog_Logger::DEBUG, 'test1'));
$handler2->handle($this->getRecord(ehough_epilog_Logger::DEBUG, 'test2'));
$handler1->handle($this->getRecord(ehough_epilog_Logger::DEBUG, 'test3'));
$handler2->handle($this->getRecord(ehough_epilog_Logger::DEBUG, 'test4'));
$expected = <<<EOF
(function (c) {if (c && c.groupCollapsed) {
c.log("%ctest1", "font-weight: normal");
c.log("%ctest2", "font-weight: normal");
c.log("%ctest3", "font-weight: normal");
c.log("%ctest4", "font-weight: normal");
}})(console);
EOF;
$this->assertEquals($expected, $this->generateScript());
}
}
| mit |
devsunny/common-tools | src/main/java/com/asksunny/validator/NotEqualsValueValidator.java | 654 | package com.asksunny.validator;
import com.asksunny.validator.annotation.ValueValidation;
/**
 * Validator that passes when the inspected value is NOT equal to the
 * configured expectation. It reuses the comparison logic of
 * {@link EqualsValueValidator} and simply inverts its outcome via
 * {@code setNegate(true)} in every constructor.
 */
public class NotEqualsValueValidator extends EqualsValueValidator {
// Delegates to the equals-validator constructor, then flips the result.
public NotEqualsValueValidator(Class<?> targetType, Class<?> valueType, String fieldName, ValueValidation fv) {
super(targetType, valueType, fieldName, fv);
setNegate(true);
}
// Variant without a separate value type.
public NotEqualsValueValidator(Class<?> fieldType, String fieldName, ValueValidation fv) {
super(fieldType, fieldName, fv);
setNegate(true);
}
// Variant built from an already-parsed validation rule.
public NotEqualsValueValidator(String fieldName, ValueValidationRule rule) {
super(fieldName, rule);
setNegate(true);
}
}
| mit |
mining/frontend | gulpfile.js | 806 | var gulp = require('gulp');
var jshint = require('gulp-jshint');
var gettext = require('gulp-angular-gettext');
// Scan templates and scripts for translatable strings and collect them
// into a single POT template under ./po/.
gulp.task('pot', function () {
    var sources = [
        './assets/app/views/*.html',
        './views/*.html',
        './assets/app/scripts/**/*.js'
    ];
    var extractOptions = {
        // options to pass to angular-gettext-tools...
    };
    return gulp.src(sources)
        .pipe(gettext.extract('template.pot', extractOptions))
        .pipe(gulp.dest('./po/'));
});
// Compile every PO translation file into a JavaScript module that the
// Angular app loads from its i18n directory.
gulp.task('translations', function () {
    var compileOptions = {
        // options to pass to angular-gettext-tools...
        format: 'javascript'
    };
    return gulp.src('./po/**/*.po')
        .pipe(gettext.compile(compileOptions))
        .pipe(gulp.dest('./assets/app/scripts/i18n'));
});
// Default task: lint all front-end JavaScript and report issues on the console.
gulp.task('default', function () {
    var scripts = gulp.src('./assets/**/*.js');
    return scripts
        .pipe(jshint())
        .pipe(jshint.reporter('default'));
});
| mit |
winseros/gulp-armarapify-plugin | src/parser/nodes/readers/test/expressionReader.spec.ts | 21285 | import { ReaderUtility } from './../readerUtility';
import { tokenTypes } from './../../../tokens/tokenTypes';
import { Token } from '../../../tokens/token';
import { TokenIterator } from '../../../tokenIterator';
import { ExpressionReader } from '../expressionReader';
import { nodeTypes } from '../../nodeTypes';
import { mathOperators } from '../../../../mathOperators';
import { StringNode } from '../../stringNode';
import { MathGrpNode } from '../../mathGrpNode';
import { MathOpNode } from '../../mathOpNode';
import { MathNegNode } from '../../mathNegNode';
import { WordNode } from '../../wordNode';
import { IntegerNode } from '../../integerNode';
import { FloatNode } from '../../floatNode';
// Builds a moveNext() stand-in for a mocked TokenIterator: every call
// advances through `calls`, publishes the current token on
// `iteratorMock.current`, and reports whether a token was still available
// (false once the scripted stream is exhausted).
const implementFakeIterator = (iteratorMock: any, calls: Token<any>[]): any => {
    let position = 0;
    return (): boolean => {
        const token = calls[position];
        position += 1;
        iteratorMock.current = token;
        return Boolean(token);
    };
};
// Unit tests for ExpressionReader. Each test replaces TokenIterator.moveNext
// with a jasmine spy driven by implementFakeIterator, so the reader consumes
// a scripted token stream (one token per moveNext() call) instead of parsing
// real text.
describe('parser/nodes/readers/expressionReader', () => {
describe('readExpression', () => {
// A terminator token type was requested but the stream ends with a newline.
it('should throw if a ; is missing at the end of the line', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.integer, tokenValue: 1, lineNumber: 0, colNumber: 0, index: 0},
{ tokenType: tokenTypes.newline, tokenValue: '\r\n', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
expect(() => reader.readExpression(tokenTypes.semicolon)).toThrowError('; expected at the end of the line');
});
// Two operands in a row: an operator must separate them.
it('should throw if a math operator is missing', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.integer, tokenValue: 1, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 2, lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
expect(() => reader.readExpression(tokenTypes.semicolon)).toThrowError(`Math operator expected but was "2" of type "${tokenTypes.integer}"`);
});
// A token that cannot begin an expression (here "=") is rejected.
it('should throw if next token is unexpected', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.equals, tokenValue: '=', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
expect(() => reader.readExpression(tokenTypes.semicolon)).toThrowError(`Unexpected token "=" of type "${tokenTypes.equals}"`);
});
// Only unary +/- may start an expression; "*" in prefix position is invalid.
it('should throw if math operator is unexpected', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.mul, lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
expect(() => reader.readExpression(tokenTypes.semicolon)).toThrowError(`Unexpected math operator "*" of type "${tokenTypes.mathOp}"`);
});
// A unary minus must be glued to its operand; whitespace after it fails.
it('should throw if math operator is followed by a space', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.minus, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.whitespace, tokenValue: ' ', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
expect(() => reader.readExpression(tokenTypes.semicolon)).toThrowError(`Unexpected token " " of type "${tokenTypes.whitespace}"`);
});
// With no terminator argument, end-of-stream finishes the expression.
it('should read an expression until a file ends', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.string, tokenValue: 'abc', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
const expression = reader.readExpression();
expect(expression).toBeDefined();
const str = expression as StringNode;
expect(str.type).toEqual(nodeTypes.string);
expect(str.value).toEqual('abc');
});
// A lone string literal terminated by ";" yields a StringNode.
it('should read "string" expression', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.string, tokenValue: 'abc', lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.semicolon, tokenValue: ';', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
const expression = reader.readExpression(tokenTypes.semicolon);
expect(expression).toBeDefined();
const str = expression as StringNode;
expect(str.type).toEqual(nodeTypes.string);
expect(str.value).toEqual('abc');
});
// Simple binary expression: 1.5 + 2 becomes one MathOpNode.
it('should read "a + b" expression', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.float, tokenValue: 1.5, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.plus, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 2, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.semicolon, tokenValue: ';', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
const expression = reader.readExpression(tokenTypes.semicolon);
expect(expression).toBeDefined();
const plus = expression as MathOpNode;
expect(plus.type).toEqual(nodeTypes.mathOp);
expect(plus.operator).toEqual('+');
const left = plus.left as FloatNode;
expect(left.type).toEqual(nodeTypes.float);
expect(left.value).toEqual(1.5);
const right = plus.right as IntegerNode;
expect(right.type).toEqual(nodeTypes.integer);
expect(right.value).toEqual(2);
});
// Precedence: 1*2 binds tighter than "-", so "-" sits at the tree root.
it('should read "a*b + c" expression', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.integer, tokenValue: 1, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.mul, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 2, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.minus, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.string, tokenValue: 'abc', lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.semicolon, tokenValue: ';', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
const expression = reader.readExpression(tokenTypes.semicolon);
expect(expression).toBeDefined();
const minus = expression as MathOpNode;
expect(minus.type).toEqual(nodeTypes.mathOp);
expect(minus.operator).toEqual('-');
const minusRight = minus.right as StringNode;
expect(minusRight.type).toEqual(nodeTypes.string);
expect(minusRight.value).toEqual('abc');
const mul = minus.left as MathOpNode;
expect(mul.type).toEqual(nodeTypes.mathOp);
expect(mul.operator).toEqual('*');
const mulLeft = mul.left as IntegerNode;
expect(mulLeft.type).toEqual(nodeTypes.integer);
expect(mulLeft.value).toEqual(1);
const mulRight = mul.right as IntegerNode;
expect(mulRight.type).toEqual(nodeTypes.integer);
expect(mulRight.value).toEqual(2);
});
// Precedence: "^" binds tighter than "*", so "*" is the root here.
it('should read "a*b ^ c" expression', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.integer, tokenValue: 1, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.mul, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 2, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.pow, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.word, tokenValue: 'abc', lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.semicolon, tokenValue: ';', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
const expression = reader.readExpression(tokenTypes.semicolon);
expect(expression).toBeDefined();
const mul = expression as MathOpNode;
expect(mul.type).toEqual(nodeTypes.mathOp);
expect(mul.operator).toEqual('*');
const mulLeft = mul.left as IntegerNode;
expect(mulLeft.type).toEqual(nodeTypes.integer);
expect(mulLeft.value).toEqual(1);
const exp = mul.right as MathOpNode;
expect(exp.type).toEqual(nodeTypes.mathOp);
expect(exp.operator).toEqual('^');
const expLeft = exp.left as IntegerNode;
expect(expLeft.type).toEqual(nodeTypes.integer);
expect(expLeft.value).toEqual(2);
const expRight = exp.right as WordNode;
expect(expRight.type).toEqual(nodeTypes.word);
expect(expRight.value).toEqual('abc');
});
// Precedence with the higher-priority operator on the right: 1 + (2*3).
it('should read "a + b*c" expression', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.integer, tokenValue: 1, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.plus, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 2, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.mul, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 3, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.semicolon, tokenValue: ';', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
const expression = reader.readExpression(tokenTypes.semicolon);
expect(expression).toBeDefined();
const plus = expression as MathOpNode;
expect(plus.type).toEqual(nodeTypes.mathOp);
expect(plus.operator).toEqual('+');
const plusLeft = plus.left as IntegerNode;
expect(plusLeft.type).toEqual(nodeTypes.integer);
expect(plusLeft.value).toEqual(1);
const mul = plus.right as MathOpNode;
expect(mul.type).toEqual(nodeTypes.mathOp);
expect(mul.operator).toEqual('*');
const mulLeft = mul.left as IntegerNode;
expect(mulLeft.type).toEqual(nodeTypes.integer);
expect(mulLeft.value).toEqual(2);
const mulRight = mul.right as IntegerNode;
expect(mulRight.type).toEqual(nodeTypes.integer);
expect(mulRight.value).toEqual(3);
});
// Parentheses override precedence and produce a MathGrpNode wrapper.
it('should read "(a + b) * c" expression', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.bracketOpen, tokenValue: '(', lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 1, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.plus, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 2, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.bracketClose, tokenValue: ')', lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.mul, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 3, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.semicolon, tokenValue: ';', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
const expression = reader.readExpression(tokenTypes.semicolon);
expect(expression).toBeDefined();
const mul = expression as MathOpNode;
expect(mul.type).toEqual(nodeTypes.mathOp);
expect(mul.operator).toEqual('*');
const mulRight = mul.right as IntegerNode;
expect(mulRight.type).toEqual(nodeTypes.integer);
expect(mulRight.value).toEqual(3);
const grp = mul.left as MathGrpNode;
expect(grp.type).toEqual(nodeTypes.mathGrp);
const plus = grp.value as MathOpNode;
expect(plus.type).toEqual(nodeTypes.mathOp);
expect(plus.operator).toEqual('+');
const plusLeft = plus.left as IntegerNode;
expect(plusLeft.type).toEqual(nodeTypes.integer);
expect(plusLeft.value).toEqual(1);
const plusRight = plus.right as IntegerNode;
expect(plusRight.type).toEqual(nodeTypes.integer);
expect(plusRight.value).toEqual(2);
});
// Nested parentheses nest MathGrpNodes rather than collapsing them.
it('should read "((a / b))" expression', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.bracketOpen, tokenValue: '(', lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.bracketOpen, tokenValue: '(', lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 1, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.div, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.integer, tokenValue: 2, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.bracketClose, tokenValue: ')', lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.bracketClose, tokenValue: ')', lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.semicolon, tokenValue: ';', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
const expression = reader.readExpression(tokenTypes.semicolon);
expect(expression).toBeDefined();
const grp1 = expression as MathGrpNode;
expect(grp1.type).toEqual(nodeTypes.mathGrp);
const grp2 = grp1.value as MathGrpNode;
expect(grp2.type).toEqual(nodeTypes.mathGrp);
const div = grp2.value as MathOpNode;
expect(div.type).toEqual(nodeTypes.mathOp);
expect(div.operator).toEqual('/');
const plusLeft = div.left as IntegerNode;
expect(plusLeft.type).toEqual(nodeTypes.integer);
expect(plusLeft.value).toEqual(1);
const plusRight = div.right as IntegerNode;
expect(plusRight.type).toEqual(nodeTypes.integer);
expect(plusRight.value).toEqual(2);
});
// Unary minus produces a MathNegNode wrapping the operand.
it('should read "-a" expression', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.minus, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.float, tokenValue: 1.5, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.semicolon, tokenValue: ';', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
const expression = reader.readExpression(tokenTypes.semicolon);
expect(expression).toBeDefined();
const neg = expression as MathNegNode;
expect(neg.type).toEqual(nodeTypes.mathNeg);
const number = neg.value as FloatNode;
expect(number.type).toEqual(nodeTypes.float);
expect(number.value).toEqual(1.5);
});
// Unary plus is a no-op: the operand node is returned without a wrapper.
it('should read "+a" expression', () => {
const tokenIterator = jasmine.createSpyObj('tokenIteratorSpy', ['moveNext']) as TokenIterator;
const spyMoveNext = tokenIterator.moveNext as jasmine.Spy;
spyMoveNext.and.callFake(implementFakeIterator(tokenIterator, [
{ tokenType: tokenTypes.mathOp, tokenValue: mathOperators.plus, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.float, tokenValue: 1.5, lineNumber: 0, colNumber: 0, index: 0 },
{ tokenType: tokenTypes.semicolon, tokenValue: ';', lineNumber: 0, colNumber: 0, index: 0 }
]));
tokenIterator.moveNext();
const reader = new ExpressionReader(new ReaderUtility(tokenIterator));
const expression = reader.readExpression(tokenTypes.semicolon);
expect(expression).toBeDefined();
const number = expression as FloatNode;
expect(number.type).toEqual(nodeTypes.float);
expect(number.value).toEqual(1.5);
});
});
});
| mit |
alvinoktavianus/Maiga-CI | application/controllers/Employee.php | 7761 | <?php
defined('BASEPATH') OR exit('No direct script access allowed');
/**
 * Employee self-service controller (CodeIgniter 3).
 *
 * Every action first checks that a logged-in session exists and that the
 * session role is 'emp'; any other request is redirected to the site root.
 */
class Employee extends CI_Controller {
/**
 * Employee dashboard: renders the home view with the logged-in
 * employee's profile, looked up by session e-mail.
 */
public function index()
{
if ( $this->session->has_userdata('user_session') && $this->session->userdata('user_session')['role'] == 'emp' ) {
$this->load->model('employee_model');
$data['page_title'] = "Employee ".$this->session->userdata('user_session')['email']." | Maiga";
$data['page'] = 'homeview';
$data['profile'] = $this->employee_model->find_by_email($this->session->userdata('user_session')['email']);
$this->load->view('include/masterlogin', $data);
} else {
redirect('/','refresh');
}
}
/**
 * Shows the profile edit page for the logged-in employee.
 */
public function profile()
{
if ( $this->session->has_userdata('user_session') && $this->session->userdata('user_session')['role'] == 'emp' ) {
$this->load->model('employee_model');
$data['page_title'] = "Profile | Maiga";
$data['page'] = "profileview";
$data['employee'] = $this->employee_model->find_by_email($this->session->userdata('user_session')['email']);
$this->load->view('include/masterlogin', $data);
} else {
redirect('/','refresh');
}
}
/**
 * Shows the assignment upload page with the topic list and the
 * employee's previously uploaded assignments.
 */
public function uploadassignment()
{
if ( $this->session->has_userdata('user_session') && $this->session->userdata('user_session')['role'] == 'emp' ) {
$this->load->model('employee_model');
$data['page_title'] = "Upload Assignment | Maiga";
$data['page'] = "uploadassignmentview";
$data['options'] = $this->employee_model->get_topic();
$data['assignments'] = $this->employee_model->get_all_assignment($this->session->userdata('user_session')['email']);
$this->load->view('include/masterlogin', $data);
} else {
redirect('/','refresh');
}
}
/**
 * Lists the payroll documents available to the logged-in employee.
 */
public function downloadpayroll()
{
if ( $this->session->has_userdata('user_session') && $this->session->userdata('user_session')['role'] == 'emp' ) {
$this->load->model('employee_model');
$data['page_title'] = "Download Payroll | Maiga";
$data['page'] = "downloadpayrollview";
$data['payrolls'] = $this->employee_model->get_all_payrolls_by_email($this->session->userdata('user_session')['email']);
$this->load->view('include/masterlogin', $data);
} else {
redirect('/','refresh');
}
}
/**
 * Streams a payroll file to the browser.
 *
 * SECURITY FIX: the file name comes straight from the query string, so it
 * is passed through basename() to strip any directory components. Without
 * this, a request such as ?filename=../../application/config/database.php
 * could download arbitrary files from the server (path traversal).
 */
public function getpayroll()
{
if ( $this->session->has_userdata('user_session') && $this->session->userdata('user_session')['role'] == 'emp' && $this->input->get('filename') != null ) {
// NOTE(review): there is no check that the requested payroll actually
// belongs to the logged-in employee -- presumably any employee could
// fetch another employee's payroll by guessing its file name; verify
// against get_all_payrolls_by_email() before shipping.
$path = "./uploads/payrolls/".basename($this->input->get('filename'));
force_download($path, NULL);
} else {
redirect('/','refresh');
}
}
/**
 * Handles upload of a revised assignment document (doc/docx), marks the
 * original assignment row as pending ('P') and records the revision in
 * the history table. Topic and original file name arrive via GET.
 */
public function do_upload_revision()
{
if ( $this->session->has_userdata('user_session') &&
$this->session->userdata('user_session')['role'] == 'emp' &&
$this->input->get('topic') != null &&
$this->input->get('filename') != null ) {
$config['upload_path'] = './uploads/assignments/';
$config['allowed_types'] = 'doc|docx';
$this->upload->initialize($config);
if ( $this->upload->do_upload('revision')){
$data = $this->upload->data();
$email = $this->session->userdata('user_session')['email'];
$topic = $this->input->get('topic');
$filename = $this->input->get('filename');
$query = array(
'assignment' => $data['file_name'],
'status' => 'P',
'updatedttm' => date('Y-m-d H:i:s', now('Asia/Jakarta'))
);
$query2 = array(
'email' => $this->session->userdata('user_session')['email'],
'topic' => $this->input->get('topic'),
'assignment' => $data['file_name'],
'description' => $this->input->post('description')
);
// NOTE(review): trans_commit() is called unconditionally; consider
// checking $this->db->trans_status() and rolling back on failure.
$this->db->trans_begin();
$this->load->model('employee_model');
$this->employee_model->update_assignment( $email, $topic, $filename, $query );
$this->employee_model->insert_to_history( $query2 );
$this->db->trans_commit();
}
redirect('/employee/uploadassignment','refresh');
} else {
redirect('/','refresh');
}
}
/**
 * Handles upload of a brand-new assignment (doc/docx) with topic and
 * description from POST; inserts the assignment and a history row in one
 * transaction and reports success or the upload error via flashdata.
 */
public function do_uploadassignment()
{
if ( $this->session->has_userdata('user_session') && $this->session->userdata('user_session')['role'] == 'emp' ) {
$this->form_validation->set_rules('topic', 'Topik', 'trim|required');
$this->form_validation->set_rules('description', 'Description', 'trim|required');
if ($this->form_validation->run() == false) {
$this->session->set_flashdata('errors', validation_errors());
redirect('/employee/uploadassignment','refresh');
} else {
$config['upload_path'] = './uploads/assignments/';
$config['allowed_types'] = 'doc|docx';
$this->upload->initialize($config);
if ( $this->upload->do_upload('assignment')){
$data = $this->upload->data();
$query = array(
'email' => $this->session->userdata('user_session')['email'],
'topic' => $this->input->post('topic'),
'assignment' => $data['file_name'],
'description' => $this->input->post('description'),
'uploadedby' => 'emp'
);
$query2 = array(
'email' => $this->session->userdata('user_session')['email'],
'topic' => $this->input->post('topic'),
'assignment' => $data['file_name'],
'description' => $this->input->post('description')
);
// NOTE(review): trans_commit() is called unconditionally; consider
// checking $this->db->trans_status() and rolling back on failure.
$this->db->trans_begin();
$this->load->model('employee_model');
$this->employee_model->insert_assignment_by_email( $this->session->userdata('user_session')['email'], $query );
$this->employee_model->insert_to_history( $query2 );
$this->db->trans_commit();
$this->session->set_flashdata('success', 'Successfully upload assignment.');
redirect('/employee/uploadassignment','refresh');
}
else{
$this->session->set_flashdata('errors', $this->upload->display_errors());
redirect('/employee/uploadassignment','refresh');
}
}
} else {
redirect('/','refresh');
}
}
/**
 * Validates and saves profile changes (name, password, birth data, phone,
 * bank details) for the logged-in employee. The password is re-hashed
 * with bcrypt before storage.
 */
public function do_updateprofile()
{
if ( $this->session->has_userdata('user_session') && $this->session->userdata('user_session')['role'] == 'emp') {
$this->form_validation->set_rules('nama', 'Nama', 'trim|required');
$this->form_validation->set_rules('password', 'Password', 'trim|required|min_length[8]');
$this->form_validation->set_rules('conf-pass', 'Ulangi Password', 'trim|required|matches[password]');
$this->form_validation->set_rules('tempatlahir', 'Tempat Lahir', 'trim|required');
$this->form_validation->set_rules('tanggallahir', 'Tanggal Lahir', 'trim|required');
$this->form_validation->set_rules('mobile', 'No. HP.', 'trim|required|numeric');
$this->form_validation->set_rules('namabank', 'Nama Bank', 'trim|required');
$this->form_validation->set_rules('norekening', 'No. Rekening', 'trim|required|numeric');
if ($this->form_validation->run() == false) {
$this->session->set_flashdata('errors', validation_errors());
} else {
$data = array(
'nama' => $this->input->post('nama'),
'tempatlahir' => $this->input->post('tempatlahir'),
'tanggallahir' => $this->input->post('tanggallahir'),
'mobile' => $this->input->post('mobile'),
'password' => $this->bcrypt->hash_password($this->input->post('password')),
'namabank' => $this->input->post('namabank'),
'norekening' => $this->input->post('norekening')
);
$this->db->trans_begin();
$this->load->model('employee_model');
$this->employee_model->update_profile( $this->session->userdata('user_session')['email'], $data );
$this->db->trans_commit();
$this->session->set_flashdata('success', 'Successfully update profile!');
}
redirect('/employee/profile','refresh');
} else {
redirect('/','refresh');
}
}
}
/* End of file Employee.php */
/* Location: ./application/controllers/Employee.php */ | mit |
projecthydra-labs/solrizer-fedora | spec/integration/fedora_indexer_spec.rb | 534 | require File.expand_path(File.dirname(__FILE__) + '/../spec_helper')
require 'solrizer'
describe Solrizer::Fedora::Indexer do
  # Fresh indexer per example.
  before(:each) do
    @indexer = Solrizer::Fedora::Indexer.new
  end
  describe "index" do
    # Marked pending: the example still references a live fixture object
    # (druid:sb733gr4073) and real Fedora/Solr connections.
    it "should update solr with the metadata from the given object" do
      pending "Got to decide if/how to handle fixtures in this gem. Probably should just mock out Fedora & Solr entirely."
      obj = Solrizer::Repository.get_object( "druid:sb733gr4073" )
      @indexer.index( obj )
    end
  end
end
GPUdb/gpudb-api-java | api/src/main/java/com/gpudb/protocol/FilterByBoxRequest.java | 20988 | /*
* This file was autogenerated by the GPUdb schema processor.
*
* DO NOT EDIT DIRECTLY.
*/
package com.gpudb.protocol;
import java.util.LinkedHashMap;
import java.util.Map;
import org.apache.avro.Schema;
import org.apache.avro.SchemaBuilder;
import org.apache.avro.generic.GenericData;
import org.apache.avro.generic.IndexedRecord;
/**
* A set of parameters for {@link
* com.gpudb.GPUdb#filterByBox(FilterByBoxRequest)}.
* <p>
* Calculates how many objects within the given table lie in a
* rectangular box. The operation is synchronous, meaning that a response will
* not
* be returned until all the objects are fully available. The response payload
* provides the count of the resulting set. A new resultant set which satisfies
* the
* input NAI restriction specification is also created when a {@code viewName}
* is
* passed in as part of the input payload.
*/
public class FilterByBoxRequest implements IndexedRecord {
    // Avro schema for this request. NOTE: the field order declared here
    // defines the indices used by get(int)/put(int, Object) below, so the
    // two must stay in sync. (This file is autogenerated -- see header.)
    private static final Schema schema$ = SchemaBuilder
            .record("FilterByBoxRequest")
            .namespace("com.gpudb")
            .fields()
                .name("tableName").type().stringType().noDefault()
                .name("viewName").type().stringType().noDefault()
                .name("xColumnName").type().stringType().noDefault()
                .name("minX").type().doubleType().noDefault()
                .name("maxX").type().doubleType().noDefault()
                .name("yColumnName").type().stringType().noDefault()
                .name("minY").type().doubleType().noDefault()
                .name("maxY").type().doubleType().noDefault()
                .name("options").type().map().values().stringType().noDefault()
            .endRecord();

    /**
     * This method supports the Avro framework and is not intended to be called
     * directly by the user.
     *
     * @return the schema for the class.
     */
    public static Schema getClassSchema() {
        return schema$;
    }

    /**
     * String constants for the keys of the {@code options} map.
     */
    public static final class Options {
        /**
         * [DEPRECATED--please specify the containing schema for the view as
         * part of {@code viewName} and use {@link
         * com.gpudb.GPUdb#createSchema(CreateSchemaRequest)} to create the
         * schema if non-existent]  Name of a schema for the newly created
         * view. If the schema is non-existent, it will be automatically
         * created.
         */
        public static final String COLLECTION_NAME = "collection_name";

        // Constants holder only; never instantiated.
        private Options() {  }
    }

    private String tableName;
    private String viewName;
    private String xColumnName;
    private double minX;
    private double maxX;
    private String yColumnName;
    private double minY;
    private double maxY;
    private Map<String, String> options;

    /**
     * Constructs a FilterByBoxRequest object with default parameters
     * (empty strings, empty options map, numeric bounds left at 0).
     */
    public FilterByBoxRequest() {
        tableName = "";
        viewName = "";
        xColumnName = "";
        yColumnName = "";
        options = new LinkedHashMap<>();
    }

    /**
     * Constructs a FilterByBoxRequest object with the specified parameters.
     * Null strings and a null map are normalized to empty values.
     *
     * @param tableName  name of the existing table to filter, in
     *                   [schema_name.]table_name format
     * @param viewName  name for the view holding the results, or '' for none
     * @param xColumnName  numeric column queried for the x coordinate
     * @param minX  lower bound for {@code xColumnName}; must be &lt;= {@code maxX}
     * @param maxX  upper bound for {@code xColumnName}; must be &gt;= {@code minX}
     * @param yColumnName  numeric column queried for the y coordinate
     * @param minY  lower bound for {@code yColumnName}; must be &lt;= {@code maxY}
     * @param maxY  upper bound for {@code yColumnName}; must be &gt;= {@code minY}
     * @param options  optional parameters; see {@link Options} for valid keys
     */
    public FilterByBoxRequest(String tableName, String viewName, String xColumnName, double minX, double maxX, String yColumnName, double minY, double maxY, Map<String, String> options) {
        this.tableName = (tableName == null) ? "" : tableName;
        this.viewName = (viewName == null) ? "" : viewName;
        this.xColumnName = (xColumnName == null) ? "" : xColumnName;
        this.minX = minX;
        this.maxX = maxX;
        this.yColumnName = (yColumnName == null) ? "" : yColumnName;
        this.minY = minY;
        this.maxY = maxY;
        this.options = (options == null) ? new LinkedHashMap<String, String>() : options;
    }

    /**
     * @return name of the table on which the bounding box operation will be
     *         performed; always non-null.
     */
    public String getTableName() {
        return tableName;
    }

    /**
     * @param tableName  name of the existing table to filter
     *
     * @return {@code this} to mimic the builder pattern.
     */
    public FilterByBoxRequest setTableName(String tableName) {
        this.tableName = (tableName == null) ? "" : tableName;
        return this;
    }

    /**
     * @return name of the view that will contain the results, or '' if no
     *         view was requested.
     */
    public String getViewName() {
        return viewName;
    }

    /**
     * @param viewName  name for the result view; must not already exist
     *
     * @return {@code this} to mimic the builder pattern.
     */
    public FilterByBoxRequest setViewName(String viewName) {
        this.viewName = (viewName == null) ? "" : viewName;
        return this;
    }

    /**
     * @return name of the numeric column queried for the x coordinate.
     */
    public String getXColumnName() {
        return xColumnName;
    }

    /**
     * @param xColumnName  numeric column queried for the x coordinate
     *
     * @return {@code this} to mimic the builder pattern.
     */
    public FilterByBoxRequest setXColumnName(String xColumnName) {
        this.xColumnName = (xColumnName == null) ? "" : xColumnName;
        return this;
    }

    /**
     * @return lower bound for {@code xColumnName}; must be &lt;= {@code maxX}.
     */
    public double getMinX() {
        return minX;
    }

    /**
     * @param minX  lower bound for {@code xColumnName}
     *
     * @return {@code this} to mimic the builder pattern.
     */
    public FilterByBoxRequest setMinX(double minX) {
        this.minX = minX;
        return this;
    }

    /**
     * @return upper bound for {@code xColumnName}; must be &gt;= {@code minX}.
     */
    public double getMaxX() {
        return maxX;
    }

    /**
     * @param maxX  upper bound for {@code xColumnName}
     *
     * @return {@code this} to mimic the builder pattern.
     */
    public FilterByBoxRequest setMaxX(double maxX) {
        this.maxX = maxX;
        return this;
    }

    /**
     * @return name of the numeric column queried for the y coordinate.
     */
    public String getYColumnName() {
        return yColumnName;
    }

    /**
     * @param yColumnName  numeric column queried for the y coordinate
     *
     * @return {@code this} to mimic the builder pattern.
     */
    public FilterByBoxRequest setYColumnName(String yColumnName) {
        this.yColumnName = (yColumnName == null) ? "" : yColumnName;
        return this;
    }

    /**
     * @return lower bound for {@code yColumnName}; must be &lt;= {@code maxY}.
     */
    public double getMinY() {
        return minY;
    }

    /**
     * @param minY  lower bound for {@code yColumnName}
     *
     * @return {@code this} to mimic the builder pattern.
     */
    public FilterByBoxRequest setMinY(double minY) {
        this.minY = minY;
        return this;
    }

    /**
     * @return upper bound for {@code yColumnName}; must be &gt;= {@code minY}.
     */
    public double getMaxY() {
        return maxY;
    }

    /**
     * @param maxY  upper bound for {@code yColumnName}
     *
     * @return {@code this} to mimic the builder pattern.
     */
    public FilterByBoxRequest setMaxY(double maxY) {
        this.maxY = maxY;
        return this;
    }

    /**
     * @return the optional parameters map; see {@link Options} for valid
     *         keys. Never null (empty map by default).
     */
    public Map<String, String> getOptions() {
        return options;
    }

    /**
     * @param options  optional parameters; see {@link Options} for valid keys
     *
     * @return {@code this} to mimic the builder pattern.
     */
    public FilterByBoxRequest setOptions(Map<String, String> options) {
        this.options = (options == null) ? new LinkedHashMap<String, String>() : options;
        return this;
    }

    /**
     * This method supports the Avro framework and is not intended to be called
     * directly by the user.
     *
     * @return the schema object describing this class.
     */
    @Override
    public Schema getSchema() {
        return schema$;
    }

    /**
     * This method supports the Avro framework and is not intended to be called
     * directly by the user. Index positions mirror the field order declared
     * in {@code schema$}.
     *
     * @param index the position of the field to get
     *
     * @return value of the field with the given index.
     *
     * @throws IndexOutOfBoundsException
     */
    @Override
    public Object get(int index) {
        switch (index) {
            case 0:
                return this.tableName;
            case 1:
                return this.viewName;
            case 2:
                return this.xColumnName;
            case 3:
                return this.minX;
            case 4:
                return this.maxX;
            case 5:
                return this.yColumnName;
            case 6:
                return this.minY;
            case 7:
                return this.maxY;
            case 8:
                return this.options;
            default:
                throw new IndexOutOfBoundsException("Invalid index specified.");
        }
    }

    /**
     * This method supports the Avro framework and is not intended to be called
     * directly by the user. Index positions mirror the field order declared
     * in {@code schema$}.
     *
     * @param index the position of the field to set
     * @param value the value to set
     *
     * @throws IndexOutOfBoundsException
     */
    @Override
    @SuppressWarnings("unchecked")
    public void put(int index, Object value) {
        switch (index) {
            case 0:
                this.tableName = (String)value;
                break;
            case 1:
                this.viewName = (String)value;
                break;
            case 2:
                this.xColumnName = (String)value;
                break;
            case 3:
                this.minX = (Double)value;
                break;
            case 4:
                this.maxX = (Double)value;
                break;
            case 5:
                this.yColumnName = (String)value;
                break;
            case 6:
                this.minY = (Double)value;
                break;
            case 7:
                this.maxY = (Double)value;
                break;
            case 8:
                this.options = (Map<String, String>)value;
                break;
            default:
                throw new IndexOutOfBoundsException("Invalid index specified.");
        }
    }

    @Override
    public boolean equals(Object obj) {
        if( obj == this ) {
            return true;
        }

        if( (obj == null) || (obj.getClass() != this.getClass()) ) {
            return false;
        }

        FilterByBoxRequest that = (FilterByBoxRequest)obj;

        // Doubles are compared via their boxed equals(), i.e. by bit
        // pattern, which keeps equals() consistent with hashCode() below.
        return ( this.tableName.equals( that.tableName )
                 && this.viewName.equals( that.viewName )
                 && this.xColumnName.equals( that.xColumnName )
                 && ( (Double)this.minX ).equals( (Double)that.minX )
                 && ( (Double)this.maxX ).equals( (Double)that.maxX )
                 && this.yColumnName.equals( that.yColumnName )
                 && ( (Double)this.minY ).equals( (Double)that.minY )
                 && ( (Double)this.maxY ).equals( (Double)that.maxY )
                 && this.options.equals( that.options ) );
    }

    @Override
    public String toString() {
        // GenericData.toString() renders each value in Avro's JSON-style
        // text form.
        GenericData gd = GenericData.get();
        StringBuilder builder = new StringBuilder();
        builder.append( "{" );
        builder.append( gd.toString( "tableName" ) );
        builder.append( ": " );
        builder.append( gd.toString( this.tableName ) );
        builder.append( ", " );
        builder.append( gd.toString( "viewName" ) );
        builder.append( ": " );
        builder.append( gd.toString( this.viewName ) );
        builder.append( ", " );
        builder.append( gd.toString( "xColumnName" ) );
        builder.append( ": " );
        builder.append( gd.toString( this.xColumnName ) );
        builder.append( ", " );
        builder.append( gd.toString( "minX" ) );
        builder.append( ": " );
        builder.append( gd.toString( this.minX ) );
        builder.append( ", " );
        builder.append( gd.toString( "maxX" ) );
        builder.append( ": " );
        builder.append( gd.toString( this.maxX ) );
        builder.append( ", " );
        builder.append( gd.toString( "yColumnName" ) );
        builder.append( ": " );
        builder.append( gd.toString( this.yColumnName ) );
        builder.append( ", " );
        builder.append( gd.toString( "minY" ) );
        builder.append( ": " );
        builder.append( gd.toString( this.minY ) );
        builder.append( ", " );
        builder.append( gd.toString( "maxY" ) );
        builder.append( ": " );
        builder.append( gd.toString( this.maxY ) );
        builder.append( ", " );
        builder.append( gd.toString( "options" ) );
        builder.append( ": " );
        builder.append( gd.toString( this.options ) );
        builder.append( "}" );
        return builder.toString();
    }

    @Override
    public int hashCode() {
        // Standard 31-multiplier accumulation over all fields, matching the
        // field set used by equals().
        int hashCode = 1;
        hashCode = (31 * hashCode) + this.tableName.hashCode();
        hashCode = (31 * hashCode) + this.viewName.hashCode();
        hashCode = (31 * hashCode) + this.xColumnName.hashCode();
        hashCode = (31 * hashCode) + ((Double)this.minX).hashCode();
        hashCode = (31 * hashCode) + ((Double)this.maxX).hashCode();
        hashCode = (31 * hashCode) + this.yColumnName.hashCode();
        hashCode = (31 * hashCode) + ((Double)this.minY).hashCode();
        hashCode = (31 * hashCode) + ((Double)this.maxY).hashCode();
        hashCode = (31 * hashCode) + this.options.hashCode();
        return hashCode;
    }
}
| mit |
pcase/Shopper | app/src/main/java/com/azurehorsecreations/shopper/presentation/ui/navigation/SplashNavigator.java | 665 | package com.azurehorsecreations.shopper.presentation.ui.navigation;
import android.content.Context;
import android.content.Intent;
/**
* Created by pattycase on 9/14/17.
*/
/**
 * Navigator that launches a fixed destination activity from the splash
 * screen context.
 */
public class SplashNavigator implements INavigator {

    /** Context from which the destination activity is started. */
    private final Context launchContext;

    /** Activity class this navigator opens. */
    private final Class<?> destination;

    public SplashNavigator(Context activityContext, Class<?> cls) {
        launchContext = activityContext;
        destination = cls;
    }

    /** Starts the configured destination activity. */
    @Override
    public void launchActivity() {
        launchContext.startActivity(new Intent(launchContext, destination));
    }
}
| mit |
teadur/registry | app/models/legacy/object_history.rb | 276 | module Legacy
class ObjectHistory < Db
self.table_name = :object_history
belongs_to :object_registry, foreign_key: :historyid
def self.instance_method_already_implemented?(method_name)
return true if method_name == 'update'
super
end
end
end
| mit |
behance/BeFF | util/image.d.ts | 271 | export interface BeImage {
  /** True when the given image data is an animated GIF. */
  isAnimatedGif(bits: any): boolean;
  /** Reads the pixel width and height from the given image data. */
  getDimensions(bits: any): { width: number; height: number; };
  /** Extracts the raw binary payload from a data: URI string. */
  getBinaryFromDataUri(dataUri: string): string;
  /** True when the given image data uses the CMYK colour space. */
  isCMYK(bits: any): boolean;
}
declare const _default: BeImage;
export default _default;
| mit |
Sleekwater/Switchboard | src/org/sleekwater/switchboard/servlet/PlayComplete.java | 1606 | package org.sleekwater.switchboard.servlet;
import java.io.IOException;
import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.sleekwater.switchboard.Device;
import org.sleekwater.switchboard.Devices;
@WebServlet(description = "Servlet handler for Plivo outbound calls", urlPatterns = { "/PlayAudio/PlayComplete" }, loadOnStartup=1)
public class PlayComplete extends HttpServlet {
    private static final long serialVersionUID = 1L;

    /**
     * Plivo calls this endpoint (via GET) when an outbound audio play has
     * finished; delegates to the shared handler.
     */
    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        handlePlayComplete(request, response, "PlayComplete ");
    }

    /**
     * POST variant of the same callback; delegates to the shared handler.
     */
    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse resp) throws ServletException, IOException {
        handlePlayComplete(request, resp, "PlayComplete doPost");
    }

    /**
     * Shared GET/POST handling: looks up the device addressed by the "To"
     * parameter, marks its audio play as complete, and returns the empty
     * XML document Plivo expects. (Previously this body was duplicated in
     * doGet and doPost.)
     *
     * @param logPrefix prefix for the console log line; kept distinct per
     *                  verb to preserve the original log output exactly
     */
    private void handlePlayComplete(HttpServletRequest request, HttpServletResponse response, String logPrefix)
            throws IOException {
        String to = request.getParameter("To");
        System.out.println(logPrefix + to);
        Device d = Devices.d.get(to);
        if (null != d)
            d.completeAudioPlay();
        // Plivo is expecting some XML, so return it here
        String emptyXML = "<Response></Response>";
        response.addHeader("Content-Type", "text/xml");
        response.getWriter().print(emptyXML);
    }
} | mit |
WAPOSAT/LandingWeb1 | assets/contactanos.gobierno.data.php | 260 | <?php
/*
    Category options for the government-sector "Contactanos" form.
    (The original header said "Industry" -- a copy/paste leftover from the
    industry variant of this data file.)
    *** Parameters Required ***
    NOTHING
    ***
*/
// First entry is presumably the placeholder label for the select control;
// the remaining entries are the selectable categories -- TODO confirm
// against the consuming form markup.
$Category = array(
    'Sector','Sector gubernamental', 'Gobierno distrital', 'Ministerio', 'Municipalidad', 'ONGs', 'Otros'
);
// Emit the category list as a JSON array for the front-end.
echo json_encode($Category);
?> | mit |
frostburn/panel-league | lib/ui/panel-league/garbage.js | 1518 | import { el, mount, svg } from 'redom';
import { getBlockSize } from './utils';
/**
* Draws random SVG circles on given element.
*/
/**
 * Sizes/positions the given element for the slab geometry and fills it
 * with randomly placed, randomly dark-tinted SVG circles.
 * (Parameter renamed from `el` to avoid shadowing the imported `el`.)
 */
function decorate(root, { x, width, height }) {
  root.style.width = getBlockSize(width);
  root.style.left = getBlockSize(x);

  const canvas = svg('svg', { width: '100%', height: '100%' });
  mount(root, canvas);

  const circleCount = width * height * 4;
  for (let i = 0; i < circleCount; ++i) {
    const fill = `rgb(${60 + Math.floor(20 * Math.random())}, ${50 + Math.floor(15 * Math.random())}, ${40 + Math.floor(10 * Math.random())})`;
    mount(canvas, svg('circle', {
      fill,
      r: `${Math.random() / 2}em`,
      cx: getBlockSize(Math.random() * width),
      cy: getBlockSize(Math.random() * height),
    }));
  }
}
export default class GarbageSlab {
  constructor(slab) {
    this.el = el('.garbage');
    decorate(this.el, slab);
  }

  get isFlashing() {
    return this.el.classList.contains('flashing');
  }

  set isFlashing(value) {
    // classList.toggle with a force argument adds when true and removes
    // when false (no-op if already in that state) -- exactly the add/remove
    // branches this replaces.
    this.el.classList.toggle('flashing', Boolean(value));
  }

  update(state, slab) {
    const flashing = slab.flashTimer >= 0;
    // While the slab is flashing, drop its bottom row from this element:
    // one block shorter and shifted up by one.
    const height = flashing ? slab.height - 1 : slab.height;
    const y = flashing ? slab.y + 1 : slab.y;

    this.el.style.height = getBlockSize(height);
    this.el.style.top = getBlockSize(state.height - y - height);
    this.isFlashing = flashing;
  }
}
| mit |
LuccaSA/lucca-front | packages/ng/dropdown/src/lib/index.ts | 129 | export * from './dropdown.module';
export * from './item/index';
export * from './panel/index';
export * from './trigger/index';
| mit |
aethelwyrd/GreasyFriedChicken | syntax_rs/src/main.rs | 1966 | //rusty.rs
//Copyright 2015 David Huddle
mod my_module;
mod main_mod;
mod test;
use main_mod::*;
/// Demo driver: exercises the syntax examples defined in `main_mod` and
/// `my_module` (control flow, vectors, borrowing, Option, generics, ...).
fn main() {
    console_out();
    match_is_like_switch(2,5);
    for_in_is_for_each();
    while_ex();
    loop_ex();

    //vector
    // declare as ref so we can allow borrowing
    let ref nums = vec![1i32, 2i32, 3i32];
    let othernums = vec!{2i32, 1i32, 3i32};
    // deref nums (a &Vec) to compare element-wise with othernums
    if *nums == othernums { println!("vector: equal"); }
    else { println!("vector: not equal"); }
    //nums is borrowed here
    vector_interation_by_ref(nums);
    //nums is still valid because ref
    //vector random access
    let element = nums[1];
    println!("{} is the 2nd element in vec", element);
    //Box (pointer) othernums and make it ref mut
    //also othernums is now moved into the Box and out of scope
    let ref mut mut_vec = Box::new(othernums);
    //mut_vec is borrowed and mut
    vector_mod_by_ref(mut_vec);
    //mut_vec is borrowed not mut
    vector_interation_by_ref(mut_vec);

    //nullable type: Option<i32> demo
    let notnull = Some(28i32);
    let amnull = None;
    print_option(notnull);
    print_option(amnull);

    my_module::print_mod();
    let firstpair = my_module::Pair{ first:23i32, second:67i32 };
    println!("first: {}, second: {}", firstpair.first, firstpair.second);
    let secondpair = my_module::generic_swap(firstpair);
    println!("first: {}, second: {}", secondpair.first, secondpair.second);
    my_module::range_matching(0.25);
    my_module::range_matching(0.5);
    my_module::range_matching(1.5);
    my_module::tuple_struct();
    my_module::iterator_example();
    my_module::fizz_buzz((1..20).collect::<Vec<_>>());
    my_module::fizz_buzz2((1..20).collect::<Vec<_>>());

    //play_ground
    let ref mut myvar = 45i32;
    *myvar = 22i32;
    // myothervar is a reference to a reference, hence the double deref below
    let myothervar = &myvar;
    play_ground(**myothervar);
    // play_ground(myothervar);
}
/// Prints the given value, labelled as "bob".
fn play_ground(bob: i32) {
    let message = format!("this is bob: {}", bob);
    println!("{}", message);
}
// Sanity check that the harness honours #[should_panic]: the assertion
// below always fails, so this test passes only because it panics.
#[test]
#[should_panic]
fn test_in_main(){
    assert!(false);
}
| mit |
eberlitz/gym-dev-unisinos | src/app/register/register.component.ts | 1399 | import { Component, OnInit } from '@angular/core';
import { Router } from '@angular/router';
import { TdLoadingService } from '@covalent/core';
import { AuthService } from '../../services/auth.service';
import { IUser } from '../users/services/user.service';
import { HttpInterceptorService } from '@covalent/http';
@Component({
  selector: 'ag-register',
  templateUrl: './register.component.html',
  styleUrls: ['./register.component.scss']
})
/**
 * Sign-up form component: registers a new local-credential user and, on
 * success, logs the user in and navigates to the home route.
 */
export class RegisterComponent {
  // Form model, bound from the template.
  name: string = '';
  email: string = '';
  username: string = '';
  password: string = '';

  constructor(
    private _router: Router,
    private _loadingService: TdLoadingService,
    private _authService: AuthService,
    private _http: HttpInterceptorService,
  ) {
  }

  /**
   * Submits the registration form: shows the loading indicator, POSTs the
   * new user to `~/register`, authenticates with the same credentials,
   * then navigates home. Failures are surfaced via alert(); the loading
   * indicator is always cleared in the finally block.
   */
  async register() {
    try {
      this._loadingService.register();
      const user = <IUser>{
        name: this.name,
        email: this.email,
        local: {
          username: this.username,
          password: this.password
        }
      };
      await this._http.post('~/register', JSON.stringify(user))
        .map((a) => a.json())
        .toPromise();
      // Re-use the just-registered credentials for the initial sign-in.
      await this._authService.auth('local', {
        username: this.username,
        password: this.password
      });
      this._router.navigate(['/']);
    } catch (error) {
      // NOTE(review): errors are shown via a blocking alert -- consider a
      // non-blocking notification instead.
      alert(error);
    } finally {
      this._loadingService.resolve();
    }
  }
}
| mit |
Evairfairy/AESharp | AESharp.Logon/Repositories/RealmRepository.cs | 1643 | using System.Collections.Generic;
namespace AESharp.Logon.Repositories
{
public class RealmRepository
{
private readonly List<Realm> _realms = new List<Realm>
{
new Realm
{
Name = "Evairfairy's Test Realm",
Address = "127.0.0.1:8095",
Type = RealmType.Normal,
Flags = RealmFlags.Recommended,
IsLocked = false,
Region = RealmRegion.QA
},
new Realm
{
Name = "Zyres' Test Realm",
Address = "127.0.0.1:8096",
Type = RealmType.PVP,
Flags = RealmFlags.NewPlayers,
IsLocked = false,
Region = RealmRegion.QA
},
new Realm
{
Name = "Tony's Test Realm",
Address = "127.0.0.1:8097",
Type = RealmType.RP,
Flags = RealmFlags.Full,
IsLocked = false,
Region = RealmRegion.QA
},
new Realm
{
Name = "Rakinishu's Test Realm",
Address = "127.0.0.1:8098",
Type = RealmType.RPPVP,
Flags = RealmFlags.RedName,
IsLocked = true,
Region = RealmRegion.QA
}
};
public List<Realm> GetRealms()
{
var realms = new List<Realm>();
foreach (var realm in _realms)
{
realms.Add(realm);
}
return realms;
}
}
} | mit |
rodrigoflores/punt-receiver | lib/punt/receiver/retriever.rb | 561 | require 'octokit'
module Punt
  module Receiver
    # Fetches open pull requests for the configured repositories and writes
    # one tab-separated line (title, author login, created-at, URL) per PR.
    class Retriever
      # The client class is injectable so tests can substitute a fake.
      def initialize(config, octokit_client = Octokit::Client)
        @config = config
        @client = octokit_client.new(:access_token => @config.github_token)
      end

      def retrieve_pull_requests(output = $stdout)
        @config.relevant_repos.each do |repo|
          @client.pull_requests(repo).each do |pr|
            output.puts [pr[:title], pr[:user][:login], pr[:created_at], pr[:html_url]].join("\t")
          end
        end
      end
    end
  end
end
| mit |
FacticiusVir/SharpVk | src/SharpVk/PhysicalDeviceShaderFloat16Int8Features.gen.cs | 3213 | // The MIT License (MIT)
//
// Copyright (c) Andrew Armstrong/FacticiusVir 2020
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
// This file was automatically generated and should not be edited directly.
using System;
using System.Runtime.InteropServices;
namespace SharpVk
{
/// <summary>
/// Describes features supported by VK_KHR_shader_float16_int8
/// </summary>
    [StructLayout(LayoutKind.Sequential)]
    public partial struct PhysicalDeviceShaderFloat16Int8Features
    {
        /// <summary>
        /// Indicates whether 16-bit floats (halfs) are supported in shader
        /// code. This also indicates whether shader modules can declare the
        /// Float16 capability.
        /// </summary>
        public bool ShaderFloat16
        {
            get;
            set;
        }

        /// <summary>
        /// Indicates whether 8-bit integers (signed and unsigned) are
        /// supported in shader code. This also indicates whether shader
        /// modules can declare the Int8 capability.
        /// </summary>
        public bool ShaderInt8
        {
            get;
            set;
        }

        /// <summary>
        /// Writes this structure into its native interop representation.
        /// SType is fixed to the matching structure-type constant and the
        /// extension chain (Next) is always cleared to null.
        /// </summary>
        /// <param name="pointer">
        /// Destination native structure; must point to writable memory.
        /// </param>
        internal unsafe void MarshalTo(SharpVk.Interop.PhysicalDeviceShaderFloat16Int8Features* pointer)
        {
            pointer->SType = StructureType.PhysicalDeviceShaderFloat16Int8FeaturesVersion;
            pointer->Next = null;
            pointer->ShaderFloat16 = this.ShaderFloat16;
            pointer->ShaderInt8 = this.ShaderInt8;
        }

        /// <summary>
        /// Builds a managed copy from the native interop representation.
        /// Only the two feature flags are read; SType/Next are ignored.
        /// </summary>
        /// <param name="pointer">
        /// Source native structure to copy from.
        /// </param>
        internal static unsafe PhysicalDeviceShaderFloat16Int8Features MarshalFrom(SharpVk.Interop.PhysicalDeviceShaderFloat16Int8Features* pointer)
        {
            PhysicalDeviceShaderFloat16Int8Features result = default(PhysicalDeviceShaderFloat16Int8Features);
            result.ShaderFloat16 = pointer->ShaderFloat16;
            result.ShaderInt8 = pointer->ShaderInt8;
            return result;
        }
    }
}
| mit |
tadas-s/omniauth-edmodo | lib/omniauth/strategies/edmodo.rb | 1188 | require 'omniauth-oauth2'
module OmniAuth
  module Strategies
    # OmniAuth OAuth2 strategy for Edmodo (https://api.edmodo.com).
    #
    # NOTE: the previous +request_phase+ override did nothing but call
    # +super+ and has been removed; the inherited implementation is used.
    class Edmodo < OmniAuth::Strategies::OAuth2
      option :client_options, {
        :site => 'https://api.edmodo.com',
        :authorize_url => 'https://api.edmodo.com/oauth/authorize',
        :token_url => 'https://api.edmodo.com/oauth/token'
      }

      # Forward selected request parameters into the authorize request so
      # callers can override them per request.
      def authorize_params
        super.tap do |params|
          %w[scope client_options].each do |v|
            if request.params[v]
              params[v.to_sym] = request.params[v]
            end
          end
        end
      end

      # Edmodo user id, stringified.
      uid { raw_info['id'].to_s }

      info do
        {
          'nickname' => raw_info['username'],
          'email' => raw_info['email'],
          'first_name' => raw_info['first_name'],
          'last_name' => raw_info['last_name'],
          'image' => raw_info['avatars']['large']
        }
      end

      extra do
        {:raw_info => raw_info}
      end

      # Fetches the authenticated user's profile; memoized per request.
      def raw_info
        access_token.options[:mode] = :header
        @raw_info ||= access_token.get('users/me').parsed
      end
    end
  end
end
OmniAuth.config.add_camelization 'edmodo', 'Edmodo'
| mit |
AceCoin/AceCoin | src/irc.cpp | 10993 | // Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "irc.h"
#include "net.h"
#include "strlcpy.h"
#include "base58.h"
using namespace std;
using namespace boost;
int nGotIRCAddresses = 0;
void ThreadIRCSeed2(void* parg);
#pragma pack(push, 1)
// Wire layout of an advertised peer address: a 4-byte IPv4 address
// followed by a 2-byte port in network byte order. Packed so the struct's
// raw bytes can be base58check-encoded verbatim (see EncodeAddress /
// DecodeAddress below).
struct ircaddr
{
    struct in_addr ip;
    short port;
};
#pragma pack(pop)
// Encode a service address as an IRC nick payload: the literal 'u'
// followed by base58check(ip || port). Returns an empty string when the
// address has no IPv4 representation (GetInAddr fails).
string EncodeAddress(const CService& addr)
{
    struct ircaddr tmp;
    if (!addr.GetInAddr(&tmp.ip))
        return "";

    tmp.port = htons(addr.GetPort());
    vector<unsigned char> vch(UBEGIN(tmp), UEND(tmp));
    return string("u") + EncodeBase58Check(vch);
}
// Inverse of EncodeAddress: skip the leading 'u', base58check-decode the
// remainder, and reinterpret the 6-byte payload as ip/port. Returns false
// when the checksum fails or the payload size is wrong.
bool DecodeAddress(string str, CService& addr)
{
    vector<unsigned char> payload;
    if (!DecodeBase58Check(str.substr(1), payload))
        return false;

    struct ircaddr packed;
    if (payload.size() != sizeof(packed))
        return false;

    memcpy(&packed, &payload[0], sizeof(packed));
    addr = CService(packed.ip, ntohs(packed.port));
    return true;
}
// Write the full NUL-terminated command to the socket, looping until every
// byte has been accepted. Commands are logged except PONG replies (keeps
// keepalive chatter out of the output). Returns false on any send error.
static bool Send(SOCKET hSocket, const char* pszSend)
{
    // Log unless the command starts with "PONG".
    if (strstr(pszSend, "PONG") != pszSend)
        printf("IRC SENDING: %s\n", pszSend);
    const char* psz = pszSend;
    const char* pszEnd = psz + strlen(psz);
    while (psz < pszEnd)
    {
        // send() may accept only part of the buffer; advance and retry.
        // MSG_NOSIGNAL stops a closed peer from raising SIGPIPE.
        int ret = send(hSocket, psz, pszEnd - psz, MSG_NOSIGNAL);
        if (ret < 0)
            return false;
        psz += ret;
    }
    return true;
}
// Receive one line from the IRC server, transparently answering server
// PINGs (by rewriting "PING..." to "PONG..." and sending it back) so
// callers never see them. Returns false on disconnect or shutdown.
bool RecvLineIRC(SOCKET hSocket, string& strLine)
{
    for (;;)
    {
        if (!RecvLine(hSocket, strLine))
            return false;
        if (fShutdown)
            return false;

        vector<string> vWords;
        ParseString(strLine, ' ', vWords);
        if (!vWords.empty() && vWords[0] == "PING")
        {
            // Turn "PING ..." into "PONG ..." in place and echo it back.
            strLine[1] = 'O';
            strLine += '\r';
            Send(hSocket, strLine.c_str());
            continue;
        }
        return true;
    }
}
// Read lines (answering pings via RecvLineIRC) until one of up to four
// needle strings appears. Returns the 1-based index of the first needle
// found on the line, or 0 if the connection drops first.
int RecvUntil(SOCKET hSocket, const char* psz1, const char* psz2=NULL, const char* psz3=NULL, const char* psz4=NULL)
{
    const char* apszNeedles[] = { psz1, psz2, psz3, psz4 };
    for (;;)
    {
        string strLine;
        strLine.reserve(10000);
        if (!RecvLineIRC(hSocket, strLine))
            return 0;
        printf("IRC %s\n", strLine.c_str());

        for (int i = 0; i < 4; i++)
            if (apszNeedles[i] && strLine.find(apszNeedles[i]) != string::npos)
                return i + 1;
    }
}
// Sleep for nSeconds in one-second slices so shutdown is noticed quickly.
// Returns false if the client is shutting down, true after the full wait.
bool Wait(int nSeconds)
{
    if (fShutdown)
        return false;
    printf("IRC waiting %d seconds to reconnect\n", nSeconds);
    int nRemaining = nSeconds;
    while (nRemaining-- > 0)
    {
        if (fShutdown)
            return false;
        MilliSleep(1000);
    }
    return true;
}
// Read lines until one whose second word equals psz1 (an IRC numeric reply
// code, e.g. "302") arrives; that line is logged and returned in strRet.
// Returns false if the connection drops first.
bool RecvCodeLine(SOCKET hSocket, const char* psz1, string& strRet)
{
    strRet.clear();
    for (;;)
    {
        string strLine;
        if (!RecvLineIRC(hSocket, strLine))
            return false;

        vector<string> vWords;
        ParseString(strLine, ' ', vWords);
        if (vWords.size() < 2 || vWords[1] != psz1)
            continue;

        printf("IRC %s\n", strLine.c_str());
        strRet = strLine;
        return true;
    }
}
// Ask the IRC server for our externally visible address via "USERHOST".
// The 302 reply word looks like "nick=+user@host"; everything after the
// last '@' is resolved into ipRet. Returns false when no 302 reply arrives,
// the reply is malformed, or the host part does not resolve.
bool GetIPFromIRC(SOCKET hSocket, string strMyName, CNetAddr& ipRet)
{
    Send(hSocket, strprintf("USERHOST %s\r", strMyName.c_str()).c_str());

    string strLine;
    if (!RecvCodeLine(hSocket, "302", strLine))
        return false;

    vector<string> vWords;
    ParseString(strLine, ' ', vWords);
    if (vWords.size() < 4)
        return false;

    // Take the host portion after the last '@' of the userhost word.
    string str = vWords[3];
    if (str.rfind("@") == string::npos)
        return false;
    string strHost = str.substr(str.rfind("@")+1);

    // Hybrid IRC used by lfnet always returns IP when you userhost yourself,
    // but in case another IRC is ever used this should work.
    printf("GetIPFromIRC() got userhost %s\n", strHost.c_str());
    CNetAddr addr(strHost, true);
    if (!addr.IsValid())
        return false;
    ipRet = addr;
    return true;
}
// Thread entry point for IRC seeding: names the thread, runs the worker,
// and logs (without rethrowing) anything the worker throws so that an
// exception cannot escape the thread boundary.
void ThreadIRCSeed(void* parg)
{
    // Make this thread recognisable as the IRC seeding thread
    RenameThread("AceCoin-ircseed");

    try
    {
        ThreadIRCSeed2(parg);
    }
    catch (std::exception& e) {
        PrintExceptionContinue(&e, "ThreadIRCSeed()");
    } catch (...) {
        PrintExceptionContinue(NULL, "ThreadIRCSeed()");
    }
    printf("ThreadIRCSeed exited\n");
}
// Worker for the IRC seeding thread: connects to irc.lfnet.org, advertises
// our own address (encoded into the nickname) and harvests peer addresses
// from WHO replies and channel JOIN notices until shutdown is requested.
void ThreadIRCSeed2(void* parg)
{
    // Don't connect to IRC if we won't use IPv4 connections.
    if (IsLimited(NET_IPV4))
        return;

    // ... or if we won't make outbound connections and won't accept inbound ones.
    if (mapArgs.count("-connect") && fNoListen)
        return;

    // ... or if IRC is not enabled.
    if (!GetBoolArg("-irc", false))
        return;

    printf("ThreadIRCSeed started\n");
    int nErrorWait = 10;
    int nRetryWait = 10;
    int nNameRetry = 0;

    while (!fShutdown)
    {
        // Hard-coded fallback IP, overridden by a DNS lookup when it succeeds.
        CService addrConnect("92.243.23.21", 6667); // irc.lfnet.org
        CService addrIRC("irc.lfnet.org", 6667, true);
        if (addrIRC.IsValid())
            addrConnect = addrIRC;

        SOCKET hSocket;
        if (!ConnectSocket(addrConnect, hSocket))
        {
            printf("IRC connect failed\n");
            // Grow the back-off (~10% plus a minute) between failed attempts.
            nErrorWait = nErrorWait * 11 / 10;
            if (Wait(nErrorWait += 60))
                continue;
            else
                return;
        }

        // Wait for the server's hostname-lookup banner before registering.
        if (!RecvUntil(hSocket, "Found your hostname", "using your IP address instead", "Couldn't look up your hostname", "ignoring hostname"))
        {
            closesocket(hSocket);
            hSocket = INVALID_SOCKET;
            nErrorWait = nErrorWait * 11 / 10;
            if (Wait(nErrorWait += 60))
                continue;
            else
                return;
        }

        CNetAddr addrIPv4("1.2.3.4"); // arbitrary IPv4 address to make GetLocal prefer IPv4 addresses
        CService addrLocal;
        string strMyName;
        // Don't use our IP as our nick if we're not listening
        // or if it keeps failing because the nick is already in use.
        if (!fNoListen && GetLocal(addrLocal, &addrIPv4) && nNameRetry<3)
            strMyName = EncodeAddress(GetLocalAddress(&addrConnect));
        // Fall back to a random throwaway nickname.
        if (strMyName == "")
            strMyName = strprintf("x%"PRIu64"", GetRand(1000000000));

        Send(hSocket, strprintf("NICK %s\r", strMyName.c_str()).c_str());
        Send(hSocket, strprintf("USER %s 8 * : %s\r", strMyName.c_str(), strMyName.c_str()).c_str());

        // 004 = registration complete; 433 = nickname already in use.
        int nRet = RecvUntil(hSocket, " 004 ", " 433 ");
        if (nRet != 1)
        {
            closesocket(hSocket);
            hSocket = INVALID_SOCKET;
            if (nRet == 2)
            {
                printf("IRC name already in use\n");
                nNameRetry++;
                Wait(10);
                continue;
            }
            nErrorWait = nErrorWait * 11 / 10;
            if (Wait(nErrorWait += 60))
                continue;
            else
                return;
        }
        nNameRetry = 0;
        MilliSleep(500);

        // Get our external IP from the IRC server and re-nick before joining the channel
        CNetAddr addrFromIRC;
        if (GetIPFromIRC(hSocket, strMyName, addrFromIRC))
        {
            printf("GetIPFromIRC() returned %s\n", addrFromIRC.ToString().c_str());
            // Don't use our IP as our nick if we're not listening
            if (!fNoListen && addrFromIRC.IsRoutable())
            {
                // IRC lets you re-nick
                AddLocal(addrFromIRC, LOCAL_IRC);
                strMyName = EncodeAddress(GetLocalAddress(&addrConnect));
                Send(hSocket, strprintf("NICK %s\r", strMyName.c_str()).c_str());
            }
        }

        if (fTestNet) {
            Send(hSocket, "JOIN #AceCoinTEST\r");
            Send(hSocket, "WHO #AceCoinTEST\r");
        } else {
            // randomly join #AceCoin00-#AceCoin05
            //int channel_number = GetRandInt(5);
            int channel_number = 0;
            // Channel number is always 0 for initial release
            //int channel_number = 0;
            Send(hSocket, strprintf("JOIN #AceCoin%02d\r", channel_number).c_str());
            Send(hSocket, strprintf("WHO #AceCoin%02d\r", channel_number).c_str());
        }

        int64_t nStart = GetTime();
        string strLine;
        strLine.reserve(10000);
        while (!fShutdown && RecvLineIRC(hSocket, strLine))
        {
            // Only process server-prefixed lines of sane length.
            if (strLine.empty() || strLine.size() > 900 || strLine[0] != ':')
                continue;

            vector<string> vWords;
            ParseString(strLine, ' ', vWords);
            if (vWords.size() < 2)
                continue;

            char pszName[10000];
            pszName[0] = '\0';

            // 352 = WHO reply: harvest the username field.
            if (vWords[1] == "352" && vWords.size() >= 8)
            {
                // index 7 is limited to 16 characters
                // could get full length name at index 10, but would be different from join messages
                strlcpy(pszName, vWords[7].c_str(), sizeof(pszName));
                printf("IRC got who\n");
            }

            if (vWords[1] == "JOIN" && vWords[0].size() > 1)
            {
                // :username!username@50000007.F000000B.90000002.IP JOIN :#channelname
                strlcpy(pszName, vWords[0].c_str() + 1, sizeof(pszName));
                if (strchr(pszName, '!'))
                    *strchr(pszName, '!') = '\0';
                printf("IRC got join\n");
            }

            // NOTE(review): nicknames starting with 'u' are presumably the
            // encoded-address form produced by EncodeAddress() — confirm there.
            if (pszName[0] == 'u')
            {
                CAddress addr;
                if (DecodeAddress(pszName, addr))
                {
                    addr.nTime = GetAdjustedTime();
                    if (addrman.Add(addr, addrConnect, 51 * 60))
                        printf("IRC got new address: %s\n", addr.ToString().c_str());
                    nGotIRCAddresses++;
                }
                else
                {
                    printf("IRC decode failed\n");
                }
            }
        }
        closesocket(hSocket);
        hSocket = INVALID_SOCKET;

        // A long-lived session means the network is healthy: relax back-offs.
        if (GetTime() - nStart > 20 * 60)
        {
            nErrorWait /= 3;
            nRetryWait /= 3;
        }
        nRetryWait = nRetryWait * 11 / 10;
        if (!Wait(nRetryWait += 60))
            return;
    }
}
#ifdef TEST
// Standalone test harness: initialize Winsock, run the IRC seed thread
// synchronously on the calling thread, then clean up.
int main(int argc, char *argv[])
{
    WSADATA wsadata;
    if (WSAStartup(MAKEWORD(2,2), &wsadata) != NO_ERROR)
    {
        printf("Error at WSAStartup()\n");
        // Bug fix: the original `return false;` converted to exit code 0,
        // signalling success to the shell even though startup failed.
        return 1;
    }

    ThreadIRCSeed(NULL);

    WSACleanup();
    return 0;
}
#endif
| mit |
C2FO/fast-csv | examples/formatting-js/examples/quote_headers_object.example.js | 560 | const csv = require('@fast-csv/format');
// Create a formatter that emits a header row; `quoteHeaders` given as a map
// quotes only the listed header cells (here: just "header2").
const csvStream = csv.format({ headers: true, quoteHeaders: { header2: true } });
// Stream the formatted CSV to stdout and exit once the stream finishes.
csvStream.pipe(process.stdout).on('end', () => process.exit());
csvStream.write({ header1: 'value1a', header2: 'value2a' });
csvStream.write({ header1: 'value1a', header2: 'value2a' });
csvStream.write({ header1: 'value1a', header2: 'value2a' });
csvStream.write({ header1: 'value1a', header2: 'value2a' });
csvStream.end();
// Output:
// header1,"header2"
// value1a,value2a
// value1a,value2a
// value1a,value2a
// value1a,value2a
| mit |
Kuniwak/vint | vint/linting/cli.py | 7575 | from typing import Dict, Any, List # noqa: F401
import sys
from argparse import ArgumentParser
from pathlib import Path
import logging
from vint.linting.linter import Linter
from vint.linting.env import build_environment
from vint.linting.config.config_container import ConfigContainer
from vint.linting.config.config_cmdargs_source import ConfigCmdargsSource
from vint.linting.config.config_default_source import ConfigDefaultSource
from vint.linting.config.config_global_source import ConfigGlobalSource
from vint.linting.config.config_project_source import ConfigProjectSource
from vint.linting.config.config_util import get_config_value
from vint.linting.lint_target import (
AbstractLintTarget,
LintTargetFile,
LintTargetBufferedStream,
CachedLintTarget,
)
from vint.linting.policy_set import PolicySet
from vint.linting.formatter.abstract_formatter import AbstractFormatter
from vint.linting.policy_registry import get_policy_classes
from vint.linting.formatter.formatter import Formatter
from vint.linting.formatter.json_formatter import JSONFormatter
from vint.linting.formatter.statistic_formatter import StatisticFormatter
_stdin_symbol = Path('-')
def start_cli():
    """Entry point: parse argv, lint the requested targets, and exit.

    Exits with status 0 when no violations are found, 1 otherwise.
    """
    env = _build_env(sys.argv)
    _validate(env)
    _adjust_log_level(env)

    config_dict = _build_config_dict(env)
    violations = _lint_all(env, config_dict)

    parser = _build_arg_parser()
    if not violations:
        parser.exit(status=0)

    _print_violations(violations, config_dict)
    parser.exit(status=1)
def _validate(env):  # type: (Dict[str, Any]) -> None
    """Exit with status 1 unless every lint target exists and '-' appears at most once."""
    parser = _build_arg_parser()
    paths_to_lint = env['file_paths']

    if not paths_to_lint:
        logging.error('nothing to check')
        parser.print_help()
        parser.exit(status=1)

    if paths_to_lint.count(_stdin_symbol) > 1:
        logging.error('number of "-" must be less than 2')
        parser.exit(status=1)

    for path_to_lint in paths_to_lint:
        if path_to_lint == _stdin_symbol:
            continue
        if not (path_to_lint.exists() and path_to_lint.is_file()):
            logging.error('no such file or directory: `{path}`'.format(
                path=str(path_to_lint)))
            parser.exit(status=1)
def _build_env(argv):
    """Create the environment object from the given argv.

    Taking argv as a parameter keeps this function pure.
    """
    return build_environment(_build_cmdargs(argv))
def _build_cmdargs(argv):
    """Parse argv (minus the program name) into a plain dict.

    The result is used both for displaying usage and by
    vint.linting.env.build_environment. Taking argv as a parameter keeps
    this function pure.
    """
    parser = _build_arg_parser()
    return vars(parser.parse_args(argv[1:]))
def _build_arg_parser():
    """Construct the ArgumentParser describing vint's command-line interface."""
    parser = ArgumentParser(prog='vint', description='Lint Vim script')

    # (option strings, add_argument kwargs), in --help display order.
    options = [
        (('-v', '--version'), dict(action='version', version=_get_version())),
        (('-V', '--verbose'), dict(action='store_const', const=True, help='output verbose message')),
        (('-e', '--error'), dict(action='store_const', const=True, help='report only errors')),
        (('-w', '--warning'), dict(action='store_const', const=True, help='report errors and warnings')),
        (('-s', '--style-problem'), dict(action='store_const', const=True, help='report errors, warnings and style problems')),
        (('-m', '--max-violations'), dict(type=int, help='limit max violations count')),
        (('-c', '--color'), dict(action='store_const', const=True, help='colorize output when possible')),
        (('--no-color',), dict(action='store_const', const=True, help='do not colorize output')),
        (('-j', '--json'), dict(action='store_const', const=True, help='output json style')),
        (('-t', '--stat'), dict(action='store_const', const=True, help='output statistic info')),
        (('--enable-neovim',), dict(action='store_const', const=True, help='enable Neovim syntax')),
        (('-f', '--format'), dict(help='set output format')),
        (('--stdin-display-name',), dict(type=str, help='specify a file path that is used for reporting when linting standard inputs')),
        (('files',), dict(nargs='*', help='file or directory path to lint')),
    ]
    for names, kwargs in options:
        parser.add_argument(*names, **kwargs)

    return parser
def _build_config_dict(env):  # type: (Dict[str, Any]) -> Dict[str, Any]
    """Merge config sources (default < global < project < cmdargs) into one dict."""
    sources = (
        ConfigDefaultSource(env),
        ConfigGlobalSource(env),
        ConfigProjectSource(env),
        ConfigCmdargsSource(env),
    )
    return ConfigContainer(*sources).get_config_dict()
def _lint_all(env, config_dict):  # type: (Dict[str, Any], Dict[str, Any]) -> List[Dict[str, Any]]
    """Lint every requested path and return the accumulated violations."""
    linter = _build_linter(config_dict)

    violations = []
    for path in env['file_paths']:
        violations.extend(linter.lint(_build_lint_target(path, config_dict)))

    return violations
def _build_linter(config_dict):  # type: (Dict[str, Any]) -> Linter
    """Create a Linter armed with every registered policy."""
    return Linter(PolicySet(get_policy_classes()), config_dict)
def _print_violations(violations, config_dict):  # type: (List[Dict[str, Any]], Dict[str, Any]) -> None
    """Format the violations with the configured formatter and print them."""
    formatter = _build_formatter(config_dict)
    print(formatter.format_violations(violations))
def _build_formatter(config_dict):  # type: (Dict[str, Any]) -> AbstractFormatter
    """Pick the output formatter requested on the command line.

    Falls back to the plain Formatter when no cmdargs are present or when
    neither JSON nor statistic output was requested.
    """
    if 'cmdargs' not in config_dict:
        return Formatter(config_dict)

    cmdargs = config_dict['cmdargs']
    if cmdargs.get('json'):
        return JSONFormatter()
    if cmdargs.get('stat'):
        return StatisticFormatter(config_dict)
    return Formatter(config_dict)
def _get_version():
    # Lazy relative import so version metadata is only loaded on demand.
    from ..__version__ import version
    return version
def _adjust_log_level(env):
cmdargs = env['cmdargs']
is_verbose = cmdargs.get('verbose', False)
log_level = logging.DEBUG if is_verbose else logging.WARNING
logger = logging.getLogger()
logger.setLevel(log_level)
def _build_lint_target(path, config_dict):  # type: (Path, Dict[str, Any]) -> AbstractLintTarget
    """Create a (cached) lint target for a file path, or for stdin when path is '-'."""
    if path == _stdin_symbol:
        # Path reported in messages when linting stdin (--stdin-display-name).
        stdin_alt_path = get_config_value(config_dict, ['cmdargs', 'stdin_display_name'])

        # NOTE: In Python 3, sys.stdin is a string not bytes. Then we can get bytes by sys.stdin.buffer.
        # But in Python 2, sys.stdin.buffer is not defined. But we can get bytes by sys.stdin directly.
        is_python_3 = hasattr(sys.stdin, 'buffer')

        if is_python_3:
            lint_target = LintTargetBufferedStream(
                alternate_path=Path(stdin_alt_path),
                buffered_io=sys.stdin.buffer
            )
        else:
            # NOTE: Python 2 on Windows opens sys.stdin in text mode, and
            # binary data that read from it becomes corrupted on \r\n
            # SEE: https://stackoverflow.com/questions/2850893/reading-binary-data-from-stdin/38939320#38939320
            if sys.platform == 'win32':
                # set sys.stdin to binary mode
                import os, msvcrt
                msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)

            lint_target = LintTargetBufferedStream(
                alternate_path=Path(stdin_alt_path),
                buffered_io=sys.stdin
            )

        # The cache lets the stream contents be read more than once.
        return CachedLintTarget(lint_target)
    else:
        lint_target = LintTargetFile(path)
        return CachedLintTarget(lint_target)
| mit |
mohammedh123/ProjectMinos | ProjectMinos.Core/TileSet.cs | 2107 | using System.Collections.Generic;
namespace ProjectMinos.Core
{
/// <summary>
/// Slices a tile-sheet image into fixed-size tile rectangles and maps
/// global sprite indices onto them.
/// </summary>
public class TileSet
{
    /// <summary>Name of the tile set.</summary>
    public string Name { get; }
    /// <summary>Image file the tiles are cut from.</summary>
    public string Filename { get; }
    /// <summary>Width of a single tile, in pixels.</summary>
    public int TileWidth { get; }
    /// <summary>Height of a single tile, in pixels.</summary>
    public int TileHeight { get; }
    /// <summary>Global sprite index of the first tile in this set.</summary>
    public int FirstSpriteIndex { get; }
    /// <summary>FirstSpriteIndex plus the tile count (i.e. an exclusive upper bound).</summary>
    public int LastSpriteIndex { get; }

    /// <summary>Source rectangles for every tile, in row-major order.</summary>
    public IReadOnlyList<Rect> TileRects => _tileRects;
    private readonly List<Rect> _tileRects = new List<Rect>();

    public TileSet(string name, string filename, int tileWidth, int tileHeight, int imageWidth, int imageHeight, int margin, int spacing, int spriteIndexOffset)
    {
        Name = name;
        Filename = filename;
        TileWidth = tileWidth;
        TileHeight = tileHeight;
        FirstSpriteIndex = spriteIndexOffset;

        // Usable area once the outer margin is removed on both sides.
        var adjustedWidth = imageWidth - 2 * margin;
        var adjustedHeight = imageHeight - 2 * margin;

        // The last tile in a row/column has no trailing spacing, hence the
        // second term picking up one extra tile when the remainder fits.
        var numTilesWidth = adjustedWidth / (tileWidth + spacing) +
                            (adjustedWidth % (tileWidth + spacing)) / tileWidth;
        var numTilesHeight = adjustedHeight / (tileHeight + spacing) +
                             (adjustedHeight % (tileHeight + spacing)) / tileHeight;

        for (var y = 0; y < numTilesHeight; y++)
        {
            for (var x = 0; x < numTilesWidth; x++)
            {
                _tileRects.Add(new Rect(margin + (tileWidth + spacing) * x,
                    margin + (tileHeight + spacing) * y,
                    tileWidth, tileHeight));
            }
        }

        LastSpriteIndex = spriteIndexOffset + numTilesHeight * numTilesWidth;
    }

    /// <summary>
    /// Returns the source rectangle for a global sprite index, or null when
    /// the index does not belong to this tile set.
    /// </summary>
    public Rect GetRectBySpriteIndex(int spriteIndex)
    {
        var adjSprIdx = GetAdjustedSpriteIndex(spriteIndex);

        // Fix: the upper bound must be exclusive. The original compared with
        // `adjSprIdx > _tileRects.Count`, which let adjSprIdx == Count through
        // and threw ArgumentOutOfRangeException on the indexer.
        return (adjSprIdx < 0 || adjSprIdx >= _tileRects.Count) ? null : _tileRects[adjSprIdx];
    }

    // Translates a global sprite index into a local index into _tileRects.
    private int GetAdjustedSpriteIndex(int spriteIndex) => spriteIndex - FirstSpriteIndex;
}
} | mit |
AamuLumi/sanic.js | bench/misc/loopEmpty.js | 3741 | 'use strict';
// Benchmark.js drives the measurements; raise the sample count for stability.
const Benchmark = require('benchmark');
Benchmark.options.minSamples = 200;

// When invoked with a "writeFile" argument, results are also appended to a
// markdown file via the helper in ../benchFileCreator.
let fileWriter = null;

if (process.argv.some((e) => e === 'writeFile')) {
    const BenchFileCreator = require('../benchFileCreator');

    fileWriter = new BenchFileCreator();
    fileWriter.addInformations(Benchmark.options.minSamples);
    // NOTE(review): the header declares 13 result columns, but doSuite()
    // registers 16 benchmark cases — verify the table stays aligned.
    fileWriter.writeTableHeader([
        'Elements',
        '1',
        '2',
        '3',
        '4',
        '5',
        '6',
        '7',
        '8',
        '9',
        '10',
        '11',
        '12',
        '13',
    ]);
}
/**
 * Render an ops/sec figure as a short human-readable string: millions get
 * an "m" suffix, thousands a "k" suffix, otherwise the bare number —
 * always rounded to two decimal places.
 * @param {number} nb
 * @returns {string}
 */
function computeNumber(nb) {
    const round2 = (value) => Math.round(value * 100) / 100;

    if (nb > 1000000) {
        return `${round2(nb / 1000000)}m`;
    }
    if (nb > 1000) {
        return `${round2(nb / 1000)}k`;
    }
    return `${round2(nb)}`;
}
// Walk the array recursively, touching each element (the product is
// discarded); used to benchmark plain recursion against loop constructs.
function simpleRecursiveIteration(array, i = 0) {
    if (i >= array.length) {
        return undefined;
    }
    array[i] * array[i];
    return simpleRecursiveIteration(array, i + 1);
}
// Same benchmark as simpleRecursiveIteration, but the index lives in a
// closure shared by every recursive call instead of being passed along.
function recursiveClosureIteration(array) {
    let index = 0;

    const step = (arr) => {
        if (index >= arr.length) {
            return undefined;
        }
        arr[index] * arr[index];
        index += 1;
        return step(arr);
    };

    return step(array);
}
// Run the full benchmark suite for one array length: every loop/recursion
// variant walks an array of `arrayLength` zeros. Each cycle is printed, and
// when fileWriter is set the formatted ops/sec is appended to the table.
function doSuite(arrayLength) {
    console.log(`# Array.length : ${arrayLength}`);
    const Suite = new Benchmark.Suite();
    const array = new Array(arrayLength).fill(0);

    Suite.add('for (let i = 0; i < array.length; i++)', function() {
        for (let i = 0; i < array.length; i++) {}
    })
        .add('for (let i = 0, iMax = array.length; i < iMax; i++)', function() {
            for (let i = 0, iMax = array.length; i < iMax; i++) {}
        })
        .add('let i = 0; const iMax = array.length; for (; i++ < iMax;)', function() {
            let i = 0;
            const iMax = array.length;
            for (; i++ < iMax; ) {}
        })
        .add('let i = 0; const iMax = array.length; for (; i < iMax; i++)', function() {
            let i = 0;
            const iMax = array.length;
            for (; i < iMax; i++) {}
        })
        .add('let i = 0; for (; i < array.length; i++)', function() {
            let i = 0;
            for (; i < array.length; i++) {}
        })
        .add('let i = 0; const iMax = array.length; for (; iMax - i; i++)', function() {
            let i = 0;
            const iMax = array.length;
            for (; iMax - i; i++) {}
        })
        .add('let i = 0; for (; array[i] !== undefined; i++)', function() {
            let i = 0;
            for (; array[i] !== undefined; i++) {}
        })
        .add('let i = 0; for (; array[i++] !== undefined;)', function() {
            let i = 0;
            for (; array[i++] !== undefined; ) {}
        })
        .add('let i = 0; while (array[i++] !== undefined)', function() {
            let i = 0;
            while (array[i++] !== undefined) {}
        })
        .add('let i = array.length; while (i--)', function() {
            let i = array.length;
            while (i--) {}
        })
        .add('let i = 0, iMax = array.length; while (i++ < iMax)', function() {
            let i = 0,
                iMax = array.length;
            while (i++ < iMax) {}
        })
        .add('let i = 0, iMax = array.length; while (iMax - (i++))', function() {
            let i = 0,
                iMax = array.length;
            while (iMax - i++) {}
        })
        .add('let i = -array.length; while (i++)', function() {
            let i = -array.length;
            while (i++) {}
        })
        .add('Simple recursion', function() {
            simpleRecursiveIteration(array);
        })
        .add('Simple recursion with i in closure', function() {
            recursiveClosureIteration(array);
        })
        .add('let i = array.length; for (; i-- ;)', function() {
            let i = array.length;
            for (; i--; ) {}
        })
        .on('cycle', function(event) {
            // Print each finished case; mirror the result into the file table.
            console.log(String(event.target));
            if (fileWriter) {
                fileWriter.writeTableElement(computeNumber(event.target.hz));
            }
        })
        .on('complete', function() {
            console.log('Fastest is ' + this.filter('fastest').map('name'));
        })
        .run({ async: false });
}
// Run the suite for array sizes 10, 100, ..., 1,000,000 (powers of ten).
for (let i = 10; i < 10000000; i *= 10) {
    if (fileWriter) {
        fileWriter.writeTableElement(i);
    }
    doSuite(i);
    if (fileWriter) {
        fileWriter.newLine();
    }
}

// Finalize the markdown output file when one was requested.
if (fileWriter) {
    fileWriter.end();
}
| mit |
Rapptz/Shinobi | util/maker.cpp | 4204 | #include "maker.hpp"
namespace fs = boost::filesystem;
namespace util {
// Write a default "Shinobi" build-description file (g++/C++11 defaults)
// into the current working directory.
void make_default_shinobi() noexcept {
    std::ofstream out("Shinobi");
    out << "# The default Shinobi file. See reference.md for syntax help.\n\n"
           "PROJECT_NAME := untitled\n"
           "BUILDDIR := bin\n"
           "OBJDIR := obj\n"
           "CXX := g++\n"
           "CXXFLAGS += -std=c++11 -pedantic -pedantic-errors -Wextra -Wall -O2\n"
           "INCLUDE_FLAGS += -I.\n"
           "LINK_FLAGS += -static\n"
           "LIB_PATHS +=\n"
           "LIBS +=\n"
           "DEFINES += -DNDEBUG";
}
// Strip the first occurrence of a leading "./" (or ".\" on Windows-style
// paths) from the path's string form. Returns the text unchanged when
// neither marker is present.
std::string remove_symlink(const fs::path& p) noexcept {
    std::string text = p.string();

    auto marker = text.find("./");
    if (marker == std::string::npos) {
        marker = text.find(".\\");
    }

    if (marker != std::string::npos) {
        text.replace(marker, 2, "");
    }

    return text;
}
// Bind a maker to the current working directory and the given ninja output
// stream; creates and reopens a default Shinobi file when none was found.
maker::maker(std::ofstream& out): dir(fs::current_path()), file(out) {
    if(!shinobi.is_open()) {
        make_default_shinobi();
        shinobi.reopen();
    }
}

// Toggle debug output of the Shinobi parser.
void maker::debug(bool b) noexcept {
    shinobi.debug = b;
}

// Emit the ninja variables plus the compile/link rules, pulling values from
// the Shinobi file with g++-oriented defaults for anything unset.
void maker::create_variables() noexcept {
    file.variable("ninja_required_version", "1.3");
    file.variable("builddir", shinobi.get("BUILDDIR", "bin"));
    file.variable("objdir", shinobi.get("OBJDIR", "obj"));
    file.variable("cxx", shinobi.get("CXX", "g++"));
    file.variable("cxxflags", shinobi.get("CXXFLAGS", "-std=c++11 -pedantic -pedantic-errors -Wextra -Wall -O2"));
    file.variable("incflags", shinobi.get("INCLUDE_FLAGS", "-I."));
    file.variable("ldflags", shinobi.get("LINK_FLAGS", "-static"));
    file.variable("libpath", shinobi.get("LIB_PATHS", ""));
    file.variable("libs", shinobi.get("LIBS", ""));
    file.variable("def", shinobi.get("DEFINES", "-DNDEBUG"));
    file.newline();
    // "compile" uses gcc-style dependency files so ninja can track headers.
    file.rule("compile", "deps = gcc",
              "depfile = $out.d",
              "command = $cxx -MMD -MF $out.d $cxxflags $def -c $in -o $out $incflags",
              "description = Building $in to $out");
    file.rule("link", "command = $cxx $in -o $out $ldflags $libpath $libs", "description = Linking $out");
}

// Collect C/C++ sources from every SRCDIR (default "."), add any FILES
// entries, then drop everything listed in IGNORED_FILES.
void maker::fill_source_files() noexcept {
    auto input_dirs = shinobi.get_list("SRCDIR");
    if(input_dirs.empty()) {
        input_dirs.push_back(".");
    }
    for(auto&& d : input_dirs) {
        for(fs::recursive_directory_iterator it(d), end; it != end; ++it) {
            auto p = it->path();
            if(extension_is(p.string(), ".cpp", ".cxx", ".cc", ".c", ".c++")) {
                input.emplace(remove_symlink(p));
            }
        }
    }
    auto added = shinobi.get_list("FILES");
    for(auto&& i : added) {
        if(extension_is(i, ".cpp", ".cxx", ".cc", ".c", ".c++")) {
            input.emplace(i);
        }
    }
    auto ignored = shinobi.get_list("IGNORED_FILES");
    for(auto&& i : ignored) {
        auto it = input.find(i);
        if(it != input.end()) {
            input.erase(it);
        }
    }
}

// Parse the Shinobi file and emit the whole ninja build: variables,
// one compile edge per source, then a single link edge for the binary.
void maker::regular_parse() {
    shinobi.parse();
    create_variables();
    fill_source_files();
    create_directories();
    // Generate build sequences
    for(auto&& p : input) {
        // Mirror the source tree under the object directory.
        // NOTE(review): 'obj' here appears to be a member path — confirm it
        // stays in sync with the OBJDIR value used by create_directories().
        auto appended_dir = (dir / obj / p).parent_path();
        if(!fs::exists(appended_dir)) {
            fs::create_directories(appended_dir);
        }
        std::string output_file = "$objdir/" + remove_symlink(fs::path(p).replace_extension(".o"));
        output.push_back(output_file);
        file.build(p, output_file, "compile");
    }
    // Generate link sequence
    file.build(stringify_list(output), "$builddir/" + shinobi.get("PROJECT_NAME", "untitled"), "link");
}
void maker::create_directories() {
fs::path bin(shinobi.get("BUILDDIR", "bin"));
fs::path obj(shinobi.get("OBJDIR", "obj"));
if(!fs::is_directory(bin)) {
fs::create_directory(bin);
}
if(!fs::is_directory(obj)) {
fs::create_directory(obj);
}
}
} // util | mit |
garysharp/DhcpServerApi | src/Dhcp/DhcpServerException.cs | 2118 | using System;
using System.Text;
using Dhcp.Native;
namespace Dhcp
{
/// <summary>
/// Exception thrown when a native DHCP server management API call fails.
/// Carries the native error code along with the failing function's name.
/// </summary>
public class DhcpServerException : Exception
{
    // Native error code returned by the failing API call.
    private readonly DhcpErrors error;
    // Full message, built once in the constructor.
    private readonly string message;

    internal DhcpServerException(string apiFunction, DhcpErrors error, string additionalMessage)
    {
        ApiFunction = apiFunction;
        this.error = error;

        ApiErrorMessage = BuildApiErrorMessage(error);
        message = BuildMessage(apiFunction, additionalMessage, error, ApiErrorMessage);
    }

    internal DhcpServerException(string apiFunction, DhcpErrors error)
        : this(apiFunction, error, null)
    { }

    /// <summary>Name of the native API function that failed, if known.</summary>
    public string ApiFunction { get; }

    /// <summary>Symbolic name of the native error code.</summary>
    public string ApiError => error.ToString();

    internal DhcpErrors ApiErrorNative => error;

    /// <summary>Numeric value of the native error code.</summary>
    public uint ApiErrorId => (uint)error;

    /// <summary>Human-readable description of the native error code.</summary>
    public string ApiErrorMessage { get; }

    /// <inheritdoc/>
    public override string Message => message;

    // Looks up the DhcpErrorDescriptionAttribute on the enum member via
    // reflection; falls back to "Unknown Error" when it is absent.
    private string BuildApiErrorMessage(DhcpErrors error)
    {
        var errorType = typeof(DhcpErrors).GetMember(error.ToString());
        if (errorType.Length != 0)
        {
            var errorAttribute = errorType[0].GetCustomAttributes(typeof(DhcpErrorDescriptionAttribute), false);
            if (errorAttribute.Length != 0)
                return ((DhcpErrorDescriptionAttribute)errorAttribute[0]).Description;
        }

        return "Unknown Error";
    }

    // Assembles: "An error occurred calling '<fn>'. <additional>. <description> [<name> <id>]".
    private string BuildMessage(string apiFunction, string additionalMessage, DhcpErrors error, string apiErrorMessage)
    {
        var builder = new StringBuilder();

        if (apiFunction != null)
            builder.Append("An error occurred calling '").Append(apiFunction).Append("'. ");

        if (additionalMessage != null)
            builder.Append(additionalMessage).Append(". ");

        builder.Append(apiErrorMessage).Append(" [").Append(error.ToString()).Append(' ').Append((uint)error).Append(']');

        return builder.ToString();
    }
}
}
| mit |
ohmybrew/laravel-shopify | src/ShopifyApp/resources/config/shopify-app.php | 13750 | <?php
return [
/*
|--------------------------------------------------------------------------
| Debug Mode
|--------------------------------------------------------------------------
|
| (Not yet complete) A verbose logged output of processes
|
*/
'debug' => (bool) env('SHOPIFY_DEBUG', false),
/*
|--------------------------------------------------------------------------
| Manual migrations
|--------------------------------------------------------------------------
|
| This option allows you to use:
| `php artisan vendor:publish --tag=shopify-migrations` to push migrations
| to your app's folder so you're free to modify before migrating.
|
*/
'manual_migrations' => (bool) env('SHOPIFY_MANUAL_MIGRATIONS', false),
/*
|--------------------------------------------------------------------------
| Manual routes
|--------------------------------------------------------------------------
|
| This option allows you to ignore the package's built-in routes.
| Use `false` (default) for allowing the built-in routes. Otherwise, you
| can list out which route "names" you would like excluded.
| See `resources/routes/shopify.php` and `resources/routes/api.php`
| for a list of available route names.
| Example: `home,billing` would ignore both "home" and "billing" routes.
|
| Please note that if you override the route names (see "route_names" below),
| the route names that are used in this option DO NOT change!
|
*/
'manual_routes' => env('SHOPIFY_MANUAL_ROUTES', false),
/*
|--------------------------------------------------------------------------
| Route names
|--------------------------------------------------------------------------
|
| This option allows you to override the package's built-in route names.
| This can help you avoid collisions with your existing route names.
|
*/
'route_names' => [
'home' => env('SHOPIFY_ROUTE_NAME_HOME', 'home'),
'authenticate' => env('SHOPIFY_ROUTE_NAME_AUTHENTICATE', 'authenticate'),
'authenticate.oauth' => env('SHOPIFY_ROUTE_NAME_AUTHENTICATE_OAUTH', 'authenticate.oauth'),
'billing' => env('SHOPIFY_ROUTE_NAME_BILLING', 'billing'),
'billing.process' => env('SHOPIFY_ROUTE_NAME_BILLING_PROCESS', 'billing.process'),
'billing.usage_charge' => env('SHOPIFY_ROUTE_NAME_BILLING_USAGE_CHARGE', 'billing.usage_charge'),
'webhook' => env('SHOPIFY_ROUTE_NAME_WEBHOOK', 'webhook'),
],
/*
|--------------------------------------------------------------------------
| Namespace
|--------------------------------------------------------------------------
|
| This option allows you to set a namespace.
| Useful for multiple apps using the same database instance.
| Meaning, one shop can be part of many apps on the same database.
|
*/
'namespace' => env('SHOPIFY_APP_NAMESPACE', null),
/*
|--------------------------------------------------------------------------
| Shopify Jobs Namespace
|--------------------------------------------------------------------------
|
| This option allows you to change out the default job namespace
| which is \App\Jobs. This option is mainly used if any custom configuration
| is done in autoload and does not need to be changed unless required.
|
*/
'job_namespace' => env('SHOPIFY_JOB_NAMESPACE', '\\App\\Jobs\\'),
/*
|--------------------------------------------------------------------------
| Prefix
|--------------------------------------------------------------------------
|
| This option allows you to set a prefix for URLs.
| Useful for multiple apps using the same database instance.
|
*/
'prefix' => env('SHOPIFY_APP_PREFIX', ''),
/*
|--------------------------------------------------------------------------
| AppBridge Mode
|--------------------------------------------------------------------------
|
| AppBridge (embedded apps) are enabled by default. Set to false to use legacy
| mode and host the app inside your own container.
|
*/
'appbridge_enabled' => (bool) env('SHOPIFY_APPBRIDGE_ENABLED', true),
// Use semver range to link to a major or minor version number.
// Leaving empty will use the latest verison - not recommended in production.
'appbridge_version' => env('SHOPIFY_APPBRIDGE_VERSION', '1'),
/*
|--------------------------------------------------------------------------
| Shopify App Name
|--------------------------------------------------------------------------
|
| This option simply lets you display your app's name.
|
*/
'app_name' => env('SHOPIFY_APP_NAME', 'Shopify App'),
/*
|--------------------------------------------------------------------------
| Shopify API Version
|--------------------------------------------------------------------------
|
| This option is for the app's API version string.
| Use "YYYY-MM" or "unstable". Refer to Shopify's documentation
| on API versioning for the current stable version.
|
*/
'api_version' => env('SHOPIFY_API_VERSION', '2020-01'),
/*
|--------------------------------------------------------------------------
| Shopify API Key
|--------------------------------------------------------------------------
|
| This option is for the app's API key.
|
*/
'api_key' => env('SHOPIFY_API_KEY', ''),
/*
|--------------------------------------------------------------------------
| Shopify API Secret
|--------------------------------------------------------------------------
|
| This option is for the app's API secret.
|
*/
'api_secret' => env('SHOPIFY_API_SECRET', ''),
/*
|--------------------------------------------------------------------------
| Shopify API Scopes
|--------------------------------------------------------------------------
|
| This option is for the scopes your application needs in the API.
|
*/
'api_scopes' => env('SHOPIFY_API_SCOPES', 'read_products,write_products'),
/*
|--------------------------------------------------------------------------
| Shopify API Grant Mode
|--------------------------------------------------------------------------
|
| This option is for the grant mode when authenticating.
| Default is "OFFLINE", "PERUSER" is available as well.
| Note: Install will always be in offline mode.
|
*/
'api_grant_mode' => env('SHOPIFY_API_GRANT_MODE', 'OFFLINE'),
/*
|--------------------------------------------------------------------------
| Shopify API Redirect
|--------------------------------------------------------------------------
|
| This option is for the redirect after authentication.
|
*/
'api_redirect' => env('SHOPIFY_API_REDIRECT', '/authenticate'),
/*
|--------------------------------------------------------------------------
| Shopify API Time Store
|--------------------------------------------------------------------------
|
| This option is for the class which will hold the timestamps for API calls.
|
*/
'api_time_store' => env('SHOPIFY_API_TIME_STORE', \Osiset\BasicShopifyAPI\Store\Memory::class),
/*
|--------------------------------------------------------------------------
| Shopify API Limit Store
|--------------------------------------------------------------------------
|
| This option is for the class which will hold the call limits for REST and GraphQL.
|
*/
'api_limit_store' => env('SHOPIFY_API_LIMIT_STORE', \Osiset\BasicShopifyAPI\Store\Memory::class),
/*
|--------------------------------------------------------------------------
| Shopify API Deferrer
|--------------------------------------------------------------------------
|
| This option is for the class which will handle sleep deferrals for API calls.
|
*/
'api_deferrer' => env('SHOPIFY_API_DEFERRER', \Osiset\BasicShopifyAPI\Deferrers\Sleep::class),
/*
|--------------------------------------------------------------------------
| Shopify API Init Function
|--------------------------------------------------------------------------
|
| This option is for initing the BasicShopifyAPI package optionally yourself.
| The first param injected in is the current options (\Osiset\BasicShopifyAPI\Options).
| The second param injected in is the session (if available) (\Osiset\BasicShopifyAPI\Session).
| The third param injected in is the current request input/query array (\Illuminate\Http\Request::all()).
| With all this, you can customize the options, change params, and more.
|
| Value for this option must be a callable (callable, Closure, etc).
|
*/
'api_init' => null,
/*
|--------------------------------------------------------------------------
| Shopify "MyShopify" domain
|--------------------------------------------------------------------------
|
|
| The internal URL used by shops. This is not expected to change, but it
| may in the future.
*/
'myshopify_domain' => env('SHOPIFY_MYSHOPIFY_DOMAIN', 'myshopify.com'),
/*
|--------------------------------------------------------------------------
| Enable Billing
|--------------------------------------------------------------------------
|
| Enable billing component to the package.
|
*/
'billing_enabled' => (bool) env('SHOPIFY_BILLING_ENABLED', false),
/*
|--------------------------------------------------------------------------
| Enable Freemium Mode
|--------------------------------------------------------------------------
|
| Allow a shop to use the app in "freemium" mode.
| Shop will get a `freemium` flag on their record in the table.
|
*/
'billing_freemium_enabled' => (bool) env('SHOPIFY_BILLING_FREEMIUM_ENABLED', false),
/*
|--------------------------------------------------------------------------
| Billing Redirect
|--------------------------------------------------------------------------
|
| Required redirection URL for billing when
| a customer accepts or declines the charge presented.
|
*/
'billing_redirect' => env('SHOPIFY_BILLING_REDIRECT', '/billing/process'),
/*
|--------------------------------------------------------------------------
| Shopify Webhooks
|--------------------------------------------------------------------------
|
| This option is for defining webhooks.
| Key is for the Shopify webhook event
| Value is for the endpoint to call
|
*/
'webhooks' => [
/*
[
'topic' => env('SHOPIFY_WEBHOOK_1_TOPIC', 'orders/create'),
'address' => env('SHOPIFY_WEBHOOK_1_ADDRESS', 'https://some-app.com/webhook/orders-create')
],
...
*/
],
/*
|--------------------------------------------------------------------------
| Shopify ScriptTags
|--------------------------------------------------------------------------
|
| This option is for defining scripttags.
|
*/
'scripttags' => [
/*
[
'src' => env('SHOPIFY_SCRIPTTAG_1_SRC', 'https://some-app.com/some-controller/js-method-response'),
'event' => env('SHOPIFY_SCRIPTTAG_1_EVENT', 'onload'),
'display_scope' => env('SHOPIFY_SCRIPTTAG_1_DISPLAY_SCOPE', 'online_store')
],
...
*/
],
/*
|--------------------------------------------------------------------------
| After Authenticate Job
|--------------------------------------------------------------------------
|
| This option is for firing a job after a shop has been authenticated.
| This, like webhooks and scripttag jobs, will fire every time a shop
| authenticates, not just once.
|
*/
'after_authenticate_job' => [
/*
[
'job' => env('AFTER_AUTHENTICATE_JOB'), // example: \App\Jobs\AfterAuthorizeJob::class
'inline' => env('AFTER_AUTHENTICATE_JOB_INLINE', false) // False = dispatch job for later, true = dispatch immediately
],
*/
],
/*
|--------------------------------------------------------------------------
| Job Queues
|--------------------------------------------------------------------------
|
| This option is for setting a specific job queue for webhooks, scripttags
| and after_authenticate_job.
|
*/
'job_queues' => [
'webhooks' => env('WEBHOOKS_JOB_QUEUE', null),
'scripttags' => env('SCRIPTTAGS_JOB_QUEUE', null),
'after_authenticate' => env('AFTER_AUTHENTICATE_JOB_QUEUE', null),
],
/*
|--------------------------------------------------------------------------
| Config API Callback
|--------------------------------------------------------------------------
|
| This option can be used to modify what returns when `getConfig('api_*')` is used.
| A use-case for this is modifying the return of `api_secret` or something similar.
|
| A closure/callable is required.
| The first argument will be the key string.
| The second argument will be something to help identify the shop.
|
*/
'config_api_callback' => null,
];
| mit |
zchhaenngg/IWI | src/ImproveX.EntityFramework/Migrations/201707261347311_Initial_Migration.Designer.cs | 831 | // <auto-generated />
namespace ImproveX.Migrations
{
using System.CodeDom.Compiler;
using System.Data.Entity.Migrations;
using System.Data.Entity.Migrations.Infrastructure;
using System.Resources;
[GeneratedCode("EntityFramework.Migrations", "6.1.3-40302")]
public sealed partial class Initial_Migration : IMigrationMetadata
{
private readonly ResourceManager Resources = new ResourceManager(typeof(Initial_Migration));
string IMigrationMetadata.Id
{
get { return "201707261347311_Initial_Migration"; }
}
string IMigrationMetadata.Source
{
get { return null; }
}
string IMigrationMetadata.Target
{
get { return Resources.GetString("Target"); }
}
}
}
| mit |
danleeb/fullcalendar | src/tasks/TasksView.js | 5451 | fcViews.tasks = TasksListView;
function TasksListView(element, calendar) {
var t = this;
// exports
t.render = render;
t.fetchData = getFetchData;
// imports
TasksList.call(t, element, calendar, 'tasks');
var opt = t.opt;
var renderList = t.renderList;
var formatDate = calendar.formatDate;
function render(date, delta) {
t.title = opt('tasksTitle');
t.start = 1;
t.end = null;
t.visStart = null;
t.visEnd = null;
renderList();
}
function getFetchData() {
return {
type: 'task',
done: opt('tasksShowDone'),
canceled: opt('tasksShowCanceled')
};
}
}
// Shared behavior for task-list views: renders the task container, an action
// bar of bulk operations, and wires selection + event rendering mixins.
function TasksList(element, calendar, viewName) {
	var t = this;

	// exports
	t.renderList = renderList;
	t.setHeight = setHeight;
	t.setWidth = setWidth;
	t.getSegmentContainer = function() { return segmentContainer };
	t.getActionContainer = function() { return selectActionContainer };

	// imports — base view, selection handling, task event rendering
	View.call(t, element, calendar, viewName);
	SelectionManager.call(t);
	TasksListEventRenderer.call(t);
	var opt = t.opt;
	var clearEvents = t.clearEvents;
	var daySelectionMousedown = t.daySelectionMousedown;
	var formatDate = calendar.formatDate;
	var rerenderEvents = calendar.rerenderEvents;
	var trigger = calendar.trigger;
	var applyToSelectedTasks = t.applyToSelectedTasks;
	var getSelectedTasks = t.getSelectedTasks;
	var unselectAllTasks = t.unselectAllTasks;

	// locals
	var segmentContainer;          // scrollable container holding task rows
	var selectActionContainer;     // bar of selection actions inside the view
	var actionContainer;           // external action bar appended next to the view

	// Wrap calendar.changeView so the external action bar is hidden whenever
	// the user navigates away from the tasks view.
	var changeView = calendar.changeView;
	calendar.changeView = function(newViewname) {
		if (newViewname !== 'tasks') {
			actionContainer.hide();
		}
		changeView(newViewname);
	};

	/* Rendering
	------------------------------------------------------------*/

	// First call lazily builds the action bar + containers; subsequent calls
	// just re-show the bar and clear previously rendered events.
	function renderList() {
		if (!segmentContainer) {
			actionContainer = trigger('tasksRenderActions') || $('<div class="fc-tasks-actions"></div>');
			actionContainer.appendTo(element.parent());
			renderSelectActions();
			segmentContainer = $('<div class="fc-tasks-container"/>').appendTo(element);
		} else {
			actionContainer.show();
			clearEvents();
		}
	}

	// Sizes the segment container to the given height minus the action bar.
	function setHeight(height) {
		var h = height - selectActionContainer.height();
		setMinHeight(segmentContainer, h);
		segmentContainer.height(h);
		setOuterWidth(segmentContainer, 0);
	}

	function setWidth(width) {
		setOuterWidth(segmentContainer, width);
	}

	// Builds the bar of bulk actions (unselect / indent / done / cancel / open)
	// that operate on the currently selected tasks. Markup for each control
	// comes from the 'taskSelectActionsText' option.
	function renderSelectActions() {
		var self = this; // NOTE(review): unused — `this` here is not the view instance
		selectActionContainer = $('<div class="fc-tasks-select-actions" />').appendTo(element);
		// unselect-all checkbox: unchecking clears the selection, then re-checks itself
		$(opt('taskSelectActionsText').unselect).appendTo(selectActionContainer)
			.on('change', function() {
				if (!$(this).is(':checked')) {
					unselectAllTasks();
					$(this).prop('checked', true);
				}
			});
		// decrease indent of all selected tasks (never below 0)
		$(opt('taskSelectActionsText').indentSub).appendTo(selectActionContainer)
			.on('click', function() {
				applyToSelectedTasks(function(event) {
					if (event.indent > 0) {
						event.indent--;
					}
				});
				trigger('tasksIndentSub', calendar, getSelectedTasks());
				rerenderEvents();
			});
		// increase indent of all selected tasks (capped at 'tasksMaxIndent')
		$(opt('taskSelectActionsText').indentAdd).appendTo(selectActionContainer)
			.on('click', function() {
				applyToSelectedTasks(function(event) {
					if (!event.indent) {
						event.indent = 1;
					} else if (event.indent < opt('tasksMaxIndent')) {
						event.indent++;
					}
				});
				trigger('tasksIndentAdd', calendar, getSelectedTasks());
				rerenderEvents();
			});
		// re-open: clear both done and canceled flags
		$(opt('taskSelectActionsText').open).appendTo(selectActionContainer)
			.on('click', function() {
				applyToSelectedTasks(function(event) {
					event.done = false;
					event.canceled = false;
				});
				trigger('tasksUndone', calendar, getSelectedTasks());
				rerenderEvents();
			});
		// mark done
		$(opt('taskSelectActionsText').done).appendTo(selectActionContainer)
			.on('click', function() {
				applyToSelectedTasks(function(event) {
					event.done = true;
					event.canceled = false;
				});
				trigger('tasksDone', calendar, getSelectedTasks());
				rerenderEvents();
			});
		// mark canceled
		$(opt('taskSelectActionsText').cancel).appendTo(selectActionContainer)
			.on('click', function() {
				applyToSelectedTasks(function(event) {
					event.done = false;
					event.canceled = true;
				});
				trigger('tasksCancel', calendar, getSelectedTasks());
				rerenderEvents();
			});
	}
}
| mit |
dalejung/naginpy | naginpy/special_eval/tests/test_manifest.py | 15685 | import ast
from unittest import TestCase
from textwrap import dedent
import pandas as pd
import numpy as np
from numpy.testing import assert_almost_equal
import nose.tools as nt
from asttools import (
ast_equal
)
from ..manifest import (
Expression,
Manifest,
_manifest
)
from ..exec_context import (
ContextObject,
SourceObject,
ExecutionContext,
get_source_key
)
from .common import ArangeSource
def grab_expression_from_assign(code):
    """Return the right-hand side of the first assignment in *code*,
    wrapped in an ``ast.Expression`` so it can be compiled in eval mode.
    """
    rhs = code.body[0].value
    return ast.Expression(lineno=0, col_offset=0, body=rhs)
class TestExpression(TestCase):
    """Tests for ``Expression`` — a hashable wrapper around a single-line
    evaluable ast node with a stable content-derived key."""

    def test_expression(self):
        # Each line is an assignment; the RHS must wrap into an Expression,
        # while the assignment statement itself must be rejected.
        source = """
        arr = np.arange(20)
        res = np.sum(arr)
        """
        source = dedent(source)
        lines = source.strip().split('\n')
        load_names = [['np'], ['np', 'arr']]
        for i, line in enumerate(lines):
            code = ast.parse(line, '<>', 'exec')
            # expression must be evaluable, assignments are not
            with nt.assert_raises(Exception):
                Expression(code.body[0])
            extracted_expr = grab_expression_from_assign(code)
            # skip the assign
            base_expr = ast.parse(line.split('=')[1].strip(), mode='eval')
            exp1 = Expression(extracted_expr)
            exp2 = Expression(base_expr)
            # equal by content, but distinct objects
            nt.assert_equal(exp1, exp2)
            nt.assert_is_not(exp1, exp2)
            nt.assert_count_equal(exp1.load_names(), load_names[i])

    def test_single_line(self):
        """ Expressions can only be single line """
        source = """
        np.arange(20)
        np.sum(arr)
        """
        source = dedent(source)
        code = ast.parse(source)
        # expression must be single line
        with nt.assert_raises(Exception):
            Expression(code)
        # single line still works
        Expression(code.body[0])
        Expression(code.body[1])

    def test_expression_conversion(self):
        """
        So I'm not 100% sure on converting all code into ast.Expressions.
        Right now it is what I'm doing, so might as well explicitly test?
        """
        source = """
        np.arange(20)
        np.sum(arr)
        """
        source = dedent(source)
        code = ast.parse(source)
        expr1 = Expression(code.body[0])
        nt.assert_is_instance(expr1.code, ast.Expression)
        expr2 = Expression(code.body[1])
        nt.assert_is_instance(expr2.code, ast.Expression)
        # constructing straight from source text also yields ast.Expression
        expr3 = Expression("np.arange(15)")
        nt.assert_is_instance(expr3.code, ast.Expression)

    def test_key(self):
        """ stable hash key """
        source = """
        np.arange(20)
        np.sum(arr)
        """
        source = dedent(source)
        code = ast.parse(source)
        expr1 = Expression(code.body[0])
        expr2 = Expression(code.body[1])
        import binascii
        # changed key to return str, same hash just different rep
        correct1 = b'}\xff\x1c\x0er\xe8k3\x84\x96R\x98\x9a\xa4\xe0i'
        correct1 = binascii.b2a_hex(correct1).decode('utf-8')
        correct2 = b'\xd6\x88\x08\xa2\xd0\x01\xa4\xc6\xabb\x1aTj\xce\x98\x18'
        correct2 = binascii.b2a_hex(correct2).decode('utf-8')
        # keys are stable and should not change between lifecycles
        nt.assert_equal(expr1.key, correct1)
        nt.assert_equal(expr2.key, correct2)
        # key also works for equals
        nt.assert_equal(expr1, correct1)
        nt.assert_equal(expr2, correct2)

    def test_copy(self):
        """
        Copies are equal in content but share no ast nodes, and are
        immutable by default.
        """
        source = """
        np.arange(20)
        """
        source = dedent(source)
        code = ast.parse(source)
        expr1 = Expression(code.body[0])
        expr2 = expr1.copy()
        # equivalent value
        nt.assert_true(ast_equal(expr1.code, expr2.code))
        # but not the same
        nt.assert_is_not(expr1.code, expr2.code)
        nt.assert_is_not(expr1.code.body, expr2.code.body)
        # mutability
        nt.assert_false(expr2.mutable)
        expr3 = expr1.copy(mutable=True)
        nt.assert_true(expr3.mutable)

    def test_mutability(self):
        """ test immutability: replace() only works on mutable copies
        and changes the key """
        source = """
        np.arange(20)
        """
        source = dedent(source)
        code = ast.parse(source)
        new_num = ast.Num(n=3)
        expr1 = Expression(code.body[0])
        with nt.assert_raises_regexp(Exception, "This expression is not mutable"):
            expr1.replace(new_num, expr1.code.body, 'args', 0)
        expr2 = expr1.copy(mutable=True)
        old_key = expr2.key
        expr2.replace(new_num, expr2.code.body, 'args', 0)
        # key reflects content, so it must change after mutation
        nt.assert_not_equal(expr2.key, old_key)
        # expr2 was changed
        nt.assert_false(ast_equal(expr1.code, expr2.code))
        nt.assert_equal(expr2.get_source(), 'np.arange(3)')
class TestManifest(TestCase):
    """Tests for ``Manifest`` — an Expression bound to an ExecutionContext."""

    def test_eval(self):
        # eval() runs the expression against its bound context
        source = "d * string_test"
        context = {
            'd': 13,
            'string_test': 'string_test'
        }
        expr = Expression(source)
        exec_context = ExecutionContext(context)
        manifest = Manifest(expr, exec_context)
        nt.assert_equal(manifest.eval(), 'string_test' * 13)

    def test_equals(self):
        # equality requires both the same expression and the same context
        source = "d * string_test"
        context = {
            'd': 13,
            'string_test': 'string_test'
        }
        expr = Expression(source)
        exec_context = ExecutionContext(context)
        manifest = Manifest(expr, exec_context)
        manifest2 = Manifest(expr, exec_context)
        nt.assert_equal(manifest, manifest2)
        # change expression
        expr3 = Expression("d * string_test * 2")
        manifest3 = Manifest(expr3, exec_context)
        nt.assert_not_equal(manifest, manifest3)
        # change context
        context4 = {
            'd': 11,
            'string_test': 'string_test'
        }
        exec_context4 = ExecutionContext(context4)
        manifest4 = Manifest(expr, exec_context4)
        nt.assert_not_equal(manifest, manifest4)

    def test_nested_eval(self):
        """
        d * (1 + arr + arr2[10:])

        which is really two manifests:

        arr_manifest = (1 + arr + arr2[10:])
        manifest = (d * (arr_manifest))
        """
        arr_source = "1 + arr + arr2[10:]"
        aranger = ArangeSource()
        arr_context = {
            'arr': SourceObject(aranger, 10),
            'arr2': SourceObject(aranger, 20),
        }
        arr_expr = Expression(arr_source)
        arr_exec_context = ExecutionContext.from_ns(arr_context)
        arr_manifest = Manifest(arr_expr, arr_exec_context)
        # outer manifest embeds the inner manifest as a context value
        source = "d * arr"
        context = {
            'd': 13,
            'arr': arr_manifest
        }
        expr = Expression(source)
        exec_context = ExecutionContext.from_ns(context)
        manifest = Manifest(expr, exec_context)
        correct = 13 * (1 + np.arange(10) + np.arange(20)[10:])
        # up till this point, everything is lazy
        nt.assert_equal(len(aranger.cache), 0)
        assert_almost_equal(correct, manifest.eval())
        # eval materialized both source objects
        nt.assert_equal(len(aranger.cache), 2)

    def test_hashable(self):
        # manifests can be used as dict keys
        source = "d * string_test"
        context = {
            'd': 13,
            'string_test': 'string_test'
        }
        expr = Expression(source)
        exec_context = ExecutionContext(context)
        manifest = Manifest(expr, exec_context)
        d = {}
        d[manifest] = manifest #hashable
        key = tuple([manifest.expression, manifest.context])
        # test key
        nt.assert_in(key, d)
        # a feature is being able to check expression.key for cases
        # where we don't have the source and just the stable key
        stable_key = tuple([expr.key, manifest.context])
        nt.assert_in(stable_key, d)

    def test_stateless(self):
        """
        stateless-ness of Manifest depends on context
        """
        source = "d * string_test"
        context = {
            'd': 13,
            'string_test': 'string_test'
        }
        expr = Expression(source)
        exec_context = ExecutionContext(context)
        manifest = Manifest(expr, exec_context)
        nt.assert_equal(manifest.stateless, True)
        # an arbitrary object() in the context makes the manifest stateful
        context = {
            'd': 13,
            'string_test': object(),
        }
        expr = Expression(source)
        exec_context = ExecutionContext.from_ns(context)
        manifest = Manifest(expr, exec_context)
        nt.assert_equal(manifest.stateless, False)
def test_fragment():
    """
    This is a failing test atm. What I want is the ability to take two manifests
    and see whether one is within the other.

    A couple of notes. The sub-expression itself would obviously need to
    match. Within each sub expression, you can have a subset of execution
    contexts. It is that subset that needs to match.

    Manifest 1:
        Expression:
            arr1 + np.log(arr2)
        ExecutionContext:
            arr1 = np.random(10)
            arr2 = np.arange(10)

    Manifest 2:
        Expression:
            np.log(arr1)
        ExecutionContext:
            arr1 = np.arange(10)

    Here Manifest 2 should be considered a subset of Manifest 1, provided
    that np.arange is wrapped to be stateless.

    Now, currently our hash is done via the string repr. Since `arr2` in
    Manifest 1 is `arr1` in Manifest 2, we currently wouldn't match.

    So we'd need to match the load name by value and not by name. I suppose
    one could have a modified ast_source that replaced load names with pos
    IDs.
    """
    c = 1
    df = pd.DataFrame(np.random.randn(30, 3), columns=['a', 'bob', 'c'])
    source = """pd.core.window.Rolling(np.log(df + 10), 5, min_periods=c).sum()"""
    # ns is the *live* locals() mapping (c, df, source), topped up with globals
    # so that pd/np/_manifest resolve by name inside the manifest
    ns = locals()
    ns.update({k:v for k, v in globals().items() if k not in ns})
    manifest = _manifest(source, ns)
    sub_mf = _manifest("np.log(df+10)", ns.copy())
    nt.assert_in(sub_mf, manifest)
    # new dataframe, does affect containment
    ns['df'] = pd.DataFrame(np.random.randn(30, 3), columns=['a', 'bob', 'c'])
    sub_mf = _manifest("np.log(df+10)", ns.copy())
    nt.assert_not_in(sub_mf, manifest)
    # c is changed but not part of fragment, so doesn't affect containment
    ns['c'] = 3
    manifest = _manifest(source, ns)
    sub_mf = _manifest("np.log(df+10)", ns.copy())
    nt.assert_in(sub_mf, manifest)
def test_fragment_var_name():
    """
    This should match even though the variable names are different.
    """
    c = 1
    df = pd.DataFrame(np.random.randn(30, 3), columns=['a', 'bob', 'c'])
    source = """pd.core.window.Rolling(np.log(df + 10), 5, min_periods=c).sum()"""
    # ns is the *live* locals() mapping (c, df, source), topped up with globals
    # so that pd/np/_manifest resolve by name inside the manifest
    ns = locals()
    ns.update({k:v for k, v in globals().items() if k not in ns})
    manifest = _manifest(source, ns)
    # use blah instead of df. same code.
    ns['blah'] = ns['df']
    sub_mf = _manifest("np.log(blah+10)", ns)
    nt.assert_in(sub_mf, manifest)
    # now change blah to be a different value
    ns['blah'] = 1
    sub_mf = _manifest("np.log(blah+10)", ns)
    nt.assert_not_in(sub_mf, manifest)
def test_fragment_order_of_ops():
    """
    So, in a pure math sense, you should be able to
    do this replacement:

    E1 = a + b + a + (a + b)
    S = b + a + (a + b)
    E3 = a + S
    E1 == E3

    But since in python the order of operations matters, you can't just
    treat that as a subset. (a + b) is not always the same as (b + a).

    Dumb example:

    class Bob:
        def __add__(self, other):
            return other

    a = Bob()
    b = Bob()
    nt.assert_not_equal(a + b, b + a)
    """
    # TODO, is there a way to subset when dealing with types where operations
    # are commutative?
    namespace = {'a': 1, 'b': 2}
    whole = _manifest("a + b + a + (a + b)", namespace)
    rearranged = _manifest("b + a + (a + b)", namespace)
    # rearranged operands must NOT be treated as a contained fragment
    nt.assert_not_in(rearranged, whole)
def test_manifest_partial():
    """
    Mechanism where we take a Manifest and supply a partial value via
    another Manifest.
    """
    namespace = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
    parent = _manifest("a + (c + d)", namespace)
    fragment = _manifest("(x + y)", {'x': 3, 'y': 4})
    # note: deliberately wrong value, to prove the override is actually used
    overrides = {fragment: 3}
    result = parent.eval_with(overrides, ignore_var_names=True)
    nt.assert_equal(result, 4)
    # parent unaffected
    nt.assert_equal(parent.eval(), 8)
    # fragment also unaffected
    nt.assert_equal(fragment.eval(), 7)
def test_manifest_partial_multi():
    """Partial evaluation with several substituted sub-manifests; fragments
    match the parent's sub-expressions by execution context, not by name."""
    namespace = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
    parent = _manifest("a + (c + d) + (a + b)", namespace)
    # expected to match (c + d) and (a + b) by execution context
    frag_cd = _manifest("(x + y)", {'x': 3, 'y': 4})
    frag_ab = _manifest("(x + y)", {'x': 1, 'y': 2})

    # substitute pre-computed values
    overrides = {frag_cd: frag_cd.eval(), frag_ab: frag_ab.eval()}
    nt.assert_equal(parent.eval_with(overrides, ignore_var_names=True), 11)

    # substitute the manifests themselves
    overrides = {frag_cd: frag_cd, frag_ab: frag_ab}
    nt.assert_equal(parent.eval_with(overrides, ignore_var_names=True), 11)

    # pass in only manifests (list form)
    nt.assert_equal(parent.eval_with([frag_cd, frag_ab], ignore_var_names=True), 11)
def test_eval_with_execution_count():
    """eval_with() must reuse supplied partial results instead of
    re-evaluating the matching sub-expressions (verified via op counters)."""
    class Value:
        """ value that keeps track of when it is used in ops """
        def __init__(self, value):
            self.value = value
            self.op_count = 0  # number of times this value participated in +

        def get_obj(self):
            return self.value

        def __add__(self, other):
            self.op_count += 1
            return self.value + other

        def __radd__(self, other):
            self.op_count += 1
            return self.value + other

    ns = {
        'a': Value(1),
        'b': Value(2),
        'c': Value(3),
        'd': Value(4),
        'e': Value(5)
    }
    parent = _manifest("e + (c + d) + (a + b)", ns)
    # we are expecting these to match by execution context
    sub = _manifest("(a + b)", ns)
    sub2 = _manifest("(c + d)", ns)
    items = {sub: sub.eval(), sub2: sub2.eval()}
    # a through d should have been used (once each, by the sub evals above)
    nt.assert_equal(ns['a'].op_count, 1)
    nt.assert_equal(ns['b'].op_count, 1)
    nt.assert_equal(ns['c'].op_count, 1)
    nt.assert_equal(ns['d'].op_count, 1)
    nt.assert_equal(ns['e'].op_count, 0)
    res = parent.eval_with(items)
    nt.assert_equal(res, 1+2+3+4+5)
    # make sure we did not use the Value again
    nt.assert_equal(ns['a'].op_count, 1)
    nt.assert_equal(ns['b'].op_count, 1)
    nt.assert_equal(ns['c'].op_count, 1)
    nt.assert_equal(ns['d'].op_count, 1)
    nt.assert_equal(ns['e'].op_count, 1) # gets used
    # normal non partial eval
    res = parent.eval()
    nt.assert_equal(res, 1+2+3+4+5)
    # since we did a full eval, everything got run again
    nt.assert_equal(ns['a'].op_count, 2)
    nt.assert_equal(ns['b'].op_count, 2)
    nt.assert_equal(ns['c'].op_count, 2)
    nt.assert_equal(ns['d'].op_count, 2)
    nt.assert_equal(ns['e'].op_count, 2) # gets used
def test_expanded_multi_nested_partial():
    """expand() flattens a chain of nested manifests into one expression
    whose context is the union of the leaf contexts."""
    # we are expecting these to match by execution context
    ns = {'test1':0, 'test2': 1}
    leaf = _manifest("(test1 + test2)", ns)
    ns = {'x':1, 'y': leaf}
    xy = _manifest("(x + y)", ns)
    ns = {'a': 1, 'b': xy}
    sub = _manifest("(a + b)", ns)
    parent_ns = {'e': 3, 'a': sub}
    parent = _manifest("e + a", parent_ns)
    expanded = parent.expand()
    # only non-manifest bindings survive into the expanded context
    nt.assert_count_equal(expanded.context.keys(),
                          ['a', 'e', 'x', 'test1', 'test2'])
    # nested manifests are inlined as parenthesized sub-expressions
    nt.assert_equal(expanded.expression.get_source(),
                    "(e + (a + (x + (test1 + test2))))")
    nt.assert_equal(expanded.eval(), 6)
| mit |
GrUSP/opencfp | tests/Unit/PathTest.php | 1236 | <?php
declare(strict_types=1);
/**
* Copyright (c) 2013-2019 OpenCFP
*
* For the full copyright and license information, please view
* the LICENSE file that was distributed with this source code.
*
* @see https://github.com/opencfp/opencfp
*/
namespace OpenCFP\Test\Unit;
use Localheinz\Test\Util\Helper;
use OpenCFP\Path;
use OpenCFP\PathInterface;
/**
 * Unit tests for {@see Path}: class shape and derived directory paths.
 */
final class PathTest extends \PHPUnit\Framework\TestCase
{
    use Helper;

    /**
     * Path must not be extendable.
     *
     * @test
     */
    public function isFinal()
    {
        $this->assertClassIsFinal(Path::class);
    }

    /**
     * Path must honour the PathInterface contract.
     *
     * @test
     */
    public function implementsPathInterface()
    {
        $this->assertClassImplementsInterface(PathInterface::class, Path::class);
    }

    /**
     * uploadPath() appends web/uploads to the base path.
     *
     * @test
     */
    public function uploadPathReturnsUploadPath()
    {
        $path = new Path('/home/folder/base');

        $this->assertSame(
            '/home/folder/base/web/uploads',
            $path->uploadPath()
        );
    }

    /**
     * assetsPath() appends web/assets to the base path.
     *
     * @test
     */
    public function assetsPathReturnsAssetsPath()
    {
        $path = new Path('/home/folder/base');

        $this->assertSame(
            '/home/folder/base/web/assets',
            $path->assetsPath()
        );
    }
}
| mit |
renatosalvatori/zf2DataTable | tests/Zf2datatableTest/Column/Formatter/EmailTest.php | 928 | <?php
namespace Zf2datatableTest\Column\Formatter;
use Zf2datatable\Column\Formatter;
use PHPUnit_Framework_TestCase;
/**
 * Unit tests for the e-mail column formatter.
 *
 * @group Column
 * @covers Zf2datatable\Column\Formatter\Email
 */
class EmailTest extends PHPUnit_Framework_TestCase
{
    /**
     * The formatter declares support for the jqGrid and bootstrapTable renderers.
     */
    public function testGetValidRendererNames()
    {
        $formatter = new Formatter\Email();

        $this->assertEquals(array(
            'jqGrid',
            'bootstrapTable'
        ), $formatter->getValidRendererNames());
    }

    /**
     * A cell value is wrapped in a mailto: anchor.
     */
    public function testGetFormattedValue()
    {
        $col = $this->getMockForAbstractClass('Zf2datatable\Column\AbstractColumn');
        $col->setUniqueId('myCol');

        $formatter = new Formatter\Email();
        $formatter->setRowData(array(
            'myCol' => 'name@example.com'
        ));

        $this->assertEquals('<a href="mailto:name@example.com">name@example.com</a>', $formatter->getFormattedValue($col));
    }
}
| mit |
horazont/xmpp-crowd | lcdencode/__init__.py | 203 | import codecs
from . import HD44780A00
# Registry of supported LCD character-ROM encodings: name -> codec entry.
mapping = {
    "hd44780a00": HD44780A00.getregentry()
}

def search_function(encoding):
    """codecs search hook: return the codec entry for *encoding*, or None."""
    return mapping.get(encoding, None)

# Hook the table into the standard codecs machinery so the encodings above
# are usable via str.encode()/codecs.lookup().
codecs.register(search_function)
| mit |
adriancmiranda/describe-type | internal/each.js | 620 | /* eslint-disable no-restricted-syntax */
const arraylike = require('../is/arraylike/arraylike.js');
const eachProperty = require('./eachProperty.js');
const eachValue = require('./eachValue.js');
/**
*
* @function
* @param {any} value
* @param {Function} cmd
* @param {Object} context
* @param {Boolean} keepReverseOrGetInheritedProps
* @returns {?}
*/
module.exports = function each(value, cmd, context, keepReverseOrGetInheritedProps) {
if (arraylike(value)) return eachValue(value, cmd, context, keepReverseOrGetInheritedProps);
return eachProperty(value, cmd, context, keepReverseOrGetInheritedProps);
}
| mit |
diewland/ssbwear | js/slot.js | 6229 | function slots_to_arr(txt){
var d = txt.split('|');
var dd = d[0].split('-');
if(d[1]){
dd.push(d[1])
}
return dd;
}
function get_slot_info(txt){
	// flat items: everything before and after '|' joined into one code list
	var halves = txt.split('|');
	var codes = halves[0].split('-');
	if (halves[1]) {
		codes = codes.concat(halves[1].split('-'));
	}
	// return craft data — one descriptor object per slot code
	return codes.map(function(code, idx){
		// start out as a blank/removable slot
		var slot = {
			raw_code: code,
			raw_code0: code, // shape code
			removable: true,
		};
		var parsed = get_data_from_raw_code(code);
		if (parsed.lv) {
			var known = SLOTS[code];
			if (known) {
				// a recognized stone occupies this slot
				slot.removable = false;
				slot.img_code = parsed.img_code;
				slot.title = known.title;
				slot.desc = known.desc;
				slot.type = parsed.type;
				slot.lv = parsed.lv;
			}
		}
		return slot;
	});
}
function get_data_from_raw_code(raw_code){
	// stones that keep a distinct image per level (no artwork reuse)
	var limited_stones = [
		// single
		'AVG', 'KOH', 'EFR', 'SOG', 'HWL', 'HRF', 'VSP', 'TMV', 'MGR', 'WLW', 'SSL', 'SPB', 'GTT',
		// party
		'STB', 'LSG', 'WSM', 'SMR', 'IVT', 'DVS',
	];
	var pieces = raw_code.split('_');
	var kind = pieces[0];
	var level = pieces[1] * 1;
	var img_lv;
	if (level > 10) { // item class 2 lv.6
		img_lv = level - 5;
	} else if (limited_stones.indexOf(kind) != -1) {
		img_lv = level; // limited stone: dedicated image for each level
	} else {
		// reuse image for non-limited stones (artwork cycles through 1..5)
		img_lv = level % 5 == 0 ? 5 : level % 5;
	}
	return {
		type: kind,
		lv: level,
		img_code: kind + '_' + img_lv,
	};
}
function get_slots_by_type(type){
	// Collect every known stone (from the SLOTS catalog) of the given shape,
	// returned as removable slot descriptors.
	var matches = [];
	$.each(SLOTS, function(code, info){
		if (info.type != type) {
			return; // skip stones of other shapes
		}
		var parsed = get_data_from_raw_code(code);
		matches.push({
			title: info.title,
			desc: info.desc,
			type: parsed.type,
			lv: parsed.lv,
			raw_code: code,
			raw_code0: type,
			img_code: parsed.img_code,
			removable: true,
		});
	});
	return matches;
}
function gen_txt_slots_by_arr(rr){
	// Inverse of slots_to_arr: join codes with '-', except the 6th slot
	// (index 5, the +15 slot) which is appended after a '|' separator.
	var out = '';
	for (var i = 0; i < rr.length; i++) {
		if (i == 0) {
			out = rr[i];
		} else if (i == 5) { // +15
			out += '|' + rr[i];
		} else {
			out += '-' + rr[i];
		}
	}
	return out;
}
// Renders the slot string `txt` as HTML icons wrapped in a <span> whose
// onclick alerts the collected stone titles/descriptions.
// `brave_lv` (0/15/20) controls how many slots are visible: 5 standard slots,
// plus a 6th at +15 and a 7th at +20 (extras shown in parentheses).
function gen_slots(txt, brave_lv){
	var brave_lv = brave_lv || 0;
	var html = '';        // accumulated slot icon markup
	var msgs = [];        // stone info collected for the alert() popup
	var infos = get_slot_info(txt);
	// slots related with bravanization
	if(brave_lv < 15){ // 5 standard slots
		infos = infos.slice(0, 5);
	}
	else if(brave_lv == 15){ // add 6th slot
		infos = infos.slice(0, 6);
	}
	else if(brave_lv == 20){ // add 7th slot
		infos = infos.slice(0, 7);
	}
	$(infos).each(function(i, r){
		var html_slot = '';
		if(r.img_code){ // fix el
			// TODO linked stones issue
			msgs.push({ title: r.title, desc: r.desc, type: r.type, lv: r.lv, });
			/*
			if(i == 0){ // first el
				msgs.push({ title: r.title, desc: r.desc, type: r.type, lv: r.lv, });
			}
			else if((r.type == infos[i-1].type)&&( r.type != 'EFR' )) { // linked stones
				var pindex = msgs.length-1;
				var p_title = msgs[pindex].title;
				var p_lv = msgs[pindex].lv;
				var cur_lv = p_lv + r.lv;
				msgs[pindex].title = p_title.replace(p_lv, cur_lv);
				msgs[pindex].desc = SLOTS[ r.type + '_' + (cur_lv+2)].desc;
				msgs[pindex].lv = cur_lv;
				html += "=";
			}
			else {
				msgs.push({ title: r.title, desc: r.desc, type: r.type, lv: r.lv, });
			}
			*/
			html_slot = "<div class='icon2' style='background-image: url(./img/el/" + r.img_code + ".png);'></div>";
		}
		else { // blank slot
			html_slot = "<img src='./img/sl/" + r.raw_code + ".png' class='icon3' />";
		}
		// add bracket for hidden slot
		if(i == 5){ // 6th slot
			html_slot = '(' + html_slot;
		}
		else if(i == 6){ // 7th slot
			html_slot = html_slot + ')';
		}
		html += html_slot;
	});
	// unknown extra slots + close bracket
	if(brave_lv == 15){
		if(infos.length == 5){
			html += "(?)";
		}
		if(infos.length == 6){
			html += ")";
		}
	}
	else if(brave_lv == 20){
		if(infos.length == 5){
			html += "(??)";
		}
		if(infos.length == 6){
			html += "?)";
		}
	}
	// gen message
	var gen_msg = '';
	$(msgs).each(function(i, r){
		gen_msg += r.title + "\\n( " + r.desc + " )\\n\\n";
	});
	gen_msg = gen_msg.replace(/'/g, "\\x27"); // fix single quote problem
	return "<span onclick='alert(\"" + gen_msg + "\");'>" + html + "</span>";
}
function find_stones_by(slots_txt, field, value){
	// Case-insensitive regex search over one field of every slot descriptor.
	var pattern = new RegExp(value, 'i');
	return get_slot_info(slots_txt).filter(function(stone){
		if (stone[field] == null) {
			return false; // blank slots have no title/desc/type fields
		}
		return stone[field].match(pattern) != null;
	});
}
// Convenience wrapper: stones whose type code matches `value` (case-insensitive).
function find_stones_by_type(slots_txt, value){
	return find_stones_by(slots_txt, 'type', value);
}

// Convenience wrapper: stones whose description matches `value` (case-insensitive).
function find_stones_by_desc(slots_txt, value){
	return find_stones_by(slots_txt, 'desc', value);
}
function get_quick_lv_from_desc(desc){
	// Map a "Quick" stone description phrase to its level (0 if none found).
	// Ordered strongest-first; the first phrase contained in `desc` wins.
	var phrases = [
		['Turn comes ludicrously quicker', 6],
		['Turn comes massively quicker', 5],
		['Turn comes much sooner', 4],
		['Turn comes sooner', 3],
		['Turn comes a bit sooner', 2],
		['Turn comes a little bit sooner', 1]
	];
	for (var i = 0; i < phrases.length; i++) {
		if (desc.indexOf(phrases[i][0]) > -1) {
			return phrases[i][1];
		}
	}
	return 0;
}
| mit |
Offroadcode/umbraco-backoffice-visualization | BackOfficeVisualiser/Umbraco/BackOfficeVisualiser/Models/CompositionModel.cs | 209 | namespace BackOfficeVisualiser.Models
{
public class CompositionModel
{
public string Name { get; set; }
public string Alias { get; set; }
public int Id { get; set; }
}
} | mit |
CacheMoneyDB/CacheMoneyApp | lib/auth/ensure-auth.js | 743 | const tokenSvc = require('./token');
module.exports = function ensureAuth() {
return function ensureAuth(req, res, next) {
const authHeader = req.headers.authorization;
if(!authHeader) {
return next({code: 400, error: 'unauthorized, no token provided'});
};
const [bearer, jwt] = authHeader.split(' ');
if(bearer !== 'Bearer' || !jwt){
return next({code: 400, error:'unauthorized, invalid token'});
};
tokenSvc.verify(jwt)
.then(payload => {
req.user = payload;
next();
})
.catch(err => {
next({code: 400, error: 'unauthorized, invalid token'});
});
};
};
| mit |
oliviertassinari/material-ui | packages/mui-material/src/OutlinedInput/OutlinedInput.test.js | 1482 | import * as React from 'react';
import { expect } from 'chai';
import { createClientRender, describeConformance } from 'test/utils';
import OutlinedInput, { outlinedInputClasses as classes } from '@mui/material/OutlinedInput';
import InputBase from '@mui/material/InputBase';
describe('<OutlinedInput />', () => {
	const render = createClientRender();

	// Standard MUI conformance suite: classes, inheritance, ref type,
	// style overrides; componentProp/componentsProp checks are skipped.
	describeConformance(<OutlinedInput />, () => ({
		classes,
		inheritComponent: InputBase,
		render,
		refInstanceof: window.HTMLDivElement,
		muiName: 'MuiOutlinedInput',
		testDeepOverrides: { slotName: 'input', slotClassName: classes.input },
		testVariantProps: { variant: 'contained', fullWidth: true },
		testStateOverrides: { prop: 'size', value: 'small', styleKey: 'sizeSmall' },
		skip: ['componentProp', 'componentsProp'],
	}));

	it('should render a NotchedOutline', () => {
		const { container } = render(
			<OutlinedInput classes={{ notchedOutline: 'notched-outlined' }} />,
		);

		expect(container.querySelector('.notched-outlined')).not.to.equal(null);
	});

	// custom classes must be forwarded down to the underlying InputBase
	it('should forward classes to InputBase', () => {
		render(<OutlinedInput error classes={{ error: 'error' }} />);

		expect(document.querySelector('.error')).not.to.equal(null);
	});

	// componentsProps.root should be spread onto the root element
	it('should respects the componentsProps if passed', () => {
		render(<OutlinedInput componentsProps={{ root: { 'data-test': 'test' } }} />);

		expect(document.querySelector('[data-test=test]')).not.to.equal(null);
	});
});
| mit |
in-depth/indepth-demo | webpack.server.js | 2313 | require('dotenv').config()
const path = require('path')
const Webpack = require('webpack')
const nodeExternals = require('webpack-node-externals')
const postcssConfig = require('./postcss.config')
const nodeModulesPath = path.resolve(__dirname, 'node_modules')
const sourcePath = path.resolve(__dirname, 'src')

// Webpack configuration for the server-side (SSR) production bundle.
const config = {
  target: 'node',
  // Keep node_modules out of the bundle, but still bundle stylesheets and
  // react-toolbox so the CSS-module pipeline runs on the server build too.
  externals: [nodeExternals({
    importType: 'commonjs',
    modulesDir: nodeModulesPath,
    modulesFromFile: false,
    whitelist: [/\.scss$/, /\.css$/, /react-toolbox/],
  })],
  entry: path.resolve(__dirname, 'src/server/server.js'),
  output: {
    path: path.resolve(__dirname, 'server'),
    filename: 'index.js',
    publicPath: '/',
  },
  resolve: {
    extensions: ['', '.js', '.jsx', '.css', '.scss'],
  },
  module: {
    loaders: [
      {
        test: /\.js$/,
        include: [sourcePath, /react-toolbox/],
        loader: 'babel-loader',
        query: {
          cacheDirectory: false,
          presets: ['es2015', 'es2016', 'stage-2', 'react'],
          plugins: [
            'babel-plugin-transform-object-rest-spread',
            'babel-plugin-transform-class-properties',
            'transform-class-properties',
            // turn CSS-module imports into plain objects on the server
            ['css-modules-transform', {
              extensions: ['.css', '.scss'],
              generateScopedName: '[name]__[local]___[hash:base64:5]',
            }],
          ],
        },
      },
      {
        test: /\.css$/,
        exclude: /node_modules/,
        loader: 'css?modules&importLoaders=1&localIdentName=[name]__[local]___[hash:base64:5]!postcss',
      },
      {
        test: /\.scss$/,
        include: /react-toolbox/,
        loader: 'css?modules&importLoaders=1&localIdentName=[name]__[local]___[hash:base64:5]!postcss!sass',
      },
    ],
  },
  plugins: [
    // bake NODE_ENV/PORT into the bundle at build time
    new Webpack.DefinePlugin({ 'process.env': {
      NODE_ENV: JSON.stringify('production'),
      PORT: JSON.stringify(process.env.PORT),
    } }),
    new Webpack.optimize.UglifyJsPlugin({
      sourceMap: true,
      compressor: { screw_ie8: true, keep_fnames: true, warnings: false },
      mangle: { screw_ie8: true, keep_fnames: true },
    }),
  ],
  postcss: () => postcssConfig,
  sassLoader: {
    // NOTE(review): '.qscss' extension looks unusual — confirm it is intentional
    data: '@import "shared/styles/main.qscss";',
    includePaths: [sourcePath],
  },
}

module.exports = config
stxnext/intranet-open | src/intranet3/intranet3/helpers.py | 8082 | import os
import json
import time
import math
import tempfile
import datetime
from dateutil.relativedelta import relativedelta
from urllib import quote_plus
from decimal import Decimal, ROUND_UP
try:
from PIL import Image
except ImportError:
import Image
from gdata.spreadsheet import text_db
from gdata.service import RequestError
from intranet3.priorities import PRIORITIES
from intranet3.log import FATAL_LOG, EXCEPTION_LOG, INFO_LOG
LOG = INFO_LOG(__name__)
EXCEPTION = EXCEPTION_LOG(__name__)
FATAL = FATAL_LOG(__name__)
# Canonical sets of loosely-typed values treated as boolean true/false
# (note: the *string* 'None' counts as negative as well).
positive_values = (True, 1, 1.0, '1', 'True', 'true', 't')
negative_values = (False, None, 0, 0.0, '0', 'False', 'false', 'f', 'None')
def dates_between(start, end):
    """Yield every date from *start* through *end* inclusive.

    Yields nothing when start > end.
    """
    one_day = datetime.timedelta(days=1)
    current = start
    while current <= end:
        yield current
        current += one_day
def sorting_by_severity(a, b):
    """cmp()-style comparator ordering items by severity rank (Python 2 only).

    Unknown severities fall back to rank 5; ties are broken by comparing
    "rank + id" strings so the ordering is deterministic.
    """
    a_idx = PRIORITIES.get(a.severity.lower(), 5)
    b_idx = PRIORITIES.get(b.severity.lower(), 5)
    compare = cmp(a_idx, b_idx)
    if compare == 0:
        # hack to always keep same order
        a_idx = str(a_idx) + str(a.id)
        b_idx = str(b_idx) + str(b.id)
        compare = cmp(a_idx, b_idx)
    return compare

def sorting_by_priority(a, b):
    """Same as sorting_by_severity, but ranks by the .priority attribute."""
    a_idx = PRIORITIES.get(a.priority.lower(), 5)
    b_idx = PRIORITIES.get(b.priority.lower(), 5)
    compare = cmp(a_idx, b_idx)
    if compare == 0:
        # hack to always keep same order
        a_idx = str(a_idx) + str(a.id)
        b_idx = str(b_idx) + str(b.id)
        compare = cmp(a_idx, b_idx)
    return compare
class SpreadsheetConnector(object):
    """Thin wrapper around the Google Spreadsheets ``text_db`` client."""

    def __init__(self, email, password):
        # authenticate the gdata text_db client with plain credentials
        self.client = text_db.DatabaseClient(email, password)

    def get_worksheet(self, spreadsheet_key, number):
        """Return the table at index *number* of the first database
        matching *spreadsheet_key*."""
        database = self.client.GetDatabases(spreadsheet_key)[0]
        return database.GetTables()[number]
def previous_day(date):
    """Return the day before *date*."""
    return day_offset(date, -1)

def next_day(date):
    """Return the day after *date*."""
    return day_offset(date, +1)
def day_offset(date, n):
    """Return *date* shifted by *n* days (*n* may be negative)."""
    return date + datetime.timedelta(days=n)
# One calendar month, via dateutil's relativedelta.
MONTH_DELTA = relativedelta(months=1)

def previous_month(date):
    """Return *date* shifted one calendar month back."""
    return date - MONTH_DELTA

def next_month(date):
    """Return *date* shifted one calendar month forward."""
    return date + MONTH_DELTA

def next_quarter(date):
    """Return *date* shifted three calendar months forward."""
    return date + relativedelta(months=3)
def start_end_month(date=None):
    """Return (first_day, last_day) of the month containing *date*.

    Defaults to today's month when *date* is falsy.
    """
    if not date:
        date = datetime.date.today()
    month_start = datetime.date(date.year, date.month, 1)
    # last day = day before the first day of the following month
    month_ends = previous_day(month_start + MONTH_DELTA)
    return month_start, month_ends
# Maximum number of attempts made by trier() below.
MAX_TRIES = 9
# calculate how many seconds to wait after n-th try
# (tries 1-3 wait 1s, 4-6 wait 10s, 7-9 wait 100s)
wait_time = lambda n: 10 ** math.floor((n - 1) / 3)
def trier(func, doc=u''):
    """
    Repeats a callback MAX_TRIES times with increasing time intervals (1s, 10s, 100s, 1000s).
    RequestErrors are causing another try to be performed.
    The MAX_TRIES-th unsuccessful try causes the RequestError to be raised up from the function.
    """
    i = 1
    while True:
        try:
            result = func()
        except (AssertionError, RequestError), e:  # Python 2 except syntax
            EXCEPTION(u'Error while trying function %s (%s/%s try)' % (doc, i, MAX_TRIES))
            if isinstance(e, RequestError) and e.message.get('status') == 404: # no sense in retrying 404
                raise
            # NOTE(review): sleeps before checking the try budget, so the
            # final failure still pays one wait before re-raising below.
            time.sleep(wait_time(i))
            i += 1
            if i > MAX_TRIES:
                FATAL(u'Unable to execute function %s in %s tries' % (doc, MAX_TRIES))
                raise
        except:
            # Any other exception is fatal immediately — no retry.
            EXCEPTION(u"Unknown exception while trying function %s (%s/%s try)" % (doc, i, MAX_TRIES))
            FATAL(u"Unable to execute function due to unexpected error")
            raise
        else:
            LOG(u'Managed to execute function %s (%s/%s try)' % (doc, i, MAX_TRIES))
            return result
def decoded_dict(d, encoding='utf-8'):
    """Return a copy of ``d`` with every value decoded from ``encoding``.

    Python 2 only (``iteritems``); values are expected to be byte strings.
    Keys are kept as-is.
    """
    result = {}
    for k, v in d.iteritems():
        result[k] = v.decode(encoding)
    return result
def Converter(**kwargs):
    """
    Returns a function that converts a dictionary by re-assigning keys according
    to the mapping given in the params.
    The mapping can map key -> key or key -> function
    For example:
    >>> converter = Converter(a='b', b=lambda d: d['e'] + d['f'])
    >>> converter({'a': 1, 'b': 2, 'e': 3, 'f': 4})
    {'a': 2, 'b': 7}
    """
    # Missing source keys default to ''; callables receive the whole dict.
    # Python 2 only (``iteritems``).
    return lambda d: dict((k, v(d) if callable(v) else d.get(v, '')) for (k, v) in kwargs.iteritems())
# serializes keyword arguments into URL query string
# Python 2 only (``unicode``/``basestring``). String values become a single
# ``key=value`` pair; iterable values expand into repeated pairs.
# NOTE(review): pairs are joined with '&' but nothing is inserted between
# ``prefix`` and the first pair — callers presumably pass a prefix already
# ending in '?' or '&'; confirm.
serialize_url = lambda prefix, **kwargs: prefix.encode('utf-8') + '&'.join(
    ('%s=%s' % (k, quote_plus(v.encode('utf-8') if isinstance(v, unicode) else v)))
    if isinstance(v, basestring)
    else ('&'.join('%s=%s' % (k, quote_plus(p.encode('utf-8') if isinstance(p, unicode) else p)) for p in v))
    for (k, v) in kwargs.iteritems()
)
def format_time(value):
    """Format a fractional-hours value as ``"H:MM"``.

    The input is rounded *up* to two decimal places first, then split into
    whole hours and minutes, e.g. ``1.5`` -> ``"1:30"``.
    """
    # Round up to 2 decimals before converting to minutes.
    total_minutes = Decimal(str(value)).quantize(Decimal('.01'), rounding=ROUND_UP) * 60
    hours = total_minutes // 60
    minutes = total_minutes - hours * 60
    return "%d:%02d" % (hours, round(minutes))
def get_mem_usage():
    """ Get memory usage for current process """
    # Shells out to ``ps`` and parses its RSS column (POSIX only; the unit
    # is whatever ps reports, typically KiB).
    import os, subprocess
    pid = os.getpid()
    process = subprocess.Popen("ps -orss= %s" % pid, shell=True, stdout=subprocess.PIPE)
    out, _err = process.communicate()
    return int(out)
def image_resize(source,type, width = 100,height = 100):
    """Scale raw image bytes to ``width`` x ``height``.

    :param source: raw image file contents.
    :param type: ``'t'`` for a bounding-box thumbnail; anything else uses
        the center-cropped smart scale.
    """
    s = ImageScaler(source)
    if type == 't':
        return s.thumb(width,height)
    else:
        return s.smart_scale(width,height)
class ImageScaler():
    """Scales raw image bytes with PIL, using a temp file as scratch space."""

    def __init__(self,source):
        # source: raw image file contents.
        self.source = source
        self.out = '';

    def _tmp(self):
        # Dump the source bytes into a fresh temp file, return its path.
        # NOTE(review): opened in text mode ('w') although image data is
        # binary — likely corrupts images on some platforms; confirm and
        # switch to 'wb'/'rb'.
        _,file = tempfile.mkstemp(prefix='image-')
        with open(file,'w') as f:
            f.write(self.source)
        return file

    def _img(self,file):
        # Load the temp file as a PIL image.
        return Image.open(file);

    def _out(self,file,img):
        # Save ``img`` back over the temp file as PNG, read the bytes out
        # and delete the file. Returns '' when the file vanished.
        out = ''
        if os.path.exists(file):
            img.save(file,'PNG')
            with open(file,'r') as f:  # NOTE(review): text mode again
                out = f.read()
            os.remove(file)
        return out

    def smart_scale(self,width,height):
        """Scale so the image covers width x height, then center-crop."""
        file = self._tmp()
        img = self._img(file)
        #img.crop()
        x,y = img.size
        # Smallest ratio wins so both target dimensions are covered.
        dx = float(x)/float(width)
        dy = float(y)/float(height)
        r = min(dx,dy)
        xr = int(x/r)
        yr = int(y/r)
        img = img.resize((xr,yr), Image.ANTIALIAS)
        # Center-crop down to the requested box.
        x1 = max(0,(xr-width)/2)
        y1 = max(0,(yr-height)/2)
        x2 = width+x1
        y2 = height+y1
        img = img.crop((x1,y1,x2,y2))
        return self._out(file,img)

    def crop(self,size):
        """Crop to ``size`` (a 4-tuple box) and return the PNG bytes.

        NOTE(review): ``img2.show()`` opens an external viewer — probably
        leftover debugging; confirm before relying on this in production.
        """
        file = self._tmp()
        img = self._img(file)
        img2 = img.crop(size)
        img2.show()
        return self._out(file,img2)

    def thumb(self,width,height):
        """Shrink in place to fit within width x height, keeping aspect."""
        file = self._tmp()
        img = self._img(file)
        img.thumbnail((width,height))
        return self._out(file, img)
def make_path(*args):
    """Join path segments after stripping leading/trailing slashes.

    Non-string segments are converted with ``str`` first.
    """
    parts = [str(arg).strip('/') for arg in args]
    return os.path.join(*parts)
def groupby(a_list, keyfunc=lambda x: x, part=lambda x: x):
    """Group ``a_list`` into ``{keyfunc(e): [part(e), ...]}``.

    ``None`` parts are dropped, but the key's (possibly empty) bucket is
    still created.
    """
    grouped = {}
    for element in a_list:
        piece = part(element)
        bucket = grouped.setdefault(keyfunc(element), [])
        if piece is not None:
            bucket.append(piece)
    return grouped
def partition(items, max_count):
    """ Partition a list of items into portions no larger than max_count """
    # Number of portions needed; strided slicing deals items out
    # round-robin, so portion sizes differ by at most one.
    portions = int(math.ceil(float(len(items)) / max_count))
    for p in xrange(portions):  # xrange: Python 2
        yield items[p::portions]
def get_working_days(date_start, date_end):
    """Count non-holiday days between ``date_start`` and ``date_end``,
    inclusive. Returns 0 for an empty range (start after end).

    NOTE(review): whether weekends are excluded depends entirely on
    ``Holiday.is_holiday`` — confirm its semantics.
    """
    from intranet3.models import Holiday
    if date_start > date_end:
        return 0
    # Fetch the holiday list once; is_holiday checks against it per day.
    holidays = Holiday.all()
    date = date_start
    diff = datetime.timedelta(days=1)
    days = 0
    while date <= date_end:
        if not Holiday.is_holiday(date, holidays=holidays):
            days += 1
        date += diff
    return days
def json_dumps_default(obj):
    """``json.dumps`` ``default`` hook: datetimes become ISO-8601 strings.

    Any other type is delegated to the stock encoder, which raises
    ``TypeError`` for unsupported values.
    """
    if isinstance(obj, datetime.datetime):
        return obj.isoformat()
    return json.JSONEncoder().default(obj)
| mit |
freeslugs/eventum | app/models/Event.py | 13295 | """
.. module:: Event
:synopsis: An event database model.
.. moduleauthor:: Dan Schlosser <dan@danrs.ch>
"""
from flask import url_for
from mongoengine import ValidationError
from app import client_config, db
from app.models.fields import DateField, TimeField
import markdown
from datetime import datetime, timedelta
now = datetime.now
class Event(db.Document):
"""The object that represents an individual event in Mongoengine.
Recurring events also have a :class:`~app.models.EventSeries` instance that
connects them to the other events in the series.
:ivar date_created: :class:`mongoengine.fields.DateTimeField` - The date
that the event object was created.
:ivar date_modified: :class:`mongoengine.fields.DateTimeField` - The last
date the event was modified.
:ivar title: :class:`mongoengine.fields.StringField` - The title of the
event.
:ivar creator: :class:`mongoengine.fields.ReferenceField` - The User that
created the event.
:ivar location: :class:`mongoengine.fields.StringField` - The event's
location.
:ivar slug: :class:`mongoengine.fields.StringField` - The URL slug
associated with the event. **Note:** appending the slug to the base
path for events will not always yield the functioning URL for the
event, because recurring events have indexes appended to the url. User
:func:`get_absolute_url` always.
:ivar start_date: :class:`DateField` - The date the event starts.
:ivar end_date: :class:`DateField` - The date the event ends
:ivar start_time: :class:`TimeField` - The time the event starts
:ivar end_time: :class:`TimeField` - The time the event ends
:ivar short_description: :class:`mongoengine.fields.StringField` - The HTML
short description of the event.
:ivar long_description: :class:`mongoengine.fields.StringField` - The HTML
long description of the event.
:ivar short_description_markdown: :class:`mongoengine.fields.StringField` -
The markdown short description of the event.
:ivar long_description_markdown: :class:`mongoengine.fields.StringField` -
The markdown long description of the event.
:ivar published: :class:`mongoengine.fields.BooleanField` - True if the
event is published.
:ivar date_published: :class:`mongoengine.fields.DateTimeField` - The date
that the event was published.
:ivar is_recurring: :class:`mongoengine.fields.BooleanField` - True if the
event is recurring.
:ivar parent_series: :class:`mongoengine.fields.ReferenceField` - The
:class:`~app.models.EventSeries` object that holds the recurrence info
for an event, if it is recurring.
:ivar image: :class:`mongoengine.fields.ReferenceField` - The headline
image for the event.
:ivar facebook_url: :class:`mongoengine.fields.StringField` - The URL to
the Facebook event associated with this event.
:ivar gcal_id: :class:`mongoengine.fields.StringField` - The ID for this
event on Google Calendar. In Google Calendar API responses, this is
stored asthe ``id`` field for events. If this field is None, then we
never got a proper response from Google Calendar when (if) we made a
request to create it there. It most likely does not exist on Google
Calendar.
:ivar gcal_sequence: :class:`mongoengine.fields.IntField` - The sequence
number for the event, used by Google Calendar for versioning.
"""
# MongoEngine ORM metadata
meta = {
'allow_inheritance': True,
'indexes': ['start_date', 'creator'],
'ordering': ['-start_date']
}
date_created = db.DateTimeField(required=True, default=now)
date_modified = db.DateTimeField(required=True, default=now)
title = db.StringField(required=True, max_length=255)
creator = db.ReferenceField("User", required=True)
location = db.StringField()
slug = db.StringField(required=True, max_length=255)
start_date = DateField()
end_date = DateField()
start_time = TimeField()
end_time = TimeField()
short_description = db.StringField()
long_description = db.StringField()
short_description_markdown = db.StringField()
long_description_markdown = db.StringField()
published = db.BooleanField(required=True, default=False)
date_published = db.DateTimeField()
is_recurring = db.BooleanField(required=True, default=False)
parent_series = db.ReferenceField("EventSeries")
image = db.ReferenceField("Image")
facebook_url = db.StringField()
gcal_id = db.StringField()
gcal_sequence = db.IntField()
def get_absolute_url(self):
"""Returns the URL path that points to the client-facing version of
this event.
:returns: A URL path like ``"/events/cookies-and-code"``.
:rtype: str
"""
if self.is_recurring:
return url_for('client.recurring_event', slug=self.slug, index=self.index)
return url_for('client.event', slug=self.slug)
def image_url(self):
"""Returns the URL path that points to the image for the event.
:returns: The URL path like ``"/static/img/cat.jpg"``.
:rtype: str
"""
if self.image:
return self.image.url()
return url_for('static', filename=client_config['DEFAULT_EVENT_IMAGE'])
@property
def index(self):
"""Represents the index of this event in it's parent
:class:`~app.models.EventSeries`. Returns ``None`` if the event is not
recurring.
:returns: The index of the event in it's series.
:rtype: int
"""
if not self.is_recurring:
return
return self.parent_series.events.index(self)
def clean(self):
"""Called by Mongoengine on every ``.save()`` to the object.
Updates date_modified, renders the markdown into the HTML fields, and
validates datetimes to ensure the event ends after it starts.
:raises: :class:`wtforms.validators.ValidationError`
"""
self.date_modified = now()
if self.short_description_markdown:
self.short_description = markdown.markdown(self.short_description_markdown,
['extra', 'smarty'])
if self.long_description_markdown:
self.long_description = markdown.markdown(self.long_description_markdown,
['extra', 'smarty'])
if (self.start_date and
self.end_date and
self.start_date > self.end_date):
raise ValidationError("Start date should always come before end "
"date. Got (%r,%r)" % (self.start_date,
self.end_date))
# Check times against None, because midnight is represented by 0.
if (self.start_date == self.start_time and
self.start_time is not None and
self.end_time is not None and
self.start_time > self.end_time):
raise ValidationError("Start time should always come before end "
"time. Got (%r,%r)" % (self.start_time,
self.end_time))
def start_datetime(self):
"""A convenience method to combine ``start_date`` and ``start_time``
into one :class:`datetime`.
:returns: The combined datetime, or ``None` if ``start_date`` or
``start_time`` are ``None``.
:rtype: :class:`datetime`.
"""
# Check times against None, because midnight is represented by 0.
if self.start_date is None or self.start_time is None:
return None
return datetime.combine(self.start_date, self.start_time)
def end_datetime(self):
"""A convenience method to combine ``end_date`` and ``end_time``
into one :class:`datetime`.
:returns: The combined datetime, or ``None` if ``end_date`` or
``end_time`` are ``None``.
:rtype: :class:`datetime`.
"""
# Check times against None, because midnight is represented by 0.
if self.end_date is None or self.end_time is None:
return None
return datetime.combine(self.end_date, self.end_time)
def id_str(self):
"""The id of this object, as a string.
:returns: The id
:rtype: str
"""
return str(self.id)
def ready_for_publishing(self):
"""Returns True if the event has all necessary fields filled out.
Necessary fields are:
- ``title``
- ``creator``
- ``location``
- ``start_datetime``
- ``end_datetime``
- ``short_description``
- ``long_description``
- ``image``
:Returns: True if we are ready for publishing.
:rtype: bool
"""
return all([
self.title,
self.creator,
self.location,
self.start_datetime(),
self.end_datetime(),
self.short_description,
self.long_description,
self.image
])
    def is_multiday(self):
        """Returns True if the event spans multiple days.

        Events that roll just past midnight (ending before 5am on the next
        day) are still treated as single-day events.

        :returns: True if the event spans multiple days.
        :rtype: bool
        """
        if self.start_date == self.end_date:
            return False
        # NOTE(review): dereferences ``end_time.hour`` — assumes end_time is
        # always set when the dates differ; confirm with callers.
        if self.start_date == self.end_date - timedelta(days=1) and self.end_time.hour < 5:
            return False
        return True
def human_readable_date(self):
"""Return the date of the event (presumed not multiday) formatted like:
``"Sunday, Mar 31"``.
:returns: The formatted date.
:rtype: str
"""
return self.start_date.strftime("%A, %b %d")
def human_readable_time(self):
"""Return the time range of the event (presumed not multiday) formatted
like ``"11am - 2:15pm"`` or ``"3 - 7:30pm"``.
:returns: The formatted date.
:rtype: str
"""
output = ''
if self.start_time.strftime("%p") == self.end_time.strftime("%p"):
format = "%I:%M-"
else:
format = "%I:%M%p-"
output += self.start_time.strftime(format).lstrip("0").lower()
output += self.end_time.strftime("%I:%M%p").lower().lstrip("0")
return output
def human_readable_datetime(self):
"""Format the start and end date date in one of the following three
formats:
1. ``"Sunday, March 31 11pm - Monday, April 1 3am"``
2. ``"Sunday, March 31 11am - 2:15pm"``
3. ``"Sunday, March 31 3 - 7:30pm"``
Depending on whether or not the start / end times / dates are the same.
All unkown values will be replaced by question marks.
:returns: The formatted date.
:rtype: str
"""
output = ""
if self.start_date:
output += self.start_date.strftime("%A, %B %d ") \
.replace(" 0", " ").replace("/0", "/")
else:
output += "???, ??/?? "
# Check times against None, because midnight is represented by 0.
if self.start_time is not None:
if self._start_and_end_time_share_am_or_pm():
start_format = "%I:%M-"
else:
start_format = "%I:%M%p-"
output += self.start_time.strftime(start_format).lstrip("0").lower()
else:
output += "??:?? - "
if self.end_date:
if self.start_date and self.start_date != self.end_date:
output += self.end_date.strftime("%A, %B %d ") \
.replace(" 0", " ").replace("/0", "/")
else:
output += "???, ??/?? "
# Check times against None, because midnight is represented by 0.
if self.end_time is not None:
output += self.end_time.strftime("%I:%M%p").lower().lstrip("0")
else:
output += "??:??"
return output
def _start_and_end_time_share_am_or_pm(self):
"""Returns True if the start and end times for an event are both pm or
am.
:returns: True if the start and end times for an event are both pm or
am.
:rtype: bool
"""
# Check times against None, because midnight is represented by 0.
return (self.start_time is not None and
self.end_time is not None and
self.start_time.strftime("%p") == self.end_time.strftime("%p"))
def __unicode__(self):
"""This event, as a unicode string.
:returns: The title of the event
:rtype: str
"""
return self.title
def __repr__(self):
"""The representation of this event.
:returns: The event's details.
:rtype: str
"""
return 'Event(title=%r, location=%r, creator=%r, start=%r, end=%r, ' \
'published=%r)' % (self.title, self.location, self.creator,
self.start_datetime(), self.end_datetime(),
self.published)
| mit |
korbin/co-bcrypt-native | test/index.test.js | 1669 | 'use strict';
var expect = require('chai').expect;
var bcrypt = require('../index');
describe('co-bcrypt-native', function () {
it('stores unwrapped functions', function () {
expect(bcrypt.unwrapped.genSalt).to.be.a('function');
expect(bcrypt.unwrapped.genSalt.length).to.eq(3);
expect(bcrypt.unwrapped.hash).to.be.a('function');
expect(bcrypt.unwrapped.hash.length).to.eq(3);
expect(bcrypt.unwrapped.compare).to.be.a('function');
expect(bcrypt.unwrapped.compare.length).to.eq(3);
});
context('genSalt', function () {
it('returns a thunk', function () {
var thunk = bcrypt.genSalt();
expect(thunk).to.be.a('function');
expect(thunk.length).to.eq(1);
});
it('generates a salt', function* () {
var salt = yield bcrypt.genSalt(13);
var split = salt.split('$');
//$2a$13$xxxxxxxxxxxxxxxxxxxxxx
expect(split[2]).to.eq('13');
});
});
context('hash', function () {
it('returns a thunk', function () {
var thunk = bcrypt.hash();
expect(thunk).to.be.a('function');
expect(thunk.length).to.eq(1);
});
it('generates a hash', function* () {
var hash = yield bcrypt.hash('test', 4);
expect(hash).to.not.be.empty;
});
});
context('compare', function () {
it('returns a thunk', function () {
var thunk = bcrypt.compare();
expect(thunk).to.be.a('function');
expect(thunk.length).to.eq(1);
});
it('generates a comparison', function* () {
var hash = yield bcrypt.hash('test', 4);
var comparison = yield bcrypt.compare('test', hash);
expect(comparison).to.be.true;
});
});
});
| mit |
gabzon/experiensa | components/grid.php | 1045 | <?php
//Semantic UI grid
class Grid {
    /**
     * Render a list of posts as a Semantic UI grid of linked cards.
     *
     * @param array $args   Items with 'post_link', 'title', 'thumbnail_url'
     *                      and an optional 'show_thumbnail' flag.
     * @param bool  $return When true the markup is returned; otherwise echoed.
     * @return string|void
     */
    public static function display_grid($args, $return = false){
        $html = "";
        if(!empty($args)){
            $html .= "<div class=\"ui grid\">";
            foreach($args as $item){
                $html .= "<div class=\"four wide column\">";
                $html .= "<a href=\"".$item['post_link']."\" target=\"_blank\">";
                $html .= "<div class=\"ui raised segments\">";
                $html .= "<div class=\"ui segment\">".$item['title']."</div>";
                $html .= "<div class=\"ui secondary segment\">";
                if(isset($item['show_thumbnail']) && $item['show_thumbnail']){
                    $html .= "<img height=\"150\" width=\"150\" src=\"".$item['thumbnail_url']."\">";
                } else {
                    $html .= "<img src=\"".$item['thumbnail_url']."\">";
                }
                $html .= "</div>";
                $html .= "</div>";
                $html .= "</a>";
                $html .= "</div>";
            }
            $html .= "</div>";
        }
        if($return === true){
            return $html;
        }
        echo $html;
    }
}
| mit |
zh-h/IoTApp | app/src/main/java/iotapp/applehater/cn/iotapp/SettingActivity.java | 375 | package iotapp.applehater.cn.iotapp;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
/**
 * Trivial settings screen: inflates {@code activity_setting} and sets the
 * action-bar title.
 */
public class SettingActivity extends AppCompatActivity {

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_setting);
        // Shown in the action bar / task switcher.
        setTitle("Setting");
    }
}
| mit |
vinhch/BizwebSharp | src/BizwebSharp/Entities/ClientDetail.cs | 1201 | using Newtonsoft.Json;
namespace BizwebSharp
{
    public class ClientDetail
    {
        /// <summary>
        /// The Accept-Language value reported for the client, serialized as
        /// <c>accept_language</c>. (No upstream documentation is available
        /// for this field.)
        /// </summary>
        [JsonProperty("accept_language")]
        public string AcceptLanguage { get; set; }

        /// <summary>
        /// The browser screen height in pixels, if available.
        /// </summary>
        [JsonProperty("browser_height")]
        public string BrowserHeight { get; set; }

        /// <summary>
        /// The browser IP address.
        /// </summary>
        [JsonProperty("browser_ip")]
        public string BrowserIp { get; set; }

        /// <summary>
        /// The browser screen width in pixels, if available.
        /// </summary>
        [JsonProperty("browser_width")]
        public string BrowserWidth { get; set; }

        /// <summary>
        /// A hash of the session.
        /// </summary>
        [JsonProperty("session_hash")]
        public string SessionHash { get; set; }

        /// <summary>
        /// The browser's user agent string.
        /// </summary>
        [JsonProperty("user_agent")]
        public string UserAgent { get; set; }
    }
} | mit |
basimilch/basimilch-app | test/initializers/regexp_utils_test.rb | 5786 | require 'test_helper'
# TODO: Should not be an Action Controller test, but a Unit Test instead.
class RegexpUtilsTest < ActionController::TestCase
setup do
@no_address = []
@example_com = ["a@example.com"]
@example_org = ["a@example.org"]
@gmail_com_a_base = ["a@gmail.com"]
@gmail_com_a_alias = ["a+b@gmail.com"]
@gmail_com_a = @gmail_com_a_base + @gmail_com_a_alias
@gmail_com_b = ["b@gmail.com"]
@gmail_adresses = @gmail_com_a + @gmail_com_b
@all_adresses = @example_com + @example_org + @gmail_adresses
end
# TODO: Separate this tests for a purely Regexp utils test, and properly
# test the email delivery itself with:
# "assert_no_difference 'ActionMailer::Base.deliveries.size' do..."
# Test the 4 'nil' vs empty string cases
# NOTE: Read NOTE on RecipientWhitelistInterceptor.regex_for_email_list
test "whitelist and blacklist for emails should work 1" do
assert_email_filter whitelist: nil,
blacklist: nil,
allowed_list: @all_adresses
end
test "whitelist and blacklist for emails should work 2" do
assert_email_filter whitelist: "",
blacklist: nil,
allowed_list: @no_address
end
test "whitelist and blacklist for emails should work 3" do
assert_email_filter whitelist: nil,
blacklist: "",
allowed_list: @all_adresses
end
test "whitelist and blacklist for emails should work 4" do
assert_email_filter whitelist: "",
blacklist: "",
allowed_list: @no_address
end
# Test other combinations
test "whitelist and blacklist for emails should work 5" do
assert_email_filter whitelist: "@gmail.com",
blacklist: nil,
allowed_list: @gmail_adresses
end
test "whitelist and blacklist for emails should work 6" do
assert_email_filter whitelist: "a@gmail.com",
blacklist: nil,
allowed_list: @gmail_com_a_base + @gmail_com_a_alias
end
test "whitelist and blacklist for emails should work 7" do
assert_email_filter whitelist: "@gmail.com",
blacklist: "b@gmail.com",
allowed_list: @gmail_com_a_base + @gmail_com_a_alias
end
test "whitelist and blacklist for emails should work 8" do
assert_email_filter whitelist: " @gmail.com ",
blacklist: "a@gmail.com",
allowed_list: @gmail_com_b
end
test "whitelist and blacklist for emails should work 9" do
assert_email_filter whitelist: "@example.com ",
blacklist: nil,
allowed_list: @example_com
end
test "whitelist and blacklist for emails should work 10" do
assert_email_filter whitelist: "@example.com, @example.org, a@gmail.com",
blacklist: nil,
allowed_list: @all_adresses - @gmail_com_b
end
test "whitelist and blacklist for emails should work 11" do
assert_email_filter whitelist: "@example.com,@example.org,a+b@gmail.com",
blacklist: nil,
allowed_list: @all_adresses - @gmail_com_b
end
test "whitelist and blacklist for emails should work 12" do
assert_email_filter whitelist: "@example.com, @example.org, a@gmail.com",
blacklist: "a@gmail.com",
allowed_list: @example_com + @example_org
end
test "whitelist and blacklist for emails should work 13" do
assert_email_filter whitelist: "@example.com, @example.org, a@gmail.com",
blacklist: "a+b@gmail.com",
allowed_list: @example_com + @example_org
end
test "whitelist and blacklist for emails should work 14" do
assert_email_filter whitelist: nil,
blacklist: "a+b@gmail.com",
allowed_list: @all_adresses - @gmail_com_a
end
test "whitelist and blacklist for emails should work 15" do
assert_email_filter whitelist: nil,
blacklist: "@gmail.com",
allowed_list: @all_adresses - @gmail_adresses
end
test "whitelist and blacklist for emails should work 16" do
assert_email_filter whitelist: nil,
blacklist: "@example.com, @example.org",
allowed_list: @gmail_adresses
end
# The ENV variables in Heroku might translate the spaces in non-braking
# spaces, i.e.  . Therefore we test it here. This test is like the 16 but
# using a non-breaking space (i.e. ALT+SPACE in a Mac computer).
test "whitelist and blacklist for emails should work 17" do
assert_email_filter whitelist: nil,
blacklist: "@example.com , @example.org",
allowed_list: @gmail_adresses
end
private
def assert_email_filter(whitelist: nil, blacklist: nil, allowed_list: [])
interceptor_class = RecipientWhitelistBlacklistInterceptor
wl_regexp = RegexpUtils.regex_for_email_list(whitelist)
bl_regexp = RegexpUtils.regex_for_email_list(blacklist)
assert_equal allowed_list, interceptor_class.select_allowed_addresses(
@all_adresses,
whitelist_regexp: wl_regexp,
blacklist_regexp: bl_regexp
)
end
end
| mit |
rebuy-de/kubernetes-deployment | pkg/kubeutil/deployments.go | 1973 | package kubeutil
import (
"github.com/pkg/errors"
log "github.com/sirupsen/logrus"
apps "k8s.io/api/apps/v1"
v1meta "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/client-go/kubernetes"
)
// DeploymentRolloutComplete reports whether the deployment's rollout has
// finished: updated, total and available replica counts all equal the
// desired count, and the controller has observed the current generation.
func DeploymentRolloutComplete(deployment *apps.Deployment) bool {
	logger := log.WithFields(log.Fields{
		"Namespace":          deployment.ObjectMeta.Namespace,
		"Name":               deployment.ObjectMeta.Name,
		"ResourceVersion":    deployment.ObjectMeta.ResourceVersion,
		"UpdatedReplicas":    deployment.Status.UpdatedReplicas,
		"DesiredReplicas":    *(deployment.Spec.Replicas),
		"ActualGeneration":   deployment.ObjectMeta.Generation,
		"ObservedGeneration": deployment.Status.ObservedGeneration,
	})
	// NOTE(review): dereferences Spec.Replicas without a nil check — assumes
	// callers only pass deployments fetched from the API (where it is
	// defaulted); confirm.
	if deployment.Status.UpdatedReplicas == *(deployment.Spec.Replicas) &&
		deployment.Status.Replicas == *(deployment.Spec.Replicas) &&
		deployment.Status.AvailableReplicas == *(deployment.Spec.Replicas) &&
		deployment.Status.ObservedGeneration >= deployment.Generation {
		logger.Debug("deployment is up to date")
		return true
	}
	logger.Debug("rollout still in progress")
	return false
}
// GetReplicaSetForDeployment returns the ReplicaSet backing the current
// revision of the given deployment: it must carry the same
// "deployment.kubernetes.io/revision" annotation and be owned by the
// deployment. Errors when listing fails, the deployment lacks the revision
// annotation, or no matching ReplicaSet exists.
func GetReplicaSetForDeployment(client kubernetes.Interface, deployment *apps.Deployment) (*apps.ReplicaSet, error) {
	replicaSets, err := client.
		AppsV1().
		ReplicaSets(deployment.ObjectMeta.Namespace).
		List(v1meta.ListOptions{})
	if err != nil {
		return nil, errors.Wrapf(err, "unable to list replica sets")
	}
	deploymentRevision, ok := deployment.ObjectMeta.Annotations["deployment.kubernetes.io/revision"]
	if !ok {
		return nil, errors.Errorf("deployment doesn't have a revision annotation")
	}
	for _, rs := range replicaSets.Items {
		rsRevision, ok := rs.ObjectMeta.Annotations["deployment.kubernetes.io/revision"]
		if !ok {
			// Not created by a deployment controller; skip.
			continue
		}
		if deploymentRevision != rsRevision {
			// Belongs to a different revision of some deployment; skip.
			continue
		}
		if IsOwner(deployment.ObjectMeta, rs.ObjectMeta) {
			return &rs, nil
		}
	}
	return nil, errors.Errorf("could not found replicaset for deployment")
}
| mit |
oanhnn/slim-skeleton | src/Provider/HttpCacheServiceProvider.php | 718 | <?php
/**
* This file is part of `oanhnn/slim-skeleton` project.
*
* (c) Oanh Nguyen <oanhnn.bk@gmail.com>
*
* For the full copyright and license information, please view the LICENSE.md
* file that was distributed with this source code.
*/
namespace App\Provider;
use Pimple\Container;
use Slim\HttpCache\CacheProvider;
/**
* Http cache service provider
* Require slim/http-cache ^0.3.0
*/
class HttpCacheServiceProvider extends AbstractServiceProvider
{
    /**
     * Register the slim/http-cache services on the container.
     *
     * @param Container $container
     */
    public function register(Container $container)
    {
        (new CacheProvider())->register($container);
    }
}
| mit |
kambojajs/kamboja | packages/moringa/test/integration/models/index.ts | 1043 | import { val, type } from "kamboja-foundation"
import { Document, Schema } from "mongoose"
import { mongoose } from "../../../src"
export class UserModel {
@type("string")
email: string
@type("string")
displayName: string
@type("date")
dateOfBirth: Date
@type("number")
rate: number
@type("date")
createdAt: Date
}
export class CategoryModel {
@type("string")
name: string
}
export class ItemModel {
@type("string")
name: string
@type("CategoryModel, models/index")
category: CategoryModel | Schema.Types.ObjectId
@type("UserModel, models/index")
createdBy: UserModel | Schema.Types.ObjectId
}
@mongoose.shortid()
export class ProductModel {
@type("string")
name: string
}
export class ParentProductModel{
@type("string")
name: string
@type("ProductModel, models/index")
child:ProductModel
}
export class ParentMultiChildModel{
@type("string")
name: string
@type("ProductModel[], models/index")
child:ProductModel[]
} | mit |
luozhaoyu/leetcode | n_queens.py | 2324 | import copy
class Board(object):
    """Tracks queen placements plus the occupancy of every row, column and
    diagonal, so attack checks run in O(1)."""

    def __init__(self, horizontal, vertical, left_oblique, right_oblique, queens, n):
        self.horizontal = horizontal
        self.vertical = vertical
        self.left_oblique = left_oblique
        self.right_oblique = right_oblique
        self.queens = queens
        self.n = n

    def is_valid_position(self, linex, rowj, n):
        """Return True when square (linex, rowj) is attacked by no queen."""
        free = "."
        return (self.horizontal[rowj] == free
                and self.vertical[linex] == free
                and self.left_oblique[(linex + rowj) % (2 * n)] == free
                and self.right_oblique[(linex - rowj) + n] == free)

    def put_here(self, linex, rowj, n):
        """Place a queen at (linex, rowj), marking all four attack lines."""
        self.horizontal[rowj] = "Q"
        self.vertical[linex] = "Q"
        self.left_oblique[(linex + rowj) % (2 * n)] = "Q"
        self.right_oblique[(linex - rowj) + n] = "Q"
        self.queens.append(rowj)

    def __repr__(self):
        return "\n".join(self.output())

    def output(self):
        """Render the placed queens as a list of '.'/'Q' row strings."""
        rows = []
        for queen_column in self.queens:
            rows.append("".join("Q" if j == queen_column else "."
                                for j in range(self.n)))
        return rows
class Solution:
    def solveNQueens(self, n):
        """
        Solve the n-queens puzzle, returning every distinct board as a list
        of '.'/'Q' row strings.

        :type n: int
        :rtype: List[List[str]]
        """
        # One occupancy slot per column/row and per diagonal direction.
        horizontal = ["."] * n
        vertical = ["."] * n
        left_oblique = ["."] * 2 * n
        right_oblique = ["."] * 2 * n
        # NOTE(review): ``queens`` is unused — a fresh [] is passed to Board.
        queens = []
        board = Board(horizontal, vertical, left_oblique, right_oblique, [], n)
        return self.put_queen(board, n)

    def put_queen(self, board, n):
        """put xth queen on the board"""
        # Recursive backtracking: place one queen per row; a deep copy of
        # the board is made per candidate column (simple but O(n) extra
        # work per placement).
        res = []
        if len(board.queens) == n:
            # print(board)
            res.append(board.output())
            return res
        # for the len(queen) queen
        for i in range(n): # jth queue will sit at jth line
            if board.is_valid_position(len(board.queens), i, n):
                new_board = copy.deepcopy(board)
                new_board.put_here(len(board.queens), i, n)
                res.extend(self.put_queen(new_board, n))
        return res
s = Solution()
print(s.solveNQueens(6))
| mit |
angular/angular-cli-stress-test | src/app/components/comp-1699/comp-1699.component.spec.ts | 847 | /**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
import { Comp1699Component } from './comp-1699.component';
describe('Comp1699Component', () => {
let component: Comp1699Component;
let fixture: ComponentFixture<Comp1699Component>;
beforeEach(async(() => {
TestBed.configureTestingModule({
declarations: [ Comp1699Component ]
})
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(Comp1699Component);
component = fixture.componentInstance;
fixture.detectChanges();
});
it('should create', () => {
expect(component).toBeTruthy();
});
});
| mit |
keynetic/kpress | content/themes/stay-wpcom/content.php | 2294 | <?php
/**
* @package Stay
* @since Stay 1.0
*/
?>
<article id="post-<?php the_ID(); ?>" <?php post_class(); ?>>
<header class="entry-header">
<?php the_title( '<h1 class="entry-title"><a href="' . esc_url( get_permalink() ) . '" rel="bookmark">', '</a></h1>' ); ?>
<?php if ( 'post' == get_post_type() ) : ?>
<div class="entry-meta">
<?php stay_posted_on(); ?>
</div><!-- .entry-meta -->
<?php endif; ?>
</header><!-- .entry-header -->
<?php if ( is_search() ) : // Only display Excerpts for Search ?>
<div class="entry-summary">
<?php the_excerpt(); ?>
</div><!-- .entry-summary -->
<?php else : ?>
<div class="entry-content">
<?php the_content( __( 'Continue reading <span class="meta-nav">→</span>', 'stay' ) ); ?>
<?php wp_link_pages( array( 'before' => '<div class="page-links">' . __( 'Pages:', 'stay' ), 'after' => '</div>' ) ); ?>
</div><!-- .entry-content -->
<?php endif; ?>
<footer class="entry-meta">
<?php if ( 'post' == get_post_type() ) : // Hide category and tag text for pages on Search ?>
<?php
/* translators: used between list items, there is a space after the comma */
$categories_list = get_the_category_list( __( ', ', 'stay' ) );
if ( $categories_list && stay_categorized_blog() ) :
?>
<span class="cat-links">
<?php printf( __( 'Posted in %1$s', 'stay' ), $categories_list ); ?>
</span>
<?php endif; // End if categories ?>
<?php
/* translators: used between list items, there is a space after the comma */
$tags_list = get_the_tag_list( '', __( ', ', 'stay' ) );
if ( $tags_list ) :
?>
<span class="sep"> | </span>
<span class="tags-links">
<?php printf( __( 'Tagged %1$s', 'stay' ), $tags_list ); ?>
</span>
<?php endif; // End if $tags_list ?>
<?php endif; // End if 'post' == get_post_type() ?>
<?php if ( ! post_password_required() && ( comments_open() || '0' != get_comments_number() ) ) : ?>
<span class="sep"> | </span>
<span class="comments-link"><?php comments_popup_link( __( 'Leave a comment', 'stay' ), __( '1 Comment', 'stay' ), __( '% Comments', 'stay' ) ); ?></span>
<?php endif; ?>
<?php edit_post_link( __( 'Edit', 'stay' ), '<span class="edit-link">', '</span>' ); ?>
</footer><!-- .entry-meta -->
</article><!-- #post-## -->
| mit |
www2014/discourse_advanced_search | assets/javascripts/discourse_advanced_search/helpers/discourse_advanced_search_helper.js | 374 | /**
Produces a search link to a topic
@method topicSearchLink
@for Handlebars
**/
Handlebars.registerHelper('topicSearchLink', function(property, options) {
  // Resolve the topic bound to `property` in the current template context.
  var searchTopic = Ember.Handlebars.get(this, property, options);
  // Prefer the pre-rendered fancy title when the topic provides one.
  var linkText = searchTopic.get('fancy_title') || searchTopic.get('title');
  // NOTE(review): the title is interpolated without HTML escaping — confirm
  // titles are sanitized upstream before trusting this with user input.
  return "<a href='" + searchTopic.get('relative_url') + "' class='title'>" + linkText + "</a>";
});
Vikerus/ArchwayME | includes/config-db.php | 526 | <?php
##
## database access settings in php format
## automatically generated from /etc/dbconfig-common/phpmyadmin.conf
## by /usr/sbin/dbconfig-generate-include
## Wed, 04 Jun 2014 16:52:17 -0400
##
## by default this file is managed via ucf, so you shouldn't have to
## worry about manual changes being silently discarded. *however*,
## you'll probably also want to edit the configuration file mentioned
## above too.
##
$dbuser='';              // database user name
$dbpass='';              // database password
$basepath='';            // base path of the installation (empty = server root)
$dbname='';              // database/schema name
$dbserver='localhost';   // database host
$dbport='';              // database port (empty = driver default)
$dbtype='mysql';         // database driver
| mit |
yuanagain/seniorthesis | src/experiment_c1lohner.py | 1467 | """
experiment_c1lohner.py
Poincare C^1 Lohner Algorithm
Author: Yuan Wang
On Lohner Algorithm
https://books.google.com/books?id=7a-8yyjQVLcC&pg=PA178&lpg=PA178&dq
"""
from thesis_utils import *
from thesis_defaults import *
from thesis_poincare_utils import *
from thesis_plot_utils import *
import scipy.integrate as integrate
import scipy.special as special
from scipy.integrate import quad
from scipy.optimize import newton
import numdifftools as nd
from evolution import *
from experiment import *
class ExperimentC1Lohner(Experiment):
    """Poincare C^1 Lohner algorithm experiment.

    Wraps an Evolution object and records the parameters needed for the
    C^1 Lohner period-detection algorithm.
    """

    def setParams(self, T=1000, start_pt=default_start, **kwargs):
        """Store run parameters and persist them via saveParams().

        T        -- integration horizon (default 1000)
        start_pt -- initial point of the trajectory
        kwargs   -- any additional experiment parameters (e.g. ``hyperplane``),
                    stored verbatim in ``self.params``.  Accepting **kwargs
                    keeps this compatible with callers such as main(), which
                    passes ``hyperplane=...`` (previously a TypeError).
        """
        self.params['T'] = T
        self.params['start_pt'] = start_pt
        # Record any extra experiment-specific parameters as well.
        self.params.update(kwargs)
        self.saveParams()

    def run(self, T=None, dt=0.01, stepCnt=10000):
        """Execute the experiment.

        The method body is missing in this revision (it was an empty ``def``,
        i.e. a SyntaxError), so raise until the implementation is restored.
        """
        # TODO(review): restore the real integration loop; this file gives no
        # indication of what the original body computed.
        raise NotImplementedError("ExperimentC1Lohner.run is not implemented")
def main():
    """Smoke test: build a Collucci-Nunez evolution, wrap it in a C^1
    Lohner experiment, and run it for a short horizon."""
    print("============")
    # Alternative evolution kept for reference:
    #evo = Evolution_1a(lmbda = lmbda_set_1)
    evo = Evolution_ColluciNunez()
    print(evo)
    expmt = ExperimentC1Lohner( evo = evo,
                                title = "Poincare C^1 Lohner Algorithm",
                                descr = "C^1 Lohner algorithm for period detection")
    # expmt.setParams(T = 4, start_pt = default_start)
    # NOTE(review): confirm setParams accepts a ``hyperplane`` keyword — the
    # call below passes one.
    expmt.setParams(hyperplane = HyperPlane(-4, 12, 2, -10, -1.2),
                    T = 30,
                    start_pt = [9.1, 4.1, 3.2, 4.5] )
    print("============")
    print(expmt)
    # Run with default horizon override disabled and a reduced step count.
    expmt.run(T = None, stepCnt = 1000)

if __name__=="__main__":
    main()
| mit |
aokomoriuta/ViennaCLFiles | viennacl/linalg/compressed_matrix_operations.hpp | 12122 | #ifndef VIENNACL_COMPRESSED_MATRIX_OPERATIONS_HPP_
#define VIENNACL_COMPRESSED_MATRIX_OPERATIONS_HPP_
/* =========================================================================
Copyright (c) 2010-2012, Institute for Microelectronics,
Institute for Analysis and Scientific Computing,
TU Wien.
-----------------
ViennaCL - The Vienna Computing Library
-----------------
Project Head: Karl Rupp rupp@iue.tuwien.ac.at
(A list of authors and contributors can be found in the PDF manual)
License: MIT (X11), see file LICENSE in the base directory
============================================================================= */
/** @file compressed_matrix_operations.hpp
@brief Implementations of operations using compressed_matrix
*/
#include "viennacl/forwards.h"
#include "viennacl/ocl/device.hpp"
#include "viennacl/ocl/handle.hpp"
#include "viennacl/ocl/kernel.hpp"
#include "viennacl/scalar.hpp"
#include "viennacl/vector.hpp"
#include "viennacl/tools/tools.hpp"
#include "viennacl/linalg/kernels/compressed_matrix_kernels.h"
namespace viennacl
{
namespace linalg
{
// A * x
/** @brief Returns a proxy class that represents matrix-vector multiplication with a compressed_matrix
*
* This is used for the convenience expression result = prod(mat, vec);
*
* @param mat The matrix
* @param vec The vector
*/
template<class SCALARTYPE, unsigned int ALIGNMENT, unsigned int VECTOR_ALIGNMENT>
vector_expression<const compressed_matrix<SCALARTYPE, ALIGNMENT>,
                  const vector<SCALARTYPE, VECTOR_ALIGNMENT>,
                  op_prod > prod_impl(const compressed_matrix<SCALARTYPE, ALIGNMENT> & mat,
                                      const vector<SCALARTYPE, VECTOR_ALIGNMENT> & vec)
{
  // Name the verbose expression type once, then build the lazy proxy.
  // No computation happens here; evaluation is deferred until assignment.
  typedef vector_expression<const compressed_matrix<SCALARTYPE, ALIGNMENT>,
                            const vector<SCALARTYPE, VECTOR_ALIGNMENT>,
                            op_prod > proxy_type;
  return proxy_type(mat, vec);
}
/** @brief Carries out matrix-vector multiplication with a compressed_matrix
*
* Implementation of the convenience expression result = prod(mat, vec);
*
* @param mat The matrix
* @param vec The vector
* @param result The result vector
* @param NUM_THREADS Number of threads per work group. Can be used for fine-tuning.
*/
template<class TYPE, unsigned int ALIGNMENT, unsigned int VECTOR_ALIGNMENT>
void prod_impl(const viennacl::compressed_matrix<TYPE, ALIGNMENT> & mat,
               const viennacl::vector<TYPE, VECTOR_ALIGNMENT> & vec,
               viennacl::vector<TYPE, VECTOR_ALIGNMENT> & result,
               size_t NUM_THREADS = 0)
{
  // Dimension sanity checks for result <- mat * vec.
  assert(mat.size1() == result.size());
  assert(mat.size2() == vec.size());

  // NOTE(review): NUM_THREADS is documented above as a tuning knob but is not
  // forwarded to the kernel launch below — confirm whether it is obsolete.
  viennacl::ocl::kernel & k = viennacl::ocl::get_kernel(viennacl::linalg::kernels::compressed_matrix<TYPE, ALIGNMENT>::program_name(), "vec_mul");

  // Launch "vec_mul" with the matrix' three OpenCL buffers, the operand
  // vector, the result vector, and the row count (size1).
  viennacl::ocl::enqueue(k(mat.handle1(), mat.handle2(), mat.handle(), vec, result, static_cast<cl_uint>(mat.size1())));
}
/** @brief Inplace solution of a lower triangular compressed_matrix with unit diagonal. Typically used for LU substitutions
*
* @param L The matrix
* @param vec The vector
*/
template<typename SCALARTYPE, unsigned int MAT_ALIGNMENT, unsigned int VEC_ALIGNMENT>
void inplace_solve(compressed_matrix<SCALARTYPE, MAT_ALIGNMENT> const & L, vector<SCALARTYPE, VEC_ALIGNMENT> & vec, viennacl::linalg::unit_lower_tag)
{
  viennacl::ocl::kernel & k = viennacl::ocl::get_kernel(viennacl::linalg::kernels::compressed_matrix<SCALARTYPE, MAT_ALIGNMENT>::program_name(), "lu_forward");
  unsigned int threads = k.local_work_size();

  // Global size == local size: the forward substitution runs in a single
  // work group (the kernel presumably iterates over rows internally).
  k.global_work_size(k.local_work_size());

  // NOTE(review): the third kernel argument here is the matrix object L
  // itself, whereas the analogous "lu_backward" call passes
  // U.handle().get() — confirm which form the kernel wrapper expects.
  viennacl::ocl::enqueue(k(L.handle1(), L.handle2(), L,
                           viennacl::ocl::local_mem(sizeof(int) * (threads+1)),
                           viennacl::ocl::local_mem(sizeof(SCALARTYPE) * threads),
                           vec, L.size1()));
}
/** @brief Convenience functions for result = solve(trans(mat), vec, unit_lower_tag()); Creates a temporary result vector and forwards the request to inplace_solve()
*
* @param L The lower triangular sparse matrix
* @param vec The load vector, where the solution is directly written to
* @param tag Dispatch tag
*/
template<typename SCALARTYPE, unsigned int MAT_ALIGNMENT, unsigned int VEC_ALIGNMENT, typename TAG>
vector<SCALARTYPE, VEC_ALIGNMENT> solve(compressed_matrix<SCALARTYPE, MAT_ALIGNMENT> const & L,
                                        const vector<SCALARTYPE, VEC_ALIGNMENT> & vec,
                                        const viennacl::linalg::unit_lower_tag & tag)
{
  // Copy the right-hand side, then run the in-place solver on the copy so
  // the caller's vector stays untouched.
  vector<SCALARTYPE, VEC_ALIGNMENT> solution(vec.size());
  solution = vec;
  inplace_solve(L, solution, tag);
  return solution;
}
/** @brief Inplace solution of a upper triangular compressed_matrix. Typically used for LU substitutions
*
* @param U The upper triangular matrix
* @param vec The vector
*/
template<typename SCALARTYPE, unsigned int MAT_ALIGNMENT, unsigned int VEC_ALIGNMENT>
void inplace_solve(compressed_matrix<SCALARTYPE, MAT_ALIGNMENT> const & U, vector<SCALARTYPE, VEC_ALIGNMENT> & vec, viennacl::linalg::upper_tag)
{
viennacl::ocl::kernel & k = viennacl::ocl::get_kernel(viennacl::linalg::kernels::compressed_matrix<SCALARTYPE, MAT_ALIGNMENT>::program_name(), "lu_backward");
unsigned int threads = k.local_work_size();
k.global_work_size(k.local_work_size());
viennacl::ocl::enqueue(k(U.handle1().get(), U.handle2().get(), U.handle().get(),
viennacl::ocl::local_mem(sizeof(int) * (threads+2)),
viennacl::ocl::local_mem(sizeof(SCALARTYPE) * (threads+2)),
vec, U.size1()));
}
/** @brief Convenience function for result = solve(U, vec, upper_tag()); creates a temporary result vector and forwards the request to inplace_solve()
*
* (Doc fix: the previous comment was copy-pasted from the unit_lower overload
* and wrongly described this as solving trans(mat) with a lower triangular
* matrix.)
*
* @param U The upper triangular sparse matrix
* @param vec The load vector; the solution is returned by value
* @param tag Dispatch tag
*/
template<typename SCALARTYPE, unsigned int MAT_ALIGNMENT, unsigned int VEC_ALIGNMENT, typename TAG>
vector<SCALARTYPE, VEC_ALIGNMENT> solve(compressed_matrix<SCALARTYPE, MAT_ALIGNMENT> const & U,
                                        const vector<SCALARTYPE, VEC_ALIGNMENT> & vec,
                                        viennacl::linalg::upper_tag const & tag)
{
  // Work on a copy of the right-hand side so the input stays untouched.
  vector<SCALARTYPE, VEC_ALIGNMENT> result(vec.size());
  result = vec;
  inplace_solve(U, result, tag);
  return result;
}
} //namespace linalg
//v = A * x
/** @brief Implementation of the operation v1 = A * v2, where A is a compressed (sparse) matrix
*
* @param proxy  An expression template proxy class holding A (lhs) and v2 (rhs).
*/
template <typename SCALARTYPE, unsigned int ALIGNMENT>
template <unsigned int MAT_ALIGNMENT>
viennacl::vector<SCALARTYPE, ALIGNMENT> &
viennacl::vector<SCALARTYPE, ALIGNMENT>::operator=(const viennacl::vector_expression< const compressed_matrix<SCALARTYPE, MAT_ALIGNMENT>,
                                                                                      const viennacl::vector<SCALARTYPE, ALIGNMENT>,
                                                                                      viennacl::op_prod> & proxy)
{
  // Aliasing check for x = A * x: the destination must not be overwritten
  // while it is still being read, so compute into a temporary first.
  if (proxy.rhs().handle().get() == this->handle().get())
  {
    viennacl::vector<SCALARTYPE, ALIGNMENT> result(proxy.rhs().size());
    viennacl::linalg::prod_impl(proxy.lhs(), proxy.rhs(), result);
    *this = result;
  }
  else
  {
    // No aliasing: write the product directly into *this.
    viennacl::linalg::prod_impl(proxy.lhs(), proxy.rhs(), *this);
  }
  // Single exit point; the previous version had an unreachable trailing
  // return after both branches had already returned.
  return *this;
}
//v += A * x
/** @brief Implementation of the operation v1 += A * v2, where A is a compressed (sparse) matrix
*
* @param proxy  An expression template proxy class holding A (lhs) and v2 (rhs).
*/
template <typename SCALARTYPE, unsigned int ALIGNMENT>
template <unsigned int MAT_ALIGNMENT>
viennacl::vector<SCALARTYPE, ALIGNMENT> &
viennacl::vector<SCALARTYPE, ALIGNMENT>::operator+=(const vector_expression< const compressed_matrix<SCALARTYPE, MAT_ALIGNMENT>,
                                                                             const vector<SCALARTYPE, ALIGNMENT>,
                                                                             op_prod> & proxy)
{
  // Evaluate A * v2 into a temporary, then accumulate onto *this.
  vector<SCALARTYPE, ALIGNMENT> product(proxy.lhs().size1());
  viennacl::linalg::prod_impl(proxy.lhs(), proxy.rhs(), product);
  *this += product;
  return *this;
}
/** @brief Implementation of the operation v1 -= A * v2, where A is a compressed (sparse) matrix
*
* @param proxy  An expression template proxy class holding A (lhs) and v2 (rhs).
*/
template <typename SCALARTYPE, unsigned int ALIGNMENT>
template <unsigned int MAT_ALIGNMENT>
viennacl::vector<SCALARTYPE, ALIGNMENT> &
viennacl::vector<SCALARTYPE, ALIGNMENT>::operator-=(const vector_expression< const compressed_matrix<SCALARTYPE, MAT_ALIGNMENT>,
                                                                             const vector<SCALARTYPE, ALIGNMENT>,
                                                                             op_prod> & proxy)
{
  // Evaluate A * v2 into a temporary, then subtract it from *this.
  vector<SCALARTYPE, ALIGNMENT> product(proxy.lhs().size1());
  viennacl::linalg::prod_impl(proxy.lhs(), proxy.rhs(), product);
  *this -= product;
  return *this;
}
//free functions:
/** @brief Implementation of the operation 'result = v1 + A * v2', where A is a compressed (sparse) matrix
*
* @param proxy  An expression template proxy class holding A (lhs) and v2 (rhs).
*/
template <typename SCALARTYPE, unsigned int ALIGNMENT>
template <unsigned int MAT_ALIGNMENT>
viennacl::vector<SCALARTYPE, ALIGNMENT>
viennacl::vector<SCALARTYPE, ALIGNMENT>::operator+(const vector_expression< const compressed_matrix<SCALARTYPE, MAT_ALIGNMENT>,
                                                                            const vector<SCALARTYPE, ALIGNMENT>,
                                                                            op_prod> & proxy)
{
  assert(proxy.lhs().size1() == size());
  // sum <- A * v2, then add *this on top and return by value.
  vector<SCALARTYPE, ALIGNMENT> sum(size());
  viennacl::linalg::prod_impl(proxy.lhs(), proxy.rhs(), sum);
  sum += *this;
  return sum;
}
/** @brief Implementation of the operation 'result = v1 - A * v2', where A is a compressed (sparse) matrix
*
* @param proxy  An expression template proxy class holding A (lhs) and v2 (rhs).
*/
template <typename SCALARTYPE, unsigned int ALIGNMENT>
template <unsigned int MAT_ALIGNMENT>
viennacl::vector<SCALARTYPE, ALIGNMENT>
viennacl::vector<SCALARTYPE, ALIGNMENT>::operator-(const vector_expression< const compressed_matrix<SCALARTYPE, MAT_ALIGNMENT>,
                                                                            const vector<SCALARTYPE, ALIGNMENT>,
                                                                            op_prod> & proxy)
{
  assert(proxy.lhs().size1() == size());
  // difference holds A * v2 first; then overwrite it with *this - (A * v2).
  vector<SCALARTYPE, ALIGNMENT> difference(size());
  viennacl::linalg::prod_impl(proxy.lhs(), proxy.rhs(), difference);
  difference = *this - difference;
  return difference;
}
} //namespace viennacl
#endif
| mit |
wmira/react-icons-kit | src/md/ic_traffic_twotone.js | 985 | export const ic_traffic_twotone = {"viewBox":"0 0 24 24","children":[{"name":"path","attribs":{"d":"M0 0h24v24H0V0z","fill":"none"},"children":[]},{"name":"path","attribs":{"d":"M9 19h6V5H9v14zm3-13c.83 0 1.5.67 1.5 1.5S12.83 9 12 9s-1.5-.67-1.5-1.5S11.17 6 12 6zm0 4.5c.83 0 1.5.67 1.5 1.5s-.67 1.5-1.5 1.5-1.5-.67-1.5-1.5.67-1.5 1.5-1.5zm0 4.5c.83 0 1.5.67 1.5 1.5S12.83 18 12 18s-1.5-.67-1.5-1.5.67-1.5 1.5-1.5z","opacity":".3"},"children":[]},{"name":"path","attribs":{"d":"M20 5h-3V4c0-.55-.45-1-1-1H8c-.55 0-1 .45-1 1v1H4c0 1.86 1.28 3.41 3 3.86V10H4c0 1.86 1.28 3.41 3 3.86V15H4c0 1.86 1.28 3.41 3 3.86V20c0 .55.45 1 1 1h8c.55 0 1-.45 1-1v-1.14c1.72-.45 3-2 3-3.86h-3v-1.14c1.72-.45 3-2 3-3.86h-3V8.86c1.72-.45 3-2 3-3.86zm-5 14H9V5h6v14zm-3-1c.83 0 1.5-.67 1.5-1.5S12.83 15 12 15s-1.5.67-1.5 1.5.67 1.5 1.5 1.5zm0-4.5c.83 0 1.5-.67 1.5-1.5s-.67-1.5-1.5-1.5-1.5.67-1.5 1.5.67 1.5 1.5 1.5zM12 9c.83 0 1.5-.67 1.5-1.5S12.83 6 12 6s-1.5.67-1.5 1.5S11.17 9 12 9z"},"children":[]}]}; | mit |
geekish/crap | src/Command/ListAliasesCommand.php | 1176 | <?php
namespace Geekish\Crap\Command;
use Symfony\Component\Console\Input\InputInterface;
use Symfony\Component\Console\Output\OutputInterface;
/**
 * Class ListAliasesCommand
 * @package Geekish\Crap\Command
 */
final class ListAliasesCommand extends BaseCommand
{
    /**
     * @inheritDoc
     */
    protected function configure()
    {
        $this->setName('aliases');
        $this->setDescription('List currently defined aliases');
    }

    /**
     * @inheritDoc
     * @codeCoverageIgnore
     */
    protected function execute(InputInterface $input, OutputInterface $output)
    {
        $aliases = $this->helper->getAliases();

        // Nothing defined yet: report and bail out early.
        if (count($aliases) === 0) {
            $output->writeln('<comment>No aliases defined.</comment>');
            return 0;
        }

        // Pad alias names so the package column lines up.
        $padWidth = max(array_map('strlen', $aliases)) + 3;
        foreach ($aliases as $aliasName) {
            $packageName = $this->helper->getAlias($aliasName);
            $output->writeln(sprintf(
                '<comment>%s</comment> %s',
                str_pad($aliasName, $padWidth, ' '),
                $packageName
            ));
        }

        return 0;
    }
}
| mit |
erfannoury/Pitch-Annotator.Net | CsvPlayground/Properties/AssemblyInfo.cs | 1402 | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("CsvPlayground")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("CsvPlayground")]
[assembly: AssemblyCopyright("Copyright © 2014")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("f3444b82-cf76-487b-9425-c7bed265c67a")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| mit |
paulcbetts/brightray | browser/browser_context.cc | 5428 | // Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE-CHROMIUM file.
#include "browser/browser_context.h"
#include "browser/download_manager_delegate.h"
#include "browser/inspectable_web_contents_impl.h"
#include "browser/network_delegate.h"
#include "common/application_info.h"
#include "base/environment.h"
#include "base/files/file_path.h"
#include "base/path_service.h"
#include "base/prefs/json_pref_store.h"
#include "base/prefs/pref_registry_simple.h"
#include "base/prefs/pref_service.h"
#include "base/prefs/pref_service_factory.h"
#include "content/public/browser/browser_thread.h"
#include "content/public/browser/resource_context.h"
#include "content/public/browser/storage_partition.h"
#if defined(OS_LINUX)
#include "base/nix/xdg_util.h"
#endif
using content::BrowserThread;
namespace brightray {
// Thin content::ResourceContext implementation that forwards the IO-thread
// accessors to the BrowserContext's URLRequestContextGetter.
class BrowserContext::ResourceContext : public content::ResourceContext {
 public:
  ResourceContext() : getter_(nullptr) {}

  // Raw pointer, no ownership taken: the getter is owned by the
  // BrowserContext (see CreateRequestContext). Must be set before the IO
  // thread starts using this context.
  void set_url_request_context_getter(URLRequestContextGetter* getter) {
    getter_ = getter;
  }

 private:
  virtual net::HostResolver* GetHostResolver() override {
    return getter_->host_resolver();
  }

  virtual net::URLRequestContext* GetRequestContext() override {
    return getter_->GetURLRequestContext();
  }

  // FIXME: We should probably allow clients to override this to implement more
  // restrictive policies.
  virtual bool AllowMicAccess(const GURL& origin) override {
    return true;
  }

  // FIXME: We should probably allow clients to override this to implement more
  // restrictive policies.
  virtual bool AllowCameraAccess(const GURL& origin) override {
    return true;
  }

  URLRequestContextGetter* getter_;
};
BrowserContext::BrowserContext() : resource_context_(new ResourceContext) {
}

// Resolves the on-disk profile directory and loads the JSON-backed
// preference store from <profile>/Preferences.
void BrowserContext::Initialize() {
  base::FilePath path;
#if defined(OS_LINUX)
  // Linux follows the XDG spec: config lives under $XDG_CONFIG_HOME
  // (defaulting to ~/.config).
  scoped_ptr<base::Environment> env(base::Environment::Create());
  path = base::nix::GetXDGDirectory(env.get(),
                                    base::nix::kXdgConfigHomeEnvVar,
                                    base::nix::kDotConfigDir);
#else
  CHECK(PathService::Get(base::DIR_APP_DATA, &path));
#endif
  // The profile directory is named after the embedding application.
  path_ = path.Append(base::FilePath::FromUTF8Unsafe(GetApplicationName()));

  auto prefs_path = GetPath().Append(FILE_PATH_LITERAL("Preferences"));
  base::PrefServiceFactory prefs_factory;
  // Preference file I/O happens on the blocking pool, off the UI thread.
  prefs_factory.SetUserPrefsFile(prefs_path,
                                 JsonPrefStore::GetTaskRunnerForFile(
                                     prefs_path, BrowserThread::GetBlockingPool()));

  auto registry = make_scoped_refptr(new PrefRegistrySimple);
  RegisterInternalPrefs(registry);
  // RegisterPrefs: presumably a subclass hook for app-specific prefs —
  // declaration not visible here; confirm.
  RegisterPrefs(registry);

  prefs_ = prefs_factory.Create(registry);
}

BrowserContext::~BrowserContext() {
  // The resource context is used on the IO thread; release ownership and let
  // that thread delete it so it is not destroyed while still in use.
  BrowserThread::DeleteSoon(BrowserThread::IO,
                            FROM_HERE,
                            resource_context_.release());
}
// Registers the preferences brightray itself needs.
void BrowserContext::RegisterInternalPrefs(PrefRegistrySimple* registry) {
  InspectableWebContentsImpl::RegisterPrefs(registry);
}

net::URLRequestContextGetter* BrowserContext::CreateRequestContext(
    content::ProtocolHandlerMap* protocol_handlers,
    content::URLRequestInterceptorScopedVector protocol_interceptors) {
  DCHECK(!url_request_getter_);  // content should ask only once per context
  url_request_getter_ = new URLRequestContextGetter(
      this,
      GetPath(),
      BrowserThread::UnsafeGetMessageLoopForThread(BrowserThread::IO),
      BrowserThread::UnsafeGetMessageLoopForThread(BrowserThread::FILE),
      protocol_handlers,
      protocol_interceptors.Pass());
  // Wire the getter into the IO-thread resource context before returning it.
  resource_context_->set_url_request_context_getter(url_request_getter_.get());
  return url_request_getter_.get();
}

net::NetworkDelegate* BrowserContext::CreateNetworkDelegate() {
  // NOTE(review): returns a raw new'd delegate; the caller is assumed to take
  // ownership — confirm against URLRequestContextGetter's usage.
  return new NetworkDelegate;
}
base::FilePath BrowserContext::GetPath() const {
  return path_;  // set in Initialize()
}

bool BrowserContext::IsOffTheRecord() const {
  return false;  // brightray contexts are always persistent
}

net::URLRequestContextGetter* BrowserContext::GetRequestContext() {
  return GetDefaultStoragePartition(this)->GetURLRequestContext();
}

// All render-process and media variants below share the single default
// request context.
net::URLRequestContextGetter* BrowserContext::GetRequestContextForRenderProcess(
    int renderer_child_id) {
  return GetRequestContext();
}

net::URLRequestContextGetter* BrowserContext::GetMediaRequestContext() {
  return GetRequestContext();
}

net::URLRequestContextGetter*
BrowserContext::GetMediaRequestContextForRenderProcess(
    int renderer_child_id) {
  return GetRequestContext();
}

net::URLRequestContextGetter*
BrowserContext::GetMediaRequestContextForStoragePartition(
    const base::FilePath& partition_path,
    bool in_memory) {
  return GetRequestContext();
}

content::ResourceContext* BrowserContext::GetResourceContext() {
  return resource_context_.get();
}

content::DownloadManagerDelegate* BrowserContext::GetDownloadManagerDelegate() {
  // Created lazily on first use; owned by this context.
  if (!download_manager_delegate_)
    download_manager_delegate_.reset(new DownloadManagerDelegate);
  return download_manager_delegate_.get();
}

// The remaining content::BrowserContext hooks are intentionally
// unimplemented and return null.
content::BrowserPluginGuestManager* BrowserContext::GetGuestManager() {
  return NULL;
}

quota::SpecialStoragePolicy* BrowserContext::GetSpecialStoragePolicy() {
  return NULL;
}

content::PushMessagingService* BrowserContext::GetPushMessagingService() {
  return NULL;
}

content::SSLHostStateDelegate* BrowserContext::GetSSLHostStateDelegate() {
  return nullptr;
}
} // namespace brightray
| mit |
mohitkr05/_wplove | lib/scripts.php | 2834 | <?php
/**
* Enqueue scripts and stylesheets
*
* Enqueue stylesheets in the following order:
* 1. /theme/assets/css/main.min.css
*
* Enqueue scripts in the following order:
* 1. jquery-1.11.0.min.js via Google CDN
* 2. /theme/assets/js/vendor/modernizr-2.7.0.min.js
* 3. /theme/assets/js/main.min.js (in footer)
*/
function roots_scripts() {
  // Theme stylesheets; the hash doubles as a cache-busting version string.
  wp_enqueue_style('roots_main', get_template_directory_uri() . '/assets/sass/app.css', false, '64c2848549e90cef42796141ccce4c3e');
  wp_enqueue_style('device_mockups', get_template_directory_uri() . '/assets/css/device-mockups.css', false, '64c2848549e90cef42796141ccce4c3e');

  // jQuery is loaded using the same method from HTML5 Boilerplate:
  // Grab Google CDN's latest jQuery with a protocol relative URL; fallback to local if offline
  // It's kept in the header instead of footer to avoid conflicts with plugins.
  if (!is_admin() && current_theme_supports('jquery-cdn')) {
    wp_deregister_script('jquery');
    wp_register_script('jquery', '//ajax.googleapis.com/ajax/libs/jquery/1.11.0/jquery.min.js', array(), null, false);
    // The filter prints the local fallback snippet right after the CDN tag.
    add_filter('script_loader_src', 'roots_jquery_local_fallback', 10, 2);
  }

  // Threaded-comment helper only where it can actually be used.
  if (is_single() && comments_open() && get_option('thread_comments')) {
    wp_enqueue_script('comment-reply');
  }

  // Register before enqueueing; roots_scripts is loaded in the footer (last
  // argument true), modernizr and jquery in the head.
  wp_register_script('modernizr', get_template_directory_uri() . '/assets/js/vendor/modernizr-2.7.0.min.js', array(), null, false);
  wp_register_script('roots_scripts', get_template_directory_uri() . '/assets/js/scripts.min.js', array(), '0fc6af96786d8f267c8686338a34cd38', true);
  wp_enqueue_script('modernizr');
  wp_enqueue_script('jquery');
  wp_enqueue_script('roots_scripts');
}
// Priority 100 runs this after callbacks registered at the default priority.
add_action('wp_enqueue_scripts', 'roots_scripts', 100);
// http://wordpress.stackexchange.com/a/12450
// Echoes a document.write() fallback to the bundled jQuery right after the
// CDN jQuery tag: the call for the 'jquery' handle only arms the static
// flag; the next invocation then emits the fallback snippet.
function roots_jquery_local_fallback($src, $handle = null) {
  static $add_jquery_fallback = false;

  if ($add_jquery_fallback) {
    echo '<script>window.jQuery || document.write(\'<script src="' . get_template_directory_uri() . '/assets/js/vendor/jquery-1.11.0.min.js"><\/script>\')</script>' . "\n";
    $add_jquery_fallback = false;
  }

  if ($handle === 'jquery') {
    $add_jquery_fallback = true;
  }

  return $src;
}
// Also hooked on wp_head (called without arguments) so the armed flag is
// flushed even when jQuery was the last filtered script.
add_action('wp_head', 'roots_jquery_local_fallback');
// Prints the asynchronous Google Analytics snippet with the configured
// tracking ID substituted in.
function roots_google_analytics() { ?>
<script>
(function(b,o,i,l,e,r){b.GoogleAnalyticsObject=l;b[l]||(b[l]=
function(){(b[l].q=b[l].q||[]).push(arguments)});b[l].l=+new Date;
e=o.createElement(i);r=o.getElementsByTagName(i)[0];
e.src='//www.google-analytics.com/analytics.js';
r.parentNode.insertBefore(e,r)}(window,document,'script','ga'));
ga('create','<?php echo GOOGLE_ANALYTICS_ID; ?>');ga('send','pageview');
</script>
<?php }

// Emit the snippet in the footer only when an ID is configured and the
// current user lacks the manage_options capability (i.e. skip admins).
if (GOOGLE_ANALYTICS_ID && !current_user_can('manage_options')) {
  add_action('wp_footer', 'roots_google_analytics', 20);
}
| mit |
GreatBizTools/django-threaded-messages | threaded_messages/models.py | 9660 | from django.db import models
from django.contrib.auth.models import User
from django.core.cache import cache
from django.utils.translation import ugettext_lazy as _
from django.db.models import F, Q
from json import dumps, loads
from .listeners import start_listening
from .settings import INBOX_MESSAGE_CACHE, INBOX_MESSAGE_CACHE_TIME
start_listening()
class MessageManager(models.Manager):
    """Manager building inbox/outbox/trash querysets over Participant rows.

    All three views share the same read/unreplied filtering, factored into
    private helpers below.  "Read" is defined relative to the thread's
    latest message: a thread is read only when ``read_at`` is set and newer
    than ``thread.latest_msg.sent_at``.
    """

    def _filter_read(self, qs, read):
        """Restrict ``qs`` to read (True) or unread (False) threads.

        ``read=None`` leaves the queryset unchanged.
        """
        if read is not None:
            if read:
                # Read: read_at set and newer than the latest message.
                qs = qs.exclude(read_at__isnull=True)\
                       .filter(read_at__gt=F("thread__latest_msg__sent_at"))
            else:
                # Unread: never read, or read before the latest message.
                qs = qs.filter(Q(read_at__isnull=True)
                               | Q(read_at__lt=F("thread__latest_msg__sent_at")))
        return qs

    def _filter_unreplied(self, qs, only_unreplied):
        """Restrict ``qs`` to unreplied threads when requested.

        Unreplied means never replied to, or last replied to before the
        latest message arrived.  Falsy ``only_unreplied`` is a no-op.
        """
        if only_unreplied:
            qs = qs.filter(Q(replied_at__isnull=True)
                           | Q(replied_at__lt=F("thread__latest_msg__sent_at")))
        return qs

    def inbox_for(self, user, read=None, only_unreplied=None):
        """
        Returns all messages that were received by the given user and are not
        marked as deleted.
        """
        inbox = self.filter(
            user=user,
            deleted_at__isnull=True,
        )
        inbox = self._filter_read(inbox, read)
        return self._filter_unreplied(inbox, only_unreplied)

    def outbox_for(self, user, read=None, only_unreplied=None):
        """
        Returns all messages that were sent by the given user and are not
        marked as deleted.
        """
        outbox = self.filter(
            user=user,
            replied_at__isnull=False,
            deleted_at__isnull=True,
        )
        outbox = self._filter_read(outbox, read)
        # BUG FIX: the unreplied filter result used to be assigned to a
        # throwaway ``inbox`` variable and discarded, so only_unreplied
        # never took effect for the outbox.
        return self._filter_unreplied(outbox, only_unreplied)

    def trash_for(self, user, read=None, only_unreplied=None):
        """
        Returns all messages that were either received or sent by the given
        user and are marked as deleted.
        """
        trash = self.filter(
            user=user,
            deleted_at__isnull=False,
        )
        trash = self._filter_read(trash, read)
        return self._filter_unreplied(trash, only_unreplied)
class Message(models.Model):
    """
    A private message from user to user
    """
    body = models.TextField(_("body"))
    # Nullable so system-generated messages without a sender are possible.
    sender = models.ForeignKey(User, related_name='sent_messages', blank=True, null=True, verbose_name=_("sender"))
    # Previous message in the conversation, if this one is a reply.
    parent_msg = models.ForeignKey('self', related_name='next_messages', blank=True, null=True, verbose_name=_("parent message"))
    sent_at = models.DateTimeField(_("sent at"), auto_now_add=True,
                                   db_index=True)

    def __unicode__(self):
        return "%s - %s" % (str(self.sender), self.sent_at)

    def save(self, **kwargs):
        """Stamp sent_at on first save, then delegate to Model.save()."""
        # NOTE(review): sent_at is auto_now_add, yet it is also set here via
        # utils.now() on first save — presumably so the in-memory instance
        # carries the timestamp immediately; confirm the intent.
        if not self.id:
            from .utils import now
            self.sent_at = now()
        super(Message, self).save(**kwargs)

    class Meta:
        ordering = ['-sent_at']  # newest first
        verbose_name = _("Message")
        verbose_name_plural = _("Messages")
class Thread(models.Model):
    """
    A linear conversation between two or more Users
    """
    subject = models.CharField(_("Subject"), max_length=120)
    # Denormalized pointer to the newest message so inbox queries can filter
    # and sort without joining all messages.
    latest_msg = models.ForeignKey(Message, related_name='thread_latest', verbose_name=_("Latest message"))
    all_msgs = models.ManyToManyField(Message, related_name='thread', verbose_name=_("Messages"))
    # the following fields are used to filter out messages that have not been replied to in the inbox
    creator = models.ForeignKey(User, related_name='created_threads', verbose_name=_("creator"))
    replied = models.BooleanField(editable=False, default=False)

    def __unicode__(self):
        return self.subject

    def get_absolute_url(self):
        # URL name in the "tm" namespace; wrapped by models.permalink below.
        return ('tm:messages_detail', [self.id])
    get_absolute_url = models.permalink(get_absolute_url)

    class Meta:
        ordering = ['latest_msg']
        verbose_name = _("Thread")
        verbose_name_plural = _("Threads")
class Participant(models.Model):
    """
    Thread manager for each participant: per-user read/replied/deleted
    state for one thread.
    """
    thread = models.ForeignKey(Thread, related_name='participants', verbose_name=_("message thread"))
    user = models.ForeignKey(User, related_name='threads', verbose_name=_("participant users"))
    read_at = models.DateTimeField(_("read at"), null=True, blank=True,
                                   db_index=True)
    replied_at = models.DateTimeField(_("replied at"), null=True, blank=True,
                                      db_index=True)
    deleted_at = models.DateTimeField(_("deleted at"), null=True, blank=True,
                                      db_index=True)

    objects = MessageManager()

    def new(self):
        """Return True when this participant has NOT yet read the thread's
        latest message."""
        if self.read_at is None or self.read_at < self.thread.latest_msg.sent_at:
            return True
        return False

    def replied(self):
        """Return True when this participant has NOT yet replied to the
        thread's latest message.

        NOTE(review): the name reads inverted (True means *un*replied);
        kept as-is because callers may rely on it.
        """
        if self.replied_at is None \
                or self.replied_at < self.thread.latest_msg.sent_at:
            return True
        return False

    def last_other_sender(self):
        """returns the last sender thats not the viewing user. if nobody
        besides you sent a message to the thread we take a random one
        """
        message = self.thread.all_msgs.exclude(sender=self.user)
        if message:
            return message[0].sender
        return None

    def others(self):
        """returns the other participants of the thread"""
        return self.thread.participants.exclude(user=self.user)

    def get_next(self):
        """Next (newer) inbox participation after this one, or None."""
        try:
            participation = Participant.objects.inbox_for(
                self.user
            ).filter(
                thread__latest_msg__sent_at__gt=self.thread.latest_msg.sent_at
            ).reverse()[0]
            return participation
        except Exception:
            # Best-effort lookup (was a bare ``except:``, which also
            # swallowed KeyboardInterrupt/SystemExit): no newer thread.
            return None

    def get_previous(self):
        """Previous (older) inbox participation before this one, or None."""
        try:
            participation = Participant.objects.inbox_for(
                self.user
            ).filter(
                thread__latest_msg__sent_at__lt=self.thread.latest_msg.sent_at)[0]
            return participation
        except Exception:
            # Best-effort lookup, see get_next().
            return None

    def read_thread(self):
        """
        Marks thread as read and refill count cache
        """
        from .utils import fill_message_cache, now
        self.read_at = now()
        self.save()
        fill_message_cache(self.user)

    def unread_thread(self):
        """Mark the thread unread again and refresh the count cache."""
        from .utils import fill_message_cache
        self.read_at = None
        self.save()
        fill_message_cache(self.user)

    def __unicode__(self):
        return "%s - %s" % (str(self.user), self.thread.subject)

    class Meta:
        ordering = ['thread']
        verbose_name = _("participant")
        verbose_name_plural = _("participants")
def inbox_count_for(user):
    # Number of unread, non-deleted threads for `user`.
    return Participant.objects.inbox_for(user, read=False).count()

def inbox_messages_for(user):
    # The three most recent unread threads as plain dicts (oldest first after
    # the reverse), each repeating the total unread count.
    count = inbox_count_for(user)
    # NOTE(review): sender.full_name() is not a stock Django User method —
    # presumably added elsewhere in this project; confirm.
    unread_messages = [{'subject': p.thread.subject, 'sender': p.thread.latest_msg.sender.full_name(),'thread_id': p.thread.id, 'message_count':count} for p in
        Participant.objects.inbox_for(user,read=False)[0:3]]#having the indexing on the Model object causes the ORM to issue a LIMIT sql query
    unread_messages.reverse()
    return unread_messages
def cached_inbox_messages_for(user):
    """
    The messages displayed in the navbar dropdown, served from cache when
    available.

    :param user: a user object
    :return: list of dicts with 'subject', 'sender', 'thread_id' and
             'message_count' keys
    """
    cached = cache.get(INBOX_MESSAGE_CACHE % user.pk)
    if cached:
        # BUG FIX: the cache stores a JSON string (see below), but this
        # branch used to return it verbatim while the miss branch returned a
        # list.  Decode so both paths return the same type — this is also the
        # only use of the previously-unused ``loads`` import.
        return loads(cached)
    messages = inbox_messages_for(user)
    cache.set(INBOX_MESSAGE_CACHE % user.pk, dumps(messages), INBOX_MESSAGE_CACHE_TIME)
    return messages
| mit |
nx-hacker-news/nx-hacker-news.github.io | webpack.config.js | 211 | 'use strict'
// Webpack build configuration: bundles src/index.js into /bundle.js at the
// project root, loading .html and .css files as raw strings.
var config = {
  entry: './src/index.js',
  output: {
    path: __dirname,
    filename: '/bundle.js'
  },
  module: {
    loaders: [
      {test: /\.(html)|(css)$/, loader: 'raw'}
    ]
  }
}

module.exports = config
| mit |
Kronuz/Xapiand | oldtests/test_threadpool.cc | 6089 | /*
* Copyright (c) 2015-2019 Dubalu LLC
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#include "test_threadpool.h"
#include "utils.h"
// Exercises ThreadPool::enqueue with 4 workers and 4 tasks: every task can
// start immediately, so the trace shows all four "<N" starts (in enqueue
// order, enforced by the inter-enqueue sleeps) followed by "N>" completions
// ordered by each task's sleep duration.
int test_pool() {
	INIT_LOG
	std::string results;
	ThreadPool<> pool("W{}", 4);
	// Each TestTask appends "<N" on start and "N>" on completion into `results`.
	pool.enqueue([task = std::make_shared<TestTask>("1", 0.08, results)]{ task->run(); });
	std::this_thread::sleep_for(std::chrono::duration<double>(0.001));
	pool.enqueue([task = std::make_shared<TestTask>("2", 0.02, results)]{ task->run(); });
	std::this_thread::sleep_for(std::chrono::duration<double>(0.001));
	pool.enqueue([task = std::make_shared<TestTask>("3", 0.04, results)]{ task->run(); });
	std::this_thread::sleep_for(std::chrono::duration<double>(0.001));
	pool.enqueue([task = std::make_shared<TestTask>("4", 0.01, results)]{ task->run(); });
	// Stop accepting work and wait for all workers to finish before checking.
	pool.end();
	pool.join();
	if (results != "<1<2<3<44>2>3>1>") {
		L_ERR("ThreadPool::enqueue is not working correctly. Result: {} Expected: <1<2<3<44>2>3>1>", results);
		RETURN(1);
	}
	RETURN(0);
}
// Same scenario as test_pool() but with only 3 workers for 4 tasks: task 4
// cannot start until a worker frees up, so "2>" (the fastest of the first
// three) is expected to appear in the trace before "<4".
int test_pool_limit() {
	INIT_LOG
	std::string results;
	ThreadPool<> pool("W{}", 3);
	// Each TestTask appends "<N" on start and "N>" on completion into `results`.
	pool.enqueue([task = std::make_shared<TestTask>("1", 0.08, results)]{ task->run(); });
	std::this_thread::sleep_for(std::chrono::duration<double>(0.001));
	pool.enqueue([task = std::make_shared<TestTask>("2", 0.02, results)]{ task->run(); });
	std::this_thread::sleep_for(std::chrono::duration<double>(0.001));
	pool.enqueue([task = std::make_shared<TestTask>("3", 0.04, results)]{ task->run(); });
	std::this_thread::sleep_for(std::chrono::duration<double>(0.001));
	pool.enqueue([task = std::make_shared<TestTask>("4", 0.01, results)]{ task->run(); });
	// Stop accepting work and wait for all workers to finish before checking.
	pool.end();
	pool.join();
	if (results != "<1<2<32><44>3>1>") {
		L_ERR("ThreadPool::enqueue is not working correctly. Result: {} Expected: <1<2<32><44>3>1>", results);
		RETURN(1);
	}
	RETURN(0);
}
// Verifies ThreadPool::async with plain int arguments across every supported
// callable flavor. Each task squares i for i = 1..5, so the futures must sum
// to 1 + 4 + 9 + 16 + 25 = 55.
int test_pool_func() {
	INIT_LOG
	ThreadPool<> pool("W{}", 4);
	test_pool_class_t obj;
	int i = 1;
	std::vector<std::future<int>> results;
	// Using lambda without parameters
	results.emplace_back(pool.async([i = i]() {
		return i * i;
	}));
	++i;
	// Using lambda with parameters
	results.emplace_back(pool.async([](int i) {
		return i * i;
	}, i));
	++i;
	// Using regular function
	results.emplace_back(pool.async(&test_pool_func_func, i));
	++i;
	// Using member function
	results.emplace_back(pool.async(&test_pool_class_t::func, &obj, i));
	++i;
	// Using captured object function
	results.emplace_back(pool.async([&obj](int i) {
		return obj.func(i);
	}, i));
	++i;
	int total = 0;
	for (auto& result: results) {
		total += result.get();
	}
	if (total != 55) {
		// Fixed message: the checked (and correct) expected total is 55, not 30.
		L_ERR("ThreadPool::async functions with int is not working correctly. Result: {} Expect: 55", total);
		RETURN(1);
	}
	pool.end();
	pool.join();
	RETURN(0);
}
// Same coverage as test_pool_func() but every int travels through a
// std::shared_ptr<int>, verifying argument copying/sharing through the pool.
// Futures must again sum to 1 + 4 + 9 + 16 + 25 = 55.
int test_pool_func_shared() {
	INIT_LOG
	ThreadPool<> pool("W{}", 4);
	test_pool_class_t obj;
	int i = 1;
	std::vector<std::future<int>> results;
	// Using lambda without parameters
	results.emplace_back(pool.async([i = std::make_shared<int>(i)]() {
		return *i * *i;
	}));
	++i;
	// Using lambda with parameters
	results.emplace_back(pool.async([](std::shared_ptr<int> i) {
		return *i * *i;
	}, std::make_shared<int>(i)));
	++i;
	// Using regular function
	results.emplace_back(pool.async(&test_pool_func_func_shared, std::make_shared<int>(i)));
	++i;
	// Using member function
	results.emplace_back(pool.async(&test_pool_class_t::func_shared, &obj, std::make_shared<int>(i)));
	++i;
	// Using captured object function
	results.emplace_back(pool.async([&obj](std::shared_ptr<int> i) {
		return obj.func_shared(std::move(i));
	}, std::make_shared<int>(i)));
	++i;
	int total = 0;
	for (auto& result: results) {
		total += result.get();
	}
	if (total != 55) {
		// Fixed message: the checked (and correct) expected total is 55, not 30.
		L_ERR("ThreadPool::async functions with std::shared_ptr is not working correctly. Result: {} Expect: 55", total);
		RETURN(1);
	}
	pool.end();
	pool.join();
	RETURN(0);
}
// Same coverage as test_pool_func() but every int travels through a
// std::unique_ptr<int>, verifying move-only arguments survive the pool's
// task plumbing. Futures must again sum to 1 + 4 + 9 + 16 + 25 = 55.
int test_pool_func_unique() {
	INIT_LOG
	ThreadPool<> pool("W{}", 4);
	test_pool_class_t obj;
	int i = 1;
	std::vector<std::future<int>> results;
	// Using lambda without parameters
	results.emplace_back(pool.async([i = std::make_unique<int>(i)]() {
		return *i * *i;
	}));
	++i;
	// Using lambda with parameters
	results.emplace_back(pool.async([](std::unique_ptr<int> i) {
		return *i * *i;
	}, std::make_unique<int>(i)));
	++i;
	// Using regular function
	results.emplace_back(pool.async(&test_pool_func_func_unique, std::make_unique<int>(i)));
	++i;
	// Using member function
	results.emplace_back(pool.async(&test_pool_class_t::func_unique, &obj, std::make_unique<int>(i)));
	++i;
	// Using captured object function
	results.emplace_back(pool.async([&obj](std::unique_ptr<int> i) {
		return obj.func_unique(std::move(i));
	}, std::make_unique<int>(i)));
	++i;
	int total = 0;
	for (auto& result: results) {
		total += result.get();
	}
	if (total != 55) {
		// Fixed message: the checked (and correct) expected total is 55, not 30.
		L_ERR("ThreadPool::async functions with std::unique_ptr is not working correctly. Result: {} Expect: 55", total);
		RETURN(1);
	}
	pool.end();
	pool.join();
	RETURN(0);
}
| mit |
risan/oauth1 | src/Provider/Tumblr.php | 666 | <?php
namespace Risan\OAuth1\Provider;
use Risan\OAuth1\Signature\HmacSha1Signer;
/**
 * OAuth 1.0 provider configuration for the Tumblr API.
 */
class Tumblr implements ProviderInterface
{
    /**
     * {@inheritdoc}
     */
    public function getUriConfig()
    {
        return [
            'temporary_credentials_uri' => 'https://www.tumblr.com/oauth/request_token',
            'authorization_uri' => 'https://www.tumblr.com/oauth/authorize',
            'token_credentials_uri' => 'https://www.tumblr.com/oauth/access_token',
            'base_uri' => 'https://api.tumblr.com/v2/',
        ];
    }
    /**
     * {@inheritdoc}
     */
    public function getSigner()
    {
        return new HmacSha1Signer();
    }
}
| mit |
namespace SetTuples
{
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Text;

    /// <summary>
    /// A fixed-size, index-addressable tuple of arbitrary values.
    /// </summary>
    public class Tuple
    {
        // Backing store; its length fixes the tuple arity at construction time.
        private object[] values;

        /// <summary>
        /// Creates a tuple with <paramref name="size"/> columns, all initially null.
        /// </summary>
        /// <param name="size">Number of columns; must be non-negative.</param>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="size"/> is negative.</exception>
        public Tuple(int size)
        {
            if (size < 0)
                throw new ArgumentOutOfRangeException("size", "size must be non-negative");

            this.values = new object[size];
        }

        /// <summary>
        /// Gets or sets the value stored at the given zero-based column index.
        /// </summary>
        public object this[int index]
        {
            get { return this.values[index]; }
            set { this.values[index] = value; }
        }

        /// <summary>
        /// Returns a new tuple with <paramref name="incsize"/> extra null columns
        /// appended after copies of this tuple's values; this tuple is unchanged.
        /// </summary>
        /// <param name="incsize">Number of columns to add; must be non-negative.</param>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when <paramref name="incsize"/> is negative.</exception>
        public Tuple AddColumns(int incsize)
        {
            if (incsize < 0)
                throw new ArgumentOutOfRangeException("incsize", "incsize must be non-negative");

            Tuple tuple = new Tuple(this.values.Length + incsize);
            Array.Copy(this.values, tuple.values, this.values.Length);
            return tuple;
        }
    }
}
| mit |
Daniel18v/final-project | application/config/routes.php | 3618 | <?php
defined('BASEPATH') OR exit('No direct script access allowed');
/*
| -------------------------------------------------------------------------
| URI ROUTING
| -------------------------------------------------------------------------
| This file lets you re-map URI requests to specific controller functions.
|
| Typically there is a one-to-one relationship between a URL string
| and its corresponding controller class/method. The segments in a
| URL normally follow this pattern:
|
| example.com/class/method/id/
|
| In some instances, however, you may want to remap this relationship
| so that a different class/function is called than the one
| corresponding to the URL.
|
| Please see the user guide for complete details:
|
| https://codeigniter.com/user_guide/general/routing.html
|
| -------------------------------------------------------------------------
| RESERVED ROUTES
| -------------------------------------------------------------------------
|
| There are three reserved routes:
|
| $route['default_controller'] = 'welcome';
|
| This route indicates which controller class should be loaded if the
| URI contains no data. In the above example, the "welcome" class
| would be loaded.
|
| $route['404_override'] = 'errors/page_missing';
|
| This route will tell the Router which controller/method to use if those
| provided in the URL cannot be matched to a valid route.
|
| $route['translate_uri_dashes'] = FALSE;
|
| This is not exactly a route, but allows you to automatically route
| controller and method names that contain dashes. '-' isn't a valid
| class or method name character, so it requires translation.
| When you set this option to TRUE, it will replace ALL dashes in the
| controller and method URI segments.
|
| Examples: my-controller/index -> my_controller/index
| my-controller/my-method -> my_controller/my_method
*/
// Reserved CodeIgniter routes -------------------------------------------
$route['default_controller'] = 'Main_controller';
$route['404_override'] = 'Errors_controller/error404';
$route['prohibido'] = 'Errors_controller/error403';
$route['translate_uri_dashes'] = FALSE;
// Miscellaneous / test routes -------------------------------------------
$route['probando'] = 'Main_controller/probando';
$route['probando/(:any)'] = 'Main_controller/probando/$1';
$route['chat'] = 'Main_controller/chat';
// Emulator and games ----------------------------------------------------
$route['juegos/emulador'] = 'Roms_controller/emulator';
$route['juegos/emulador/(:any)'] = 'Consoles_controller/emulator/$1';
$route['juegos/emulador/(:any)/(:any)'] = 'Roms_controller/emulator/$1/$2';
$route['juegos'] = 'Html5_controller/games';
$route['juegos/(:any)'] = 'Html5_controller/games/$1';
// Main site sections ----------------------------------------------------
$route['tienda'] = 'Main_controller/shop';
$route['social'] = 'Main_controller/social';
$route['noticias'] = 'Main_controller/news';
$route['foro'] = 'Main_controller/forum';
// Authentication and user account actions -------------------------------
$route['login'] = 'User_controller/login';
$route['salir'] = 'User_controller/logout';
$route['add_coins'] = 'User_controller/add_coins';
$route['buy_game'] = 'User_controller/buy_game';
$route['add_coins_comment'] = 'User_controller/add_coins_comment';
$route['registro'] = 'User_controller/signup';
$route['subida'] = 'Upload_controller/upload';
// Admin panel and blog --------------------------------------------------
$route['admin'] = 'ACP_controller/admin';
$route['blog'] = 'Main_controller/blog';
$route['blog/(:any)'] = 'Blog_controller/blog/$1';
$route['blog/(:any)/comment'] = 'Blog_controller/blog_comment/$1';
$route['admin/blog'] = 'ACP_controller/admin/blog';
$route['admin/blog/(:any)'] = 'Blog_controller/blog_actions/$1';
// User profile (UCP); specific routes must precede the (:any) catch-all --
$route['perfil'] = 'UCP_controller/profile';
$route['perfil/editar'] = 'UCP_controller/edit';
$route['perfil/actualizar'] = 'UCP_controller/update_profile';
$route['perfil/mis-juegos'] = 'UCP_controller/show_games';
$route['perfil/(:any)'] = 'UCP_controller/profile/$1'; | mit |
zaeleus/ffi-gphoto2 | spec/gphoto2/camera_widgets/date_camera_widget_spec.rb | 348 | require 'spec_helper'
module GPhoto2
  describe DateCameraWidget do
    it_behaves_like CameraWidget
    describe '#value' do
      it 'has a Time return value' do
        widget = DateCameraWidget.new(nil)
        # NOTE(review): stubbing #value and then asserting on it makes this
        # expectation tautological -- it exercises the stub, not the widget.
        # Consider building the widget from a fixture and asserting on the
        # real return value instead.
        allow(widget).to receive(:value).and_return(Time.now)
        expect(widget.value).to be_kind_of(Time)
      end
    end
  end
end
| mit |
sensorberg-dev/windows10-sdk | SensorbergSDK/Internal/Services/StorageService.cs | 18466 | // Created by Kay Czarnotta on 10.03.2016
//
// Copyright (c) 2016, Sensorberg
//
// All rights reserved.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;
using Windows.Storage;
using MetroLog;
using Newtonsoft.Json;
using SensorbergSDK.Internal.Data;
using SensorbergSDK.Internal.Transport;
using SensorbergSDK.Internal.Utils;
using SensorbergSDK.Services;
namespace SensorbergSDK.Internal.Services
{
    /// <summary>
    /// Coordinates network access (layout retrieval, history upload) with local
    /// persistence, adding retry with exponential backoff around both.
    /// </summary>
    public class StorageService : IStorageService
    {
        private static readonly ILogger Logger = LogManagerFactory.DefaultLogManager.GetLogger<StorageService>();
        // Local-settings / cache-file keys for the persisted layout.
        private const string KeyLayoutHeaders = "layout_headers";
        private const string KeyLayoutContent = "layout_content.cache"; // Cache file
        public const string KeyLayoutRetrievedTime = "layout_retrieved_time";
        // Extra attempts for local storage writes (used by the Save*Retry helpers).
        private const int MaxRetries = 2;
        // Total attempts for network calls made through ExecuteCall.
        public int RetryCount { get; set; } = 3;
        public IStorage Storage { [DebuggerStepThrough] get; [DebuggerStepThrough] set; }
        /// <summary>
        /// Creates the service; a background-created instance flags its
        /// FileStorage accordingly.
        /// </summary>
        public StorageService(bool createdOnForeground = true)
        {
            Storage = new FileStorage() {Background = !createdOnForeground};
        }
        /// <summary>
        /// Initializes the underlying storage (folders/files).
        /// </summary>
        public async Task InitStorage()
        {
            await Storage.InitStorage();
        }
        /// <summary>
        /// Checks whether the given API key is valid or not.
        /// </summary>
        /// <param name="apiKey">The API key to validate.</param>
        /// <returns>The validation result.</returns>
        public async Task<ApiKeyValidationResult> ValidateApiKey(string apiKey)
        {
            var responseMessage = await ExecuteCall(async () => await ServiceManager.ApiConnction.RetrieveLayoutResponse(apiKey));
            if (responseMessage != null && responseMessage.IsSuccess)
            {
                // A successful call with (near-)empty content means the key was
                // accepted syntactically but resolves to no usable layout.
                return string.IsNullOrEmpty(responseMessage.Content) || responseMessage.Content.Length < Constants.MinimumLayoutContentLength
                    ? ApiKeyValidationResult.Invalid
                    : ApiKeyValidationResult.Valid;
            }
            return responseMessage?.NetworResult == NetworkResult.NetworkError ? ApiKeyValidationResult.NetworkError : ApiKeyValidationResult.UnknownError;
        }
        // Exponential backoff: 2^(n+1) * 100 ms (200, 400, 800, ... ms).
        private async Task WaitBackoff(int currentRetries)
        {
            await Task.Delay((int) Math.Pow(2, currentRetries + 1)*100);
        }
        /// <summary>
        /// Retrieves the layout from the service, persisting it locally on
        /// success; on network failure falls back to the locally cached layout.
        /// </summary>
        public async Task<LayoutResult> RetrieveLayout()
        {
            ResponseMessage responseMessage = await ExecuteCall(async () => await ServiceManager.ApiConnction.RetrieveLayoutResponse());
            if (responseMessage != null && responseMessage.IsSuccess)
            {
                Layout layout = null;
                string headersAsString = Helper.StripLineBreaksAndExcessWhitespaces(responseMessage.Header);
                string contentAsString = Helper.StripLineBreaksAndExcessWhitespaces(responseMessage.Content);
                contentAsString = Helper.EnsureEncodingIsUtf8(contentAsString);
                DateTimeOffset layoutRetrievedTime = DateTimeOffset.Now;
                if (contentAsString.Length > Constants.MinimumLayoutContentLength)
                {
                    try
                    {
                        layout = JsonConvert.DeserializeObject<Layout>(contentAsString);
                        layout?.FromJson(headersAsString, layoutRetrievedTime);
                        Logger.Debug("LayoutManager: new Layout received: Beacons: " + layout?.AccountBeaconId1S.Count + " Actions :" + layout?.ResolvedActions.Count);
                    }
                    catch (Exception ex)
                    {
                        // Parse failure: treat as "no layout" and fall through.
                        Logger.Debug("LayoutManager.RetrieveLayout(): Failed to parse layout: " + ex);
                        layout = null;
                    }
                }
                if (layout != null)
                {
                    // Store the parsed layout
                    await SaveLayoutToLocalStorage(headersAsString, contentAsString, layoutRetrievedTime);
                    return new LayoutResult() {Layout = layout, Result = NetworkResult.Success};
                }
            }
            else
            {
                // Network call failed: serve the last locally persisted layout, if any.
                Layout layout = await LoadLayoutFromLocalStorage();
                return new LayoutResult() {Result = layout != null ? NetworkResult.Success : NetworkResult.NetworkError, Layout = layout};
            }
            return new LayoutResult() {Result = NetworkResult.UnknownError};
        }
        /// <summary>
        /// Uploads all undelivered events and actions to the service and marks
        /// them delivered on success.
        /// </summary>
        /// <returns>True if there was nothing pending after a successful upload; false on any failure.</returns>
        public async Task<bool> FlushHistory()
        {
            try
            {
                History history = new History();
                history.Actions = await Storage.GetUndeliveredActions();
                history.Events = await Storage.GetUndeliveredEvents();
                if ((history.Events != null && history.Events.Count > 0) || (history.Actions != null && history.Actions.Count > 0))
                {
                    var responseMessage = await ExecuteCall(async () => await ServiceManager.ApiConnction.SendHistory(history));
                    if (responseMessage.IsSuccess)
                    {
                        if (history.Events != null && history.Events.Count > 0)
                        {
                            await Storage.SetEventsAsDelivered(history.Events);
                        }
                        if (history.Actions != null && history.Actions.Count > 0)
                        {
                            await Storage.SetActionsAsDelivered(history.Actions);
                        }
                        return true;
                    }
                }
            }
            catch (Exception ex)
            {
                Logger.Error("Error while sending history: " + ex.Message, ex);
            }
            return false;
        }
        /// <summary>
        /// Saves the strings that make up a layout.
        /// </summary>
        private async Task SaveLayoutToLocalStorage(string headers, string content, DateTimeOffset layoutRetrievedTime)
        {
            // Headers/timestamp are only recorded when the content file write succeeded.
            if (await StoreData(KeyLayoutContent, content))
            {
                ApplicationData.Current.LocalSettings.Values[KeyLayoutHeaders] = headers;
                ApplicationData.Current.LocalSettings.Values[KeyLayoutRetrievedTime] = layoutRetrievedTime;
            }
        }
        /// <summary>
        /// Saves the given data to the specified file.
        /// </summary>
        /// <param name="fileName">The file name of the storage file.</param>
        /// <param name="data">The data to save.</param>
        /// <returns>True, if successful. False otherwise.</returns>
        private async Task<bool> StoreData(string fileName, string data)
        {
            bool success = false;
            try
            {
                var storageFile = await ApplicationData.Current.LocalFolder.CreateFileAsync(fileName, CreationCollisionOption.ReplaceExisting);
                await FileIO.AppendTextAsync(storageFile, data);
                success = true;
            }
            catch (Exception ex)
            {
                Logger.Error("LayoutManager.StoreData(): Failed to save content: " + ex, ex);
            }
            return success;
        }
        /// <summary>
        /// Tries to load the layout from the local storage.
        /// </summary>
        /// <returns>A layout instance, if successful. Null, if not found.</returns>
        public async Task<Layout> LoadLayoutFromLocalStorage()
        {
            Layout layout = null;
            string headers = string.Empty;
            string content = string.Empty;
            DateTimeOffset layoutRetrievedTime = DateTimeOffset.Now;
            if (ApplicationData.Current.LocalSettings.Values.ContainsKey(KeyLayoutHeaders))
            {
                headers = ApplicationData.Current.LocalSettings.Values[KeyLayoutHeaders].ToString();
            }
            if (ApplicationData.Current.LocalSettings.Values.ContainsKey(KeyLayoutRetrievedTime))
            {
                layoutRetrievedTime = (DateTimeOffset) ApplicationData.Current.LocalSettings.Values[KeyLayoutRetrievedTime];
            }
            try
            {
                var contentFile = await ApplicationData.Current.LocalFolder.TryGetItemAsync(KeyLayoutContent);
                if (contentFile != null)
                {
                    content = await FileIO.ReadTextAsync(contentFile as IStorageFile);
                }
            }
            catch (Exception ex)
            {
                Logger.Error("LayoutManager.LoadLayoutFromLocalStorage(): Failed to load content: " + ex, ex);
            }
            if (!string.IsNullOrEmpty(content))
            {
                content = Helper.EnsureEncodingIsUtf8(content);
                try
                {
                    layout = JsonConvert.DeserializeObject<Layout>(content);
                    layout?.FromJson(headers, layoutRetrievedTime);
                }
                catch (Exception ex)
                {
                    Logger.Error("LayoutManager.LoadLayoutFromLocalStorage(): Failed to parse layout: " + ex, ex);
                }
            }
            if (layout == null)
            {
                // Failed to parse the layout => invalidate it
                await InvalidateLayout();
            }
            return layout;
        }
        #region storage methods
        /// <summary>
        /// Persists a history action, retrying on transient file errors.
        /// </summary>
        public async Task<bool> SaveHistoryAction(string uuid, string beaconPid, DateTimeOffset now, BeaconEventType beaconEventType, string location)
        {
            return await SaveHistoryActionRetry(uuid, beaconPid, now, beaconEventType, location, MaxRetries);
        }
        // Recursive retry helper: counts down `retry` on failure or on
        // UnauthorizedAccess/FileNotFound exceptions from the file storage.
        private async Task<bool> SaveHistoryActionRetry(string uuid, string beaconPid, DateTimeOffset now, BeaconEventType beaconEventType, string location, int retry)
        {
            if (retry < 0)
            {
                return false;
            }
            try
            {
                HistoryAction action = FileStorageHelper.ToHistoryAction(uuid, beaconPid, now, beaconEventType, location);
                if (await Storage.SaveHistoryAction(action))
                {
                    return true;
                }
                return await SaveHistoryActionRetry(uuid, beaconPid, now, beaconEventType, location, --retry);
            }
            catch (UnauthorizedAccessException)
            {
                return await SaveHistoryActionRetry(uuid, beaconPid, now, beaconEventType, location, --retry);
            }
            catch (FileNotFoundException)
            {
                return await SaveHistoryActionRetry(uuid, beaconPid, now, beaconEventType, location, --retry);
            }
        }
        /// <summary>
        /// Persists a history event, retrying on transient file errors.
        /// </summary>
        public async Task<bool> SaveHistoryEvent(string pid, DateTimeOffset timestamp, BeaconEventType eventType, string location)
        {
            return await SaveHistoryEventRetry(pid, timestamp, eventType, location, MaxRetries);
        }
        // Recursive retry helper, same pattern as SaveHistoryActionRetry.
        private async Task<bool> SaveHistoryEventRetry(string pid, DateTimeOffset timestamp, BeaconEventType eventType, string location, int retry)
        {
            if (retry < 0)
            {
                return false;
            }
            try
            {
                if (await Storage.SaveHistoryEvents(FileStorageHelper.ToHistoryEvent(pid, timestamp, eventType, location)))
                {
                    return true;
                }
                return await SaveHistoryEventRetry(pid, timestamp, eventType, location, --retry);
            }
            catch (UnauthorizedAccessException)
            {
                return await SaveHistoryEventRetry(pid, timestamp, eventType, location, --retry);
            }
            catch (FileNotFoundException)
            {
                return await SaveHistoryEventRetry(pid, timestamp, eventType, location, --retry);
            }
        }
        /// <summary>
        /// Delegates database cleanup to the underlying storage.
        /// </summary>
        public async Task CleanupDatabase()
        {
            await Storage.CleanupDatabase();
        }
        /// <summary>
        /// Returns the pending delayed actions from storage.
        /// </summary>
        public async Task<IList<DelayedActionData>> GetDelayedActions()
        {
            return await Storage.GetDelayedActions();
        }
        /// <summary>
        /// Marks the delayed action with the given id as executed.
        /// </summary>
        public async Task SetDelayedActionAsExecuted(string id)
        {
            await Storage.SetDelayedActionAsExecuted(id);
        }
        /// <summary>
        /// Persists a delayed action, retrying on transient file errors.
        /// </summary>
        public async Task<bool> SaveDelayedAction(ResolvedAction action, DateTimeOffset dueTime, string beaconPid, BeaconEventType eventType, string location)
        {
            return await SaveDelayedActionsRetry(action, dueTime, beaconPid, eventType, location, MaxRetries);
        }
        // Recursive retry helper, same pattern as SaveHistoryActionRetry.
        private async Task<bool> SaveDelayedActionsRetry(ResolvedAction action, DateTimeOffset dueTime, string beaconPid, BeaconEventType eventTypeDetectedByDevice, string location, int retry)
        {
            if (retry < 0)
            {
                return false;
            }
            try
            {
                if (await Storage.SaveDelayedAction(action, dueTime, beaconPid, eventTypeDetectedByDevice, location))
                {
                    return true;
                }
                return await SaveDelayedActionsRetry(action, dueTime, beaconPid, eventTypeDetectedByDevice, location, --retry);
            }
            catch (UnauthorizedAccessException)
            {
                return await SaveDelayedActionsRetry(action, dueTime, beaconPid, eventTypeDetectedByDevice, location, --retry);
            }
            catch (FileNotFoundException)
            {
                return await SaveDelayedActionsRetry(action, dueTime, beaconPid, eventTypeDetectedByDevice, location, --retry);
            }
        }
        /// <summary>
        /// Returns the last persisted event state for the given beacon pid.
        /// </summary>
        public async Task<BackgroundEvent> GetLastEventStateForBeacon(string pid)
        {
            return await Storage.GetLastEventStateForBeacon(pid);
        }
        /// <summary>
        /// Persists the beacon event state, retrying on transient file errors.
        /// </summary>
        public async Task<bool> SaveBeaconEventState(string pid, BeaconEventType enter)
        {
            return await SaveBeaconEventStateRetry(pid, enter,MaxRetries);
        }
        // Recursive retry helper, same pattern as SaveHistoryActionRetry.
        private async Task<bool> SaveBeaconEventStateRetry(string pid, BeaconEventType enter, int retry)
        {
            if (retry < 0)
            {
                return false;
            }
            try
            {
                if (await Storage.SaveBeaconEventState(pid, enter))
                {
                    return true;
                }
                return await SaveBeaconEventStateRetry(pid, enter, --retry);
            }
            catch (UnauthorizedAccessException)
            {
                return await SaveBeaconEventStateRetry(pid, enter, --retry);
            }
            catch (FileNotFoundException)
            {
                return await SaveBeaconEventStateRetry(pid, enter, --retry);
            }
        }
        /// <summary>
        /// Resolves the stored foreground history actions to their BeaconActions
        /// via the layout manager.
        /// </summary>
        public async Task<List<BeaconAction>> GetActionsForForeground(bool doNotDelete = false)
        {
            List<BeaconAction> beaconActions = new List<BeaconAction>();
            List<HistoryAction> historyActions = await Storage.GetActionsForForeground(doNotDelete);
            foreach (HistoryAction historyAction in historyActions)
            {
                ResolvedAction action = ServiceManager.LayoutManager.GetAction(historyAction.EventId);
                beaconActions.Add(action.BeaconAction);
            }
            return beaconActions;
        }
        #endregion
        /// <summary>
        /// Invalidates both the current and cached layout.
        /// </summary>
        public async Task InvalidateLayout()
        {
            ApplicationData.Current.LocalSettings.Values[KeyLayoutHeaders] = null;
            ApplicationData.Current.LocalSettings.Values[KeyLayoutRetrievedTime] = null;
            try
            {
                var contentFile = await ApplicationData.Current.LocalFolder.TryGetItemAsync(KeyLayoutContent);
                if (contentFile != null)
                {
                    await contentFile.DeleteAsync();
                }
            }
            catch (Exception ex)
            {
                Logger.Error("Error invalidating layout", ex);
            }
        }
        // Runs a network call with up to RetryCount attempts and exponential
        // backoff between them. Timeout/IO/HTTP failures are classified as
        // network errors; any other exception yields UnknownError.
        private async Task<ResponseMessage> ExecuteCall(Func<Task<ResponseMessage>> action)
        {
            bool networkError;
            int retries = 0;
            do
            {
                try
                {
                    ResponseMessage responseMessage = await action();
                    responseMessage.NetworResult = NetworkResult.Success;
                    return responseMessage;
                }
                catch (TimeoutException e)
                {
                    networkError = true;
                    Logger.Error("timeout error while executing call: " + e.Message, e);
                    await WaitBackoff(retries);
                }
                catch (IOException e)
                {
                    networkError = true;
                    Logger.Error("Error while executing call: " + e.Message, e);
                    await WaitBackoff(retries);
                }
                catch (HttpRequestException e)
                {
                    networkError = true;
                    Logger.Error("Error while executing call: " + e.Message, e);
                    await WaitBackoff(retries);
                }
                catch (Exception e)
                {
                    networkError = false;
                    Logger.Error("Error while executing call: " + e.Message, e);
                    await WaitBackoff(retries);
                }
                finally
                {
                    // Counts attempts regardless of outcome (runs after the
                    // backoff awaits above; skipped only when the try returned).
                    retries++;
                }
            } while (retries < RetryCount);
            return new ResponseMessage() {NetworResult = networkError ? NetworkResult.NetworkError : NetworkResult.UnknownError};
        }
    }
} | mit |
KeldOelykke/FailFast | Java/FailFast/src/starkcoder/failfast/fails/generics/collections/IGenericCollectionFailer.java | 1716 | /////////////////////////////////////////////////////////////////////////////////////////
//
// The MIT License (MIT)
//
// Copyright (c) 2014-2015 Keld Oelykke
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
/////////////////////////////////////////////////////////////////////////////////////////
package starkcoder.failfast.fails.generics.collections;
/**
 * Specification grouping all generic collection fail specifications.
 * <p>
 * This (or a derivative) should inherit all fail methods targeting
 * {@code Collection<?>}.
 * </p>
 *
 * @author Keld Oelykke
 */
public interface IGenericCollectionFailer extends IGenericCollectionEqualsFail,
    IGenericCollectionNotEqualsFail
{
}
| mit |
ivey/merbivore-blog | app/plugins/feather-tagging/lib/article.rb | 677 | ##
# Reopen the article model and wang in the stuff we need
class Article
  # We define this tags attribute so when the form posts, the params for tags get set, and we can subsequently access it later on
  attr_accessor :tag_list
  has_many :taggings
  # Rebuilds this article's taggings from the comma-separated @tag_list string:
  # destroys all existing taggings, then creates one Tagging (and Tag, if new)
  # per non-empty entry, stripped of surrounding whitespace. No-op when
  # @tag_list is nil or empty.
  def create_tags
    return if @tag_list.nil? || @tag_list.empty?
    # Wax all the existing taggings
    self.taggings.each {|t| t.destroy! }
    @tag_list.split(",").each do |t|
      unless t.empty?
        tag = Tag.find_or_create(:name => t.strip)
        Tagging.create(:article_id => self.id, :tag_id => tag.id)
      end
    end
  end
  # Returns the article's tag names joined as "a, b, c" (for form display).
  def tags
    taggings.map { |tagging| tagging.tag.name }.join(", ")
  end
end
DailyActie/Surrogate-Model | 01-codes/OpenMDAO-Framework-dev/contrib/drea_usecase/DREA_HSRnoise.py | 8050 | """
DREA_HSRnoise.py - Code to run a DREA followed by HSRnoise for M-E off-design analysis
"""
from openmdao.lib.casehandlers.api import ListCaseIterator, DBCaseRecorder
from openmdao.lib.drivers.api import CaseIteratorDriver
from openmdao.main.api import Assembly, Case, Instance
from openmdao.main.datatypes.api import Float, Enum
# from openmdao.lib.drivers.api import DOEdriver
# from openmdao.lib.doegenerators.api import FullFactorial
from DREA import DREA
from hsrnoise import HSRNOISE
from DREAprep import DREAprep
from geometry import Geometry
from ACDgen import ACDgen
class DREA_HSRnoise(Assembly):
    """Assembly to execute on DREA followed by HSRnoise.

    Wires DREAprep -> DREA -> HSRnoise and predefines seven certification
    cases (flyover, approach, sideline) for a CaseIteratorDriver.
    """
    # Shared nozzle geometry, fed to both DREA and HSRnoise.
    geo = Instance(Geometry, iotype='in')
    alt = Float(0.0, iotype='in', units='ft', desc='Altitude')
    point = Enum(1, [1, 2, 3], iotype='in', desc='Certification observer point')
    def __init__(self):
        """Creates an Assembly to run DREA and HSRnoise."""
        super(DREA_HSRnoise, self).__init__()
        # Certification cases: FO1/FO2 = flyover (point 1) at 100%/65% power,
        # App = approach (point 2), SL1-SL4 = sideline (point 3) at azimuth
        # angles phi = 0/30/60/90 deg -- presumably FAR-style observer points;
        # TODO confirm against the certification procedure used.
        FO1 = Case(inputs=[('point', 1), ('dreaprep.Mach', 0.28), ('alt', 2000.0), ('dreaprep.PC', 100.0),
                           ('hsrnoise.phi', 0.0)],
                   outputs=[('drea.CFG', 0.0), ('hsrnoise.thetas', 0.0), ('hsrnoise.Freq', 0.0),
                            ('hsrnoise.SPL_corr', 0), ('hsrnoise.OASPL30', 0.0), ('hsrnoise.OASPL60', 0.0),
                            ('hsrnoise.OASPL90', 0.0), ('hsrnoise.OASPL120', 0.0), ('hsrnoise.OASPL150', 0.0)])
        FO2 = Case(inputs=[('point', 1), ('dreaprep.Mach', 0.28), ('alt', 2000.0), ('dreaprep.PC', 65.0),
                           ('hsrnoise.phi', 0.0)],
                   outputs=[('drea.CFG', 0.0), ('hsrnoise.thetas', 0.0), ('hsrnoise.Freq', 0.0),
                            ('hsrnoise.SPL_corr', 0), ('hsrnoise.OASPL30', 0.0), ('hsrnoise.OASPL60', 0.0),
                            ('hsrnoise.OASPL90', 0.0), ('hsrnoise.OASPL120', 0.0), ('hsrnoise.OASPL150', 0.0)])
        App = Case(inputs=[('point', 2), ('dreaprep.Mach', 0.20), ('alt', 394.0), ('dreaprep.PC', 30.0),
                           ('hsrnoise.phi', 0.0)],
                   outputs=[('drea.CFG', 0.0), ('hsrnoise.thetas', 0.0), ('hsrnoise.Freq', 0.0),
                            ('hsrnoise.SPL_corr', 0), ('hsrnoise.OASPL30', 0.0), ('hsrnoise.OASPL60', 0.0),
                            ('hsrnoise.OASPL90', 0.0), ('hsrnoise.OASPL120', 0.0), ('hsrnoise.OASPL150', 0.0)])
        SL1 = Case(inputs=[('point', 3), ('dreaprep.Mach', 0.25), ('alt', 1000.0), ('dreaprep.PC', 100.0),
                           ('hsrnoise.phi', 0.0)],
                   outputs=[('drea.CFG', 0.0), ('hsrnoise.thetas', 0.0), ('hsrnoise.Freq', 0.0),
                            ('hsrnoise.SPL_corr', 0), ('hsrnoise.OASPL30', 0.0), ('hsrnoise.OASPL60', 0.0),
                            ('hsrnoise.OASPL90', 0.0), ('hsrnoise.OASPL120', 0.0), ('hsrnoise.OASPL150', 0.0)])
        SL2 = Case(inputs=[('point', 3), ('dreaprep.Mach', 0.25), ('alt', 1000.0), ('dreaprep.PC', 100.0),
                           ('hsrnoise.phi', 30.0)],
                   outputs=[('drea.CFG', 0.0), ('hsrnoise.thetas', 0.0), ('hsrnoise.Freq', 0.0),
                            ('hsrnoise.SPL_corr', 0), ('hsrnoise.OASPL30', 0.0), ('hsrnoise.OASPL60', 0.0),
                            ('hsrnoise.OASPL90', 0.0), ('hsrnoise.OASPL120', 0.0), ('hsrnoise.OASPL150', 0.0)])
        SL3 = Case(inputs=[('point', 3), ('dreaprep.Mach', 0.25), ('alt', 1000.0), ('dreaprep.PC', 100.0),
                           ('hsrnoise.phi', 60.0)],
                   outputs=[('drea.CFG', 0.0), ('hsrnoise.thetas', 0.0), ('hsrnoise.Freq', 0.0),
                            ('hsrnoise.SPL_corr', 0), ('hsrnoise.OASPL30', 0.0), ('hsrnoise.OASPL60', 0.0),
                            ('hsrnoise.OASPL90', 0.0), ('hsrnoise.OASPL120', 0.0), ('hsrnoise.OASPL150', 0.0)])
        SL4 = Case(inputs=[('point', 3), ('dreaprep.Mach', 0.25), ('alt', 1000.0), ('dreaprep.PC', 100.0),
                           ('hsrnoise.phi', 90.0)],
                   outputs=[('drea.CFG', 0.0), ('hsrnoise.thetas', 0.0), ('hsrnoise.Freq', 0.0),
                            ('hsrnoise.SPL_corr', 0), ('hsrnoise.OASPL30', 0.0), ('hsrnoise.OASPL60', 0.0),
                            ('hsrnoise.OASPL90', 0.0), ('hsrnoise.OASPL120', 0.0), ('hsrnoise.OASPL150', 0.0)])
        cases = ListCaseIterator([FO1, FO2, App, SL1, SL2, SL3, SL4])
        # Recorder feeding ACDgen with the results of the case sweep.
        db_recorder = DBCaseRecorder()
        self.add('geo', Geometry())
        self.add('dreaprep', DREAprep())
        self.add('drea', DREA())
        self.add('hsrnoise', HSRNOISE())
        self.add('ACDgen', ACDgen())
        self.add('analysis', CaseIteratorDriver())
        self.analysis.iterator = cases
        self.analysis.recorders = [db_recorder]
        self.ACDgen.case_data = db_recorder.get_iterator()
        # Set up the workflows
        # ---------------------------
        # self.analysis.workflow.add(['dreaprep', 'drea', 'hsrnoise'])
        # self.driver.workflow.add(['analysis','ACDgen'])
        self.driver.workflow.add(['dreaprep', 'drea', 'hsrnoise'])
        # Connections
        # ---------------------------
        self.connect('geo', ['drea.geo_in', 'hsrnoise.geo_in'])
        self.connect('alt', ['dreaprep.alt', 'hsrnoise.ALTEVO'])
        self.connect('dreaprep.flow_out', 'drea.flow_in')
        self.connect('drea.flow_out', 'hsrnoise.flow_in')
        self.connect('drea.CFG', 'hsrnoise.CFG')
if __name__ == "__main__":
    # Stand-alone smoke run: configure one static sea-level operating point,
    # run the assembly once, and print the DREA/HSRnoise inputs and results.
    asy = DREA_HSRnoise()
    import time
    tt = time.time()
    # Flight condition: static (Mach 0), sea level, hot day (dTs in deg F/R?
    # units not shown here -- confirm against DREAprep).
    asy.drea.mode = 'Subsonic'
    asy.dreaprep.dTs = 18.3
    asy.dreaprep.Mach = 0.0
    asy.alt = 0.0
    asy.hsrnoise.phi = 90.0
    asy.dreaprep.PC = 80
    asy.dreaprep.deltaPt = 0.0
    asy.dreaprep.deltaTt = 0.0
    asy.dreaprep.deltaM = 0.0
    # Nozzle geometry definition, then derived quantities via calc_geom().
    asy.geo.Apri = 12.0
    asy.geo.AsAp = 1.75
    asy.geo.AR = 3.0
    asy.geo.AeAt = .95
    asy.geo.length = 8.0
    asy.geo.ChuteAngles = 10
    asy.geo.calc_geom(asy.geo.length, asy.geo.Apri, asy.geo.AsAp, asy.geo.AR, asy.geo.AeAt, asy.geo.LhMh,
                      asy.geo.LhWave)
    asy.run()
    # Echo the inputs as seen by DREA and HSRnoise side by side, then results.
    print '\n\n'
    print 'alt : ', asy.dreaprep.alt
    print 'Mach : ', asy.dreaprep.Mach
    print 'HMIC : ', asy.hsrnoise.HMIC
    print 'SL : ', asy.hsrnoise.SL
    print '\n'
    print 'INPUTS DREA HSRNoise'
    print 'Length : ', asy.drea.geo_in.length, ' ', asy.hsrnoise.geo_in.length
    print 'Width : ', asy.drea.geo_in.width, ' ', asy.hsrnoise.geo_in.width
    print 'Primary Area : ', asy.drea.geo_in.Apri, ' ', asy.hsrnoise.geo_in.Apri
    print 'Secondary Area : ', asy.drea.geo_in.Asec, ' ', asy.hsrnoise.geo_in.Asec
    print 'Exit Area : ', asy.drea.geo_in.Aexit, ' ', asy.hsrnoise.geo_in.Aexit
    print 'Primary Pressure: ', asy.drea.flow_in.pri.Pt, ' ', asy.hsrnoise.flow_in.pri.Pt
    print 'Primary Temp : ', asy.drea.flow_in.pri.Tt, ' ', asy.hsrnoise.flow_in.pri.Tt
    print 'Primary Mach : ', asy.drea.flow_in.pri.Mach, ' ', asy.hsrnoise.flow_in.pri.Mach
    print '\n'
    print 'RESULTS'
    print 'Gross Thrust : ', asy.drea.GrossThrust
    print 'Exit Velocity : ', asy.drea.ExitVelocity
    print 'Exit Mach : ', asy.drea.ExitMach
    print 'CFG: : ', asy.drea.CFG
    print 'Pumping Ratio : ', asy.drea.PumpingRatio
    print 'Mass Flow (Pri) : ', asy.drea.flow_out.pri.W
    print 'Mass Flow (Sec) : ', asy.drea.flow_out.sec.W
    print 'Secondary Mach : ', asy.drea.flow_out.sec.Mach
    print 'NPR : ', asy.drea.NPR
    print 'Degree of mixing: ', asy.drea.DegreeOfMixing
    print 'OASPL30 : ', asy.hsrnoise.OASPL30
    print 'OASPL60 : ', asy.hsrnoise.OASPL60
    print 'OASPL90 : ', asy.hsrnoise.OASPL90
    print 'OASPL120 : ', asy.hsrnoise.OASPL120
    print 'OASPL150 : ', asy.hsrnoise.OASPL150
    # print 'test: ', asy.hsrnoise.SPL_corr
    print "\n"
    print "Elapsed time: ", time.time() - tt, "seconds"
| mit |
todorm85/TelerikAcademy | Courses/Software Technologies/AspNet MVC/Exam/Web/HikingPlanAndRescue.Web/App_Start/DatabaseConfig.cs | 618 | namespace VoiceSystem.Web
{
using System.Data.Entity;
using Data;
using Data.Migrations;
public class DatabaseConfig
{
public static void Config()
{
var dbReset = false;
if (dbReset)
{
Database.SetInitializer(new DropCreateDatabaseAlways<ApplicationDbContext>());
}
else
{
Database.SetInitializer(new MigrateDatabaseToLatestVersion<ApplicationDbContext, Configuration>());
}
ApplicationDbContext.Create().Database.Initialize(true);
}
}
} | mit |
RaduCiumag/shapeshift | server-http/src/main/java/com/github/raduciumag/shapeshift/model/transformation/generator/HttpHeadersGenerator.java | 1496 | package com.github.raduciumag.shapeshift.model.transformation.generator;
import com.github.raduciumag.shapeshift.model.server.HttpServerResponse;
import com.github.raduciumag.shapeshift.model.server.ServerResponse;
import com.google.common.base.Preconditions;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.Map;
/**
* Add headers to a HTTP response.
*/
@Component
public class HttpHeadersGenerator extends BaseTransformationGenerator {

    public static final String NAME = "HTTP_HEADERS";

    /** Registers this generator under {@link #NAME}. */
    public HttpHeadersGenerator() {
        super(NAME);
    }

    /**
     * Appends each entry of {@code args} as a header value on the given
     * HTTP response. Missing header lists are created on demand; values
     * are converted with {@link String#valueOf(Object)}.
     *
     * @param response must be a {@link HttpServerResponse}
     * @param args     header name to value map; null/empty means no-op
     */
    @Override
    public void generate(final ServerResponse response, final Map<String, Object> args) {
        /* Arguments validation */
        Preconditions.checkArgument(response instanceof HttpServerResponse,
            "This generator can process only HttpServerResponse type responses");

        /* Nothing to add */
        if (args == null || args.isEmpty()) {
            return;
        }

        final HttpServerResponse httpResponse = (HttpServerResponse) response;
        for (final String headerName : args.keySet()) {
            if (!httpResponse.getHeaders().containsKey(headerName)) {
                httpResponse.getHeaders().put(headerName, new ArrayList<>());
            }
            httpResponse.getHeaders().get(headerName).add(String.valueOf(args.get(headerName)));
        }
    }
}
| mit |
jrechandi/sundahipP | src/Sunahip/PagosBundle/PagosBundle.php | 134 | <?php
namespace Sunahip\PagosBundle;
use Symfony\Component\HttpKernel\Bundle\Bundle;
/**
 * Symfony bundle entry point for the Pagos (payments) module.
 * Intentionally empty: registration of the bundle is its only purpose.
 */
class PagosBundle extends Bundle
{
}
| mit |
tinemakovecki/duel_game | Player.py | 432 | ### HUMAN PLAYER FILE
class Human():
    """A human-controlled player whose moves are driven by the GUI."""

    def __init__(self, gui):
        # Handle to the GUI that supplies this player's input.
        self.gui = gui

    def play(self):
        """No-op: the turn proceeds once the human selects an attack in the GUI."""
        return None

    def make_attack(self, selected, certain_hit):
        """Relay the chosen attack to the GUI layer."""
        self.gui.make_attack(selected, certain_hit)

    def interrupt(self):
        """Interrupt hook; a human player needs no special handling."""
        return None
tomk79/mz2-baser-cms | zip_template/pickles2_export/Elements/widget_area.php | 408 | <?php
/**
 * Widget area (for smartphones)
 *
 * Invoked through BcBaserHelper::widgetArea():
 * <?php $this->BcBaser->widgetArea() ?>
 **/
// Render nothing while in maintenance mode or when no widget-area number was given.
if (Configure::read('BcRequest.isMaintenance') || empty($no)) {
	return;
}
// Look up the widget elements in a sub directory unless told otherwise.
if (!isset($subDir)) {
	$subDir = true;
}
?>
<div class="widget-area widget-area-<?php echo $no ?>">
	<?php $this->BcWidgetArea->show($no, array('subDir' => $subDir)) ?>
</div>
i-den/SoftwareUniversity | Software University/04) PHP Web Development Basics/23) MYSQL AND PHP ADVANCED/App/Core/autoload.php | 201 | <?php
/**
* If under Windows make sure to comment 7th row out
*/
// Autoloader: map a fully-qualified class name to a relative file path by
// translating namespace separators into directory separators.
spl_autoload_register(function ($class) {
    require_once strtr($class, '\\', '/') . '.php';
});
| mit |
poppy-circus/wire | Gruntfile.js | 796 | function getConfigOf(value) {
var helper = './tasks/helper/grunt_config_';
return require(helper + value).getConfig();
}
module.exports = function(grunt) {
'use strict';
grunt.initConfig({
copy: getConfigOf('copy'),
clean: getConfigOf('clean'),
jasmine_node: getConfigOf('jasmine'),
jsdoc: getConfigOf('jsdoc'),
jshint: getConfigOf('jshint'),
lodash: getConfigOf('lodash')
});
grunt.loadNpmTasks('grunt-contrib-clean');
grunt.loadNpmTasks('grunt-contrib-copy');
grunt.loadNpmTasks('grunt-jasmine-node');
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-jsdoc');
grunt.loadNpmTasks('grunt-lodash');
grunt.loadTasks('tasks');
grunt.registerTask('default', ['jshint', 'jasmine_node']);
};
| mit |
Caspar12/Csharp | src/Zh.BLL.Define/Contracts/Imp/AutoCode/MemberAccountService.cs | 564 | /*
* Author: 陈志杭 Caspar
* Contact: 279397942@qq.com qq:279397942
 * Description: Business-logic layer data interface contract file
 * This file was generated from a template
*/
using System;
using System.Collections;
using System.Collections.Generic;
using Zh.DAL.Define.Entities;
using Zh.DAL.Base.Define;
using Zh.DAL.Base.Define.Query;
using Zh.BLL.Base.Define;
using Zh.BLL.Define.Entities;
namespace Zh.BLL.Define.Contracts.Imp
{
    /// <summary>
    /// Business-logic service for member accounts, bridging the
    /// <c>Member_Account</c> entity and <c>MemberAccountDto</c> through
    /// the generic context service base. Template-generated; add
    /// hand-written members in a separate partial class file.
    /// </summary>
    public partial class MemberAccountService : GenericContextService<MemberAccountDto,Member_Account >,IMemberAccountService
    {
    }
}
| mit |
java-course-ee/java-course-se | Class/Interface/src/edu/javacourse/interfaces/Main.java | 777 | package edu.javacourse.interfaces;
import edu.javacourse.interfaces.impl.Human1;
import edu.javacourse.interfaces.impl.Human2;
import edu.javacourse.interfaces.impl.Human3;
import edu.javacourse.interfaces.service.Autobase;
import edu.javacourse.interfaces.service.Hospital;
/**
 * Demonstrates interface-based hiring: Hospital and Autobase each accept
 * only the Human implementations that satisfy their expected contract.
 */
public class Main {
    public static void main(String[] args) {
        Hospital hospital = new Hospital();
        Autobase autobase = new Autobase();

        // Human1 can be hired by both employers.
        Human1 ivan = new Human1();
        hospital.hire(ivan);
        autobase.hire(ivan);

        // Human2 is only accepted by the autobase.
        Human2 petr = new Human2();
        // hospital.hire(petr); // Cannot invoke
        autobase.hire(petr);

        // Human3 is only accepted by the hospital.
        Human3 sidor = new Human3();
        hospital.hire(sidor);
        // autobase.hire(sidor); // Cannot invoke
    }
}
| mit |
shakuu/Parser | Parser/Src/Parser.Common.Logging/MessageType.cs | 115 | namespace Parser.Common.Logging
{
    /// <summary>
    /// Kind of a logged message.
    /// </summary>
    public enum MessageType
    {
        /// <summary>Informational message.</summary>
        Info = 1,
        /// <summary>Error message.</summary>
        Error = 2
    }
}
| mit |
NeoAdonis/osu-framework | osu.Framework/Timing/OffsetClock.cs | 591 | // Copyright (c) 2007-2016 ppy Pty Ltd <contact@ppy.sh>.
// Licensed under the MIT Licence - https://raw.githubusercontent.com/ppy/osu-framework/master/LICENCE
namespace osu.Framework.Timing
{
    /// <summary>
    /// An <see cref="IClock"/> decorator that reports the time of an
    /// underlying source clock shifted by a configurable <see cref="Offset"/>.
    /// </summary>
    public class OffsetClock : IClock
    {
        /// <summary>The clock whose time is being offset.</summary>
        protected IClock Source;

        /// <summary>Amount added to the source clock's current time.</summary>
        public double Offset;

        /// <summary>The source's current time plus <see cref="Offset"/>.</summary>
        public double CurrentTime => Source.CurrentTime + Offset;

        /// <summary>Rate, passed through unchanged from the source clock.</summary>
        public double Rate => Source.Rate;

        /// <summary>Whether the source clock is running.</summary>
        public bool IsRunning => Source.IsRunning;

        public OffsetClock(IClock source)
        {
            Source = source;
        }
    }
}
| mit |
payitsquare/wepay-dotnet | WePay/Entities/Structure/MFA/WepayMFAChallengeData.cs | 486 | using Newtonsoft.Json;
using System.Collections.Generic;
namespace WePay.Entities.Structure
{
    /// <summary>
    /// Multi-factor-authentication challenge data deserialized from a WePay
    /// API response; each property maps to the JSON field named in its
    /// <c>JsonProperty</c> attribute.
    /// </summary>
    public class WepayMFAChallengeData
    {
        /// <summary>Data for the "voice" challenge channel.</summary>
        [JsonProperty("voice")]
        public string Voice { get; set; }

        /// <summary>Data for the "sms" challenge channel.</summary>
        [JsonProperty("sms")]
        public string SMS { get; set; }

        /// <summary>Data for the "authenticator" challenge channel.</summary>
        [JsonProperty("authenticator")]
        public WepayMFAAuthenticator Authenticator { get; set; }

        /// <summary>Entries of the "backup" challenge list.</summary>
        [JsonProperty("backup")]
        public IList<string> Backup { get; set; }
    }
| mit |
aspc/mainsite | aspc/courses/admin.py | 1326 | from django.contrib import admin
from aspc.courses.models import (Section, Course, Department, Meeting, RequirementArea, RefreshHistory, CourseReview,
FeaturingQuery)
class DepartmentAdmin(admin.ModelAdmin):
    # Changelist columns for Department (also reused for RequirementArea).
    list_display = ('code', 'name', 'course_count')

class MeetingAdmin(admin.ModelAdmin):
    # Changelist columns: owning section, weekday flags, time and place.
    list_display = ('section', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'begin', 'end', 'location',)

class MeetingInline(admin.TabularInline):
    # Inline meeting editor embedded in the Section admin page.
    model = Meeting
    # NOTE(review): list_display is a ModelAdmin option and has no effect on
    # a TabularInline — confirm whether this was meant to be `fields`.
    list_display = ('section', 'monday', 'tuesday', 'wednesday', 'thursday', 'friday', 'begin', 'end', 'location',)

class CourseAdmin(admin.ModelAdmin):
    list_display = ('code', 'number', 'name', 'primary_department')

class SectionAdmin(admin.ModelAdmin):
    list_display = ('code', 'course', 'term', 'description', 'credit', 'spots', 'filled', 'perms')
    # Edit a section's meetings directly on its admin page.
    inlines = [MeetingInline]

class CourseReviewAdmin(admin.ModelAdmin):
    list_display = ('author', 'course', 'instructor', 'overall_rating')

# Register each model with its customised admin. RequirementArea reuses
# DepartmentAdmin; RefreshHistory and FeaturingQuery use the default admin.
admin.site.register(Course, CourseAdmin)
admin.site.register(Department, DepartmentAdmin)
admin.site.register(RequirementArea, DepartmentAdmin)
admin.site.register(Meeting, MeetingAdmin)
admin.site.register(Section, SectionAdmin)
admin.site.register(CourseReview, CourseReviewAdmin)
admin.site.register(RefreshHistory)
admin.site.register(FeaturingQuery)
elastification/php-client | tests/Unit/Request/V1x/Cat/RecoveryCatRequestTest.php | 3234 | <?php
namespace Elastification\Client\Tests\Unit\Request\V1x\Cat;
use Elastification\Client\Request\RequestMethods;
use Elastification\Client\Request\Shared\Cat\AbstractRecoveryCatRequest;
use Elastification\Client\Request\V1x\Cat\RecoveryCatRequest;
/**
 * Unit tests for the V1.x cat/recovery request: request metadata (index,
 * type, HTTP method, action), serializer handling, body behaviour and
 * response creation.
 */
class RecoveryCatRequestTest extends \PHPUnit_Framework_TestCase
{
    const RESPONSE_CLASS = 'Elastification\Client\Response\Response';

    /**
     * @var \PHPUnit_Framework_MockObject_MockObject
     */
    private $serializer;

    /**
     * @var RecoveryCatRequest
     */
    private $request;

    protected function setUp()
    {
        parent::setUp();
        // The serializer is mocked; the request under test must never use it.
        $this->serializer = $this->getMockBuilder('Elastification\Client\Serializer\SerializerInterface')
            ->disableOriginalConstructor()
            ->getMock();
        $this->request = new RecoveryCatRequest(null, null, $this->serializer);
    }

    protected function tearDown()
    {
        $this->serializer = null;
        $this->request = null;
        parent::tearDown();
    }

    // The request must satisfy the generic interface and its cat-specific bases.
    public function testInstance()
    {
        $this->assertInstanceOf(
            'Elastification\Client\Request\RequestInterface',
            $this->request
        );
        $this->assertInstanceOf(
            'Elastification\Client\Request\Shared\Cat\AbstractRecoveryCatRequest',
            $this->request
        );
        $this->assertInstanceOf(
            'Elastification\Client\Request\V1x\Cat\RecoveryCatRequest',
            $this->request
        );
    }

    // Cat requests report the request-action constant through getIndex().
    public function testGetIndex()
    {
        $this->assertSame(AbstractRecoveryCatRequest::REQUEST_ACTION, $this->request->getIndex());
    }

    public function testGetType()
    {
        $this->assertNull($this->request->getType());
    }

    public function testGetMethod()
    {
        $this->assertSame(RequestMethods::GET, $this->request->getMethod());
    }

    public function testGetAction()
    {
        $this->assertSame(AbstractRecoveryCatRequest::CAT_TYPE, $this->request->getAction());
    }

    public function testGetSerializer()
    {
        $this->assertSame($this->serializer, $this->request->getSerializer());
    }

    public function testGetSerializerParams()
    {
        $this->assertTrue(is_array($this->request->getSerializerParams()));
        $this->assertEmpty($this->request->getSerializerParams());
    }

    // Cat requests carry no body: setBody() must not serialize anything and
    // getBody() stays null.
    public function testSetGetBody()
    {
        $body = 'my test body';
        $this->serializer->expects($this->never())
            ->method('serialize');
        $this->request->setBody($body);
        $this->assertNull($this->request->getBody());
    }

    public function testGetSupportedClass()
    {
        $this->assertSame(self::RESPONSE_CLASS, $this->request->getSupportedClass());
    }

    public function testCreateResponse()
    {
        $rawData = 'raw data for testing';
        $response = $this->request->createResponse($rawData, $this->serializer);
        $this->assertInstanceOf(self::RESPONSE_CLASS, $response);
    }

    // The cat API is asked to respond in JSON format.
    public function testGetParameters()
    {
        $parameters = $this->request->getParameters();
        $this->assertArrayHasKey('format', $parameters);
        $this->assertSame('json', $parameters['format']);
    }
}
| mit |
SunboX/fxos-washing-machine_interface | shared/js/settings_helper.js | 3312 | (function(exports) {
  'use strict';

  /**
   * SettingsHelper simplifies mozSettings access. It provides a getter and a
   * setter for a specified setting. It is created by passing a setting key
   * and an optional default value.
   *
   * @param {String} key - The setting key
   * @param {Object} defaultValue - The default value
   *
   * Example:
   *   // create a helper with a default false
   *   var voicePrivacyHelper = SettingsHelper('ril.voicePrivacy.enabled', false);
   *   // get value
   *   voicePrivacyHelper.get(function(value) {});
   *   // set value
   *   voicePrivacyHelper.set(false, function() {});
   */
  var SettingsHelper = function(key, defaultValue) {
    var SETTINGS_KEY = key;
    var _settings = navigator.mozSettings;

    // Cached value, kept in sync by the mozSettings observer below.
    var _value = null;
    var _defaultValue = defaultValue;

    // Callbacks queued until the initial read completes.
    var _isReady = false;
    var _callbacks = [];

    // Invoke `callback` with any remaining arguments; no-op when absent.
    var _return = function sh_return(callback) {
      if (!callback) {
        return;
      }
      callback.apply(null, Array.prototype.slice.call(arguments, 1));
    };

    // Run `callback` immediately if initialised, otherwise queue it.
    var _ready = function sh_ready(callback) {
      if (!callback) {
        return;
      }
      if (_isReady) {
        callback();
      } else {
        _callbacks.push(callback);
      }
    };

    // Read the raw stored value; passes null to the callback on failure.
    var _getValue = function sh_getValue(callback) {
      var req = _settings.createLock().get(SETTINGS_KEY);
      req.onsuccess = function() {
        _return(callback, req.result[SETTINGS_KEY]);
      };
      req.onerror = function() {
        console.error('Error getting ' + SETTINGS_KEY + '.');
        _return(callback, null);
      };
    };

    // Write the value; the callback runs on success and on failure alike.
    var _setValue = function sh_setValue(value, callback) {
      var obj = {};
      obj[SETTINGS_KEY] = value;
      var req = _settings.createLock().set(obj);
      req.onsuccess = function() {
        _return(callback);
      };
      req.onerror = function() {
        console.error('Error setting ' + SETTINGS_KEY + '.');
        _return(callback);
      };
    };

    // Observer callback: keep the cached value up to date.
    var _valuechanged = function sh_valuehanged(e) {
      _value = e.settingValue;
    };

    // Prime the cache and start observing changes.
    var _init = function sh_init(callback) {
      _getValue(function(value) {
        // NOTE(review): falls back to the default for ANY falsy stored value
        // (false, 0, ''), not only for undefined — confirm this is intended.
        _value = value ? value : _defaultValue;
        _return(callback);
      });
      _settings.addObserver(SETTINGS_KEY, _valuechanged);
    };

    // Kick off initialisation; queued get/set calls run once it finishes.
    _init(function() {
      _isReady = true;
      _callbacks.forEach(function(callback) {
        callback();
      });
    });

    return {
      /**
       * Get the setting value.
       *
       * @param {Function} callback - The setting value will be passed in the
       *                              callback function.
       */
      get: function(callback) {
        _ready(function() {
          _return(callback, _value ? _value : _defaultValue);
        });
      },

      /**
       * Set the setting value.
       *
       * @param {Object} value The setting value
       * @param {Function} callback The callback function.
       */
      set: function(value, callback) {
        _ready(function() {
          _setValue(value, _return.bind(null, callback));
        });
      },

      /**
       * Clean up resources, specifically the mozSettings observer.
       */
      uninit: function() {
        _settings.removeObserver(SETTINGS_KEY, _valuechanged);
      }
    };
  };

  exports.SettingsHelper = SettingsHelper;
})(this);
| mit |
rnicoll/dogecoin | test/functional/p2p_compactblocks.py | 38688 | #!/usr/bin/env python3
# Copyright (c) 2016-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test compact blocks (BIP 152).
Version 1 compact blocks are pre-segwit (txids)
Version 2 compact blocks are post-segwit (wtxids)
"""
import random
from test_framework.blocktools import create_block, NORMAL_GBT_REQUEST_PARAMS, add_witness_commitment
from test_framework.messages import BlockTransactions, BlockTransactionsRequest, calculate_shortid, CBlock, CBlockHeader, CInv, COutPoint, CTransaction, CTxIn, CTxInWitness, CTxOut, FromHex, HeaderAndShortIDs, msg_no_witness_block, msg_no_witness_blocktxn, msg_cmpctblock, msg_getblocktxn, msg_getdata, msg_getheaders, msg_headers, msg_inv, msg_sendcmpct, msg_sendheaders, msg_tx, msg_block, msg_blocktxn, MSG_BLOCK, MSG_CMPCT_BLOCK, MSG_WITNESS_FLAG, NODE_NETWORK, P2PHeaderAndShortIDs, PrefilledTransaction, ser_uint256, ToHex
from test_framework.p2p import p2p_lock, P2PInterface
from test_framework.script import CScript, OP_TRUE, OP_DROP
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, softfork_active
# TestP2PConn: A peer we use to send messages to bitcoind, and store responses.
class TestP2PConn(P2PInterface):
    """P2P peer used to drive bitcoind and record its compact-block traffic."""

    def __init__(self, cmpct_version):
        super().__init__()
        # Every sendcmpct received, so tests can assert advertised versions.
        self.last_sendcmpct = []
        self.block_announced = False
        # Store the hashes of blocks we've seen announced.
        # This is for synchronizing the p2p message traffic,
        # so we can eg wait until a particular block is announced.
        self.announced_blockhashes = set()
        # Compact block protocol version this peer uses (1 or 2).
        self.cmpct_version = cmpct_version

    def on_sendcmpct(self, message):
        self.last_sendcmpct.append(message)

    def on_cmpctblock(self, message):
        # A compact block counts as a block announcement.
        self.block_announced = True
        self.last_message["cmpctblock"].header_and_shortids.header.calc_sha256()
        self.announced_blockhashes.add(self.last_message["cmpctblock"].header_and_shortids.header.sha256)

    def on_headers(self, message):
        # A headers message counts as announcing each contained block.
        self.block_announced = True
        for x in self.last_message["headers"].headers:
            x.calc_sha256()
            self.announced_blockhashes.add(x.sha256)

    def on_inv(self, message):
        # Only block invs count as block announcements.
        for x in self.last_message["inv"].inv:
            if x.type == MSG_BLOCK:
                self.block_announced = True
                self.announced_blockhashes.add(x.hash)

    # Requires caller to hold p2p_lock
    def received_block_announcement(self):
        return self.block_announced

    def clear_block_announcement(self):
        """Forget any recorded block-announcement state and messages."""
        with p2p_lock:
            self.block_announced = False
            self.last_message.pop("inv", None)
            self.last_message.pop("headers", None)
            self.last_message.pop("cmpctblock", None)

    def get_headers(self, locator, hashstop):
        """Send a getheaders message with the given locator and hashstop."""
        msg = msg_getheaders()
        msg.locator.vHave = locator
        msg.hashstop = hashstop
        self.send_message(msg)

    def send_header_for_blocks(self, new_blocks):
        """Announce the given blocks to the node via a headers message."""
        headers_message = msg_headers()
        headers_message.headers = [CBlockHeader(b) for b in new_blocks]
        self.send_message(headers_message)

    def request_headers_and_sync(self, locator, hashstop=0):
        """Request headers and wait until a block announcement comes back."""
        self.clear_block_announcement()
        self.get_headers(locator, hashstop)
        self.wait_until(self.received_block_announcement, timeout=30)
        self.clear_block_announcement()

    # Block until a block announcement for a particular block hash is
    # received.
    def wait_for_block_announcement(self, block_hash, timeout=30):
        def received_hash():
            return (block_hash in self.announced_blockhashes)
        self.wait_until(received_hash, timeout=timeout)

    def send_await_disconnect(self, message, timeout=30):
        """Sends a message to the node and wait for disconnect.

        This is used when we want to send a message into the node that we expect
        will get us disconnected, eg an invalid block."""
        self.send_message(message)
        self.wait_for_disconnect(timeout)
class CompactBlocksTest(BitcoinTestFramework):
    def set_test_params(self):
        """Run a single node on a fresh chain; allow non-standard txs."""
        self.setup_clean_chain = True
        self.num_nodes = 1
        self.extra_args = [[
            "-acceptnonstdtxn=1",
        ]]
        # [txid, vout, value] triples filled in by make_utxos().
        self.utxos = []
    def skip_test_if_missing_module(self):
        """Skip unless the node was built with wallet support."""
        self.skip_if_no_wallet()
    def build_block_on_tip(self, node, segwit=False):
        """Create and solve a block on top of the node's current tip.

        When segwit=True a witness commitment is added before solving.
        """
        block = create_block(tmpl=node.getblocktemplate(NORMAL_GBT_REQUEST_PARAMS))
        if segwit:
            add_witness_commitment(block)
        block.solve()
        return block
# Create 10 more anyone-can-spend utxo's for testing.
    def make_utxos(self):
        """Mine a coinbase and split it into 10 anyone-can-spend utxos.

        The resulting [txid, vout, value] triples are appended to self.utxos.
        """
        block = self.build_block_on_tip(self.nodes[0])
        self.segwit_node.send_and_ping(msg_no_witness_block(block))
        assert int(self.nodes[0].getbestblockhash(), 16) == block.sha256
        # Mine 240 further blocks — presumably enough to mature the coinbase
        # on this chain; TODO confirm against the chain's coinbase maturity.
        self.nodes[0].generatetoaddress(240, self.nodes[0].getnewaddress(address_type="bech32"))

        total_value = block.vtx[0].vout[0].nValue
        out_value = total_value // 10
        # Split the coinbase into 10 equal anyone-can-spend outputs.
        tx = CTransaction()
        tx.vin.append(CTxIn(COutPoint(block.vtx[0].sha256, 0), b''))
        for _ in range(10):
            tx.vout.append(CTxOut(out_value, CScript([OP_TRUE])))
        tx.rehash()

        block2 = self.build_block_on_tip(self.nodes[0])
        block2.vtx.append(tx)
        block2.hashMerkleRoot = block2.calc_merkle_root()
        block2.solve()
        self.segwit_node.send_and_ping(msg_no_witness_block(block2))
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block2.sha256)
        self.utxos.extend([[tx.sha256, i, out_value] for i in range(10)])
# Test "sendcmpct" (between peers preferring the same version):
# - No compact block announcements unless sendcmpct is sent.
# - If sendcmpct is sent with version > preferred_version, the message is ignored.
# - If sendcmpct is sent with boolean 0, then block announcements are not
# made with compact blocks.
# - If sendcmpct is then sent with boolean 1, then new block announcements
# are made with compact blocks.
# If old_node is passed in, request compact blocks with version=preferred-1
# and verify that it receives block announcements via compact block.
    def test_sendcmpct(self, test_node, old_node=None):
        """Exercise sendcmpct negotiation with a peer of matching version.

        Verifies compact-block announcements only start after a
        sendcmpct(announce=True) with an acceptable version, and stop after
        announce=False. If old_node is given, also checks that a peer using
        version preferred-1 can receive compact-block announcements.
        """
        preferred_version = test_node.cmpct_version
        node = self.nodes[0]

        # Make sure we get a SENDCMPCT message from our peer
        def received_sendcmpct():
            return (len(test_node.last_sendcmpct) > 0)
        test_node.wait_until(received_sendcmpct, timeout=30)
        with p2p_lock:
            # Check that the first version received is the preferred one
            assert_equal(test_node.last_sendcmpct[0].version, preferred_version)
            # And that we receive versions down to 1.
            assert_equal(test_node.last_sendcmpct[-1].version, 1)
            test_node.last_sendcmpct = []

        tip = int(node.getbestblockhash(), 16)

        # Mine a block and assert `predicate` holds over the peer's recorded
        # announcement messages once the block has been announced.
        def check_announcement_of_new_block(node, peer, predicate):
            peer.clear_block_announcement()
            block_hash = int(node.generate(1)[0], 16)
            peer.wait_for_block_announcement(block_hash, timeout=30)
            assert peer.block_announced
            with p2p_lock:
                assert predicate(peer), (
                    "block_hash={!r}, cmpctblock={!r}, inv={!r}".format(
                        block_hash, peer.last_message.get("cmpctblock", None), peer.last_message.get("inv", None)))

        # We shouldn't get any block announcements via cmpctblock yet.
        check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message)

        # Try one more time, this time after requesting headers.
        test_node.request_headers_and_sync(locator=[tip])
        check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message and "inv" in p.last_message)

        # Test a few ways of using sendcmpct that should NOT
        # result in compact block announcements.
        # Before each test, sync the headers chain.
        test_node.request_headers_and_sync(locator=[tip])

        # Now try a SENDCMPCT message with too-high version
        test_node.send_and_ping(msg_sendcmpct(announce=True, version=preferred_version+1))
        check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message)

        # Headers sync before next test.
        test_node.request_headers_and_sync(locator=[tip])

        # Now try a SENDCMPCT message with valid version, but announce=False
        test_node.send_and_ping(msg_sendcmpct(announce=False, version=preferred_version))
        check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message)

        # Headers sync before next test.
        test_node.request_headers_and_sync(locator=[tip])

        # Finally, try a SENDCMPCT message with announce=True
        test_node.send_and_ping(msg_sendcmpct(announce=True, version=preferred_version))
        check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)

        # Try one more time (no headers sync should be needed!)
        check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)

        # Try one more time, after turning on sendheaders
        test_node.send_and_ping(msg_sendheaders())
        check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)

        # Try one more time, after sending a version-1, announce=false message.
        test_node.send_and_ping(msg_sendcmpct(announce=False, version=preferred_version-1))
        check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" in p.last_message)

        # Now turn off announcements
        test_node.send_and_ping(msg_sendcmpct(announce=False, version=preferred_version))
        check_announcement_of_new_block(node, test_node, lambda p: "cmpctblock" not in p.last_message and "headers" in p.last_message)

        if old_node is not None:
            # Verify that a peer using an older protocol version can receive
            # announcements from this node.
            old_node.send_and_ping(msg_sendcmpct(announce=True, version=preferred_version-1))
            # Header sync
            old_node.request_headers_and_sync(locator=[tip])
            check_announcement_of_new_block(node, old_node, lambda p: "cmpctblock" in p.last_message)
# This test actually causes bitcoind to (reasonably!) disconnect us, so do this last.
    def test_invalid_cmpctblock_message(self):
        """A cmpctblock with an out-of-range prefilled index must get the
        peer disconnected and must not advance the node's tip."""
        self.nodes[0].generate(241)
        block = self.build_block_on_tip(self.nodes[0])

        cmpct_block = P2PHeaderAndShortIDs()
        cmpct_block.header = CBlockHeader(block)
        cmpct_block.prefilled_txn_length = 1
        # This index will be too high
        prefilled_txn = PrefilledTransaction(1, block.vtx[0])
        cmpct_block.prefilled_txn = [prefilled_txn]
        self.segwit_node.send_await_disconnect(msg_cmpctblock(cmpct_block))
        # Tip must still be the block's parent.
        assert_equal(int(self.nodes[0].getbestblockhash(), 16), block.hashPrevBlock)
# Compare the generated shortids to what we expect based on BIP 152, given
# bitcoind's choice of nonce.
    def test_compactblock_construction(self, test_node, use_witness_address=True):
        """Mine a block of wallet transactions and verify the compact block
        the node produces (both fast-announced and getdata-fetched) matches
        the BIP 152 construction for the peer's compact-block version."""
        version = test_node.cmpct_version
        node = self.nodes[0]
        # Generate a bunch of transactions.
        node.generate(241)
        num_transactions = 25
        if use_witness_address:
            address = node.getnewaddress(address_type="bech32")
        else:
            address = node.getnewaddress()

        segwit_tx_generated = False
        for _ in range(num_transactions):
            txid = node.sendtoaddress(address, 0.1)
            hex_tx = node.gettransaction(txid)["hex"]
            tx = FromHex(CTransaction(), hex_tx)
            if not tx.wit.is_null():
                segwit_tx_generated = True

        if use_witness_address:
            assert segwit_tx_generated  # check that our test is not broken

        # Wait until we've seen the block announcement for the resulting tip
        tip = int(node.getbestblockhash(), 16)
        test_node.wait_for_block_announcement(tip)

        # Make sure we will receive a fast-announce compact block
        self.request_cb_announcements(test_node)

        # Now mine a block, and look at the resulting compact block.
        test_node.clear_block_announcement()
        block_hash = int(node.generate(1)[0], 16)

        # Store the raw block in our internal format.
        block = FromHex(CBlock(), node.getblock("%064x" % block_hash, False))
        for tx in block.vtx:
            tx.calc_sha256()
        block.rehash()

        # Wait until the block was announced (via compact blocks)
        test_node.wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30)

        # Now fetch and check the compact block
        header_and_shortids = None
        with p2p_lock:
            # Convert the on-the-wire representation to absolute indexes
            header_and_shortids = HeaderAndShortIDs(test_node.last_message["cmpctblock"].header_and_shortids)
        self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block)

        # Now fetch the compact block using a normal non-announce getdata
        test_node.clear_block_announcement()
        inv = CInv(MSG_CMPCT_BLOCK, block_hash)
        test_node.send_message(msg_getdata([inv]))

        test_node.wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30)

        # Now fetch and check the compact block
        header_and_shortids = None
        with p2p_lock:
            # Convert the on-the-wire representation to absolute indexes
            header_and_shortids = HeaderAndShortIDs(test_node.last_message["cmpctblock"].header_and_shortids)
        self.check_compactblock_construction_from_block(version, header_and_shortids, block_hash, block)
    def check_compactblock_construction_from_block(self, version, header_and_shortids, block_hash, block):
        """Validate a received compact block against the full block.

        Checks the header hash, that the coinbase is prefilled, witness
        presence according to the compact-block version, and every shortid
        against the siphash keys derived per BIP 152."""
        # Check that we got the right block!
        header_and_shortids.header.calc_sha256()
        assert_equal(header_and_shortids.header.sha256, block_hash)

        # Make sure the prefilled_txn appears to have included the coinbase
        assert len(header_and_shortids.prefilled_txn) >= 1
        assert_equal(header_and_shortids.prefilled_txn[0].index, 0)

        # Check that all prefilled_txn entries match what's in the block.
        for entry in header_and_shortids.prefilled_txn:
            entry.tx.calc_sha256()
            # This checks the non-witness parts of the tx agree
            assert_equal(entry.tx.sha256, block.vtx[entry.index].sha256)

            # And this checks the witness
            wtxid = entry.tx.calc_sha256(True)
            if version == 2:
                assert_equal(wtxid, block.vtx[entry.index].calc_sha256(True))
            else:
                # Shouldn't have received a witness
                assert entry.tx.wit.is_null()

        # Check that the cmpctblock message announced all the transactions.
        assert_equal(len(header_and_shortids.prefilled_txn) + len(header_and_shortids.shortids), len(block.vtx))

        # And now check that all the shortids are as expected as well.
        # Determine the siphash keys to use.
        [k0, k1] = header_and_shortids.get_siphash_keys()

        # Walk the block's transactions in order, consuming either a prefilled
        # entry or the next shortid for each one.
        index = 0
        while index < len(block.vtx):
            if (len(header_and_shortids.prefilled_txn) > 0 and
                    header_and_shortids.prefilled_txn[0].index == index):
                # Already checked prefilled transactions above
                header_and_shortids.prefilled_txn.pop(0)
            else:
                # Version 2 compact blocks use wtxids for the shortids.
                tx_hash = block.vtx[index].sha256
                if version == 2:
                    tx_hash = block.vtx[index].calc_sha256(True)
                shortid = calculate_shortid(k0, k1, tx_hash)
                assert_equal(shortid, header_and_shortids.shortids[0])
                header_and_shortids.shortids.pop(0)
            index += 1
# Test that bitcoind requests compact blocks when we announce new blocks
# via header or inv, and that responding to getblocktxn causes the block
# to be successfully reconstructed.
# Post-segwit: upgraded nodes would only make this request of cb-version-2,
# NODE_WITNESS peers. Unupgraded nodes would still make this request of
# any cb-version-1-supporting peer.
    def test_compactblock_requests(self, test_node, segwit=True):
        """Announce blocks via inv/headers and check that the node requests
        a compact block, asks for the missing coinbase via getblocktxn, and
        accepts the block once the coinbase is supplied."""
        version = test_node.cmpct_version
        node = self.nodes[0]
        # Try announcing a block with an inv or header, expect a compactblock
        # request
        for announce in ["inv", "header"]:
            block = self.build_block_on_tip(node, segwit=segwit)

            if announce == "inv":
                test_node.send_message(msg_inv([CInv(MSG_BLOCK, block.sha256)]))
                test_node.wait_until(lambda: "getheaders" in test_node.last_message, timeout=30)
                test_node.send_header_for_blocks([block])
            else:
                test_node.send_header_for_blocks([block])
            test_node.wait_for_getdata([block.sha256], timeout=30)
            # 4 == MSG_CMPCT_BLOCK: the node asked for a compact block.
            assert_equal(test_node.last_message["getdata"].inv[0].type, 4)

            # Send back a compactblock message that omits the coinbase
            comp_block = HeaderAndShortIDs()
            comp_block.header = CBlockHeader(block)
            comp_block.nonce = 0
            [k0, k1] = comp_block.get_siphash_keys()
            # Version 2 shortids are computed from the wtxid.
            coinbase_hash = block.vtx[0].sha256
            if version == 2:
                coinbase_hash = block.vtx[0].calc_sha256(True)
            comp_block.shortids = [calculate_shortid(k0, k1, coinbase_hash)]
            test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
            # Tip must not advance: the block can't be reconstructed yet.
            assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)

            # Expect a getblocktxn message.
            with p2p_lock:
                assert "getblocktxn" in test_node.last_message
                absolute_indexes = test_node.last_message["getblocktxn"].block_txn_request.to_absolute()
            assert_equal(absolute_indexes, [0])  # should be a coinbase request

            # Send the coinbase, and verify that the tip advances.
            if version == 2:
                msg = msg_blocktxn()
            else:
                msg = msg_no_witness_blocktxn()
            msg.block_transactions.blockhash = block.sha256
            msg.block_transactions.transactions = [block.vtx[0]]
            test_node.send_and_ping(msg)
            assert_equal(int(node.getbestblockhash(), 16), block.sha256)
# Create a chain of transactions from given utxo, and add to a new block.
    def build_block_with_transactions(self, node, utxo, num_transactions):
        """Build a solved block containing a chain of num_transactions
        anyone-can-spend transactions, each spending the previous one,
        starting from utxo ([txid, vout, value])."""
        block = self.build_block_on_tip(node)

        for _ in range(num_transactions):
            tx = CTransaction()
            tx.vin.append(CTxIn(COutPoint(utxo[0], utxo[1]), b''))
            # Each tx pays 1000 satoshis of fee and forwards the rest.
            tx.vout.append(CTxOut(utxo[2] - 1000, CScript([OP_TRUE, OP_DROP] * 15 + [OP_TRUE])))
            tx.rehash()
            utxo = [tx.sha256, 0, tx.vout[0].nValue]
            block.vtx.append(tx)
        block.hashMerkleRoot = block.calc_merkle_root()
        block.solve()
        return block
# The node should only request (via getblocktxn) the transactions it is
# actually missing, and supplying them must let the block reconstruct.
def test_getblocktxn_requests(self, test_node):
    version = test_node.cmpct_version
    node = self.nodes[0]
    with_witness = (version == 2)

    def expect_getblocktxn(compact_block, peer, expected_result):
        # Announce via cmpctblock, then check which indexes get requested.
        peer.send_and_ping(msg_cmpctblock(compact_block.to_p2p()))
        with p2p_lock:
            assert "getblocktxn" in peer.last_message
            absolute_indexes = peer.last_message["getblocktxn"].block_txn_request.to_absolute()
        assert_equal(absolute_indexes, expected_result)

    def expect_tip_after(node, peer, msg, tip):
        # Deliver msg and verify the chain tip advanced to ``tip``.
        peer.send_and_ping(msg)
        assert_equal(int(node.getbestblockhash(), 16), tip)

    # Case 1: announce with short ids only; every non-coinbase index
    # should come back in a getblocktxn request.
    utxo = self.utxos.pop(0)
    block = self.build_block_with_transactions(node, utxo, 5)
    self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
    comp_block = HeaderAndShortIDs()
    comp_block.initialize_from_block(block, use_witness=with_witness)
    expect_getblocktxn(comp_block, test_node, [1, 2, 3, 4, 5])

    msg_bt = msg_no_witness_blocktxn()
    if with_witness:
        msg_bt = msg_blocktxn()  # serialize with witnesses
    msg_bt.block_transactions = BlockTransactions(block.sha256, block.vtx[1:])
    expect_tip_after(node, test_node, msg_bt, block.sha256)

    utxo = self.utxos.pop(0)
    block = self.build_block_with_transactions(node, utxo, 5)
    self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])

    # Case 2: intersperse prefilled transactions; only the gaps should
    # be requested.
    comp_block.initialize_from_block(block, prefill_list=[0, 1, 5], use_witness=with_witness)
    expect_getblocktxn(comp_block, test_node, [2, 3, 4])
    msg_bt.block_transactions = BlockTransactions(block.sha256, block.vtx[2:5])
    expect_tip_after(node, test_node, msg_bt, block.sha256)

    # Case 3: relay one transaction ahead of the announcement.
    utxo = self.utxos.pop(0)
    block = self.build_block_with_transactions(node, utxo, 5)
    self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
    test_node.send_and_ping(msg_tx(block.vtx[1]))
    assert block.vtx[1].hash in node.getrawmempool()

    # Prefill 4 of the 6 transactions; only the one that is neither
    # prefilled nor in the mempool should be requested.
    comp_block.initialize_from_block(block, prefill_list=[0, 2, 3, 4], use_witness=with_witness)
    expect_getblocktxn(comp_block, test_node, [5])
    msg_bt.block_transactions = BlockTransactions(block.sha256, [block.vtx[5]])
    expect_tip_after(node, test_node, msg_bt, block.sha256)

    # Case 4: with every transaction already in the mempool, the block
    # must reconstruct immediately with no getblocktxn round trip.
    utxo = self.utxos.pop(0)
    block = self.build_block_with_transactions(node, utxo, 10)
    self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])
    for tx in block.vtx[1:]:
        test_node.send_message(msg_tx(tx))
    test_node.sync_with_ping()
    # Make sure every transaction was accepted into the mempool.
    mempool = node.getrawmempool()
    for tx in block.vtx[1:]:
        assert tx.hash in mempool

    # Clear out any stale request before the announcement.
    with p2p_lock:
        test_node.last_message.pop("getblocktxn", None)

    comp_block.initialize_from_block(block, prefill_list=[0], use_witness=with_witness)
    expect_tip_after(node, test_node, msg_cmpctblock(comp_block.to_p2p()), block.sha256)
    with p2p_lock:
        # No transaction request should have been issued.
        assert "getblocktxn" not in test_node.last_message
# A wrong blocktxn reply must not mark the block permanently invalid:
# the node should fall back to a full-block getdata and still accept it.
def test_incorrect_blocktxn_response(self, test_node):
    version = test_node.cmpct_version
    node = self.nodes[0]
    utxo = self.utxos.pop(0)
    block = self.build_block_with_transactions(node, utxo, 10)
    self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])

    # Relay the first 5 transactions from the block in advance.
    for tx in block.vtx[1:6]:
        test_node.send_message(msg_tx(tx))
    test_node.sync_with_ping()
    # Make sure all of them were accepted.
    mempool = node.getrawmempool()
    for tx in block.vtx[1:6]:
        assert tx.hash in mempool

    # Announce the compact block; the node should request the remainder.
    comp_block = HeaderAndShortIDs()
    comp_block.initialize_from_block(block, prefill_list=[0], use_witness=(version == 2))
    test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))
    absolute_indexes = []
    with p2p_lock:
        assert "getblocktxn" in test_node.last_message
        absolute_indexes = test_node.last_message["getblocktxn"].block_txn_request.to_absolute()
    assert_equal(absolute_indexes, [6, 7, 8, 9, 10])

    # Answer with the wrong transaction set.
    # Note that bitcoind could in principle detect the lie (the shortids
    # would not match) and disconnect us for misbehavior; if that behavior
    # ever changes, this test could deliver the block from a different
    # peer below, which still verifies the block is not marked bad
    # permanently.  Good enough for now.
    bad_msg = msg_no_witness_blocktxn()
    if version == 2:
        bad_msg = msg_blocktxn()
    bad_msg.block_transactions = BlockTransactions(block.sha256, [block.vtx[5]] + block.vtx[7:])
    test_node.send_and_ping(bad_msg)

    # Tip should not have updated.
    assert_equal(int(node.getbestblockhash(), 16), block.hashPrevBlock)

    # The node should fall back to requesting the full block.
    test_node.wait_for_getdata([block.sha256], timeout=10)
    assert test_node.last_message["getdata"].inv[0].type == MSG_BLOCK or \
        test_node.last_message["getdata"].inv[0].type == MSG_BLOCK | MSG_WITNESS_FLAG

    # Deliver the full block; the tip must now advance.
    if version == 2:
        test_node.send_and_ping(msg_block(block))
    else:
        test_node.send_and_ping(msg_no_witness_block(block))
    assert_equal(int(node.getbestblockhash(), 16), block.sha256)
def test_getblocktxn_handler(self, test_node):
    """Exercise the node's blocktxn responder across the recent-block window."""
    version = test_node.cmpct_version
    node = self.nodes[0]
    # bitcoind only serves blocktxn responses for blocks at most 10 deep.
    MAX_GETBLOCKTXN_DEPTH = 10
    chain_height = node.getblockcount()
    current_height = chain_height
    while (current_height >= chain_height - MAX_GETBLOCKTXN_DEPTH):
        block_hash = node.getblockhash(current_height)
        block = FromHex(CBlock(), node.getblock(block_hash, False))

        # Request a random subset of the block's transactions.
        msg = msg_getblocktxn()
        msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), [])
        num_to_request = random.randint(1, len(block.vtx))
        msg.block_txn_request.from_absolute(sorted(random.sample(range(len(block.vtx)), num_to_request)))
        test_node.send_message(msg)
        test_node.wait_until(lambda: "blocktxn" in test_node.last_message, timeout=10)

        # Pre-compute hashes so we can compare against the response.
        for tx in block.vtx:
            tx.calc_sha256()
        with p2p_lock:
            assert_equal(test_node.last_message["blocktxn"].block_transactions.blockhash, int(block_hash, 16))
            requested = msg.block_txn_request.to_absolute()
            for index in requested:
                tx = test_node.last_message["blocktxn"].block_transactions.transactions.pop(0)
                tx.calc_sha256()
                assert_equal(tx.sha256, block.vtx[index].sha256)
                if version == 1:
                    # Witnesses should have been stripped for v1 peers.
                    assert tx.wit.is_null()
                else:
                    # For v2 peers the witness must match too.
                    assert_equal(tx.calc_sha256(True), block.vtx[index].calc_sha256(True))
            test_node.last_message.pop("blocktxn", None)
        current_height -= 1

    # One block past the window: expect a full block, never a blocktxn.
    block_hash = node.getblockhash(current_height)
    msg.block_txn_request = BlockTransactionsRequest(int(block_hash, 16), [0])
    with p2p_lock:
        test_node.last_message.pop("block", None)
        test_node.last_message.pop("blocktxn", None)
    test_node.send_and_ping(msg)
    with p2p_lock:
        test_node.last_message["block"].block.calc_sha256()
        assert_equal(test_node.last_message["block"].block.sha256, int(block_hash, 16))
        assert "blocktxn" not in test_node.last_message
def test_compactblocks_not_at_tip(self, test_node):
    """Verify compact-block service is limited to blocks near the tip."""
    node = self.nodes[0]
    # Requesting cmpctblock works while the block is within the window...
    MAX_CMPCTBLOCK_DEPTH = 5
    new_blocks = []
    for _ in range(MAX_CMPCTBLOCK_DEPTH + 1):
        test_node.clear_block_announcement()
        new_blocks.append(node.generate(1)[0])
        test_node.wait_until(test_node.received_block_announcement, timeout=30)

    test_node.clear_block_announcement()
    test_node.send_message(msg_getdata([CInv(MSG_CMPCT_BLOCK, int(new_blocks[0], 16))]))
    test_node.wait_until(lambda: "cmpctblock" in test_node.last_message, timeout=30)

    # ...but once it drops out of the window we get a full block instead.
    test_node.clear_block_announcement()
    node.generate(1)
    test_node.wait_until(test_node.received_block_announcement, timeout=30)
    test_node.clear_block_announcement()
    with p2p_lock:
        test_node.last_message.pop("block", None)
    test_node.send_message(msg_getdata([CInv(MSG_CMPCT_BLOCK, int(new_blocks[0], 16))]))
    test_node.wait_until(lambda: "block" in test_node.last_message, timeout=30)
    with p2p_lock:
        test_node.last_message["block"].block.calc_sha256()
        assert_equal(test_node.last_message["block"].block.sha256, int(new_blocks[0], 16))

    # Announce a compact block building on a stale parent; it should only
    # ever be accepted as a header.
    cur_height = node.getblockcount()
    stale_parent = int(node.getblockhash(cur_height - 5), 16)
    block = self.build_block_on_tip(node)
    block.hashPrevBlock = stale_parent
    block.solve()

    comp_block = HeaderAndShortIDs()
    comp_block.initialize_from_block(block)
    test_node.send_and_ping(msg_cmpctblock(comp_block.to_p2p()))

    tip_entry = next((tip for tip in node.getchaintips() if tip["hash"] == block.hash), None)
    assert tip_entry is not None
    assert_equal(tip_entry["status"], "headers-only")

    # Requesting this block via getblocktxn should silently fail
    # (to avoid fingerprinting attacks).
    msg = msg_getblocktxn()
    msg.block_txn_request = BlockTransactionsRequest(block.sha256, [0])
    with p2p_lock:
        test_node.last_message.pop("blocktxn", None)
    test_node.send_and_ping(msg)
    with p2p_lock:
        assert "blocktxn" not in test_node.last_message
def test_end_to_end_block_relay(self, listeners):
    """Submit a block and check every listener gets a cmpctblock announcement."""
    node = self.nodes[0]
    utxo = self.utxos.pop(0)
    block = self.build_block_with_transactions(node, utxo, 10)

    for listener in listeners:
        listener.clear_block_announcement()

    # ToHex() won't serialize with witness, but this block has no witnesses
    # anyway. TODO: repeat this test with witness tx's to a segwit node.
    node.submitblock(ToHex(block))

    for listener in listeners:
        listener.wait_until(lambda: "cmpctblock" in listener.last_message, timeout=30)
    with p2p_lock:
        for listener in listeners:
            listener.last_message["cmpctblock"].header_and_shortids.header.calc_sha256()
            assert_equal(listener.last_message["cmpctblock"].header_and_shortids.header.sha256, block.sha256)
# Test that we don't get disconnected if we relay a compact block with valid
# header, but invalid transactions.
def test_invalid_tx_in_compactblock(self, test_node, use_segwit=True):
    """Announce a compact block whose transaction set is invalid.

    The header is valid, so the announcement itself is not misbehavior;
    the node must reject the block without banning the peer, and the tip
    must not advance.
    """
    node = self.nodes[0]
    assert len(self.utxos)
    utxo = self.utxos[0]

    block = self.build_block_with_transactions(node, utxo, 5)
    # Drop one link of the spend chain: a later tx now spends a missing
    # input, so the block is invalid even though the (recomputed) merkle
    # root matches the remaining transactions.
    del block.vtx[3]
    block.hashMerkleRoot = block.calc_merkle_root()
    if use_segwit:
        # If we're testing with segwit, also drop the coinbase witness,
        # but include the witness commitment.
        add_witness_commitment(block)
        block.vtx[0].wit.vtxinwit = []
    block.solve()

    # Now send the compact block with all transactions prefilled, and
    # verify that we don't get disconnected.
    comp_block = HeaderAndShortIDs()
    comp_block.initialize_from_block(block, prefill_list=[0, 1, 2, 3, 4], use_witness=use_segwit)
    msg = msg_cmpctblock(comp_block.to_p2p())
    test_node.send_and_ping(msg)

    # Check that the tip didn't advance.
    # Bug fix: the original used "is not", which compares object identity;
    # for large ints that is always true, so the assertion could never
    # fail.  Use "!=" for value inequality.
    assert int(node.getbestblockhash(), 16) != block.sha256
    test_node.sync_with_ping()
# Helper that opts a peer into compact-block announcements:
# sync headers to the tip, then send sendcmpct with announce=True at the
# peer's negotiated compact-block version.
def request_cb_announcements(self, peer):
    node = self.nodes[0]
    best_hash = int(node.getbestblockhash(), 16)
    peer.get_headers(locator=[best_hash], hashstop=0)
    peer.send_and_ping(msg_sendcmpct(announce=True, version=peer.cmpct_version))
def test_compactblock_reconstruction_multiple_peers(self, stalling_peer, delivery_peer):
    """A block announced by one peer can be reconstructed via another."""
    node = self.nodes[0]
    assert len(self.utxos)

    def announce_cmpct_block(node, peer):
        # Build a fresh block and announce it so the node has a pending
        # compact-block reconstruction waiting on this peer.
        utxo = self.utxos.pop(0)
        block = self.build_block_with_transactions(node, utxo, 5)

        cmpct_block = HeaderAndShortIDs()
        cmpct_block.initialize_from_block(block)
        peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
        with p2p_lock:
            assert "getblocktxn" in peer.last_message
        return block, cmpct_block

    # Announce via the stalling peer, fill the mempool via the delivery
    # peer, then re-announce: reconstruction should now succeed.
    block, cmpct_block = announce_cmpct_block(node, stalling_peer)
    for tx in block.vtx[1:]:
        delivery_peer.send_message(msg_tx(tx))
    delivery_peer.sync_with_ping()
    mempool = node.getrawmempool()
    for tx in block.vtx[1:]:
        assert tx.hash in mempool

    delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
    assert_equal(int(node.getbestblockhash(), 16), block.sha256)

    self.utxos.append([block.vtx[-1].sha256, 0, block.vtx[-1].vout[0].nValue])

    # Now test that delivering an invalid compact block won't break relay.
    block, cmpct_block = announce_cmpct_block(node, stalling_peer)
    for tx in block.vtx[1:]:
        delivery_peer.send_message(msg_tx(tx))
    delivery_peer.sync_with_ping()

    # Corrupt the prefilled coinbase with a bogus witness.
    cmpct_block.prefilled_txn[0].tx.wit.vtxinwit = [CTxInWitness()]
    cmpct_block.prefilled_txn[0].tx.wit.vtxinwit[0].scriptWitness.stack = [ser_uint256(0)]
    cmpct_block.use_witness = True
    delivery_peer.send_and_ping(msg_cmpctblock(cmpct_block.to_p2p()))
    assert int(node.getbestblockhash(), 16) != block.sha256

    # The stalling peer can still complete the block via blocktxn.
    repair_msg = msg_no_witness_blocktxn()
    repair_msg.block_transactions.blockhash = block.sha256
    repair_msg.block_transactions.transactions = block.vtx[1:]
    stalling_peer.send_and_ping(repair_msg)
    assert_equal(int(node.getbestblockhash(), 16), block.sha256)
def run_test(self):
    """Drive every compact-block sub-test against node 0."""
    # Get the node out of IBD.
    self.nodes[0].generate(1)

    # Set up the p2p connections: two segwit-capable peers (cmpct v2)
    # and one legacy peer (cmpct v1).
    self.segwit_node = self.nodes[0].add_p2p_connection(TestP2PConn(cmpct_version=2))
    self.old_node = self.nodes[0].add_p2p_connection(TestP2PConn(cmpct_version=1), services=NODE_NETWORK)
    self.additional_segwit_node = self.nodes[0].add_p2p_connection(TestP2PConn(cmpct_version=2))

    # We will need UTXOs to construct transactions in later tests.
    self.make_utxos()

    assert softfork_active(self.nodes[0], "segwit")

    self.log.info("Testing SENDCMPCT p2p message... ")
    self.test_sendcmpct(self.segwit_node, old_node=self.old_node)
    self.test_sendcmpct(self.additional_segwit_node)

    self.log.info("Testing compactblock construction...")
    self.test_compactblock_construction(self.old_node)
    self.test_compactblock_construction(self.segwit_node)

    self.log.info("Testing compactblock requests (segwit node)... ")
    self.test_compactblock_requests(self.segwit_node)

    self.log.info("Testing getblocktxn requests (segwit node)...")
    self.test_getblocktxn_requests(self.segwit_node)

    self.log.info("Testing getblocktxn handler (segwit node should return witnesses)...")
    self.test_getblocktxn_handler(self.segwit_node)
    self.test_getblocktxn_handler(self.old_node)

    self.log.info("Testing compactblock requests/announcements not at chain tip...")
    self.test_compactblocks_not_at_tip(self.segwit_node)
    self.test_compactblocks_not_at_tip(self.old_node)

    self.log.info("Testing handling of incorrect blocktxn responses...")
    self.test_incorrect_blocktxn_response(self.segwit_node)

    self.log.info("Testing reconstructing compact blocks from all peers...")
    self.test_compactblock_reconstruction_multiple_peers(self.segwit_node, self.additional_segwit_node)

    # Test that if we submitblock to node1, we'll get a compact block
    # announcement to all peers.
    # (Post-segwit activation, blocks won't propagate from node0 to node1
    # automatically, so don't bother testing a block announced to node0.)
    self.log.info("Testing end-to-end block relay...")
    self.request_cb_announcements(self.old_node)
    self.request_cb_announcements(self.segwit_node)
    self.test_end_to_end_block_relay([self.segwit_node, self.old_node])

    self.log.info("Testing handling of invalid compact blocks...")
    self.test_invalid_tx_in_compactblock(self.segwit_node)
    self.test_invalid_tx_in_compactblock(self.old_node)

    self.log.info("Testing invalid index in cmpctblock message...")
    self.test_invalid_cmpctblock_message()
if __name__ == '__main__':
    # Standard functional-test entry point.
    CompactBlocksTest().main()
| mit |
vesopk/TechModule-Exercises | ClassesAndObjectsLab/SalesReport/SalesReport.cs | 1208 | using System;
using System.Collections.Generic;
class SalesReport
{
    // Reads N sale records of the form "<town> <product> <price> <quantity>"
    // from the console and prints the total revenue per town, with towns
    // sorted alphabetically and totals formatted to two decimal places.
    static void Main()
    {
        int salesCount = int.Parse(Console.ReadLine());

        // SortedDictionary keeps the towns in alphabetical order for output.
        var totalsByTown = new SortedDictionary<string, decimal>();
        for (int i = 0; i < salesCount; i++)
        {
            string[] tokens = Console.ReadLine().Split(' ');
            var sale = new Sale
            {
                Town = tokens[0],
                Product = tokens[1],
                Price = decimal.Parse(tokens[2]),
                Quanity = decimal.Parse(tokens[3])
            };

            if (!totalsByTown.ContainsKey(sale.Town))
            {
                totalsByTown[sale.Town] = 0;
            }
            totalsByTown[sale.Town] += sale.Price * sale.Quanity;
        }

        foreach (var pair in totalsByTown)
        {
            Console.WriteLine($"{pair.Key} -> {pair.Value:F2}");
        }
    }
}
// Simple data holder for one sale record read from the console.
class Sale
{
// Town where the sale happened (used as the aggregation key).
public string Town { get; set; }
// Name of the product sold (parsed but not used in the report).
public string Product { get; set; }
// Unit price of the product.
public decimal Price { get; set; }
// Number of units sold.
// NOTE(review): property name is a typo for "Quantity"; kept as-is
// because SalesReport.Main references it by this name.
public decimal Quanity { get; set; }
}
gregwym/joos-compiler-java | src/ca/uwaterloo/joos/ast/visitor/MethodDeclVisitor.java | 651 | package ca.uwaterloo.joos.ast.visitor;
import ca.uwaterloo.joos.ast.ASTNode;
import ca.uwaterloo.joos.ast.decl.MethodDeclaration;
/**
 * AST visitor that only cares about method declarations: every
 * {@link MethodDeclaration} node encountered during traversal is forwarded
 * to the abstract {@link #visitMethodDecl(MethodDeclaration)} callback,
 * and all other node types are ignored.
 */
public abstract class MethodDeclVisitor extends ASTVisitor {

	public MethodDeclVisitor() {
	}

	/** No-op: this visitor has no pre-visit work. */
	@Override
	public void willVisit(ASTNode node) {
	}

	/** No-op: this visitor has no post-visit work. */
	@Override
	public void didVisit(ASTNode node) {
	}

	/**
	 * Dispatches method-declaration nodes to the subclass callback.
	 *
	 * @return always {@code true}, so traversal continues into children
	 */
	public boolean visit(ASTNode node) throws Exception {
		if (!(node instanceof MethodDeclaration)) {
			return true;
		}
		this.visitMethodDecl((MethodDeclaration) node);
		return true;
	}

	/** Invoked once for every method declaration in the traversed tree. */
	protected abstract void visitMethodDecl(MethodDeclaration node) throws Exception;
}
| mit |
polydom/polydom | src/components/trace.js | 703 | /**
* @instance
* @memberOf $dom
* @category DOM
*
* @description
* Return the HTML markup of a DOM node OR the HTML markup from within a DOM node.
*
* @param {Element} subject The element whose markup will be returned.
* @param {Boolean} inner Return only the inner markup of the element.
* @returns {String} The HTML markup from the first element that is matched.
*
* @example
*
* $.trace(element);
* $(selector).trace(true);
*
*/
/**
 * Return the markup of the first matched element: the full outer HTML by
 * default, or only the inner HTML when `inner` is truthy.  Accepts either
 * a single element or an array-like collection; a bare element is wrapped
 * so index 0 can be read uniformly.
 */
var trace = function trace(elements, inner) {
    inner = inner || false;
    // A bare element has no positive `length`; wrap it in an array.
    if (!(elements.length > 0)) {
        elements = [elements];
    }
    var first = elements[0];
    return inner ? first.innerHTML : first.outerHTML;
};
| mit |
lextel/evolution | fuel/packages/faker/Faker/Provider/Lorem.php | 8885 | <?php
namespace Faker\Provider;
class Lorem extends \Faker\Provider\Base
{
    // Classic "lorem ipsum" vocabulary used by all generators below.
    protected static $wordList = array(
        'alias', 'consequatur', 'aut', 'perferendis', 'sit', 'voluptatem',
        'accusantium', 'doloremque', 'aperiam', 'eaque','ipsa', 'quae', 'ab',
        'illo', 'inventore', 'veritatis', 'et', 'quasi', 'architecto',
        'beatae', 'vitae', 'dicta', 'sunt', 'explicabo', 'aspernatur', 'aut',
        'odit', 'aut', 'fugit', 'sed', 'quia', 'consequuntur', 'magni',
        'dolores', 'eos', 'qui', 'ratione', 'voluptatem', 'sequi', 'nesciunt',
        'neque', 'dolorem', 'ipsum', 'quia', 'dolor', 'sit', 'amet',
        'consectetur', 'adipisci', 'velit', 'sed', 'quia', 'non', 'numquam',
        'eius', 'modi', 'tempora', 'incidunt', 'ut', 'labore', 'et', 'dolore',
        'magnam', 'aliquam', 'quaerat', 'voluptatem', 'ut', 'enim', 'ad',
        'minima', 'veniam', 'quis', 'nostrum', 'exercitationem', 'ullam',
        'corporis', 'nemo', 'enim', 'ipsam', 'voluptatem', 'quia', 'voluptas',
        'sit', 'suscipit', 'laboriosam', 'nisi', 'ut', 'aliquid', 'ex', 'ea',
        'commodi', 'consequatur', 'quis', 'autem', 'vel', 'eum', 'iure',
        'reprehenderit', 'qui', 'in', 'ea', 'voluptate', 'velit', 'esse',
        'quam', 'nihil', 'molestiae', 'et', 'iusto', 'odio', 'dignissimos',
        'ducimus', 'qui', 'blanditiis', 'praesentium', 'laudantium', 'totam',
        'rem', 'voluptatum', 'deleniti', 'atque', 'corrupti', 'quos',
        'dolores', 'et', 'quas', 'molestias', 'excepturi', 'sint',
        'occaecati', 'cupiditate', 'non', 'provident', 'sed', 'ut',
        'perspiciatis', 'unde', 'omnis', 'iste', 'natus', 'error',
        'similique', 'sunt', 'in', 'culpa', 'qui', 'officia', 'deserunt',
        'mollitia', 'animi', 'id', 'est', 'laborum', 'et', 'dolorum', 'fuga',
        'et', 'harum', 'quidem', 'rerum', 'facilis', 'est', 'et', 'expedita',
        'distinctio', 'nam', 'libero', 'tempore', 'cum', 'soluta', 'nobis',
        'est', 'eligendi', 'optio', 'cumque', 'nihil', 'impedit', 'quo',
        'porro', 'quisquam', 'est', 'qui', 'minus', 'id', 'quod', 'maxime',
        'placeat', 'facere', 'possimus', 'omnis', 'voluptas', 'assumenda',
        'est', 'omnis', 'dolor', 'repellendus', 'temporibus', 'autem',
        'quibusdam', 'et', 'aut', 'consequatur', 'vel', 'illum', 'qui',
        'dolorem', 'eum', 'fugiat', 'quo', 'voluptas', 'nulla', 'pariatur',
        'at', 'vero', 'eos', 'et', 'accusamus', 'officiis', 'debitis', 'aut',
        'rerum', 'necessitatibus', 'saepe', 'eveniet', 'ut', 'et',
        'voluptates', 'repudiandae', 'sint', 'et', 'molestiae', 'non',
        'recusandae', 'itaque', 'earum', 'rerum', 'hic', 'tenetur', 'a',
        'sapiente', 'delectus', 'ut', 'aut', 'reiciendis', 'voluptatibus',
        'maiores', 'doloribus', 'asperiores', 'repellat'
    );

    /**
     * Generate a single random word.
     *
     * @example 'Lorem'
     * @return string
     */
    public static function word()
    {
        return static::randomElement(static::$wordList);
    }

    /**
     * Generate an array of random words
     *
     * @example array('Lorem', 'ipsum', 'dolor')
     * @param integer $nb how many words to return
     * @param bool $asText if true the words are returned as one string
     * @return array|string
     */
    public static function words($nb = 3, $asText = false)
    {
        $words = array();
        for ($i=0; $i < $nb; $i++) {
            $words []= static::word();
        }

        return $asText ? join(' ', $words) : $words;
    }

    /**
     * Generate a random sentence
     *
     * @example 'Lorem ipsum dolor sit amet.'
     * @param integer $nbWords around how many words the sentence should contain
     * @param boolean $variableNbWords set to false if you want exactly $nbWords returned,
     *                                  otherwise $nbWords may vary by +/-40% with a minimum of 1
     * @return string
     */
    public static function sentence($nbWords = 6, $variableNbWords = true)
    {
        if ($nbWords <= 0) {
            return '';
        }
        if ($variableNbWords) {
            $nbWords = self::randomizeNbElements($nbWords);
        }

        $words = static::words($nbWords);
        $words[0] = ucwords($words[0]);

        // Bug fix: the original called join($words, ' ') — the legacy
        // reversed argument order that was deprecated in PHP 7.4 and is a
        // fatal TypeError on PHP 8. implode(glue, array) is the
        // behavior-identical, supported form.
        return implode(' ', $words) . '.';
    }

    /**
     * Generate an array of sentences
     *
     * @example array('Lorem ipsum dolor sit amet.', 'Consectetur adipisicing eli.')
     * @param integer $nb how many sentences to return
     * @param bool $asText if true the sentences are returned as one string
     * @return array|string
     */
    public static function sentences($nb = 3, $asText = false)
    {
        $sentences = array();
        for ($i=0; $i < $nb; $i++) {
            $sentences []= static::sentence();
        }

        return $asText ? join(' ', $sentences) : $sentences;
    }

    /**
     * Generate a single paragraph
     *
     * @example 'Sapiente sunt omnis. Ut pariatur ad autem ducimus et. Voluptas rem voluptas sint modi dolorem amet.'
     * @param integer $nbSentences around how many sentences the paragraph should contain
     * @param boolean $variableNbSentences set to false if you want exactly $nbSentences returned,
     *                                      otherwise $nbSentences may vary by +/-40% with a minimum of 1
     * @return string
     */
    public static function paragraph($nbSentences = 3, $variableNbSentences = true)
    {
        if ($nbSentences <= 0) {
            return '';
        }
        if ($variableNbSentences) {
            $nbSentences = self::randomizeNbElements($nbSentences);
        }

        // Bug fix: legacy join(array, glue) argument order (PHP 8 fatal).
        return implode(' ', static::sentences($nbSentences));
    }

    /**
     * Generate an array of paragraphs
     *
     * @example array($paragraph1, $paragraph2, $paragraph3)
     * @param integer $nb how many paragraphs to return
     * @param bool $asText if true the paragraphs are returned as one string, separated by two newlines
     * @return array|string
     */
    public static function paragraphs($nb = 3, $asText = false)
    {
        $paragraphs = array();
        for ($i=0; $i < $nb; $i++) {
            $paragraphs []= static::paragraph();
        }

        return $asText ? join("\n\n", $paragraphs) : $paragraphs;
    }

    /**
     * Generate a text string.
     * Depending on the $maxNbChars, returns a string made of words, sentences, or paragraphs.
     *
     * @example 'Sapiente sunt omnis. Ut pariatur ad autem ducimus et. Voluptas rem voluptas sint modi dolorem amet.'
     * @param integer $maxNbChars Maximum number of characters the text should contain (minimum 5)
     * @throws \InvalidArgumentException when $maxNbChars is below 5
     * @return string
     */
    public static function text($maxNbChars = 200)
    {
        $text = array();
        if ($maxNbChars < 5) {
            throw new \InvalidArgumentException('text() can only generate text of at least 5 characters');
        } elseif ($maxNbChars < 25) {
            // join words
            while (empty($text)) {
                $size = 0;

                // determine how many words are needed to reach the $maxNbChars once;
                while ($size < $maxNbChars) {
                    $word = ($size ? ' ' : '') . static::word();
                    $text []= $word;

                    $size += strlen($word);
                }
                // the last fragment overshot $maxNbChars, drop it
                array_pop($text);
            }
            $text[0][0] = static::toUpper($text[0][0]);
            $text[count($text) - 1] .= '.';
        } elseif ($maxNbChars < 100) {
            // join sentences
            while (empty($text)) {
                $size = 0;

                // determine how many sentences are needed to reach the $maxNbChars once;
                while ($size < $maxNbChars) {
                    $sentence = ($size ? ' ' : '') . static::sentence();
                    $text []= $sentence;

                    $size += strlen($sentence);
                }
                array_pop($text);
            }
        } else {
            // join paragraphs
            while (empty($text)) {
                $size = 0;

                // determine how many paragraphs are needed to reach the $maxNbChars once;
                while ($size < $maxNbChars) {
                    $paragraph = ($size ? "\n" : '') . static::paragraph();
                    $text []= $paragraph;

                    $size += strlen($paragraph);
                }
                array_pop($text);
            }
        }

        // Bug fix: legacy join(array, glue) argument order (PHP 8 fatal).
        return implode('', $text);
    }

    /**
     * Randomize an element count by +/-40%, with a minimum of 1.
     *
     * @param integer $nbElements
     * @return integer
     */
    protected static function randomizeNbElements($nbElements)
    {
        return (int) ($nbElements * mt_rand(60, 140) / 100) + 1;
    }
}
| mit |
ganesh-narayanan/Entrevista | Entrevista/Trees/Properties/AssemblyInfo.cs | 1386 | using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Trees")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("Trees")]
[assembly: AssemblyCopyright("Copyright © 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]
// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]
// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("b589b31e-9114-464a-b86f-0204a85aba58")]
// Version information for an assembly consists of the following four values:
//
// Major Version
// Minor Version
// Build Number
// Revision
//
// You can specify all the values or you can default the Build and Revision Numbers
// by using the '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
| mit |
abcd19/StorageUI | st-field/st-combobox/source/js/List/ListItem/Event/mouseOver.js | 386 | ST.namespace('ST.comboboxFld.list.listItem.event');
ST.comboboxFld.list.listItem.event.mouseOver = (function() {
var handler = function()
{
this.markMouseOver(true);
if(typeof(this._object['_data']['handler']['mouseOverItem']) == 'function')
{
this._object['_data']['handler']['mouseOverItem'](this);
}
};
return handler;
})();
| mit |