repo_name
stringlengths
4
116
path
stringlengths
4
379
size
stringlengths
1
7
content
stringlengths
3
1.05M
license
stringclasses
15 values
mirams/opencor
src/misc/guiutils.cpp
1582
/******************************************************************************* Copyright The University of Auckland Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. *******************************************************************************/ //============================================================================== // GUI utilities //============================================================================== #include "guiutils.h" //============================================================================== #include <QAction> #include <QApplication> #include <QMainWindow> //============================================================================== namespace OpenCOR { //============================================================================== #include "coreguiutils.cpp.inl" //============================================================================== } // namespace OpenCOR //============================================================================== // End of file //==============================================================================
apache-2.0
bartoszmajsak/arquillian-extension-persistence
core/src/main/java/org/jboss/arquillian/persistence/util/SecurityActions.java
12031
package org.jboss.arquillian.persistence.util; /* * JBoss, Home of Professional Open Source * Copyright 2009, Red Hat Middleware LLC, and individual contributors * by the @authors tag. See the copyright.txt in the distribution for a * full listing of individual contributors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * http://www.apache.org/licenses/LICENSE-2.0 * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import java.lang.annotation.Annotation; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; import java.security.AccessController; import java.security.PrivilegedAction; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.List; /** * A set of privileged actions that are not to leak out * of this package * * @version $Revision: $ */ final class SecurityActions { //-------------------------------------------------------------------------------|| // Constructor ------------------------------------------------------------------|| //-------------------------------------------------------------------------------|| /** * No instantiation */ private SecurityActions() { throw new UnsupportedOperationException("No instantiation"); } //-------------------------------------------------------------------------------|| // Utility Methods --------------------------------------------------------------|| //-------------------------------------------------------------------------------|| /** * Obtains the Thread Context ClassLoader */ static 
ClassLoader getThreadContextClassLoader() { return AccessController.doPrivileged(GetTcclAction.INSTANCE); } static boolean isClassPresent(String name) { try { loadClass(name); return true; } catch (Exception e) { return false; } } static Class<?> loadClass(String className) { try { return Class.forName(className, true, getThreadContextClassLoader()); } catch (ClassNotFoundException e) { try { return Class.forName(className, true, SecurityActions.class.getClassLoader()); } catch (ClassNotFoundException e2) { throw new RuntimeException("Could not load class " + className, e2); } } } static <T> T newInstance(final String className, final Class<?>[] argumentTypes, final Object[] arguments, final Class<T> expectedType) { return newInstance(className, argumentTypes, arguments, expectedType, getThreadContextClassLoader()); } static <T> T newInstance(final String className, final Class<?>[] argumentTypes, final Object[] arguments, final Class<T> expectedType, ClassLoader classLoader) { Class<?> clazz = null; try { clazz = Class.forName(className, false, classLoader); } catch (Exception e) { throw new RuntimeException("Could not load class " + className, e); } Object obj = newInstance(clazz, argumentTypes, arguments); try { return expectedType.cast(obj); } catch (Exception e) { throw new RuntimeException("Loaded class " + className + " is not of expected type " + expectedType, e); } } /** * Create a new instance by finding a constructor that matches the argumentTypes signature * using the arguments for instantiation. 
* * @param className Full classname of class to create * @param argumentTypes The constructor argument types * @param arguments The constructor arguments * @return a new instance * @throws IllegalArgumentException if className, argumentTypes, or arguments are null * @throws RuntimeException if any exceptions during creation * @author <a href="mailto:aslak@conduct.no">Aslak Knutsen</a> * @author <a href="mailto:andrew.rubinger@jboss.org">ALR</a> */ static <T> T newInstance(final Class<T> implClass, final Class<?>[] argumentTypes, final Object[] arguments) { if (implClass == null) { throw new IllegalArgumentException("ImplClass must be specified"); } if (argumentTypes == null) { throw new IllegalArgumentException("ArgumentTypes must be specified. Use empty array if no arguments"); } if (arguments == null) { throw new IllegalArgumentException("Arguments must be specified. Use empty array if no arguments"); } final T obj; try { Constructor<T> constructor = getConstructor(implClass, argumentTypes); if(!constructor.isAccessible()) { constructor.setAccessible(true); } obj = constructor.newInstance(arguments); } catch (Exception e) { throw new RuntimeException("Could not create new instance of " + implClass, e); } return obj; } /** * Obtains the Constructor specified from the given Class and argument types * @param clazz * @param argumentTypes * @return * @throws NoSuchMethodException */ static <T> Constructor<T> getConstructor(final Class<T> clazz, final Class<?>... 
argumentTypes) throws NoSuchMethodException { try { return AccessController.doPrivileged(new PrivilegedExceptionAction<Constructor<T>>() { public Constructor<T> run() throws NoSuchMethodException { return clazz.getDeclaredConstructor(argumentTypes); } }); } // Unwrap catch (final PrivilegedActionException pae) { final Throwable t = pae.getCause(); // Rethrow if (t instanceof NoSuchMethodException) { throw (NoSuchMethodException) t; } else { // No other checked Exception thrown by Class.getConstructor try { throw (RuntimeException) t; } // Just in case we've really messed up catch (final ClassCastException cce) { throw new RuntimeException("Obtained unchecked Exception; this code should never be reached", t); } } } } /** * Set a single Field value * * @param target The object to set it on * @param fieldName The field name * @param value The new value */ public static void setFieldValue(final Class<?> source, final Object target, final String fieldName, final Object value) throws NoSuchFieldException { try { AccessController.doPrivileged(new PrivilegedExceptionAction<Void>() { @Override public Void run() throws Exception { Field field = source.getDeclaredField(fieldName); if(!field.isAccessible()) { field.setAccessible(true); } field.set(target, value); return null; } }); } // Unwrap catch (final PrivilegedActionException pae) { final Throwable t = pae.getCause(); // Rethrow if (t instanceof NoSuchFieldException) { throw (NoSuchFieldException) t; } else { // No other checked Exception thrown by Class.getConstructor try { throw (RuntimeException) t; } // Just in case we've really messed up catch (final ClassCastException cce) { throw new RuntimeException("Obtained unchecked Exception; this code should never be reached", t); } } } } public static List<Field> getFieldsWithAnnotation(final Class<?> source, final Class<? 
extends Annotation> annotationClass) { List<Field> declaredAccessableFields = AccessController.doPrivileged(new PrivilegedAction<List<Field>>() { public List<Field> run() { List<Field> foundFields = new ArrayList<Field>(); Class<?> nextSource = source; while (nextSource != Object.class) { for(Field field : nextSource.getDeclaredFields()) { if(field.isAnnotationPresent(annotationClass)) { if(!field.isAccessible()) { field.setAccessible(true); } foundFields.add(field); } } nextSource = nextSource.getSuperclass(); } return foundFields; } }); return declaredAccessableFields; } public static List<Method> getMethodsWithAnnotation(final Class<?> source, final Class<? extends Annotation> annotationClass) { List<Method> declaredAccessableMethods = AccessController.doPrivileged(new PrivilegedAction<List<Method>>() { public List<Method> run() { List<Method> foundMethods = new ArrayList<Method>(); Class<?> nextSource = source; while (nextSource != Object.class) { for(Method method : nextSource.getDeclaredMethods()) { if(method.isAnnotationPresent(annotationClass)) { if(!method.isAccessible()) { method.setAccessible(true); } foundMethods.add(method); } } nextSource = nextSource.getSuperclass(); } return foundMethods; } }); return declaredAccessableMethods; } static String getProperty(final String key) { try { String value = AccessController.doPrivileged(new PrivilegedExceptionAction<String>() { public String run() { return System.getProperty(key); } }); return value; } // Unwrap catch (final PrivilegedActionException pae) { final Throwable t = pae.getCause(); // Rethrow if (t instanceof SecurityException) { throw (SecurityException) t; } if (t instanceof NullPointerException) { throw (NullPointerException) t; } else if (t instanceof IllegalArgumentException) { throw (IllegalArgumentException) t; } else { // No other checked Exception thrown by System.getProperty try { throw (RuntimeException) t; } // Just in case we've really messed up catch (final ClassCastException cce) { 
throw new RuntimeException("Obtained unchecked Exception; this code should never be reached", t); } } } } //-------------------------------------------------------------------------------|| // Inner Classes ----------------------------------------------------------------|| //-------------------------------------------------------------------------------|| /** * Single instance to get the TCCL */ private enum GetTcclAction implements PrivilegedAction<ClassLoader> { INSTANCE; public ClassLoader run() { return Thread.currentThread().getContextClassLoader(); } } }
apache-2.0
carnegiespeech/translations
zh_cn/moodle.php
107320
<?php // This file is part of Moodle - http://moodle.org/ // // Moodle is free software: you can redistribute it and/or modify // it under the terms of the GNU General Public License as published by // the Free Software Foundation, either version 3 of the License, or // (at your option) any later version. // // Moodle is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with Moodle. If not, see <http://www.gnu.org/licenses/>. /** * Strings for component 'moodle', language 'zh_cn', branch 'MOODLE_22_STABLE' * * @package moodle * @copyright 1999 onwards Martin Dougiamas {@link http://moodle.com} * @license http://www.gnu.org/copyleft/gpl.html GNU GPL v3 or later */ defined('MOODLE_INTERNAL') || die(); $string['abouttobeinstalled'] = '即将开始安装'; $string['action'] = '动作'; $string['actions'] = '动作'; $string['active'] = '激活'; $string['activeusers'] = '活跃的用户'; $string['activities'] = '活动'; $string['activities_help'] = '各种活动,例如讨论区、测验和Wiki,使课程获得各种互动功能'; $string['activity'] = '活动'; $string['activityclipboard'] = '移动此活动: <b>{$a}</b>'; $string['activityiscurrentlyhidden'] = '很抱歉!此活动目前处于隐藏状态'; $string['activitymodule'] = '活动模块'; $string['activitymodules'] = '活动模块'; $string['activityreport'] = '活动报表'; $string['activityreports'] = '活动报表'; $string['activityselect'] = '选择把此活动移动到其它地方'; $string['activitysince'] = '活动始于{$a}'; $string['activityweighted'] = '每个用户的活动'; $string['add'] = '添加'; $string['addactivity'] = '添加活动...'; $string['addadmin'] = '添加网站管理员'; $string['addblock'] = '添加一个版块'; $string['addcomment'] = '添加一条评论...'; $string['addcountertousername'] = '向用户名添加数字来建立用户'; $string['addcreator'] = '添加课程管理员'; $string['adddots'] = '添加...'; $string['added'] = '已添加{$a}'; $string['addedrecip'] = '已添加{$a}个新收件人'; $string['addedrecips'] = 
'已添加{$a}个新收件人'; $string['addedtogroup'] = '已加入到组“{$a}”'; $string['addedtogroupnot'] = '未加入到组“{$a}”'; $string['addedtogroupnotenrolled'] = '未加入组“{$a}”,因为未选课'; $string['addinganew'] = '添加一个新{$a}'; $string['addinganewto'] = '向{$a->to}添加一个新{$a->what}'; $string['addingdatatoexisting'] = '加入数据到现有的'; $string['addnewcategory'] = '添加新类别'; $string['addnewcourse'] = '添加新课程'; $string['addnewuser'] = '添加用户'; $string['addnousersrecip'] = '把还没有进入这个{$a}的用户添加到收件人名单中'; $string['addresource'] = '添加资源...'; $string['address'] = '地址'; $string['addstudent'] = '添加学生'; $string['addsubcategory'] = '添加子类别'; $string['addteacher'] = '添加教师'; $string['admin'] = '管理'; $string['adminhelpaddnewuser'] = '手动创建一个新用户帐号'; $string['adminhelpassignadmins'] = '网站管理员可以做任何操作,可以访问站点内的任何地方'; $string['adminhelpassigncreators'] = '课程创建者可以创建新课程'; $string['adminhelpassignsiteroles'] = '分配已定义的站点角色给特定用户'; $string['adminhelpassignstudents'] = '进入一门课程,然后在管理菜单中添加学生'; $string['adminhelpauthentication'] = '可以使用内部网络帐户或者外部数据库'; $string['adminhelpbackup'] = '设定自动备份和备份日程'; $string['adminhelpconfiguration'] = '设定网站外观及工作参数'; $string['adminhelpconfigvariables'] = '设定影响网站一般操作的变量'; $string['adminhelpcourses'] = '定义课程及类别,指派用户,编辑申请中的课程'; $string['adminhelpeditorsettings'] = '为HTML编辑器设定基本属性'; $string['adminhelpedituser'] = '浏览用户列表,并可编辑'; $string['adminhelpenvironment'] = '检查您的服务器是否满足当前和未来的安装要求'; $string['adminhelpfailurelogs'] = '浏览登录失败的日志'; $string['adminhelplanguage'] = '检查和编辑当前的语言包'; $string['adminhelplogs'] = '浏览此站点上的所有活动日志'; $string['adminhelpmanageblocks'] = '管理已安装的版块和设置'; $string['adminhelpmanagedatabase'] = '直接访问数据库(小心!)'; $string['adminhelpmanagefilters'] = '选择文本过滤器和相关的设置'; $string['adminhelpmanagemodules'] = '管理已安装的模块和设置'; $string['adminhelpmanageroles'] = '创建和定义可分配给用户的角色'; $string['adminhelpmymoodle'] = '为用户配置“我的moodle”版块'; $string['adminhelpreports'] = '站点级报表'; $string['adminhelpsitefiles'] = '发布普通文件或者上传外部备份'; $string['adminhelpsitesettings'] = '定义首页外观'; $string['adminhelpstickyblocks'] = '配置Moodle全站的粘性版块'; 
$string['adminhelpthemes'] = '选择网站的外观(颜色、字体等)'; $string['adminhelpuploadusers'] = '从文本文件导入新的用户帐号'; $string['adminhelpusers'] = '定义用户并设定验证'; $string['adminhelpxmldbeditor'] = '编辑XMLDB文件。仅限开发者。'; $string['administration'] = '课程管理'; $string['administrationsite'] = '网站管理'; $string['administrator'] = '管理员'; $string['administratordescription'] = '管理员可操作站点内的任何课程。'; $string['administrators'] = '管理员'; $string['administratorsall'] = '全部管理员'; $string['administratorsandteachers'] = '管理员和教师'; $string['advanced'] = '高级'; $string['advancedfilter'] = '高级搜索'; $string['advancedsettings'] = '高级设置'; $string['again'] = '再一次'; $string['aimid'] = 'AIM号码'; $string['ajaxno'] = '不:使用基本的Web功能'; $string['ajaxuse'] = 'AJAX和JavaScript'; $string['ajaxyes'] = '是:使用高级的Web功能'; $string['all'] = '所有'; $string['allactions'] = '参加了课程'; $string['allactivities'] = '所有活动'; $string['allchanges'] = '所有改变'; $string['alldays'] = '所有日期'; $string['allfieldsrequired'] = '所有字段都是必需的'; $string['allfiles'] = '所有文件'; $string['allgroups'] = '所有组'; $string['alllogs'] = '所有日志'; $string['allmods'] = '所有{$a}'; $string['allow'] = '允许'; $string['allowinternal'] = '也允许内部方法'; $string['allownone'] = '全部禁止'; $string['allownot'] = '不允许'; $string['allparticipants'] = '所有成员'; $string['allteachers'] = '所有教师'; $string['alphanumerical'] = '只能包含字母、数字、横线(-)或小数点(.)'; $string['alreadyconfirmed'] = '您的注册已经确认了'; $string['always'] = '始终'; $string['and'] = '{$a->one} 和 {$a->two}'; $string['answer'] = '回答'; $string['any'] = '任意'; $string['approve'] = '批准'; $string['areyousuretorestorethis'] = '您确定要继续吗?'; $string['areyousuretorestorethisinfo'] = '稍后,您要选择把此备份添加到一门现有课程中,还是创建一门新课程。'; $string['asc'] = '升序'; $string['assessment'] = '评价'; $string['assignadmins'] = '任命网站管理员'; $string['assigncreators'] = '任命课程管理员'; $string['assignsiteroles'] = '指派网站范围的角色'; $string['authenticateduser'] = '已认证用户'; $string['authenticateduserdescription'] = '所有已登录用户。'; $string['authentication'] = '身份认证'; $string['authenticationplugins'] = '身份认证插件'; 
$string['autosubscribe'] = '讨论区自动订阅'; $string['autosubscribeno'] = '不:不要自动订阅讨论区'; $string['autosubscribeyes'] = '是:当我发帖时,就自动订阅该讨论区'; $string['availability'] = '有效性'; $string['availability_help'] = '这个设置决定课程是否出现在课程列表中。教师和管理员以外的其他用户都不能进入此课程。'; $string['availablecourses'] = '现有课程'; $string['back'] = '返回'; $string['backto'] = '回到“{$a}”'; $string['backtocourselisting'] = '返回课程列表'; $string['backtopageyouwereon'] = '回到刚才的页面'; $string['backtoparticipants'] = '返回成员列表'; $string['backup'] = '备份'; $string['backupactivehelp'] = '选择是否进行自动备份。'; $string['backupcancelled'] = '备份被中断'; $string['backupcoursefileshelp'] = '如果允许,那么课程文件将会被包含在自动备份中'; $string['backupdate'] = '备份日期'; $string['backupdatenew'] = '{$a->TAG}现在是{$a->year}{$a->month}{$a->mday},{$a->weekday}<br />'; $string['backupdateold'] = '{$a->TAG}曾是{$a->year}{$a->month}{$a->mday},{$a->weekday}<br />'; $string['backupdaterecordtype'] = '<br />{$a->recordtype} - {$a->recordname}<br />'; $string['backupdetails'] = '备份细节'; $string['backupexecuteathelp'] = '选择需要运行自动备份的时间'; $string['backupfailed'] = '您的一些课程尚未保存!!'; $string['backupfilename'] = '备份'; $string['backupfinished'] = '备份圆满成功'; $string['backupfromthissite'] = '此站已经备份过了吗?'; $string['backupgradebookhistoryhelp'] = '如果启用,成绩单的历史会被包含在自动备份中。注意,如果想使用这个功能,成绩历史就一定不能在服务器设置(disablegradehistory)中被禁用'; $string['backupincludemoduleshelp'] = '在自动备份中,选择是否希望包含带有或不带有用户数据的课程模块'; $string['backupincludemoduleuserdatahelp'] = '选择是否希望在自动备份中包含模块用户的数据。'; $string['backupkeephelp'] = '您想要为每门课程保存多少个最近的备份? 
(将会自动删除比较旧的备份)'; $string['backuplogdetailed'] = '详细的运行日志'; $string['backuploglaststatus'] = '最后的运行日志'; $string['backupmissinguserinfoperms'] = '注意:本次备份不包含用户数据。“练习”模块和“工作室”模块与此种备份不兼容,所以它们完全不被备份。'; $string['backupnext'] = '下一次备份'; $string['backupnonisowarning'] = '警告:此备份来自Moodle的非Unicode的版本(1.6版以前)。如果此备份包含任何非ISO-8859-1文本,那么当试图把此备份恢复到Moodle的Unicode版本时,他们可能被破坏。请参考<a href="http://docs.moodle.org/en/Backup_FAQ">Backup FAQ</a>获取更多关于正确恢复备份的方法。'; $string['backuporiginalname'] = '备份名称'; $string['backuproleassignments'] = '备份这些角色的角色分配'; $string['backupsavetohelp'] = '存放备份文件的位置的完整路径<br />(不填则保存在课程的默认目录中)'; $string['backupsitefileshelp'] = '如果启用,那么课程中使用的网站文件将自动包含在备份文件中。'; $string['backuptakealook'] = '请看一下如下目录中的备份日志: {$a}'; $string['backupuserfileshelp'] = '选择是否在自动备份中包含用户文件(如用户头像)'; $string['backupversion'] = '备份版本'; $string['block'] = '版块'; $string['blockconfiga'] = '配置{$a}版块'; $string['blockconfigbad'] = '由于此版块未正确实现,因此无法显示配置界面。'; $string['blockdeleteconfirm'] = '您将完全删除版块“{$a}”。这将从数据库删除和此版块相关的全部数据。您确定要继续吗?'; $string['blockdeletefiles'] = '与版块“{$a->block}”相关联的所有数据都已经被删除了。要完成删除(并防止版块重新自动安装),您应当从服务器上删除如下目录:{$a->directory}。'; $string['blocks'] = '版块'; $string['blocksaddedit'] = '添加/修改版块'; $string['blockseditoff'] = '版块编辑关'; $string['blocksediton'] = '版块编辑开'; $string['blocksetup'] = '设定版块表格'; $string['blocksuccess'] = '{$a}表格已经正确设置了'; $string['brief'] = '简短'; $string['bycourseorder'] = '按课程顺序'; $string['byname'] = '由“{$a}”'; $string['bypassed'] = '忽视'; $string['cachecontrols'] = '缓存控制'; $string['cancel'] = '取消'; $string['cancelled'] = '取消'; $string['categories'] = '课程分类'; $string['category'] = '课程分类'; $string['categoryadded'] = '已经添加类别‘{$a}’'; $string['categorycontents'] = '课程子目录和课程'; $string['categorycurrentcontents'] = '{$a}的内容'; $string['categorydeleted'] = '已经删除类别‘{$a}’'; $string['categoryduplicate'] = '“{$a}”类别已经存在!'; $string['category_help'] = '<p align="center"><b>课程类别</b></p> <p>您的 Moodle 管理员可能已经设立了几个课程分类。</p> <p>例如,“自然科学”,“人文科学”,“公共卫生”等。</p> 
<p>选择一个最适合用于您的课程的。这个选择将影响到您的课程在课程列表上的显示,也会更容易的让学生们找到您的课程。</p>'; $string['categorymodifiedcancel'] = '课程目录被编辑,请取消后重试。'; $string['categoryname'] = '类别名称'; $string['categoryupdated'] = '‘{$a}’已经更新'; $string['changedpassword'] = '新密码'; $string['changepassword'] = '更改密码'; $string['changessaved'] = '更改已经保存'; $string['check'] = '检查'; $string['checkall'] = '检查全部'; $string['checkingbackup'] = '检查备份'; $string['checkingcourse'] = '检查课程'; $string['checkingforbbexport'] = '检查BlackBoard的输出'; $string['checkinginstances'] = '检查实例'; $string['checkingsections'] = '检查文章'; $string['checklanguage'] = '校对语言'; $string['checknone'] = '不检查'; $string['childcoursenotfound'] = '未发现子课程!'; $string['childcourses'] = '子课程'; $string['choose'] = '选择'; $string['choosecourse'] = '选择一个课程'; $string['choosedots'] = '选择...'; $string['chooselivelogs'] = '或者查看当前活动项目'; $string['chooselogs'] = '选择您要看的日志'; $string['choosereportfilter'] = '为报表选择一个过滤器'; $string['choosetheme'] = '选择主题风格'; $string['chooseuser'] = '选择一个用户'; $string['city'] = '市/县'; $string['clambroken'] = '您的管理员激活了对上传文件的病毒检查,但有些地方配置错误了。<br />您上传文件的操作并未成功。系统已经给管理员发信通知他们修复这个错误。<br />您可以稍后重新上传这个文件。'; $string['clamdeletedfile'] = '文件已经被删除了'; $string['clamdeletedfilefailed'] = '无法删除文件'; $string['clamemailsubject'] = '{$a} :: Clam AV 通知'; $string['clamfailed'] = 'Clam AV运行失败。返回的错误信息是{$a}。下面是Clam的输出:'; $string['clamlost'] = 'Moodle已经被配置为运行clam对上传的文件进行检查,但提供给Clam AV的路径 {$a} 是非法的。'; $string['clamlostandactinglikevirus'] = '此外,Moodle当前被配置为,如果clam的运行失败了,系统将会以对待病毒的方式来对待所有文件。这意味着在错误被修复前,学生们不能上传任何文件。'; $string['clammovedfile'] = '文件已经被移动到您指定的隔离目录中,新的位置是{$a}。'; $string['clammovedfilebasic'] = '文件已经被移动到一个隔离目录中了。'; $string['clamquarantinedirfailed'] = '无法将文件转移到隔离目录中,您应当修复这个问题,否则系统发现文件被感然后将会删除它们。'; $string['clamunknownerror'] = 'clam发生了未知错误。'; $string['cleaningtempdata'] = '清空临时数据'; $string['clear'] = '清空'; $string['clickhelpiconformoreinfo'] = '...继续...点击帮助图标阅读全文'; $string['clickhere'] = '点击这里...'; $string['clicktochange'] = '点此修改'; 
$string['clicktochangeinbrackets'] = '{$a}(点击修改)'; $string['clicktohideshow'] = '点击来展开或折叠'; $string['closewindow'] = '关闭窗口'; $string['collapseall'] = '全部折叠'; $string['commentincontext'] = '在上下文中查找此条评论'; $string['comments'] = '评论'; $string['commentsnotenabled'] = '未启用评论功能'; $string['commentsrequirelogin'] = '登录后才能查看评论'; $string['comparelanguage'] = '比较并编辑当前语言'; $string['complete'] = '完全'; $string['completereport'] = '完整报表'; $string['configuration'] = '设置'; $string['confirm'] = '确认'; $string['confirmcheckfull'] = '您真的确认{$a}?'; $string['confirmed'] = '您的注册已经得到确认'; $string['confirmednot'] = '您的注册尚未得到确认!'; $string['content'] = '内容'; $string['continue'] = '继续'; $string['continuetocourse'] = '点击这里进入您的课程'; $string['convertingwikitomarkdown'] = '把Wiki转为Markdown'; $string['cookiesenabled'] = '您浏览器的 cookies 设置必须打开'; $string['cookiesenabled_help'] = '本站使用两个cookie: 最重要的是会话cookie,通常叫做 MoodleSession。您必须在浏览器里接受这个 cookie,才能使您的登录在页与页之间连续有效。当您登出或关闭浏览器,这个 cookie 在浏览器和服务器里都会被删除。 另一个 cookie 纯粹是为了方便,通常被叫做类似 MOODLEID 之类的名字。它只用来在浏览器里记住您的用户名。这意味着当您再次回到本站,登录页面里的用户名已经替您填好了。拒绝此 cookie 是安全的,只不过每次登录都要重新输入用户名而已。'; $string['cookiesnotenabled'] = '很不幸,您的浏览器目前并未开启对 cookie 的支持'; $string['copy'] = '复制'; $string['copyasnoun'] = '复制'; $string['copyingcoursefiles'] = '复制课程文件'; $string['copyingsitefiles'] = '复制课程中使用的站点文件'; $string['copyinguserfiles'] = '复制用户文件'; $string['copyingzipfile'] = '复制zip文件'; $string['copyrightnotice'] = '版权声明'; $string['coresystem'] = '系统'; $string['cost'] = '课程费用'; $string['costdefault'] = '缺省费用'; $string['counteditems'] = '{$a->count}{$a->items}'; $string['country'] = '国家和地区'; $string['course'] = '课程'; $string['courseadministration'] = '课程管理'; $string['courseapprovedemail'] = '您申请的课程《{$a->name}》已经被批准了,并且您已经是该课程的{$a->teacher}。要访问您的新课程,请点击 {$a->url}。'; $string['courseapprovedemail2'] = '您申请的课程,{$a->name},已经被批准。访问您的新课程,请到 {$a->url}'; $string['courseapprovedfailed'] = '保存待批准课程失败'; $string['courseapprovedsubject'] = '您的课程已经通过'; $string['courseavailable'] = '该课程允许学生学习'; 
$string['courseavailablenot'] = '该课程不允许学生学习'; $string['coursebackup'] = '课程备份'; $string['coursecategories'] = '课程类别'; $string['coursecategory'] = '课程类别'; $string['coursecategorydeleted'] = '已删除课程类别 {$a}'; $string['coursecompletion'] = '修完课程'; $string['coursecompletions'] = '修完课程'; $string['coursecreators'] = '课程创建者'; $string['coursecreatorsdescription'] = '课程创建者可以创建新课程。'; $string['coursedeleted'] = '已删除课程 {$a}'; $string['courseextendednamedisplay'] = '{$a->shortname} {$a->fullname}'; $string['coursefiles'] = '从旧版继承的课程文件'; $string['coursefilesedit'] = '编辑课程文件'; $string['coursefileswarning'] = '课程文件已被废弃'; $string['coursefileswarning_help'] = 'Moodle 2.0之后,课程文件将不再存在。请尽可能使用外部容器。'; $string['courseformatdata'] = '课程数据格式'; $string['courseformats'] = '课程格式'; $string['coursegrades'] = '课程成绩'; $string['coursehelpcategory'] = '调整课程在课程列表中的位置可以让学生更容易发现它。'; $string['coursehelpforce'] = '强制课程中的所有活动都使用课程的组模式。'; $string['coursehelpformat'] = '课程主页会用这里指定的格式显示。'; $string['coursehelphiddensections'] = '课程中隐藏的节如何向学生显示。'; $string['coursehelpmaximumupload'] = '定义课程中上传文件的最大长度。会受到全站设置的影响。'; $string['coursehelpnewsitemsnumber'] = '课程主页右边新闻版块中显示的最近新闻条数。(设为0表示新闻版块不会出现)'; $string['coursehelpnumberweeks'] = '课程主页显示的星期/主题个数'; $string['coursehelpshowgrades'] = '打开成绩单的显示。此设置不会影响每个活动中成绩的显示。'; $string['coursehidden'] = '此课程目前并不对学生开放'; $string['courseinfo'] = '课程信息'; $string['courselegacyfiles'] = '课程文件'; $string['courselegacyfiles_help'] = '“课程文件”用来与Moodle 1.9及更早的版本兼容。存放于此的文件可被课程所有成员访问,无论您是否加了指向它们的链接。并且您不会知道这些文件都被用在Moodle的哪里。 如果您在这里保存课程文件,将会遇到一些关于隐私和安全的问题。在备份、课程导入、内容共享或复用时,有可能遇到文件丢失的情况。因此,建议您不用使用此存储区,除非您很清楚您正在做什么。 下面的链接会提供进一步的信息,并展示给您一些在Moodle 2中管理文件的更好方法。'; $string['coursemessage'] = '课程用户消息'; $string['coursenotaccessible'] = '这个课程未对公众开放'; $string['courseoverview'] = '课程总览'; $string['courseoverviewgraph'] = '课程记录预览'; $string['courseprofiles'] = '课程档案'; $string['coursereasonforrejecting'] = '拒绝此申请的理由'; $string['coursereasonforrejectingemail'] = '会通过email发给请求人'; $string['coursereject'] = '拒绝课程申请'; 
$string['courserejected'] = '已经拒绝了课程申请并通知了申请人。'; $string['courserejectemail'] = '很抱歉,您提交的课程申请已经被拒绝了。下面是原因: {$a}'; $string['courserejectreason'] = '请简要说明拒绝这个课程的理由<br />(系统会将它发送给申请人)'; $string['courserejectsubject'] = '您的课程申请已经被拒绝'; $string['coursereport'] = '课程报告'; $string['coursereports'] = '课程报表'; $string['courserequest'] = '课程申请'; $string['courserequestdetails'] = '您正申请的课程的详细信息'; $string['courserequestfailed'] = '由于某些原因,无法保存您的课程申请。'; $string['courserequestintro'] = '通过下面的表单可以申请创建一个新课程。<br />请尽量仔细填写以便管理员了解您有多需要这门课。'; $string['courserequestreason'] = '申请此课程的原因'; $string['courserequestsuccess'] = '已经保存了您的课程申请。期望几天内会有 Email 通知您结果'; $string['courserequestsupport'] = '一些额外的信息,帮助管理员来评估您的请求'; $string['courserestore'] = '课程恢复'; $string['courses'] = '课程'; $string['coursesectionsummaries'] = '课程小节概述'; $string['coursesettings'] = '课程缺省设置'; $string['coursesmovedout'] = '课程已从 {$a} 中移出'; $string['coursespending'] = '待审批的课程'; $string['coursestart'] = '课程开始'; $string['coursesummary'] = '课程简介'; $string['coursesummary_help'] = '课程简介会显示在课程列表中。搜索课程时会同时搜索课程名和课程简介。'; $string['courseupdates'] = '课程更新'; $string['courseuploadlimit'] = '课程上传限制'; $string['create'] = '创建'; $string['createaccount'] = '提交'; $string['createcategory'] = '创建课程分类'; $string['createfolder'] = '在{$a}创建一个文件夹'; $string['createuser'] = '创建用户'; $string['createuserandpass'] = '设定用户名和密码'; $string['createziparchive'] = '创建zip压缩文件'; $string['creatingblocks'] = '创建版块'; $string['creatingblocksroles'] = '创建版块级角色分配和覆盖'; $string['creatingblogsinfo'] = '创建博客信息'; $string['creatingcategoriesandquestions'] = '创建类别和题目'; $string['creatingcoursemodules'] = '创建课程模块'; $string['creatingcourseroles'] = '创建课程级角色分配和覆盖'; $string['creatingevents'] = '创建事件'; $string['creatinggradebook'] = '创建成绩单'; $string['creatinggroupings'] = '正在创建大组'; $string['creatinggroupingsgroups'] = '正在把小组加入大组中'; $string['creatinggroups'] = '创建组'; $string['creatinglogentries'] = '创建日志'; $string['creatingmessagesinfo'] = '创建消息信息'; $string['creatingmodroles'] = 
'创建模块级角色分配和覆盖'; $string['creatingnewcourse'] = '创建新课程'; $string['creatingrolesdefinitions'] = '创建角色定义'; $string['creatingscales'] = '创建等级'; $string['creatingsections'] = '创建小节'; $string['creatingtemporarystructures'] = '创建临时组织'; $string['creatinguserroles'] = '创建用户级角色分配和覆盖'; $string['creatingusers'] = '创建用户'; $string['creatingxmlfile'] = '创建XML文件'; $string['currency'] = '货币'; $string['currentcourseadding'] = '当前课程,向其中添加数据'; $string['currentcoursedeleting'] = '当前课程,先删除它'; $string['currentlanguage'] = '当前语言'; $string['currentlocaltime'] = '您的当地时间'; $string['currentlyselectedusers'] = '当前已选用户'; $string['currentpicture'] = '当前图片'; $string['currentrelease'] = '当前发行版信息'; $string['currentversion'] = '当前版本'; $string['databasechecking'] = '把Moodle数据库从{$a->oldversion}升级到{$a->newversion}'; $string['databaseperformance'] = '数据库选项'; $string['databasesetup'] = '正在安装数据库'; $string['databasesuccess'] = '数据库升级成功'; $string['databaseupgradebackups'] = '现在的备份版本是{$a}'; $string['databaseupgradeblocks'] = '现在的版块版本是{$a}'; $string['databaseupgradegroups'] = '现在组的版本是{$a}'; $string['databaseupgradelocal'] = '现在的本地数据库版本是{$a}'; $string['databaseupgrades'] = '升级数据库'; $string['date'] = '日期'; $string['datechanged'] = '日期已变更'; $string['datemostrecentfirst'] = '日期 - 近的在前'; $string['datemostrecentlast'] = '日期 - 最近的在后'; $string['day'] = '天'; $string['days'] = '天'; $string['decodinginternallinks'] = '解码内部链接'; $string['default'] = '缺省'; $string['defaultcoursestudent'] = '学生'; $string['defaultcoursestudentdescription'] = '学生在课程中通常拥有较少的特权。'; $string['defaultcoursestudents'] = '学生'; $string['defaultcoursesummary'] = '写一段简明有趣的文字介绍一下该课程'; $string['defaultcourseteacher'] = '教师'; $string['defaultcourseteacherdescription'] = '教师可以在负责的课程中做任何事,包括更改活动和为学生评分。'; $string['defaultcourseteachers'] = '教师'; $string['delete'] = '删除'; $string['deleteablock'] = '删除一个版块'; $string['deleteall'] = '全部删除'; $string['deleteallcannotundo'] = '全部删除(不能撤销)'; $string['deleteallcomments'] = '删除所有评论'; $string['deleteallratings'] = 
'删除所有评分'; $string['deletecategory'] = '删除课程类别:{$a}'; $string['deletecategorycheck'] = '您确定要完全删除类别<b>“{$a}”</b>么?<br />此类别中的所有课程将会被移动到父类别或类别“五花八门”中。'; $string['deletecategorycheck2'] = '如果删除此课程类别,您必须选择该类别下的课程和子类别的处理办法。'; $string['deletecategoryempty'] = '此分类为空'; $string['deletecheck'] = '删除{$a}吗?'; $string['deletecheckfiles'] = '您真的确定要彻底删除这些文件?'; $string['deletecheckfull'] = '您真的确定要彻底删除{$a}吗?'; $string['deletecheckwarning'] = '您打算删除这些文件'; $string['deletecomment'] = '删除这条评论'; $string['deletecompletely'] = '彻底删除'; $string['deletecourse'] = '删除一个课程'; $string['deletecoursecheck'] = '您真的确定要彻底删除该课程及其所有文件资料吗?'; $string['deleted'] = '已经删除'; $string['deletedactivity'] = '已经删除{$a}'; $string['deletedcourse'] = '{$a}已经被彻底删除'; $string['deletednot'] = '无法删除{$a}!'; $string['deletelogs'] = '删除日志'; $string['deleteselected'] = '删除选定的项'; $string['deleteselectedkey'] = '删除选定的key'; $string['deletingcourse'] = '正在删除{$a}'; $string['deletingexistingcoursedata'] = '删除已存在的课程数据'; $string['deletingolddata'] = '删除旧数据'; $string['department'] = '系别'; $string['desc'] = '降序'; $string['description'] = '描述'; $string['deselectall'] = '全不选'; $string['detailedless'] = '少些细节'; $string['detailedmore'] = '多些细节'; $string['directory'] = '字典'; $string['disable'] = '禁用'; $string['disabledcomments'] = '评论被禁止'; $string['displayingfirst'] = '只显示第一个{$a->count} {$a->things}'; $string['displayingrecords'] = '显示{$a}条记录'; $string['displayingusers'] = '从{$a->start}到{$a->end}显示用户'; $string['displayonpage'] = '在页面显示'; $string['documentation'] = 'Moodle文档'; $string['down'] = '向下'; $string['download'] = '下载'; $string['downloadall'] = '全部下载'; $string['downloadexcel'] = '以Excel文件格式下载'; $string['downloadfile'] = '下载文件'; $string['downloadods'] = '以ODS格式下载'; $string['downloadtext'] = '以纯文本格式下载'; $string['doyouagree'] = '您已经阅读并理解了这些条件吗?'; $string['duplicate'] = '复制'; $string['duplicateconfirm'] = '您确定要复制{$a->modtype}“{$a->modname}”?'; $string['duplicatecontcourse'] = '返回课程'; $string['duplicatecontedit'] = '修改新拷贝'; 
$string['duplicatenosupport'] = '不能复制活动“{$a->modname}”,因为{$a->modtype}模块不支持备份和恢复。'; $string['duplicatesuccess'] = '{$a->modtype}“{$a->modname}”已被成功复制'; $string['duplicatinga'] = '正在复制: {$a}'; $string['edhelpaspellpath'] = '要在编辑器中使用拼写检查功能,您必须在服务器上安装<strong>aspell 0.50</strong>或其更高版本,并且还要正确地指定aspell程序的路径。在Unix/Linux系统上通常是<strong>/usr/bin/aspell</strong>,当然也可能是其它地方。'; $string['edhelpbgcolor'] = '定义可编辑区域的背景颜色。<br />可以使用如下合法的值: #ffffff 或 white'; $string['edhelpcleanword'] = '这个项目激活或关闭Word相关的格式过滤。'; $string['edhelpenablespelling'] = '激活或关闭拼写检查。当激活时,服务器上必须已经安装了<strong>aspell</strong>。第二个参数是<strong>缺省字典</strong>。它将会在aspell没有用户所用语言的字典时使用。'; $string['edhelpfontfamily'] = '字体属性是一系列具体的或通用字体名称的列表。字体名称必须以半角逗号分隔。'; $string['edhelpfontlist'] = '定义在编辑器的下拉菜单中使用的字体。'; $string['edhelpfontsize'] = '缺省的字号设定了字体的大小。<br />合法的值如下: medium、large、smaller、lager、10pt、11px。'; $string['edit'] = '编辑'; $string['edita'] = '编辑{$a}'; $string['editcategorysettings'] = '编辑类别设置'; $string['editcategorythis'] = '编辑该类别'; $string['editcoursesettings'] = '编辑课程设定'; $string['editfiles'] = '编辑文件'; $string['editgroupprofile'] = '编辑组'; $string['editinga'] = '编辑{$a}'; $string['editingteachershort'] = '编辑器'; $string['editlock'] = '不能修改这个值!'; $string['editmyprofile'] = '编辑个人资料'; $string['editorbgcolor'] = '背景颜色'; $string['editorcleanonpaste'] = '在粘贴时净化Word HTML'; $string['editorcommonsettings'] = '一般设置'; $string['editordefaultfont'] = '缺省字体'; $string['editorenablespelling'] = '激活拼写检查'; $string['editorfontlist'] = '字体列表'; $string['editorfontsize'] = '缺省字号'; $string['editorresettodefaults'] = '重设为缺省设置'; $string['editorsettings'] = '编辑器设置'; $string['editorshortcutkeys'] = '编辑器快捷键'; $string['editsettings'] = '更改设置'; $string['editsummary'] = '编辑概要'; $string['editthisactivity'] = '编辑这个活动项目'; $string['editthiscategory'] = '编辑该类别'; $string['edituser'] = '编辑用户帐号'; $string['email'] = 'Email地址'; $string['emailactive'] = 'Email已激活'; $string['emailagain'] = 'Email (重复)'; $string['emailcharset'] = 'Email字符集'; 
$string['emailconfirm'] = '确认您的帐号'; $string['emailconfirmation'] = '{$a->firstname},您好: 刚才您用您的电子邮箱地址在“{$a->sitename}上”申请了一个新帐号。 如果要确认您的新帐号,请点击以下链接: {$a->link} 在多数的邮件软件中,这个地址应该显示为蓝色的链接,可以直接点击。如果不是这样,请将以上地址复制粘贴到网页浏览器窗口上部的地址栏中,打开这个网页。 如果您需要帮助,请联系网站管理员,{$a->admin}'; $string['emailconfirmationsubject'] = '{$a}: 帐号确认'; $string['emailconfirmsent'] = '<p>一封邮件已经发送到您的地址 <b>{$a}</b>。</p> <p>这封邮件简要说明了您如何完成注册。</p> <p>如果您还遇到什么困难,请和网站管理员联系。</p>'; $string['emaildigest'] = 'Email摘要类型'; $string['emaildigestcomplete'] = '完整的(每天一封,包含完整内容)'; $string['emaildigestoff'] = '无摘要(每个帖子发送一封Email)'; $string['emaildigestsubjects'] = '主题(每天一封,只包含主题)'; $string['emaildisable'] = '这个email地址已禁用'; $string['emaildisableclick'] = '点击此处不再向此地址发送任何邮件'; $string['emaildisplay'] = '显示Email'; $string['emaildisplaycourse'] = '只允许课程成员查看我的email地址'; $string['emaildisplayhidden'] = '已隐藏 Email'; $string['emaildisplayno'] = '对所有人隐藏我的email地址'; $string['emaildisplayyes'] = '允许所有人看到我的email地址'; $string['emailenable'] = '该email地址已激活'; $string['emailenableclick'] = '点击此处重新允许向此地址发送email'; $string['emailexists'] = '该email地址已经注册过了。'; $string['emailformat'] = 'Email格式'; $string['emailmustbereal'] = '注意:您的Email地址必须是真实的'; $string['emailnotallowed'] = '禁止使用如下域名的Email地址:({$a})'; $string['emailnotfound'] = '数据库中未找到此Email'; $string['emailonlyallowed'] = '此Email不在允许的域名中: ({$a})'; $string['emailpasswordchangeinfo'] = '{$a->firstname},您好: 有人(一般情况下是您自己)为您在“{$a->sitename}”上的帐号申请了新的密码。 要修改密码,请访问以下链接: {$a->link} 在多数的邮件软件中,这个地址应该显示为蓝色的链接,可以直接点击。如果不是这样,请将以上地址复制粘贴到网页浏览器窗口上部的地址栏中,打开这个网页。 如果您需要帮助,请联系网站管理员,{$a->admin}'; $string['emailpasswordchangeinfodisabled'] = '{$a->firstname}您好: 有人(您)从\'{$a->sitename}\'请求获得密码。 但是很抱歉您的帐号无法使用并且无法重置。请与管理员{$a->admin}联系。'; $string['emailpasswordchangeinfofail'] = '{$a->firstname},您好, 有人(可能是您自己)为您在“{$a->sitename}”的账号请求换个新密码。 但很遗憾,此网站上的密码不能重置。如果您需要其他帮助,请联系网站管理员, {$a->admin}'; $string['emailpasswordchangeinfosubject'] = '{$a}:确定改变密码'; $string['emailpasswordconfirmation'] = '{$a->firstname},您好: 
有人(一般情况下是您自己)为您在“{$a->sitename}”上的帐号申请了新的密码。 如果您确认这是您做的,并想通过email获得新密码,请点击以下链接: {$a->link} 在多数的邮件软件中,这个地址应该显示为蓝色的链接,可以直接点击。如果不是这样,请将以上地址复制粘贴到网页浏览器窗口上部的地址栏中,打开这个网页。 如果您需要帮助,请联系网站管理员,{$a->admin}'; $string['emailpasswordconfirmationsubject'] = '{$a}: 确认更改密码'; $string['emailpasswordconfirmmaybesent'] = '<p>如果您提供了正确的用户名或email地址,那么一封email已经发送给您。</p> <p>它指导您如何确认和完成对密码的修改,这个过程很简单。如果您还是遇到了困难,请联系网站管理员。</p>'; $string['emailpasswordconfirmsent'] = '一封Email已经发往您的地址<b>{$a}</b>。 <br />其中有关于如何确认并完成本次密码更改的简单说明。 如果您还是遇到困难,请联系网站管理员。'; $string['emailpasswordsent'] = '谢谢,您已经确认了密码更改。 一封包含新密码的邮件已经发往您的邮箱<br /><b>{$a->email}</b>。<br /> 这个新密码是自动生成的――您可能会想将它<a href="{$a->link}">更改<a>为一个更容易记住的密码。'; $string['enable'] = '使用'; $string['encryptedcode'] = '加密代码'; $string['english'] = '英语'; $string['entercourse'] = '点击进入该课程'; $string['enteremail'] = '输入您的 Email 地址'; $string['enteremailaddress'] = '输入您的Email地址以重设密码,新密码将送到您的信箱中。'; $string['enterusername'] = '输入您的用户名'; $string['entries'] = '条目'; $string['error'] = '错误'; $string['errortoomanylogins'] = '很抱歉,您已超过允许的最大登录尝试次数。请重启您的浏览器。'; $string['errorwhenconfirming'] = '因为发生错误,您的身份还未被确认。如果您是通过点击email中的一个链接来到的这里,请确认您email里的链接没有损坏或被自动换行。您可以通过拷贝和粘贴来重构正确的链接。'; $string['everybody'] = '所有人'; $string['executeat'] = '执行'; $string['existing'] = '已有'; $string['existingadmins'] = '现有网站管理员'; $string['existingcourse'] = '现有课程'; $string['existingcourseadding'] = '现有课程,向其中添加数据'; $string['existingcoursedeleting'] = '现有课程,先删除它'; $string['existingcreators'] = '现有课程管理员'; $string['existingstudents'] = '已选课学生'; $string['existingteachers'] = '现有教师'; $string['expandall'] = '展开全部'; $string['expirynotify'] = '选课期满通告'; $string['expirynotifyemail'] = '{$a->threshold}天之后,本课程里下列学生的选课将过期: {$a->current} {$a->threshold}天之内,本课程里下列学生的选课将过期: {$a->past} 您可以到下述页面延长他们的选课时间: {$a->extendurl}'; $string['expirynotifystudents'] = '是否提醒学生'; $string['expirynotifystudentsemail'] = '亲爱的{$a->studentstr}: 本信提醒您在课程“{$a->course}”的选课将在{$a->threshold}天之后到期。 如有疑问,请联系{$a->teacherstr}。'; 
$string['expirynotifystudents_help'] = '如果指定了选课周期,那么这项设置会决定学生在即将被课程注销时是否会收到email通知。'; $string['expirythreshold'] = '课程到期后期限'; $string['expirythreshold_help'] = '如果指定了选课周期,那么这项设置会决定在学生被课程注销之前多少天发出通知。'; $string['explanation'] = '解释'; $string['extendenrol'] = '延长选课时间(个人)'; $string['extendperiod'] = '延长选课'; $string['failedloginattempts'] = '从您上次登录后共有{$a->attempts}次失败的登录尝试。'; $string['failedloginattemptsall'] = '帐号{$a->accounts}共有{$a->attempts}次失败的登录尝试。'; $string['feedback'] = '反馈'; $string['file'] = '文件'; $string['filemissing'] = '找不到文件 {$a}'; $string['files'] = '文件'; $string['filesfolders'] = '文件/文件夹'; $string['filloutallfields'] = '请填完这个表单的所有空格'; $string['filter'] = '过滤器'; $string['findmorecourses'] = '查询更多课程...'; $string['firstaccess'] = '首次访问'; $string['firstname'] = '名'; $string['firsttime'] = '第一次来访吗?'; $string['folder'] = '文件夹'; $string['folderclosed'] = '关闭文件夹'; $string['folderopened'] = '打开文件夹'; $string['followingoptional'] = '以下为可选项'; $string['followingrequired'] = '以下项目是必需的'; $string['force'] = '强制'; $string['forcedmode'] = '强制模式'; $string['forcedmodeinbrackets'] = '{$a}(强制模式)'; $string['forcelanguage'] = '指定课程语言类型'; $string['forceno'] = '不强制'; $string['forcepasswordchange'] = '强制修改密码'; $string['forcepasswordchangecheckfull'] = '您十分确定要将密码强行改为 {$a} 吗?'; $string['forcepasswordchange_help'] = '如果此复选框被勾选,那么用户在下次登录时会被要求修改密码'; $string['forcepasswordchangenot'] = '不能强制将密码改为 {$a}'; $string['forcepasswordchangenotice'] = '继续下去之前,您必须修改您的密码。'; $string['forcetheme'] = '强制使用主题'; $string['forgotaccount'] = '忘记了密码?'; $string['forgotten'] = '忘记用户名或密码了?'; $string['forgottenduplicate'] = '该邮件地址已被几个账户使用了,请键入用户名来代替。'; $string['forgotteninvalidurl'] = '无效的重置密码链接'; $string['format'] = '格式'; $string['format_help'] = '课程格式决定了课程页面的布局。 * SCORM格式 - 在课程页面的第一节显示一个SCORM包(也可以使用SCORM/AICC模块) * 社区格式 - 在课程页面显示一个讨论区 * 主题格式 - 课程页面分为若干个小节,每个小节一个主题 * 星期格式 - 课程页面分为若干个小节,每个小节代表一个星期,第一个星期开始于课程开始日期'; $string['formathtml'] = 'HTML格式'; $string['formatmarkdown'] = 'Markdown格式'; $string['formatplain'] = 
'纯文本格式';
// Moodle core language strings (zh_CN). Reformatted one statement per line for readability;
// all translations byte-identical to the original except the 'geolocation' fix noted below.
$string['formattext'] = 'Moodle自动格式';
$string['formattexttype'] = '格式';
$string['framesetinfo'] = '本框架文档包括';
$string['from'] = '开始时间';
$string['frontpagecategorycombo'] = '组合列表';
$string['frontpagecategorynames'] = '显示类别列表';
$string['frontpagecourselist'] = '显示课程列表';
$string['frontpagedescription'] = '首页说明';
$string['frontpagedescriptionhelp'] = '此网站描述将显示在首页。';
$string['frontpageformat'] = '首页格式';
$string['frontpageformatloggedin'] = '登录后的首页格式';
$string['frontpagenews'] = '显示新闻项';
$string['frontpagesettings'] = '首页设置';
$string['frontpagetopiconly'] = '主题节';
$string['fulllistofcourses'] = '所有课程';
$string['fullname'] = '全名';
$string['fullnamecourse'] = '课程全称';
$string['fullnamecourse_help'] = '课程全称会显示在课程的页面顶端和课程列表中。';
$string['fullnamedisplay'] = '{$a->lastname}{$a->firstname}';
$string['fullnameuser'] = '用户全名';
$string['fullprofile'] = '个人详细资料';
$string['fullsitename'] = '网站全称';
$string['functiondisabled'] = '那个功能现在是关闭的';
$string['gd1'] = 'GD 1.x已经安装';
$string['gd2'] = 'GD 2.x已经安装';
$string['gdneed'] = '要看到该图必须安装GD';
$string['gdnot'] = 'GD没有安装';
$string['general'] = '常规项';
$string['geolocation'] = '纬度—经度'; // Fix: was '维度' (= "dimension"); latitude is '纬度'.
$string['gettheselogs'] = '获取这些日志';
$string['go'] = '继续';
$string['gpl'] = '版权所有 (C) 1999及以后 Martin Dougiamas (http://moodle.com) 该系统是自由软件;您可以按照自由软件基金会所公布的GNU通用公共许可证(GPL)进行修改或重新发布;许可证可以是第2版或(由您决定)更新的版本。 发布该软件是希望它有用有益,但不提供任何形式的担保,不论是明确的还是隐含的,包括但不限于可销售和适合特定用途的隐含保证。 查看Moodle许可证信息页了解详情: http://docs.moodle.org/en/License';
$string['gpl3'] = '版权所有 (C) 1999及以后 Martin Dougiamas (http://moodle.com) 该系统是自由软件;您可以按照自由软件基金会所公布的GNU通用公共许可证(GPL)进行修改或重新发布;许可证可以是第3版或(由您决定)更新的版本。 发布该软件是希望它有用有益,但不提供任何形式的担保,不论是明确的还是隐含的,包括但不限于可销售和适合特定用途的隐含保证。 查看Moodle许可证信息页了解详情: http://docs.moodle.org/en/License';
$string['gpllicense'] = 'GPL许可证';
$string['grade'] = '成绩';
$string['grades'] = '成绩';
$string['gravatarenabled'] = '本站支持<a href="http://www.gravatar.com/">Gravatar</a> 。如果您不上传头像,Moodle将会尝试从您的Gravatar加载头像。';
$string['group'] = '小组';
$string['groupadd'] = '添加新组';
$string['groupaddusers'] = '添加所选的到组'; $string['groupextendenrol'] = '延长选课时间(通用)'; $string['groupfor'] = '为组'; $string['groupinfo'] = '所选组信息'; $string['groupinfoedit'] = '修改组设置'; $string['groupinfomembers'] = '所选成员信息'; $string['groupinfopeople'] = '所选人信息'; $string['groupmembers'] = '组员'; $string['groupmemberssee'] = '查看组员'; $string['groupmembersselected'] = '选定的组的成员'; $string['groupmode'] = '小组模式'; $string['groupmodeforce'] = '强制小组模式'; $string['groupmy'] = '我的组'; $string['groupnonmembers'] = '不属于某组的用户'; $string['groupnotamember'] = '很抱歉,您不是那个组的成员'; $string['grouprandomassign'] = '随机分配所有人到组'; $string['groupremove'] = '删除选定的组'; $string['groupremovemembers'] = '删除选定的成员'; $string['groups'] = '小组'; $string['groupsnone'] = '无小组'; $string['groupsseparate'] = '分隔小组'; $string['groupsvisible'] = '可视小组'; $string['guest'] = '访客'; $string['guestdescription'] = '访客拥有最小的权限,而且通常不能在任何地方输入文本。'; $string['guestskey'] = '允许有密钥的访客进入'; $string['guestsno'] = '不允许访客进入'; $string['guestsnotallowed'] = '很抱歉,“{$a}”不允许访客进入。'; $string['guestsyes'] = '允许没有密钥的访客进入'; $string['guestuser'] = '访客用户'; $string['guestuserinfo'] = '该用户是个特殊用户,允许以只读方式参观一些课程。'; $string['help'] = '帮助'; $string['helpprefix2'] = '关于“{$a}”的帮助'; $string['helpwiththis'] = '帮助'; $string['hiddenassign'] = '隐藏分配'; $string['hiddenfromstudents'] = '对学生隐藏'; $string['hiddensections'] = '隐藏小节'; $string['hiddensectionscollapsed'] = '以折叠方式显示隐藏的小节'; $string['hiddensections_help'] = '此设置决定隐藏的小节是向学生折叠显示(比如在星期格式的课程中表示假期)还是完全隐藏。'; $string['hiddensectionsinvisible'] = '完全不显示隐藏的小节'; $string['hide'] = '隐藏'; $string['hideadvancedsettings'] = '隐藏高级设置'; $string['hidepicture'] = '隐藏图片'; $string['hidesection'] = '隐藏小节{$a}'; $string['hidesettings'] = '隐藏设置'; $string['hideshowblocks'] = '隐藏或显示版块'; $string['hidetopicfromothers'] = '隐藏主题'; $string['hideweekfromothers'] = '隐藏星期'; $string['hits'] = '点击次数'; $string['hitsoncourse'] = '用户{$a->username}浏览了{$a->coursename}'; $string['hitsoncoursetoday'] = '今天用户{$a->username}浏览了{$a->coursename}'; $string['home'] = 
'首页'; $string['hour'] = '小时'; $string['hours'] = '小时'; $string['howtomakethemes'] = '如何创建新的主题风格'; $string['htmleditor'] = '使用HTML编辑器(只支持部分浏览器)'; $string['htmleditoravailable'] = 'HTML编辑器可用'; $string['htmleditordisabled'] = '您在个人资料项里关闭了HTML编辑器'; $string['htmleditordisabledadmin'] = '网站管理员已将本站的文本格式编辑器关闭了'; $string['htmleditordisabledbrowser'] = 'HTML编辑器不可用,因为您的浏览器不能支持它。'; $string['htmlfilesonly'] = '仅HTML文件'; $string['htmlformat'] = '美化的HTML格式'; $string['icon'] = '图标'; $string['icqnumber'] = 'ICQ号码'; $string['idnumber'] = '学号'; $string['idnumbercourse'] = '课程编号'; $string['idnumbercoursecategory'] = '分类ID'; $string['idnumbercoursecategory_help'] = '课程分类的ID号只用来和外部系统匹配,不会在网站的任何地方显示。如果此分类有官方编号,可以在此输入,否则可以留空。'; $string['idnumbercourse_help'] = '课程的编号用于和外部系统匹配——它从不显示于Moodle的内部。如果您的课程有一个正式的编号,可以在这里使用它,否则您可以空着它。'; $string['idnumbermod'] = 'ID号'; $string['idnumbermod_help'] = 'ID号可以在成绩计算公式中唯一标识一个活动。 如果这个活动与任何成绩计算公式无关,那么它的ID号可以为空。 ID号也可以在成绩薄中设定,不过只能在活动设置页面里编辑。'; $string['idnumbertaken'] = '该ID已使用'; $string['imagealt'] = '图片描述'; $string['import'] = '导入'; $string['importdata'] = '导入课程数据'; $string['importdataexported'] = '从“源”课程成功导出数据。<br />继续向“目标”课程导入。'; $string['importdatafinished'] = '导入完成! 
继续您的课程'; $string['importdatafrom'] = '找到一个做为导入源的课程:'; $string['inactive'] = '不活动的'; $string['include'] = '包括'; $string['includeallusers'] = '包含所有用户'; $string['includecoursefiles'] = '包含课程文件'; $string['includecourseusers'] = '包含课程用户'; $string['included'] = '被包括'; $string['includelogentries'] = '包含日志条目'; $string['includemodules'] = '包含模块'; $string['includemoduleuserdata'] = '包括模块用户数据'; $string['includeneededusers'] = '包含需要的用户'; $string['includenoneusers'] = '不包含任何用户'; $string['includeroleassignments'] = '包含角色分配'; $string['includesitefiles'] = '包含课程中使用的站点文件'; $string['includeuserfiles'] = '包含用户文件'; $string['info'] = '信息'; $string['institution'] = '机构'; $string['instudentview'] = '在学生视图中'; $string['interests'] = '兴趣'; $string['interestslist'] = '兴趣列表'; $string['interestslist_help'] = '输入您的兴趣爱好。用半角逗号分隔不同的爱好。您的爱好将在您的个人资料页面中显示为标签。'; $string['invalidemail'] = 'Email地址无效'; $string['invalidlogin'] = '登录无效,请重试'; $string['invalidusername'] = '用户名只能包含数字、小写字母、下划线(_)、横线(-)、小数点(.)或(@)'; $string['invalidusernameupload'] = '无效的用户名'; $string['ip_address'] = 'IP地址'; $string['jump'] = '跳'; $string['jumpto'] = '跳至...'; $string['keep'] = '保持'; $string['keepsearching'] = '继续选人'; $string['langltr'] = '文字书写方向从左到右'; $string['langrtl'] = '文字书写方向从右到左'; $string['language'] = '语言'; $string['languagegood'] = '该语言包是最新的! 
:-)'; $string['lastaccess'] = '上次访问'; $string['lastedited'] = '最近编辑'; $string['lastlogin'] = '最近登录'; $string['lastmodified'] = '最后修改'; $string['lastname'] = '姓'; $string['lastyear'] = '去年'; $string['latestlanguagepack'] = '从moodle.org检查有无最新的语言包'; $string['layouttable'] = '布局表'; $string['leavetokeep'] = '留空以保留现有密码'; $string['legacythemeinuse'] = '因为您的浏览器版本太老了,所以本站将在兼容模型下显示。'; $string['license'] = '许可证'; $string['licenses'] = '许可证'; $string['liketologin'] = '您希望现在以注册用户帐号登录么?'; $string['list'] = '列表'; $string['listfiles'] = '{$a} 中的文件列表'; $string['listofallpeople'] = '成员列表'; $string['listofcourses'] = '课程列表'; $string['local'] = '本地'; $string['localplugindeleteconfirm'] = '您正要完全删除本地插件“{$a}”。这将从数据库删除与它有关的所有数据。您确信要继续吗?'; $string['localplugins'] = '本地插件'; $string['localpluginsmanage'] = '管理本地插件'; $string['location'] = '来自'; $string['log_excel_date_format'] = 'yyyy mmmm d h:mm'; $string['loggedinas'] = '您以{$a}登录'; $string['loggedinasguest'] = '您正在用访客帐号访问'; $string['loggedinnot'] = '您尚未登录。'; $string['login'] = '登录'; $string['loginalready'] = '您已经登录'; $string['loginas'] = '登录为'; $string['loginaspasswordexplain'] = '<p>要使用此功能,您需要特别的“登录为密码”。<br />如果您没有,请和服务器管理员联系。</p>'; $string['login_failure_logs'] = '登录失败日志'; $string['loginguest'] = '以访客身份浏览'; $string['loginsite'] = '登录本网站'; $string['loginsteps'] = '嗨!为了能完全访问本站,您需要花费一分钟在本站建立一个新帐号。某些课程可能还设有只需输入一次的“选课密钥”,但目前还不需要。下面是步骤: <ol> <li>将您个人的详细信息填入<a href="{$a}">注册表单</a>中。</li> <li>一封Email将被立刻发往您的Email地址。</li> <li>阅读您收到的Email,点击信中包含的网络链接。</li> <li>您的帐号会得到确认,并且可以登录了。</li> <li>现在,选择您想要参与的课程。</li> <li>如果您被提示要求输入“选课密码”,请输入教师给您的密码。这样,您就选课成功了。</li> <li>您现在可以完全访问此课程了。从现在开始,您只需要输入用户名和密码(在本页的表单中),就可以登录并访问任何您已经加入的课程。</li> </ol>'; $string['loginstepsnone'] = '<p>嗨! 
</p> <p>为了完全访问课程您必须申请一个新帐号。</p> <p>您所需要做的是创建一个用户名和密码,并在本页面中使用。</p> <p>如果别人已经申请了相同的用户名,只好请您委屈一下用别的用户名重试。</p>'; $string['loginto'] = '登录到{$a}'; $string['loginusing'] = '请使用用户名和密码登录'; $string['logout'] = '退出'; $string['logoutconfirm'] = '您真要退出吗?'; $string['logs'] = '日志'; $string['logtoomanycourses'] = '[<a href="{$a->url}">更多</a>]'; $string['logtoomanyusers'] = '[<a href="{$a->url}">更多</a>]'; $string['lookback'] = '回顾'; $string['mailadmins'] = '通知管理员'; $string['mailstudents'] = '通知学生'; $string['mailteachers'] = '通知教师'; $string['makeafolder'] = '新建文件夹'; $string['makeeditable'] = '如果您设置“{$a}”为Web服务器进程(例如apache)可写,那么您可以在此页中直接编辑该文件'; $string['makethismyhome'] = '将此页设为我的缺省主页'; $string['manageblocks'] = '版块'; $string['managecourses'] = '课程管理'; $string['managedatabase'] = '管理数据库'; $string['manageeditorfiles'] = '管理已被编辑使用的文件'; $string['managefilters'] = '过滤器'; $string['managemodules'] = '模块管理'; $string['manageroles'] = '角色和权限'; $string['markedthistopic'] = '该主题作为当前主题突出显示'; $string['markthistopic'] = '标记该主题为当前主题'; $string['matchingsearchandrole'] = '与“{$a->search}”匹配的{$a->role}'; $string['maxfilesize'] = '新上传文件的最大体积:{$a}'; $string['maximumchars'] = '最多{$a}个字符'; $string['maximumgrade'] = '最高分值'; $string['maximumgradex'] = '最高分:{$a}'; $string['maximumshort'] = '最大'; $string['maximumupload'] = '最大上传文件'; $string['maximumupload_help'] = '<p align="center"><b>课程上传大小</b></p> <p>此设置定义在课程中学生可以上传的最大的文件尺寸。它受到管理员设置的全站设置的限制。</p> <p>每个活动模块里的设置也可以进一步地限制它。</p>'; $string['maxnumberweeks'] = '星期/主题的最大数目'; $string['maxnumberweeks_desc'] = '此设置控制在课程设置中“星期/主题最大数目”里出现的最大选项。'; $string['maxnumcoursesincombo'] = '浏览<a href="{$a->link}">{$a->numberofcourses}门课程</a>。'; $string['maxsize'] = '大小限制: {$a}'; $string['memberincourse'] = '课程成员'; $string['messagebody'] = '消息正文'; $string['messagedselectedusers'] = '消息已经发给选择的用户,接收者列表被重置。'; $string['messagedselectedusersfailed'] = '给选中用户发送信息时出错。一些用户可能收到了邮件。'; $string['messageprovider:backup'] = '备份通知'; $string['messageprovider:courserequestapproved'] = 
'课程申请被批准通知'; $string['messageprovider:courserequested'] = '课程创建请求通知'; $string['messageprovider:courserequestrejected'] = '课程申请被拒绝通知'; $string['messageprovider:errors'] = '本站的严重错误'; $string['messageprovider:errors_help'] = '有管理员必须知晓的严重的错误。'; $string['messageprovider:instantmessage'] = '用户间的私人信息'; $string['messageprovider:instantmessage_help'] = '此小节配置在收到本站其他用户直接发给您的消息后,该如何处理。'; $string['messageprovider:notices'] = '关于某些小问题的通知'; $string['messageprovider:notices_help'] = '有一些管理员可能会感兴趣的通知'; $string['messageselect'] = '选择此用户作为消息接收者'; $string['messageselectadd'] = '发消息'; $string['migratinggrades'] = '移动成绩'; $string['min'] = '分钟'; $string['mins'] = '分钟'; $string['minute'] = '分'; $string['minutes'] = '分钟'; $string['miscellaneous'] = '其他'; $string['missingcategory'] = '您必需选择一个分类'; $string['missingcity'] = '市/县没填'; $string['missingcountry'] = '国家和地区没填'; $string['missingdescription'] = '自述没填'; $string['missingemail'] = 'Email地址没填'; $string['missingfirstname'] = '名没填'; $string['missingfromdisk'] = '磁盘中缺少'; $string['missingfullname'] = '全称没填'; $string['missinglastname'] = '姓没填'; $string['missingname'] = '名没填'; $string['missingnewpassword'] = '新密码没填'; $string['missingpassword'] = '密码没填'; $string['missingrecaptchachallengefield'] = '缺少reCAPTCHA验证字段'; $string['missingreqreason'] = '原因没填'; $string['missingshortname'] = '简称没填'; $string['missingshortsitename'] = '站名简称没填'; $string['missingsitedescription'] = '站点描述没填'; $string['missingsitename'] = '站名没填'; $string['missingstrings'] = '检查未翻译的字串'; $string['missingstudent'] = '必须选个东西'; $string['missingsummary'] = '概况没填'; $string['missingteacher'] = '必须选个东西'; $string['missingurl'] = '遗漏了URL'; $string['missingusername'] = '用户名没填'; $string['modified'] = '修改时间'; $string['moduledeleteconfirm'] = '您将要完全删除模块“{$a}”。这将完全删除数据库中与该活动模块有关的所有资料。您确定要继续下去吗?'; $string['moduledeletefiles'] = '所有与模块“{$a->module}”有关的资料已经从数据库中删除。要彻底删除(防止模块自己又重新安装),您还须在您的服务器上删除以下目录: {$a->directory}'; $string['moduleintro'] = '描述'; $string['modulesetup'] = '安装模组表'; 
$string['modulesuccess'] = '{$a} 表已经正确安装'; $string['month'] = '月'; $string['months'] = '月'; $string['moodledocs'] = 'Moodle文档'; $string['moodledocslink'] = '此页的Moodle文档'; $string['moodlerelease'] = 'Moodle发行版'; $string['moodleversion'] = 'Moodle版本'; $string['more'] = '更多'; $string['morehelp'] = '更多帮助'; $string['moreinformation'] = '关于这个错误的更多信息'; $string['moreprofileinfoneeded'] = '请告诉我们关于您的更多信息'; $string['mostrecently'] = '最近'; $string['move'] = '移动'; $string['movecategorycontentto'] = '移至'; $string['movecategoryto'] = '移动类别到:'; $string['movecontentstoanothercategory'] = '移动内容到另一个分类'; $string['movecourseto'] = '移动课程到:'; $string['movedown'] = '下移'; $string['movefilestohere'] = '移动文件到这里'; $string['movefull'] = '移动{$a}到这个位置'; $string['movehere'] = '移到这里'; $string['moveleft'] = '左移'; $string['moveright'] = '右移'; $string['movesection'] = '移动小节{$a}'; $string['moveselectedcoursesto'] = '移动选定的课程到'; $string['movetoanotherfolder'] = '移动至另一文件夹'; $string['moveup'] = '上移'; $string['msnid'] = 'MSN号码'; $string['mustchangepassword'] = '新的密码必须和当前的不同'; $string['mustconfirm'] = '您必须确认一下您的登录'; $string['mycourses'] = '我的课程'; $string['myfiles'] = '我的私人文件'; $string['myfilesmanage'] = '管理我的私人文件'; $string['myhome'] = '我的主页'; $string['mymoodledashboard'] = '我的Moodle控制台'; $string['myprofile'] = '个人资料'; $string['name'] = '名称'; $string['navigation'] = '导航'; $string['needed'] = '需要的'; $string['never'] = '从未'; $string['neverdeletelogs'] = '从不删除日志'; $string['new'] = '新'; $string['newaccount'] = '新帐号'; $string['newcourse'] = '新课程'; $string['newpassword'] = '新密码'; $string['newpasswordfromlost'] = '<strong>注意:</strong>您的<strong>当前密码</strong> 将在两封email的<strong>第二</strong>封里发送给您。这是恢复丢失密码流程的一部分。在从此屏幕继续之前,请确认您收到了新的密码。'; $string['newpassword_help'] = '输入一个新密码或留空以保留当前密码。'; $string['newpasswordtext'] = '{$a->firstname},您好: 您在“{$a->sitename}”上的帐号密码已被重设为一个新生成的临时密码。 现在您的登录信息是: 用户名:{$a->username} 密码:{$a->newpassword} 请到这里更改您的密码: {$a->link} 
在多数的邮件程序中,这个地址应该显示为蓝色的链接,可以直接点击。如果不是这样,请将以上地址复制粘贴到网页浏览器窗口上部的地址栏中,打开这个网页。 此致 敬礼! {$a->signoff}'; $string['newpicture'] = '新照片'; $string['newpicture_help'] = '要添加新头像,先浏览和选择一个JPG或PNG格式的图片,然后点击“更新个人档案”。图片会被剪裁为正方形,尺寸修改为100x100像素。'; $string['newsitem'] = '新闻条目'; $string['newsitems'] = '新闻条目'; $string['newsitemsnumber'] = '显示的新闻条目数'; $string['newsitemsnumber_help'] = '<p align="center"><b>课程新闻项</b></p> <p>一个叫做“新闻”的特殊讨论区会出现在“星期”和“主题”式的课程中。这是一个很适合用来向所有学生发布通知的地方。(缺省情况,所有学生都订阅此论坛,并会通过email收到您的通知。)</p> <p>此设置决定在课程主页右边的新闻框里显示几个最新的新闻条目。</p> <p>如果您设置其为“0新闻条目”,那么新闻框根本不会被显示。</p>'; $string['newuser'] = '新用户'; $string['newusernewpasswordsubj'] = '新用户账号'; $string['newusernewpasswordtext'] = '{$a->firstname},您好, 已经为您在“{$a->sitename}”创建了一个新账号,并同时为您生成了一个临时密码。 您现在的登录信息是: 用户名:{$a->username} 密码:{$a->newpassword} (在首次登录时您会被要求更改密码) 请点击 {$a->link} 并登录,开始使用“{$a->sitename}”。 在大多数邮件程序中,上面的链接将呈现为蓝色可点击的链接。如果链接不能点击,请复制它,然后粘贴到浏览器上方的地址栏内。 祝您使用愉快! “{$a->sitename}”管理员 {$a->signoff}'; $string['newusers'] = '新用户'; $string['newwindow'] = '新窗口'; $string['next'] = '向后'; $string['nextsection'] = '下一步'; $string['no'] = '否'; $string['noblockstoaddhere'] = '没有可以添加到此页的版块'; $string['nobody'] = '没人'; $string['nochange'] = '无变化'; $string['nocomments'] = '无评论'; $string['nocourses'] = '没有课程'; $string['nocoursesfound'] = '没有含关键词“{$a}”的课程'; $string['nocoursesyet'] = '此类中无课程'; $string['nodstpresets'] = '管理员未开启本站的夏时制功能'; $string['nofilesselected'] = '没有文件被选中用来恢复'; $string['nofilesyet'] = '您的课程尚未上传任何文件'; $string['nograde'] = '没有分数'; $string['noimagesyet'] = '您的课程尚未上传任何图片'; $string['nologsfound'] = '未发现日志'; $string['nomatchingusers'] = '没有用户与“{$a}”匹配'; $string['nomorecourses'] = '没有匹配的课程'; $string['nomoreidnumber'] = '并未使用ID号避免冲突'; $string['none'] = '无'; $string['noneditingteacher'] = '无编辑权教师'; $string['noneditingteacherdescription'] = '无编辑权教师可以教课和给学生打分,但是可能不能修改活动。'; $string['nonstandard'] = '非标准'; $string['nopendingcourses'] = '没有待审批课程'; $string['nopotentialadmins'] = '没有候选网站管理员'; $string['nopotentialcreators'] = 
'没有候选课程管理员';
// Moodle core language strings (zh_CN). Reformatted one statement per line for readability;
// all translations byte-identical to the original except the 'numberofcourses' fix noted below.
$string['nopotentialstudents'] = '没有候选的学生';
$string['nopotentialteachers'] = '没有候选教师';
$string['norecentactivity'] = '最近没有活动';
$string['noreplybouncemessage'] = '您把回复信发到了一个”不可回复“的email地址上。如果您想要回复论坛中的帖子,请使用讨论区{$a}来。 下面是您信件的内容:';
$string['noreplybouncesubject'] = '{$a}-退信。';
$string['noreplyname'] = '不要回复此邮件';
$string['noresults'] = '没有结果';
$string['normal'] = '正常';
$string['normalfilter'] = '常规过滤器';
$string['nosite'] = '找不到网站级别的课程';
$string['nostudentsfound'] = '未找到{$a}';
$string['nostudentsingroup'] = '在这个组中尚未有学生';
$string['nostudentsyet'] = '尚无学生选修此课程';
$string['nosuchemail'] = '没有这样的email地址';
$string['notavailable'] = '不可用';
$string['noteachersyet'] = '此课程尚无教师';
$string['notenrolled'] = '{$a}没有选修该课程。';
$string['notenrolledprofile'] = '此资料无效,因为该用户没有注册此课程。';
$string['noteusercannotrolldatesoncontext'] = '<strong>注意:</strong>因为您缺少必需的权限,在恢复备份时不能回滚日期。';
$string['noteuserschangednonetocourse'] = '<strong>注意:</strong>恢复用户数据(活动中的数据、文件或消息)时用户资料也要恢复。此项设置已经替您更改。';
$string['nothingnew'] = '自从您上次登录以来尚无新东西';
$string['nothingtodisplay'] = '没有可显示内容';
$string['notice'] = '注意';
$string['noticenewerbackup'] = '这个备份文件是使用Moodle {$a->backuprelease}({$a->backupversion})创建的,它比您正在使用的Moodle {$a->serverrelease}({$a->serverversion})新。由于备份文件的向后兼容无法保证,这可能会导致恢复的内容和备份时不一致。';
$string['notifications'] = '通告';
$string['notifyloginfailuresmessage'] = '{$a->time}, IP: {$a->ip}, 用户: {$a->info}';
$string['notifyloginfailuresmessageend'] = '您可以在 {$a}/report/log/index.php?id=1&chooselog=1&modid=site_errors 查看日志。';
$string['notifyloginfailuresmessagestart'] = '这是自您上次于{$a}登录后的失败登录尝试';
$string['notifyloginfailuressubject'] = '{$a}::失败登录通知';
$string['notincluded'] = '不包括的';
$string['notingroup'] = '很抱歉,只有组员可查看该活动。';
$string['notpublic'] = '非公开';
$string['nousersfound'] = '未找到用户';
$string['nousersmatching'] = '没找到与“{$a}”相匹配的用户';
$string['nousersyet'] = '尚无用户';
$string['novalidcourses'] = '没有可利用的课程';
$string['now'] = '现在';
$string['numattempts'] = '{$a}次失败登录';
$string['numberofcourses'] = '课程数'; // Fix: was '课程编号' ("course ID number"), which duplicates the 'idnumbercourse' translation; English source is "Number of courses".
$string['numberweeks'] = '星期/主题的数目'; $string['numdays'] = '{$a}天'; $string['numhours'] = '{$a}小时'; $string['numletters'] = '{$a}字'; $string['numminutes'] = '{$a}分钟'; $string['nummonths'] = '{$a}个月'; $string['numseconds'] = '{$a}秒'; $string['numviews'] = '阅读{$a}次'; $string['numweeks'] = '{$a}星期'; $string['numwords'] = '{$a}单词'; $string['numyears'] = '{$a}年'; $string['ok'] = '确认'; $string['oldpassword'] = '当前密码'; $string['olduserdirectory'] = '这是OLD用户目录,现在不再需要。您可以安全的删除它,该文件已经包含在新的用户目录中。'; $string['opentoguests'] = '是否允许访客进入课程'; $string['optional'] = '可选的'; $string['options'] = '选项'; $string['order'] = '次序'; $string['orphanedactivities'] = '无主活动'; $string['other'] = '其它'; $string['outline'] = '概要'; $string['outlinereport'] = '概要报表'; $string['page'] = '页'; $string['pageheaderconfigablock'] = '配置“%fullname%”中的版块'; $string['pagepath'] = '页面路径'; $string['pageshouldredirect'] = '本页会被自动重定向。如果什么都没发生,请点击下面的“继续”链接。'; $string['parentcategory'] = '父类别'; $string['parentcoursenotfound'] = '父课程未找到!'; $string['parentfolder'] = '父文件夹'; $string['participants'] = '成员'; $string['participantslist'] = '成员列表'; $string['participationratio'] = '参与比例'; $string['participationreport'] = '参与报表'; $string['password'] = '密码'; $string['passwordchanged'] = '密码已被更改'; $string['passwordconfirmchange'] = '确认密码更改'; $string['passwordextlink'] = '下面的链接提供恢复您忘记的密码。您将会退出Moodle.'; $string['passwordforgotten'] = '忘记了密码'; $string['passwordforgotteninstructions'] = '必须先在用户数据库中找到您的详细信息。请将您的用户名<strong>或</strong>注册时使用的email地址输入到相应的文本框中。两者不用都输入。'; $string['passwordforgotteninstructions2'] = '想重置密码,请在下面提交您的用户名或email地址。如果数据库中有关于您的记录,我们会向您的email地址发送一封邮件。邮件会讲解如何重新获得访问权。'; $string['passwordnohelp'] = '无法找到您丢失的密码。请联系您的moodle管理员'; $string['passwordrecovery'] = '是的,请帮我登录'; $string['passwordsdiffer'] = '这些密码不对'; $string['passwordsent'] = '密码已被发送'; $string['passwordsenttext'] = '<p>一封Email已经发往您的邮箱{$a->email}。</p> <p><b>请查收您的Email以获取密码。</b></p> <p>这个新密码是自动生成的,所以您可能需要点击<a href="{$a->link}">这里</a>改成您自己容易记住的密码。</p>'; 
$string['path'] = '路径'; $string['pathnotexists'] = '该路径在您的服务器上并不存在!'; $string['pathslasherror'] = '路径不能以斜线符号结束!!'; $string['paymentinstant'] = '点击下面的按钮便可以快速付费并加入课程!'; $string['paymentpending'] = '(<small><b><u>{$a}</u></b> pending</small>)'; $string['paymentrequired'] = '此课程需要付费才能进入。'; $string['payments'] = '付费'; $string['paymentsorry'] = '谢谢您付费! 您的付费手续尚未处理完毕,您还不能注册进入课程“{$a->fullname}”。请过几分钟后再试,但如果您始终有问题,请告知{$a->teacher}或站点管理员。'; $string['paymentthanks'] = '谢谢您付费! 现在您已经可以加入如下课程了: <br />“{$a}”'; $string['pendingrequests'] = '待批申请'; $string['periodending'] = '结束时间({$a})'; $string['personal'] = '个人'; $string['personalprofile'] = '个人信息'; $string['phone'] = '电话'; $string['phone2'] = '手机'; $string['phpinfo'] = 'PHP 信息'; $string['pictureof'] = '{$a}的头像'; $string['pictureofuser'] = '用户头像'; $string['pleaseclose'] = '现在,请关闭此窗口。'; $string['pleasesearchmore'] = '请多搜一些关键词'; $string['pleaseusesearch'] = '请使用搜索功能'; $string['plugin'] = '插件'; $string['plugincheck'] = '检查插件'; $string['plugindeletefiles'] = '与插件“{$a->name}”相关的所有数据都已经从数据库删除。为了阻止插件被重新安装,您现在就应该从服务器删除此目录:{$a->directory} '; $string['pluginsetup'] = '创建插件表格'; $string['policyaccept'] = '我了解和同意了'; $string['policyagree'] = '如要继续使用此站,您必须同意此协议。您同意么?'; $string['policyagreement'] = '网站使用协议'; $string['policyagreementclick'] = '网站政策协议的链接'; $string['popup'] = '弹出'; $string['popupwindow'] = '在新窗口中打开文件'; $string['popupwindowname'] = '弹出窗口'; $string['post'] = '发布'; $string['posts'] = '发布'; $string['potentialadmins'] = '候选网站管理员'; $string['potentialcreators'] = '候选课程管理员'; $string['potentialstudents'] = '候选学生'; $string['potentialteachers'] = '候选教师'; $string['preferences'] = '使用偏好'; $string['preferredlanguage'] = '偏爱的语言'; $string['preferredtheme'] = '偏爱的主题风格'; $string['preprocessingbackupfile'] = '预处理备份文件'; $string['preview'] = '预览'; $string['previewhtml'] = 'HTML格式预览'; $string['previeworchoose'] = '预览或选取一个主题风格'; $string['previous'] = '向前'; $string['previouslyselectedusers'] = '之前选择的用户不符合“{$a}”'; $string['previoussection'] = '上一步'; 
$string['primaryadminsetup'] = '设置管理员帐号'; $string['profile'] = '信息概要'; $string['profilenotshown'] = '选了至少一门课以后,才能显示此人的个人描述。'; $string['publicprofile'] = '公开的个人资料'; $string['publicsitefileswarning'] = '注意:存放于此的文件任何人都能访问'; $string['publicsitefileswarning2'] = '注意:这里的文件可以被任何知道(或猜到)URL的人访问,建议将所有备份文件在恢复后立即删除。'; $string['publicsitefileswarning3'] = '注意:放在这里的文件可以被任何知道(或猜到)URL的人访问。<br />出于安全考虑,备份文件应该只保存到安全的backupdate文件夹。'; $string['publish'] = '发布'; $string['question'] = '试题'; $string['questionsinthequestionbank'] = '题库中的题目'; $string['readinginfofrombackup'] = '读取备份信息'; $string['readme'] = '说明'; $string['recentactivity'] = '最近活动'; $string['recentactivityreport'] = '最近活动的完整报表...'; $string['recipientslist'] = '收件人名单'; $string['recreatedcategory'] = '重创建的类{$a}'; $string['redirect'] = '重定向'; $string['refresh'] = '刷新'; $string['refreshingevents'] = '刷新事件'; $string['registration'] = 'Moodle注册'; $string['registrationcontact'] = '允许从公众网络访问'; $string['registrationcontactno'] = '不,我不希望在网站列表中显示联系表格'; $string['registrationcontactyes'] = '是,提供一个表格给预期的Moodle用户以方便他联系我'; $string['registrationemail'] = 'Email通知'; $string['registrationinfo'] = '<p>您可以在本页面中到moodle.org注册您的站。这个过程是免费的。注册的主要好处是您会被加入一个mailing list,它只发送一些重要的提醒信息,比如安全警告和Moodle的版本更新。</P> <p>缺省情况,您的信息是被保密的,永远不会被出售或转让给它人。收集信息的目的仅仅是出于客户服务的目的,同时也用来建一个Moodle社区的统计图。</p> <p>如果您愿意,那么可以把您的站名、国家和URL放到Moodle官方网站的公用列表中。</p> <p>所有的新注册在加入列表之前都会被人工验证。不过一旦您被加入列表中,那么您可以随时通过提交这个表单来更新您的注册信息(公用列表也会更新)。</p>'; $string['registrationinfotitle'] = '注册信息'; $string['registrationno'] = '不,我不想收Email'; $string['registrationsend'] = '发送注册信息到moodle.org'; $string['registrationyes'] = '是的,有重要事情请通知我'; $string['reject'] = '拒绝'; $string['rejectdots'] = '拒绝...'; $string['reload'] = '重新载入'; $string['remoteappuser'] = '远程{$a}用户'; $string['remove'] = '免除'; $string['removeadmin'] = '免除网站管理员'; $string['removecreator'] = '免除课程管理员'; $string['removestudent'] = '开除学生'; $string['removeteacher'] = '免除教师'; $string['rename'] = '重命名'; $string['renamefileto'] = '将<b>{$a}</b>重命名为'; 
$string['report'] = '报表'; $string['reports'] = '报表'; $string['repositories'] = '容器'; $string['requestcourse'] = '申请新建一门课程'; $string['requestedby'] = '申请人'; $string['requestedcourses'] = '请求的课程'; $string['requestreason'] = '申请理由'; $string['required'] = '必需的'; $string['requirespayment'] = '此课程需要付费才能访问'; $string['reset'] = '重置'; $string['resetcomponent'] = '组件'; $string['resetcourse'] = '重置课程'; $string['resetinfo'] = '在此页面,您可以清空课程的用户数据,并保留所有活动和其它设置。但请注意,选择下面的选项并提交本页面后,将永远从课程删除您所选择的用户数据!'; $string['resetnotimplemented'] = '未实现重置功能的模块'; $string['resetstartdate'] = '重置开始时间'; $string['resetstatus'] = '状态'; $string['resettask'] = '任务'; $string['resettodefaults'] = '重置为缺省'; $string['resortcoursesbyname'] = '按名称对课程重新排序'; $string['resource'] = '资源'; $string['resourcedisplayauto'] = '自动'; $string['resourcedisplaydownload'] = '强制下载'; $string['resourcedisplayembed'] = '嵌入'; $string['resourcedisplayframe'] = '在框架中'; $string['resourcedisplaynew'] = '新窗口'; $string['resourcedisplayopen'] = '打开'; $string['resourcedisplaypopup'] = '在弹出窗口中'; $string['resources'] = '资源库'; $string['resources_help'] = '利用资源类型,几乎可以把任意类型的web资源插入到课程中'; $string['restore'] = '恢复'; $string['restorecancelled'] = '恢复被取消'; $string['restorecannotassignroles'] = '恢复课程的过程需要分配角色,但是您没有相应的权限'; $string['restorecannotcreateorassignroles'] = '恢复课程的过程需要新建或分配角色,但是您没有相应的权限'; $string['restorecannotcreateuser'] = '恢复过程需要从备份文件创建用户\'{$a}\',但是您没有相应的权限'; $string['restorecannotoverrideperms'] = '恢复课程的过程需要覆盖角色,但是您没有相应的权限'; $string['restorecoursenow'] = '恢复课程'; $string['restoredaccount'] = '已恢复帐号'; $string['restoredaccountinfo'] = '此帐号系从其它服务器导入,密码已丢失。要通过email设置新密码,请点击“继续”'; $string['restorefinished'] = '恢复成功'; $string['restoreto'] = '恢复到'; $string['restoretositeadding'] = '警告:您正要恢复站点的首页,向它添加数据!'; $string['restoretositedeleting'] = '警告:您正要恢复站点的首页,并且会先删除数据!'; $string['restoreuserconflict'] = '从备份文件恢复用户\'{$a}\'会引起冲突'; $string['restoreuserinfofailed'] = '恢复过程已经终止,因为您没有恢复用户数据的权限。'; $string['restoreusersprecheck'] = '检查用户数据'; 
$string['restoreusersprecheckerror'] = '检查用户数据时发现一些问题'; $string['restricted'] = '受限的'; $string['restrictmodules'] = '限制活动模块?'; $string['returningtosite'] = '再次访问本站?'; $string['returntooriginaluser'] = '返回到{$a}'; $string['revert'] = '重置'; $string['role'] = '角色'; $string['rolemappings'] = '角色映射'; $string['rolerenaming'] = '重命名角色'; $string['rolerenaming_help'] = '这项设置允许修改角色在课程中显示的名称。只有显示的名称可被修改,角色的权限不会受到影响。 新的角色名称会显示在课程成员页面上和课程内的其他地方。 如果这个重命名了的角色已经被网站管理员指定为课程管理者,这个新的角色名称也会作为课程列表的一部分显示出来。'; $string['roles'] = '角色'; $string['rss'] = 'RSS'; $string['rssarticles'] = 'RSS最新文章数目'; $string['rsserror'] = '读取RSS数据错误'; $string['rsserrorauth'] = '您的RSS链接不包含有效的认证标记'; $string['rsserrorguest'] = '这个种子使用访客身份访问数据,但是访客没有阅读数据的权限。请以一个有效的用户访问这个种子的原始地址(URL),并取得一个新的 RSS 链接。'; $string['rsskeyshelp'] = '为保证安全和私密,RSS种子的URL中包含一个特殊的令牌,用来标明它们属于哪个用户。这可以阻止其他用户从RSS种子获取他们不应该访问到的信息。</p><p>在您第一次访问会生成RSS种子的Moodle页面时,此令牌会自动创建。如果您担心您的RSS种子令牌已经被泄露,您可以点击这里的重置链接再请求一个新。但请注意,您当前的RSS种子URL将会失效。'; $string['rsstype'] = '此活动的RSS种子'; $string['saveandnext'] = '保存并显示下一个'; $string['savechanges'] = '保存更改'; $string['savechangesanddisplay'] = '保存并预览'; $string['savechangesandreturntocourse'] = '保存并返回课程'; $string['savecomment'] = '保存评论'; $string['savedat'] = '保存在:'; $string['savepreferences'] = '保存使用偏好'; $string['saveto'] = '保存到'; $string['scale'] = '等级'; $string['scale_help'] = '<p align="center"><b>等级</b></p> <p>教师可以在课程中为任意可评分的活动新建自定义的等级。</p> <p>等级的名字应是一个可以清晰地分辨它的短语:它将出现在等级选择列表,也会出现在即时帮助按钮上。</p> <p>等级由一列按由负到正的次序排好的值定义,用逗号分隔。例如:</p> <blockquote><i> 令人失望,不够好,平均水平,好,很好,优秀! 
</i></blockquote> <p>等级应该也包含一个良好的描述来说明它是什么和应该如何被使用。此描述会出现在教师与学生的帮助页内。</p> <p>最后,这里有一个或多个“标准”等级由系统管理员在网站上定义。它们可以在所有的课程里使用。</p>'; $string['scales'] = '等级'; $string['scalescustom'] = '自定义等级'; $string['scalescustomcreate'] = '添加等级'; $string['scalescustomno'] = '未创建自定义等级'; $string['scalesstandard'] = '标准等级'; $string['scalestandard'] = '标准等级'; $string['scalestandard_help'] = '标准等级在全站的所有课程中都可用。'; $string['scalestip'] = '使用管理菜单的“等级...”链接创建自定义等级'; $string['scalestip2'] = '要建立自定义等级,请点击课程管理菜单里的“成绩”链接,然后选择编辑、等级。'; $string['schedule'] = '时间表'; $string['screenreaderno'] = '否'; $string['screenreaderuse'] = '屏幕阅读器'; $string['screenreaderuse_help'] = '如果设为是,很多地方(比如聊天)的界面提供更易于用户访问方式(如视碍人士)。'; $string['screenreaderyes'] = '是'; $string['screenshot'] = '抓屏'; $string['search'] = '搜索'; $string['searchagain'] = '再次搜索'; $string['searchbyemail'] = '用email地址搜索'; $string['searchbyusername'] = '用用户名搜索'; $string['searchcourses'] = '搜索课程'; $string['searchhelp'] = '您可以使用多关键字搜索。<br />word: 搜索任何包含该关键字的内容。<br />+word: 严格匹配。<br />-word: 结果中不含该关键词。'; $string['search_help'] = '基本搜索可以在文本中查找一个或多个词。词之间用空格分隔。只有一个字母的单词会被忽略。 如要进行高级搜索,直接点击搜索按钮,搜索框中什么都不要输入,便可访问高级搜索表单。'; $string['searchoptions'] = '搜索选项'; $string['searchresults'] = '搜索结果'; $string['sec'] = '秒'; $string['seconds'] = '秒'; $string['secondstotime172800'] = '2天'; $string['secondstotime259200'] = '3天'; $string['secondstotime345600'] = '4天'; $string['secondstotime432000'] = '5天'; $string['secondstotime518400'] = '6天'; $string['secondstotime604800'] = '1周'; $string['secondstotime86400'] = '1天'; $string['secretalreadyused'] = '更改密码确认链接已经使用过,密码不能被更改。'; $string['secs'] = '秒'; $string['section'] = '小节'; $string['sectionname'] = '小节名'; $string['sections'] = '小节'; $string['sectionusedefaultname'] = '使用缺省的小节名'; $string['seealsostats'] = '请参阅:统计'; $string['selctauser'] = '选择一个用户'; $string['select'] = '选择'; $string['selectacountry'] = '选择一个国家或地区'; $string['selectacourse'] = '选择一门课程'; $string['selectacoursesite'] = '选择一个课程或网站'; $string['selectagroup'] = 
'选择一个小组'; $string['selectall'] = '全选'; $string['selectamodule'] = '请选择一个活动模块'; $string['selectanaction'] = '选择一个动作'; $string['selectanoptions'] = '选择一个选项'; $string['selectaregion'] = '选择一个地区'; $string['selectdefault'] = '选择默认值'; $string['selectedfile'] = '已选文件'; $string['selectednowmove'] = '{$a}个文件要被移动位置。现在请前往目的地址点击“移动文件到这里”'; $string['selectfiles'] = '选择文件'; $string['selectnos'] = '都选“不”'; $string['selectperiod'] = '选择时间'; $string['senddetails'] = '用Email发送我的详细资料'; $string['separate'] = '分离'; $string['separateandconnected'] = '独立型和情景型学习方式'; $string['separateandconnectedinfo'] = '此评级基于独立型和情景型认知理论。此理论描述了我们了解和评价所闻所见事物的两种不同方法。<ul><li><strong>独立型的人</strong>总是尽量保持客观,避免受感觉和情绪的影响。在与其他人讨论时,他们喜欢维护自己的观点,用逻辑找出对手观点中的漏洞。</li><li><strong>情景型的人</strong>比其他人更敏感。 他们善于换位思考,喜欢倾听和提问,直到他们觉得自己能从别人的角度理解事物。他们通过分享自己的经验,来从他人获得知识。</li></ul>'; $string['serverlocaltime'] = '服务器的当地时间'; $string['setcategorytheme'] = '设置类别主题风格'; $string['settings'] = '设置'; $string['shortname'] = '简称'; $string['shortnamecollisionwarning'] = '[*]=这个简称已经被某个课程所使用,需要改变它以获得批准。'; $string['shortnamecourse'] = '课程简称'; $string['shortnamecourse_help'] = '课程简称将在导航中显示,并且会被用在课程email的标题中。'; $string['shortnametaken'] = '这个简称已用于另一个课程({$a})'; $string['shortnameuser'] = '用户简称'; $string['shortsitename'] = '网站简称'; $string['show'] = '显示'; $string['showactions'] = '显示动作'; $string['showadvancedsettings'] = '显示高级设置'; $string['showall'] = '显示所有({$a})'; $string['showallcourses'] = '显示所有课程'; $string['showalltopics'] = '显示所有主题'; $string['showallusers'] = '显示所有用户'; $string['showallweeks'] = '显示所有周安排'; $string['showblockcourse'] = '显示包含版块的课程列表'; $string['showcomments'] = '显示/隐藏评论'; $string['showcommentsnonjs'] = '显示评论'; $string['showdescription'] = '在课程页面显示简介'; $string['showdescription_help'] = '如果启用,上面的简介信息会显示在课程页面中,紧挨着到活动/资源的链接。'; $string['showgrades'] = '向学生显示成绩单'; $string['showgrades_help'] = '<p align="center"><b>成绩</b></p> <p>很多活动都允许打分。</p> <p>缺省情况,课程中的所有有成绩的活动的结果都可以在“成绩”页看到。该页可从课程主页访问。</p> 
<p>如果教师并不想在课程中使用成绩,或者想向学生隐藏成绩,他可以在课程设置里禁止显示成绩。这只会使结果不向学生显示,但不影响再活动中使用或设置成绩。</p>'; $string['showlistofcourses'] = '显示课程列表'; $string['showmodulecourse'] = '显示包含活动的课程列表'; $string['showonly'] = '只显示'; $string['showonlytopic'] = '只显示第{$a}个主题'; $string['showonlyweek'] = '只显示第{$a}周安排'; $string['showperpage'] = '每页显示{$a}'; $string['showrecent'] = '显示最近动态'; $string['showreports'] = '是否显示活动报表'; $string['showreports_help'] = '<p align="center"><b>活动报表</b></p> <p>每个参与者都有一个活动报表,可以显示他们再次课程中的活动。除了列出他们的贡献之外,这些报表还包括细致的访问日志。</p> <p>教师总可以访问这些报表。在每个用户的个人资料页面可以看到访问的按钮。</p> <p>学生对自己的报表的访问权限由教师在课程设置里控制。对一些课程来说,这些报表是体现学生在在线环境里的表现的好工具,但对另外一些课程来说,这可能并不必要。</p> <p>关闭它的另一个原因是报表在生成时会给服务器带来一点负荷。对于较大或时间较长的课程,可能关闭它效率能更高一些。<p>'; $string['showsettings'] = '显示设置'; $string['showtheselogs'] = '显示这些日志'; $string['showthishelpinlanguage'] = '显示本帮助的{$a}版本'; $string['showtopicfromothers'] = '显示主题'; $string['showweekfromothers'] = '显示星期'; $string['since'] = '开始时间'; $string['sincelast'] = '最后登录'; $string['site'] = '网站'; $string['sitedefault'] = '网站缺省'; $string['siteerrors'] = '本站错误'; $string['sitefiles'] = '本站文件'; $string['sitefilesused'] = '在该课程中使用的站点文件'; $string['sitehome'] = '网站首页'; $string['sitelegacyfiles'] = '继承的网站文件'; $string['sitelogs'] = '本站日志'; $string['sitenews'] = '本站新闻'; $string['sitepages'] = '网站页面'; $string['sitepartlist'] = '您无权查看参加者名单'; $string['sitepartlist0'] = '只允许本站教师查看到站点成员名单'; $string['sitepartlist1'] = '只允许教师查看站点成员名单'; $string['sites'] = '网站'; $string['sitesection'] = '包含一个主题节'; $string['sitesettings'] = '本站设置'; $string['siteteachers'] = '本站教师'; $string['size'] = '大小'; $string['sizeb'] = '字节'; $string['sizegb'] = 'GB'; $string['sizekb'] = 'KB'; $string['sizemb'] = 'MB'; $string['skipped'] = '跳过'; $string['skypeid'] = 'Skype号码'; $string['socialheadline'] = '社区论坛――最新话题'; $string['someallowguest'] = '有些课程可能允许访客浏览'; $string['someerrorswerefound'] = '可能有些信息缺失或有误。详细情况请看下面。'; $string['sortby'] = '排序'; $string['sortbyx'] = '按{$a}增序排序'; $string['sortbyxreverse'] = '按{$a}降序排序'; 
$string['sourcerole'] = '源角色'; $string['specifyname'] = '您必须指定一个名称'; $string['standard'] = '标准'; $string['starpending'] = '([*] = 待审批的课程)'; $string['startdate'] = '课程开始时间'; $string['startdate_help'] = '此设置决定星期格式的课程的开始日期是哪个星期。它也决定了最早开始记录课程活动日志的时间。'; $string['startingfrom'] = '开始时间'; $string['startsignup'] = '注册新帐号'; $string['state'] = '省/直辖市'; $string['statistics'] = '统计'; $string['statisticsgraph'] = '统计图'; $string['stats'] = '统计'; $string['statslogins'] = '登录'; $string['statsmodedetailed'] = '详细(用户)信息'; $string['statsmodegeneral'] = '一般信息'; $string['statsnodata'] = '课程和学时没有提供可使用的数据。'; $string['statsnodatauser'] = '课程,用户和学时没有提供可使用的数据。'; $string['statsoff'] = '当前统计没有激活。'; $string['statsreads'] = '浏览'; $string['statsreport1'] = '登录'; $string['statsreport10'] = '活动用户'; $string['statsreport11'] = '最活跃课程'; $string['statsreport12'] = '最活跃课程(加权)'; $string['statsreport13'] = '最具参与性课程(学生)'; $string['statsreport14'] = '最具参与性课程(浏览/发布)'; $string['statsreport2'] = '浏览(全部角色)'; $string['statsreport3'] = '发布(全部角色)'; $string['statsreport4'] = '所有活动(全部角色)'; $string['statsreport5'] = '所有活动(浏览和发布)'; $string['statsreport7'] = '用户活动(浏览和发布)'; $string['statsreport8'] = '所有用户活动'; $string['statsreport9'] = '登录(网站课程)'; $string['statsreportactivity'] = '所有活动(全部角色)'; $string['statsreportactivitybyrole'] = '所有活动(浏览和发布)'; $string['statsreportforuser'] = '为'; $string['statsreportlogins'] = '登录'; $string['statsreportreads'] = '浏览(所有角色)'; $string['statsreporttype'] = '报表类型'; $string['statsreportwrites'] = '发布(所有角色)'; $string['statsstudentactivity'] = '学生活动'; $string['statsstudentreads'] = '学生浏览'; $string['statsstudentwrites'] = '学生发布'; $string['statsteacheractivity'] = '教师活动'; $string['statsteacherreads'] = '教师浏览'; $string['statsteacherwrites'] = '教师发布'; $string['statstimeperiod'] = '时间段——过去的:'; $string['statsuniquelogins'] = '唯一登录'; $string['statsuseractivity'] = '所有活动'; $string['statsuserlogins'] = '登录'; $string['statsuserreads'] = '浏览'; $string['statsuserwrites'] = '发布'; $string['statswrites'] = 
'发布'; $string['status'] = '状态'; $string['stringsnotset'] = '以下字串{$a}中没有定义'; $string['studentnotallowed'] = '很抱歉,您作为“{$a}”不能进入该课程'; $string['students'] = '学生'; $string['studentsandteachers'] = '学生和教师'; $string['subcategories'] = '子类别'; $string['submit'] = '提交'; $string['success'] = '成功'; $string['summary'] = '概要'; $string['summary_help'] = '概要是一小段文字,向学生简介该主题或星期内的活动。这段文字会显示在课程页面中小节名的后面。'; $string['summaryof'] = '{$a}的概要'; $string['supplyinfo'] = '填写相关信息'; $string['switchdevicedefault'] = '切换到标准主题'; $string['switchdevicerecommended'] = '切换到建议您的设备使用的主题'; $string['switchrolereturn'] = '切换回我正常的角色'; $string['switchroleto'] = '切换角色到...'; $string['tag'] = '标签'; $string['tagalready'] = '该标签已经存在'; $string['tagmanagement'] = '添加/删除标签'; $string['tags'] = '标签'; $string['targetrole'] = '目标角色'; $string['teacheronly'] = '{$a}专用'; $string['teacherroles'] = '{$a}角色'; $string['teachers'] = '教师'; $string['textediting'] = '在编辑文本时'; $string['texteditor'] = '使用标准网页形式'; $string['textformat'] = '纯文本格式'; $string['thanks'] = '感谢'; $string['theme'] = '主题风格'; $string['themes'] = '主题风格'; $string['themesaved'] = '新主题风格已保存'; $string['therearecourses'] = '这里有 {$a} 门课'; $string['thereareno'] = '该课程中没有{$a}'; $string['thiscategorycontains'] = '此类别包含'; $string['time'] = '时间'; $string['timezone'] = '时区'; $string['to'] = '截止时间'; $string['tocreatenewaccount'] = '直接跳到建立新帐号'; $string['today'] = '今天'; $string['todaylogs'] = '今日日志'; $string['toeveryone'] = '给每个人'; $string['toomanybounces'] = '那个email地址已经有太多的退信。在继续前您<b>必须</b>修改它。'; $string['toomanytoshow'] = '要显示的用户太多'; $string['toomanyusersmatchsearch'] = '太多用户({$a->count}个)匹配“{$a->search}”'; $string['toomanyuserstoshow'] = '太多用户({$a}个)要显示'; $string['toonly'] = '只给{$a}'; $string['top'] = '置顶'; $string['topic'] = '主题'; $string['topichide'] = '向{$a}隐藏该主题'; $string['topicoutline'] = '主题目录'; $string['topicshow'] = '显示该主题于{$a}'; $string['total'] = '总计'; $string['trackforums'] = '跟踪讨论区'; $string['trackforumsno'] = '不:不要跟踪我看过的帖子'; $string['trackforumsyes'] = 
'对:把新帖子突出显示给我'; $string['trysearching'] = '试试搜索'; $string['turneditingoff'] = '关闭编辑功能'; $string['turneditingon'] = '打开编辑功能'; $string['undecided'] = '未定'; $string['unfinished'] = '未完'; $string['unknowncategory'] = '未知类别'; $string['unlimited'] = '无限制'; $string['unpacking'] = '对{$a}解包'; $string['unsafepassword'] = '不安全的密码――试试别的吧'; $string['untilcomplete'] = '直到完成'; $string['unusedaccounts'] = '帐号{$a}天不用将视作自动撤销选课'; $string['unzip'] = '解压'; $string['unzippingbackup'] = '解压备份文件'; $string['up'] = '向上'; $string['update'] = '更改'; $string['updated'] = '已更改的{$a}'; $string['updatemymoodleoff'] = '停止自定义此页'; $string['updatemymoodleon'] = '自定义此页'; $string['updatemyprofile'] = '更新个人资料 '; $string['updatesevery'] = '每隔{$a}秒刷新一次'; $string['updatethis'] = '更改这个{$a}'; $string['updatethiscourse'] = '更改该课程'; $string['updatinga'] = '正在更改{$a}'; $string['updatingain'] = '正在更改{$a->in}中的{$a->what}'; $string['upload'] = '上传'; $string['uploadafile'] = '上传一个文件'; $string['uploadcantwrite'] = '写入失败'; $string['uploadedfile'] = '文件上传成功'; $string['uploadedfileto'] = '上传{$a->file}到{$a->directory}'; $string['uploadedfiletoobig'] = '很抱歉,那个文件太大了(限制为{$a}字节)'; $string['uploadextension'] = '某个 PHP 扩展中止了文件上传'; $string['uploadfailednotrecovering'] = '您上传文件的操作并没有成功,在上传下列文件之一时有问题: {$a->name}。<br />此处是错误日志: <br />{$a->problem}<br />没有恢复。'; $string['uploadfilelog'] = '文件{$a}的上传日志'; $string['uploadformlimit'] = '上传文件的大小超过了表单中设置的最大尺寸限制'; $string['uploadlabel'] = '标题:'; $string['uploadnewfile'] = '上传新文件'; $string['uploadnofilefound'] = '找不到文件――您能确定已经选择了一个文件上传吗?'; $string['uploadnotallowed'] = '不允许上传'; $string['uploadnotempdir'] = '没有临时文件夹'; $string['uploadoldfilesdeleted'] = '您的上传区中的旧文件已经删除了'; $string['uploadpartialfile'] = '文件只部分地上传了'; $string['uploadproblem'] = '上传文件“{$a}”时有一个未知错误发生(也许是它太大?)'; $string['uploadrenamedchars'] = '由于无效字符,文件{$a->oldname}已经更名为{$a->newname}。'; $string['uploadrenamedcollision'] = '由于存在同名文件,文件{$a->oldname}已经更名为{$a->newname}。'; $string['uploadserverlimit'] = '上传文件的大小超过了服务器设定的最大尺寸。'; 
$string['uploadthisfile'] = '上传这个文件'; $string['url'] = '网址'; $string['used'] = '已使用'; $string['usedinnplaces'] = '在{$a}处使用'; $string['usemessageform'] = '或者使用下面输入框发消息给已选择的学生'; $string['user'] = '用户'; $string['userconfirmed'] = '确认{$a}'; $string['usercurrentsettings'] = '个人资料设置'; $string['userdata'] = '用户数据'; $string['userdeleted'] = '该用户帐号已被删除'; $string['userdescription'] = '自述'; $string['userdescription_help'] = '可以在此框中输入一些关于您自己的文字。它们会在您的个人资料页中显示给其他人。'; $string['userdetails'] = '用户细节'; $string['userfiles'] = '用户文件'; $string['userlist'] = '用户列表'; $string['username'] = '用户名'; $string['usernameemailmatch'] = '用户名和 Email 地址不属于同一个用户'; $string['usernameexists'] = '这个用户名已经存在,用别的吧'; $string['usernamelowercase'] = '只允许小写字母'; $string['usernamenotfound'] = '在数据库中找不到这个用户名'; $string['usernameoremail'] = '键入用户名或者Email地址'; $string['usernotconfirmed'] = '不能确认{$a}'; $string['userpic'] = '用户头像'; $string['users'] = '用户'; $string['userselectorautoselectunique'] = '如果只搜到一个符合的用户,就自动选择他'; $string['userselectorpreserveselected'] = '保留被选择的用户,即使他们不再符合搜索条件'; $string['userselectorsearchanywhere'] = '在用户姓名中的任意位置搜索'; $string['usersnew'] = '新用户'; $string['usersnoaccesssince'] = '未活动超过'; $string['userswithfiles'] = '有文件的用户'; $string['useruploadtype'] = '用户上传类型:{$a}'; $string['userviewingsettings'] = '{$a}的个人资料设置'; $string['userzones'] = '用户区'; $string['usetheme'] = '使用此主题'; $string['usingexistingcourse'] = '使用现有课程'; $string['valuealreadyused'] = '此值已被使用'; $string['version'] = '版本'; $string['view'] = '浏览'; $string['viewallcourses'] = '查看所有课程'; $string['viewallcoursescategories'] = '查看所有课程和分类'; $string['viewfileinpopup'] = '在弹出窗口中查看文件'; $string['viewprofile'] = '查看个人资料'; $string['views'] = '浏览'; $string['viewsolution'] = '查看解决方案'; $string['virusfound'] = '管理员请注意! 
Clam AV发现用户{$a->user}在课程{$a->course}中上传的文件有病毒。以下是clamscan的输出:'; $string['virusfoundlater'] = '您于{$a->date}在课程{$a->course}中上传的文件{$a->filename}有病毒。以下是关于该文件的汇总: {$a->action} 如果这个文件是提交给教师的,您需要重新提交一遍。'; $string['virusfoundlateradmin'] = '管理员请注意! 一个由用户{$a->user}于{$a->date}上传到课程{$a->course}中的文件{$a->filename}有病毒。以下是关于此文件的汇总: {$a->action} 已经通知了用户。'; $string['virusfoundlateradminnolog'] = '管理员请注意! 系统发现一个用户上传的名为{$a->filename}的文件有病毒。Moodle无法将此文件退回给上传它的用户。 下面是关于此文件的汇总: {$a->action}'; $string['virusfoundsubject'] = '{$a}: 发现病毒!'; $string['virusfounduser'] = '您上传的文件{$a->filename}经检查已经感染病毒! 文件上传并未成功。'; $string['virusplaceholder'] = '上传的文件有病毒。它已被转移或删除了,并且已经通知用户。'; $string['visible'] = '是否可见'; $string['visibletostudents'] = '对{$a}可见'; $string['warning'] = '警告'; $string['warningdeleteresource'] = '警告:在某个资源中引用了{$a}。您要更新相应的资源吗?'; $string['webpage'] = '网页'; $string['week'] = '教学周'; $string['weekhide'] = '从{$a}隐藏这个星期'; $string['weeklyoutline'] = '每周概要'; $string['weekshow'] = '显示这个星期于{$a}'; $string['welcometocourse'] = '欢迎进入课程{$a}'; $string['welcometocoursetext'] = '欢迎您加入{$a->coursename}! 
如果您还没有做,请修改课程中个人资料,这样我们才可以进一步了解您: {$a->profileurl}'; $string['whattocallzip'] = '如何命名该zip文件?'; $string['whattodo'] = '怎么处理'; $string['windowclosing'] = '本窗口会被自动关闭。如果没自动关闭,请现在关闭它。'; $string['withchosenfiles'] = '将选中的文件...'; $string['withoutuserdata'] = '且不含用户资料'; $string['withselectedusers'] = '对选中的用户...'; $string['withselectedusers_help'] = '* 发送消息 - 向一名或多名成员发送消息 * 添加备注 - 向一名已选择的成员添加备注 * 添加通用备注 - 向多名成员添加相同的备注'; $string['withuserdata'] = '且包含用户资料'; $string['wordforstudent'] = '您用于称呼学生的字词'; $string['wordforstudenteg'] = '例如学生、成员等等'; $string['wordforstudents'] = '您用于称呼多个学生的字词'; $string['wordforstudentseg'] = '例如学生们、成员们'; $string['wordforteacher'] = '您用于称呼教师的字词'; $string['wordforteachereg'] = '例如教师、导师、老师和教员等'; $string['wordforteachers'] = '您用于称呼多个教师的字词'; $string['wordforteacherseg'] = '例如教师们、导师们、老师们和教员们等'; $string['writingblogsinfo'] = '撰写Blog信息'; $string['writingcategoriesandquestions'] = '写入类别及题目'; $string['writingcoursedata'] = '写入课程数据'; $string['writingeventsinfo'] = '写入事件信息'; $string['writinggeneralinfo'] = '写入常规信息'; $string['writinggradebookinfo'] = '写入成绩单信息'; $string['writinggroupingsgroupsinfo'] = '正在写入大组-小组信息'; $string['writinggroupingsinfo'] = '正在写入大组信息'; $string['writinggroupsinfo'] = '写入组信息'; $string['writingheader'] = '写入头'; $string['writingloginfo'] = '写入日志信息'; $string['writingmessagesinfo'] = '写入消息信息'; $string['writingmoduleinfo'] = '写入模块信息'; $string['writingscalesinfo'] = '写入等级信息'; $string['writinguserinfo'] = '写入用户信息'; $string['wrongpassword'] = '该用户的密码错误'; $string['yahooid'] = 'Yahoo号码'; $string['year'] = '年'; $string['years'] = '年'; $string['yes'] = '是'; $string['youareabouttocreatezip'] = '您打算创建一个zip文件包'; $string['youaregoingtorestorefrom'] = '您将恢复'; $string['youneedtoenrol'] = '为了执行此动作,您需要选修此课程。'; $string['yourlastlogin'] = '您上次登录是在'; $string['yourself'] = '您自己'; $string['yourteacher'] = '您的{$a}'; $string['yourwordforx'] = '您称呼“{$a}”为'; $string['zippingbackup'] = '压缩备份文件';
apache-2.0
ste69r/Biokanga
biokanga/SQLiteMarkers.cpp
44941
/* * CSIRO Open Source Software License Agreement (GPLv3) * Copyright (c) 2017, Commonwealth Scientific and Industrial Research Organisation (CSIRO) ABN 41 687 119 230. * See LICENSE for the complete license information (https://github.com/csiro-crop-informatics/biokanga/LICENSE) * Contact: Alex Whan <alex.whan@csiro.au> */ #include "stdafx.h" #ifdef HAVE_CONFIG_H #include <config.h> #endif #if _WIN32 #include <process.h> #include "../libbiokanga/commhdrs.h" #else #include <sys/mman.h> #include <pthread.h> #include "../libbiokanga/commhdrs.h" #endif #include "SQLiteMarkers.h" // Following database schema is utilised // Tables // TblExprs One row for each experiment // TblCults One row for each cultivar or species // TblSeqs One row for each sequence (DNA chromosome or assembly contig if genomic, transcript if RNA) // TblLoci One row for each loci identified on a sequence plus the cannonical base at that loci // TblSnps One row for each SNP at an identified loci for each cultivar and relavant experiment // TblMarkers One row for each marker at an identified loci plus cultivar specific base and score // TblMarkerSnps One row for each SNP processed and contributing to a specific identified marker // In each table the following columns are defined // TblExprs One row for each experiment // ExprID INTEGER PRIMARY KEY ASC, -- Uniquely identifies this experiment instance // ExprType INTEGER -- type, markers 0 or SNPs 1 // ExprInFile VARCHAR(200), -- Input CSV filename // ExprName VARCHAR(50) UINQUE, -- Short name of this experiment // ExprDescr VARCHAR(200), -- Describes the experiment // CultName VARCHAR(50), -- Short name of target cultivar against which alignments were made // TblCults One row for each cultivar or species // CultID INTEGER PRIMARY KEY ASC, -- Uniquely identifies this cultivar instance // CultName VARCHAR(50) UNIQUE, -- Short name of this cultivar // TblSeqs One row for each sequence (DNA chromosome or assembly contig if genomic, transcript if RNA) // 
SeqID INTEGER PRIMARY KEY ASC, -- Uniquely identifies this sequence instance // ExprID INTEGER, -- Sequence was target in this experiment // SeqName VARCHAR(50), -- Short name of this sequence // TblLoci One row for each loci identified on a sequence plus the cannonical base at that loci // LociID INTEGER PRIMARY KEY ASC, -- Uniquely identifies this loci instance // ExprID INTEGER, -- Loci in this experiment // SeqID INTEGER, -- Loci is on this sequence instance // Offset INTEGER, -- And at this offset on the sequence // Base VARCHAR(1), -- With the sequence offset having this cannonical base // TblSnps One row for each SNP at an identified loci for each cultivar and relavant experiment // SnpID INTEGER PRIMARY KEY ASC, -- Uniquely identifies this SNP instance // ExprID INTEGER, -- SNP identified within this experiment // CultID INTEGER, -- SNP is in this cultivar relative to the experiment target cultivar // LociID INTEGER, -- Identifies the loci instance at which the SNP is being called // SrcCnts VARCHAR(1), -- From where the counts were derived - 'S' SNP call, 'I' imputed from coverage or from SAM/BAM alignment sequences // Acnt INTEGER DEFAULT 0, -- Number of bases A in relative cultivar reads covering the SNP loci // Ccnt INTEGER DEFAULT 0, -- Number of bases C in relative cultivar reads covering the SNP loci // Gcnt INTEGER DEFAULT 0, -- Number of bases G in relative cultivar reads covering the SNP loci // Tcnt INTEGER DEFAULT 0, -- Number of bases T in relative cultivar reads covering the SNP loci // Ncnt INTEGER DEFAULT 0, -- Number of bases N in in relative cultivar reads covering the SNP loci // TotCovCnt INTEGER DEFAULT 0, -- Total number of bases in relative cultivar reads covering the SNP loci // TotMMCnt INTEGER DEFAULT 0, -- Total number of mismatches bases in relative cultivar reads covering the SNP loci // TblMarkers One row for each marker at an identified loci plus cultivar specific base and score // MarkerID INTEGER PRIMARY KEY ASC, -- Uniquely 
identifies this marker instance // ExprID INTEGER, -- marker identified within this experiment // CultID INTEGER, -- marker is in this cultivar relative to the experiment target cultivar // LociID INTEGER, -- Identifies the loci instance at which the marker is being called // Base VARCHAR(1), -- Called marker base // Score INTEGER, -- Called marker score // TblMarkerSnps One row for each SNP processed and contributing to a specific identified marker // MarkerSnpsID INTEGER PRIMARY KEY ASC,-- Uniquely identifies this marker SNP instance // SnpID INTEGER, -- Identifies SNP instance // MarkerID INTEGER, -- Used to generate this marker instance tsStmSQL CSQLiteMarkers::m_StmSQL[7] = { {(char *)"TblExprs", (char *)"CREATE TABLE TblExprs (ExprID INTEGER PRIMARY KEY ASC,ExprType Integer, ExprInFile VARCHAR(200), ExprName VARCHAR(50) UNIQUE,ExprDescr VARCHAR(200) DEFAULT '', CultName VARCHAR(50),CultDescr VARCHAR(1000) DEFAULT '')", (char *)"INSERT INTO TblExprs (ExprType,ExprInFile,ExprName,ExprDescr,CultName,CultDescr) VALUES(?,?,?,?,?,?)", NULL, (char *)"CREATE INDEX IF NOT EXISTS 'TblExprs_ExprName' ON 'TblExprs' ('ExprName' ASC);CREATE INDEX IF NOT EXISTS 'TblExprs_CultName' ON 'TblExprs' ('CultName' ASC)", (char *)"CREATE INDEX IF NOT EXISTS 'TblExprs_ExprName' ON 'TblExprs' ('ExprName' ASC);CREATE INDEX IF NOT EXISTS 'TblExprs_CultName' ON 'TblExprs' ('CultName' ASC)", NULL, NULL }, { (char *)"TblCults", (char *)"CREATE TABLE TblCults ( CultID INTEGER PRIMARY KEY ASC,CultName VARCHAR(50) UNIQUE)", (char *)"INSERT INTO TblCults (CultName) VALUES(?)", NULL, (char *)"CREATE INDEX IF NOT EXISTS 'TblCults_CultName' ON 'TblCults' ('CultName' ASC)", (char *)"CREATE INDEX IF NOT EXISTS 'TblCults_CultName' ON 'TblCults' ('CultName' ASC)", NULL, NULL }, { (char *)"TblSeqs", (char *)"CREATE TABLE TblSeqs (SeqID INTEGER PRIMARY KEY ASC,ExprID INTEGER,SeqName VARCHAR(50))", (char *)"INSERT INTO TblSeqs (ExprID,SeqName) VALUES(?,?)", NULL, (char *)"CREATE INDEX IF NOT EXISTS 
'TblSeqs_ExprIDSeqName' ON 'TblSeqs' ('ExprID' ASC,'SeqName' ASC)", NULL, (char *)"DROP INDEX IF EXISTS 'TblSeqs_ExprIDSeqName';CREATE INDEX IF NOT EXISTS 'TblSeqs_ExprID' ON 'TblSeqs' ('ExprID' ASC);CREATE INDEX IF NOT EXISTS 'TblSeqs_SeqName' ON 'TblSeqs' ('SeqName' ASC)", (char *)"DROP INDEX IF EXISTS 'TblSeqs_ExprIDSeqName';DROP INDEX IF EXISTS 'TblSeqs_ExprID';DROP INDEX IF EXISTS 'TblSeqs_SeqName'"}, { (char *)"TblLoci", (char *)"CREATE TABLE TblLoci (LociID INTEGER PRIMARY KEY ASC,ExprID INTEGER, SeqID INTEGER,Offset INTEGER, Base VARCHAR(1))", (char *)"INSERT INTO TblLoci (ExprID,SeqID,Offset,Base) VALUES(?,?,?,?)", NULL, (char *)"CREATE INDEX IF NOT EXISTS 'TblLoci_ExprIDSeqIDOffset' ON 'TblLoci' ('ExprID' ASC,'SeqID' ASC,'Offset' ASC)", NULL, (char *)"DROP INDEX IF EXISTS 'TblLoci_ExprIDSeqIDOffset';CREATE INDEX IF NOT EXISTS 'TblLoci_ExprIDSeqIDOffset' ON 'TblLoci' ('ExprID' ASC,'SeqID' ASC,'Offset' ASC);CREATE INDEX IF NOT EXISTS 'TblLoci_SeqID' ON 'TblLoci' ('SeqID' ASC);CREATE INDEX IF NOT EXISTS 'TblLoci_SeqIDOffset' ON 'TblLoci' ('SeqID' ASC,'Offset' ASC)", (char *)"DROP INDEX IF EXISTS 'TblLoci_SeqID';DROP INDEX IF EXISTS 'TblLoci_SeqIDOffset'" }, { (char *)"TblSnps", (char *)"CREATE TABLE TblSnps (SnpID INTEGER PRIMARY KEY ASC,ExprID INTEGER,CultID INTEGER,LociID INTEGER,SrcCnts VARCHAR(1),Acnt INTEGER DEFAULT 0,Ccnt INTEGER DEFAULT 0,Gcnt INTEGER DEFAULT 0,Tcnt INTEGER DEFAULT 0,Ncnt INTEGER DEFAULT 0,TotCovCnt INTEGER DEFAULT 0,TotMMCnt INTEGER DEFAULT 0)", (char *)"INSERT INTO TblSnps (ExprID,CultID,LociID,SrcCnts,Acnt,Ccnt,Gcnt,Tcnt,Ncnt,TotCovCnt,TotMMCnt) VALUES(?,?,?,?,?,?,?,?,?,?,?)", NULL, (char *)"CREATE INDEX IF NOT EXISTS 'TblSnps_ExprIDCultIDLociID' ON 'TblSnps' ('ExprID' ASC,'CultID' ASC, 'LociID' ASC)", NULL, (char *)"DROP INDEX IF EXISTS 'TblSnps_ExprIDCultIDLociID';CREATE INDEX IF NOT EXISTS 'TblSnps_ExprID' ON 'TblSnps' ('ExprID' ASC);CREATE INDEX IF NOT EXISTS 'TblSnps_CultID' ON 'TblSnps' ('CultID' ASC);CREATE INDEX IF NOT 
EXISTS 'TblSnps_LociID' ON 'TblSnps' ('LociID' ASC)", (char *)"DROP INDEX IF EXISTS 'TblSnps_ExprID'; DROP INDEX IF EXISTS 'TblSnps_CultID';DROP INDEX IF EXISTS 'TblSnps_LociID'"}, { (char *)"TblMarkers", (char *)"CREATE TABLE TblMarkers (MarkerID INTEGER PRIMARY KEY ASC,ExprID INTEGER,CultID INTEGER,LociID INTEGER,Base VARCHAR(1),Score INTEGER)", (char *)"INSERT INTO TblMarkers (ExprID, CultID,LociID,Base,Score) VALUES(?,?,?,?,?)", NULL, (char *)"CREATE INDEX IF NOT EXISTS 'TblMarkers_ExprIDCultIDLociID' ON 'TblMarkers' ('ExprID' ASC,'CultID' ASC,'LociID' ASC)", NULL, (char *)"DROP INDEX IF EXISTS 'TblMarkers_ExprIDCultIDLociID';CREATE INDEX IF NOT EXISTS 'TblMarkers_ExprID' ON 'TblMarkers' ('ExprID' ASC);CREATE INDEX IF NOT EXISTS 'TblMarkers_CultID' ON 'TblMarkers' ('CultID' ASC);CREATE INDEX IF NOT EXISTS 'TblMarkers_LociID' ON 'TblMarkers' ('LociID' ASC)", (char *)"DROP INDEX IF EXISTS 'TblMarkers_ExprIDCultIDLociID';DROP INDEX IF EXISTS 'TblMarkers_ExprID'; DROP INDEX IF EXISTS 'TblMarkers_CultID';DROP INDEX IF EXISTS 'TblMarkers_LociID'"}, { (char *)"TblMarkerSnps", (char *)"CREATE TABLE TblMarkerSnps (MarkerSnpsID INTEGER PRIMARY KEY ASC,ExprID INTEGER, MarkerID INTEGER, SnpID INTEGER)", (char *)"INSERT INTO TblMarkerSnps (ExprID,MarkerID,SnpID) VALUES(?,?,?)", NULL, (char *)"CREATE INDEX IF NOT EXISTS 'TblMarkerSnps_ExprIDMarkerIDSnpID' ON 'TblMarkerSnps' ('ExprID' ASC,'MarkerID' ASC, 'SnpID' ASC)", NULL, (char *)"DROP INDEX IF EXISTS 'TblMarkerSnps_ExprIDMarkerIDSnpID';CREATE INDEX IF NOT EXISTS 'TblMarkerSnps_SnpID' ON 'TblMarkerSnps' ('SnpID' ASC);CREATE INDEX IF NOT EXISTS 'TblMarkerSnps_MarkerID' ON 'TblMarkerSnps' ('MarkerID' ASC)", (char *)"DROP INDEX IF EXISTS 'TblMarkerSnps_ExprIDMarkerIDSnpID';DROP INDEX IF EXISTS 'TblMarkerSnps_SnpID';DROP INDEX IF EXISTS 'TblMarkerSnps_MarkerID'"} }; char * CSQLiteMarkers::RemoveQuotes(char *pszRawText) { char *pSrcChr; char *pDstChr; bool bInSpace; char Chr; CUtility::TrimQuotedWhitespcExtd(pszRawText); 
pSrcChr = pszRawText; pDstChr = pSrcChr; bInSpace = false; while((Chr = *pSrcChr++)!= '\0') { if(Chr == '\'' || Chr == '"') continue; if(Chr == ' ' || Chr == '\t') { if(bInSpace) continue; bInSpace = true; } else bInSpace = false; *pDstChr++ = Chr; } *pDstChr = '\0'; return(pszRawText); } CSQLiteMarkers::CSQLiteMarkers(void) { m_pDB = NULL; m_NumSeqMRA = 0; m_NumSeqs = 0; m_NumSNPLoci = 0; m_NumSNPs = 0; m_NumMarkers = 0; m_bSafe = true; } CSQLiteMarkers::~CSQLiteMarkers(void) { if(m_pDB != NULL) { sqlite3_close_v2(m_pDB); sqlite3_shutdown(); m_pDB = NULL; } } sqlite3 * CSQLiteMarkers::CreateDatabase(bool bSafe, // true if sourcing from input CSV of unknown origin which may contain duplicates etc.. char *pszDatabase) // database to create (any existing database is deleted then clean created) { tsStmSQL *pStms; int TblIdx; int sqlite_error; // note if database already exists in case bReplace is requested struct stat TargStat; int StatRslt = stat(pszDatabase,&TargStat); if(StatRslt >= 0) remove(pszDatabase); m_bSafe = bSafe; // try creating the database if((sqlite_error = sqlite3_open_v2(pszDatabase, &m_pDB,SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE,NULL))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - can't open database: %s", sqlite3_errmsg(m_pDB)); sqlite3_shutdown(); m_pDB = NULL; return(NULL); } // create all tables pStms = m_StmSQL; for(TblIdx = 0; TblIdx < 7; TblIdx++,pStms++) { pStms->pPrepInsert = NULL; if(pStms->pszCreateTbl == NULL) continue; if((sqlite_error = sqlite3_exec(m_pDB,pStms->pszCreateTbl,0,0,0))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - can't create table %s : %s", pStms->pTblName,sqlite3_errmsg(m_pDB)); gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - statement: %s",pStms->pszCreateTbl); sqlite3_close_v2(m_pDB); sqlite3_shutdown(); m_pDB = NULL; return(NULL); } } pStms = m_StmSQL; if(bSafe) { for(TblIdx = 0; TblIdx < 7; TblIdx++,pStms++) { if(pStms->pszOpenCreateSafeIndexes == NULL) continue; 
if((sqlite_error = sqlite3_exec(m_pDB,pStms->pszOpenCreateSafeIndexes,0,0,0))!=SQLITE_OK)
	{
	// NOTE(review): this diagnostic reports pStms->pszOpenCreateIndexes, but the
	// statement actually executed above was pszOpenCreateSafeIndexes - the logged
	// SQL text is the wrong one; confirm and log the safe-index statement instead
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - can't create safe indexes on table %s : %s", pStms->pTblName,sqlite3_errmsg(m_pDB));
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - statement: %s",pStms->pszOpenCreateIndexes);
	sqlite3_close_v2(m_pDB);
	sqlite3_shutdown();
	m_pDB = NULL;
	return(NULL);
	}
	}
}
else
	{
	// not in safe mode: create the full index set up front
	for(TblIdx = 0; TblIdx < 7; TblIdx++,pStms++)
	{
	if(pStms->pszOpenCreateIndexes == NULL)
		continue;
	if((sqlite_error = sqlite3_exec(m_pDB,pStms->pszOpenCreateIndexes,0,0,0))!=SQLITE_OK)
		{
		gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - can't create indexes on table %s : %s", pStms->pTblName,sqlite3_errmsg(m_pDB));
		gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - statement: %s",pStms->pszOpenCreateIndexes);
		sqlite3_close_v2(m_pDB);
		sqlite3_shutdown();
		m_pDB = NULL;
		return(NULL);
		}
	}
	}

// prepare all insert statements
pStms = m_StmSQL;
for(TblIdx = 0; TblIdx < 7; TblIdx++,pStms++)
	{
	if(pStms->pszInsert == NULL)
		{
		pStms->pPrepInsert = NULL;
		continue;
		}
	if((sqlite_error = sqlite3_prepare_v2(m_pDB,pStms->pszInsert,-1,&pStms->pPrepInsert,NULL))!=SQLITE_OK)
		{
		gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - can't prepare insert statement on table %s: %s", pStms->pTblName, sqlite3_errmsg(m_pDB));
		// finalise any statements already prepared before bailing out
		// NOTE(review): TblIdx is never decremented inside this loop, so on a prepare
		// failure the condition stays true and pStms walks backwards past the start
		// of m_StmSQL - looks like the intended loop was while(TblIdx-- > 0); confirm
		while(TblIdx > 0)
			{
			pStms -= 1;
			if(pStms->pPrepInsert != NULL)
				{
				sqlite3_finalize(pStms->pPrepInsert);
				pStms->pPrepInsert = NULL;
				}
			}
		sqlite3_close_v2(m_pDB);
		sqlite3_shutdown();
		m_pDB = NULL;
		return(NULL);
		}
	}
return(m_pDB);
}

// CloseDatabase
// Finalises the prepared insert statements (skipped when bNoIndexes is true), then
// closes the SQLite handle and shuts the library down.
// Returns the sqlite3_close_v2() result, or 0 if no database was open.
int
CSQLiteMarkers::CloseDatabase(bool bNoIndexes)
{
int TblIdx;
int Rslt = 0;
tsStmSQL *pStms;
pStms = m_StmSQL;
if(m_pDB != NULL)
	{
	if(!bNoIndexes)
		{
		for(TblIdx = 0; TblIdx < 7; TblIdx++,pStms++)
			{
			if(pStms->pPrepInsert == NULL)
				continue;
			sqlite3_finalize(pStms->pPrepInsert);
			pStms->pPrepInsert = NULL;
			}
		}
	Rslt = sqlite3_close_v2(m_pDB);
	sqlite3_shutdown();
	m_pDB = NULL;
	}
return(Rslt);
}

// callbacks from sqlite3_exec
int
CSQLiteMarkers::ExecCallbackCultID(void *pCallP1, // callback function processing identifier (4th arg to sqlite3_exec()) int NumCols, // number of result columns char **ppColValues, // array of ptrs to column values char **ppColName) // array of ptrs to column names { int ValChars; tsCultivar *pCult; char *ppEnd; // some basic validation of call back parameter values if(pCallP1 == NULL || NumCols != 1 || ppColValues == NULL || ppColValues[0] == NULL || *ppColValues[0] == '\0') return(1); pCult = (tsCultivar *)pCallP1; ValChars = (int)strlen(ppColValues[0]); pCult->CultID = strtol(ppColValues[0],&ppEnd,10); return(0); } // callbacks from sqlite3_exec returning an identifier int CSQLiteMarkers::ExecCallbackID(void *pCallP1, // callback function processing identifier (4th arg to sqlite3_exec()) int NumCols, // number of result columns char **ppColValues, // array of ptrs to column values char **ppColName) // array of ptrs to column names { int ValChars; int *pID; char *ppEnd; // some basic validation of call back parameter values if(pCallP1 == NULL || NumCols != 1 || ppColValues == NULL || ppColValues[0] == NULL || *ppColValues[0] == '\0') return(1); pID = (int *)pCallP1; ValChars = (int)strlen(ppColValues[0]); *pID = strtol(ppColValues[0],&ppEnd,10); return(0); } int // errors if < eBSFSuccess, if positive then the ExprID CSQLiteMarkers::CreateExperiment(int CSVtype, // 0 if markers, 1 if SNPs char *pszInFile, // file containing SNPs or markers char *pszExprName, // experiment identifier char *pszExprDescr, // describes experiment char *pszAssembName) // targeted assembly { int sqlite_error; int ExprID; char szExprName[128]; tsStmSQL *pStm; if(m_pDB == NULL) return(eBSFerrInternal); m_NumSeqs = 0; m_NumSNPLoci = 0; m_NumSNPs = 0; m_NumMarkers = 0; pStm = &m_StmSQL[0]; if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 1, CSVtype))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); 
CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_text(pStm->pPrepInsert, 2, pszInFile,(int)strlen(pszInFile)+1,SQLITE_STATIC))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_text(pStm->pPrepInsert, 3, pszExprName,(int)strlen(pszExprName)+1,SQLITE_STATIC))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_text(pStm->pPrepInsert, 4, pszExprDescr,(int)strlen(pszExprDescr)+1,SQLITE_STATIC))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_text(pStm->pPrepInsert, 5, pszAssembName,(int)strlen(pszAssembName)+1,SQLITE_STATIC))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_step(pStm->pPrepInsert))!=SQLITE_DONE) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - step prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } sqlite3_reset(pStm->pPrepInsert); // find out the ExprID assigned to this experiment if(m_bSafe) { ExprID = -1; sprintf(szExprName,"select ExprID from TblExprs where ExprName LIKE '%s'",pszExprName); if((sqlite_error = sqlite3_exec(m_pDB,szExprName,ExecCallbackID,&ExprID,NULL))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite3_exec - getting ExprID: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } } else ExprID = (int)sqlite3_last_insert_rowid(m_pDB); return(ExprID); } int // errors if < eBSFSuccess CSQLiteMarkers::CreateCultivars(int NumCultivars, 
// number of cultivars to add tsCultivar *pCultivars) // pts to array of cultivars { int sqlite_error; int CultIdx; tsStmSQL *pStm; tsCultivar *pCult; char szCultTarg[200]; if(m_pDB == NULL) return(eBSFerrInternal); pCult = pCultivars; pStm = &m_StmSQL[1]; // access cultivar statements for(CultIdx = 0; CultIdx < NumCultivars; CultIdx++,pCult++) { if((sqlite_error = sqlite3_bind_text(pStm->pPrepInsert, 1, pCult->szCultivarName,(int)strlen(pCult->szCultivarName)+1,SQLITE_STATIC))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_step(pStm->pPrepInsert))!=SQLITE_DONE) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - step prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } sqlite3_reset(pStm->pPrepInsert); // find out the CultID assigned to this cultivar if(m_bSafe) { sprintf(szCultTarg,"select CultID from TblCults where CultName LIKE '%s'",pCult->szCultivarName); sqlite3_exec(m_pDB,szCultTarg,ExecCallbackCultID,pCult,NULL); } else pCult->CultID = (int)sqlite3_last_insert_rowid(m_pDB); } return(NumCultivars); } int // returned sequence identifier for sequence CSQLiteMarkers::AddSeq( int ExprID, // experiment identifier char *pszSeqName) // target assembly sequence name { int sqlite_error; tsStmSQL *pStm; int Idx; int SeqID; tsMRASeq *pMRASeq; tsMRASeq *pLRASeq; char szSeqTarg[200]; if(m_pDB == NULL) return(eBSFerrInternal); if(!m_NumSeqMRA) memset(m_MRASeqs,0,sizeof(m_MRASeqs)); // quickly check if sequence is a recently accessed sequence and if so then return the identifier pMRASeq = m_MRASeqs; for(Idx = 0; Idx < m_NumSeqMRA; Idx++, pMRASeq++) { if(!stricmp(pszSeqName,pMRASeq->szSeqName)) { if(pMRASeq->AccessCnt < 0x7ffffff) pMRASeq->AccessCnt += 10; return(pMRASeq->SeqID); } if(pMRASeq->AccessCnt > 0) pMRASeq->AccessCnt -= 1; } SeqID = -1; if(m_bSafe) { // not a recently 
accessed sequence so need to check if already known to SQLite sprintf(szSeqTarg,"select SeqID from TblSeqs where ExprID = %d AND SeqName LIKE '%s'",ExprID,pszSeqName); sqlite3_exec(m_pDB,szSeqTarg,ExecCallbackID,&SeqID,NULL); } if(SeqID == -1) // will be -1 if not already in database so need to add { pStm = &m_StmSQL[2]; // access sequence statements if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 1, ExprID))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_text(pStm->pPrepInsert, 2, pszSeqName,(int)strlen(pszSeqName)+1,SQLITE_STATIC))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_step(pStm->pPrepInsert))!=SQLITE_DONE) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - step prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } sqlite3_reset(pStm->pPrepInsert); if(m_bSafe) { sprintf(szSeqTarg,"select SeqID from TblSeqs where ExprID = %d AND SeqName LIKE '%s'",ExprID,pszSeqName); sqlite3_exec(m_pDB,szSeqTarg,ExecCallbackID,&SeqID,NULL); } else SeqID = (int)sqlite3_last_insert_rowid(m_pDB); m_NumSeqs += 1; // number of seqs added to TblSeqs } // replace lRA sequence if(m_NumSeqMRA < cMaxMRASeqs) pMRASeq = &m_MRASeqs[m_NumSeqMRA++]; else { pMRASeq = m_MRASeqs; pLRASeq = pMRASeq++; for(Idx = 1; Idx < m_NumSeqMRA; Idx++, pMRASeq++) { if(pMRASeq->AccessCnt < pLRASeq->AccessCnt) pLRASeq = pMRASeq; } pMRASeq = pLRASeq; } pMRASeq->AccessCnt = 1000; pMRASeq->SeqID = SeqID; strcpy(pMRASeq->szSeqName,pszSeqName); return(SeqID); } int // returned loci identifier CSQLiteMarkers::AddLoci(int ExprID, // experiment identifier int SeqID, // target assembly sequence identifier int Offset, // offset on sequence char Base) // cannonical 
base at loci
{
int sqlite_error;
tsStmSQL *pStm;
int LociID;
char szLoci[200];
char szBase[2];
szBase[0] = Base;	// SQLite seems to treat chars as 1byte integers and the command line SQLite shell displays as a numeric
szBase[1] = '\0';	// so easiest to use a VARCHAR(1) text string
if(m_pDB == NULL)
	return(eBSFerrInternal);
pStm = &m_StmSQL[3];	// TblLoci insert statement
// bind ExprID, SeqID, Offset and the loci base, then step the prepared insert;
// any bind/step failure is fatal - database is closed and eBSFerrInternal returned
if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 1, ExprID))!=SQLITE_OK) {
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB));
	CloseDatabase(true);
	return(eBSFerrInternal);
	}
if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 2, SeqID))!=SQLITE_OK) {
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB));
	CloseDatabase(true);
	return(eBSFerrInternal);
	}
if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 3, Offset))!=SQLITE_OK) {
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB));
	CloseDatabase(true);
	return(eBSFerrInternal);
	}
if((sqlite_error = sqlite3_bind_text(pStm->pPrepInsert, 4, szBase,2,SQLITE_STATIC))!=SQLITE_OK) {
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB));
	CloseDatabase(true);
	return(eBSFerrInternal);
	}
if((sqlite_error = sqlite3_step(pStm->pPrepInsert))!=SQLITE_DONE) {
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - step prepared statement: %s", sqlite3_errmsg(m_pDB));
	CloseDatabase(true);
	return(eBSFerrInternal);
	}
sqlite3_reset(pStm->pPrepInsert);
if(m_bSafe)
	{
	// NOTE(review): LociID is not initialised to -1 before this lookup (AddSNP does
	// initialise SnpID = -1); if the SELECT matches no row an indeterminate value is
	// returned to the caller - confirm and initialise
	// NOTE(review): the Base column is inserted as VARCHAR(1) text above, but the
	// lookup formats it with %d ((int)Base, e.g. 65 for 'A'); against a TEXT-affinity
	// column 'A' = 65 will not match, so this safe-mode lookup likely never finds
	// the row - verify (AddMarker has the same pattern)
	sprintf(szLoci,"select LociID from TblLoci where ExprID = %d AND SeqID = %d AND Offset = %d and Base = %d",ExprID,SeqID,Offset,(int)Base);
	sqlite3_exec(m_pDB,szLoci,ExecCallbackID,&LociID,NULL);
	}
else
	LociID = (int)sqlite3_last_insert_rowid(m_pDB);
m_NumSNPLoci += 1;	// number of SNP loci added to TblLoci
return(LociID);
}

int						// returned SNP identifier
CSQLiteMarkers::AddSNP(int ExprID,	//
experiment identifier int CultID, // SNP in this cultivar relative to target sequence int LociID, // identifies target sequence loci at which SNP has been identified char SrcCnts, // from where the counts were derived - 'S' SNP call, 'I' imputed from coverage or from SAM/BAM alignment sequences int Acnt, // count of A's in reads covering loci int Ccnt, // count of C's in reads covering loci int Gcnt, // count of G's in reads covering loci int Tcnt, // count of T's in reads covering loci int Ncnt, // count of N's in reads covering loci int TotCovCnt, // total count of reads covering loci int TotMMCnt) // of which there were this many reads with mismatches { int sqlite_error; tsStmSQL *pStm; int SnpID; char szSNP[200]; char szSrcCnts[2]; pStm = &m_StmSQL[4]; // access sequence statements if(m_pDB == NULL) return(eBSFerrInternal); szSrcCnts[0] = SrcCnts; szSrcCnts[1] = '\0'; if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 1, ExprID))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 2, CultID))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 3, LociID))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_text(pStm->pPrepInsert, 4, szSrcCnts,2,SQLITE_STATIC))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 5, Acnt))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared 
statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 6, Ccnt))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 7, Gcnt))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 8, Tcnt))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 9, Ncnt))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 10, TotCovCnt))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 11, TotMMCnt))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_step(pStm->pPrepInsert))!=SQLITE_DONE) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - step prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } sqlite3_reset(pStm->pPrepInsert); if(m_bSafe) { SnpID = -1; sprintf(szSNP,"select SnpID from TblSnps where ExprID = %d AND LociID = %d AND CultID = %d",ExprID,LociID,CultID); sqlite3_exec(m_pDB,szSNP,ExecCallbackID,&SnpID,NULL); } else SnpID = 
(int)sqlite3_last_insert_rowid(m_pDB); m_NumSNPs += 1; // number of SNPs added to TblSnps return(SnpID); } int // returned marker identifier CSQLiteMarkers::AddMarker(int ExprID, // experiment identifier int CultID, // marker in this cultivar relative to other cultivars int LociID, // identifies target sequence loci at which marker has been identified char MarkerBase, // marker base int MarkerScore) // score { int sqlite_error; tsStmSQL *pStm; int MarkerID; char szMarker[200]; char szBase[2]; szBase[0] = MarkerBase; // SQLite seems to treat chars as 1byte integers and the command line SQLite shell displays as a numeric szBase[1] = '\0'; // so easiest to use a VARCHAR(1) text string pStm = &m_StmSQL[5]; // access sequence statements if(m_pDB == NULL) return(eBSFerrInternal); if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 1, ExprID))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 2, CultID))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 3, LociID))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_text(pStm->pPrepInsert, 4, szBase,2,SQLITE_STATIC))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 5, MarkerScore))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } if((sqlite_error 
= sqlite3_step(pStm->pPrepInsert))!=SQLITE_DONE) {
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - step prepared statement: %s", sqlite3_errmsg(m_pDB));
	CloseDatabase(true);
	return(eBSFerrInternal);
	}
sqlite3_reset(pStm->pPrepInsert);
if(m_bSafe)
	{
	MarkerID = -1;
	// NOTE(review): the Base column is inserted as VARCHAR(1) text, but MarkerBase is
	// formatted here with %d (char promoted to int, e.g. 65 for 'A'); against a
	// TEXT-affinity column that comparison will not match, so in safe mode the -1
	// initialiser is what ends up returned - verify (same pattern as AddLoci)
	sprintf(szMarker,"select MarkerID from TblMarkers where ExprID = %d AND LociID = %d AND CultID = %d AND Base = %d",ExprID,LociID,CultID,MarkerBase);
	sqlite3_exec(m_pDB,szMarker,ExecCallbackID,&MarkerID,NULL);
	}
else
	MarkerID = (int)sqlite3_last_insert_rowid(m_pDB);
m_NumMarkers += 1;	// number of markers added to TblMarkers
return(MarkerID);
}

// AddMarkerSnp
// Associates a marker row with one of the SNP rows contributing to that marker call
// by inserting into TblMarkerSnps; returns the MarkerSnpsID of the association
// (looked up by SELECT in safe mode, otherwise last_insert_rowid)
int
CSQLiteMarkers::AddMarkerSnp(int ExprID, int MarkerID, int SnpID)
{
int sqlite_error;
tsStmSQL *pStm;
int MarkerSnpID;
char szMarkerSnp[200];
pStm = &m_StmSQL[6];	// TblMarkerSnps insert statement
if(m_pDB == NULL)
	return(eBSFerrInternal);
if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 1, ExprID))!=SQLITE_OK) {
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB));
	CloseDatabase(true);
	return(eBSFerrInternal);
	}
if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 2, MarkerID))!=SQLITE_OK) {
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB));
	CloseDatabase(true);
	return(eBSFerrInternal);
	}
if((sqlite_error = sqlite3_bind_int(pStm->pPrepInsert, 3, SnpID))!=SQLITE_OK) {
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - bind prepared statement: %s", sqlite3_errmsg(m_pDB));
	CloseDatabase(true);
	return(eBSFerrInternal);
	}
if((sqlite_error = sqlite3_step(pStm->pPrepInsert))!=SQLITE_DONE) {
	gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - step prepared statement: %s", sqlite3_errmsg(m_pDB));
	CloseDatabase(true);
	return(eBSFerrInternal);
	}
sqlite3_reset(pStm->pPrepInsert);
if(m_bSafe)
	{
	MarkerSnpID = -1;
	sprintf(szMarkerSnp,"select MarkerSnpsID from TblMarkerSnps where ExprID = %d AND MarkerID = %d AND SnpID = %d",ExprID,MarkerID,SnpID);
sqlite3_exec(m_pDB,szMarkerSnp,ExecCallbackID,&MarkerSnpID,NULL); } else MarkerSnpID = (int)sqlite3_last_insert_rowid(m_pDB); return(MarkerSnpID); } int CSQLiteMarkers::ProcessCSV2SQLite(int PMode, // currently just the one mode...default is to parse from CSV and create/populate SQLite database bool bSafe, // if true then use indexing on all tables whilst inserting... much slower but perhaps safer if multiple threads ... int CSVtype, // input CSV file has this format (0: markers, 1: SNPs) char *pszExprName, // name by which this experiment is identified char *pszExprDescr, // describes experiment char *pTargAssemb, // assembly against which aligments for SNP discovery int NumSpecies, // number of species used in alignments char *pszSpeciesNames[], // names of species char *pszInFile, // parse from this input CSV file char *pszDatabase) // SQLite database file { int Rslt; int ExprID; int sqlite_error; sqlite3_stmt *prepstatement = NULL; tsCultivar *pCultivar; int CultIdx; sqlite3_initialize(); if((CreateDatabase(bSafe,pszDatabase))==NULL) { sqlite3_shutdown(); return(eBSFerrInternal); } if((Rslt = CreateExperiment(CSVtype,pszInFile,pszExprName,pszExprDescr,pTargAssemb)) < 1) { CloseDatabase(true); return(Rslt); } ExprID = Rslt; pCultivar = Cultivars; for(CultIdx = 0; CultIdx < NumSpecies; CultIdx++, pCultivar++) { pCultivar->CultID = 0; if(CSVtype == 0) pCultivar->CultIdx = (4 + CultIdx * 9); else pCultivar->CultIdx = 0; strcpy(pCultivar->szCultivarName,pszSpeciesNames[CultIdx]); } if((Rslt = CreateCultivars(NumSpecies,Cultivars)) < 1) { CloseDatabase(true); return(Rslt); } char *pszBeginTransaction = (char *)"BEGIN TRANSACTION"; char *pszEndTransaction = (char *)"END TRANSACTION"; char *pszDropIndexes = (char *)"DROP INDEX IF EXISTS 'Markers_LociID'"; char *pszPragmaSyncOff = (char *)"PRAGMA synchronous = OFF"; char *pszPragmaSyncOn = (char *)"PRAGMA synchronous = ON"; char *pszPragmaJournMem = (char *)"PRAGMA journal_mode = MEMORY"; 
gDiagnostics.DiagOut(eDLInfo,gszProcName,"sqlite - populating tables"); // synchronous writes off if((sqlite_error = sqlite3_exec(m_pDB,pszPragmaSyncOff,NULL,NULL,NULL))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - can't turn synchronous writes off: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } // bracket inserts as a single transaction if((sqlite_error = sqlite3_exec(m_pDB,pszBeginTransaction,NULL,NULL,NULL))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - can't begin transactions: %s",sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } // load CSV file and start populating the SQLite database CCSVFile *pCSV = new CCSVFile; if(pCSV == NULL) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"Unable to instantiate CCSVfile"); CloseDatabase(true); return(eBSFerrObj); } if(CSVtype == 0) // marker CSV files can contain a large number of fields { if(((NumSpecies * 9) + 4) > cCSVDfltFields) pCSV->SetMaxFields((NumSpecies * 9) + 4); } if((Rslt=pCSV->Open(pszInFile))!=eBSFSuccess) { while(pCSV->NumErrMsgs()) gDiagnostics.DiagOut(eDLFatal,gszProcName,pCSV->GetErrMsg()); gDiagnostics.DiagOut(eDLFatal,gszProcName,"Unable to open file: %s",pszInFile); delete pCSV; CloseDatabase(true); return(Rslt); } int NumFields; int NumElsRead; int NumCultivars; char *pszSeqName; int SeqID; NumElsRead = 0; while((Rslt=pCSV->NextLine()) > 0) // onto next line containing fields { if(!(NumElsRead % (bSafe ? 
5000 : 100000)) && NumElsRead > 0) gDiagnostics.DiagOut(eDLInfo,gszProcName,"Parsed %d lines, Unique target sequences: %d, SNP Loci: %d, SNPs: %d, Markers: %d",NumElsRead, m_NumSeqs,m_NumSNPLoci, m_NumSNPs, m_NumMarkers); NumFields = pCSV->GetCurFields(); // SNP files have 23, Marker CSV have 4 + (9 * NumCultivars) fields switch(CSVtype) { case 0: // markers NumCultivars = (NumFields - 4)/9; if(NumFields < 13 || NumFields != (NumCultivars * 9) + 4) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"Expected Marker CSV file number of fields to be ((NumCultivars * 9) + 4) in '%s', GetCurFields() returned '%d'",pszInFile,NumFields); delete pCSV; CloseDatabase(true); return(eBSFerrFieldCnt); } if(NumSpecies != NumCultivars) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"Expected Marker CSV file to contain %d SNP species, NumCultivars in '%s' is %d",NumSpecies,pszInFile,NumCultivars); delete pCSV; CloseDatabase(true); return(eBSFerrFieldCnt); } if(!NumElsRead && pCSV->IsLikelyHeaderLine()) continue; break; case 1: // SNPs if(NumFields != 23) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"Expected 'biokanga align' generated SNP CSV file number of fields to be 23 at line %d in '%s', GetCurFields() returned '%d'",NumElsRead,pszInFile,NumFields); delete pCSV; CloseDatabase(true); return(eBSFerrFieldCnt); } NumCultivars = 1; break; } if(!NumElsRead && pCSV->IsLikelyHeaderLine()) continue; NumElsRead += 1; int Loci; int LociID; int SnpID; int CultID; char *pszLociBase; char *pszMarkerBase; char *pszCntSrc; char CntSrc; int MarkerScore; int MarkerID; char LociBase; char MarkerBase; int TotCovCnt; int TotMMCnt; int SpeciesIdx; int Acnt; int Ccnt; int Gcnt; int Tcnt; int Ncnt; int FieldIdx; int NumCovd; switch(CSVtype) { case 0: // markers pCSV->GetText(1,&pszSeqName); // sequence containing marker RemoveQuotes(pszSeqName); SeqID = AddSeq(ExprID,pszSeqName); pCSV->GetInt(2,&Loci); // loci on sequence at which marker has been determined pCSV->GetText(3,&pszLociBase); // canonical target 
sequence base at the marker loci switch(*pszLociBase) { case 'a': case 'A': LociBase = 'A'; break; case 'c': case 'C': LociBase = 'C'; break; case 'g': case 'G': LociBase = 'G'; break; case 't': case 'T': LociBase = 'T'; break; default: LociBase = 'N'; break; } LociID = AddLoci(ExprID,SeqID,Loci,LociBase); FieldIdx = 5; NumCovd = 0; for(SpeciesIdx = 0; SpeciesIdx < NumSpecies; SpeciesIdx++,FieldIdx += 9) { pCSV->GetText(FieldIdx,&pszCntSrc); // "I" if base counts were imputed, "S" if from SNP call CntSrc = *pszCntSrc; pCSV->GetText(FieldIdx+1,&pszMarkerBase); // marker base switch(*pszMarkerBase) { case 'a': case 'A': MarkerBase = 'A'; break; case 'c': case 'C': MarkerBase = 'C'; break; case 'g': case 'G': MarkerBase = 'G'; break; case 't': case 'T': MarkerBase = 'T'; break; default: MarkerBase = 'N'; break; } pCSV->GetInt(FieldIdx+2,&MarkerScore); // canonical target sequence base at the marker loci pCSV->GetInt(FieldIdx+3,&TotCovCnt); pCSV->GetInt(FieldIdx+4,&Acnt); pCSV->GetInt(FieldIdx+5,&Ccnt); pCSV->GetInt(FieldIdx+6,&Gcnt); pCSV->GetInt(FieldIdx+7,&Tcnt); pCSV->GetInt(FieldIdx+8,&Ncnt); switch(LociBase) { case 'A': TotMMCnt = TotCovCnt - Acnt; break; case 'C': TotMMCnt = TotCovCnt - Ccnt; break; case 'G': TotMMCnt = TotCovCnt - Gcnt; break; case 'T': TotMMCnt = TotCovCnt - Tcnt; break; default: TotMMCnt = TotCovCnt - Ncnt; break; } CultID = (int)Cultivars[SpeciesIdx].CultID; if(TotCovCnt > 0 || MarkerScore > 0) { SnpID = AddSNP(ExprID,CultID,LociID,CntSrc,Acnt,Ccnt,Gcnt,Tcnt,Ncnt,TotCovCnt,TotMMCnt); Cultivars[SpeciesIdx].SnpID = SnpID; } else Cultivars[SpeciesIdx].SnpID = 0; if(MarkerScore > 0) { MarkerID = AddMarker(ExprID,CultID,LociID,MarkerBase,MarkerScore); Cultivars[SpeciesIdx].MarkerID = MarkerID; } else Cultivars[SpeciesIdx].MarkerID = 0; } for(SpeciesIdx = 0; SpeciesIdx < NumSpecies; SpeciesIdx++) { if(Cultivars[SpeciesIdx].MarkerID != 0) { MarkerID = Cultivars[SpeciesIdx].MarkerID; for(int Idx = 0; Idx < NumSpecies; Idx++) if(Cultivars[Idx].SnpID 
> 0) AddMarkerSnp(ExprID,MarkerID,Cultivars[Idx].SnpID); } } break; case 1: // SNPs pCSV->GetText(4,&pszSeqName); // sequence containing SNP RemoveQuotes(pszSeqName); SeqID = AddSeq(ExprID,pszSeqName); pCSV->GetInt(5,&Loci); // loci on sequence at which marker has been determined pCSV->GetInt(11,&TotCovCnt); pCSV->GetInt(12,&TotMMCnt); pCSV->GetText(13,&pszLociBase); // cannonical target sequence base at the marker loci switch(*pszLociBase) { case 'a': case 'A': LociBase = 'A'; break; case 'c': case 'C': LociBase = 'C'; break; case 'g': case 'G': LociBase = 'G'; break; case 't': case 'T': LociBase = 'T'; break; default: LociBase = 'N'; break; } LociID = AddLoci(ExprID,SeqID,Loci,LociBase); pCSV->GetInt(14,&Acnt); pCSV->GetInt(15,&Ccnt); pCSV->GetInt(16,&Gcnt); pCSV->GetInt(17,&Tcnt); pCSV->GetInt(18,&Ncnt); if(TotCovCnt) { switch(LociBase) { case 'A': if(Acnt == 0) Acnt = TotCovCnt - TotMMCnt; break; case 'C': if(Ccnt == 0) Ccnt = TotCovCnt - TotMMCnt; break; case 'G': if(Gcnt == 0) Gcnt = TotCovCnt - TotMMCnt; break; case 'T': if(Tcnt == 0) Tcnt = TotCovCnt - TotMMCnt; break; case 'N': if(Ncnt == 0) Ncnt = TotCovCnt - TotMMCnt; break; } CultID = (int)Cultivars[0].CultID; SnpID = AddSNP(ExprID,CultID,LociID,'S',Acnt,Ccnt,Gcnt,Tcnt,Ncnt,TotCovCnt,TotMMCnt); } break; } } gDiagnostics.DiagOut(eDLInfo,gszProcName,"Parsed %d lines, Unique target sequences: %d, SNP Loci: %d, SNPs: %d, Markers: %d",NumElsRead, m_NumSeqs,m_NumSNPLoci, m_NumSNPs, m_NumMarkers); // end transaction if((sqlite_error = sqlite3_exec(m_pDB,pszEndTransaction,NULL,NULL,NULL))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - can't end transactions on '%s': %s", "Markers",sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } gDiagnostics.DiagOut(eDLInfo,gszProcName,"Completed populating the sqlite database"); gDiagnostics.DiagOut(eDLInfo,gszProcName,"Generating indexes ..."); tsStmSQL *pStms; pStms = m_StmSQL; int TblIdx; for(TblIdx = 0; TblIdx < 7; 
TblIdx++,pStms++) { if(pStms->pszCreateIndexes == NULL) continue; gDiagnostics.DiagOut(eDLInfo,gszProcName,"Creating indexes on table %s ...", pStms->pTblName); if((sqlite_error = sqlite3_exec(m_pDB,pStms->pszCreateIndexes,0,0,0))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - can't create indexes on table %s : %s", pStms->pTblName,sqlite3_errmsg(m_pDB)); gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - statement: %s",pStms->pszCreateIndexes); CloseDatabase(true); return(eBSFerrInternal); } } gDiagnostics.DiagOut(eDLInfo,gszProcName,"Indexes generated"); // synchronous writes off if((sqlite_error = sqlite3_exec(m_pDB,pszPragmaSyncOn,NULL,NULL,NULL))!=SQLITE_OK) { gDiagnostics.DiagOut(eDLFatal,gszProcName,"sqlite - can't turn synchronous writes on: %s", sqlite3_errmsg(m_pDB)); CloseDatabase(true); return(eBSFerrInternal); } CloseDatabase(); sqlite3_shutdown(); gDiagnostics.DiagOut(eDLInfo,gszProcName,"SQLite database ready for use"); return(eBSFSuccess); }
apache-2.0
consulo/consulo-java
java-execution-impl/src/main/java/com/intellij/execution/JavaRunConfigurationExtensionManager.java
1948
/*
 * Copyright 2000-2011 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.execution;

import consulo.logging.Logger;
import jakarta.inject.Singleton;
import com.intellij.execution.configuration.RunConfigurationExtensionsManager;
import com.intellij.execution.configurations.RunConfigurationBase;
import com.intellij.execution.configurations.RuntimeConfigurationException;
import com.intellij.openapi.components.ServiceManager;

/**
 * Application service managing {@link RunConfigurationExtension} contributions to Java
 * run configurations, registered over the {@code RunConfigurationExtension.EP_NAME}
 * extension point.
 *
 * User: anna
 * Date: 10/4/11
 */
@Singleton
public class JavaRunConfigurationExtensionManager extends RunConfigurationExtensionsManager<RunConfigurationBase, RunConfigurationExtension>
{
	// NOTE(review): the logger category is RunConfigurationExtension, not this class -
	// confirm intentional; otherwise JavaRunConfigurationExtensionManager.class would
	// be the conventional category
	private static final Logger LOG = Logger.getInstance(RunConfigurationExtension.class.getName());

	public JavaRunConfigurationExtensionManager()
	{
		super(RunConfigurationExtension.EP_NAME);
	}

	/** Obtains the singleton instance from the service container. */
	public static JavaRunConfigurationExtensionManager getInstance()
	{
		return ServiceManager.getService(JavaRunConfigurationExtensionManager.class);
	}

	/**
	 * Validates {@code configuration} against all applicable extensions.
	 *
	 * NOTE(review): despite the declared {@code throws RuntimeConfigurationException},
	 * every exception - validation failures included - is caught and only logged, so
	 * this method never actually propagates a validation error. Confirm the swallow is
	 * intentional before relying on the declared contract.
	 */
	public static void checkConfigurationIsValid(RunConfigurationBase configuration) throws RuntimeConfigurationException
	{
		try
		{
			getInstance().validateConfiguration(configuration, false);
		}
		catch (Exception e)
		{
			LOG.error(e);
		}
	}

	/** Attribute used to identify an extension's settings element: {@code name}. */
	@Override
	protected String getIdAttrName()
	{
		return "name";
	}

	/** Root element name for persisted extension settings: {@code extension}. */
	@Override
	protected String getExtensionRootAttr()
	{
		return "extension";
	}
}
apache-2.0
aesean/AndroidInject
injectlib/src/main/java/com/aesean/injectlib/ioc/annotation/EventUtils.java
1450
/* * Copyright (C) 2015. Aesean * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.aesean.injectlib.ioc.annotation; /** * 用于提供事件常量 */ public class EventUtils { /** * 全部都是静态常量禁止实例化 */ private EventUtils() { } public static final String SET_ON_CLICK_LISTENER = "setOnClickListener"; public static final String SET_ON_LONG_CLICK_LISTENER = "setOnLongClickListener"; public static final String SET_ON_CLICK_LISTENER_CALLBACK = "onClick"; public static final String SET_ON_LONG_CLICK_LISTENER_CALLBACK = "onLongClick"; public static final String SET_ON_CHECKED_CHANGE_LISTENER = "setOnCheckedChangeListener"; public static final String SET_ON_CHECKED_CHANGE_LISTENER_CALLBACK = "onCheckedChanged"; public static final String FIND_VIEW_BY_ID = "findViewById"; public static final String SET_CONTENT_VIEW = "setContentView"; }
apache-2.0
WindowsInsiders/Website
app/person/index.js
270
"use strict";
// Compiled output (TypeScript -> CommonJS "barrel" module). Do not edit by hand;
// regenerate from the .ts sources. It re-exports every public member of the
// person model/service/component so consumers can require the directory.
function __export(m) {
    // Copy each enumerable property that isn't already defined on exports.
    for (var p in m) if (!exports.hasOwnProperty(p)) exports[p] = m[p];
}
__export(require('./person-model.ts'));
__export(require('./person.service.ts'));
__export(require('./person.component.ts'));
//# sourceMappingURL=index.js.map
apache-2.0
thedanfernandez/DockerCortanaClient
DockerCortanaClient/Docker.Universal/Models/CreateImageParameters.cs
515
namespace Docker.DotNet.Models
{
    /// <summary>
    /// Query-string parameters for the Docker Engine "create image" operation;
    /// each property is serialized into one query parameter via
    /// <c>QueryStringParameter</c>. Presumably maps to POST /images/create —
    /// TODO confirm against the Engine API version this client targets.
    /// </summary>
    public class CreateImageParameters
    {
        /// <summary>Serialized as the "fromImage" query parameter (optional).</summary>
        [QueryStringParameter("fromImage", false)]
        public string FromImage { get; set; }

        /// <summary>Serialized as the "repo" query parameter (optional).</summary>
        [QueryStringParameter("repo", false)]
        public string Repo { get; set; }

        /// <summary>Serialized as the "tag" query parameter (optional).</summary>
        [QueryStringParameter("tag", false)]
        public string Tag { get; set; }

        /// <summary>Serialized as the "registry" query parameter (optional).</summary>
        [QueryStringParameter("registry", false)]
        public string Registry { get; set; }

        // The explicit empty parameterless constructor was removed: the
        // compiler-generated default constructor is identical, so callers
        // using `new CreateImageParameters()` are unaffected.
    }
}
apache-2.0
FederatedAI/FATE
python/federatedml/framework/homo/blocks/aggregator.py
2559
#
#  Copyright 2019 The FATE Authors. All Rights Reserved.
#
#  Licensed under the Apache License, Version 2.0 (the "License");
#  you may not use this file except in compliance with the License.
#  You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
#  Unless required by applicable law or agreed to in writing, software
#  distributed under the License is distributed on an "AS IS" BASIS,
#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#  See the License for the specific language governing permissions and
#  limitations under the License.
#
from federatedml.framework.homo.blocks import model_broadcaster, model_scatter
from federatedml.framework.homo.blocks.base import HomoTransferBase
from federatedml.framework.homo.blocks.model_broadcaster import ModelBroadcasterTransVar
from federatedml.framework.homo.blocks.model_scatter import ModelScatterTransVar
from federatedml.util import consts


class AggregatorTransVar(HomoTransferBase):
    """Transfer-variable bundle for model aggregation.

    Combines a scatter channel (clients -> server, for local models) and a
    broadcast channel (server -> clients, for the aggregated model), both
    sharing this object's prefix.
    """

    def __init__(self, server=(consts.ARBITER,), clients=(consts.GUEST, consts.HOST), prefix=None):
        super().__init__(server=server, clients=clients, prefix=prefix)
        # Channel over which clients send their local models to the server.
        self.model_scatter = ModelScatterTransVar(server=server, clients=clients, prefix=self.prefix)
        # Channel over which the server sends the aggregated model back to clients.
        self.model_broadcaster = ModelBroadcasterTransVar(server=server, clients=clients, prefix=self.prefix)


class Server(object):
    """Server (aggregator) side: collects client models and broadcasts the aggregate."""

    def __init__(self, trans_var: AggregatorTransVar = None):
        # Default wiring (arbiter server, guest/host clients) when none is supplied.
        if trans_var is None:
            trans_var = AggregatorTransVar()
        self._model_broadcaster = model_broadcaster.Server(trans_var=trans_var.model_broadcaster)
        self._model_scatter = model_scatter.Server(trans_var=trans_var.model_scatter)

    def get_models(self, suffix=tuple()):
        """Receive the clients' local models for the round identified by `suffix`."""
        return self._model_scatter.get_models(suffix=suffix)

    def send_aggregated_model(self, model, suffix=tuple()):
        """Broadcast the aggregated `model` to all clients for the round `suffix`."""
        self._model_broadcaster.send_model(model=model, suffix=suffix)


class Client(object):
    """Client side: sends the local model and receives the aggregated one."""

    def __init__(self, trans_var: AggregatorTransVar = None):
        # Default wiring (arbiter server, guest/host clients) when none is supplied.
        if trans_var is None:
            trans_var = AggregatorTransVar()
        self._model_broadcaster = model_broadcaster.Client(trans_var=trans_var.model_broadcaster)
        self._model_scatter = model_scatter.Client(trans_var=trans_var.model_scatter)

    def send_model(self, model, suffix=tuple()):
        """Send this client's local `model` to the server for the round `suffix`."""
        self._model_scatter.send_model(model=model, suffix=suffix)

    def get_aggregated_model(self, suffix=tuple()):
        """Receive the server's aggregated model for the round `suffix`."""
        return self._model_broadcaster.get_model(suffix=suffix)
apache-2.0
pravega/pravega
bindings/src/main/java/io/pravega/storage/extendeds3/ExtendedS3StorageConfig.java
4435
/**
 * Copyright Pravega Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.pravega.storage.extendeds3;

import com.emc.object.s3.S3Config;
import com.emc.object.util.ConfigUri;
import com.google.common.base.Preconditions;
import io.pravega.common.util.ConfigBuilder;
import io.pravega.common.util.ConfigurationException;
import io.pravega.common.util.Property;
import io.pravega.common.util.TypedProperties;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;

/**
 * Configuration for the ExtendedS3 Storage component.
 * Immutable; build instances via {@link #builder()}. Getters are generated by Lombok's {@code @Getter}.
 */
@Slf4j
public class ExtendedS3StorageConfig {
    //region Config Names

    public static final Property<String> CONFIGURI = Property.named("connect.config.uri", "", "configUri");
    public static final Property<String> BUCKET = Property.named("bucket", "");
    public static final Property<String> PREFIX = Property.named("prefix", "/");
    public static final Property<Boolean> USENONEMATCH = Property.named("noneMatch.enable", false, "useNoneMatch");
    public static final Property<Integer> SMALL_OBJECT_THRESHOLD = Property.named("concat.smallObject.threshold.size", 1024 * 1024, "smallObjectSizeLimitForConcat");
    private static final String COMPONENT_CODE = "extendeds3";
    private static final String PATH_SEPARATOR = "/";

    //endregion

    //region Members

    /**
     * The S3 complete client config of the EXTENDEDS3 REST interface
     */
    @Getter
    private final S3Config s3Config;

    /**
     * The EXTENDEDS3 access key id - this is equivalent to the user
     */
    @Getter
    private final String accessKey;

    /**
     * The EXTENDEDS3 secret key associated with the accessKey
     */
    @Getter
    private final String secretKey;

    /**
     * A unique bucket name to store objects
     */
    @Getter
    private final String bucket;

    /**
     * Prefix of the Pravega owned EXTENDEDS3 path under the assigned buckets. All the objects under this path will be
     * exclusively owned by Pravega.
     */
    @Getter
    private final String prefix;

    /**
     * Value of the "noneMatch.enable" property. Presumably enables conditional
     * (if-none-match style) writes in the storage implementation — confirm
     * against ExtendedS3Storage, which consumes this flag.
     */
    @Getter
    private final boolean useNoneMatch;

    /**
     * Size of ECS objects in bytes above which it is no longer considered a small object.
     * For small source objects, to implement concat ExtendedS3Storage reads complete objects and appends it to target
     * instead of using multi part upload.
     */
    @Getter
    private final int smallObjectSizeLimitForConcat;

    //endregion

    //region Constructor

    /**
     * Creates a new instance of the ExtendedS3StorageConfig class.
     * Parses the S3 client config from the "connect.config.uri" property and normalizes
     * the prefix to always end with "/".
     *
     * @param properties The TypedProperties object to read Properties from.
     * @throws ConfigurationException if a required property is missing or cannot be parsed.
     */
    private ExtendedS3StorageConfig(TypedProperties properties) throws ConfigurationException {
        ConfigUri<S3Config> s3ConfigUri = new ConfigUri<S3Config>(S3Config.class);
        this.s3Config = Preconditions.checkNotNull(s3ConfigUri.parseUri(properties.get(CONFIGURI)), "configUri");
        // Credentials are taken from the parsed S3 config, not from separate properties.
        this.accessKey = Preconditions.checkNotNull(s3Config.getIdentity(), "identity");
        this.secretKey = Preconditions.checkNotNull(s3Config.getSecretKey(), "secretKey");
        this.bucket = Preconditions.checkNotNull(properties.get(BUCKET), "bucket");
        String givenPrefix = Preconditions.checkNotNull(properties.get(PREFIX), "prefix");
        // Normalize: ensure the prefix ends with the path separator.
        this.prefix = givenPrefix.endsWith(PATH_SEPARATOR) ? givenPrefix : givenPrefix + PATH_SEPARATOR;
        this.useNoneMatch = properties.getBoolean(USENONEMATCH);
        this.smallObjectSizeLimitForConcat = properties.getInt(SMALL_OBJECT_THRESHOLD);
    }

    /**
     * Creates a new ConfigBuilder that can be used to create instances of this class.
     *
     * @return A new Builder for this class.
     */
    public static ConfigBuilder<ExtendedS3StorageConfig> builder() {
        return new ConfigBuilder<>(COMPONENT_CODE, ExtendedS3StorageConfig::new);
    }

    //endregion
}
apache-2.0
bendavidson/graphhopper
core/src/test/java/com/graphhopper/reader/osm/conditional/ConditionalParserTest.java
2653
/* * Licensed to GraphHopper and Peter Karich under one or more contributor * license agreements. See the NOTICE file distributed with this work for * additional information regarding copyright ownership. * * GraphHopper licenses this file to you under the Apache License, * Version 2.0 (the "License"); you may not use this file except in * compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.graphhopper.reader.osm.conditional; import com.graphhopper.reader.OSMWay; import org.junit.Before; import org.junit.Test; import java.text.ParseException; import java.util.Calendar; import java.util.HashSet; import static org.junit.Assert.*; /** * @author Robin Boldt */ public class ConditionalParserTest extends CalendarBasedTest { ConditionalParser parser; @Before public void setup() { HashSet<String> restrictedValues = new HashSet<String>(); restrictedValues.add("private"); restrictedValues.add("agricultural"); restrictedValues.add("forestry"); restrictedValues.add("no"); restrictedValues.add("restricted"); restrictedValues.add("delivery"); restrictedValues.add("military"); restrictedValues.add("emergency"); parser = new ConditionalParser(restrictedValues); } @Test public void testParseConditional() throws ParseException { DateRange dateRange = parser.getDateRange("no @ (2015 Sep 1-2015 Sep 30)"); assertFalse(dateRange.isInRange(getCalendar(2015, Calendar.AUGUST, 31))); assertTrue(dateRange.isInRange(getCalendar(2015, Calendar.SEPTEMBER, 30))); } @Test public void testParseAllowingCondition() throws ParseException { DateRange dateRange = parser.getDateRange("yes @ (2015 Sep 1-2015 Sep 
30)"); assertNull(dateRange); } @Test public void testParsingOfLeading0() throws ParseException { DateRange dateRange = parser.getDateRange("no @ (01.11. - 31.03.)"); assertTrue(dateRange.isInRange(getCalendar(2015, Calendar.DECEMBER, 2))); dateRange = parser.getDateRange("no @ (01.11 - 31.03)"); assertTrue(dateRange.isInRange(getCalendar(2015, Calendar.DECEMBER, 2))); } }
apache-2.0
shagraths/LinaExpress
application/controllers/controllerAdmin.php
5338
<?php if (!defined('BASEPATH')) exit('No direct script access allowed');

/**
 * Admin controller: AJAX/JSON CRUD endpoints for sectors, cities ("ciudad")
 * and menu-card ("carta") reference data. Mutating endpoints answer
 * {"valor": 1} on success and {"valor": 0} on failure.
 */
class controllerAdmin extends CI_Controller
{
    function __construct()
    {
        parent::__construct();
        $this->load->model('modelo');
    }

    //TODO SECTOR

    /** Renders the active-cities combo box view. */
    function cargar_ciudades()
    {
        $datos['ciudades'] = $this->modelo->consultar_ciudad_activos()->result();
        $this->load->view('ADMINISTRADOR/CB/combo_ciudad', $datos);
    }

    /** Creates a sector from POSTed fields (uppercased); echoes {"valor": 0|1}. */
    function guardar_sector()
    {
        $s_c = $this->input->post("s_c");
        $nombre = $this->input->post("nombre");
        $obs = $this->input->post("obs");
        $estado = $this->input->post("estado");
        $valor = 1;
        if ($this->modelo->guardar_sector(strtoupper($s_c), strtoupper($nombre), strtoupper($obs), strtoupper($estado)) == 0) {
            $valor = 0;
        }
        echo json_encode(array("valor" => $valor));
    }

    /** Renders the sector grid with row count. */
    function reporte_sector()
    {
        $data = $this->modelo->consultar_sector();
        $datos['cantidad'] = $data->num_rows();
        $datos['sectores'] = $data->result();
        $this->load->view('ADMINISTRADOR/GRILLAS/sector', $datos);
    }

    /** Updates a sector by id; echoes {"valor": 0|1}. */
    function actualizar_sector()
    {
        $id = $this->input->post("id");
        $s_c = strtoupper($this->input->post("s_c"));
        $nombre = strtoupper($this->input->post("nombre"));
        $obs = strtoupper($this->input->post("obs"));
        $estado = strtoupper($this->input->post("estado"));
        $valor = 1;
        if ($this->modelo->actualizar_sector($id, $s_c, $nombre, $obs, $estado) == 0) {
            $valor = 0;
        }
        echo json_encode(array("valor" => $valor));
    }

    /** Deletes a sector by id; echoes {"valor": 0|1}. */
    function eliminar_sector()
    {
        $id = $this->input->post("id");
        $valor = 0;
        if ($this->modelo->eliminar_sector($id) == 1) {
            $valor = 1;
        }
        echo json_encode(array("valor" => $valor));
    }

    //TODO CIUDAD

    /** Creates a city from POSTed fields (uppercased); echoes {"valor": 0|1}. */
    function guardar_ciudad()
    {
        $tipo = strtoupper($this->input->post("tipo"));
        $nombre = strtoupper($this->input->post("nombre"));
        $obs = strtoupper($this->input->post("obs"));
        $estado = strtoupper($this->input->post("estado"));
        $valor = 1;
        if ($this->modelo->guardar_ciudad($tipo, $nombre, $obs, $estado) == 0) {
            $valor = 0;
        }
        echo json_encode(array("valor" => $valor));
    }

    /** Updates a city by id; echoes {"valor": 0|1}. */
    function actualizar_ciudad()
    {
        $id = $this->input->post("id");
        $tipo = strtoupper($this->input->post("tipo"));
        $nombre = strtoupper($this->input->post("nombre"));
        $obs = strtoupper($this->input->post("obs"));
        $estado = strtoupper($this->input->post("estado"));
        $valor = 1;
        if ($this->modelo->actualizar_ciudad($id, $tipo, $nombre, $obs, $estado) == 0) {
            $valor = 0;
        }
        echo json_encode(array("valor" => $valor));
    }

    /** Renders the city grid with row count. */
    function reporte_ciudad()
    {
        $data = $this->modelo->consultar_ciudad();
        $datos['cantidad'] = $data->num_rows();
        $datos['ciudades'] = $data->result();
        $this->load->view('ADMINISTRADOR/GRILLAS/ciudad', $datos);
    }

    /** Deletes a city by id_ciudad; echoes {"valor": 0|1}. */
    function eliminar_ciudad()
    {
        $valor = 0;
        $id_ciudad = $this->input->post("id_ciudad");
        if ($this->modelo->eliminar_ciudad($id_ciudad) == 1) {
            $valor = 1;
        }
        echo json_encode(array("valor" => $valor));
    }

    /** Creates a card type from POSTed fields (uppercased); echoes {"valor": 0|1}. */
    function guardar_tipo_carta()
    {
        $tipo = strtoupper($this->input->post("tipo"));
        $obs = strtoupper($this->input->post("obs"));
        $estado = strtoupper($this->input->post("estado"));
        $valor = 1;
        if ($this->modelo->guardar_tipo_carta($tipo, $obs, $estado) == 0) {
            $valor = 0;
        }
        echo json_encode(array("valor" => $valor));
    }

    /** Updates a card type by id; echoes {"valor": 0|1}. */
    function actualizar_tipo_carta()
    {
        $id = $this->input->post("id");
        $tipo = strtoupper($this->input->post("tipo"));
        $obs = strtoupper($this->input->post("obs"));
        $estado = strtoupper($this->input->post("estado"));
        $valor = 1;
        if ($this->modelo->actualizar_tipo_carta($id, $tipo, $obs, $estado) == 0) {
            $valor = 0;
        }
        echo json_encode(array("valor" => $valor));
    }

    /**
     * Deletes a card type by id; echoes {"valor": 0|1}.
     * Fixed two defects: the posted $id was read but never passed to the model,
     * and the response used json_decode (wrong direction, emitted nothing usable)
     * instead of json_encode like every sibling endpoint.
     */
    function eliminar_tipo_carta()
    {
        $id = $this->input->post("id");
        $valor = 0;
        if ($this->modelo->eliminar_tipo_carta($id) == 1) {
            $valor = 1;
        }
        echo json_encode(array("valor" => $valor));
    }

    /** Renders the card grid with row count. */
    function reporte_carta()
    {
        $data = $this->modelo->consultar_carta();
        $datos['cantidad'] = $data->num_rows();
        $datos['cartas'] = $data->result();
        $this->load->view('ADMINISTRADOR/GRILLAS/carta', $datos);
    }

    /**
     * Deletes a card by id_carta; echoes {"valor": 0|1} for consistency with
     * the other delete endpoints (previously no response was emitted).
     * Fixed: previously called modelo->eliminar_ciudad($id_carta), deleting a
     * CITY row by a CARD id (copy-paste bug). Assumes modelo::eliminar_carta
     * exists — confirm in the model; add it if missing.
     */
    function eliminar_carta()
    {
        $id_carta = $this->input->post("id_carta");
        $valor = 0;
        if ($this->modelo->eliminar_carta($id_carta) == 1) {
            $valor = 1;
        }
        echo json_encode(array("valor" => $valor));
    }
}
apache-2.0
garygunarman/firanew
admin/account/signin.php
4023
<?php

/**
 * Admin sign-in page: authenticates against tbl_admin and renders the login form.
 *
 * NOTE(review): this file uses the legacy mysql_* extension (connection comes
 * from connDB(), defined elsewhere); migrating to mysqli/PDO prepared
 * statements is the proper long-term fix. The edits below keep the existing
 * API but escape all values interpolated into SQL.
 */

/**
 * Looks up the admin row matching the given credentials, or false if none.
 * Fixed: the password was emitted as md5($post_password) WITHOUT quotes,
 * so SQL treated it as an identifier — a syntax/injection hazard. It is now
 * escaped and quoted: md5('...'). The username is escaped as well.
 * NOTE(review): this function appears unused here (the form handler calls
 * admin_login(), defined elsewhere) — confirm before removing.
 */
function password($post_username, $post_password){
    $conn = connDB();
    $post_username = mysql_real_escape_string($post_username, $conn);
    $post_password = mysql_real_escape_string($post_password, $conn);
    $sql = "SELECT * FROM tbl_admin WHERE `username` = '$post_username' AND `password` = md5('$post_password')";
    $query = mysql_query($sql, $conn);
    $result = mysql_fetch_array($query);
    return $result;
}

/**
 * Returns a row with a 'rows' count of admins having the given username.
 * The username is escaped before interpolation to prevent SQL injection.
 */
function get_username($post_username){
    $conn = connDB();
    $post_username = mysql_real_escape_string($post_username, $conn);
    $sql = "SELECT COUNT(*) AS rows FROM tbl_admin WHERE `username` = '$post_username'";
    $query = mysql_query($sql, $conn);
    $result = mysql_fetch_array($query);
    return $result;
}

// Guard POST reads so a plain GET of this page raises no undefined-index notices.
$username = clean_alphanum(isset($_POST['username']) ? $_POST['username'] : '');
$password = clean_alphanum(isset($_POST['password']) ? $_POST['password'] : '');

if(isset($_POST['btn-admin-login']) && $_POST['btn-admin-login'] == "Sign In"){
    $get_admin = admin_login($username, $password);
    if($get_admin['rows'] != 1){
        // Wrong credentials: flash an error and remember the username only if it exists.
        $_SESSION['alert'] = "error";
        $_SESSION['msg'] = "<strong>Login invalid.</strong> Please check your username and password.";
        $forgot = get_username($username);
        if($forgot['rows'] > 0){
            $_SESSION['username'] = $username;
        }else{
            $_SESSION['username'] = "error";
        }
    }else{
        // Successful login: store admin id and clear any pending flash message.
        $_SESSION['admin'] = $get_admin['id'];
        ini_set('session.gc_probability', '1');
        if(isset($_SESSION['alert'])){
            unset($_SESSION['alert']);
            unset($_SESSION['msg']);
        }
    }
}
?>
<form method="post" enctype="multipart/form-data">
<?php
if(!empty($_SESSION['alert'])){
    echo '<div class="alert '.$_SESSION['alert'].'" id="alert-msg-login">';
    echo '<div class="container text-center">';
    echo $_SESSION['msg'];
    echo '</div>';
    echo '</div>';
}
// First (non-submit) page view: clear stale flash messages.
if(!isset($_POST['btn-admin-login']) || $_POST['btn-admin-login'] == ""){
    unset($_SESSION['alert']);
    unset($_SESSION['msg']);
}
?>
<div class="container main">
    <div class="box row login">
        <div class="navbar-login clearfix">
            <div class="navbar-brand">
                <img src="<?php echo $prefix_url;?>files/common/logo.png" alt="logo" class="hidden">
                <img src="<?php echo $prefix_url.'static/thimthumb.php?src='.$general['logo'].'&w=40&h=40&q=80';?>" class="">
            </div>
            <h1><?php echo $general['website_title'];?> Admin</h1>
        </div>
        <div class="content">
            <ul class="form-set clearfix">
                <li class="form-group row">
                    <label class="col-xs-3 control-label">Username</label>
                    <div class="col-xs-9">
                        <input type="text" class="form-control" autocomplete="off" name="username" id="id_username">
                    </div>
                </li>
                <li class="form-group row">
                    <label class="col-xs-3 control-label">Password</label>
                    <div class="col-xs-9">
                        <input type="password" class="form-control" autocomplete="off" name="password" id="id_password">
                    </div>
                </li>
                <li class="btn-placeholder m_b_15">
                    <?php
                    //if(isset($_SESSION['alert']) and $_SESSION['username'] != "error"){
                        echo "<a class=\"m_r_15\" href=\"".$prefix_url."forgot-password\" id=\"ahref-forgot\">Forgotten your password?</a>";
                    //}
                    ?>
                    <input type="button" class="btn btn-success btn-sm" value="Sign In" onclick="validateLogin()" id="btn_login">
                    <input type="submit" class="btn btn-success btn-sm hidden" value="Sign In" name="btn-admin-login" id="btn-login">
                </li>
            </ul>
        </div><!--.content-->
    </div><!--.box.row-->
</div><!--.container.main-->
</form>
<script type="text/javascript" src="<?php echo $prefix_url?>/script/login.js"></script>
apache-2.0
deephacks/confit
tck/src/main/java/org/deephacks/confit/test/ConfigTestData.java
26146
/** * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.deephacks.confit.test; import org.deephacks.confit.Config; import org.deephacks.confit.Id; import org.deephacks.confit.Index; import org.deephacks.confit.model.Bean; import org.deephacks.confit.model.BeanId; import org.deephacks.confit.test.validation.FirstUpper; import javax.validation.constraints.Max; import javax.validation.constraints.Min; import javax.validation.constraints.NotNull; import javax.validation.constraints.Size; import java.io.File; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; public class ConfigTestData { public static Grandfather getGrandfather(String id) { try { Grandfather gf = new Grandfather(id); gf.id = id; gf.prop1 = "value"; gf.prop2 = new HashSet<String>(Arrays.asList("c", "b", "a")); gf.prop4 = new DateTime("2002-09-24-06:00"); gf.prop5 = new DurationTime("PT15H"); gf.prop8 = new Byte((byte) 1); gf.prop9 = new Long(1000000000000L); gf.prop10 = new Short((short) 123); gf.prop11 = new Float(12313.13); gf.prop12 = new Double(238.476238746834796); gf.prop13 = new Boolean(true); gf.prop14 = TimeUnit.NANOSECONDS; gf.prop15 = new URL("http://www.deephacks.org"); gf.prop16 = new File(".").getAbsoluteFile(); gf.prop17 = Arrays.asList(new File(".").getAbsoluteFile(), new File(".").getAbsoluteFile()); gf.prop18 = 
Arrays.asList(new URL("http://www.deephacks.org"), new URL( "http://www.google.se")); gf.prop19 = Arrays.asList(TimeUnit.DAYS, TimeUnit.HOURS); gf.prop21 = 1; return gf; } catch (Exception e) { throw new RuntimeException(e); } } public static Parent getParent(String parentId) { try { Parent gf = new Parent(parentId); gf.id = parentId; gf.prop1 = "value"; gf.prop2 = new HashSet<String>(Arrays.asList("c", "b", "a")); gf.prop4 = new DateTime("2002-09-24-06:00"); gf.prop5 = new DurationTime("PT15H"); gf.prop8 = new Byte((byte) 1); gf.prop9 = new Long(1000000000000L); gf.prop10 = new Short((short) 123); gf.prop11 = new Float(12313.13); gf.prop12 = new Double(238.476238746834796); gf.prop13 = new Boolean(true); gf.prop15 = new URL("http://www.deephacks.org"); gf.prop16 = new File(".").getAbsoluteFile(); gf.prop17 = Arrays.asList(new File(".").getAbsoluteFile(), new File(".").getAbsoluteFile()); gf.prop18 = Arrays.asList(new URL("http://www.deephacks.org"), new URL( "http://www.google.se")); gf.prop19 = Arrays.asList(TimeUnit.DAYS, TimeUnit.HOURS); return gf; } catch (Exception e) { throw new RuntimeException(e); } } public static Child getChild(String childId) { try { Child gf = new Child(childId); gf.id = childId; gf.prop2 = new HashSet<String>(Arrays.asList("c", "b", "a")); gf.prop4 = new DateTime("2002-09-24-06:00"); gf.prop5 = new DurationTime("PT15H"); gf.prop8 = new Byte((byte) 1); gf.prop9 = new Long(1000000000000L); gf.prop10 = new Short((short) 123); gf.prop11 = new Float(12313.13); gf.prop12 = new Double(238.476238746834796); gf.prop13 = new Boolean(true); gf.prop15 = new URL("http://www.deephacks.org"); gf.prop16 = new File(".").getAbsoluteFile(); gf.prop17 = Arrays.asList(new File(".").getAbsoluteFile(), new File(".").getAbsoluteFile()); gf.prop18 = Arrays.asList(new URL("http://www.deephacks.org"), new URL( "http://www.google.se")); gf.prop19 = Arrays.asList(TimeUnit.DAYS, TimeUnit.HOURS); return gf; } catch (Exception e) { throw new RuntimeException(e); } } 
public static JSR303Validation getJSR303Validation(String id) { return new JSR303Validation(id); } public static Person getPerson(String id) { return new Person(id); } public static final String GRANDFATHER_SCHEMA_NAME = "GrandfatherSchemaName"; @Config(name = GRANDFATHER_SCHEMA_NAME, desc = "a test class") public static class Grandfather { @Id(name = "id", desc = "desc") private String id; @Config(desc = "prop1Desc") @Index private String prop1 = "defaultValue"; @Config(desc = "prop2Desc") private Set<String> prop2; @Config(desc = "prop3Desc") @Index private List<Integer> prop3 = Arrays.asList(1, 2, 3); @Config(desc = "prop4Desc") private DateTime prop4; @Config(desc = "prop5Desc") private DurationTime prop5; @Config(desc = "prop7Desc") @Index private List<Parent> prop7; @Config(desc = "prop8Desc") private byte prop8; @Config(desc = "prop9Desc") private long prop9; @Config(desc = "prop10Desc") private short prop10; @Config(desc = "prop11Desc") private float prop11; @Config(desc = "prop12Desc") @Index private double prop12; @Config(desc = "prop13Desc") private boolean prop13; @Config(desc = "prop14Desc") private TimeUnit prop14 = TimeUnit.MICROSECONDS; @Config(desc = "prop15Desc") private URL prop15; @Config(desc = "prop16Desc") private File prop16; @Config(desc = "prop17Desc") private List<File> prop17; @Config(desc = "prop18Desc") private List<URL> prop18; @Config(desc = "prop19Desc") private List<TimeUnit> prop19 = Arrays.asList(TimeUnit.HOURS, TimeUnit.SECONDS); @Config(desc = "prop20Desc") private Map<String, Parent> prop20; @Config(desc = "prop21Desc") private int prop21; public Grandfather() { } public Grandfather(String id) { this.id = id; } public BeanId getBeanId() { return BeanId.create(id, GRANDFATHER_SCHEMA_NAME); } public String getId() { return id; } public String getProp1() { return prop1; } public Set<String> getProp2() { return prop2; } public List<Integer> getProp3() { return prop3; } public DateTime getProp4() { return prop4; } public 
DurationTime getProp5() { return prop5; } public List<Parent> getProp7() { return prop7; } public byte getProp8() { return prop8; } public long getProp9() { return prop9; } public short getProp10() { return prop10; } public float getProp11() { return prop11; } public double getProp12() { return prop12; } public boolean getProp13() { return prop13; } public TimeUnit getProp14() { return prop14; } public URL getProp15() { return prop15; } public File getProp16() { return prop16; } public List<File> getProp17() { return prop17; } public List<URL> getProp18() { return prop18; } public List<TimeUnit> getProp19() { return prop19; } public Map<String, Parent> getProp20() { return prop20; } public int getProp21() { return prop21; } public void setId(String id) { this.id = id; } public void setProp1(String prop1) { this.prop1 = prop1; } public void setProp2(Set<String> prop2) { this.prop2 = prop2; } public void setProp3(List<Integer> prop3) { this.prop3 = prop3; } public void setProp4(DateTime prop4) { this.prop4 = prop4; } public void setProp5(DurationTime prop5) { this.prop5 = prop5; } public void setProp7(List<Parent> prop7) { this.prop7 = prop7; } public void setProp8(byte prop8) { this.prop8 = prop8; } public void setProp9(long prop9) { this.prop9 = prop9; } public void setProp10(short prop10) { this.prop10 = prop10; } public void setProp11(float prop11) { this.prop11 = prop11; } public void setProp12(double prop12) { this.prop12 = prop12; } public void setProp13(boolean prop13) { this.prop13 = prop13; } public void setProp14(TimeUnit prop14) { this.prop14 = prop14; } public void setProp15(URL prop15) { this.prop15 = prop15; } public void setProp16(File prop16) { this.prop16 = prop16; } public void setProp17(List<File> prop17) { this.prop17 = prop17; } public void setProp18(List<URL> prop18) { this.prop18 = prop18; } public void setProp19(List<TimeUnit> prop19) { this.prop19 = prop19; } public void setProp20(Map<String, Parent> prop20) { this.prop20 = prop20; } public 
void setProp21(int prop21) { this.prop21 = prop21; } public void add(Parent... p) { if (prop7 == null) { prop7 = new ArrayList<>(); } prop7.addAll(Arrays.asList(p)); } public List<Parent> getParents() { return prop7; } public void resetParents() { prop7.clear(); } public void put(Parent p) { if (prop20 == null) { prop20 = new HashMap<>(); } prop20.put(p.id, p); } public Bean toBean() { return ConversionUtils.toBean(this); } } public static final String PARENT_SCHEMA_NAME = "ParentSchemaName"; @Config(name = PARENT_SCHEMA_NAME, desc = "a test class") public static class Parent { @Id(desc = "desc") private String id; private String prop1 = "defaultValue"; private Set<String> prop2; private List<Integer> prop3 = Arrays.asList(1, 2, 3); private DateTime prop4; private DurationTime prop5; private Child prop6; private List<Child> prop7; private Byte prop8; private Long prop9; private Short prop10; private float prop11; private Double prop12; private Boolean prop13; private TimeUnit prop14; private URL prop15; private File prop16; private List<File> prop17; private List<URL> prop18; private List<TimeUnit> prop19; private Map<String, Child> prop20; public String getProp1() { return prop1; } public Set<String> getProp2() { return prop2; } public List<Integer> getProp3() { return prop3; } public DateTime getProp4() { return prop4; } public DurationTime getProp5() { return prop5; } public Child getProp6() { return prop6; } public List<Child> getProp7() { return prop7; } public Byte getProp8() { return prop8; } public Long getProp9() { return prop9; } public Short getProp10() { return prop10; } public float getProp11() { return prop11; } public Double getProp12() { return prop12; } public Boolean getProp13() { return prop13; } public TimeUnit getProp14() { return prop14; } public URL getProp15() { return prop15; } public File getProp16() { return prop16; } public List<File> getProp17() { return prop17; } public List<URL> getProp18() { return prop18; } public List<TimeUnit> 
getProp19() { return prop19; } public Map<String, Child> getProp20() { return prop20; } public void setId(String id) { this.id = id; } public void setProp1(String prop1) { this.prop1 = prop1; } public void setProp2(Set<String> prop2) { this.prop2 = prop2; } public void setProp3(List<Integer> prop3) { this.prop3 = prop3; } public void setProp4(DateTime prop4) { this.prop4 = prop4; } public void setProp5(DurationTime prop5) { this.prop5 = prop5; } public void setProp6(Child prop6) { this.prop6 = prop6; } public void setProp7(List<Child> prop7) { this.prop7 = prop7; } public void setProp8(Byte prop8) { this.prop8 = prop8; } public void setProp9(Long prop9) { this.prop9 = prop9; } public void setProp10(Short prop10) { this.prop10 = prop10; } public void setProp11(float prop11) { this.prop11 = prop11; } public void setProp12(Double prop12) { this.prop12 = prop12; } public void setProp13(Boolean prop13) { this.prop13 = prop13; } public void setProp14(TimeUnit prop14) { this.prop14 = prop14; } public void setProp15(URL prop15) { this.prop15 = prop15; } public void setProp16(File prop16) { this.prop16 = prop16; } public void setProp17(List<File> prop17) { this.prop17 = prop17; } public void setProp18(List<URL> prop18) { this.prop18 = prop18; } public void setProp19(List<TimeUnit> prop19) { this.prop19 = prop19; } public void setProp20(Map<String, Child> prop20) { this.prop20 = prop20; } public void add(Child... 
c) { if (prop7 == null) { prop7 = new ArrayList<Child>(); } prop7.addAll(Arrays.asList(c)); } public List<Child> getChilds() { return prop7; } public void resetChilds() { prop7.clear(); } public BeanId getBeanId() { return BeanId.create(id, PARENT_SCHEMA_NAME); } public String getId() { return id; } public void set(Child c) { prop6 = c; } public void put(Child c) { if (prop20 == null) { prop20 = new HashMap<String, Child>(); } prop20.put(c.id, c); } public Parent() { } public Parent(String id) { this.id = id; } public Bean toBean() { return ConversionUtils.toBean(this); } } public static final String CHILD_SCHEMA_NAME = "ChildSchemaName"; @Config(name = CHILD_SCHEMA_NAME, desc = "a test class") public static class Child { @Id(desc = "desc") private String id; @Config(desc = "prop1Desc") private String prop1; @Config(desc = "prop2Desc") private Set<String> prop2; @Config(desc = "prop3Desc") private List<Integer> prop3 = Arrays.asList(1, 2, 3); @Config(desc = "prop4Desc") private DateTime prop4; @Config(desc = "prop5Desc") private DurationTime prop5; @Config(desc = "prop8Desc") private Byte prop8; @Min(1) @Config(desc = "prop9Desc") private long prop9 = 100000000; @Config(desc = "prop10Desc") private Short prop10; @Config(desc = "prop11Desc") private Float prop11; @Config(desc = "prop12Desc") private Double prop12; @Config(desc = "prop13Desc") private Boolean prop13; @Config(desc = "prop15Desc") private URL prop15; @Config(desc = "prop16Desc") private File prop16; @Config(desc = "prop17Desc") private List<File> prop17; @Config(desc = "prop18Desc") private List<URL> prop18; @Config(desc = "prop19Desc") private List<TimeUnit> prop19; public Child(String id) { this.id = id; } public BeanId getBeanId() { return BeanId.create(id, CHILD_SCHEMA_NAME); } public String getId() { return id; } public Child() { } public Set<String> getProp2() { return prop2; } public List<Integer> getProp3() { return prop3; } public DateTime getProp4() { return prop4; } public DurationTime 
getProp5() { return prop5; } public Byte getProp8() { return prop8; } public long getProp9() { return prop9; } public Short getProp10() { return prop10; } public Float getProp11() { return prop11; } public Double getProp12() { return prop12; } public Boolean getProp13() { return prop13; } public URL getProp15() { return prop15; } public File getProp16() { return prop16; } public List<File> getProp17() { return prop17; } public List<URL> getProp18() { return prop18; } public List<TimeUnit> getProp19() { return prop19; } public void setId(String id) { this.id = id; } public void setProp1(String prop1) { this.prop1 = prop1; } public void setProp2(Set<String> prop2) { this.prop2 = prop2; } public void setProp3(List<Integer> prop3) { this.prop3 = prop3; } public void setProp4(DateTime prop4) { this.prop4 = prop4; } public void setProp5(DurationTime prop5) { this.prop5 = prop5; } public void setProp8(Byte prop8) { this.prop8 = prop8; } public void setProp9(long prop9) { this.prop9 = prop9; } public void setProp10(Short prop10) { this.prop10 = prop10; } public void setProp11(Float prop11) { this.prop11 = prop11; } public void setProp12(Double prop12) { this.prop12 = prop12; } public void setProp13(Boolean prop13) { this.prop13 = prop13; } public void setProp15(URL prop15) { this.prop15 = prop15; } public void setProp16(File prop16) { this.prop16 = prop16; } public void setProp17(List<File> prop17) { this.prop17 = prop17; } public void setProp18(List<URL> prop18) { this.prop18 = prop18; } public void setProp19(List<TimeUnit> prop19) { this.prop19 = prop19; } public Bean toBean() { return ConversionUtils.toBean(this); } } public static final String SINGLETON_SCHEMA_NAME = "SingletonSchemaName"; @Config(name = SINGLETON_SCHEMA_NAME, desc = "") public static class Singleton { @Config(desc = "") private String property; @Config(desc="") private Parent parent; public BeanId getBeanId() { return BeanId.createSingleton(SINGLETON_SCHEMA_NAME); } public String getProperty() { 
return property; } public Parent getParent() { return parent; } public void setProperty(String property) { this.property = property; } } public static final String SINGLETON_PARENT_SCHEMA_NAME = "SingletonParentSchemaName"; @Config(name = SINGLETON_PARENT_SCHEMA_NAME, desc = "") public static class SingletonParent { @Config(desc = "") private Singleton singleton; @Config(desc = "") private String property; public SingletonParent() { } public String getProperty() { return property; } public Singleton getSingleton() { return singleton; } public void setSingleton(Singleton singleton) { this.singleton = singleton; } } public static final String VALIDATION_SCHEMA_NAME = "ValidationSchemaName"; @Config(name = VALIDATION_SCHEMA_NAME, desc = "JSR303 validation assertion") @SuppressWarnings("unused") public static class JSR303Validation { @Id(desc = "validationCheckId") private String id; @Config(desc = "Assert that JSR303 works as expected.") @FirstUpper @Size(min = 2, max = 50) private String prop; @Config(desc = "Assert that JSR303 works as expected.") @NotNull private Integer height; @Config(desc = "Assert that JSR303 works as expected.") @NotNull private Integer width; @Max(20) private int getArea() { // check for null, height and weight may not have been set. 
if (height != null && width != null) { return height * width; } return 0; } private JSR303Validation(String id) { this.id = id; } public JSR303Validation() { } public BeanId getBeanId() { return BeanId.create(id, VALIDATION_SCHEMA_NAME); } public void setId(String id) { this.id = id; } public void setProp(String prop) { this.prop = prop; } public void setHeight(Integer height) { this.height = height; } public void setWidth(Integer width) { this.width = width; } public String getId() { return id; } public String getProp() { return prop; } public Integer getHeight() { return height; } public Integer getWidth() { return width; } } @Config(name = "person", desc = "desc") public static class Person { @Id(desc = "") private String id; @Config(desc = "") private Person bestFriend; @Config(desc = "") private List<Person> closeFriends = new ArrayList<>(); @Config(desc = "") private Map<String, Person> colleauges = new HashMap<>(); public Person(String id) { this.id = id; } public Person() { } } @Config(name="A") public static class A { private String name; public A () { } public A (String name) { this.name = name; } public String getName() { return name; } } @Config(name="B") public static class B { private String name; public B () { } public B (String name) { this.name = name; } public String getName() { return name; } } }
apache-2.0
nashvail/zulip-mobile
src/account-info/AccountDetails.js
3151
/* @flow */
// Account-details screen body for a single Zulip user: shows the avatar,
// presence status and email, plus a button that starts a private
// conversation with that user.  Chooses a landscape or portrait layout
// based on the component's `layoutStyle` state.
import React, { PureComponent } from 'react';
import { StyleSheet, Text, View, Dimensions } from 'react-native';
import { Avatar, ZulipButton } from '../common';
import { BRAND_COLOR } from '../styles';
import { privateNarrow } from '../utils/narrow';
import UserStatusIndicator from '../common/UserStatusIndicator';
import mediumAvatarUrl from '../utils/mediumAvatar';
import LandscapeContent from './AccountDetailsContent.landscape';
import PortraitContent from './AccountDetailsContent.portrait';
import type { UserStatus, Auth, Actions } from '../types';

// Orientation tokens; `state.layoutStyle` holds one of these two values.
const ORIENTATION_PORTRAIT = 'PORTRAIT';
const ORIENTATION_LANDSCAPE = 'LANDSCAPE';

const styles = StyleSheet.create({
  // Email line under the avatar.
  info: {
    textAlign: 'center',
    fontSize: 18,
    color: BRAND_COLOR,
  },
  // Row holding the presence indicator + email.
  details: {
    flexDirection: 'row',
    justifyContent: 'center',
    alignItems: 'center',
    padding: 8,
  },
  statusIndicator: {
    width: 20,
    height: 20,
    marginRight: 5,
  },
  sendButton: {
    marginRight: 8,
    marginLeft: 8,
  },
});

export default class AccountDetails extends PureComponent {
  // Defaults to portrait until an orientation change is observed.
  state = {
    layoutStyle: ORIENTATION_PORTRAIT,
  };

  props: {
    email: string,
    status: UserStatus,
    avatarUrl: string,
    fullName: string,
    auth: Auth,
    actions: Actions,
    orientation: string,
  };

  // Narrow the app to a PM conversation with this user, then leave this screen.
  handleChatPress = () => {
    const { email, actions } = this.props;
    actions.doNarrow(privateNarrow(email));
    actions.navigateBack();
  };

  // NOTE(review): the layout `event` argument is ignored; the new layout is
  // taken from `props.orientation` instead — confirm the prop is already
  // updated by the time the child fires this callback.
  handleOrientationChange = (event: Object) => {
    this.setState({
      layoutStyle: this.props.orientation,
    });
  };

  // Square avatar sized to the given width; uses the medium-resolution URL.
  renderAvatar = (width: number) => (
    <Avatar
      avatarUrl={mediumAvatarUrl(this.props.avatarUrl)}
      name={this.props.fullName}
      size={width}
      status={this.props.status}
      realm={this.props.auth.realm}
      shape="square"
    />
  );

  renderSendPMButton = () => (
    <ZulipButton
      style={styles.sendButton}
      text="Send private message"
      onPress={this.handleChatPress}
      icon="md-mail"
    />
  );

  // Presence dot (only when a status is known) followed by the email address.
  renderUserDetails = () => (
    <View style={styles.details}>
      {this.props.status && (
        <UserStatusIndicator status={this.props.status} style={styles.statusIndicator} />
      )}
      <Text style={styles.info}>{this.props.email}</Text>
    </View>
  );

  // Picks one of the two pre-built layouts for the current orientation.
  renderContent = (orientation: string, landscapeContent, portraitContent) =>
    orientation === ORIENTATION_LANDSCAPE ? landscapeContent : portraitContent;

  render() {
    const { layoutStyle } = this.state;
    const screenWidth = Dimensions.get('window').width;

    // Both layouts receive the same render callbacks; only arrangement differs.
    return this.renderContent(
      layoutStyle,
      <LandscapeContent
        screenWidth={screenWidth}
        handleOrientationChange={this.handleOrientationChange}
        avatar={this.renderAvatar}
        userDetails={this.renderUserDetails}
        sendButton={this.renderSendPMButton}
      />,
      <PortraitContent
        screenWidth={screenWidth}
        handleOrientationChange={this.handleOrientationChange}
        avatar={this.renderAvatar}
        userDetails={this.renderUserDetails}
        sendButton={this.renderSendPMButton}
      />,
    );
  }
}
apache-2.0
Metaswitch/fuel-library
deployment/puppet/vmware/spec/defines/vmware_compute_ha_spec.rb
885
# rspec-puppet unit tests for the vmware::compute::ha defined type.
# The title is the cluster name; :index is appended to the per-instance
# resource names (service, config file, pacemaker resource).
require 'spec_helper'

describe 'vmware::compute::ha' do
  let(:title) { 'cluster1' }
  let(:params) { { :index => '0' } }

  it 'must create /etc/nova/nova-compute.d directory' do
    should contain_file('/etc/nova/nova-compute.d').with({
      'ensure' => 'directory',
      'owner' => 'nova',
      'group' => 'nova',
      'mode' => '0750'
    })
  end

  # Pacemaker (corosync) primitive for the per-cluster nova-compute instance.
  it 'should create service p_nova_compute_vmware_0' do
    should contain_cs_resource('p_nova_compute_vmware_0').with({
      'primitive_class' => 'ocf',
      'provided_by' => 'mirantis',
    })
  end

  # NOTE(review): this example reuses the previous example's description;
  # it actually checks the Service resource, not the cs_resource.
  it 'should create service p_nova_compute_vmware_0' do
    should contain_service('p_nova_compute_vmware_0')
  end

  # Ordering: config must be in place before pacemaker manages the resource.
  it 'should apply configuration file before corosync resource' do
    should contain_file('/etc/nova/nova-compute.d/vmware-0.conf').that_comes_before('Cs_resource[p_nova_compute_vmware_0]')
  end
end
apache-2.0
MaferYangPointJun/Spring.net
test/Spring/Spring.Aop.Tests/Aop/Framework/CountingThrowsAdvice.cs
1301
#region License

/*
 * Copyright © 2002-2005 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#endregion

#region Imports

using System;
using System.Reflection;
using Spring.Aop.Framework;

#endregion

namespace Spring.Aop.Framework
{
    /// <summary>
    /// Simple throw advice example that we can use for counting checks.
    /// Each handler records the thrown exception's type name via the
    /// inherited <c>Count</c> method, so tests can assert how often a
    /// given exception type was intercepted.
    /// </summary>
    /// <author>Rod Johnson</author>
    /// <author>Bruno Baia (.NET)</author>
    [Serializable]
    public class CountingThrowsAdvice : MethodCounter, IThrowsAdvice
    {
        /// <summary>
        /// Catch-all handler: counts any thrown exception by its type name.
        /// </summary>
        public void AfterThrowing(Exception ex)
        {
            Count(ex.GetType().Name);
        }

        /// <summary>
        /// More specific handler for <see cref="ApplicationException"/>;
        /// the AOP framework dispatches to the most specific overload.
        /// </summary>
        public void AfterThrowing(ApplicationException aex)
        {
            Count(aex.GetType().Name);
        }
    }
}
apache-2.0
ampproject/amppackager
vendor/github.com/sacloud/libsacloud/sacloud/gslb.go
6603
// Copyright 2016-2020 The Libsacloud Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//      http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package sacloud

import "fmt"

// GSLB represents a GSLB resource (a CommonServiceItem in the SAKURA Cloud API).
type GSLB struct {
	*Resource        // ID
	propName         // name
	propDescription  // description
	propServiceClass // service class
	propIcon         // icon
	propTags         // tags
	propCreatedAt    // creation timestamp
	propModifiedAt   // modification timestamp

	Status   GSLBStatus   `json:",omitempty"` // status
	Provider GSLBProvider `json:",omitempty"` // provider
	Settings GSLBSettings `json:",omitempty"` // GSLB settings
}

// GSLBSettings holds the GSLB configuration body.
type GSLBSettings struct {
	GSLB GSLBRecordSets `json:",omitempty"` // GSLB record set (entries)
}

// GSLBStatus is the read-only status of a GSLB.
type GSLBStatus struct {
	FQDN string `json:",omitempty"` // FQDN assigned to the GSLB
}

// GSLBProvider identifies the service provider class.
type GSLBProvider struct {
	Class string `json:",omitempty"` // provider class (always "gslb")
}

// CreateNewGSLB creates a new GSLB with the given name and default
// settings (10s check interval, HTTP health check, weighting enabled).
func CreateNewGSLB(gslbName string) *GSLB {
	return &GSLB{
		Resource: &Resource{},
		propName: propName{Name: gslbName},
		Provider: GSLBProvider{
			Class: "gslb",
		},
		Settings: GSLBSettings{
			GSLB: GSLBRecordSets{
				DelayLoop:   10,
				HealthCheck: defaultGSLBHealthCheck,
				Weighted:    "True",
				Servers:     []GSLBServer{},
			},
		},
	}
}

// AllowGSLBHealthCheckProtocol returns the list of health-check
// protocols the GSLB accepts.
func AllowGSLBHealthCheckProtocol() []string {
	return []string{"http", "https", "ping", "tcp"}
}

// HasGSLBServer reports whether any real server is registered under the GSLB.
func (g *GSLB) HasGSLBServer() bool {
	return len(g.Settings.GSLB.Servers) > 0
}

// CreateGSLBServer builds (but does not register) a server entry for the
// given IP address, enabled with weight "1".
func (g *GSLB) CreateGSLBServer(ip string) *GSLBServer {
	return &GSLBServer{
		IPAddress: ip,
		Enabled:   "True",
		Weight:    "1",
	}
}

// AddGSLBServer registers the server under the GSLB. If an entry with the
// same IP address already exists, its Enabled/Weight fields are updated
// in place instead of appending a duplicate.
func (g *GSLB) AddGSLBServer(server *GSLBServer) {
	var isExist = false
	for i := range g.Settings.GSLB.Servers {
		if g.Settings.GSLB.Servers[i].IPAddress == server.IPAddress {
			g.Settings.GSLB.Servers[i].Enabled = server.Enabled
			g.Settings.GSLB.Servers[i].Weight = server.Weight
			isExist = true
		}
	}
	if !isExist {
		g.Settings.GSLB.Servers = append(g.Settings.GSLB.Servers, *server)
	}
}

// ClearGSLBServer removes all server entries from the GSLB.
func (g *GSLB) ClearGSLBServer() {
	g.Settings.GSLB.Servers = []GSLBServer{}
}

// GSLBRecordSets is the GSLB entry set: health-check parameters and the
// list of real servers.
type GSLBRecordSets struct {
	DelayLoop   int             `json:",omitempty"` // check interval (seconds)
	HealthCheck GSLBHealthCheck `json:",omitempty"` // health check definition
	Weighted    string          `json:",omitempty"` // weighted response ("True"/"False")
	SorryServer string          `json:",omitempty"` // sorry (fallback) server
	Servers     []GSLBServer    // real servers
}

// AddServer appends a server entry for ip unless one with the same IP
// address is already present (in which case it is left untouched).
func (g *GSLBRecordSets) AddServer(ip string) {
	var record GSLBServer
	var isExist = false
	for i := range g.Servers {
		if g.Servers[i].IPAddress == ip {
			isExist = true
		}
	}
	if !isExist {
		record = GSLBServer{
			IPAddress: ip,
			Enabled:   "True",
			Weight:    "1",
		}
		g.Servers = append(g.Servers, record)
	}
}

// DeleteServer removes every server entry whose IP address equals ip.
func (g *GSLBRecordSets) DeleteServer(ip string) {
	res := []GSLBServer{}
	for i := range g.Servers {
		if g.Servers[i].IPAddress != ip {
			res = append(res, g.Servers[i])
		}
	}
	g.Servers = res
}

// GSLBServer is a real server behind the GSLB.
// NOTE: Enabled and Weight are strings ("True"/"False", numeric string)
// because that is how the upstream API serializes them.
type GSLBServer struct {
	IPAddress string `json:",omitempty"` // IP address
	Enabled   string `json:",omitempty"` // enabled flag
	Weight    string `json:",omitempty"` // weight
}

// GSLBHealthCheck defines how real servers are probed.
type GSLBHealthCheck struct {
	Protocol string `json:",omitempty"` // protocol (http/https/ping/tcp)
	Host     string `json:",omitempty"` // target Host header
	Path     string `json:",omitempty"` // request path for HTTP/HTTPS
	Status   string `json:",omitempty"` // expected HTTP status code
	Port     string `json:",omitempty"` // port number (tcp only)
}

// defaultGSLBHealthCheck probes "/" over HTTP and expects a 200 response.
var defaultGSLBHealthCheck = GSLBHealthCheck{
	Protocol: "http",
	Host:     "",
	Path:     "/",
	Status:   "200",
}

// SetHTTPHealthCheck configures an HTTP health check with the given Host
// header, request path, and expected response code.
func (g *GSLB) SetHTTPHealthCheck(hostHeader string, path string, responseCode int) {
	g.Settings.GSLB.HealthCheck.Protocol = "http"
	g.Settings.GSLB.HealthCheck.Host = hostHeader
	g.Settings.GSLB.HealthCheck.Path = path
	g.Settings.GSLB.HealthCheck.Status = fmt.Sprintf("%d", responseCode)
	g.Settings.GSLB.HealthCheck.Port = ""
}

// SetHTTPSHealthCheck configures an HTTPS health check with the given
// Host header, request path, and expected response code.
func (g *GSLB) SetHTTPSHealthCheck(hostHeader string, path string, responseCode int) {
	g.Settings.GSLB.HealthCheck.Protocol = "https"
	g.Settings.GSLB.HealthCheck.Host = hostHeader
	g.Settings.GSLB.HealthCheck.Path = path
	g.Settings.GSLB.HealthCheck.Status = fmt.Sprintf("%d", responseCode)
	g.Settings.GSLB.HealthCheck.Port = ""
}

// SetPingHealthCheck configures an ICMP ping health check, clearing all
// HTTP/TCP-specific fields.
func (g *GSLB) SetPingHealthCheck() {
	g.Settings.GSLB.HealthCheck.Protocol = "ping"
	g.Settings.GSLB.HealthCheck.Host = ""
	g.Settings.GSLB.HealthCheck.Path = ""
	g.Settings.GSLB.HealthCheck.Status = ""
	g.Settings.GSLB.HealthCheck.Port = ""
}

// SetTCPHealthCheck configures a TCP health check on the given port,
// clearing all HTTP-specific fields.
func (g *GSLB) SetTCPHealthCheck(port int) {
	g.Settings.GSLB.HealthCheck.Protocol = "tcp"
	g.Settings.GSLB.HealthCheck.Host = ""
	g.Settings.GSLB.HealthCheck.Path = ""
	g.Settings.GSLB.HealthCheck.Status = ""
	g.Settings.GSLB.HealthCheck.Port = fmt.Sprintf("%d", port)
}

// SetDelayLoop sets the health-check interval in seconds.
func (g *GSLB) SetDelayLoop(delayLoop int) {
	g.Settings.GSLB.DelayLoop = delayLoop
}

// SetWeightedEnable enables or disables weighted responses
// (serialized as the strings "True"/"False").
func (g *GSLB) SetWeightedEnable(enable bool) {
	v := "True"
	if !enable {
		v = "False"
	}
	g.Settings.GSLB.Weighted = v
}

// SetSorryServer sets the fallback server returned when all real servers
// are down.
func (g *GSLB) SetSorryServer(server string) {
	g.Settings.GSLB.SorryServer = server
}
apache-2.0
dmitrymex/oslo.messaging-check-tool
oslo_msg_check/client_app.py
1980
"""HTTP front-end used to exercise an oslo.messaging RPC server.

Each GET / issues one synchronous RPC call (``test_method``) to the
configured topic/server; ``timeout`` and ``delay`` query parameters
control the RPC timeout and the artificial server-side delay.
"""
import eventlet
# Must run before any other imports so stdlib networking is green-thread safe.
eventlet.monkey_patch()

import sys
import threading

import flask
from flask import request
from oslo_log import log
from oslo_config import cfg
import oslo_messaging as messaging

opts = [
    # NOTE(review): declared as StrOpt with an int default, but used as a
    # TCP port below — cfg.PortOpt/IntOpt would be the natural type; confirm
    # before changing, since CLI parsing semantics differ.
    cfg.StrOpt('listen_port',
               help='Client app will listen for HTTP requests on that port',
               default=5000),
    cfg.StrOpt('rpc_topic_name',
               help='RPC topic name.',
               default='test_rpc'),
    cfg.StrOpt('server_id',
               help='A string uniquely identifying target instance.',
               default='server123'),
]
CONF = cfg.CONF
CONF.register_cli_opts(opts)
log.register_options(CONF)

# Initialized in main() after config/log setup.
LOG = None

# Monotonically increasing id for correlating log lines; guarded by a lock
# because Flask runs threaded (see app.run(threaded=True)).
req_counter = 0
req_counter_lock = threading.Lock()

# Initialized in main() once the messaging transport is available.
rpc_client = None

app = flask.Flask(__name__)


@app.route('/')
def hello_world():
    """Handle GET /: fire one blocking RPC call and return an empty body.

    Query parameters:
        timeout: RPC timeout in seconds (default 60).
        delay:   seconds the server should sleep before replying (default 0).
    """
    global req_counter
    with req_counter_lock:
        request_id = req_counter
        req_counter += 1
    timeout = int(request.args.get('timeout', 60))
    delay = int(request.args.get('delay', 0))
    rpc_client.test_method(request_id, timeout, delay)
    return ''


class RpcClient(object):
    """Thin wrapper around an oslo.messaging RPCClient for one target."""

    def __init__(self, transport):
        target = messaging.Target(topic=CONF.rpc_topic_name,
                                  version='1.0',
                                  server=CONF.server_id)
        self._client = messaging.RPCClient(transport, target)

    def test_method(self, request_id, timeout, delay):
        """Synchronously call ``test_method`` on the server, logging
        before and after so latency/timeouts are visible in the log."""
        LOG.info('[request id: %i] Calling test_method' % request_id)
        # First positional arg is the (empty) request context dict.
        self._client.prepare(timeout=timeout).call(
            {}, 'test_method', delay=delay)
        LOG.info('[request id: %i] Server responded on our call' % request_id)


def main():
    """Parse CLI/config, set up logging and messaging, start the app.

    Binds to all interfaces; threaded=True so a slow RPC does not block
    other incoming HTTP requests.
    """
    global rpc_client
    global LOG
    CONF(sys.argv[1:], project='test_rpc_server')
    log.setup(CONF, 'test_rpc_server')
    LOG = log.getLogger(__name__)
    transport = messaging.get_transport(cfg.CONF)
    rpc_client = RpcClient(transport)
    app.run(host='0.0.0.0', port=CONF.listen_port, threaded=True)
apache-2.0
EvanMark/Spring-REST-API
src/main/java/mdx/gsd/dao/impl/SurveyBelbinDAOImpl.java
3107
package mdx.gsd.dao.impl;

import mdx.gsd.dao.SurveyBelbinDAO;
import mdx.gsd.data.model.SurveyBelbin;
import org.apache.log4j.Logger;
import org.hibernate.Session;
import org.hibernate.SessionFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;

import java.util.List;

/**
 * Hibernate-backed DAO for {@link SurveyBelbin} entities.
 *
 * <p>All operations use the current (transaction-bound) session. Lookups use
 * {@code Session.get}, which returns {@code null} when the row is absent —
 * unlike {@code Session.load}, which returns a proxy and throws lazily, making
 * null checks on its result unreachable (the original {@code removeSurveyBelbin}
 * had exactly that dead branch).
 *
 * Created by universe (E.) on 12/06/17.
 */
@Repository
public class SurveyBelbinDAOImpl implements SurveyBelbinDAO {

    @Autowired
    private Logger logger;

    private SessionFactory sessionFactory;

    @Autowired
    public void setSessionFactory(SessionFactory sessionFactory) {
        this.sessionFactory = sessionFactory;
    }

    /** Persists a new SurveyBelbin. */
    @Override
    public void addSurveyBelbin(SurveyBelbin surveyBelbin) {
        Session session = this.sessionFactory.getCurrentSession();
        session.persist(surveyBelbin);
        logger.info("SurveyBelbin has been successfully inserted in db. " + surveyBelbin.toString());
    }

    /** Updates an existing (detached) SurveyBelbin. */
    @Override
    public void updateSurveyBelbin(SurveyBelbin surveyBelbin) {
        Session session = this.sessionFactory.getCurrentSession();
        session.update(surveyBelbin);
        logger.info("SurveyBelbin has been successfully updated in db. " + surveyBelbin.toString());
    }

    /**
     * Looks up a SurveyBelbin by primary key.
     *
     * @return the entity, or {@code null} if no row exists for {@code id}
     */
    @Override
    public SurveyBelbin getSurveyBelbinById(Integer id) {
        Session session = this.sessionFactory.getCurrentSession();
        // get() hits the database and returns null when absent; no proxy,
        // no deferred ObjectNotFoundException to catch.
        SurveyBelbin surveyBelbin = session.get(SurveyBelbin.class, id);
        if (surveyBelbin == null) {
            return null;
        }
        logger.info("Returned SurveyBelbin " + surveyBelbin.toString());
        return surveyBelbin;
    }

    /** @return all SurveyBelbin rows (possibly empty, never null). */
    @Override
    public List<SurveyBelbin> getAllSurveyBelbin() {
        Session session = this.sessionFactory.getCurrentSession();
        // Typed query avoids the unchecked cast of the raw getResultList().
        List<SurveyBelbin> surveyBelbinList =
                session.createQuery("from SurveyBelbin", SurveyBelbin.class).getResultList();
        logger.info("Returned surveyBelbinList with size " + surveyBelbinList.size());
        return surveyBelbinList;
    }

    /**
     * @param id user id to filter on
     * @return all SurveyBelbin rows belonging to the given user
     */
    @Override
    public List<SurveyBelbin> getUserSurveyBelbin(String id) {
        Session session = this.sessionFactory.getCurrentSession();
        String hql = "from SurveyBelbin bel where bel.surveyBelbinUser.userId = :userId";
        List<SurveyBelbin> surveyBelbinList =
                session.createQuery(hql, SurveyBelbin.class)
                        .setParameter("userId", id)
                        .getResultList();
        logger.info("Returned list for user " + id + " with surveyBelbin: " + surveyBelbinList.size());
        return surveyBelbinList;
    }

    /**
     * Deletes the SurveyBelbin with the given id, if it exists.
     *
     * <p>Fix: the original used {@code session.load}, which never returns
     * {@code null} (it throws ObjectNotFoundException lazily), so the
     * "not found" branch could never run; {@code get} makes it reachable.
     */
    @Override
    public void removeSurveyBelbin(Integer id) {
        Session session = this.sessionFactory.getCurrentSession();
        SurveyBelbin surveyBelbin = session.get(SurveyBelbin.class, id);
        if (surveyBelbin != null) {
            session.delete(surveyBelbin);
            logger.info("SurveyBelbin " + surveyBelbin.getSurveyBelbinId() + " has been deleted.");
        } else {
            logger.info("SurveyBelbin " + id + " not found; nothing to delete.");
        }
    }
}
apache-2.0
sabriarabacioglu/engerek
gui/admin-gui/src/main/java/com/evolveum/midpoint/web/page/admin/workflow/PageWorkItem.java
37163
/* * Copyright (c) 2010-2013 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.web.page.admin.workflow; import com.evolveum.midpoint.prism.PrismObject; import com.evolveum.midpoint.prism.delta.ObjectDelta; import com.evolveum.midpoint.prism.polystring.PolyString; import com.evolveum.midpoint.prism.xml.XmlTypeConverter; import com.evolveum.midpoint.schema.result.OperationResult; import com.evolveum.midpoint.security.api.AuthorizationConstants; import com.evolveum.midpoint.security.api.MidPointPrincipal; import com.evolveum.midpoint.task.api.Task; import com.evolveum.midpoint.util.exception.ObjectNotFoundException; import com.evolveum.midpoint.util.exception.SchemaException; import com.evolveum.midpoint.util.exception.SystemException; import com.evolveum.midpoint.util.logging.LoggingUtils; import com.evolveum.midpoint.util.logging.Trace; import com.evolveum.midpoint.util.logging.TraceManager; import com.evolveum.midpoint.web.application.AuthorizationAction; import com.evolveum.midpoint.web.application.PageDescriptor; import com.evolveum.midpoint.web.component.AjaxButton; import com.evolveum.midpoint.web.component.AjaxSubmitButton; import com.evolveum.midpoint.web.component.accordion.Accordion; import com.evolveum.midpoint.web.component.accordion.AccordionItem; import com.evolveum.midpoint.web.component.model.delta.ContainerValuePanel; import com.evolveum.midpoint.web.component.model.delta.DeltaDto; import 
com.evolveum.midpoint.web.component.model.delta.DeltaPanel; import com.evolveum.midpoint.web.component.prism.ContainerStatus; import com.evolveum.midpoint.web.component.prism.ObjectWrapper; import com.evolveum.midpoint.web.component.prism.PrismObjectPanel; import com.evolveum.midpoint.web.component.util.LoadableModel; import com.evolveum.midpoint.web.component.util.ObjectWrapperUtil; import com.evolveum.midpoint.web.component.util.VisibleEnableBehaviour; import com.evolveum.midpoint.web.page.PageBase; import com.evolveum.midpoint.web.page.admin.workflow.dto.ProcessInstanceDto; import com.evolveum.midpoint.web.page.admin.workflow.dto.WorkItemDetailedDto; import com.evolveum.midpoint.web.page.admin.workflow.dto.WorkItemDto; import com.evolveum.midpoint.web.resource.img.ImgResources; import com.evolveum.midpoint.web.util.OnePageParameterEncoder; import com.evolveum.midpoint.web.util.WebMiscUtil; import com.evolveum.midpoint.wf.api.WorkflowManager; import com.evolveum.midpoint.xml.ns._public.common.common_3.ObjectType; import com.evolveum.midpoint.xml.ns._public.common.common_3.UserType; import com.evolveum.midpoint.xml.ns._public.common.common_3.WfProcessInstanceType; import com.evolveum.midpoint.xml.ns._public.common.common_3.WorkItemType; import com.evolveum.midpoint.xml.ns.model.workflow.common_forms_3.GeneralChangeApprovalWorkItemContents; import com.evolveum.midpoint.xml.ns.model.workflow.process_instance_state_3.ProcessInstanceState; import org.apache.wicket.Component; import org.apache.wicket.ajax.AjaxRequestTarget; import org.apache.wicket.ajax.form.AjaxFormComponentUpdatingBehavior; import org.apache.wicket.markup.html.basic.Label; import org.apache.wicket.markup.html.form.CheckBox; import org.apache.wicket.markup.html.form.Form; import org.apache.wicket.markup.html.panel.Panel; import org.apache.wicket.model.*; import org.apache.wicket.request.mapper.parameter.PageParameters; import org.apache.wicket.request.resource.PackageResourceReference; import 
org.springframework.security.core.context.SecurityContextHolder; /** * @author mederly */ @PageDescriptor(url = "/admin/workItem", encoder = OnePageParameterEncoder.class, action = { @AuthorizationAction(actionUri = PageAdminWorkItems.AUTH_WORK_ITEMS_ALL, label = PageAdminWorkItems.AUTH_WORK_ITEMS_ALL_LABEL, description = PageAdminWorkItems.AUTH_WORK_ITEMS_ALL_DESCRIPTION), @AuthorizationAction(actionUri = AuthorizationConstants.NS_AUTHORIZATION + "#workItem", label = "PageWorkItem.auth.workItem.label", description = "PageWorkItem.auth.workItem.description")}) public class PageWorkItem extends PageAdminWorkItems { private static final String DOT_CLASS = PageWorkItem.class.getName() + "."; private static final String OPERATION_LOAD_WORK_ITEM = DOT_CLASS + "loadWorkItem"; private static final String OPERATION_LOAD_PROCESS_INSTANCE = DOT_CLASS + "loadProcessInstance"; private static final String OPERATION_SAVE_WORK_ITEM = DOT_CLASS + "saveWorkItem"; private static final String OPERATION_CLAIM_WORK_ITEM = DOT_CLASS + "claimWorkItem"; private static final String OPERATION_RELEASE_WORK_ITEM = DOT_CLASS + "releaseWorkItem"; private static final String ID_ACCORDION = "accordion"; private static final String ID_DELTA_PANEL = "deltaPanel"; private static final Trace LOGGER = TraceManager.getTrace(PageWorkItem.class); private static final String ID_DELTA_INFO = "deltaInfo"; private static final String ID_REQUESTER_ACCORDION_INFO = "requesterAccordionInfo"; private static final String ID_REQUESTER_PANEL = "requesterPanel"; private static final String ID_OBJECT_OLD_ACCORDION_INFO = "objectOldAccordionInfo"; private static final String ID_OBJECT_OLD_PANEL = "objectOldPanel"; private static final String ID_OBJECT_NEW_ACCORDION_INFO = "objectNewAccordionInfo"; private static final String ID_OBJECT_NEW_PANEL = "objectNewPanel"; private static final String ID_ADDITIONAL_DATA_ACCORDION_INFO = "additionalDataAccordionInfo"; private static final String ID_ADDITIONAL_DATA_PANEL = 
"additionalDataPanel"; private static final String ID_PROCESS_INSTANCE_ACCORDION_INFO = "processInstanceAccordionInfo"; private static final String ID_PROCESS_INSTANCE_PANEL = "processInstancePanel"; private static final String ID_SHOW_TECHNICAL_INFORMATION = "showTechnicalInformation"; private PageParameters parameters; private IModel<WorkItemDetailedDto> workItemDtoModel; private IModel<ObjectWrapper> requesterModel; private IModel<ObjectWrapper> objectOldModel; private IModel<ObjectWrapper> objectNewModel; private IModel<ObjectWrapper> requestSpecificModel; private IModel<ObjectWrapper> additionalDataModel; private IModel<ObjectWrapper> trackingDataModel; private LoadableModel<ProcessInstanceDto> processInstanceDtoModel; private IModel<DeltaDto> deltaModel; private IModel<Boolean> showTechnicalInformationModel = new Model<Boolean>(); private Accordion additionalInfoAccordion; public PageWorkItem() { this(new PageParameters(), null); } public PageWorkItem(PageParameters parameters, PageBase previousPage) { this(parameters, previousPage, false); } public PageWorkItem(PageParameters parameters, PageBase previousPage, boolean reinitializePreviousPage) { this.parameters = parameters; setPreviousPage(previousPage); setReinitializePreviousPages(reinitializePreviousPage); requesterModel = new LoadableModel<ObjectWrapper>(false) { @Override protected ObjectWrapper load() { loadWorkItemDetailedDtoIfNecessary(); return getRequesterWrapper(); } }; objectOldModel = new LoadableModel<ObjectWrapper>(false) { @Override protected ObjectWrapper load() { loadWorkItemDetailedDtoIfNecessary(); return getObjectOldWrapper(); } }; objectNewModel = new LoadableModel<ObjectWrapper>(false) { @Override protected ObjectWrapper load() { loadWorkItemDetailedDtoIfNecessary(); return getObjectNewWrapper(); } }; requestSpecificModel = new LoadableModel<ObjectWrapper>(false) { @Override protected ObjectWrapper load() { loadWorkItemDetailedDtoIfNecessary(); return getRequestSpecificWrapper(); } }; 
additionalDataModel = new LoadableModel<ObjectWrapper>(false) { @Override protected ObjectWrapper load() { loadWorkItemDetailedDtoIfNecessary(); return getAdditionalDataWrapper(); } }; trackingDataModel = new LoadableModel<ObjectWrapper>(false) { @Override protected ObjectWrapper load() { loadWorkItemDetailedDtoIfNecessary(); return getTrackingDataWrapper(); } }; workItemDtoModel = new LoadableModel<WorkItemDetailedDto>(false) { @Override protected WorkItemDetailedDto load() { return loadWorkItemDetailedDtoIfNecessary(); } }; processInstanceDtoModel = new LoadableModel<ProcessInstanceDto>(false) { @Override protected ProcessInstanceDto load() { return loadProcessInstanceDto(); } }; deltaModel = new PropertyModel<DeltaDto>(workItemDtoModel, WorkItemDetailedDto.F_DELTA); initLayout(); } @Override protected IModel<String> createPageTitleModel() { return new LoadableModel<String>(false) { @Override protected String load() { return new PropertyModel<String>(workItemDtoModel, "name").getObject(); } }; } private ObjectWrapper getRequesterWrapper() { PrismObject<UserType> prism = workItemDtoModel.getObject().getWorkItem().getRequester().asPrismObject(); ContainerStatus status = ContainerStatus.MODIFYING; ObjectWrapper wrapper = ObjectWrapperUtil.createObjectWrapper( createStringResource("pageWorkItem.requester.description").getString(), // name (large font) PolyString.getOrig(prism.asObjectable().getName()), // description (smaller font) prism, status, this); if (wrapper.getResult() != null && !WebMiscUtil.isSuccessOrHandledError(wrapper.getResult())) { showResultInSession(wrapper.getResult()); } wrapper.setShowEmpty(false); wrapper.setMinimalized(true); wrapper.setShowAssignments(false); wrapper.setReadonly(true); return wrapper; } private ObjectWrapper getObjectOldWrapper() { GeneralChangeApprovalWorkItemContents wic = getGeneralChangeApprovalWorkItemContents(); ObjectType objectOld = wic.getObjectOld(); PrismObject<? 
extends ObjectType> prism; if (objectOld != null) { prism = objectOld.asPrismObject(); } else { prism = createEmptyUserObject(); } ContainerStatus status = ContainerStatus.MODIFYING; ObjectWrapper wrapper = ObjectWrapperUtil.createObjectWrapper( createStringResource("pageWorkItem.objectOld.description").getString(), // name (large font) PolyString.getOrig(prism.asObjectable().getName()), // description (smaller font) prism, status, this); if (wrapper.getResult() != null && !WebMiscUtil.isSuccessOrHandledError(wrapper.getResult())) { showResultInSession(wrapper.getResult()); } wrapper.setShowEmpty(false); wrapper.setMinimalized(true); wrapper.setShowAssignments(true); wrapper.setReadonly(true); return wrapper; } private GeneralChangeApprovalWorkItemContents getGeneralChangeApprovalWorkItemContents() { ObjectType contents = workItemDtoModel.getObject().getWorkItem().getContents(); if (contents instanceof GeneralChangeApprovalWorkItemContents) { return (GeneralChangeApprovalWorkItemContents) contents; } else { return null; } } private ObjectWrapper getObjectNewWrapper() { GeneralChangeApprovalWorkItemContents wic = getGeneralChangeApprovalWorkItemContents(); ObjectType objectNew = wic.getObjectNew(); PrismObject<? extends ObjectType> prism; if (objectNew != null) { prism = objectNew.asPrismObject(); } else { prism = createEmptyUserObject(); } ContainerStatus status = ContainerStatus.MODIFYING; ObjectWrapper wrapper = ObjectWrapperUtil.createObjectWrapper( createStringResource("pageWorkItem.objectNew.description").getString(), // name (large font) PolyString.getOrig(prism.asObjectable().getName()), // description (smaller font) prism, status, this); if (wrapper.getResult() != null && !WebMiscUtil.isSuccessOrHandledError(wrapper.getResult())) { showResultInSession(wrapper.getResult()); } wrapper.setShowEmpty(false); wrapper.setMinimalized(true); wrapper.setShowAssignments(true); wrapper.setReadonly(true); return wrapper; } private PrismObject<? 
extends ObjectType> createEmptyUserObject() { PrismObject<? extends ObjectType> p = new PrismObject<UserType>(UserType.COMPLEX_TYPE, UserType.class); try { getWorkflowManager().getPrismContext().adopt(p); } catch (SchemaException e) { // safe to convert; this should not occur throw new SystemException("Got schema exception when creating empty user object.", e); } return p; } private ObjectWrapper getRequestSpecificWrapper() { GeneralChangeApprovalWorkItemContents wic = getGeneralChangeApprovalWorkItemContents(); PrismObject prism = wic.getQuestionForm().asPrismObject(); ContainerStatus status = ContainerStatus.MODIFYING; try{ ObjectWrapper wrapper = ObjectWrapperUtil.createObjectWrapper("pageWorkItem.requestSpecifics", null, prism, status, this); // ObjectWrapper wrapper = new ObjectWrapper("pageWorkItem.requestSpecifics", null, prism, status); if (wrapper.getResult() != null && !WebMiscUtil.isSuccessOrHandledError(wrapper.getResult())) { showResultInSession(wrapper.getResult()); } wrapper.setShowEmpty(true); wrapper.setMinimalized(false); wrapper.setShowInheritedObjectAttributes(false); return wrapper; } catch (Exception ex){ throw new SystemException("Got schema exception when creating general change approval work item contents.", ex); } } private ObjectWrapper getAdditionalDataWrapper() { GeneralChangeApprovalWorkItemContents wic = getGeneralChangeApprovalWorkItemContents(); ObjectType relatedObject = wic.getRelatedObject(); PrismObject<? extends ObjectType> prism; if (relatedObject != null) { prism = relatedObject.asPrismObject(); } else { prism = createEmptyUserObject(); // not quite correct, but ... 
ok } ContainerStatus status = ContainerStatus.MODIFYING; try{ ObjectWrapper wrapper = ObjectWrapperUtil.createObjectWrapper( createStringResource("pageWorkItem.additionalData.description").getString(), // name (large font) PolyString.getOrig(prism.asObjectable().getName()), // description (smaller font) prism, status, this); // ObjectWrapper wrapper = new ObjectWrapper( // createStringResource("pageWorkItem.additionalData.description").getString(), // name (large font) // PolyString.getOrig(prism.asObjectable().getName()), // description (smaller font) // prism, status); if (wrapper.getResult() != null && !WebMiscUtil.isSuccessOrHandledError(wrapper.getResult())) { showResultInSession(wrapper.getResult()); } wrapper.setShowEmpty(false); wrapper.setMinimalized(true); wrapper.setReadonly(true); return wrapper; } catch (Exception ex){ LoggingUtils.logException(LOGGER, "Couldn't get work item.", ex); } return null; } private ObjectWrapper getTrackingDataWrapper() { PrismObject<? extends ObjectType> prism = workItemDtoModel.getObject().getWorkItem().getTrackingData().asPrismObject(); ContainerStatus status = ContainerStatus.MODIFYING; try { ObjectWrapper wrapper = ObjectWrapperUtil.createObjectWrapper("pageWorkItem.trackingData", null, prism, status, this); // ObjectWrapper wrapper = new // ObjectWrapper("pageWorkItem.trackingData", null, prism, status); if (wrapper.getResult() != null && !WebMiscUtil.isSuccessOrHandledError(wrapper.getResult())) { showResultInSession(wrapper.getResult()); } wrapper.setShowEmpty(false); wrapper.setMinimalized(true); wrapper.setReadonly(true); return wrapper; } catch (Exception ex) { LoggingUtils.logException(LOGGER, "Couldn't get work item.", ex); } return null; } private WorkItemDetailedDto loadWorkItemDetailedDtoIfNecessary() { if (((LoadableModel) workItemDtoModel).isLoaded()) { return workItemDtoModel.getObject(); } OperationResult result = new OperationResult(OPERATION_LOAD_WORK_ITEM); WorkItemDetailedDto workItemDetailedDto = 
null; WorkItemType workItem = null; try { WorkflowManager wfm = getWorkflowManager(); workItem = wfm.getWorkItemDetailsById(parameters.get(OnePageParameterEncoder.PARAMETER).toString(), result); workItemDetailedDto = new WorkItemDetailedDto(workItem, getPrismContext()); result.recordSuccessIfUnknown(); } catch (Exception ex) { result.recordFatalError("Couldn't get work item.", ex); LoggingUtils.logException(LOGGER, "Couldn't get work item.", ex); } if (!result.isSuccess()) { showResultInSession(result); throw getRestartResponseException(PageWorkItems.class); } return workItemDetailedDto; } private ProcessInstanceDto loadProcessInstanceDto() { OperationResult result = new OperationResult(OPERATION_LOAD_PROCESS_INSTANCE); WfProcessInstanceType processInstance; try { String taskId = parameters.get(OnePageParameterEncoder.PARAMETER).toString(); LOGGER.trace("Loading process instance for task {}", taskId); WorkflowManager wfm = getWorkflowManager(); processInstance = wfm.getProcessInstanceByWorkItemId(taskId, result); LOGGER.trace("Found process instance {}", processInstance); String shadowTaskOid = ((ProcessInstanceState) processInstance.getState()).getShadowTaskOid(); Task shadowTask = null; try { shadowTask = getTaskManager().getTask(shadowTaskOid, result); } catch (ObjectNotFoundException e) { // ok } result.recordSuccess(); return new ProcessInstanceDto(processInstance, shadowTask); } catch (ObjectNotFoundException ex) { result.recordWarning("Work item seems to be already closed."); LoggingUtils.logException(LOGGER, "Couldn't get process instance for work item; it might be already closed.", ex); showResultInSession(result); throw getRestartResponseException(PageWorkItems.class); } catch (Exception ex) { result.recordFatalError("Couldn't get process instance for work item.", ex); LoggingUtils.logException(LOGGER, "Couldn't get process instance for work item.", ex); showResultInSession(result); throw getRestartResponseException(PageWorkItems.class); } } private void 
initLayout() { Form mainForm = new Form("mainForm"); mainForm.setMultiPart(true); add(mainForm); Label requestedBy = new Label("requestedBy", new PropertyModel(requesterModel, "object.asObjectable.name")); mainForm.add(requestedBy); Label requestedByFullName = new Label("requestedByFullName", new PropertyModel(requesterModel, "object.asObjectable.fullName")); mainForm.add(requestedByFullName); Label requestedOn = new Label("requestedOn", new AbstractReadOnlyModel<String>() { @Override public String getObject() { ProcessInstanceDto dto = processInstanceDtoModel.getObject(); if (dto.getProcessInstance().getStartTimestamp() == null) { return ""; } return WebMiscUtil.formatDate(XmlTypeConverter.toDate(dto.getProcessInstance().getStartTimestamp())); } }); mainForm.add(requestedOn); Label workItemCreatedOn = new Label("workItemCreatedOn", new AbstractReadOnlyModel<String>() { @Override public String getObject() { WorkItemDetailedDto dto = workItemDtoModel.getObject(); if (dto.getWorkItem().getMetadata() == null || dto.getWorkItem().getMetadata().getCreateTimestamp() == null) { return ""; } return WebMiscUtil.formatDate(XmlTypeConverter.toDate(dto.getWorkItem().getMetadata().getCreateTimestamp())); } }); mainForm.add(workItemCreatedOn); Label assignee = new Label("assignee", new PropertyModel(workItemDtoModel, WorkItemDto.F_ASSIGNEE)); mainForm.add(assignee); Label candidates = new Label("candidates", new PropertyModel(workItemDtoModel, WorkItemDto.F_CANDIDATES)); mainForm.add(candidates); PrismObjectPanel requestSpecificForm = new PrismObjectPanel("requestSpecificForm", requestSpecificModel, new PackageResourceReference(ImgResources.class, ImgResources.DECISION_PRISM), mainForm) { @Override protected IModel<String> createDisplayName(IModel<ObjectWrapper> model) { return createStringResource("pageWorkItem.requestSpecific.description"); } @Override protected IModel<String> createDescription(IModel<ObjectWrapper> model) { return new Model<>(""); } }; 
mainForm.add(requestSpecificForm); additionalInfoAccordion = new Accordion(ID_ACCORDION); additionalInfoAccordion.setOutputMarkupId(true); additionalInfoAccordion.setMultipleSelect(true); additionalInfoAccordion.setExpanded(false); mainForm.add(additionalInfoAccordion); PrismObjectPanel requesterForm = new PrismObjectPanel("requesterForm", requesterModel, new PackageResourceReference(ImgResources.class, ImgResources.USER_PRISM), mainForm); requesterForm.add(new VisibleEnableBehaviour() { @Override public boolean isVisible() { return requesterModel != null && !requesterModel.getObject().getObject().isEmpty(); } }); mainForm.add(requesterForm); PrismObjectPanel objectOldForm = new PrismObjectPanel("objectOldForm", objectOldModel, new PackageResourceReference(ImgResources.class, ImgResources.USER_PRISM), mainForm); objectOldForm.add(new VisibleEnableBehaviour() { @Override public boolean isVisible() { return getGeneralChangeApprovalWorkItemContents() != null && getGeneralChangeApprovalWorkItemContents().getObjectOld() != null; } }); mainForm.add(objectOldForm); PrismObjectPanel objectNewForm = new PrismObjectPanel("objectNewForm", objectNewModel, new PackageResourceReference(ImgResources.class, ImgResources.USER_PRISM), mainForm); objectNewForm.add(new VisibleEnableBehaviour() { @Override public boolean isVisible() { return getGeneralChangeApprovalWorkItemContents() != null && getGeneralChangeApprovalWorkItemContents().getObjectNew() != null; } }); mainForm.add(objectNewForm); PrismObjectPanel additionalDataForm = new PrismObjectPanel("additionalDataForm", additionalDataModel, new PackageResourceReference(ImgResources.class, ImgResources.ROLE_PRISM), mainForm); mainForm.add(additionalDataForm); PrismObjectPanel trackingDataForm = new PrismObjectPanel("trackingDataForm", trackingDataModel, new PackageResourceReference(ImgResources.class, ImgResources.TRACKING_PRISM), mainForm) { @Override protected IModel<String> createDisplayName(IModel<ObjectWrapper> model) { return 
createStringResource("pageWorkItem.trackingData.description"); } @Override protected IModel<String> createDescription(IModel<ObjectWrapper> model) { return new Model(""); } }; mainForm.add(trackingDataForm); AccordionItem deltaInfo = new AccordionItem(ID_DELTA_INFO, new ResourceModel("pageWorkItem.delta")); deltaInfo.setOutputMarkupId(true); additionalInfoAccordion.getBodyContainer().add(deltaInfo); DeltaPanel deltaPanel = new DeltaPanel(ID_DELTA_PANEL, deltaModel); deltaInfo.getBodyContainer().add(deltaPanel); additionalInfoAccordion.getBodyContainer().add(createObjectAccordionItem(ID_REQUESTER_ACCORDION_INFO, ID_REQUESTER_PANEL, "pageWorkItem.accordionLabel.requester", new PropertyModel(workItemDtoModel, WorkItemDetailedDto.F_REQUESTER), true)); additionalInfoAccordion.getBodyContainer().add(createObjectAccordionItem(ID_OBJECT_OLD_ACCORDION_INFO, ID_OBJECT_OLD_PANEL, "pageWorkItem.accordionLabel.objectOld", new PropertyModel(workItemDtoModel, WorkItemDetailedDto.F_OBJECT_OLD), true)); additionalInfoAccordion.getBodyContainer().add(createObjectAccordionItem(ID_OBJECT_NEW_ACCORDION_INFO, ID_OBJECT_NEW_PANEL, "pageWorkItem.accordionLabel.objectNew", new PropertyModel(workItemDtoModel, WorkItemDetailedDto.F_OBJECT_NEW), true)); additionalInfoAccordion.getBodyContainer().add(createObjectAccordionItem(ID_ADDITIONAL_DATA_ACCORDION_INFO, ID_ADDITIONAL_DATA_PANEL, "pageWorkItem.accordionLabel.additionalData", new PropertyModel(workItemDtoModel, WorkItemDetailedDto.F_RELATED_OBJECT), true)); LOGGER.trace("processInstanceDtoModel = {}, loaded = {}", processInstanceDtoModel, processInstanceDtoModel.isLoaded()); ProcessInstanceDto processInstanceDto = processInstanceDtoModel.getObject(); WfProcessInstanceType processInstance = processInstanceDto.getProcessInstance(); additionalInfoAccordion.getBodyContainer().add(createAccordionItem(ID_PROCESS_INSTANCE_ACCORDION_INFO, "pageWorkItem.accordionLabel.processInstance", new ProcessInstancePanel(ID_PROCESS_INSTANCE_PANEL, 
processInstanceDtoModel), true)); initButtons(mainForm); } private Component createAccordionItem(String idAccordionInfo, String key, Panel panel, boolean isTechnical) { AccordionItem info = new AccordionItem(idAccordionInfo, new ResourceModel(key)); info.setOutputMarkupId(true); info.getBodyContainer().add(panel); if (isTechnical) { info.add(new VisibleEnableBehaviour() { @Override public boolean isVisible() { return Boolean.TRUE.equals(showTechnicalInformationModel.getObject()); } }); } return info; } private Component createObjectAccordionItem(String idAccordionInfo, String idPanel, String key, IModel model, boolean isTechnical) { AccordionItem info = new AccordionItem(idAccordionInfo, new ResourceModel(key)); info.setOutputMarkupId(true); ContainerValuePanel panel = new ContainerValuePanel(idPanel, model); info.getBodyContainer().add(panel); if (isTechnical) { info.add(new VisibleEnableBehaviour() { @Override public boolean isVisible() { return Boolean.TRUE.equals(showTechnicalInformationModel.getObject()); } }); } return info; } private void initButtons(Form mainForm) { VisibleEnableBehaviour isAllowedToSubmit = new VisibleEnableBehaviour() { @Override public boolean isVisible() { return getWorkflowManager().isCurrentUserAuthorizedToSubmit(workItemDtoModel.getObject().getWorkItem()); } }; VisibleEnableBehaviour isAllowedToClaim = new VisibleEnableBehaviour() { @Override public boolean isVisible() { return workItemDtoModel.getObject().getWorkItem().getAssigneeRef() == null && getWorkflowManager().isCurrentUserAuthorizedToClaim(workItemDtoModel.getObject().getWorkItem()); } }; VisibleEnableBehaviour isAllowedToRelease = new VisibleEnableBehaviour() { @Override public boolean isVisible() { WorkItemType workItem = workItemDtoModel.getObject().getWorkItem(); MidPointPrincipal principal; try { principal = (MidPointPrincipal) SecurityContextHolder.getContext().getAuthentication().getPrincipal(); } catch (ClassCastException e) { return false; } String principalOid = 
principal.getOid(); if (workItem.getAssigneeRef() == null || !workItem.getAssigneeRef().getOid().equals(principalOid)) { return false; } return !workItem.getCandidateUsersRef().isEmpty() || !workItem.getCandidateRolesRef().isEmpty(); } }; AjaxSubmitButton claim = new AjaxSubmitButton("claim", createStringResource("pageWorkItem.button.claim")) { @Override protected void onSubmit(AjaxRequestTarget target, Form<?> form) { claimPerformed(target); } @Override protected void onError(AjaxRequestTarget target, Form<?> form) { target.add(getFeedbackPanel()); } }; claim.add(isAllowedToClaim); mainForm.add(claim); AjaxSubmitButton release = new AjaxSubmitButton("release", createStringResource("pageWorkItem.button.release")) { @Override protected void onSubmit(AjaxRequestTarget target, Form<?> form) { releasePerformed(target); } @Override protected void onError(AjaxRequestTarget target, Form<?> form) { target.add(getFeedbackPanel()); } }; release.add(isAllowedToRelease); mainForm.add(release); AjaxSubmitButton approve = new AjaxSubmitButton("approve", createStringResource("pageWorkItem.button.approve")) { @Override protected void onSubmit(AjaxRequestTarget target, Form<?> form) { savePerformed(target, true); } @Override protected void onError(AjaxRequestTarget target, Form<?> form) { target.add(getFeedbackPanel()); } }; approve.add(isAllowedToSubmit); mainForm.add(approve); AjaxSubmitButton reject = new AjaxSubmitButton("reject", createStringResource("pageWorkItem.button.reject")) { @Override protected void onSubmit(AjaxRequestTarget target, Form<?> form) { savePerformed(target, false); } @Override protected void onError(AjaxRequestTarget target, Form<?> form) { target.add(getFeedbackPanel()); } }; reject.add(isAllowedToSubmit); mainForm.add(reject); // AjaxSubmitLinkButton done = new AjaxSubmitLinkButton("done", // createStringResource("pageWorkItem.button.done")) { // // @Override // protected void onSubmit(AjaxRequestTarget target, Form<?> form) { // try { // 
savePerformed(target); // } catch(RuntimeException e) { // LoggingUtils.logException(LOGGER, "Exception in savePerformed", e); // throw e; // } // } // // @Override // protected void onError(AjaxRequestTarget target, Form<?> form) { // target.add(getFeedbackPanel()); // } // }; // mainForm.add(done); AjaxButton cancel = new AjaxButton("cancel", createStringResource("pageWorkItem.button.cancel")) { @Override public void onClick(AjaxRequestTarget target) { cancelPerformed(target); } }; mainForm.add(cancel); CheckBox showTechnicalInformationBox = new CheckBox(ID_SHOW_TECHNICAL_INFORMATION, showTechnicalInformationModel); showTechnicalInformationBox.add(new AjaxFormComponentUpdatingBehavior("onchange") { @Override protected void onUpdate(AjaxRequestTarget target) { target.add(additionalInfoAccordion); } }); mainForm.add(showTechnicalInformationBox); } private void cancelPerformed(AjaxRequestTarget target) { goBack(PageWorkItems.class); } private void savePerformed(AjaxRequestTarget target, boolean decision) { LOGGER.debug("Saving work item changes."); OperationResult result = new OperationResult(OPERATION_SAVE_WORK_ITEM); ObjectWrapper rsWrapper = requestSpecificModel.getObject(); try { PrismObject object = rsWrapper.getObject(); ObjectDelta delta = rsWrapper.getObjectDelta(); delta.applyTo(object); getWorkflowManager().approveOrRejectWorkItemWithDetails(workItemDtoModel.getObject().getWorkItem().getWorkItemId(), object, decision, result); setReinitializePreviousPages(true); } catch (Exception ex) { result.recordFatalError("Couldn't save work item.", ex); LoggingUtils.logException(LOGGER, "Couldn't save work item", ex); } result.computeStatusIfUnknown(); if (!result.isSuccess()) { showResult(result); target.add(getFeedbackPanel()); } else { showResultInSession(result); goBack(PageWorkItems.class); } } private void claimPerformed(AjaxRequestTarget target) { OperationResult result = new OperationResult(OPERATION_CLAIM_WORK_ITEM); WorkflowManager workflowManagerImpl = 
getWorkflowManager(); try { workflowManagerImpl.claimWorkItem(workItemDtoModel.getObject().getWorkItem().getWorkItemId(), result); setReinitializePreviousPages(true); } catch (RuntimeException e) { result.recordFatalError("Couldn't claim work item due to an unexpected exception.", e); } result.computeStatusIfUnknown(); if (!result.isSuccess()) { showResult(result); target.add(getFeedbackPanel()); } else { showResultInSession(result); goBack(PageWorkItems.class); } } private void releasePerformed(AjaxRequestTarget target) { OperationResult result = new OperationResult(OPERATION_RELEASE_WORK_ITEM); WorkflowManager workflowManagerImpl = getWorkflowManager(); try { workflowManagerImpl.releaseWorkItem(workItemDtoModel.getObject().getWorkItem().getWorkItemId(), result); setReinitializePreviousPages(true); } catch (RuntimeException e) { result.recordFatalError("Couldn't release work item due to an unexpected exception.", e); } result.computeStatusIfUnknown(); if (!result.isSuccess()) { showResult(result); target.add(getFeedbackPanel()); } else { showResultInSession(result); goBack(PageWorkItems.class); } } @Override public PageBase reinitialize() { return new PageWorkItem(parameters, getPreviousPage(), true); } }
apache-2.0
EmaraLab/openvet
emareatech-lab/restful-services-layer/src/main/java/com/emaratech/services/models/package-info.java
566
@XmlJavaTypeAdapters({ @XmlJavaTypeAdapter(type=DateTime.class, value=JodaDateTimeAdapter.class), }) @XmlNameTransformer(DefaultNameGeneration.class) package com.emaratech.services.models; import com.emaratech.services.utils.DefaultNameGeneration; import com.emaratech.services.utils.JodaDateTimeAdapter; import org.eclipse.persistence.oxm.annotations.XmlNameTransformer; import org.joda.time.DateTime; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapter; import javax.xml.bind.annotation.adapters.XmlJavaTypeAdapters;
apache-2.0
sdinot/hipparchus
hipparchus-filtering/src/main/java/org/hipparchus/filtering/kalman/package-info.java
886
/* * Licensed to the Hipparchus project under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The Hipparchus project licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /** * Kalman filter. * @since 1.3 */ package org.hipparchus.filtering.kalman;
apache-2.0
S3S3L/jdbc
src/main/java/com/s3s3l/jdbc/exec/enumerations/SqlExecuteType.java
477
/** * Project Name:jdbc * File Name:SqlExecuteType.java * Package Name:com.s3s3l.jdbc.exec.enumerations * Date:2016年7月15日下午1:36:01 * Copyright (c) 2016, kehw.zwei@gmail.com All Rights Reserved. * */ package com.s3s3l.jdbc.exec.enumerations; /** * ClassName:SqlExecuteType <br> * Date: 2016年7月15日 下午1:36:01 <br> * * @author kehw_zwei * @version 1.0.0 * @since JDK 1.8 */ public enum SqlExecuteType { SELECT, UPDATE, INSERT, DELETE; }
apache-2.0
brendandouglas/intellij
aswb/src/com/google/idea/blaze/android/sync/sdk/AndroidSdkFromProjectView.java
5172
/*
 * Copyright 2016 The Bazel Authors. All rights reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.idea.blaze.android.sync.sdk;

import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.idea.blaze.android.projectview.AndroidMinSdkSection;
import com.google.idea.blaze.android.projectview.AndroidSdkPlatformSection;
import com.google.idea.blaze.android.sdk.BlazeSdkProvider;
import com.google.idea.blaze.android.sync.model.AndroidSdkPlatform;
import com.google.idea.blaze.base.projectview.ProjectViewSet;
import com.google.idea.blaze.base.projectview.ProjectViewSet.ProjectViewFile;
import com.google.idea.blaze.base.scope.BlazeContext;
import com.google.idea.blaze.base.scope.output.IssueOutput;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.pom.Navigatable;
import java.util.Collection;
import java.util.List;
import java.util.Set;
import javax.annotation.Nullable;
import org.jetbrains.android.sdk.AndroidPlatform;
import org.jetbrains.android.sdk.AndroidSdkAdditionalData;

/** Calculates AndroidSdkPlatform. */
public class AndroidSdkFromProjectView {

  /**
   * Resolves the {@link AndroidSdkPlatform} (SDK target hash + min SDK level) declared in the
   * project view.
   *
   * <p>On any failure an error is reported to {@code context} via {@link IssueOutput} and
   * {@code null} is returned: no Android SDKs installed, missing {@code android_sdk_platform}
   * attribute, or an {@code android_sdk_platform} value that matches no installed SDK. When
   * {@code android_min_sdk} is not set, the API level of the resolved SDK is used as the minimum.
   *
   * @param context sync context to report configuration errors to
   * @param projectViewSet parsed project view; may be {@code null}, in which case {@code null}
   *     is returned (after the "no SDK installed" check)
   * @return the resolved platform, or {@code null} if it could not be determined
   */
  @Nullable
  public static AndroidSdkPlatform getAndroidSdkPlatform(
      BlazeContext context, ProjectViewSet projectViewSet) {
    List<Sdk> sdks = BlazeSdkProvider.getInstance().getAllAndroidSdks();
    if (sdks.isEmpty()) {
      // No SDKs at all: attach a navigatable so the user can jump straight to the SDK manager.
      IssueOutput.error("No Android SDK configured. Please use the SDK manager to configure.")
          .navigatable(
              new Navigatable() {
                @Override
                public void navigate(boolean b) {
                  SdkUtil.openSdkManager();
                }

                @Override
                public boolean canNavigate() {
                  return true;
                }

                @Override
                public boolean canNavigateToSource() {
                  return false;
                }
              })
          .submit(context);
      return null;
    }
    if (projectViewSet == null) {
      return null;
    }

    String androidSdk = projectViewSet.getScalarValue(AndroidSdkPlatformSection.KEY).orElse(null);
    Integer androidMinSdk = projectViewSet.getScalarValue(AndroidMinSdkSection.KEY).orElse(null);

    if (androidSdk == null) {
      // Attribute missing entirely; point the error at the top-level project view file and list
      // the valid choices so the user can copy one in.
      ProjectViewFile projectViewFile = projectViewSet.getTopLevelProjectViewFile();
      IssueOutput.error(
              ("No android_sdk_platform set. Please set to an android platform. "
                  + "Available android_sdk_platforms are: "
                  + getAvailableTargetHashesAsList(sdks)))
          .inFile(projectViewFile != null ? projectViewFile.projectViewFile : null)
          .submit(context);
      return null;
    }

    Sdk sdk = BlazeSdkProvider.getInstance().findSdk(androidSdk);
    if (sdk == null) {
      // Attribute present but does not match any installed SDK target hash.
      ProjectViewFile projectViewFile = projectViewSet.getTopLevelProjectViewFile();
      IssueOutput.error(
              ("No such android_sdk_platform: '"
                  + androidSdk
                  + "'. "
                  + "Available android_sdk_platforms are: "
                  + getAvailableTargetHashesAsList(sdks)
                  + ". "
                  + "Please change android_sdk_platform or run SDK manager "
                  + "to download missing SDK platforms."))
          .inFile(projectViewFile != null ? projectViewFile.projectViewFile : null)
          .submit(context);
      return null;
    }

    if (androidMinSdk == null) {
      // android_min_sdk not given: default to the API level of the chosen platform.
      androidMinSdk = getAndroidSdkApiLevel(sdk);
    }
    return new AndroidSdkPlatform(androidSdk, androidMinSdk);
  }

  /**
   * Returns the distinct, alphabetically sorted target hashes of the given SDKs.
   * SDKs for which no target hash can be determined are skipped.
   */
  public static List<String> getAvailableSdkTargetHashes(Collection<Sdk> sdks) {
    Set<String> names = Sets.newHashSet();
    for (Sdk sdk : sdks) {
      String targetHash = BlazeSdkProvider.getInstance().getSdkTargetHash(sdk);
      if (targetHash != null) {
        names.add(targetHash);
      }
    }
    List<String> result = Lists.newArrayList(names);
    result.sort(String::compareTo);
    return result;
  }

  /** Comma-joins the available target hashes for use in error messages. */
  private static String getAvailableTargetHashesAsList(Collection<Sdk> sdks) {
    return Joiner.on(", ").join(getAvailableSdkTargetHashes(sdks));
  }

  /**
   * Extracts the API level from the SDK's additional data; falls back to 1
   * (the lowest valid Android API level) when platform data is unavailable.
   */
  private static int getAndroidSdkApiLevel(Sdk sdk) {
    int androidSdkApiLevel = 1;
    AndroidSdkAdditionalData additionalData = (AndroidSdkAdditionalData) sdk.getSdkAdditionalData();
    if (additionalData != null) {
      AndroidPlatform androidPlatform = additionalData.getAndroidPlatform();
      if (androidPlatform != null) {
        androidSdkApiLevel = androidPlatform.getApiLevel();
      }
    }
    return androidSdkApiLevel;
  }
}
apache-2.0
alvaromarco/CleanArchitecturePlugin
resources/templates/ServiceCleanArch.java
592
#if (${PACKAGE_NAME} && ${PACKAGE_NAME} != "")package ${PACKAGE_NAME};#end import ${PACKAGE_BASE_SERVICE}.${BASE_SERVICE}; import javax.inject.Inject; import retrofit2.http.GET; import retrofit2.http.Query; import rx.Observable; #parse("File Header.java") public class ${NAME} extends ${BASE_SERVICE} { private Service mService; @Inject public ${NAME}() { this.mService = getAdapter().create(Service.class); } // TODO: Create methods to conect with API private interface Service { // TODO: Complete with calls } }
apache-2.0
LQJJ/demo
126-go-common-master/app/service/main/vip/model/oldvip.go
1949
package model import "go-common/library/time" //VipUserInfo vip_user_info table for vip java type VipUserInfo struct { ID int64 `json:"id"` Mid int64 `form:"mid" validate:"required" json:"mid"` VipType int32 `form:"vipType" json:"vipType"` VipStatus int32 `form:"vipStatus" json:"vipStatus"` VipStartTime time.Time `form:"vipStartTime" validate:"required" json:"vipStartTime"` VipRecentTime time.Time `form:"vipRecentTime" json:"vipRecentTime"` VipOverdueTime time.Time `form:"vipOverdueTime" validate:"required" json:"vipOverdueTime"` AnnualVipOverdueTime time.Time `form:"annualVipOverdueTime" json:"annualVipOverdueTime"` Wander int8 `json:"wander"` AccessStatus int8 `json:"accessStatus"` Ctime time.Time `form:"ctime" validate:"required" json:"ctime"` Mtime time.Time `form:"mtime" validate:"required" json:"mtime"` Ver int64 `form:"ver" json:"ver"` AutoRenewed int8 `form:"autoRenewed" json:"autoRenewed"` IsAutoRenew int32 `form:"isAutoRenew" json:"isAutoRenew"` PayChannelID int32 `form:"payChannelId" json:"payChannelId"` IosOverdueTime time.Time `form:"iosOverdueTime" json:"iosOverdueTime"` } // ToNew convert old model to new. func (v *VipUserInfo) ToNew() (res *VipInfoDB) { return &VipInfoDB{ Mid: v.Mid, VipType: v.VipType, VipPayType: v.IsAutoRenew, PayChannelID: v.PayChannelID, VipStatus: v.VipStatus, VipStartTime: v.VipStartTime, VipRecentTime: v.VipRecentTime, VipOverdueTime: v.VipOverdueTime, AnnualVipOverdueTime: v.AnnualVipOverdueTime, Ctime: v.Ctime, Mtime: v.Ctime, IosOverdueTime: v.IosOverdueTime, Ver: v.Ver, } }
apache-2.0
simark/lolc
ca/polymtl/lol/grammar/VisiteurDOTOutput.java
4278
package ca.polymtl.lol.grammar; import ca.polymtl.lol.LoopOperation; class VisiteurDOTOutput extends LOLVisitorBasic { public static void main(String[] args) throws Exception { LOL lol = new LOL(System.in); if (args.length > 0 && args[0].equals("debug")) { lol.setDebug(true); } SimpleNode n = lol.Start(); VisiteurDOTOutput vis = new VisiteurDOTOutput(); n.jjtAccept(vis, null); System.err.println("DOTOutput Done."); } private int ticketDistributor = 0; // Comme lorsqu'on attend 2 heures au // registrariat. private int takeTicket() { ticketDistributor++; return ticketDistributor - 1; } @Override public Object visit(SimpleNode node, Object data) throws LOLVisitException { int n = node.jjtGetNumChildren(); int id = takeTicket(); String nodeName = "node" + id; String label = node.toString(); if (data != null) { label += "<br /> [" + data + "]"; } System.out.println(nodeName + " [ label=< " + label + " > ]"); for (int i = 0; i < n; i++) { Node child = node.jjtGetChild(i); int childId = (Integer) child.jjtAccept(this, null); String childName = "node" + childId; System.out.println(nodeName + " -> " + childName); } return id; } @Override public Object visit(ASTliteralValue node, Object data) throws LOLVisitException { return visit((SimpleNode) node, node.type.toString() + ":" + node.value); } @Override public Object visit(ASTvariableOrFunctionExpression node, Object data) throws LOLVisitException { return visit((SimpleNode) node, node.name); } @Override public Object visit(ASTvariableDeclaration node, Object data) throws LOLVisitException { return visit((SimpleNode) node, node.name); } @Override public Object visit(ASTfunctionDefinition node, Object data) throws LOLVisitException { String label = node.name = "("; if (node.params.size() > 0) { label += node.params.get(0); for (int i = 1; i < node.params.size(); i++) { label += ", " + node.params.get(i); } } label += ")"; return visit((SimpleNode) node, label); } @Override public Object visit(ASTstart node, Object data) 
throws LOLVisitException { System.out.println("digraph lol {"); System.out .println("node [shape=box, fixedsize=false, fontsize=9, fontname=\"monospace\", fontcolor=\"blue\"," + "width=.25, height=.25, color=\"black\", fillcolor=\"white\", style=\"filled, solid\"];" + "edge [arrowsize=.5, color=\"black\", style=\"\"]"); System.out.println(" ranksep=.5;"); System.out.println(" ordering=out;"); int id = (Integer) visit((SimpleNode) node, null); System.out.println("}"); return id; } @Override public Object visit(ASTprimaryExpression node, Object data) throws LOLVisitException { return visit((SimpleNode) node, node.type.toString()); } @Override public Object visit(ASTbinaryOperator node, Object data) throws LOLVisitException { return visit((SimpleNode) node, node.type.toString()); } @Override public Object visit(ASTassignationExpression node, Object data) throws LOLVisitException { return visit((SimpleNode) node, node.name); } @Override public Object visit(ASTinfiniteArityOperator node, Object data) throws LOLVisitException { return visit((SimpleNode) node, node.type.toString()); } @Override public Object visit(ASTcastVariableExpression node, Object data) throws LOLVisitException { return visit((SimpleNode) node, node.name + " to " + node.type.toString()); } @Override public Object visit(ASTcastExpression node, Object data) throws LOLVisitException { return visit((SimpleNode) node, "to " + node.type.toString()); } @Override public Object visit(ASTconsoleInput node, Object data) throws LOLVisitException { return visit((SimpleNode) node, node.variable); } @Override public Object visit(ASTloopExpression node, Object data) throws LOLVisitException { String label; if (node.operation != LoopOperation.CUSTOM) { label = "op:" + node.operation.toString(); } else { label = "op:" + node.customOperation; } label += "(" + node.loopVariable + ")"; if (node.loopLimit != null) { label += "<br />limit:" + node.loopLimit.toString(); } return visit((SimpleNode) node, label); } }
apache-2.0
azkanurunala/bisabelajarNew
pages/call/layout/headerHalf.php
604
<section id="footer" class="header-call white-text"> <div class="dark-bg-darker"> <div class=" no-padding container"> <div class="text-left"> <div class="col-sm-12 col-md-1 text-left"> <img class="header-margin logo-small img-responsive" src="img/call_logo.png"/> </div> <div class="header-margin col-sm-12 col-md-11 text-left"> <h3 class="">Hubungi Kami </h3> </div> </div> </div> </div> </section>
apache-2.0
camilesing/zstack
plugin/applianceVm/src/main/java/org/zstack/appliancevm/ApplianceVmCascadeExtension.java
32388
package org.zstack.appliancevm; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.transaction.annotation.Transactional; import org.zstack.core.cascade.AbstractAsyncCascadeExtension; import org.zstack.core.cascade.CascadeAction; import org.zstack.core.cascade.CascadeConstant; import org.zstack.core.cloudbus.CloudBus; import org.zstack.core.cloudbus.CloudBusListCallBack; import org.zstack.core.componentloader.PluginRegistry; import org.zstack.core.db.DatabaseFacade; import org.zstack.core.db.SimpleQuery; import org.zstack.core.db.SimpleQuery.Op; import org.zstack.core.workflow.*; import org.zstack.header.cluster.ClusterInventory; import org.zstack.header.cluster.ClusterVO; import org.zstack.header.core.Completion; import org.zstack.header.core.workflow.*; import org.zstack.header.errorcode.ErrorCode; import org.zstack.header.host.HostInventory; import org.zstack.header.host.HostVO; import org.zstack.header.host.HostVO_; import org.zstack.header.message.MessageReply; import org.zstack.header.network.l2.L2NetworkConstant; import org.zstack.header.network.l2.L2NetworkDetachStruct; import org.zstack.header.network.l2.L2NetworkVO; import org.zstack.header.network.l3.IpRangeInventory; import org.zstack.header.network.l3.IpRangeVO; import org.zstack.header.network.l3.L3NetworkInventory; import org.zstack.header.network.l3.L3NetworkVO; import org.zstack.header.storage.primary.PrimaryStorageConstant; import org.zstack.header.storage.primary.PrimaryStorageDetachStruct; import org.zstack.header.storage.primary.PrimaryStorageInventory; import org.zstack.header.storage.primary.PrimaryStorageVO; import org.zstack.header.vm.*; import org.zstack.header.volume.VolumeType; import org.zstack.header.zone.ZoneInventory; import org.zstack.header.zone.ZoneVO; import org.zstack.utils.CollectionUtils; import org.zstack.utils.Utils; import org.zstack.utils.function.Function; import org.zstack.utils.logging.CLogger; import javax.persistence.TypedQuery; 
import java.util.*; import java.util.concurrent.Callable; /** */ public class ApplianceVmCascadeExtension extends AbstractAsyncCascadeExtension { private static final CLogger logger = Utils.getLogger(ApplianceVmCascadeExtension.class); @Autowired private DatabaseFacade dbf; @Autowired private CloudBus bus; @Autowired private PluginRegistry pluginRgty; private static String NAME = ApplianceVmVO.class.getSimpleName(); protected static final int OP_NOPE = 0; protected static final int OP_MIGRATE = 1; protected static final int OP_DELETION = 2; protected int toDeleteOpCode(CascadeAction action) { if (PrimaryStorageVO.class.getSimpleName().equals(action.getParentIssuer())) { return OP_DELETION; } if (HostVO.class.getSimpleName().equals(action.getParentIssuer())) { if (ZoneVO.class.getSimpleName().equals(action.getRootIssuer())) { return OP_DELETION; } else { return OP_MIGRATE; } } if (L3NetworkVO.class.getSimpleName().equals(action.getParentIssuer())) { return OP_DELETION; } if (IpRangeVO.class.getSimpleName().equals(action.getParentIssuer()) && IpRangeVO.class.getSimpleName().equals(action.getRootIssuer())) { return OP_DELETION; } if (ApplianceVmVO.class.getSimpleName().equals(action.getParentIssuer())) { return OP_DELETION; } return OP_NOPE; } @Override public void asyncCascade(CascadeAction action, Completion completion) { if (action.isActionCode(CascadeConstant.DELETION_CHECK_CODE)) { handleDeletionCheck(action, completion); } else if (action.isActionCode(CascadeConstant.DELETION_DELETE_CODE, CascadeConstant.DELETION_FORCE_DELETE_CODE)) { handleDeletion(action, completion); } else if (action.isActionCode(CascadeConstant.DELETION_CLEANUP_CODE)) { handleDeletionCleanup(action, completion); } else if (action.isActionCode(PrimaryStorageConstant.PRIMARY_STORAGE_DETACH_CODE)) { handlePrimaryStorageDetach(action, completion); } else if (action.isActionCode(L2NetworkConstant.DETACH_L2NETWORK_CODE)) { handleL2NetworkDetach(action, completion); } else { completion.success(); 
} } @Transactional(readOnly = true) private List<VmInstanceVO> getVmFromL2NetworkDetached(List<L2NetworkDetachStruct> structs) { Set<VmInstanceVO> apvms = new HashSet<>(); for (L2NetworkDetachStruct s : structs) { String sql = "select vm" + " from VmInstanceVO vm, L2NetworkVO l2, L3NetworkVO l3, VmNicVO nic" + " where vm.type = :vmType" + " and vm.clusterUuid = :clusterUuid" + " and vm.state in (:vmStates)" + " and vm.uuid = nic.vmInstanceUuid" + " and nic.l3NetworkUuid = l3.uuid" + " and l3.l2NetworkUuid = l2.uuid" + " and l2.uuid = :l2Uuid"; TypedQuery<VmInstanceVO> q = dbf.getEntityManager().createQuery(sql, VmInstanceVO.class); q.setParameter("vmType", ApplianceVmConstant.APPLIANCE_VM_TYPE); q.setParameter("vmStates", Arrays.asList( VmInstanceState.Running, VmInstanceState.Migrating, VmInstanceState.Starting, VmInstanceState.Rebooting)); q.setParameter("clusterUuid", s.getClusterUuid()); q.setParameter("l2Uuid", s.getL2NetworkUuid()); apvms.addAll(q.getResultList()); } List<VmInstanceVO> ret = new ArrayList<>(apvms.size()); ret.addAll(apvms); return ret; } private void migrateOrStopVmOnClusterDetach(final List<VmInstanceVO> toMigrate, List<String> clusterUuids, final Completion completion) { SimpleQuery<HostVO> q = dbf.createQuery(HostVO.class); q.select(HostVO_.uuid); q.add(HostVO_.clusterUuid, Op.IN, clusterUuids); final List<String> avoidHostUuids = q.listValue(); final List<VmInstanceVO> toDelete = new ArrayList<>(); FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("handle-appliance-vm-for-cluster-detach")); chain.then(new ShareFlow() { @Override public void setup() { final List<MigrateVmMsg> migrateVmMsgs = CollectionUtils.transformToList(toMigrate, new Function<MigrateVmMsg, VmInstanceVO>() { @Override public MigrateVmMsg call(VmInstanceVO arg) { MigrateVmMsg msg = new MigrateVmMsg(); msg.setVmInstanceUuid(arg.getUuid()); msg.setAvoidHostUuids(avoidHostUuids); bus.makeTargetServiceIdByResourceUuid(msg, 
VmInstanceConstant.SERVICE_ID, arg.getUuid()); return msg; } }); flow(new NoRollbackFlow() { String __name__ = "migrate-appliance-vm"; @Override public void run(final FlowTrigger trigger, Map data) { bus.send(migrateVmMsgs, 2, new CloudBusListCallBack(trigger) { @Override public void run(List<MessageReply> replies) { for (MessageReply r : replies) { if (!r.isSuccess()) { VmInstanceVO apvm = toMigrate.get(replies.indexOf(r)); toDelete.add(apvm); logger.warn(String.format("failed to migrate appliance vm[uuid:%s, name:%s], %s. will try to delete it", apvm.getUuid(), r.getError(), apvm.getName())); } } trigger.next(); } }); } }); flow(new NoRollbackFlow() { String __name__ = "delete-appliance-vm"; @Override public void run(final FlowTrigger trigger, Map data) { if (toDelete.isEmpty()) { trigger.next(); return; } List<VmInstanceDeletionMsg> msgs = CollectionUtils.transformToList(toDelete, new Function<VmInstanceDeletionMsg, VmInstanceVO>() { @Override public VmInstanceDeletionMsg call(VmInstanceVO arg) { VmInstanceDeletionMsg msg = new VmInstanceDeletionMsg(); msg.setVmInstanceUuid(arg.getUuid()); bus.makeTargetServiceIdByResourceUuid(msg, VmInstanceConstant.SERVICE_ID, arg.getUuid()); return msg; } }); bus.send(msgs, 20, new CloudBusListCallBack(completion) { @Override public void run(List<MessageReply> replies) { for (MessageReply r : replies) { if (!r.isSuccess()) { VmInstanceVO apvm = toDelete.get(replies.indexOf(r)); logger.warn(String.format("failed to delete vm[uuid:%s] for cluster detached, %s. 
However, detaching will go on", apvm.getUuid(), r.getError())); } } trigger.next(); } }); } }); done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { completion.success(); } }); error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }); } }).start(); } private void handleL2NetworkDetach(CascadeAction action, final Completion completion) { List<L2NetworkDetachStruct> structs = action.getParentIssuerContext(); final List<VmInstanceVO> apvms = getVmFromL2NetworkDetached(structs); if (apvms.isEmpty()) { completion.success(); return; } List<String> clusterUuids = CollectionUtils.transformToList(structs, new Function<String, L2NetworkDetachStruct>() { @Override public String call(L2NetworkDetachStruct arg) { return arg.getClusterUuid(); } }); migrateOrStopVmOnClusterDetach(apvms, clusterUuids, completion); } @Transactional(readOnly = true) private List<VmInstanceVO> getVmForPrimaryStorageDetached(List<PrimaryStorageDetachStruct> structs) { Set<VmInstanceVO> vms = new HashSet<>(); for (PrimaryStorageDetachStruct s : structs) { String sql = "select vm" + " from VmInstanceVO vm, PrimaryStorageVO ps, VolumeVO vol" + " where vm.type = :vmType" + " and vm.state in (:vmStates)" + " and vm.clusterUuid = :clusterUuid" + " and vm.uuid = vol.vmInstanceUuid" + " and vol.primaryStorageUuid = :psUuid"; TypedQuery<VmInstanceVO> q = dbf.getEntityManager().createQuery(sql, VmInstanceVO.class); q.setParameter("vmType", ApplianceVmConstant.APPLIANCE_VM_TYPE); q.setParameter("vmStates", Arrays.asList( VmInstanceState.Running, VmInstanceState.Starting, VmInstanceState.Migrating, VmInstanceState.Rebooting)); q.setParameter("clusterUuid", s.getClusterUuid()); q.setParameter("psUuid", s.getPrimaryStorageUuid()); vms.addAll(q.getResultList()); } List<VmInstanceVO> ret = new ArrayList<>(vms.size()); ret.addAll(vms); return ret; } private void handlePrimaryStorageDetach(CascadeAction action, final 
Completion completion) { List<PrimaryStorageDetachStruct> structs = action.getParentIssuerContext(); final List<VmInstanceVO> vmInstanceVOs = getVmForPrimaryStorageDetached(structs); if (vmInstanceVOs.isEmpty()) { completion.success(); return; } List<String> clusterUuids = CollectionUtils.transformToList(structs, new Function<String, PrimaryStorageDetachStruct>() { @Override public String call(PrimaryStorageDetachStruct arg) { return arg.getClusterUuid(); } }); migrateOrStopVmOnClusterDetach(vmInstanceVOs, clusterUuids, completion); } private void handleDeletionCleanup(CascadeAction action, Completion completion) { dbf.eoCleanup(ApplianceVmVO.class); completion.success(); } protected void handleDeletion(final CascadeAction action, final Completion completion) { int op = toDeleteOpCode(action); if (op == OP_NOPE) { completion.success(); return; } final List<ApplianceVmInventory> apvms = apvmFromDeleteAction(action); if (apvms == null) { completion.success(); return; } final List<ApplianceVmInventory> apvmToMigrate = new ArrayList<ApplianceVmInventory>(); final List<ApplianceVmInventory> apvmToDelete = new ArrayList<ApplianceVmInventory>(); FlowChain chain = FlowChainBuilder.newShareFlowChain(); chain.setName(String.format("delete-cascade-for-appliance-vm")); if (op == OP_MIGRATE) { chain.then(new ShareFlow() { @Override public void setup() { for (ApplianceVmInventory apvm : apvms) { if (VmInstanceState.Running.toString().equals(apvm.getState())) { apvmToMigrate.add(apvm); } else { apvmToDelete.add(apvm); } } List<String> avoidHostUuids = null; if (action.getRootIssuer().equals(ClusterVO.class.getSimpleName())) { List<ClusterInventory> clusters = action.getRootIssuerContext(); List<String> clusterUuids = CollectionUtils.transformToList(clusters, new Function<String, ClusterInventory>() { @Override public String call(ClusterInventory arg) { return arg.getUuid(); } }); SimpleQuery<HostVO> q = dbf.createQuery(HostVO.class); q.select(HostVO_.uuid); 
q.add(HostVO_.clusterUuid, Op.IN, clusterUuids); avoidHostUuids = q.listValue(); } final List<String> finalAvoidHostUuids = avoidHostUuids; if (!apvmToMigrate.isEmpty()) { flow(new NoRollbackFlow() { String __name__ = "try-migrate-appliancevm"; @Override public void run(final FlowTrigger trigger, Map data) { final List<GetVmMigrationTargetHostMsg> gmsgs = CollectionUtils.transformToList(apvmToMigrate, new Function<GetVmMigrationTargetHostMsg, ApplianceVmInventory>() { @Override public GetVmMigrationTargetHostMsg call(ApplianceVmInventory arg) { GetVmMigrationTargetHostMsg gmsg = new GetVmMigrationTargetHostMsg(); gmsg.setVmInstanceUuid(arg.getUuid()); if (finalAvoidHostUuids != null) { gmsg.setAvoidHostUuids(finalAvoidHostUuids); } bus.makeTargetServiceIdByResourceUuid(gmsg, VmInstanceConstant.SERVICE_ID, arg.getUuid()); return gmsg; } }); bus.send(gmsgs, 1, new CloudBusListCallBack(trigger) { @Override public void run(List<MessageReply> replies) { List<ApplianceVmInventory> apvmCannotMigrate = new ArrayList<ApplianceVmInventory>(); for (MessageReply reply : replies) { if (!reply.isSuccess() || ((GetVmMigrationTargetHostReply) reply).getHosts().isEmpty()) { ApplianceVmInventory apvm = apvmToMigrate.get(replies.indexOf(reply)); apvmCannotMigrate.add(apvm); } } apvmToMigrate.removeAll(apvmCannotMigrate); apvmToDelete.addAll(apvmCannotMigrate); trigger.next(); } }); } }); flow(new NoRollbackFlow() { String __name__ = "migrate-appliancevm"; @Override public void run(final FlowTrigger trigger, Map data) { if (apvmToMigrate.isEmpty()) { trigger.next(); return; } List<MigrateVmMsg> mmsgs = CollectionUtils.transformToList(apvmToMigrate, new Function<MigrateVmMsg, ApplianceVmInventory>() { @Override public MigrateVmMsg call(ApplianceVmInventory arg) { MigrateVmMsg mmsg = new MigrateVmMsg(); mmsg.setVmInstanceUuid(arg.getUuid()); mmsg.setAvoidHostUuids(finalAvoidHostUuids); bus.makeTargetServiceIdByResourceUuid(mmsg, VmInstanceConstant.SERVICE_ID, arg.getUuid()); return 
mmsg; } }); bus.send(mmsgs, 2, new CloudBusListCallBack(trigger) { @Override public void run(List<MessageReply> replies) { for (MessageReply r : replies) { if (!r.isSuccess()) { ApplianceVmInventory apvm = apvmToMigrate.get(replies.indexOf(r)); apvmToDelete.add(apvm); } } trigger.next(); } }); } }); } } }); } else if (op == OP_DELETION) { apvmToDelete.addAll(apvms); } chain.then(new ShareFlow() { @Override public void setup() { flow(new NoRollbackFlow() { String __name__ = "delete-appliancevm"; @Override public void run(final FlowTrigger trigger, Map data) { if (apvmToDelete.isEmpty()) { trigger.next(); return; } List<VmInstanceDeletionMsg> msgs = CollectionUtils.transformToList(apvmToDelete, new Function<VmInstanceDeletionMsg, ApplianceVmInventory>() { @Override public VmInstanceDeletionMsg call(ApplianceVmInventory arg) { VmInstanceDeletionMsg msg = new VmInstanceDeletionMsg(); msg.setForceDelete(action.isActionCode(CascadeConstant.DELETION_FORCE_DELETE_CODE)); msg.setVmInstanceUuid(arg.getUuid()); bus.makeTargetServiceIdByResourceUuid(msg, VmInstanceConstant.SERVICE_ID, arg.getUuid()); return msg; } }); bus.send(msgs, 20, new CloudBusListCallBack(completion) { @Override public void run(List<MessageReply> replies) { if (!action.isActionCode(CascadeConstant.DELETION_FORCE_DELETE_CODE)) { for (MessageReply r : replies) { if (!r.isSuccess()) { trigger.fail(r.getError()); return; } } } trigger.next(); } }); } }); } }).done(new FlowDoneHandler(completion) { @Override public void handle(Map data) { completion.success(); } }).error(new FlowErrorHandler(completion) { @Override public void handle(ErrorCode errCode, Map data) { completion.fail(errCode); } }).start(); } private void handleDeletionCheck(CascadeAction action, Completion completion) { completion.success(); } @Override public List<String> getEdgeNames() { return Arrays.asList(HostVO.class.getSimpleName(), L3NetworkVO.class.getSimpleName(), IpRangeVO.class.getSimpleName(), PrimaryStorageVO.class.getSimpleName(), 
L2NetworkVO.class.getSimpleName()); } @Override public String getCascadeResourceName() { return NAME; } @Transactional protected List<ApplianceVmInventory> apvmFromDeleteAction(CascadeAction action) { List<ApplianceVmInventory> ret = null; if (HostVO.class.getSimpleName().equals(action.getParentIssuer())) { List<HostInventory> hosts = action.getParentIssuerContext(); List<String> huuids = CollectionUtils.transformToList(hosts, new Function<String, HostInventory>() { @Override public String call(HostInventory arg) { return arg.getUuid(); } }); Map<String, ApplianceVmVO> vmvos = new HashMap<String, ApplianceVmVO>(); SimpleQuery<ApplianceVmVO> q = dbf.createQuery(ApplianceVmVO.class); q.add(ApplianceVmVO_.hostUuid, Op.IN, huuids); List<ApplianceVmVO> lst = q.list(); for (ApplianceVmVO vo : lst) { vmvos.put(vo.getUuid(), vo); } if (ClusterVO.class.getSimpleName().equals(action.getRootIssuer())) { List<ClusterInventory> clusters = action.getRootIssuerContext(); List<String> clusterUuids = CollectionUtils.transformToList(clusters, new Function<String, ClusterInventory>() { @Override public String call(ClusterInventory arg) { return arg.getUuid(); } }); q = dbf.createQuery(ApplianceVmVO.class); q.add(ApplianceVmVO_.clusterUuid, Op.IN, clusterUuids); lst = q.list(); for (ApplianceVmVO vo : lst) { vmvos.put(vo.getUuid(), vo); } } else if (ZoneVO.class.getSimpleName().equals(action.getRootIssuer())) { List<ZoneInventory> zones = action.getRootIssuerContext(); List<String> zoneUuids = CollectionUtils.transformToList(zones, new Function<String, ZoneInventory>() { @Override public String call(ZoneInventory arg) { return arg.getUuid(); } }); q = dbf.createQuery(ApplianceVmVO.class); q.add(ApplianceVmVO_.zoneUuid, Op.IN, zoneUuids); lst = q.list(); for (ApplianceVmVO vo : lst) { vmvos.put(vo.getUuid(), vo); } } if (!vmvos.isEmpty()) { ret = ApplianceVmInventory.valueOf1(vmvos.values()); } } else if (NAME.equals(action.getParentIssuer())) { return action.getParentIssuerContext(); 
} else if (PrimaryStorageVO.class.getSimpleName().equals(action.getParentIssuer())) { final List<String> pruuids = CollectionUtils.transformToList((List<PrimaryStorageInventory>) action.getParentIssuerContext(), new Function<String, PrimaryStorageInventory>() { @Override public String call(PrimaryStorageInventory arg) { return arg.getUuid(); } }); List<ApplianceVmVO> vmvos = new Callable<List<ApplianceVmVO>>() { @Override @Transactional(readOnly = true) public List<ApplianceVmVO> call() { String sql = "select vm from ApplianceVmVO vm, VolumeVO vol, PrimaryStorageVO pr where vm.uuid = vol.vmInstanceUuid" + " and vol.primaryStorageUuid = pr.uuid and vol.type = :volType and pr.uuid in (:uuids)"; TypedQuery<ApplianceVmVO> q = dbf.getEntityManager().createQuery(sql, ApplianceVmVO.class); q.setParameter("uuids", pruuids); q.setParameter("volType", VolumeType.Root); return q.getResultList(); } }.call(); if (!vmvos.isEmpty()) { ret = ApplianceVmInventory.valueOf1(vmvos); } } else if (L3NetworkVO.class.getSimpleName().equals(action.getParentIssuer())) { List<L3NetworkInventory> l3s = action.getParentIssuerContext(); List<String> l3uuids = CollectionUtils.transformToList(l3s, new Function<String, L3NetworkInventory>() { @Override public String call(L3NetworkInventory arg) { return arg.getUuid(); } }); String sql = "select apvm from ApplianceVmVO apvm where apvm.uuid in (select nic.vmInstanceUuid from VmNicVO nic where nic.l3NetworkUuid in (:l3Uuids))"; TypedQuery<ApplianceVmVO> q = dbf.getEntityManager().createQuery(sql, ApplianceVmVO.class); q.setParameter("l3Uuids", l3uuids); List<ApplianceVmVO> apvms = q.getResultList(); if (!apvms.isEmpty()) { for (ApvmCascadeFilterExtensionPoint ext : pluginRgty.getExtensionList(ApvmCascadeFilterExtensionPoint.class)) { apvms = ext.filterApplianceVmCascade(apvms, action.getParentIssuer(), l3uuids); } ret = ApplianceVmInventory.valueOf1(apvms); } } else if (IpRangeVO.class.getSimpleName().equals(action.getParentIssuer())) { final 
List<String> ipruuids = CollectionUtils.transformToList((List<IpRangeInventory>) action.getParentIssuerContext(), new Function<String, IpRangeInventory>() { @Override public String call(IpRangeInventory arg) { return arg.getUuid(); } }); List<ApplianceVmVO> vmvos = new Callable<List<ApplianceVmVO>>() { @Override @Transactional(readOnly = true) public List<ApplianceVmVO> call() { String sql = "select vm from ApplianceVmVO vm, VmNicVO nic, UsedIpVO ip, IpRangeVO ipr where vm.uuid = nic.vmInstanceUuid" + " and nic.usedIpUuid = ip.uuid and ip.ipRangeUuid = ipr.uuid and ipr.uuid in (:uuids)"; TypedQuery<ApplianceVmVO> q = dbf.getEntityManager().createQuery(sql, ApplianceVmVO.class); q.setParameter("uuids", ipruuids); return q.getResultList(); } }.call(); // find out appliance vm whose ip is gateway of ip range final List<String> iprL3Uuids = CollectionUtils.transformToList((List<IpRangeInventory>) action.getParentIssuerContext(), new Function<String, IpRangeInventory>() { @Override public String call(IpRangeInventory arg) { return arg.getL3NetworkUuid(); } }); List<ApplianceVmVO> vmvos1 = new Callable<List<ApplianceVmVO>>() { @Override @Transactional(readOnly = true) public List<ApplianceVmVO> call() { String sql = "select vm from ApplianceVmVO vm, VmNicVO nic where vm.uuid = nic.vmInstanceUuid and nic.l3NetworkUuid in (:l3uuids)"; TypedQuery<ApplianceVmVO> q = dbf.getEntityManager().createQuery(sql, ApplianceVmVO.class); q.setParameter("l3uuids", iprL3Uuids); return q.getResultList(); } }.call(); if (!vmvos1.isEmpty()) { for (final IpRangeInventory ipr : (List<IpRangeInventory>) action.getParentIssuerContext()) { for (ApplianceVmVO vm : vmvos1) { for (VmNicVO nic : vm.getVmNics()) { if (ipr.getGateway().equals(nic.getIp())) { vmvos.add(vm); } } } } } for (ApvmCascadeFilterExtensionPoint ext : pluginRgty.getExtensionList(ApvmCascadeFilterExtensionPoint.class)) { vmvos = ext.filterApplianceVmCascade(vmvos, action.getParentIssuer(), ipruuids); } if (!vmvos.isEmpty()) { 
ret = ApplianceVmInventory.valueOf1(vmvos); } } return ret; } @Override public CascadeAction createActionForChildResource(CascadeAction action) { if (CascadeConstant.DELETION_CODES.contains(action.getActionCode())) { int op = toDeleteOpCode(action); if (op == OP_NOPE) { return null; } else { List<ApplianceVmInventory> apvms = apvmFromDeleteAction(action); return action.copy().setParentIssuer(NAME).setParentIssuerContext(apvms); } } else { return null; } } }
apache-2.0
BigFav/mathjs
lib/function/arithmetic/round.js
3751
'use strict'; module.exports = function (math) { var util = require('../../util/index'), BigNumber = math.type.BigNumber, Complex = require('../../type/Complex'), collection = require('../../type/collection'), isNumber = util.number.isNumber, isInteger = util.number.isInteger, isBoolean = util['boolean'].isBoolean, isComplex = Complex.isComplex, isCollection = collection.isCollection; /** * Round a value towards the nearest integer. * For matrices, the function is evaluated element wise. * * Syntax: * * math.round(x) * math.round(x, n) * * Examples: * * math.round(3.2); // returns Number 3 * math.round(3.8); // returns Number 4 * math.round(-4.2); // returns Number -4 * math.round(-4.7); // returns Number -5 * math.round(math.pi, 3); // returns Number 3.142 * math.round(123.45678, 2); // returns Number 123.46 * * var c = math.complex(3.2, -2.7); * math.round(c); // returns Complex 3 - 3i * * math.round([3.2, 3.8, -4.7]); // returns Array [3, 4, -5] * * See also: * * ceil, fix, floor * * @param {Number | BigNumber | Boolean | Complex | Array | Matrix | null} x Number to be rounded * @param {Number | BigNumber | Boolean | Array | null} [n=0] Number of decimals * @return {Number | BigNumber | Complex | Array | Matrix} Rounded value */ math.round = function round(x, n) { if (arguments.length != 1 && arguments.length != 2) { throw new math.error.ArgumentsError('round', arguments.length, 1, 2); } if (n == undefined) { // round (x) if (isNumber(x)) { return Math.round(x); } if (isComplex(x)) { return new Complex ( Math.round(x.re), Math.round(x.im) ); } if (x instanceof BigNumber) { return x.toDecimalPlaces(0); } if (isCollection(x)) { return collection.deepMap(x, round); } if (isBoolean(x) || x === null) { return Math.round(x); } throw new math.error.UnsupportedTypeError('round', math['typeof'](x)); } else { // round (x, n) if (!isNumber(n) || !isInteger(n)) { if (n instanceof BigNumber) { n = parseFloat(n.valueOf()); } else if (isBoolean(n) || x === null) { return 
round(x, +n); } else { throw new TypeError('Number of decimals in function round must be an integer'); } } if (n < 0 || n > 15) { throw new Error ('Number of decimals in function round must be in te range of 0-15'); } if (isNumber(x)) { return roundNumber(x, n); } if (isComplex(x)) { return new Complex ( roundNumber(x.re, n), roundNumber(x.im, n) ); } if (x instanceof BigNumber) { return x.toDecimalPlaces(n); } if (isCollection(x) || isCollection(n)) { return collection.deepMap2(x, n, round); } if (isBoolean(x) || x === null) { return round(+x, n); } throw new math.error.UnsupportedTypeError('round', math['typeof'](x), math['typeof'](n)); } }; /** * round a number to the given number of decimals, or to zero if decimals is * not provided * @param {Number} value * @param {Number} decimals number of decimals, between 0 and 15 (0 by default) * @return {Number} roundedValue */ function roundNumber (value, decimals) { var p = Math.pow(10, decimals); return Math.round(value * p) / p; } };
apache-2.0
blackcathacker/kc.preclean
coeus-code/src/main/java/org/kuali/coeus/common/framework/mail/KcEmailService.java
1216
/* * Copyright 2005-2014 The Kuali Foundation * * Licensed under the Educational Community License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.opensource.org/licenses/ecl1.php * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kuali.coeus.common.framework.mail; import java.util.List; import java.util.Set; public interface KcEmailService { public void sendEmail(String from, Set<String> toAddresses, String subject, Set<String> ccAddresses, Set<String> bccAddresses, String body, boolean htmlMessage); public void sendEmailWithAttachments(String from, Set<String> toAddresses, String subject, Set<String> ccAddresses, Set<String> bccAddresses, String body, boolean htmlMessage, List<EmailAttachment> attachments); public String getDefaultFromAddress(); }
apache-2.0
moumie/Collatexserver
server/models/user.js
431
var mongoose = require("mongoose"); //mongoose.connect('mongodb://localhost:27017/demoDb'); var dbUrl= 'mongodb://localhost:27017/test'; mongoose.createConnection(dbUrl); // create instance of Schema var mongoSchema = mongoose.Schema; // create schema var userSchema = { "userEmail" : String, "userPassword" : String }; // create model if not exists. module.exports = mongoose.model('users',userSchema);
apache-2.0
akjava/gwt-three.js-test
src/com/akjava/gwt/three/client/java/bone/CloseVertexAutoWeight.java
2050
package com.akjava.gwt.three.client.java.bone; import com.akjava.gwt.lib.client.JavaScriptUtils; import com.akjava.gwt.three.client.js.core.Geometry; import com.akjava.gwt.three.client.js.math.Vector3; import com.akjava.gwt.three.client.js.math.Vector4; import com.google.gwt.core.client.JsArray; public class CloseVertexAutoWeight { //TODO averaging public WeightResult autoWeight(Geometry geometry,Geometry targetGeometry){ return autoWeight(geometry, targetGeometry, 0); } public WeightResult autoWeight(Geometry geometry,Geometry targetGeometry,double maxDistance){ JsArray<Vector4> bodyIndices=JavaScriptUtils.createJSArray(); JsArray<Vector4> bodyWeight=JavaScriptUtils.createJSArray(); for(int i=0;i<geometry.vertices().length();i++){ JsArray<DistanceVertexIndex> array=JavaScriptUtils.createJSArray(); Vector3 origin=geometry.vertices().get(i); for(int j=0;j<targetGeometry.vertices().length();j++){ Vector3 target=targetGeometry.vertices().get(j); double distance=target.distanceTo(origin); if(maxDistance>0 && distance>maxDistance){ //skip }else{ array.push(DistanceVertexIndex.create(j,distance)); } /* DistanceVertexIndex dvi=array.get(j); if(dvi==null){ dvi=DistanceVertexIndex.create(j,distance); array.set(j, dvi); }else{ dvi.setDistance(distance); dvi.setVertexIndex(j); }*/ } sort(array); int index=array.get(0).getVertexIndex();//closed,TODO //LogUtils.log("closed:"+index); bodyIndices.push(targetGeometry.getSkinIndices().get(index).clone()); bodyWeight.push(targetGeometry.getSkinWeights().get(index).clone()); } return new WeightResult(bodyIndices, bodyWeight); } public final native void sort(JsArray<DistanceVertexIndex> distances)/*-{ distances.sort( function(a,b){ if( a.distance < b.distance ) return -1; if( a.distance > b.distance ) return 1; return 0; } ); }-*/; }
apache-2.0
rehacktive/waspdb
waspdb/src/main/java/net/rehacktive/waspdb/internals/utils/Salt.java
290
package net.rehacktive.waspdb.internals.utils; /** * Created by stefano on 20/07/2015. */ public class Salt { private byte[] salt; public Salt() { } public Salt(byte[] salt) { this.salt = salt; } public byte[] getSalt() { return salt; } }
apache-2.0
jentfoo/aws-sdk-java
aws-java-sdk-glacier/src/main/java/com/amazonaws/services/glacier/model/transform/VaultAccessPolicyMarshaller.java
1962
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.glacier.model.transform; import javax.annotation.Generated; import com.amazonaws.SdkClientException; import com.amazonaws.services.glacier.model.*; import com.amazonaws.protocol.*; import com.amazonaws.annotation.SdkInternalApi; /** * VaultAccessPolicyMarshaller */ @Generated("com.amazonaws:aws-java-sdk-code-generator") @SdkInternalApi public class VaultAccessPolicyMarshaller { private static final MarshallingInfo<String> POLICY_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD) .marshallLocationName("Policy").build(); private static final VaultAccessPolicyMarshaller instance = new VaultAccessPolicyMarshaller(); public static VaultAccessPolicyMarshaller getInstance() { return instance; } /** * Marshall the given parameter object. */ public void marshall(VaultAccessPolicy vaultAccessPolicy, ProtocolMarshaller protocolMarshaller) { if (vaultAccessPolicy == null) { throw new SdkClientException("Invalid argument passed to marshall(...)"); } try { protocolMarshaller.marshall(vaultAccessPolicy.getPolicy(), POLICY_BINDING); } catch (Exception e) { throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e); } } }
apache-2.0
BOXOUT-THINKERS/TiOpenChat
app/controllers/setting/banFriendManage.js
3930
var args = arguments[0] || {}; $.container.title = L('sb_banFriendTitle'); var Q = require('q'); var banContactsCol; ////////////// $.getView().addEventListener('open', function () { //fetch데이터가 불러온것만 들어가게됨. 그러니 새것으로 해야함 // Alloy.Globals.startWaiting('c_waitingMsgFriend'); // banContactsCol.defaultFetchData = { // order : "-User_objectId_To,fullName", // where : { "User_objectId" : Alloy.Globals.user.get("id"), "isBlock" : true }, // include : "User_object_To", // limit : 1000 // }; // banContactsCol.fetch({ // success: function(){ // drawBanFriend(); // Alloy.Globals.stopWaiting(); // }, // error: function(){ // Alloy.Globals.alert('verifyCodeFail'); // } // }); var allContactsCol = Alloy.Collections.instance('contacts'); var models = allContactsCol.filter(function(model){ return (model.get('User_objectId_To') && model.get('isBlock')); }); banContactsCol = Alloy.createCollection('contacts'); banContactsCol.reset(models); drawBanFriend(); }); //////////// // $.listView.addEventListener('itemclick', function(e){ var itemId = e.itemId; var contactM = banContactsCol.get(itemId); var opts = { cancleBan: 0, ban: 1 } if(OS_IOS){ opts.options = [ L('sb_cancleBanFriend'), L('sb_changeBlocktToHidden'), L('c_cancle'), ] }else{ opts.options = [ L('sb_cancleBanFriend'), L('sb_changeBlocktToHidden') ] opts.buttonNames = [L('c_cancle')]; } var dialog = Ti.UI.createOptionDialog(opts) //사진찍거나 가져오고, 로컬 변환후.. 서버에 저장한다. dialog.addEventListener('click', function(e){ //안드로이드에서 버튼이면 취소와 같음.아무동작안함. //아무동작하지않음. if(OS_IOS){ if(e.index > 1) return; }else{ if(e.button) return; } if(e.index == e.source.cancleBan){ cancleBan(contactM); } if(e.index == e.source.ban){ hideFriend(contactM); } }); dialog.show(); }); //업데이트와....컬렉션에서 제거.....리스트뷰는 걍 다시그리고. 
function cancleBan(contactM) { banContactsCol.remove(contactM, {remove:false}); drawBanFriend(); var tempContactM = Alloy.createModel('contacts'); tempContactM.save({'objectId': contactM.id, 'isBlock': false}, { success: function (result) { //전체 친구목록을 다시그림. // var currentContactM = currentContactsCol.get(contactM.id); contactM.set({'isBlock': false}, {change:'false'}); }, error : function (error) { banContactsCol.add(contactM, {add:false}); drawBanFriend(); Alloy.Globals.alert('c_alertMsgDefault'); } }); } //블락 취소 대신 하이드.. function hideFriend(contactM) { banContactsCol.remove(contactM, {remove:false}); drawBanFriend(); var tempContactM = Alloy.createModel('contacts'); tempContactM.save({'objectId': contactM.id, 'isHidden': true, 'isBlock': false}, { success: function (result) { //전체 친구목록에는 어차피 안보임 contactM.set({'isHidden': true, 'isBlock': false}, {change:'false'}); }, error : function (error) { banContactsCol.add(contactM, {add:false}); drawBanFriend(); Alloy.Globals.alert('c_alertMsgDefault'); } }); } function drawBanFriend() { var items = []; banContactsCol.each(function(contactM){ var friend = contactM.getUserInfo(); items.push({ template : "rowTemplate", profileImage : { image : friend.imageUrl || "/images/friendlist_profile_default_img.png" }, profileName: { text : friend.name }, rowRightBtnLabel : {text :L('c_manage')}, properties : { itemId : contactM.id } }); }); $.section.setItems(items); };
apache-2.0
qamate/iOS-selenium-server
javascript/safari-driver/inject/commands/commands.js
16906
// Copyright 2012 Selenium committers // Copyright 2012 Software Freedom Conservancy // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * @fileoverview Command handlers used by the SafariDriver's injected script. */ goog.provide('safaridriver.inject.commands'); goog.require('bot'); goog.require('bot.Error'); goog.require('bot.ErrorCode'); goog.require('bot.action'); goog.require('bot.dom'); goog.require('bot.frame'); goog.require('bot.inject'); goog.require('bot.inject.cache'); goog.require('bot.locators'); goog.require('bot.window'); goog.require('goog.array'); goog.require('goog.debug.Logger'); goog.require('goog.math.Coordinate'); goog.require('goog.math.Size'); goog.require('goog.net.cookies'); goog.require('goog.style'); goog.require('safaridriver.inject.CommandRegistry'); goog.require('safaridriver.inject.message.Activate'); goog.require('webdriver.atoms.element'); /** * @type {!goog.debug.Logger} * @const * @private */ safaridriver.inject.commands.LOG_ = goog.debug.Logger.getLogger( 'safaridriver.inject.commands'); /** @return {string} The name of the current window. */ safaridriver.inject.commands.getWindowName = function() { return window.name; }; /** @return {string} The current URL. */ safaridriver.inject.commands.getCurrentUrl = function() { return window.location.href; }; /** * Loads a new URL in the current page. * @param {!safaridriver.Command} command The command object. 
*/ safaridriver.inject.commands.loadUrl = function(command) { window.location.href = /** @type {string} */ (command.getParameter('url')); // No need to send a response. The global page should be listening for the // navigate event. }; /** Reloads the current page. */ safaridriver.inject.commands.reloadPage = function() { window.location.reload(); // No need to send a response. The global page should be listening for the // navigate event. }; /** * Stub that reports an error that navigating through the browser history does * not work for the SafariDriver. */ safaridriver.inject.commands.unsupportedHistoryNavigation = function() { throw Error('Yikes! Safari history navigation does not work. We can ' + 'go forward or back, but once we do, we can no longer ' + 'communicate with the page...'); }; /** @return {string} The document title. */ safaridriver.inject.commands.getTitle = function() { return document.title; }; /** @return {string} A string representation of the current page source. */ safaridriver.inject.commands.getPageSource = function() { return new XMLSerializer().serializeToString(document); }; /** * Defines an element locating command. * @param {function(!Object, (Document|Element)=): * (Element|!goog.array.ArrayLike.<Element>)} locatorFn The locator function * that should be used. * @return {function(!safaridriver.Command): !bot.response.ResponseObject} The * locator command function. * @private */ safaridriver.inject.commands.findElementCommand_ = function(locatorFn) { return function(command) { var locator = {}; locator[command.getParameter('using')] = command.getParameter('value'); var args = [locator]; if (command.getParameter('id')) { args.push({'ELEMENT': command.getParameter('id')}); } return bot.inject.executeScript(locatorFn, args); }; }; /** * Locates an element on the page. * @param {!safaridriver.Command} command The command object. * @return {!bot.response.ResponseObject} The command response. 
*/ safaridriver.inject.commands.findElement = safaridriver.inject.commands.findElementCommand_(bot.locators.findElement); /** * Locates multiple elements on the page. * @param {!safaridriver.Command} command The command object. * @return {bot.response.ResponseObject} The command response. */ safaridriver.inject.commands.findElements = safaridriver.inject.commands.findElementCommand_(bot.locators.findElements); /** * Retrieves the element that currently has focus. * @return {!bot.response.ResponseObject} The response object. */ safaridriver.inject.commands.getActiveElement = function() { var getActiveElement = goog.partial(bot.dom.getActiveElement, document); return /** @type {!bot.response.ResponseObject} */ (bot.inject.executeScript( getActiveElement, [])); }; /** * Adds a new cookie to the page. * @param {!safaridriver.Command} command The command object. */ safaridriver.inject.commands.addCookie = function(command) { var cookie = command.getParameter('cookie'); // The WebDriver wire protocol defines cookie expiration times in seconds // since midnight, January 1, 1970 UTC, but goog.net.Cookies expects them // to be in seconds since "right now". var maxAge = cookie['expiry']; if (goog.isNumber(maxAge)) { maxAge = new Date(maxAge - goog.now()); } // TODO: check whether cookie['domain'] is valid. goog.net.cookies.set(cookie['name'], cookie['value'], maxAge, cookie['path'], cookie['domain'], cookie['secure']); }; /** * @return {!Array.<{name:string, value:string}>} A list of the cookies visible * to the current page. */ safaridriver.inject.commands.getCookies = function() { var keys = goog.net.cookies.getKeys(); return goog.array.map(keys, function(key) { return { 'name': key, 'value': goog.net.cookies.get(key) }; }); }; /** Deletes all cookies visible to the current page. */ safaridriver.inject.commands.deleteCookies = function() { goog.net.cookies.clear(); }; /** * Deletes a specified cookie. * @param {!safaridriver.Command} command The command object. 
*/ safaridriver.inject.commands.deleteCookie = function(command) { goog.net.cookies.remove(/** @type {string} */ (command.getParameter('name'))); }; /** * Creates a command that targets a specific DOM element. * @param {!Function} handlerFn The actual handler function. The first parameter * should be the Element to target. * @param {...string} var_args Any named parameters which should be extracted * and passed as arguments to {@code commandFn}. * @return {function(!safaridriver.Command)} The new element command function. * @private */ safaridriver.inject.commands.elementCommand_ = function(handlerFn, var_args) { var keys = goog.array.slice(arguments, 1); return function(command) { command = safaridriver.inject.commands.util.prepareElementCommand(command); var element = command.getParameter('id'); var args = goog.array.concat(element, goog.array.map(keys, function(key) { return command.getParameter(key); })); return bot.inject.executeScript(handlerFn, args); }; }; /** * @param {!safaridriver.Command} command The command to execute. * @see bot.action.clear */ safaridriver.inject.commands.clearElement = safaridriver.inject.commands.elementCommand_(bot.action.clear); /** * @param {!safaridriver.Command} command The command to execute. * @see bot.action.click */ safaridriver.inject.commands.clickElement = safaridriver.inject.commands.elementCommand_(bot.action.click); /** * @param {!safaridriver.Command} command The command to execute. * @see bot.action.submit */ safaridriver.inject.commands.submitElement = safaridriver.inject.commands.elementCommand_(bot.action.submit); /** * @param {!safaridriver.Command} command The command to execute. * @see webdriver.atoms.element.getAttribute */ safaridriver.inject.commands.getElementAttribute = safaridriver.inject.commands.elementCommand_( webdriver.atoms.element.getAttribute, 'name'); /** * @param {!safaridriver.Command} command The command to execute. 
* @see goog.style.getPageOffset */ safaridriver.inject.commands.getElementLocation = safaridriver.inject.commands.elementCommand_(goog.style.getPageOffset); /** * @param {!safaridriver.Command} command The command to execute. * @see bot.dom.getLocationInView */ safaridriver.inject.commands.getLocationInView = safaridriver.inject.commands.elementCommand_(bot.dom.getLocationInView); /** * @param {!safaridriver.Command} command The command to execute. * @see goog.style.getSize */ safaridriver.inject.commands.getElementSize = safaridriver.inject.commands.elementCommand_(goog.style.getSize); /** * @param {!safaridriver.Command} command The command to execute. * @see webdriver.atoms.element.getText */ safaridriver.inject.commands.getElementText = safaridriver.inject.commands.elementCommand_( webdriver.atoms.element.getText); /** * @param {!safaridriver.Command} command The command to execute. */ safaridriver.inject.commands.getElementTagName = safaridriver.inject.commands.elementCommand_(function(el) { return el.tagName; }); /** * @param {!safaridriver.Command} command The command to execute. * @see bot.dom.isShown */ safaridriver.inject.commands.isElementDisplayed = safaridriver.inject.commands.elementCommand_(bot.dom.isShown); /** * @param {!safaridriver.Command} command The command to execute. * @see bot.dom.isEnabled */ safaridriver.inject.commands.isElementEnabled = safaridriver.inject.commands.elementCommand_(bot.dom.isEnabled); /** * @param {!safaridriver.Command} command The command to execute. * @see webdriver.atoms.element.isSelected */ safaridriver.inject.commands.isElementSelected = safaridriver.inject.commands.elementCommand_( webdriver.atoms.element.isSelected); /** * @param {!safaridriver.Command} command The command to execute. */ safaridriver.inject.commands.elementEquals = safaridriver.inject.commands.elementCommand_(function(a, b) { return a === b; }, 'other'); /** * @param {!safaridriver.Command} command The command to execute. 
* @see bot.dom.getEffectiveStyle */ safaridriver.inject.commands.getCssValue = safaridriver.inject.commands.elementCommand_(bot.dom.getEffectiveStyle, 'propertyName'); /** * @return {!goog.math.Coordinate} The position of the window. * @see bot.window.getPosition */ safaridriver.inject.commands.getWindowPosition = function() { return bot.window.getPosition(); }; /** * @param {!safaridriver.Command} command The command to execute. * @see bot.window.setPosition */ safaridriver.inject.commands.setWindowPosition = function(command) { var position = new goog.math.Coordinate( /** @type {number} */ (command.getParameter('x')), /** @type {number} */ (command.getParameter('y'))); bot.window.setPosition(position); }; /** * @return {!goog.math.Size} The size of the window. * @see bot.window.getSize */ safaridriver.inject.commands.getWindowSize = function() { return bot.window.getSize(); }; /** * @param {!safaridriver.Command} command The command to execute. * @see bot.window.setSize */ safaridriver.inject.commands.setWindowSize = function(command) { var size = new goog.math.Size( /** @type {number} */ (command.getParameter('width')), /** @type {number} */ (command.getParameter('height'))); bot.window.setSize(size); }; /** Maximizes the window. */ safaridriver.inject.commands.maximizeWindow = function() { window.moveTo(0, 0); window.resizeTo(window.screen.width, window.screen.height); }; /** * Executes a command in the context of the current page. * @param {!safaridriver.Command} command The command to execute. * @param {!safaridriver.inject.Tab} tab A reference to the tab issuing this * command. * @return {!webdriver.promise.Promise} A promise that will be resolved with the * {@link bot.response.ResponseObject} from the page. * @throws {Error} If there is an error while sending the command to the page. 
*/ safaridriver.inject.commands.executeInPage = function(command, tab) { command = safaridriver.inject.commands.util.prepareElementCommand(command); return tab.executeInPage(command); }; /** * Locates a frame and sends a message to it to activate itself with the * extension. The located frame will be * @param {!safaridriver.Command} command The command to execute. * the target of all subsequent commands. * @throws {Error} If there is an error whilst locating the frame. */ safaridriver.inject.commands.switchToFrame = function(command) { var id = command.getParameter('id'); var frameWindow; if (goog.isNull(id)) { safaridriver.inject.commands.LOG_.info('Resetting focus to window.top'); frameWindow = window.top; } else if (goog.isString(id)) { safaridriver.inject.commands.LOG_.info( 'Switching to frame by name or ID: ' + id); frameWindow = bot.frame.findFrameByNameOrId(/** @type {string} */ (id)); } else if (goog.isNumber(id)) { safaridriver.inject.commands.LOG_.info( 'Switching to frame by index: ' + id); frameWindow = bot.frame.findFrameByIndex(/** @type {number} */ (id)); } else { var elementKey = /** @type {string} */ (id[bot.inject.ELEMENT_KEY]); safaridriver.inject.commands.LOG_.info('Switching to frame by ' + 'WebElement: ' + elementKey); // ID must be a WebElement. Pull it from the cache. var frameElement = bot.inject.cache.getElement(elementKey); frameWindow = bot.frame.getFrameWindow( /** @type {!(HTMLIFrameElement|HTMLFrameElement)} */ (frameElement)); } if (!frameWindow) { throw new bot.Error(bot.ErrorCode.NO_SUCH_FRAME, 'Unable to locate frame with ' + id); } // De-activate ourselves. We should no longer respond to commands until // we are re-activated. 
safaridriver.inject.Tab.getInstance().setActive(false); var message = new safaridriver.inject.message.Activate(command); message.send(frameWindow); }; goog.scope(function() { var CommandName = webdriver.CommandName; var commands = safaridriver.inject.commands; // Commands that should be defined for every frame. safaridriver.inject.CommandRegistry.getInstance() .defineModule(safaridriver.inject.commands.module.ID, goog.object.create( CommandName.ADD_COOKIE, commands.addCookie, CommandName.CLEAR_ELEMENT, commands.clearElement, CommandName.CLICK_ELEMENT, commands.clickElement, CommandName.DELETE_ALL_COOKIES, commands.deleteCookies, CommandName.DELETE_COOKIE, commands.deleteCookie, CommandName.ELEMENT_EQUALS, commands.elementEquals, CommandName.FIND_CHILD_ELEMENT, commands.findElement, CommandName.FIND_CHILD_ELEMENTS, commands.findElements, CommandName.FIND_ELEMENT, commands.findElement, CommandName.FIND_ELEMENTS, commands.findElements, CommandName.GET, commands.loadUrl, CommandName.GET_ACTIVE_ELEMENT, commands.getActiveElement, CommandName.GET_ALL_COOKIES, commands.getCookies, CommandName.GET_CURRENT_URL, commands.getCurrentUrl, CommandName.GET_ELEMENT_ATTRIBUTE, commands.getElementAttribute, CommandName.GET_ELEMENT_LOCATION, commands.getElementLocation, CommandName.GET_ELEMENT_LOCATION_IN_VIEW, commands.getLocationInView, CommandName.GET_ELEMENT_SIZE, commands.getElementSize, CommandName.GET_ELEMENT_TAG_NAME, commands.getElementTagName, CommandName.GET_ELEMENT_TEXT, commands.getElementText, CommandName.GET_ELEMENT_VALUE_OF_CSS_PROPERTY, commands.getCssValue, CommandName.GET_PAGE_SOURCE, commands.getPageSource, CommandName.GET_TITLE, commands.getTitle, CommandName.GET_WINDOW_POSITION, commands.getWindowPosition, CommandName.GET_WINDOW_SIZE, commands.getWindowSize, CommandName.GO_BACK, commands.unsupportedHistoryNavigation, CommandName.GO_FORWARD, commands.unsupportedHistoryNavigation, CommandName.IS_ELEMENT_DISPLAYED, commands.isElementDisplayed, 
CommandName.IS_ELEMENT_ENABLED, commands.isElementEnabled, CommandName.IS_ELEMENT_SELECTED, commands.isElementSelected, CommandName.MAXIMIZE_WINDOW, commands.maximizeWindow, CommandName.REFRESH, commands.reloadPage, CommandName.SET_WINDOW_POSITION, commands.setWindowPosition, CommandName.SET_WINDOW_SIZE, commands.setWindowSize, CommandName.SUBMIT_ELEMENT, commands.submitElement, CommandName.SWITCH_TO_FRAME, commands.switchToFrame, // The extension handles window switches. It sends the command to this // injected script only as a means of retrieving the window name. CommandName.SWITCH_TO_WINDOW, commands.getWindowName)); }); // goog.scope
apache-2.0
HackerPack/thePlaneteers
social-auth-examples/index.js
505
var express = require('express'); var app = express(); app.set('port', (process.env.PORT || 5000)); app.use(express.static(__dirname + '/public')); app.use(express.static(__dirname + '/lib')); // views is directory for all template files app.set('views', __dirname + '/views'); app.set('view engine', 'ejs'); app.get('/', function(request, response) { response.render('pages/index'); }); app.listen(app.get('port'), function() { console.log('Node app is running on port', app.get('port')); });
apache-2.0
google/jwt_verify_lib
test/verify_jwk_rsa_test.cc
12751
// Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #include "gtest/gtest.h" #include "jwt_verify_lib/verify.h" #include "test/test_common.h" namespace google { namespace jwt_verify { namespace { // private key: // "-----BEGIN RSA PRIVATE KEY-----" // "MIIEowIBAAKCAQEAtw7MNxUTxmzWROCD5BqJxmzT7xqc9KsnAjbXCoqEEHDx4WBl" // "fcwkXHt9e/2+Uwi3Arz3FOMNKwGGlbr7clBY3utsjUs8BTF0kO/poAmSTdSuGeh2" // "mSbcVHvmQ7X/kichWwx5Qj0Xj4REU3Gixu1gQIr3GATPAIULo5lj/ebOGAa+l0wI" // "G80Nzz1pBtTIUx68xs5ZGe7cIJ7E8n4pMX10eeuh36h+aossePeuHulYmjr4N0/1" // "jG7a+hHYL6nqwOR3ej0VqCTLS0OloC0LuCpLV7CnSpwbp2Qg/c+MDzQ0TH8g8drI" // "zR5hFe9a3NlNRMXgUU5RqbLnR9zfXr7b9oEszQIDAQABAoIBAQCgQQ8cRZJrSkqG" // "P7qWzXjBwfIDR1wSgWcD9DhrXPniXs4RzM7swvMuF1myW1/r1xxIBF+V5HNZq9tD" // "Z07LM3WpqZX9V9iyfyoZ3D29QcPX6RGFUtHIn5GRUGoz6rdTHnh/+bqJ92uR02vx" // "VPD4j0SNHFrWpxcE0HRxA07bLtxLgNbzXRNmzAB1eKMcrTu/W9Q1zI1opbsQbHbA" // "CjbPEdt8INi9ij7d+XRO6xsnM20KgeuKx1lFebYN9TKGEEx8BCGINOEyWx1lLhsm" // "V6S0XGVwWYdo2ulMWO9M0lNYPzX3AnluDVb3e1Yq2aZ1r7t/GrnGDILA1N2KrAEb" // "AAKHmYNNAoGBAPAv9qJqf4CP3tVDdto9273DA4Mp4Kjd6lio5CaF8jd/4552T3UK" // "N0Q7N6xaWbRYi6xsCZymC4/6DhmLG/vzZOOhHkTsvLshP81IYpWwjm4rF6BfCSl7" // "ip+1z8qonrElxes68+vc1mNhor6GGsxyGe0C18+KzpQ0fEB5J4p0OHGnAoGBAMMb" // "/fpr6FxXcjUgZzRlxHx1HriN6r8Jkzc+wAcQXWyPUOD8OFLcRuvikQ16sa+SlN4E" // "HfhbFn17ABsikUAIVh0pPkHqMsrGFxDn9JrORXUpNhLdBHa6ZH+we8yUe4G0X4Mc" // "R7c8OT26p2zMg5uqz7bQ1nJ/YWlP4nLqIytehnRrAoGAT6Rn0JUlsBiEmAylxVoL" // 
"mhGnAYAKWZQ0F6/w7wEtPs/uRuYOFM4NY1eLb2AKLK3LqqGsUkAQx23v7PJelh2v" // "z3bmVY52SkqNIGGnJuGDaO5rCCdbH2EypyCfRSDCdhUDWquSpBv3Dr8aOri2/CG9" // "jQSLUOtC8ouww6Qow1UkPjMCgYB8kTicU5ysqCAAj0mVCIxkMZqFlgYUJhbZpLSR" // "Tf93uiCXJDEJph2ZqLOXeYhMYjetb896qx02y/sLWAyIZ0ojoBthlhcLo2FCp/Vh" // "iOSLot4lOPsKmoJji9fei8Y2z2RTnxCiik65fJw8OG6mSm4HeFoSDAWzaQ9Y8ue1" // "XspVNQKBgAiHh4QfiFbgyFOlKdfcq7Scq98MA3mlmFeTx4Epe0A9xxhjbLrn362+" // "ZSCUhkdYkVkly4QVYHJ6Idzk47uUfEC6WlLEAnjKf9LD8vMmZ14yWR2CingYTIY1" // "LL2jMkSYEJx102t2088meCuJzEsF3BzEWOP8RfbFlciT7FFVeiM4" // "-----END RSA PRIVATE KEY-----"; // The following public key jwk and token are taken from // https://github.com/cloudendpoints/esp/blob/master/src/api_manager/auth/lib/auth_jwt_validator_test.cc const std::string PublicKeyRSA = R"( { "keys": [ { "kty": "RSA", "alg": "RS256", "use": "sig", "kid": "62a93512c9ee4c7f8067b5a216dade2763d32a47", "n": "0YWnm_eplO9BFtXszMRQNL5UtZ8HJdTH2jK7vjs4XdLkPW7YBkkm_2xNgcaVpkW0VT2l4mU3KftR-6s3Oa5Rnz5BrWEUkCTVVolR7VYksfqIB2I_x5yZHdOiomMTcm3DheUUCgbJRv5OKRnNqszA4xHn3tA3Ry8VO3X7BgKZYAUh9fyZTFLlkeAh0-bLK5zvqCmKW5QgDIXSxUTJxPjZCgfx1vmAfGqaJb-nvmrORXQ6L284c73DUL7mnt6wj3H6tVqPKA27j56N0TB1Hfx4ja6Slr8S4EB3F1luYhATa1PKUSH8mYDW11HolzZmTQpRoLV8ZoHbHEaTfqX_aYahIw", "e": "AQAB" }, { "kty": "RSA", "alg": "RS256", "use": "sig", "kid": "b3319a147514df7ee5e4bcdee51350cc890cc89e", "n": "qDi7Tx4DhNvPQsl1ofxxc2ePQFcs-L0mXYo6TGS64CY_2WmOtvYlcLNZjhuddZVV2X88m0MfwaSA16wE-RiKM9hqo5EY8BPXj57CMiYAyiHuQPp1yayjMgoE1P2jvp4eqF-BTillGJt5W5RuXti9uqfMtCQdagB8EC3MNRuU_KdeLgBy3lS3oo4LOYd-74kRBVZbk2wnmmb7IhP9OoLc1-7-9qU1uhpDxmE6JwBau0mDSwMnYDS4G_ML17dC-ZDtLd1i24STUw39KH0pcSdfFbL2NtEZdNeam1DDdk0iUtJSPZliUHJBI_pj8M-2Mn_oA8jBuI8YKwBqYkZCN1I95Q", "e": "AQAB" } ] } )"; // private key: // "-----BEGIN PRIVATE KEY-----\n" // "MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQCoOLtPHgOE289C\n" // "yXWh/HFzZ49AVyz4vSZdijpMZLrgJj/ZaY629iVws1mOG511lVXZfzybQx/BpIDX\n" // "rAT5GIoz2GqjkRjwE9ePnsIyJgDKIe5A+nXJrKMyCgTU/aO+nh6oX4FOKWUYm3lb\n" // 
"lG5e2L26p8y0JB1qAHwQLcw1G5T8p14uAHLeVLeijgs5h37viREFVluTbCeaZvsi\n" // "E/06gtzX7v72pTW6GkPGYTonAFq7SYNLAydgNLgb8wvXt0L5kO0t3WLbhJNTDf0o\n" // "fSlxJ18VsvY20Rl015qbUMN2TSJS0lI9mWJQckEj+mPwz7Yyf+gDyMG4jxgrAGpi\n" // "RkI3Uj3lAgMBAAECggEAOuaaVyp4KvXYDVeC07QTeUgCdZHQkkuQemIi5YrDkCZ0\n" // "Zsi6CsAG/f4eVk6/BGPEioItk2OeY+wYnOuDVkDMazjUpe7xH2ajLIt3DZ4W2q+k\n" // "v6WyxmmnPqcZaAZjZiPxMh02pkqCNmqBxJolRxp23DtSxqR6lBoVVojinpnIwem6\n" // "xyUl65u0mvlluMLCbKeGW/K9bGxT+qd3qWtYFLo5C3qQscXH4L0m96AjGgHUYW6M\n" // "Ffs94ETNfHjqICbyvXOklabSVYenXVRL24TOKIHWkywhi1wW+Q6zHDADSdDVYw5l\n" // "DaXz7nMzJ2X7cuRP9zrPpxByCYUZeJDqej0Pi7h7ZQKBgQDdI7Yb3xFXpbuPd1VS\n" // "tNMltMKzEp5uQ7FXyDNI6C8+9TrjNMduTQ3REGqEcfdWA79FTJq95IM7RjXX9Aae\n" // "p6cLekyH8MDH/SI744vCedkD2bjpA6MNQrzNkaubzGJgzNiZhjIAqnDAD3ljHI61\n" // "NbADc32SQMejb6zlEh8hssSsXwKBgQDCvXhTIO/EuE/y5Kyb/4RGMtVaQ2cpPCoB\n" // "GPASbEAHcsRk+4E7RtaoDQC1cBRy+zmiHUA9iI9XZyqD2xwwM89fzqMj5Yhgukvo\n" // "XMxvMh8NrTneK9q3/M3mV1AVg71FJQ2oBr8KOXSEbnF25V6/ara2+EpH2C2GDMAo\n" // "pgEnZ0/8OwKBgFB58IoQEdWdwLYjLW/d0oGEWN6mRfXGuMFDYDaGGLuGrxmEWZdw\n" // "fzi4CquMdgBdeLwVdrLoeEGX+XxPmCEgzg/FQBiwqtec7VpyIqhxg2J9V2elJS9s\n" // "PB1rh9I4/QxRP/oO9h9753BdsUU6XUzg7t8ypl4VKRH3UCpFAANZdW1tAoGAK4ad\n" // "tjbOYHGxrOBflB5wOiByf1JBZH4GBWjFf9iiFwgXzVpJcC5NHBKL7gG3EFwGba2M\n" // "BjTXlPmCDyaSDlQGLavJ2uQar0P0Y2MabmANgMkO/hFfOXBPtQQe6jAfxayaeMvJ\n" // "N0fQOylUQvbRTodTf2HPeG9g/W0sJem0qFH3FrECgYEAnwixjpd1Zm/diJuP0+Lb\n" // "YUzDP+Afy78IP3mXlbaQ/RVd7fJzMx6HOc8s4rQo1m0Y84Ztot0vwm9+S54mxVSo\n" // "6tvh9q0D7VLDgf+2NpnrDW7eMB3n0SrLJ83Mjc5rZ+wv7m033EPaWSr/TFtc/MaF\n" // "aOI20MEe3be96HHuWD3lTK0=\n" // "-----END PRIVATE KEY-----"; // JWT without kid // Header: {"alg":"RS256","typ":"JWT"} // Payload: // {"iss":"https://example.com","sub":"test@example.com","exp":1501281058} const std::string JwtTextNoKid = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9." 
"eyJpc3MiOiJodHRwczovL2V4YW1wbGUuY29tIiwic3ViIjoidGVzdEBleGFtcGxlLmNvbSIs" "ImV4cCI6MTUwMTI4MTA1OH0.XYPg6VPrq-H1Kl-kgmAfGFomVpnmdZLIAo0g6dhJb2Be_" "koZ2T76xg5_Lr828hsLKxUfzwNxl5-k1cdz_kAst6vei0hdnOYqRQ8EhkZS_" "5Y2vWMrzGHw7AUPKCQvSnNqJG5HV8YdeOfpsLhQTd-" "tG61q39FWzJ5Ra5lkxWhcrVDQFtVy7KQrbm2dxhNEHAR2v6xXP21p1T5xFBdmGZbHFiH63N9" "dwdRgWjkvPVTUqxrZil7PSM2zg_GTBETp_" "qS7Wwf8C0V9o2KZu0KDV0j0c9nZPWTv3IMlaGZAtQgJUeyemzRDtf4g2yG3xBZrLm3AzDUj_" "EX_pmQAHA5ZjPVCAw"; // JWT without kid with long exp // Header: {"alg":"RS256","typ":"JWT"} // Payload: // {"iss":"https://example.com","sub":"test@example.com","aud":"example_service","exp":2001001001} const std::string JwtTextNoKidLongExp = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9." "eyJpc3MiOiJodHRwczovL2V4YW1wbGUuY29tIiwic3ViIjoidGVzdEBleGFtcGxlLmNvbSIs" "ImF1ZCI6ImV4YW1wbGVfc2VydmljZSIsImV4cCI6MjAwMTAwMTAwMX0." "n45uWZfIBZwCIPiL0K8Ca3tmm-ZlsDrC79_" "vXCspPwk5oxdSn983tuC9GfVWKXWUMHe11DsB02b19Ow-" "fmoEzooTFn65Ml7G34nW07amyM6lETiMhNzyiunctplOr6xKKJHmzTUhfTirvDeG-q9n24-" "8lH7GP8GgHvDlgSM9OY7TGp81bRcnZBmxim_UzHoYO3_" "c8OP4ZX3xG5PfihVk5G0g6wcHrO70w0_64JgkKRCrLHMJSrhIgp9NHel_" "CNOnL0AjQKe9IGblJrMuouqYYS0zEWwmOVUWUSxQkoLpldQUVefcfjQeGjz8IlvktRa77FYe" "xfP590ACPyXrivtsxg"; // JWT with correct kid // Header: // {"alg":"RS256","typ":"JWT","kid":"b3319a147514df7ee5e4bcdee51350cc890cc89e"} // Payload: // {"iss":"https://example.com","sub":"test@example.com","exp":1501281058} const std::string JwtTextWithCorrectKid = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6ImIzMzE5YTE0NzUxNGRmN2VlNWU0" "YmNkZWU1MTM1MGNjODkwY2M4OWUifQ." 
"eyJpc3MiOiJodHRwczovL2V4YW1wbGUuY29tIiwic3ViIjoidGVzdEBleGFtcGxlLmNvbSIs" "ImV4cCI6MTUwMTI4MTA1OH0.QYWtQR2JNhLBJXtpJfFisF0WSyzLbD-9dynqwZt_" "KlQZAIoZpr65BRNEyRzpt0jYrk7RA7hUR2cS9kB3AIKuWA8kVZubrVhSv_fiX6phjf_" "bZYj92kDtMiPJf7RCuGyMgKXwwf4b1Sr67zamcTmQXf26DT415rnrUHVqTlOIW50TjNa1bbO" "fNyKZC3LFnKGEzkfaIeXYdGiSERVOTtOFF5cUtZA2OVyeAT3mE1NuBWxz0v7xJ4zdIwHwxFU" "wd_5tB57j_" "zCEC9NwnwTiZ8wcaSyMWc4GJUn4bJs22BTNlRt5ElWl6RuBohxZA7nXwWig5CoLZmCpYpb8L" "fBxyCpqJQ"; // JWT with existing but incorrect kid // Header: // {"alg":"RS256","typ":"JWT","kid":"62a93512c9ee4c7f8067b5a216dade2763d32a47"} // Payload: // {"iss":"https://example.com","sub":"test@example.com","exp":1501281058} const std::string JwtTextWithIncorrectKid = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6IjYyYTkzNTEyYzllZTRjN2Y4MDY3" "YjVhMjE2ZGFkZTI3NjNkMzJhNDcifQ." "eyJpc3MiOiJodHRwczovL2V4YW1wbGUuY29tIiwic3ViIjoidGVzdEBleGFtcGxlLmNvbSIs" "ImV4cCI6MTUwMTI4MTA1OH0." "adrKqsjKh4zdOuw9rMZr0Kn2LLYG1OUfDuvnO6tk75NKCHpKX6oI8moNYhgcCQU4AoCKXZ_" "u-oMl54QTx9lX9xZ2VUWKTxcJEOnpoJb-DVv_FgIG9ETe5wcCS8Y9pQ2-hxtO1_LWYok1-" "A01Q4929u6WNw_Og4rFXR6VSpZxXHOQrEwW44D2-Lngu1PtPjWIz3rO6cOiYaTGCS6-" "TVeLFnB32KQg823WhFhWzzHjhYRO7NOrl-IjfGn3zYD_" "DfSoMY3A6LeOFCPp0JX1gcKcs2mxaF6e3LfVoBiOBZGvgG_" "jx3y85hF2BZiANbSf1nlLQFdjk_CWbLPhTWeSfLXMOg"; // JWT with nonexist kid // Header: {"alg":"RS256","typ":"JWT","kid":"blahblahblah"} // Payload: // {"iss":"https://example.com","sub":"test@example.com","exp":1501281058} const std::string JwtTextWithNonExistKid = "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCIsImtpZCI6ImJsYWhibGFoYmxhaCJ9." 
"eyJpc3MiOiJodHRwczovL2V4YW1wbGUuY29tIiwic3ViIjoidGVzdEBleGFtcGxlLmNvbSIs" "ImV4cCI6MTUwMTI4MTA1OH0.digk0Fr_IdcWgJNVyeVDw2dC1cQG6LsHwg5pIN93L4_" "xhEDI3ZFoZ8aE44kvQHWLicnHDlhELqtF-" "TqxrhfnitpLE7jiyknSu6NVXxtRBcZ3dOTKryVJDvDXcYXOaaP8infnh82loHfhikgg1xmk9" "rcH50jtc3BkxWNbpNgPyaAAE2tEisIInaxeX0gqkwiNVrLGe1hfwdtdlWFL1WENGlyniQBvB" "Mwi8DgG_F0eyFKTSRWoaNQQXQruEK0YIcwDj9tkYOXq8cLAnRK9zSYc5-" "15Hlzfb8eE77pID0HZN-Axeui4IY22I_kYftd0OEqlwXJv_v5p6kNaHsQ9QbtAkw"; class VerifyJwkRsaTest : public testing::Test { protected: void SetUp() { jwks_ = Jwks::createFrom(PublicKeyRSA, Jwks::Type::JWKS); EXPECT_EQ(jwks_->getStatus(), Status::Ok); } JwksPtr jwks_; }; TEST_F(VerifyJwkRsaTest, NoKidOK) { Jwt jwt; EXPECT_EQ(jwt.parseFromString(JwtTextNoKid), Status::Ok); EXPECT_EQ(verifyJwt(jwt, *jwks_, 1), Status::Ok); fuzzJwtSignature(jwt, [this](const Jwt& jwt) { EXPECT_EQ(verifyJwt(jwt, *jwks_, 1), Status::JwtVerificationFail); }); } TEST_F(VerifyJwkRsaTest, NoKidLongExpOK) { Jwt jwt; EXPECT_EQ(jwt.parseFromString(JwtTextNoKidLongExp), Status::Ok); EXPECT_EQ(verifyJwt(jwt, *jwks_), Status::Ok); fuzzJwtSignature(jwt, [this](const Jwt& jwt) { EXPECT_EQ(verifyJwt(jwt, *jwks_, 1), Status::JwtVerificationFail); }); } TEST_F(VerifyJwkRsaTest, CorrectKidOK) { Jwt jwt; EXPECT_EQ(jwt.parseFromString(JwtTextWithCorrectKid), Status::Ok); EXPECT_EQ(verifyJwt(jwt, *jwks_, 1), Status::Ok); fuzzJwtSignature(jwt, [this](const Jwt& jwt) { EXPECT_EQ(verifyJwt(jwt, *jwks_, 1), Status::JwtVerificationFail); }); } TEST_F(VerifyJwkRsaTest, NonExistKidFail) { Jwt jwt; EXPECT_EQ(jwt.parseFromString(JwtTextWithNonExistKid), Status::Ok); EXPECT_EQ(verifyJwt(jwt, *jwks_, 1), Status::JwksKidAlgMismatch); } TEST_F(VerifyJwkRsaTest, OkPublicKeyNotAlg) { // Remove "alg" claim from public key. 
std::string alg_claim = R"("alg": "RS256",)"; std::string pubkey_no_alg = PublicKeyRSA; std::size_t alg_pos = pubkey_no_alg.find(alg_claim); while (alg_pos != std::string::npos) { pubkey_no_alg.erase(alg_pos, alg_claim.length()); alg_pos = pubkey_no_alg.find(alg_claim); } jwks_ = Jwks::createFrom(pubkey_no_alg, Jwks::Type::JWKS); EXPECT_EQ(jwks_->getStatus(), Status::Ok); Jwt jwt; EXPECT_EQ(jwt.parseFromString(JwtTextNoKid), Status::Ok); EXPECT_EQ(verifyJwt(jwt, *jwks_, 1), Status::Ok); } TEST_F(VerifyJwkRsaTest, OkPublicKeyNotKid) { // Remove "kid" claim from public key. std::string kid_claim1 = R"("kid": "62a93512c9ee4c7f8067b5a216dade2763d32a47",)"; std::string kid_claim2 = R"("kid": "b3319a147514df7ee5e4bcdee51350cc890cc89e",)"; std::string pubkey_no_kid = PublicKeyRSA; std::size_t kid_pos = pubkey_no_kid.find(kid_claim1); pubkey_no_kid.erase(kid_pos, kid_claim1.length()); kid_pos = pubkey_no_kid.find(kid_claim2); pubkey_no_kid.erase(kid_pos, kid_claim2.length()); jwks_ = Jwks::createFrom(pubkey_no_kid, Jwks::Type::JWKS); EXPECT_EQ(jwks_->getStatus(), Status::Ok); Jwt jwt; EXPECT_EQ(jwt.parseFromString(JwtTextNoKid), Status::Ok); EXPECT_EQ(verifyJwt(jwt, *jwks_, 1), Status::Ok); } } // namespace } // namespace jwt_verify } // namespace google
apache-2.0
ShinyTechTastic/EsthersGame
src/com/foo/esthersgame/MyGLRenderer.java
10192
package com.foo.esthersgame; /* * Copyright (C) 2012 The Android Open Source Project * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ import javax.microedition.khronos.egl.EGLConfig; import javax.microedition.khronos.opengles.GL10; import com.foo.esthersgame.particle.IPartcleSystem; import com.foo.esthersgame.shapes.AbstractShape; import com.foo.esthersgame.shapes.Polygon; import com.foo.esthersgame.shapes.Star; import android.opengl.GLES20; import android.opengl.GLSurfaceView; import android.opengl.Matrix; import android.util.Log; public class MyGLRenderer implements GLSurfaceView.Renderer { private static final String TAG = "MyGLRenderer"; private final float[] mMVPMatrix = new float[16]; private final float[] mProjMatrix = new float[16]; private final float[] mVMatrix = new float[16]; private float screenW = 0.0f; private float screenH = 0.0f; // Declare as volatile because we are updating it from another thread public volatile float mAngle; private AbstractGameState gameState; private IRenderTarget mInnerRenderTarget; private InnerParticleSystem mInnerParticleSystem; private AbstractShape[] mShapes; public MyGLRenderer(AbstractGameState gameState) { this.gameState = gameState; } @Override public void onSurfaceCreated(GL10 unused, EGLConfig config) { // Set the background frame color GLES20.glClearColor(0.0f, 0.0f, 0.3f, 1.0f); GLES20.glEnable( GLES20.GL_BLEND ); //GLES20.glBlendFunc( GLES20.GL_SRC_ALPHA , GLES20.GL_ONE_MINUS_SRC_ALPHA ); GLES20.glBlendFunc( 
GLES20.GL_SRC_ALPHA , GLES20.GL_ONE ); mShapes = new AbstractShape[11]; mShapes[0] = new Star( 4 ); mShapes[1] = new Star( 5 ); mShapes[2] = new Star( 6 ); mShapes[3] = new Star( 7 ); mShapes[4] = new Star( 8 ); mShapes[5] = new Polygon( 3 ); mShapes[6] = new Polygon( 4 ); mShapes[7] = new Polygon( 5 ); mShapes[8] = new Polygon( 6 ); mShapes[9] = new Polygon( 7 ); mShapes[10] = new Polygon( 8 ); mInnerRenderTarget = new InnerRenderTarget(); mInnerParticleSystem = new InnerParticleSystem(); gameState.setParticleSystem(mInnerParticleSystem); } @Override public void onDrawFrame(GL10 unused) { // Draw background color GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); // Set the camera position (View matrix) Matrix.setLookAtM(mVMatrix, 0, 0, 0, -3, 0f, 0f, 0f, 0f, 1.0f, 0.0f); // Calculate the projection and view transformation Matrix.multiplyMM(mMVPMatrix, 0, mProjMatrix, 0, mVMatrix, 0); gameState.render( mInnerRenderTarget ); double time = gameState.tick(); mInnerParticleSystem.tick( time ); mInnerParticleSystem.render(); } @Override public void onSurfaceChanged(GL10 unused, int width, int height) { // Adjust the viewport based on geometry changes, // such as screen rotation GLES20.glViewport(0, 0, width, height); screenW = width; screenH = height; float ratio = (float) width / height; // this projection matrix is applied to object coordinates // in the onDrawFrame() method Matrix.frustumM(mProjMatrix, 0, -ratio, ratio, -1, 1, 3, 7); gameState.resizeView(ratio * 2.0f, 2.0f); } public static int loadShader(int type, String shaderCode){ // create a vertex shader type (GLES20.GL_VERTEX_SHADER) // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER) int shader = GLES20.glCreateShader(type); // add the source code to the shader and compile it GLES20.glShaderSource(shader, shaderCode); GLES20.glCompileShader(shader); return shader; } /** * Utility method for debugging OpenGL calls. 
Provide the name of the call * just after making it: * * <pre> * mColorHandle = GLES20.glGetUniformLocation(mProgram, "vColor"); * MyGLRenderer.checkGlError("glGetUniformLocation");</pre> * * If the operation is not successful, the check throws an error. * * @param glOperation - Name of the OpenGL call to check. */ public static void checkGlError(String glOperation) { int error; while ((error = GLES20.glGetError()) != GLES20.GL_NO_ERROR) { Log.e(TAG, glOperation + ": glError " + error); throw new RuntimeException(glOperation + ": glError " + error); } } private float[] worldPos = new float[2]; public float[] getWorldPosition(float touch_x, float touch_y) { // Auxiliary matrix and vectors // to deal with ogl. float[] invertedMatrix, normalizedInPoint, outPoint; invertedMatrix = new float[16]; normalizedInPoint = new float[4]; outPoint = new float[4]; // Invert y coordinate, as android uses // top-left, and ogl bottom-left. int oglTouchY = (int) (screenH - touch_y); /* Transform the screen point to clip space in ogl (-1,1) */ normalizedInPoint[0] = (float) ((touch_x) * 2.0f / screenW - 1.0); normalizedInPoint[1] = (float) ((oglTouchY) * 2.0f / screenH - 1.0); normalizedInPoint[2] = - 1.0f; normalizedInPoint[3] = 1.0f; /* Obtain the transform matrix and then the inverse. */ Matrix.invertM(invertedMatrix, 0, mProjMatrix, 0); /* Apply the inverse to the point in clip space */ Matrix.multiplyMV( outPoint, 0, invertedMatrix, 0, normalizedInPoint, 0); if (outPoint[3] == 0.0) { // Avoid /0 error. Log.e("World coords", "ERROR!"); return worldPos; } // Divide by the 3rd component to find // out the real position. 
worldPos[0] = -(outPoint[0] / outPoint[3]); worldPos[1] = outPoint[1] / outPoint[3]; return worldPos; } public class InnerRenderTarget implements IRenderTarget { private float[] mModelMatrix = new float[16]; // 4x4 matrix private float[] mTempMatrix = new float[16]; // 4x4 matrix @Override public void drawShape(AbstractShape shape, float colour, float brightness, float x, float y, float rotation , float scale ) { //Matrix.setRotateM(mModelMatrix, 0, mAngle, 0, 0, -1.0f); Matrix.setIdentityM(mModelMatrix , 0); Matrix.translateM( mModelMatrix, 0, (float)x, (float)y, 0); Matrix.scaleM(mModelMatrix, 0 , scale, scale, 0.0f ); Matrix.rotateM(mModelMatrix, 0, rotation, 0.0f, 0.0f, -1.0f ); //float[] scratch = new float[16]; // Combine the rotation matrix with the projection and camera view Matrix.multiplyMM(mTempMatrix, 0, mMVPMatrix, 0, mModelMatrix, 0); shape.draw( mTempMatrix , colour , brightness ); } @Override public void drawShape(int shape, float colour, float x, float y, float rotation, float scale) { int shapeId = (shape % mShapes.length); this.drawShape( mShapes[shapeId] , colour , 1.0f , x , y , rotation , scale ); } @Override public void drawShape(int shape, float colour, float x, float y, float rotation) { this.drawShape(shape, colour, x, y , rotation , 1.0f ); } @Override public void drawShape(int shape, float colour, float x, float y) { this.drawShape(shape, colour, x, y , 0.0f , 1.0f ); } } public class InnerParticleSystem implements IPartcleSystem { private static final int MAX_PARTICLES = 200; // max onscreen at once private static final int PARTICLE_SIZE = 6; // floats in the data structure private static final float PARTICLE_LIFE = 5.0f; // seconds to live private static final float PARTICLE_RENDER_SIZE = 0.4f; // the size on screen. 
private float[] data = new float[ PARTICLE_SIZE * MAX_PARTICLES ]; private int dataHead = 0; private int dataTail = 0; private Polygon mSquare = new Polygon( 8 , 0.04f ); @Override public void create(float x, float y, float vx, float vy, float colour) { int offset = dataHead * PARTICLE_SIZE; data[ offset + 0 ] = x; data[ offset + 1 ] = y; data[ offset + 2 ] = vx; data[ offset + 3 ] = vy; data[ offset + 4 ] = colour; data[ offset + 5 ] = PARTICLE_LIFE; // TTL dataHead = (dataHead +1) % MAX_PARTICLES; if ( dataHead == dataTail ){ // no space... dataTail = (dataTail +1) % MAX_PARTICLES; } } public void tick( double t ){ int i = dataTail; while ( i != dataHead ){ int offset = i * PARTICLE_SIZE; data[ offset + 0 ] += data[ offset + 2 ] * t; data[ offset + 1 ] += data[ offset + 3 ] * t; if ( data[ offset + 1 ] > 1.0f ){ // bounce? (from the top as we're upside down) data[ offset + 1 ] = 2.0f - data[ offset + 1 ]; // bounce distance data[ offset + 3 ] = data[ offset + 3 ] * -0.8f; // invert the vertical velocity and reduce slightly } data[ offset + 3 ] += 0.5f * t; // g data[ offset + 5 ] -= t; if ( data[ offset + 5 ] < 0.0 ){ // this has expired dataTail = i; // catchup the tail. } i = (i+1) % MAX_PARTICLES; } } public void render(){ int i = dataTail; while ( i != dataHead ){ int offset = i * PARTICLE_SIZE; float x = data[ offset + 0 ]; float y = data[ offset + 1 ]; float c = data[ offset + 4 ]; float b = (data[ offset + 5 ] / PARTICLE_LIFE); if ( b > 0.0f ){ mInnerRenderTarget.drawShape( mSquare , c , b , x, y , b * 90.0f ,PARTICLE_RENDER_SIZE ); } i = (i+1) % MAX_PARTICLES; } } } }
apache-2.0
usdot-jpo-ode/jpo-ode
jpo-ode-core/src/main/java/us/dot/its/jpo/ode/wrapper/WebSocketClient.java
1190
/******************************************************************************* * Copyright 2018 572682 * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy * of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. ******************************************************************************/ package us.dot.its.jpo.ode.wrapper; import javax.websocket.CloseReason; import javax.websocket.Session; import us.dot.its.jpo.ode.model.OdeMessage; import us.dot.its.jpo.ode.model.OdeRequest; public interface WebSocketClient { OdeRequest getRequest(); void onMessage(OdeMessage message); void onOpen(Session session); void onClose(CloseReason reason); void onError(Throwable t); }
apache-2.0
lukin0110/poeditor-java
src/main/java/be/lukin/poeditor/exceptions/PermissionDeniedException.java
103
package be.lukin.poeditor.exceptions; public class PermissionDeniedException extends ApiException { }
apache-2.0
openstack/mistral
mistral/tests/unit/notifiers/test_notify.py
44129
# Copyright 2018 - Extreme Networks, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import json from unittest import mock from oslo_config import cfg from mistral import context from mistral.db.v2 import api as db_api from mistral.notifiers import base as notif from mistral.notifiers import notification_events as events from mistral.services import workbooks as wb_svc from mistral.services import workflows as wf_svc from mistral.tests.unit.notifiers import base from mistral.workflow import states from mistral_lib import actions as ml_actions # Use the set_default method to set value otherwise in certain test cases # the change in value is not permanent. 
cfg.CONF.set_default('auth_enable', False, group='pecan') EVENT_LOGS = [] def log_event(ctx, ex_id, data, event, timestamp, **kwargs): if not isinstance(ctx, context.MistralContext): raise TypeError('ctx is not type of MistralContext.') EVENT_LOGS.append((ex_id, event)) class NotifyEventsTest(base.NotifierTestCase): def setUp(self): super(NotifyEventsTest, self).setUp() self.publishers = { 'wbhk': notif.get_notification_publisher('webhook'), 'noop': notif.get_notification_publisher('noop') } self.publishers['wbhk'].publish = mock.MagicMock(side_effect=log_event) self.publishers['wbhk'].publish.reset_mock() self.publishers['noop'].publish = mock.MagicMock(side_effect=log_event) self.publishers['noop'].publish.reset_mock() del EVENT_LOGS[:] cfg.CONF.set_default('type', 'local', group='notifier') def tearDown(self): super(NotifyEventsTest, self).tearDown() cfg.CONF.set_default('notify', None, group='notifier') def test_notify_all_explicit(self): wf_text = """ version: '2.0' wf: tasks: t1: action: std.noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [ { 'type': 'webhook', 'event_types': events.EVENTS } ] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertIsNone(t2_ex.state_info) self.assertTrue(self.publishers['wbhk'].publish.called) self.assertEqual(6, len(EVENT_LOGS)) self.assertIn((wf_ex.id, events.WORKFLOW_LAUNCHED), EVENT_LOGS) self.assertIn((t1_ex.id, events.TASK_LAUNCHED), 
EVENT_LOGS) self.assertIn((t1_ex.id, events.TASK_SUCCEEDED), EVENT_LOGS) self.assertIn((t2_ex.id, events.TASK_LAUNCHED), EVENT_LOGS) self.assertIn((t2_ex.id, events.TASK_SUCCEEDED), EVENT_LOGS) self.assertIn((wf_ex.id, events.WORKFLOW_SUCCEEDED), EVENT_LOGS) def test_notify_all_implicit(self): wf_text = """ version: '2.0' wf: tasks: t1: action: std.noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertIsNone(t2_ex.state_info) self.assertTrue(self.publishers['wbhk'].publish.called) self.assertEqual(6, len(EVENT_LOGS)) self.assertIn((wf_ex.id, events.WORKFLOW_LAUNCHED), EVENT_LOGS) self.assertIn((t1_ex.id, events.TASK_LAUNCHED), EVENT_LOGS) self.assertIn((t1_ex.id, events.TASK_SUCCEEDED), EVENT_LOGS) self.assertIn((t2_ex.id, events.TASK_LAUNCHED), EVENT_LOGS) self.assertIn((t2_ex.id, events.TASK_SUCCEEDED), EVENT_LOGS) self.assertIn((wf_ex.id, events.WORKFLOW_SUCCEEDED), EVENT_LOGS) def test_notify_order(self): wf_text = """ version: '2.0' wf: tasks: t1: action: std.noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = 
wf_ex.task_executions self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertIsNone(t2_ex.state_info) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_notify_with_event_filter(self): wf_text = """ version: '2.0' wf: tasks: t1: action: std.noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [ { 'type': 'webhook', 'event_types': [ events.WORKFLOW_LAUNCHED, events.WORKFLOW_SUCCEEDED ] } ] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertIsNone(t2_ex.state_info) self.assertTrue(self.publishers['wbhk'].publish.called) self.assertEqual(2, len(EVENT_LOGS)) self.assertIn((wf_ex.id, events.WORKFLOW_LAUNCHED), EVENT_LOGS) self.assertIn((wf_ex.id, events.WORKFLOW_SUCCEEDED), EVENT_LOGS) def test_notify_multiple(self): self.assertFalse(self.publishers['wbhk'].publish.called) 
self.assertFalse(self.publishers['noop'].publish.called) wf_text = """ version: '2.0' wf: tasks: t1: action: std.noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [ {'type': 'webhook'}, {'type': 'noop'} ] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertIsNone(t2_ex.state_info) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_SUCCEEDED), (t1_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertTrue(self.publishers['noop'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_notify_from_cfg(self): self.assertFalse(self.publishers['wbhk'].publish.called) self.assertFalse(self.publishers['noop'].publish.called) wf_text = """ version: '2.0' wf: tasks: t1: action: std.noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [ {'type': 'webhook'}, {'type': 'noop'} ] cfg.CONF.set_default( 'notify', json.dumps(notify_options), group='notifier' ) wf_ex = self.engine.start_workflow('wf', '') self.await_workflow_success(wf_ex.id) with 
db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertIsNone(t2_ex.state_info) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_SUCCEEDED), (t1_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertTrue(self.publishers['noop'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_notify_from_cfg_and_params(self): self.assertFalse(self.publishers['wbhk'].publish.called) self.assertFalse(self.publishers['noop'].publish.called) wf_text = """ version: '2.0' wf: tasks: t1: action: std.noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) cfg.CONF.set_default( 'notify', json.dumps([{'type': 'noop'}]), group='notifier' ) params = {'notify': [{'type': 'webhook'}]} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) 
self.assertIsNone(t1_ex.state_info) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertIsNone(t2_ex.state_info) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_SUCCEEDED), (t1_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertTrue(self.publishers['noop'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_workbook_notify(self): wb_text = """ version: '2.0' name: wb workflows: wf1: tasks: t1: workflow: wf2 on-success: - t2 t2: action: std.noop wf2: tasks: t1: action: std.noop """ wb_svc.create_workbook_v2(wb_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf1_ex = self.engine.start_workflow('wb.wf1', '', **params) self.await_workflow_success(wf1_ex.id) with db_api.transaction(): wf1_ex = db_api.get_workflow_execution(wf1_ex.id) wf1_task_exs = wf1_ex.task_executions wf1_t1_ex = self._assert_single_item(wf1_task_exs, name='t1') wf1_t2_ex = self._assert_single_item(wf1_task_exs, name='t2') wf1_t1_act_exs = db_api.get_workflow_executions( task_execution_id=wf1_t1_ex.id ) wf2_ex = wf1_t1_act_exs[0] wf2_task_exs = wf2_ex.task_executions wf2_t1_ex = self._assert_single_item(wf2_task_exs, name='t1') self.assertEqual(states.SUCCESS, wf1_ex.state) self.assertIsNone(wf1_ex.state_info) self.assertEqual(2, len(wf1_task_exs)) self.assertEqual(states.SUCCESS, wf1_t1_ex.state) self.assertIsNone(wf1_t1_ex.state_info) self.assertEqual(states.SUCCESS, wf1_t2_ex.state) self.assertIsNone(wf1_t2_ex.state_info) self.assertEqual(1, len(wf1_t1_act_exs)) self.assertEqual(states.SUCCESS, wf2_ex.state) self.assertIsNone(wf2_ex.state_info) 
self.assertEqual(1, len(wf2_task_exs)) self.assertEqual(states.SUCCESS, wf2_t1_ex.state) self.assertIsNone(wf2_t1_ex.state_info) expected_order = [ (wf1_ex.id, events.WORKFLOW_LAUNCHED), (wf1_t1_ex.id, events.TASK_LAUNCHED), (wf2_ex.id, events.WORKFLOW_LAUNCHED), (wf2_t1_ex.id, events.TASK_LAUNCHED), (wf2_t1_ex.id, events.TASK_SUCCEEDED), (wf2_ex.id, events.WORKFLOW_SUCCEEDED), (wf1_t1_ex.id, events.TASK_SUCCEEDED), (wf1_t2_ex.id, events.TASK_LAUNCHED), (wf1_t2_ex.id, events.TASK_SUCCEEDED), (wf1_ex.id, events.WORKFLOW_SUCCEEDED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_notify_task_error(self): wf_text = """ version: '2.0' wf: tasks: t1: action: std.noop on-success: - t2 t2: action: std.fail """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_error(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.ERROR, wf_ex.state) self.assertIsNotNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) self.assertEqual(states.ERROR, t2_ex.state) self.assertIsNotNone(t2_ex.state_info) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_FAILED), (wf_ex.id, events.WORKFLOW_FAILED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_notify_task_transition_fail(self): wf_text = """ version: '2.0' wf: tasks: t1: action: std.noop on-complete: - fail """ wf_svc.create_workflows(wf_text) notify_options = 
[{'type': 'webhook'}] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_error(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.ERROR, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(1, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_SUCCEEDED), (wf_ex.id, events.WORKFLOW_FAILED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_notify_with_items_task(self): wf_text = """ version: '2.0' wf: tasks: t1: with-items: i in <% list(range(0, 3)) %> action: std.noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_success(wf_ex.id) self._sleep(1) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertIsNone(t2_ex.state_info) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, 
EVENT_LOGS) def test_notify_pause_resume(self): wf_text = """ version: '2.0' wf: tasks: t1: action: std.async_noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_running(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id) self.assertEqual(states.RUNNING, wf_ex.state) self.assertEqual(1, len(task_exs)) self.assertEqual(states.RUNNING, t1_ex.state) self.assertEqual(1, len(t1_act_exs)) self.assertEqual(states.RUNNING, t1_act_exs[0].state) # Pause the workflow. self.engine.pause_workflow(wf_ex.id) self.await_workflow_paused(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id) # Workflow is paused but the task is still running as expected. self.assertEqual(states.PAUSED, wf_ex.state) self.assertEqual(1, len(task_exs)) self.assertEqual(states.RUNNING, t1_ex.state) self.assertEqual(1, len(t1_act_exs)) self.assertEqual(states.RUNNING, t1_act_exs[0].state) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (wf_ex.id, events.WORKFLOW_PAUSED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) # Complete action execution of task 1. 
self.engine.on_action_complete( t1_act_exs[0].id, ml_actions.Result(data={'result': 'foobar'}) ) self.await_workflow_paused(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.PAUSED, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(1, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (wf_ex.id, events.WORKFLOW_PAUSED), (t1_ex.id, events.TASK_SUCCEEDED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) # Resume the workflow. self.engine.resume_workflow(wf_ex.id) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertIsNone(t2_ex.state_info) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (wf_ex.id, events.WORKFLOW_PAUSED), (t1_ex.id, events.TASK_SUCCEEDED), (wf_ex.id, events.WORKFLOW_RESUMED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_notify_pause_resume_task(self): wf_text = """ version: '2.0' wf: tasks: t1: action: std.async_noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params 
= {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_running(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id) self.assertEqual(states.RUNNING, wf_ex.state) self.assertEqual(1, len(task_exs)) self.assertEqual(states.RUNNING, t1_ex.state) self.assertEqual(1, len(t1_act_exs)) self.assertEqual(states.RUNNING, t1_act_exs[0].state) # Pause the action execution of task 1. self.engine.on_action_update(t1_act_exs[0].id, states.PAUSED) self.await_workflow_paused(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id) self.assertEqual(states.PAUSED, wf_ex.state) self.assertEqual(1, len(task_exs)) self.assertEqual(states.PAUSED, t1_ex.state) self.assertEqual(1, len(t1_act_exs)) self.assertEqual(states.PAUSED, t1_act_exs[0].state) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_PAUSED), (wf_ex.id, events.WORKFLOW_PAUSED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) # Resume the action execution of task 1. 
self.engine.on_action_update(t1_act_exs[0].id, states.RUNNING) self.await_task_running(t1_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id) self.assertEqual(states.RUNNING, wf_ex.state) self.assertEqual(1, len(task_exs)) self.assertEqual(states.RUNNING, t1_ex.state) self.assertEqual(1, len(t1_act_exs)) self.assertEqual(states.RUNNING, t1_act_exs[0].state) # Complete action execution of task 1. self.engine.on_action_complete( t1_act_exs[0].id, ml_actions.Result(data={'result': 'foobar'}) ) # Wait for the workflow execution to complete. self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.SUCCESS, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(2, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertIsNone(t2_ex.state_info) # TASK_RESUMED comes before WORKFLOW_RESUMED because # this test resumed the workflow with on_action_update. 
expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_PAUSED), (wf_ex.id, events.WORKFLOW_PAUSED), (t1_ex.id, events.TASK_RESUMED), (wf_ex.id, events.WORKFLOW_RESUMED), (t1_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_notify_cancel(self): wf_text = """ version: '2.0' wf: tasks: t1: action: std.async_noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_running(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id) self.assertEqual(states.RUNNING, wf_ex.state) self.assertEqual(1, len(task_exs)) self.assertEqual(states.RUNNING, t1_ex.state) self.assertEqual(1, len(t1_act_exs)) self.assertEqual(states.RUNNING, t1_act_exs[0].state) # Cancel the workflow. self.engine.stop_workflow(wf_ex.id, states.CANCELLED) self.await_workflow_cancelled(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id) # Workflow is cancelled but the task is still running as expected. 
self.assertEqual(states.CANCELLED, wf_ex.state) self.assertEqual(1, len(task_exs)) self.assertEqual(states.RUNNING, t1_ex.state) self.assertEqual(1, len(t1_act_exs)) self.assertEqual(states.RUNNING, t1_act_exs[0].state) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (wf_ex.id, events.WORKFLOW_CANCELLED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) # Complete action execution of task 1. self.engine.on_action_complete( t1_act_exs[0].id, ml_actions.Result(data={'result': 'foobar'}) ) self.await_workflow_cancelled(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(states.CANCELLED, wf_ex.state) self.assertIsNone(wf_ex.state_info) self.assertEqual(1, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='t1') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertIsNone(t1_ex.state_info) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (wf_ex.id, events.WORKFLOW_CANCELLED), (t1_ex.id, events.TASK_SUCCEEDED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_notify_cancel_task(self): wf_text = """ version: '2.0' wf: tasks: t1: action: std.async_noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_running(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id) self.assertEqual(states.RUNNING, wf_ex.state) self.assertEqual(1, len(task_exs)) self.assertEqual(states.RUNNING, t1_ex.state) self.assertEqual(1, 
len(t1_act_exs)) self.assertEqual(states.RUNNING, t1_act_exs[0].state) # Cancel the action execution of task 1. self.engine.on_action_update(t1_act_exs[0].id, states.CANCELLED) self.await_workflow_cancelled(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') t1_act_exs = db_api.get_action_executions(task_execution_id=t1_ex.id) self.assertEqual(states.CANCELLED, wf_ex.state) self.assertEqual(1, len(task_exs)) self.assertEqual(states.CANCELLED, t1_ex.state) self.assertEqual(1, len(t1_act_exs)) self.assertEqual(states.CANCELLED, t1_act_exs[0].state) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_CANCELLED), (wf_ex.id, events.WORKFLOW_CANCELLED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) def test_notify_task_input_error(self): wf_text = """--- version: '2.0' wf: tasks: task1: input: url: <% $.ItWillBeError %> action: std.http on-error: task2 task2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_error(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions self.assertEqual(1, len(task_exs)) t1_ex = self._assert_single_item(task_exs, name='task1') self.assertEqual(states.ERROR, t1_ex.state) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_FAILED), (wf_ex.id, events.WORKFLOW_FAILED) ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) @mock.patch('mistral.actions.std_actions.NoOpAction.run', mock.MagicMock( side_effect=[Exception(), None, None])) def test_notify_rerun_task(self): wf_text = """ 
version: '2.0' wf: tasks: t1: action: std.noop on-success: - t2 t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf_ex = self.engine.start_workflow('wf', '', **params) self.await_workflow_error(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') self.assertEqual(states.ERROR, t1_ex.state) self.assertEqual(1, len(task_exs)) # Rerun the workflow. self.engine.rerun_workflow(t1_ex.id) self.await_workflow_success(wf_ex.id) with db_api.transaction(): wf_ex = db_api.get_workflow_execution(wf_ex.id) task_exs = wf_ex.task_executions t1_ex = self._assert_single_item(task_exs, name='t1') t2_ex = self._assert_single_item(task_exs, name='t2') self.assertEqual(states.SUCCESS, t1_ex.state) self.assertEqual(states.SUCCESS, t2_ex.state) self.assertEqual(2, len(task_exs)) expected_order = [ (wf_ex.id, events.WORKFLOW_LAUNCHED), (t1_ex.id, events.TASK_LAUNCHED), (t1_ex.id, events.TASK_FAILED), (wf_ex.id, events.WORKFLOW_FAILED), # rerun (wf_ex.id, events.WORKFLOW_RERUN), (t1_ex.id, events.TASK_RERUN), (t1_ex.id, events.TASK_SUCCEEDED), (t2_ex.id, events.TASK_LAUNCHED), (t2_ex.id, events.TASK_SUCCEEDED), (wf_ex.id, events.WORKFLOW_SUCCEEDED), ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS) @mock.patch('mistral.actions.std_actions.NoOpAction.run', mock.MagicMock( side_effect=[Exception(), None, None, None])) def test_notify_rerun_nested_workflow(self): wf_text = """ version: '2.0' wf_1: tasks: wf_1_t1: workflow: wf_2 on-success: - wf_1_t2 wf_1_t2: action: std.noop wf_2: tasks: wf_2_t1: action: std.noop on-success: - wf_2_t2 wf_2_t2: action: std.noop """ wf_svc.create_workflows(wf_text) notify_options = [{'type': 'webhook'}] params = {'notify': notify_options} wf_1_ex = self.engine.start_workflow('wf_1', '', **params) 
self.await_workflow_error(wf_1_ex.id) with db_api.transaction(): wf_exs = db_api.get_workflow_executions() self._assert_single_item( wf_exs, name='wf_1', state=states.ERROR ) self._assert_single_item( wf_exs, name='wf_2', state=states.ERROR ) task_exs = db_api.get_task_executions() self._assert_single_item( task_exs, name='wf_1_t1', state=states.ERROR ) wf_2_t1 = self._assert_single_item( task_exs, name='wf_2_t1', state=states.ERROR ) self.assertEqual(2, len(task_exs)) self.assertEqual(2, len(wf_exs)) # Rerun the nested workflow. self.engine.rerun_workflow(wf_2_t1.id) self.await_workflow_success(wf_1_ex.id) with db_api.transaction(): wf_exs = db_api.get_workflow_executions() wf_1_ex = self._assert_single_item( wf_exs, name='wf_1', state=states.SUCCESS ) wf_2_ex = self._assert_single_item( wf_exs, name='wf_2', state=states.SUCCESS ) task_wf_1_exs = wf_1_ex.task_executions wf_1_t1 = self._assert_single_item( task_wf_1_exs, name='wf_1_t1', state=states.SUCCESS ) wf_1_t2 = self._assert_single_item( task_wf_1_exs, name='wf_1_t2', state=states.SUCCESS ) task_wf_2_exs = wf_2_ex.task_executions wf_2_t1 = self._assert_single_item( task_wf_2_exs, name='wf_2_t1', state=states.SUCCESS ) wf_2_t2 = self._assert_single_item( task_wf_2_exs, name='wf_2_t2', state=states.SUCCESS ) self.assertEqual(2, len(task_wf_1_exs)) self.assertEqual(2, len(task_wf_2_exs)) self.assertEqual(2, len(wf_exs)) expected_order = [ (wf_1_ex.id, events.WORKFLOW_LAUNCHED), (wf_1_t1.id, events.TASK_LAUNCHED), (wf_2_ex.id, events.WORKFLOW_LAUNCHED), (wf_2_t1.id, events.TASK_LAUNCHED), (wf_2_t1.id, events.TASK_FAILED), (wf_2_ex.id, events.WORKFLOW_FAILED), (wf_1_t1.id, events.TASK_FAILED), (wf_1_ex.id, events.WORKFLOW_FAILED), # rerun (wf_2_ex.id, events.WORKFLOW_RERUN), (wf_1_ex.id, events.WORKFLOW_RERUN), (wf_1_t1.id, events.TASK_RERUN), (wf_2_t1.id, events.TASK_RERUN), (wf_2_t1.id, events.TASK_SUCCEEDED), (wf_2_t2.id, events.TASK_LAUNCHED), (wf_2_t2.id, events.TASK_SUCCEEDED), (wf_2_ex.id, 
events.WORKFLOW_SUCCEEDED), (wf_1_t1.id, events.TASK_SUCCEEDED), (wf_1_t2.id, events.TASK_LAUNCHED), (wf_1_t2.id, events.TASK_SUCCEEDED), (wf_1_ex.id, events.WORKFLOW_SUCCEEDED), ] self.assertTrue(self.publishers['wbhk'].publish.called) self.assertListEqual(expected_order, EVENT_LOGS)
apache-2.0
thymeleaf/thymeleaf-spring
thymeleaf-spring6/src/main/java/org/thymeleaf/spring6/processor/SpringValueTagProcessor.java
4978
/* * ============================================================================= * * Copyright (c) 2011-2018, The THYMELEAF team (http://www.thymeleaf.org) * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * * ============================================================================= */ package org.thymeleaf.spring6.processor; import org.thymeleaf.context.ITemplateContext; import org.thymeleaf.engine.AttributeDefinition; import org.thymeleaf.engine.AttributeDefinitions; import org.thymeleaf.engine.AttributeName; import org.thymeleaf.engine.IAttributeDefinitionsAware; import org.thymeleaf.model.IProcessableElementTag; import org.thymeleaf.processor.element.IElementTagStructureHandler; import org.thymeleaf.spring6.requestdata.RequestDataValueProcessorUtils; import org.thymeleaf.standard.processor.AbstractStandardExpressionAttributeTagProcessor; import org.thymeleaf.standard.util.StandardProcessorUtils; import org.thymeleaf.templatemode.TemplateMode; import org.thymeleaf.util.Validate; import org.unbescape.html.HtmlEscape; /** * * @author Daniel Fern&aacute;ndez * * @since 3.0.3 * */ public final class SpringValueTagProcessor extends AbstractStandardExpressionAttributeTagProcessor implements IAttributeDefinitionsAware { // This is 1010 in order to make sure it is executed after "name" and "type" public static final int ATTR_PRECEDENCE = 1010; public static final String TARGET_ATTR_NAME = "value"; private static final TemplateMode TEMPLATE_MODE = TemplateMode.HTML; private static 
final String TYPE_ATTR_NAME = "type"; private static final String NAME_ATTR_NAME = "name"; private AttributeDefinition targetAttributeDefinition; private AttributeDefinition fieldAttributeDefinition; private AttributeDefinition typeAttributeDefinition; private AttributeDefinition nameAttributeDefinition; public SpringValueTagProcessor(final String dialectPrefix) { super(TEMPLATE_MODE, dialectPrefix, TARGET_ATTR_NAME, ATTR_PRECEDENCE, false, false); } public void setAttributeDefinitions(final AttributeDefinitions attributeDefinitions) { Validate.notNull(attributeDefinitions, "Attribute Definitions cannot be null"); // We precompute the AttributeDefinitions in order to being able to use much // faster methods for setting/replacing attributes on the ElementAttributes implementation final String dialectPrefix = getMatchingAttributeName().getMatchingAttributeName().getPrefix(); this.targetAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, TARGET_ATTR_NAME); this.fieldAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, dialectPrefix, AbstractSpringFieldTagProcessor.ATTR_NAME); this.typeAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, TYPE_ATTR_NAME); this.nameAttributeDefinition = attributeDefinitions.forName(TEMPLATE_MODE, NAME_ATTR_NAME); } @Override protected final void doProcess( final ITemplateContext context, final IProcessableElementTag tag, final AttributeName attributeName, final String attributeValue, final Object expressionResult, final IElementTagStructureHandler structureHandler) { String newAttributeValue = HtmlEscape.escapeHtml4Xml(expressionResult == null ? 
"" : expressionResult.toString()); // Let RequestDataValueProcessor modify the attribute value if needed, but only in the case we don't also have // a 'th:field' - in such case, we will let th:field do its job if (!tag.hasAttribute(this.fieldAttributeDefinition.getAttributeName())) { // We will need to know the 'name' and 'type' attribute values in order to (potentially) modify the 'value' final String nameValue = tag.getAttributeValue(this.nameAttributeDefinition.getAttributeName()); final String typeValue = tag.getAttributeValue(this.typeAttributeDefinition.getAttributeName()); newAttributeValue = RequestDataValueProcessorUtils.processFormFieldValue(context, nameValue, newAttributeValue, typeValue); } // Set the 'value' attribute StandardProcessorUtils.replaceAttribute(structureHandler, attributeName, this.targetAttributeDefinition, TARGET_ATTR_NAME, (newAttributeValue == null? "" : newAttributeValue)); } }
apache-2.0
thirdy/TomP2P
core/src/main/java/net/tomp2p/message/Message.java
36683
/* * Copyright 2009 Thomas Bocek * * Licensed under the Apache License, Version 2.0 (the "License"); you may not * use this file except in compliance with the License. You may obtain a copy of * the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the * License for the specific language governing permissions and limitations under * the License. */ package net.tomp2p.message; import java.net.InetSocketAddress; import java.security.KeyPair; import java.security.PrivateKey; import java.security.PublicKey; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.NavigableMap; import java.util.Queue; import java.util.Random; import java.util.TreeMap; import net.tomp2p.connection.PeerConnection; import net.tomp2p.peers.Number160; import net.tomp2p.peers.Number640; import net.tomp2p.peers.PeerAddress; import net.tomp2p.peers.PeerSocketAddress; import net.tomp2p.rpc.RPC; import net.tomp2p.rpc.RPC.Commands; import net.tomp2p.rpc.SimpleBloomFilter; import net.tomp2p.storage.Data; /** * The message is in binary format in TomP2P. It has several header and payload fields. Since * we do the serialization/encoding manually, we do not need a serialization field. * * @author Thomas Bocek */ public class Message { // used for creating random message id private static final transient Random RND = new Random(); public static final int CONTENT_TYPE_LENGTH = 8; /** * 8 x 4 bit. 
*/ public enum Content { EMPTY, KEY, MAP_KEY640_DATA, MAP_KEY640_KEYS, SET_KEY640, SET_NEIGHBORS, BYTE_BUFFER, LONG, INTEGER, PUBLIC_KEY_SIGNATURE, SET_TRACKER_DATA, BLOOM_FILTER, MAP_KEY640_BYTE, PUBLIC_KEY, SET_PEER_SOCKET, USER1 }; /** * 1 x 4 bit. */ public enum Type { /** * <ul> * <li>the normal request</li> * <li>for {@link Commands#NEIGHBOR} means check for put (no digest) for tracker and storage</li> * <li>for TASK is submit new task</li> * <li>for {@link Commands#RELAY} is for setup</li> * <li>for {@link Commands#RCON} means forward reverse connection to unreachable peer</li> * </ul> */ REQUEST_1, /** * <ul> * <li>for {@link Commands#GET} returns the extended digest (hashes of all stored data)</li> * <li>for {@link Commands#PUT}/{@link Commands#ADD}/COMPARE_PUT means protect domain</li> * <li>for {@link Commands#REMOVE} means send back results</li> * <li>for RAW_DATA means serialize object</li> * <li>for {@link Commands#NEIGHBOR} means check for get (with digest) for storage</li> * <li>for TASK is status</li> * <li>for {@link Commands#RELAY} means send piggybacked message</li> * <li>for {@link Commands#RCON} open TCP channel and transmit {@link PeerConnection}</li> * </ul> */ REQUEST_2, /** * <ul> * <li>for {@link Commands#GET} returns a Bloom filter</li> * <li>for {@link Commands#PUT} means put if absent</li> * <li>for COMPARE_PUT means partial (partial means that put those data that match compare, ignore * others)</li> * <li>for TASK is send back result</li> * <li>for {@link Commands#RELAY} means update the routing table</li> * <li>for {@link Commands#RCON} use open {@link PeerConnection} to transmit original message</li> * </ul> */ REQUEST_3, /** * <ul> * <li>for {@link Commands#GET} returns a range (min/max)</li> * <li>for {@link Commands#PUT} for PUT means protect domain and put if absent</li> * <li>for COMPARE_PUT means partial and protect domain</li> * <li>for {@link Commands#NEIGHBOR} means check for put (with digest) for task</li> * <li>for {@link 
Commands#RELAY} fetch the buffer from the relay peer (Android only)</li> * </ul> */ REQUEST_4, /** * <ul> * <li>for {@link Commands#RELAY} means that a late response arrived at the relay peer (slow peers * only)</li> * </ul> */ REQUEST_5, /** * <ul> * <li>for {@link Commands#PEX} means fire and forget, coming from mesh</li> * </ul> */ REQUEST_FF_1, /** * <ul> * <li>for {@link Commands#PEX} means fire and forget, coming from primary</li> * </ul> */ REQUEST_FF_2, /** * The request was processed and everything is alright */ OK, /** * When the called node has {@link PeerAddress#isSlow()} activated, the relay peer returns a partial ok */ PARTIALLY_OK, NOT_FOUND, DENIED, UNKNOWN_ID, EXCEPTION, CANCEL, /** * Still unused */ RESERVED1 }; // Header: private int messageId; private int version; private Type type; private byte command; private PeerAddress sender; private PeerAddress recipient; private int options = 0; // Payload: // we can send 8 types private Content[] contentTypes = new Content[CONTENT_TYPE_LENGTH]; private final transient Queue<MessageContentIndex> contentReferences = new LinkedList<MessageContentIndex>(); // ********* Here comes the payload objects ************ // The content lists: private List<NeighborSet> neighborsList = null; private List<Number160> keyList = null; private List<SimpleBloomFilter<Number160>> bloomFilterList = null; private List<DataMap> dataMapList = null; // private PublicKey publicKey = null; // there can only be one private List<Integer> integerList = null; private List<Long> longList = null; private List<KeyCollection> keyCollectionList = null; private List<KeyMap640Keys> keyMap640KeysList = null; private List<KeyMapByte> keyMapByteList = null; private List<Buffer> bufferList = null; private List<TrackerData> trackerDataList = null; private List<PublicKey> publicKeyList = null; private List<PeerSocketAddress> peerSocketAddressList = null; private SignatureCodec signatureEncode = null; // this will not be transferred, status 
variables private transient boolean presetContentTypes = false; private transient PrivateKey privateKey; private transient InetSocketAddress senderSocket; private transient InetSocketAddress recipientSocket; private transient boolean udp = false; private transient boolean done = false; private transient boolean sign = false; private transient boolean content = false; private transient boolean verified = false; private transient boolean sendSelf = false; private transient PeerAddress recipientRelay; private transient PeerAddress recipientReflected; /** * Creates message with a random ID. */ public Message() { this.messageId = RND.nextInt(); } /** * Randomly generated message ID. * * @return message Id */ public int messageId() { return messageId; } /** * For deserialization, we need to set the id. * * @param messageId * The message Id * @return This class */ public Message messageId(final int messageId) { this.messageId = messageId; return this; } /** * Returns the version, which is 32bit. Each application can choose and version to not intefere with other * applications * * @return The application version that uses this P2P framework */ public int version() { return version; } /** * For deserialization. * * @param version * The 24bit version * @return This class */ public Message version(final int version) { this.version = version; return this; } /** * Determines if its a request or command reply, and what kind of reply (error, warning states). * * @return Type of the message */ public Type type() { return type; } /** * Set the message type. Either its a request or reply (with error and warning codes). * * @param type * Type of the message * @return This class */ public Message type(final Type type) { this.type = type; return this; } /** * Command of the message, such as GET, PING, etc. * * @return Command */ public byte command() { return command; } /** * Command of the message, such as GET, PING, etc. 
* * @param command * Command * @return This class */ public Message command(final byte command) { this.command = command; return this; } /** * The ID of the sender. Note that the IP is set via the socket. * * @return The ID of the sender. */ public PeerAddress sender() { return sender; } /** * The ID of the sender. The IP of the sender will *not* be transferred, as this information is in the IP packet. * * @param sender * The ID of the sender. * @return This class */ public Message sender(final PeerAddress sender) { this.sender = sender; return this; } /** * The ID of the recipient. Note that the IP is set via the socket. * * @return The ID of the recipient */ public PeerAddress recipient() { return recipient; } /** * Set the ID of the recipient. The IP is used to connect to the recipient, but the IP is *not* transferred. * * @param recipient * The ID of the recipient * @return This class */ public Message recipient(final PeerAddress recipient) { this.recipient = recipient; return this; } public PeerAddress recipientRelay() { return recipientRelay; } public Message recipientRelay(PeerAddress recipientRelay) { this.recipientRelay = recipientRelay; return this; } public PeerAddress recipientReflected() { return recipientReflected; } public Message recipientReflected(PeerAddress recipientReflected) { this.recipientReflected = recipientReflected; return this; } /** * Return content types. Content type can be empty if not set * * @return Content type 1 */ public Content[] contentTypes() { return contentTypes; } /** * Convenient method to set content type. Set first content type 1, if this is set (not empty), then set the second * one, etc. 
* * @param contentType * The content type to set * @return This class */ public Message contentType(final Content contentType) { for (int i = 0, reference = 0; i < CONTENT_TYPE_LENGTH; i++) { if (contentTypes[i] == null) { if (contentType == Content.PUBLIC_KEY_SIGNATURE && i != 0) { throw new IllegalStateException("The public key needs to be the first to be set."); } contentTypes[i] = contentType; contentReferences.add(new MessageContentIndex(reference, contentType)); return this; } else if (contentTypes[i] == contentType) { reference++; } else if (contentTypes[i] == Content.PUBLIC_KEY_SIGNATURE || contentTypes[i] == Content.PUBLIC_KEY) { //special handling for public key as we store both in the same list if (contentType == Content.PUBLIC_KEY_SIGNATURE || contentType == Content.PUBLIC_KEY) { reference++; } } } throw new IllegalStateException("Already set 8 content types."); } /** * Restore the content references if only the content types array is * present. The content references are removed when decoding a message. That * means if a message was received it cannot be used a second time as the * content references are not there anymore. This method restores the * content references based on the content types of the message. 
*/ public void restoreContentReferences() { Map<Content, Integer> refs = new HashMap<Content, Integer>(contentTypes.length * 2); for (Content contentType : contentTypes) { if (contentType == Content.EMPTY || contentType == null) { return; } int index = 0; if (contentType == Content.PUBLIC_KEY_SIGNATURE || contentType == Content.PUBLIC_KEY) { Integer i1 = refs.get(Content.PUBLIC_KEY_SIGNATURE); if(i1 != null) { index = i1.intValue(); } else { i1 = refs.get(Content.PUBLIC_KEY); if(i1 != null) { index = i1.intValue(); } } } if (!refs.containsKey(contentType)) { refs.put(contentType, index); } else { index = refs.get(contentType); } contentReferences.add(new MessageContentIndex(index, contentType)); refs.put(contentType, index + 1); } } /** * Restores all buffers such that they can be re-read (e.g. used for encoding). If the message does not * have any buffer, this method does nothing. */ public void restoreBuffers() { for (Buffer buffer : bufferList()) { buffer.reset(); } } /** * Sets or replaces the content type at a specific index. * * @param index * The index * @param contentType * The content type * @return This class */ public Message contentType(final int index, final Content contentType) { contentTypes[index] = contentType; return this; } /** * Used for deserialization. * * @param contentTypes * The content types that were decoded. 
* @return This class */ public Message contentTypes(final Content[] contentTypes) { this.contentTypes = contentTypes; return this; } /** * @return The serialized content and references to the respective arrays */ public Queue<MessageContentIndex> contentReferences() { return contentReferences; } /** * @return True if we have content and not only the header */ public boolean hasContent() { return contentReferences.size() > 0 || content; } /** * @param content * We can set this already in the header to know if we have content or not * @return This class */ public Message hasContent(final boolean content) { this.content = content; return this; } // Types of requests /** * @return True if this is a request, a regural or a fire and forget */ public boolean isRequest() { return type == Type.REQUEST_1 || type == Type.REQUEST_2 || type == Type.REQUEST_3 || type == Type.REQUEST_4 || type == Type.REQUEST_5 || type == Type.REQUEST_FF_1 || type == Type.REQUEST_FF_2; } /** * @return True if its a fire and forget, that means we don't expect an answer */ public boolean isFireAndForget() { return type == Type.REQUEST_FF_1 || type == Type.REQUEST_FF_2; } /** * @return True if the message was ok, or at least send partial data */ public boolean isOk() { return type == Type.OK || type == Type.PARTIALLY_OK; } /** * @return True if the message arrived, but data was not found or access was denied */ public boolean isNotOk() { return type == Type.NOT_FOUND || type == Type.DENIED; } /** * @return True if the message contained an unexpected error or behavior */ public boolean isError() { return isError(type); } /** * @param type * The type to check * @return True if the message contained an unexpected error or behavior */ public static boolean isError(final Type type) { return type == Type.UNKNOWN_ID || type == Type.EXCEPTION || type == Type.CANCEL; } /** * @param options * The option from the last byte of the header * @return This class */ public Message options(final int options) { 
this.options = options; return this; } /** * @return The options from the last byte of the header */ public int options() { return options; } /** * * @param isKeepAlive * True if the connection should remain open. We need to announce this in the header, as otherwise the * other end has an idle handler that will close the connection. * @return This class */ public Message keepAlive(final boolean isKeepAlive) { if (isKeepAlive) { options |= 1; } else { options &= ~1; } return this; } /** * @return True if this message was sent on a connection that should be kept alive */ public boolean isKeepAlive() { return (options & 1) > 0; } public Message streaming() { return streaming(true); } public Message streaming(boolean streaming) { if (streaming) { options |= 2; } else { options &= ~2; } return this; } public boolean isStreaming() { return (options & 2) > 0; } public Message expectDuplicate(boolean expectDuplicate) { if (expectDuplicate) { options |= 4; } else { options &= ~4; } return this; } public boolean isExpectDuplicate() { return (options & 4) > 0; } // Header data ends here *********************************** static payload starts now public Message key(final Number160 key) { if (!presetContentTypes) { contentType(Content.KEY); } if (keyList == null) { keyList = new ArrayList<Number160>(1); } keyList.add(key); return this; } public List<Number160> keyList() { if (keyList == null) { return Collections.emptyList(); } return keyList; } public Number160 key(final int index) { if (keyList == null || index > keyList.size() - 1) { return null; } return keyList.get(index); } public Message bloomFilter(final SimpleBloomFilter<Number160> bloomFilter) { if (!presetContentTypes) { contentType(Content.BLOOM_FILTER); } if (bloomFilterList == null) { bloomFilterList = new ArrayList<SimpleBloomFilter<Number160>>(1); } bloomFilterList.add(bloomFilter); return this; } public List<SimpleBloomFilter<Number160>> bloomFilterList() { if (bloomFilterList == null) { return 
Collections.emptyList(); } return bloomFilterList; } public SimpleBloomFilter<Number160> bloomFilter(final int index) { if (bloomFilterList == null || index > bloomFilterList.size() - 1) { return null; } return bloomFilterList.get(index); } public Message publicKeyAndSign(KeyPair keyPair) { if (!presetContentTypes) { contentType(Content.PUBLIC_KEY_SIGNATURE); } publicKey0(keyPair.getPublic()); this.privateKey = keyPair.getPrivate(); return this; } public Message intValue(final int integer) { if (!presetContentTypes) { contentType(Content.INTEGER); } if (integerList == null) { integerList = new ArrayList<Integer>(1); } this.integerList.add(integer); return this; } public List<Integer> intList() { if (integerList == null) { return Collections.emptyList(); } return integerList; } public Integer intAt(final int index) { if (integerList == null || index > integerList.size() - 1) { return null; } return integerList.get(index); } public Message longValue(long long0) { if (!presetContentTypes) { contentType(Content.LONG); } if (longList == null) { longList = new ArrayList<Long>(1); } this.longList.add(long0); return this; } public List<Long> longList() { if (longList == null) { return Collections.emptyList(); } return longList; } public Long longAt(int index) { if (longList == null || index > longList.size() - 1) { return null; } return longList.get(index); } public Message neighborsSet(final NeighborSet neighborSet) { if (!presetContentTypes) { contentType(Content.SET_NEIGHBORS); } if (neighborsList == null) { neighborsList = new ArrayList<NeighborSet>(1); } this.neighborsList.add(neighborSet); return this; } public List<NeighborSet> neighborsSetList() { if (neighborsList == null) { return Collections.emptyList(); } return neighborsList; } public NeighborSet neighborsSet(final int index) { if (neighborsList == null || index > neighborsList.size() - 1) { return null; } return neighborsList.get(index); } public Message setDataMap(final DataMap dataMap) { if 
(!presetContentTypes) { contentType(Content.MAP_KEY640_DATA); } if (dataMapList == null) { dataMapList = new ArrayList<DataMap>(1); } this.dataMapList.add(dataMap); return this; } public List<DataMap> dataMapList() { if (dataMapList == null) { return Collections.emptyList(); } return dataMapList; } public DataMap dataMap(final int index) { if (dataMapList == null || index > dataMapList.size() - 1) { return null; } return dataMapList.get(index); } public Message keyCollection(final KeyCollection key) { if (!presetContentTypes) { contentType(Content.SET_KEY640); } if (keyCollectionList == null) { keyCollectionList = new ArrayList<KeyCollection>(1); } keyCollectionList.add(key); return this; } public List<KeyCollection> keyCollectionList() { if (keyCollectionList == null) { return Collections.emptyList(); } return keyCollectionList; } public KeyCollection keyCollection(final int index) { if (keyCollectionList == null || index > keyCollectionList.size() - 1) { return null; } return keyCollectionList.get(index); } public Message keyMap640Keys(final KeyMap640Keys keyMap) { if (!presetContentTypes) { contentType(Content.MAP_KEY640_KEYS); } if (keyMap640KeysList == null) { keyMap640KeysList = new ArrayList<KeyMap640Keys>(1); } keyMap640KeysList.add(keyMap); return this; } public List<KeyMap640Keys> keyMap640KeysList() { if (keyMap640KeysList == null) { return Collections.emptyList(); } return keyMap640KeysList; } public KeyMap640Keys keyMap640Keys(final int index) { if (keyMap640KeysList == null || index > keyMap640KeysList.size() - 1) { return null; } return keyMap640KeysList.get(index); } public Message keyMapByte(final KeyMapByte keyMap) { if (!presetContentTypes) { contentType(Content.MAP_KEY640_BYTE); } if (keyMapByteList == null) { keyMapByteList = new ArrayList<KeyMapByte>(1); } keyMapByteList.add(keyMap); return this; } public List<KeyMapByte> keyMapByteList() { if (keyMapByteList == null) { return Collections.emptyList(); } return keyMapByteList; } public 
KeyMapByte keyMapByte(final int index) { if (keyMapByteList == null || index > keyMapByteList.size() - 1) { return null; } return keyMapByteList.get(index); } public Message publicKey(final PublicKey publicKey) { if (!presetContentTypes) { contentType(Content.PUBLIC_KEY); } if(publicKeyList == null) { publicKeyList = new ArrayList<PublicKey>(1); } publicKeyList.add(publicKey); return this; } private Message publicKey0(final PublicKey publicKey) { if(publicKeyList == null) { publicKeyList = new ArrayList<PublicKey>(1); } publicKeyList.add(publicKey); return this; } public List<PublicKey> publicKeyList() { if (publicKeyList == null) { return Collections.emptyList(); } return publicKeyList; } public PublicKey publicKey(final int index) { if (publicKeyList == null || index > publicKeyList.size() - 1) { return null; } return publicKeyList.get(index); } public Message peerSocketAddresses(Collection<PeerSocketAddress> peerSocketAddresses) { if (!presetContentTypes) { contentType(Content.SET_PEER_SOCKET); } if(this.peerSocketAddressList == null) { this.peerSocketAddressList = new ArrayList<PeerSocketAddress>(peerSocketAddresses.size()); } this.peerSocketAddressList.addAll(peerSocketAddresses); return this; } public List<PeerSocketAddress> peerSocketAddresses() { if (peerSocketAddressList == null) { return Collections.emptyList(); } return peerSocketAddressList; } /*public PublicKey getPublicKey() { return publicKey; }*/ public PrivateKey privateKey() { return privateKey; } public Message buffer(final Buffer byteBuf) { if (!presetContentTypes) { contentType(Content.BYTE_BUFFER); } if (bufferList == null) { bufferList = new ArrayList<Buffer>(1); } bufferList.add(byteBuf); return this; } public List<Buffer> bufferList() { if (bufferList == null) { return Collections.emptyList(); } return bufferList; } public Buffer buffer(final int index) { if (bufferList == null || index > bufferList.size() - 1) { return null; } return bufferList.get(index); } public Message 
trackerData(final TrackerData trackerData) { if (!presetContentTypes) { contentType(Content.SET_TRACKER_DATA); } if (trackerDataList == null) { trackerDataList = new ArrayList<TrackerData>(1); } this.trackerDataList.add(trackerData); return this; } public List<TrackerData> trackerDataList() { if (trackerDataList == null) { return Collections.emptyList(); } return trackerDataList; } public TrackerData trackerData(final int index) { if (trackerDataList == null || index > trackerDataList.size() - 1) { return null; } return trackerDataList.get(index); } public Message receivedSignature(SignatureCodec signatureEncode) { this.signatureEncode = signatureEncode; return this; } public SignatureCodec receivedSignature() { return signatureEncode; } //*************************************** End of content payload ******************** @Override public String toString() { final StringBuilder sb = new StringBuilder("msgid="); return sb.append(messageId()).append(",t=").append(type.toString()). append(",c=").append(RPC.Commands.find(command).toString()).append(",").append(isUdp()?"udp":"tcp"). append(",s=").append(sender).append(",r=").append(recipient).toString(); } // *************************** No transferable objects here ********************************* /** * If we are setting values from the decoder, then the content type is already set. * * @param presetContentTypes * True if the content type is already set. * @return This class */ public Message presetContentTypes(final boolean presetContentTypes) { this.presetContentTypes = presetContentTypes; return this; } /** * Store the sender of the packet. This is needed for UDP traffic. * * @param senderSocket * The sender as we saw it on the interface * @return This class */ public Message senderSocket(final InetSocketAddress senderSocket) { this.senderSocket = senderSocket; return this; } /** * @return The sender of the packet. This is needed for UDP traffic. 
*/ public InetSocketAddress senderSocket() { return senderSocket; } /** * Store the recipient of the packet. This is needed for UDP (especially broadcast) packets * @param recipientSocket The recipient as we saw it on the interface * @return This class */ public Message recipientSocket(InetSocketAddress recipientSocket) { this.recipientSocket = recipientSocket; return this; } /** * @return The recipient as we saw it on the interface. This is needed for UDP (especially broadcast) packets */ public InetSocketAddress recipientSocket() { return recipientSocket; } /** * Set if we have a signed message. * * @return This class */ public Message setHintSign() { sign = true; return this; } /** * @return True if message is or should be signed */ public boolean isSign() { /* * boolean hasType = false; for (Content type : contentTypes) { if (type == Content.PUBLIC_KEY_SIGNATURE) { * hasType = true; } } */ return sign || privateKey != null; // || hasType; } /** * @param udp * True if connection is UDP * @return This class */ public Message udp(final boolean udp) { this.udp = udp; return this; } /** * @return True if connection is UDP */ public boolean isUdp() { return udp; } public Message verified(boolean verified) { this.verified = verified; return this; } public boolean verified() { return verified; } public Message setVerified() { this.verified = true; return this; } /** * @param done * True if message decoding or encoding is done * @return This class */ public Message setDone(final boolean done) { this.done = done; return this; } /** * Set done to true if message decoding or encoding is done. 
* @return This class */ public Message setDone() { return setDone(true); } /** * @return True if message decoding or encoding is done */ public boolean isDone() { return done; } public Message sendSelf(final boolean sendSelf) { this.sendSelf = sendSelf; return this; } public Message sendSelf() { return sendSelf(true); } public boolean isSendSelf() { return sendSelf; } public Message duplicate() { return duplicate(null); } public Message duplicate(DataFilter dataFilter) { Message message = new Message(); // Header message.messageId = this.messageId; message.version = this.version; message.type = this.type; message.command = this.command; message.sender = this.sender; message.recipient = this.recipient; // recipientRelay is not transferred message.options = this.options; // Payload message.contentTypes = this.contentTypes; // contentReferences is transient // ********* Here comes the payload objects ************ // The content lists: message.neighborsList = this.neighborsList; message.keyList = this.keyList; message.bloomFilterList = this.bloomFilterList; if(dataFilter == null) { message.dataMapList = this.dataMapList; } else { message.dataMapList = filter(dataFilter); } message.integerList = this.integerList; message.longList = this.longList; message.keyCollectionList = this.keyCollectionList; message.keyMap640KeysList = this.keyMap640KeysList; message.keyMapByteList = this.keyMapByteList; message.bufferList = this.bufferList; message.trackerDataList = this.trackerDataList; message.publicKeyList = this.publicKeyList; message.peerSocketAddressList = this.peerSocketAddressList; message.signatureEncode = this.signatureEncode; // these are transient //presetContentTypes //privateKey; //senderSocket; //recipientSocket; //udp; //done; //sign; //content; //verified; //sendSelf; return message; } /** * Change the data and make a shallow copy of the data. 
This means fields * such as TTL or other are copied, the underlying buffer remains the same * * @param dataFilter * The filter that will be applied on each data item * @return The filtered data */ private List<DataMap> filter(DataFilter dataFilter) { final List<DataMap> dataMapListCopy = new ArrayList<DataMap>(this.dataMapList().size()); for (DataMap dataMap : this.dataMapList()) { final NavigableMap<Number640, Data> dataMapCopy = new TreeMap<Number640, Data>(); for (Map.Entry<Number640, Data> entry : dataMap.dataMap() .entrySet()) { Data filteredData = dataFilter.filter(entry.getValue(), dataMap.isConvertMeta(), !isRequest()); dataMapCopy.put(entry.getKey(), filteredData); } dataMapListCopy.add(new DataMap(dataMapCopy, dataMap.isConvertMeta())); } return dataMapListCopy; } /** * Returns the estimated message size. If the message contains data, a constant value of 1000bytes is added. */ public int estimateSize() { int current = MessageHeaderCodec.HEADER_SIZE; if(neighborsList != null) { for (NeighborSet neighbors : neighborsList) { for (PeerAddress address : neighbors.neighbors()) { current += address.size() + 1; } } } if(keyList != null) { current += keyList.size() * Number160.BYTE_ARRAY_SIZE; } if(bloomFilterList != null) { for (SimpleBloomFilter<Number160> filter : bloomFilterList) { current += filter.size(); } } if(integerList != null) { current += integerList.size() * 4; } if(longList != null) { current += longList.size() * 8; } if(keyCollectionList != null) { for (KeyCollection coll : keyCollectionList) { current += 4 + coll.size() * Number640.BYTE_ARRAY_SIZE; } } if(keyMap640KeysList != null) { for (KeyMap640Keys keys : keyMap640KeysList) { current += 4 + keys.size() * Number640.BYTE_ARRAY_SIZE; } } if(keyMapByteList != null) { for (KeyMapByte keys : keyMapByteList) { current += 4 + keys.size(); } } if(bufferList != null) { for (Buffer buffer : bufferList) { current += 4 + buffer.length(); } } if(publicKeyList != null) { for (PublicKey key : publicKeyList) { 
current += key.getEncoded().length; } } if(peerSocketAddressList != null) { for (PeerSocketAddress address : peerSocketAddressList) { current += address.size() + 1; } } if(signatureEncode != null) { current += signatureEncode.signatureSize(); } /** * Here are the estimations to skip CPU intensive calculations */ if(dataMapList != null) { current += 1000; } if(trackerDataList != null) { current += 1000; // estimated size } return current; } public void release() { for(DataMap dataMap: dataMapList()) { for(Data data: dataMap.dataMap().values()) { data.release(); } } for(Buffer buffer: bufferList()) { buffer.buffer().release(); } for(TrackerData trackerData:trackerDataList()) { for(Data data:trackerData.peerAddresses().values()) { data.release(); } } } }
apache-2.0
rocketjob/iostreams
lib/io_streams/line/writer.rb
1744
module IOStreams
  module Line
    # Streams one line at a time to a wrapped output stream, appending a
    # delimiter after every record written.
    class Writer < IOStreams::Writer
      attr_reader :delimiter

      # Yield a line writer wrapping `output_stream`.
      # If the supplied stream is already a line writer, yield it unchanged.
      def self.stream(output_stream, **args)
        return yield(output_stream) if output_stream.is_a?(self.class)

        yield new(output_stream, **args)
      end

      # A delimited stream writer that writes to the supplied output stream.
      #
      # The output stream keeps the encoding of the data written to it; use
      # IOStreams::Encode::Writer to change the output encoding.
      #
      # Parameters
      #   output_stream
      #     The output stream that implements #write
      #
      #   delimiter: [String]
      #     Appended after every record written to the output stream.
      #     Default: OS specific. Linux: "\n"
      def initialize(output_stream, delimiter: $/, original_file_name: nil)
        super(output_stream)
        @delimiter = delimiter
      end

      # Write a line and return self so calls can be chained.
      #
      # Example:
      #   IOStreams.path('a.txt').writer(:line) do |stream|
      #     stream << 'first line' << 'second line'
      #   end
      def <<(data)
        write(data)
        self
      end

      # Write a line to the output stream followed by the delimiter.
      # Returns [Integer] the number of bytes written, including the delimiter.
      #
      # Example:
      #   IOStreams.path('a.txt').writer(:line) do |stream|
      #     count = stream.write('first line')
      #     puts "Wrote #{count} bytes to the output file, including the delimiter"
      #   end
      def write(data)
        output_stream.write(data.to_s + delimiter)
      end
    end
  end
end
apache-2.0
qingyuancloud/qingyuan
pkg/api/resource_helpers.go
1960
/* Copyright 2014 The QingYuan Authors All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package api import ( "github.com/qingyuancloud/QingYuan/pkg/api/resource" ) // Returns string version of ResourceName. func (self ResourceName) String() string { return string(self) } // Returns the CPU limit if specified. func (self *ResourceList) Cpu() *resource.Quantity { if val, ok := (*self)[ResourceCPU]; ok { return &val } return &resource.Quantity{} } // Returns the Memory limit if specified. func (self *ResourceList) Memory() *resource.Quantity { if val, ok := (*self)[ResourceMemory]; ok { return &val } return &resource.Quantity{} } func (self *ResourceList) Pods() *resource.Quantity { if val, ok := (*self)[ResourcePods]; ok { return &val } return &resource.Quantity{} } func GetContainerStatus(statuses []ContainerStatus, name string) (ContainerStatus, bool) { for i := range statuses { if statuses[i].Name == name { return statuses[i], true } } return ContainerStatus{}, false } func GetExistingContainerStatus(statuses []ContainerStatus, name string) ContainerStatus { for i := range statuses { if statuses[i].Name == name { return statuses[i] } } return ContainerStatus{} } // IsPodReady retruns true if a pod is ready; false otherwise. func IsPodReady(pod *Pod) bool { for _, c := range pod.Status.Conditions { if c.Type == PodReady && c.Status == ConditionTrue { return true } } return false }
apache-2.0
toger5/godotdevs-Forum
Themes/aldo/languages/ThemeStrings.english.php
308
<?php
// Version: 2.0; Themes

// Language strings for the Aldo theme, loaded into SMF's global $txt array.
global $scripturl;

// Social / feed link labels shown in the theme header.
$txt['facebook_url'] = 'Facebook URL';
$txt['twitter_url'] = 'Twitter URL';
$txt['google+_url'] = 'google+ URL';
$txt['rss_url'] = 'RSS URL';

// Miscellaneous theme labels.
$txt['maintenance'] = '(Maintenance)';
$txt['approval_member'] = 'Approval';
$txt['open_reports'] = 'Reports';

// NOTE: the closing "?>" tag is intentionally omitted. Any whitespace after a
// closing tag is sent to the client and can break HTTP header output; the PHP
// manual and PSR-12 recommend omitting it in files containing only PHP.
apache-2.0
niwasmala/alt-angularjs
dist/alt/store.js
456
// Registers the "alt-store" AngularJS module with the alt loader.
// Returns the cached module when it is already registered; otherwise
// registers it and returns undefined (matching the original `void` result).
alt.loader.store = function () {
    if (typeof alt.modules.store !== "undefined") {
        return alt.modules.store;
    }

    alt.modules.store = angular
        .module("alt-store", ["angular-storage", "ngCookies"])
        .factory("$store", ["$log", "store", function ($log, store) {
            // Namespace browser storage by the current alt application.
            return store.getNamespacedStore(alt.application);
        }])
        .run(["$rootScope", "$store", function ($rootScope, $store) {
            // Expose the store on the root scope for template access.
            $rootScope.$store = $store;
        }]);

    alt.module("alt-store", alt.modules.store);
};

// Register through AMD when a loader is present, otherwise run immediately.
if (typeof define !== "undefined") {
    define([], function () {
        alt.loader.store();
    });
} else {
    alt.loader.store();
}
apache-2.0
hortonworks/cloudbreak
custom-configurations-api/src/test/java/com/sequenceiq/cloudbreak/api/endpoint/requests/CustomConfigurationsV4RequestTest.java
2072
package com.sequenceiq.cloudbreak.api.endpoint.requests;

import static org.junit.jupiter.api.Assertions.assertEquals;

import java.util.Set;

import javax.validation.ConstraintViolation;

import com.sequenceiq.cloudbreak.api.endpoint.v4.requests.CustomConfigurationsV4Request;

import org.hibernate.validator.HibernateValidator;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;

/**
 * Validates the {@code name} field constraints of {@link CustomConfigurationsV4Request}.
 */
@ExtendWith(MockitoExtension.class)
class CustomConfigurationsV4RequestTest {

    private CustomConfigurationsV4Request underTest;

    private LocalValidatorFactoryBean localValidatorFactory;

    @BeforeEach
    public void setUp() {
        underTest = new CustomConfigurationsV4Request();
        localValidatorFactory = new LocalValidatorFactoryBean();
        localValidatorFactory.setProviderClass(HibernateValidator.class);
        localValidatorFactory.afterPropertiesSet();
    }

    /**
     * Each row supplies a candidate name and the expected number of "name"
     * constraint violations (0 = valid, 1 = invalid).
     *
     * Fix: the original kept two instance-field accumulators
     * (expectedNameViolations / resultedNameViolations) and compared their
     * running totals. JUnit 5 creates a fresh test instance per invocation,
     * so both always held a single run's count — the accumulation was dead
     * weight and misleading. Compare the per-invocation counts directly.
     */
    @ParameterizedTest
    @CsvSource({
            "this is a valid name for custom configs,0",
            "132-3 cUsToM cOnFiGs,0",
            "!@#$^&*CC,0",
            "another'' valid name,0",
            "this an %invalid% name for custom configs.,1",
            "custom; configs 2,1",
            "custom/configs,1",
            "/,1"
    })
    void testCustomConfigsName(String name, String expectedViolations) {
        underTest.setName(name);
        Set<ConstraintViolation<CustomConfigurationsV4Request>> violations = localValidatorFactory.validate(underTest);
        long nameViolations = violations.stream()
                .filter(vio -> "name".equals(vio.getPropertyPath().toString()))
                .count();
        assertEquals(Long.parseLong(expectedViolations), nameViolations);
    }
}
apache-2.0
steny138/TwssApi
quote/twse.py
371
# -*- coding: utf-8 -*-
import json
import flask
from flask.ext import restful
from twss import fetch_from_twse as twse
from datetime import datetime, timedelta


class TWSE_Quote(restful.Resource):
    """REST resource returning daily quote rows for one TWSE stock."""

    def get(self, no, date):
        """Return the quote records for stock ``no`` on ``date``.

        :param no: TWSE stock number, e.g. ``"2330"``.
        :param date: date string in ``YYYY-MM-DD`` form.
        :return: list of dicts, one per quote record.
        """
        # Bug fix: the original used '%Y-%M-%d', but %M parses *minutes*;
        # %m is the month field required by the YYYY-MM-DD route format.
        quote_date = datetime.strptime(date, '%Y-%m-%d')
        data = twse.QuoteStock(no, quote_date).data
        # Serialize each record via its instance attributes.
        return [x.__dict__ for x in data]
apache-2.0
tumblr/jetpants
plugins/simple_tracker/lib/db.rb
1630
module Jetpants class DB def is_spare? Jetpants.topology.spares.include? self end def claim! spares = Jetpants.topology.spares.reject do |sp| self == (sp.is_a?(Hash) && sp['node'] ? sp['node'].to_db : sp.to_db) end Jetpants.topology.tracker.spares = spares Jetpants.topology.update_tracker_data end def cleanup_spare! # If the node is already a valid spare, do not do anything return true if probe! && usable_spare? if running? datadir = mysql_root_cmd('select @@datadir;').chomp("\n/") mysql_root_cmd("PURGE BINARY LOGS BEFORE NOW();") rescue nil else datadir = Jetpants.mysql_datadir end stop_mysql output "Initializing the MySQL data directory" ssh_cmd [ "rm -rf #{datadir}/*", '/usr/bin/mysql_install_db' ], 1 service_start('mysql') confirm_listening @running = true usable_spare? end ##### CALLBACKS ############################################################ # Determine master from asset tracker if machine is unreachable or MySQL isn't running. def after_probe_master unless @running my_pool, my_role = Jetpants.topology.class.tracker.determine_pool_and_role(@ip, @port) @master = (my_role == 'MASTER' ? false : my_pool.master) end end # Determine slaves from asset tracker if machine is unreachable or MySQL isn't running def after_probe_slaves unless @running @slaves = Jetpants.topology.class.tracker.determine_slaves(@ip, @port) end end end end
apache-2.0
act262/CustomView
app/src/main/java/com/github/act262/customview/view/GridsView.java
2043
package com.github.act262.customview.view; import android.annotation.TargetApi; import android.content.Context; import android.graphics.Canvas; import android.graphics.Color; import android.graphics.Paint; import android.os.Build; import android.util.AttributeSet; import android.view.View; /** * @author act262@gmail.com * @version 1.0 * @time 2015/10/24 */ public class GridsView extends View { public GridsView(Context context) { super(context); init(); } public GridsView(Context context, AttributeSet attrs) { super(context, attrs); init(); } public GridsView(Context context, AttributeSet attrs, int defStyleAttr) { super(context, attrs, defStyleAttr); init(); } @TargetApi(Build.VERSION_CODES.LOLLIPOP) public GridsView(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) { super(context, attrs, defStyleAttr, defStyleRes); init(); } private Paint mPaint = null; private int distance = 10; // 每个格子之间的间隔 10px private void init() { mPaint = new Paint(); mPaint.setColor(Color.DKGRAY); mPaint.setAntiAlias(true); mPaint.setStyle(Paint.Style.STROKE); mPaint.setStrokeWidth(5); } @Override protected void onDraw(Canvas canvas) { super.onDraw(canvas); int width = getWidth(); int height = getHeight(); mPaint.setColor(Color.RED); mPaint.setStrokeWidth(5); canvas.drawRect(0, 0, width, height, mPaint);//画整个View的外边框 int row = height / distance; int columns = width / distance; mPaint.setColor(Color.BLACK); mPaint.setStrokeWidth(2); for (int i = 0; i < row; i++) { canvas.drawLine(1, i * distance, width - 1, i * distance, mPaint); // 画横线 } for (int i = 0; i < columns; i++) { canvas.drawLine(i * distance, 1, i * distance, height - 1, mPaint); // 画竖线 } } }
apache-2.0
raulnq/Jal.Aop
Jal.Aop.Aspects.Logger/Impl/DataContractSerializer.cs
677
using System.IO; using System.Text; namespace Jal.Aop.Aspects.Logger { public class DataContractSerializer : ISerializer { public string Serialize(object value) { using (var ms = new MemoryStream()) { var typeToSerialize = value.GetType(); var ser = new System.Runtime.Serialization.DataContractSerializer(typeToSerialize); ser.WriteObject(ms, value); var array = ms.ToArray(); ms.Close(); var serializedXml = Encoding.UTF8.GetString(array, 0, array.Length); return serializedXml; } } } }
apache-2.0
ifzing/ceilometer-extended-monitor
neutron_client.py
803
from neutronclient.v2_0 import client as neutron_client class Client(object): def __init__(self): self.nets = neutron_client.Client( username='ceilometer',password='password', tenant_name='service',region_name='region_name', auth_url="http://mykeystone:5000/v2.0",) def _list_pools(self): return self.nets.list_pools().get('pools') def _show_pool_stats(self, pool_id): return self.nets.retrieve_pool_stats(pool_id) def _get_lb_in_bytes(self, pool_id): _in_bytes = self._show_pool_stats(pool_id)['stats']['bytes_in'] return _in_bytes def _get_lb_out_bytes(self, pool_id): _out_bytes = self._show_pool_stats(pool_id)['stats']['bytes_out'] return _out_bytes
apache-2.0
longXboy/lunnel
server/serControl.go
20007
// Copyright 2017 longXboy, longxboyhi@gmail.com // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package server import ( "container/list" "fmt" "io" "net" "sync" "sync/atomic" "time" "github.com/longXboy/lunnel/contrib" "github.com/longXboy/lunnel/crypto" "github.com/longXboy/lunnel/log" "github.com/longXboy/lunnel/msg" "github.com/longXboy/lunnel/transport" "github.com/longXboy/lunnel/util" "github.com/longXboy/smux" "github.com/pkg/errors" "github.com/satori/go.uuid" "golang.org/x/net/context" ) var maxIdlePipes uint32 var maxStreams uint64 var cleanInterval time.Duration = time.Second * 60 var ControlMapLock sync.RWMutex var ControlMap = make(map[uuid.UUID]*Control) var OldTunnelLock sync.Mutex var OldTunnelMap = make(map[uuid.UUID]map[string]msg.Tunnel) var subDomainIdx uint64 var TunnelMapLock sync.RWMutex var TunnelMap = make(map[string]*Tunnel) func NewControl(conn net.Conn, encryptMode string, enableCompress bool, version string) *Control { ctx, cancel := context.WithCancel(context.Background()) ctl := &Control{ ctlConn: conn, pipeGet: make(chan *smux.Session), pipeAdd: make(chan *smux.Session), writeChan: make(chan writeReq, 64), encryptMode: encryptMode, tunnels: make(map[string]*Tunnel, 0), tunnelLock: new(sync.Mutex), enableCompress: enableCompress, ctx: ctx, cancel: cancel, version: version, busyPipes: list.New(), idlePipes: list.New(), } return ctl } type writeReq struct { mType msg.MsgType body interface{} } type Tunnel struct { tunnelConfig msg.Tunnel 
listener net.Listener name string ctl *Control isClosed bool } func (t *Tunnel) Close() { if t.isClosed { return } TunnelMapLock.Lock() delete(TunnelMap, t.tunnelConfig.PublicAddr()) TunnelMapLock.Unlock() if t.listener != nil { t.listener.Close() } if serverConf.NotifyEnable { err := contrib.RemoveTunnel(serverConf.ServerDomain, t.tunnelConfig, t.ctl.ClientID.String()) if err != nil { log.WithFields(log.Fields{"err": err}).Errorln("notify remove member failed!") } } t.isClosed = true t.listener = nil } type Control struct { // To work on both ARM and x86-32, // these two fields must be the first elements to keep 64-bit // alignment for atomic access to the fields. lastRead uint64 totalPipes uint32 idlePipeCount uint32 busyPipeCount uint32 ClientID uuid.UUID ctlConn net.Conn preMasterSecret []byte encryptMode string enableCompress bool writeChan chan writeReq version string tunnels map[string]*Tunnel tunnelLock *sync.Mutex busyPipes *list.List idlePipes *list.List pipeAdd chan *smux.Session pipeGet chan *smux.Session cancel context.CancelFunc ctx context.Context } func (c *Control) putPipe(p *smux.Session) { select { case c.pipeAdd <- p: case <-c.ctx.Done(): p.Close() atomic.AddUint32(&c.totalPipes, ^uint32(0)) return } return } func (c *Control) getPipe() *smux.Session { select { case p := <-c.pipeGet: return p case <-c.ctx.Done(): return nil } } func (c *Control) clean() *smux.Session { if serverConf.Debug { if atomic.LoadUint32(&c.totalPipes) > maxIdlePipes { log.WithFields(log.Fields{"total_pipe_count": atomic.LoadUint32(&c.totalPipes), "client_id": c.ClientID.String()}).Debugln("total pipe count") } } var deleted int64 = 0 front := c.busyPipes.Front() next := front for { if front == nil { break } next = front.Next() sess := front.Value.(*smux.Session) if sess.IsClosed() { deleted++ c.busyPipes.Remove(front) } else if num := uint64(sess.NumStreams()); num < maxStreams { if num <= maxStreams/2 { c.idlePipes.PushFront(c.busyPipes.Remove(front)) } else { 
c.idlePipes.PushBack(c.busyPipes.Remove(front)) } } front = next } front = c.idlePipes.Front() next = front var idle *smux.Session for { if front == nil { break } next = front.Next() sess := front.Value.(*smux.Session) if sess.IsClosed() { c.idlePipes.Remove(front) deleted++ } else if sess.NumStreams() == 0 && uint32(c.idlePipes.Len()) > maxIdlePipes { c.idlePipes.Remove(front) deleted++ sess.Close() log.WithFields(log.Fields{"idle_count": c.idlePipes.Len(), "pipe": fmt.Sprintf("%p", sess), "client_id": c.ClientID.String()}).Debugln("remove and close idle") } else if idle == nil { idle = sess } front = next } if deleted > 0 { atomic.AddUint32(&c.totalPipes, ^uint32(deleted-1)) } atomic.StoreUint32(&c.busyPipeCount, uint32(c.busyPipes.Len())) atomic.StoreUint32(&c.idlePipeCount, uint32(c.idlePipes.Len())) return idle } func (c *Control) getIdleFast() *smux.Session { idle := c.idlePipes.Front() for { if idle == nil { atomic.StoreUint32(&c.idlePipeCount, uint32(c.idlePipes.Len())) return nil } next := idle.Next() if idle.Value.(*smux.Session).IsClosed() { atomic.AddUint32(&c.totalPipes, ^uint32(0)) c.idlePipes.Remove(idle) } else { atomic.StoreUint32(&c.idlePipeCount, uint32(c.idlePipes.Len())) return c.idlePipes.Remove(idle).(*smux.Session) } idle = next } } func (c *Control) pipeManage() { defer log.CapturePanic() defer c.closePipes() var available *smux.Session ticker := time.NewTicker(cleanInterval) defer ticker.Stop() for { Prepare: if available == nil || available.IsClosed() { if available != nil { atomic.AddUint32(&c.totalPipes, ^uint32(0)) } available = c.getIdleFast() if available == nil { available = c.clean() select { case c.writeChan <- writeReq{msg.TypePipeReq, nil}: default: c.Close() return } if available == nil { pipeGetTimeout := time.After(time.Second * 12) for { select { case <-ticker.C: available = c.clean() if available != nil { goto Available } case p := <-c.pipeAdd: if !p.IsClosed() { if uint64(p.NumStreams()) < maxStreams { available = p goto 
Available } else { c.busyPipes.PushBack(p) atomic.StoreUint32(&c.busyPipeCount, uint32(c.busyPipes.Len())) } } else { atomic.AddUint32(&c.totalPipes, ^uint32(0)) } case <-c.ctx.Done(): return case <-pipeGetTimeout: goto Prepare } } } } } Available: select { case <-ticker.C: c.clean() case c.pipeGet <- available: log.WithFields(log.Fields{"pipe": fmt.Sprintf("%p", available), "client_id": c.ClientID.String()}).Debugln("dispatch pipe to consumer") available = nil case p := <-c.pipeAdd: if !p.IsClosed() { if num := uint64(p.NumStreams()); num < maxStreams { if num <= maxStreams/2 { c.idlePipes.PushFront(p) } else { c.idlePipes.PushBack(p) } atomic.StoreUint32(&c.idlePipeCount, uint32(c.idlePipes.Len())) } else { c.busyPipes.PushBack(p) atomic.StoreUint32(&c.busyPipeCount, uint32(c.busyPipes.Len())) } } else { atomic.AddUint32(&c.totalPipes, ^uint32(0)) } case <-c.ctx.Done(): return } } } func (c *Control) Close() { log.WithField("clientId", c.ClientID).Debugln("ready to close control") c.cancel() } func (c *Control) closeTunnels() map[string]msg.Tunnel { log.WithField("clientId", c.ClientID).Debugln("ready to close tunnels") tunnelConfigMap := make(map[string]msg.Tunnel) c.tunnelLock.Lock() for _, t := range c.tunnels { t.Close() tunnelConfigMap[t.name] = t.tunnelConfig } c.tunnelLock.Unlock() return tunnelConfigMap } func (c *Control) closePipes() { idle := c.idlePipes.Front() for { if idle == nil { break } sess := idle.Value.(*smux.Session) if !sess.IsClosed() { sess.Close() } atomic.AddUint32(&c.totalPipes, ^uint32(0)) idle = idle.Next() } c.idlePipes = nil busy := c.busyPipes.Front() for { if busy == nil { break } sess := busy.Value.(*smux.Session) if !sess.IsClosed() { sess.Close() } atomic.AddUint32(&c.totalPipes, ^uint32(0)) busy = busy.Next() } c.busyPipes = nil log.WithField("clientId", c.ClientID).Debugln("close pipes") } func (c *Control) recvLoop() { defer log.CapturePanic() defer log.WithField("clientId", c.ClientID).Debugln("close recvLoop") 
atomic.StoreUint64(&c.lastRead, uint64(time.Now().UnixNano())) for { mType, body, err := msg.ReadMsgWithoutDeadline(c.ctlConn) if err != nil { log.WithFields(log.Fields{"err": err, "client_Id": c.ClientID.String()}).Warningln("ReadMsgWithoutTimeout in recvLoop failed") c.Close() return } if mType != msg.TypePing && mType != msg.TypePong { log.WithFields(log.Fields{"type": mType, "body": body, "client_id": c.ClientID}).Debugln("recv msg") } atomic.StoreUint64(&c.lastRead, uint64(time.Now().UnixNano())) switch mType { case msg.TypeAddTunnels: go c.ServerAddTunnels(body.(*msg.AddTunnels)) case msg.TypePong: case msg.TypePing: select { case c.writeChan <- writeReq{msg.TypePong, nil}: default: c.Close() return } case msg.TypeExit: c.Close() return default: } } } func (c *Control) writeLoop() { defer log.CapturePanic() defer log.WithField("clientId", c.ClientID).Debugln("close writeLoop") lastWrite := time.Now() idx := 0 for { select { case msgBody := <-c.writeChan: if msgBody.mType == msg.TypePing { if time.Now().Before(lastWrite.Add(time.Duration(serverConf.Health.Interval * int64(time.Second) / 2))) { continue } } if msgBody.mType == msg.TypePipeReq { idx++ } lastWrite = time.Now() if msgBody.mType != msg.TypePing && msgBody.mType != msg.TypePong { log.WithFields(log.Fields{"type": msgBody.mType, "body": msgBody.body, "client_id": c.ClientID}).Debugln("ready to send msg") } err := msg.WriteMsg(c.ctlConn, msgBody.mType, msgBody.body) if err != nil { log.WithFields(log.Fields{"mType": msgBody.mType, "body": fmt.Sprintf("%v", msgBody.body), "client_id": c.ClientID.String(), "err": err}).Warningln("send msg to client failed!") c.Close() return } case <-c.ctx.Done(): return } } } func (c *Control) Serve() { defer func() { ControlMapLock.Lock() ctl, isok := ControlMap[c.ClientID] if isok && ctl == c { delete(ControlMap, c.ClientID) } ControlMapLock.Unlock() tunnelsMap := c.closeTunnels() OldTunnelLock.Lock() OldTunnelMap[c.ClientID] = tunnelsMap OldTunnelLock.Unlock() defer 
log.WithField("clientId", c.ClientID).Debugln("close mainLoop") }() defer c.ctlConn.Close() go c.recvLoop() go c.writeLoop() go c.pipeManage() ticker := time.NewTicker(time.Duration(serverConf.Health.Interval * int64(time.Second))) defer ticker.Stop() for { select { case <-ticker.C: if (uint64(time.Now().UnixNano()) - atomic.LoadUint64(&c.lastRead)) > uint64(serverConf.Health.TimeOut*int64(time.Second)) { log.WithFields(log.Fields{"client_id": c.ClientID.String()}).Warningln("recv client ping time out!") c.Close() return } select { case c.writeChan <- writeReq{msg.TypePing, nil}: default: c.Close() return } case <-c.ctx.Done(): return } } } func proxyConn(userConn net.Conn, c *Control, tunnelName string) { defer userConn.Close() p := c.getPipe() if p == nil { return } //todo:close stream friendly stream, err := p.OpenStream(tunnelName) if err != nil { c.putPipe(p) return } defer stream.Close() c.putPipe(p) p1die := make(chan struct{}) p2die := make(chan struct{}) go func() { io.Copy(stream, userConn) close(p1die) }() go func() { io.Copy(userConn, stream) close(p2die) }() select { case <-p1die: case <-p2die: } return } //add or update tunnel stat func (c *Control) ServerAddTunnels(sstm *msg.AddTunnels) { defer log.CapturePanic() c.tunnelLock.Lock() defer c.tunnelLock.Unlock() for name, tunnel := range sstm.Tunnels { var lis net.Listener = nil var err error oldTunnel, isok := c.tunnels[name] if isok { oldTunnel.Close() delete(c.tunnels, name) } if tunnel.Public.Schema == "tcp" || tunnel.Public.Schema == "udp" { if tunnel.Public.Port == 0 && oldTunnel != nil && tunnel.Public.Schema == oldTunnel.tunnelConfig.Public.Schema && tunnel.LocalAddr() == oldTunnel.tunnelConfig.LocalAddr() { tunnel.Public.AllowReallocate = true tunnel.Public.Port = oldTunnel.tunnelConfig.Public.Port } if tunnel.Public.Schema == "udp" { addr := net.UDPAddr{ Port: int(tunnel.Public.Port), IP: net.ParseIP(serverConf.ListenIP), } udpConn, err := net.ListenUDP("udp", &addr) if err != nil { if 
tunnel.Public.AllowReallocate { addr.Port = 0 udpConn, err = net.ListenUDP("udp", &addr) } if err != nil { log.WithFields(log.Fields{"remote_addr": tunnel.PublicAddr(), "client_id": c.ClientID.String(), "err": err.Error()}).Warningln("listen tunnel failed!") select { case c.writeChan <- writeReq{msg.TypeError, msg.Error{fmt.Sprintf("add tunnels(remote_addr:%s) failed!err:=%s", tunnel.PublicAddr(), err.Error())}}: default: c.Close() return } continue } } go proxyConn(udpConn, c, name) tunnel.Public.Port = uint16(udpConn.LocalAddr().(*net.UDPAddr).Port) tunnel.Public.Host = serverConf.ServerDomain } else { lis, err = net.Listen(tunnel.Public.Schema, fmt.Sprintf("%s:%d", serverConf.ListenIP, tunnel.Public.Port)) if err != nil { if tunnel.Public.AllowReallocate { lis, err = net.Listen(tunnel.Public.Schema, fmt.Sprintf("%s:%d", serverConf.ListenIP, 0)) } if err != nil { log.WithFields(log.Fields{"remote_addr": tunnel.PublicAddr(), "client_id": c.ClientID.String(), "err": err.Error()}).Warningln("listen tunnel failed!") select { case c.writeChan <- writeReq{msg.TypeError, msg.Error{fmt.Sprintf("add tunnels(remote_addr:%s) failed!err:=%s", tunnel.PublicAddr(), err.Error())}}: default: c.Close() return } continue } } go func(tunnelName string) { for { conn, err := lis.Accept() if err != nil { return } go proxyConn(conn, c, tunnelName) } }(name) //todo: port should allocated and managed by server not by OS addr := lis.Addr().(*net.TCPAddr) tunnel.Public.Port = uint16(addr.Port) tunnel.Public.Host = serverConf.ServerDomain } } else if tunnel.Public.Schema == "http" || tunnel.Public.Schema == "https" { if tunnel.Public.Host == "" { if oldTunnel != nil && tunnel.Public.Schema == oldTunnel.tunnelConfig.Public.Schema && tunnel.LocalAddr() == oldTunnel.tunnelConfig.LocalAddr() { tunnel.Public.AllowReallocate = true tunnel.Public.Host = oldTunnel.tunnelConfig.Public.Host } else { subDomain := util.Int2Short(atomic.AddUint64(&subDomainIdx, 1)) tunnel.Public.Host = 
fmt.Sprintf("%s.%s", string(subDomain), serverConf.ServerDomain) } } if tunnel.Public.Schema == "http" { tunnel.Public.Port = serverConf.HttpPort } else { tunnel.Public.Port = serverConf.HttpsPort } } tunnelControl := Tunnel{tunnelConfig: tunnel, listener: lis, ctl: c, name: name} TunnelMapLock.Lock() _, isok = TunnelMap[tunnel.PublicAddr()] if isok { TunnelMapLock.Unlock() if lis != nil { lis.Close() } log.WithFields(log.Fields{"remote_addr": tunnel.PublicAddr(), "client_id": c.ClientID.String()}).Warningln("forbidden,remote addrs already in use") select { case c.writeChan <- writeReq{msg.TypeError, msg.Error{fmt.Sprintf("add tunnels failed!forbidden,remote addrs(%s) already in use", tunnel.PublicAddr())}}: default: c.Close() return } continue } TunnelMap[tunnel.PublicAddr()] = &tunnelControl TunnelMapLock.Unlock() c.tunnels[name] = &tunnelControl sstm.Tunnels[name] = tunnel if serverConf.NotifyEnable { err = contrib.AddTunnel(serverConf.ServerDomain, tunnel, c.ClientID.String()) if err != nil { log.WithFields(log.Fields{"err": err}).Errorln("notify add member failed!") } } } select { case c.writeChan <- writeReq{msg.TypeAddTunnels, *sstm}: default: c.Close() return } return } func (c *Control) GenerateClientId() uuid.UUID { c.ClientID = uuid.NewV4() return c.ClientID } func (c *Control) ServerHandShake() error { var shello msg.ControlServerHello var chello *msg.ControlClientHello mType, body, err := msg.ReadMsg(c.ctlConn) if err != nil { return errors.Wrap(err, "msg.ReadMsg") } if mType != msg.TypeControlClientHello { return errors.Errorf("invalid msg type(%d),expect(%d)", mType, msg.TypeControlClientHello) } chello = body.(*msg.ControlClientHello) if serverConf.AuthEnable { isok, err := contrib.Auth(chello) if err != nil { return errors.Wrap(err, "contrib.Auth") } if !isok { return errors.Errorf("auth failed!token:%s", chello.AuthToken) } } if c.encryptMode != "none" { priv, keyMsg := crypto.GenerateKeyExChange() if keyMsg == nil || priv == nil { return 
errors.Errorf("crypto.GenerateKeyExChange error ,exchange key is nil") } preMasterSecret, err := crypto.ProcessKeyExchange(priv, chello.CipherKey) if err != nil { return errors.Wrap(err, "crypto.ProcessKeyExchange") } c.preMasterSecret = preMasterSecret shello.CipherKey = keyMsg } if chello.ClientID != nil { shello.ClientID = *chello.ClientID } else { shello.ClientID = c.GenerateClientId() } c.ClientID = shello.ClientID err = msg.WriteMsg(c.ctlConn, msg.TypeControlServerHello, shello) if err != nil { return errors.Wrap(err, "Write ClientId") } if chello.ClientID != nil { ControlMapLock.RLock() old, isok := ControlMap[c.ClientID] ControlMapLock.RUnlock() var oldTunnelsMap map[string]msg.Tunnel if isok { oldTunnelsMap = old.closeTunnels() for name, oldTunnel := range oldTunnelsMap { c.tunnels[name] = &Tunnel{isClosed: true, name: name, tunnelConfig: oldTunnel} } } else { OldTunnelLock.Lock() oldTunnelsMap, isok = OldTunnelMap[c.ClientID] if isok { delete(OldTunnelMap, c.ClientID) } OldTunnelLock.Unlock() if isok { for name, oldTunnel := range oldTunnelsMap { c.tunnels[name] = &Tunnel{isClosed: true, name: name, tunnelConfig: oldTunnel} } } } } ControlMapLock.Lock() ControlMap[c.ClientID] = c ControlMapLock.Unlock() return nil } func PipeHandShake(conn net.Conn, phs *msg.PipeClientHello) error { ControlMapLock.RLock() ctl, isok := ControlMap[phs.ClientID] ControlMapLock.RUnlock() if !isok { return errors.Errorf("invalid phs.client_id %s", phs.ClientID.String()) } smuxConfig := smux.DefaultConfig() smuxConfig.MaxReceiveBuffer = 1194304 smuxConfig.IdleStreamTimeout = time.Minute * 30 var err error var sess *smux.Session var underlyingConn io.ReadWriteCloser if ctl.encryptMode != "none" { prf := crypto.NewPrf12() var masterKey []byte = make([]byte, 16) prf(masterKey, ctl.preMasterSecret, phs.ClientID[:], phs.Once[:]) underlyingConn, err = crypto.NewCryptoStream(conn, masterKey) if err != nil { return errors.Wrap(err, "crypto.NewCryptoConn") } } else { underlyingConn = 
conn } if ctl.enableCompress { underlyingConn = transport.NewCompStream(underlyingConn) } sess, err = smux.Client(underlyingConn, smuxConfig) if err != nil { return errors.Wrap(err, "smux.Client") } atomic.AddUint32(&ctl.totalPipes, 1) ctl.putPipe(sess) return nil }
apache-2.0
naterivah/dynaorm
src/main/java/be/bittich/dynaorm/maping/ReflectionMapper.java
2083
/* * Copyright 2014 Nordine. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package be.bittich.dynaorm.maping; import be.bittich.dynaorm.exception.ColumnNotFoundException; import be.bittich.dynaorm.exception.RequestInvalidException; import be.bittich.dynaorm.repository.DynaRepository; import java.io.Serializable; import java.util.List; import java.util.logging.Level; import java.util.logging.Logger; /** * * @author Nordine */ public class ReflectionMapper implements Serializable { private static final long serialVersionUID = 3836286981898895355L; /** * Load lazily a relation between two entities. First step before concrete * mapping * * @param <E> * @param e * @param lazyRepo * @return */ public static <E> E lazyLoadRelation(E e, DynaRepository<E> lazyRepo) { E v = lazyRepo.findById(e); return lazyRepo.findById(v); } /** * That method does a basic findBy to get the list of entities related to * the field * * @param <E> * @param mapping * @param lazyRepo * @return */ public static <E> List<E> lazyLoadRelationList(final ForeignKeyMapping<E> mapping, DynaRepository<E> lazyRepo) { try { List<E> list = lazyRepo.findBy(mapping.getIdMappedBy(), mapping.getId()); return list; } catch (ColumnNotFoundException | RequestInvalidException ex) { Logger.getLogger(ReflectionMapper.class.getName()).log(Level.SEVERE, ex.getMessage(), ex); } return null; } }
apache-2.0
openstack/castellan
castellan/common/objects/private_key.py
1996
# Copyright (c) 2015 The Johns Hopkins University/Applied Physics Laboratory # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Base PrivateKey Class This module defines the PrivateKey class. """ from castellan.common.objects import key class PrivateKey(key.Key): """This class represents private keys.""" def __init__(self, algorithm, bit_length, key, name=None, created=None, id=None): """Create a new PrivateKey object. The arguments specify the algorithm and bit length for the asymmetric encryption and the bytes for the key in a bytestring. """ self._alg = algorithm self._bit_length = bit_length self._key = key super().__init__(name=name, created=created, id=id) @classmethod def managed_type(cls): return "private" @property def algorithm(self): return self._alg @property def format(self): return "PKCS8" @property def bit_length(self): return self._bit_length def get_encoded(self): return self._key def __eq__(self, other): if isinstance(other, PrivateKey): return (self._alg == other._alg and self._bit_length == other._bit_length and self._key == other._key) else: return False def __ne__(self, other): result = self.__eq__(other) return not result
apache-2.0
tensorflow/datasets
tensorflow_datasets/image_classification/dtd_test.py
951
# coding=utf-8 # Copyright 2022 The TensorFlow Datasets Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for DTD image data loading.""" from tensorflow_datasets import testing from tensorflow_datasets.image_classification import dtd class DtdTest(testing.DatasetBuilderTestCase): DATASET_CLASS = dtd.Dtd SPLITS = { 'test': 3, 'train': 2, 'validation': 1, } if __name__ == '__main__': testing.test_main()
apache-2.0
ringmaster217/galactic-media-server
src/main/java/gms/model/movies/Movie.java
8728
package gms.model.movies;

import com.fasterxml.jackson.annotation.JsonIgnore;

import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;

import gms.model.MediaItem;
import gms.model.moviedb.movie.MovieDbMovie;
import gms.model.moviedb.movie.MovieDbMovieGenre;
import gms.model.moviedb.movie.MovieDbMovieProductionCompany;
import gms.model.moviedb.movie.MovieDbMovieProductionCountry;

/**
 * A movie in the media library, combining metadata copied from a
 * TheMovieDB record with local file information (path and size).
 *
 * Created by ringm on 1/5/2017.
 */
public class Movie extends MediaItem {

    private int _id;
    private MovieImage backdropImage;
    private boolean partOfCollection;
    private int collectionId;
    private int budget;
    private List<String> genres;
    private int movieDbId;
    private String imdbId;
    private String originalLanguage;
    private String originalTitle;
    private String overview;
    private double popularity;
    private MovieImage posterImage;
    private List<String> productionCompanies;
    private List<String> productionCountries;
    private String releaseDate;
    private long revenue;
    private int runtime;
    private String status;
    private String tagline;
    private double voteAverage;
    private int voteCount;
    private List<MovieImage> backdrops;
    private List<MovieImage> posters;
    private String title;
    private String fileName;
    private long fileSize;

    /**
     * Orders movies by release date (ISO-style date strings compare
     * correctly lexicographically). Null dates sort before non-null ones.
     * Fix: the original comparator threw NullPointerException when only
     * m1's releaseDate was null (it then compared m2 against the null).
     */
    public static Comparator<Movie> yearComparator = new Comparator<Movie>() {
        @Override
        public int compare(Movie m1, Movie m2) {
            if (m1.releaseDate == null && m2.releaseDate == null) {
                return 0;
            }
            if (m1.releaseDate == null) {
                return -1;
            }
            if (m2.releaseDate == null) {
                return 1;
            }
            return m1.releaseDate.compareToIgnoreCase(m2.releaseDate);
        }
    };

    public Movie() {
        this.mediaType = "Movie";
    }

    /**
     * Copies the relevant fields of a TheMovieDB response object into this
     * movie. Collection, genre and production lists are only populated when
     * present and non-empty in the source record.
     */
    @JsonIgnore
    public void populateFromMovieDb(MovieDbMovie movie) {
        //setBackdropImage(movie.getBackdrop_path());
        setPartOfCollection(movie.getBelongs_to_collection() != null);
        if (movie.getBelongs_to_collection() != null) {
            setCollectionId(movie.getBelongs_to_collection().getId());
        }
        setBudget(movie.getBudget());
        if (movie.getGenres() != null && !movie.getGenres().isEmpty()) {
            genres = new LinkedList<>();
            movie.getGenres().forEach((MovieDbMovieGenre mg) -> genres.add(mg.getName()));
        }
        setMovieDbId(movie.getId());
        setImdbId(movie.getImdb_id());
        setOriginalLanguage(movie.getOriginal_language());
        setOriginalTitle(movie.getOriginal_title());
        setOverview(movie.getOverview());
        setPopularity(movie.getPopularity());
        //setPosterImage(movie.getPoster_path());
        if (movie.getProduction_companies() != null && !movie.getProduction_companies().isEmpty()) {
            productionCompanies = new LinkedList<>();
            movie.getProduction_companies().forEach((MovieDbMovieProductionCompany prod) -> productionCompanies.add(prod.getName()));
        }
        if (movie.getProduction_countries() != null && !movie.getProduction_countries().isEmpty()) {
            productionCountries = new LinkedList<>();
            movie.getProduction_countries().forEach((MovieDbMovieProductionCountry prod) -> productionCountries.add(prod.getName()));
        }
        setReleaseDate(movie.getRelease_date());
        setRevenue(movie.getRevenue());
        setRuntime(movie.getRuntime());
        setStatus(movie.getStatus());
        setTagline(movie.getTagline());
        setVoteAverage(movie.getVote_average());
        setVoteCount(movie.getVote_count());
        setTitle(movie.getTitle());
    }

    /**
     * Sort key for the title with a leading article stripped, so
     * "The Matrix" sorts under "M".
     * Fix: the original used {@code replaceAll("the", "")} which removed
     * every occurrence of "the"/"The" anywhere in the title, mangling
     * titles such as "Mother" ("Mor") or "Theory" ("ory"), and threw a
     * NullPointerException for a null title. Only a leading "The " is
     * removed now, case-insensitively.
     */
    @JsonIgnore
    @Override
    public String getSortString() {
        if (title == null) {
            return "";
        }
        return title.replaceFirst("(?i)^the\\s+", "").trim();
    }

    /****************** Getters and Setters ********************/

    public int get_id() { return _id; }
    public void set_id(int _id) { this._id = _id; }

    public MovieImage getBackdropImage() { return backdropImage; }
    public void setBackdropImage(MovieImage backdropImage) { this.backdropImage = backdropImage; }

    public boolean isPartOfCollection() { return partOfCollection; }
    public void setPartOfCollection(boolean partOfCollection) { this.partOfCollection = partOfCollection; }

    public int getCollectionId() { return collectionId; }
    public void setCollectionId(int collectionId) { this.collectionId = collectionId; }

    public int getBudget() { return budget; }
    public void setBudget(int budget) { this.budget = budget; }

    public List<String> getGenres() { return genres; }
    public void setGenres(List<String> genres) { this.genres = genres; }

    public int getMovieDbId() { return movieDbId; }
    public void setMovieDbId(int movieDbId) { this.movieDbId = movieDbId; }

    public String getImdbId() { return imdbId; }
    public void setImdbId(String imdbId) { this.imdbId = imdbId; }

    public String getOriginalLanguage() { return originalLanguage; }
    public void setOriginalLanguage(String originalLanguage) { this.originalLanguage = originalLanguage; }

    public String getOriginalTitle() { return originalTitle; }
    public void setOriginalTitle(String originalTitle) { this.originalTitle = originalTitle; }

    public String getOverview() { return overview; }
    public void setOverview(String overview) { this.overview = overview; }

    public double getPopularity() { return popularity; }
    public void setPopularity(double popularity) { this.popularity = popularity; }

    public MovieImage getPosterImage() { return posterImage; }
    public void setPosterImage(MovieImage posterImage) { this.posterImage = posterImage; }

    public List<String> getProductionCompanies() { return productionCompanies; }
    public void setProductionCompanies(List<String> productionCompanies) { this.productionCompanies = productionCompanies; }

    public List<String> getProductionCountries() { return productionCountries; }
    public void setProductionCountries(List<String> productionCountries) { this.productionCountries = productionCountries; }

    public String getReleaseDate() { return releaseDate; }
    public void setReleaseDate(String releaseDate) { this.releaseDate = releaseDate; }

    public long getRevenue() { return revenue; }
    public void setRevenue(long revenue) { this.revenue = revenue; }

    public int getRuntime() { return runtime; }
    public void setRuntime(int runtime) { this.runtime = runtime; }

    public String getStatus() { return status; }
    public void setStatus(String status) { this.status = status; }

    public String getTagline() { return tagline; }
    public void setTagline(String tagline) { this.tagline = tagline; }

    public double getVoteAverage() { return voteAverage; }
    public void setVoteAverage(double voteAverage) { this.voteAverage = voteAverage; }

    public int getVoteCount() { return voteCount; }
    public void setVoteCount(int voteCount) { this.voteCount = voteCount; }

    public String getTitle() { return title; }
    public void setTitle(String title) { this.title = title; }

    public String getFileName() { return fileName; }
    public void setFileName(String fileName) { this.fileName = fileName; }

    public List<MovieImage> getBackdrops() { return backdrops; }
    public void setBackdrops(List<MovieImage> backdrops) { this.backdrops = backdrops; }

    public List<MovieImage> getPosters() { return posters; }
    public void setPosters(List<MovieImage> posters) { this.posters = posters; }

    public long getFileSize() { return fileSize; }
    public void setFileSize(long fileSize) { this.fileSize = fileSize; }
}
apache-2.0
volodymyrpavlenko/muon-java
muon-transport-amqp/src/main/java/io/muoncore/extension/amqp/QueueListener.java
3934
package io.muoncore.extension.amqp;

import com.rabbitmq.client.Channel;
import com.rabbitmq.client.ConsumerCancelledException;
import com.rabbitmq.client.QueueingConsumer;
import com.rabbitmq.client.ShutdownSignalException;
import io.muoncore.Muon;
import io.muoncore.transport.MuonMessageEvent;
import io.muoncore.transport.MuonMessageEventBuilder;

import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.logging.Level;
import java.util.logging.Logger;

/**
 * Consumes messages from a single AMQP queue on its own thread and forwards
 * each delivery as a MuonMessageEvent to the supplied transport listener.
 * Messages are acked only after the listener callback returns.
 */
public class QueueListener implements Runnable {

    // Written by cancel() from another thread, read in the run() loop.
    private volatile boolean running;
    // Guarded by "this"; set once the queue has been declared (or setup
    // failed) so blockUntilReady() can use a proper condition wait.
    private boolean ready;
    private final Channel channel;
    private final Logger log = Logger.getLogger(QueueListener.class.getName());
    private final String queueName;
    private final Muon.EventMessageTransportListener listener;
    private QueueingConsumer consumer;

    public QueueListener(
            Channel channel, String queueName, Muon.EventMessageTransportListener listener) {
        this.channel = channel;
        this.queueName = queueName;
        this.listener = listener;
    }

    /**
     * Blocks the calling thread until the listener thread has declared the
     * queue (or failed to start).
     * Fix: the original bare wait() hung forever if run() signalled before
     * the caller started waiting, and could return prematurely on a spurious
     * wakeup; waiting on the {@code ready} flag in a loop handles both.
     * Interruption is preserved by re-setting the interrupt flag.
     */
    public void blockUntilReady() {
        synchronized (this) {
            while (!ready) {
                try {
                    wait();
                } catch (InterruptedException e) {
                    Thread.currentThread().interrupt();
                    return;
                }
            }
        }
    }

    /** Marks the listener ready and wakes all waiters. Idempotent. */
    private void signalReady() {
        synchronized (this) {
            ready = true;
            notifyAll();
        }
    }

    @Override
    public void run() {
        try {
            log.info("Opening Queue: " + queueName);
            channel.queueDeclare(queueName, false, false, true, null);
            signalReady();

            consumer = new QueueingConsumer(channel);
            channel.basicConsume(queueName, false, consumer);
            log.info("Queue ready: " + queueName);
            running = true;
            while (running) {
                try {
                    QueueingConsumer.Delivery delivery = consumer.nextDelivery();
                    byte[] content = delivery.getBody();

                    MuonMessageEventBuilder builder = MuonMessageEventBuilder.named(queueName);

                    Map<String, Object> headers = delivery.getProperties().getHeaders();
                    if (headers == null) {
                        headers = new HashMap<String, Object>();
                    }
                    String contentType = "";
                    if (headers.get("Content-Type") != null) {
                        contentType = headers.get("Content-Type").toString();
                    }
                    for (Map.Entry<String, Object> entry : headers.entrySet()) {
                        if (entry.getValue() != null) {
                            builder.withHeader(entry.getKey(), entry.getValue().toString());
                        }
                    }
                    MuonMessageEvent ev = builder.build();
                    ev.setContentType(contentType);
                    ev.setEncodedBinaryContent(content);
                    listener.onEvent(queueName, ev);
                    // Ack only after the listener has processed the message.
                    channel.basicAck(delivery.getEnvelope().getDeliveryTag(), false);
                } catch (ShutdownSignalException ex) {
                    log.log(Level.FINER, ex.getMessage(), ex);
                } catch (ConsumerCancelledException ex) {
                    log.log(Level.FINER, ex.getMessage(), ex);
                } catch (Exception e) {
                    // Keep consuming on per-message failures.
                    log.log(Level.WARNING, e.getMessage(), e);
                }
            }
        } catch (Exception e) {
            log.log(Level.WARNING, e.getMessage(), e);
        } finally {
            // Fix: release any thread stuck in blockUntilReady() even when
            // queue setup failed before signalReady() was reached.
            signalReady();
            log.warning("Queue Listener exits: " + queueName);
        }
    }

    /**
     * Stops the consume loop, cancels the consumer and deletes the queue.
     * Fix: failures are logged instead of printStackTrace().
     */
    public void cancel() {
        log.info("Queue listener is cancelled:" + queueName);
        running = false;
        try {
            consumer.handleCancel("Muon-Cancel");
        } catch (IOException e) {
            log.log(Level.WARNING, "Failed to cancel consumer for " + queueName, e);
        } finally {
            try {
                channel.queueDelete(queueName, false, false);
            } catch (IOException e) {
                log.log(Level.WARNING, "Failed to delete queue " + queueName, e);
            }
        }
    }
}
apache-2.0
gawkermedia/googleads-java-lib
modules/dfp_appengine/src/main/java/com/google/api/ads/dfp/jaxws/v201602/DaypartPremiumFeature.java
904
package com.google.api.ads.dfp.jaxws.v201602;

import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlType;


/**
 * A premium feature applied to daypart targeting.
 *
 * <p>Java class for DaypartPremiumFeature complex type.
 *
 * <p>The following schema fragment specifies the expected content contained within this class.
 *
 * <pre>
 * &lt;complexType name="DaypartPremiumFeature">
 *   &lt;complexContent>
 *     &lt;extension base="{https://www.google.com/apis/ads/publisher/v201602}PremiumFeature">
 *       &lt;sequence>
 *       &lt;/sequence>
 *     &lt;/extension>
 *   &lt;/complexContent>
 * &lt;/complexType>
 * </pre>
 */
// NOTE: JAXB-generated marker type — intentionally adds no fields or
// behavior beyond PremiumFeature; it exists only so the XML type
// "DaypartPremiumFeature" round-trips to a distinct Java class.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "DaypartPremiumFeature")
public class DaypartPremiumFeature
    extends PremiumFeature
{

}
apache-2.0
BerlinUnited/webspark
webspark/lib/three/js/loaders/OBJMTLLoader.js
8701
/**
 * Loads a Wavefront .obj file with materials
 *
 * @author mrdoob / http://mrdoob.com/
 * @author angelxuanchang
 */

THREE.OBJMTLLoader = function (materialBaseUrl, textureBaseUrl) {

	this.materialBaseUrl = materialBaseUrl;
	this.textureBaseUrl = textureBaseUrl;

};

THREE.OBJMTLLoader.prototype = {

	constructor: THREE.OBJMTLLoader,

	/**
	 * Loads an .obj file, then loads its .mtl file (either the one named by
	 * the obj's "mtllib" statement, resolved against materialBaseUrl, or the
	 * mtlurl argument) and applies the created materials to the meshes by
	 * material name before invoking onLoad with the assembled Object3D.
	 */
	load: function ( url, mtlurl, onLoad, onProgress, onError ) {

		var scope = this;

		var loader = new THREE.XHRLoader( scope.manager );
		loader.setCrossOrigin( this.crossOrigin );
		loader.load( url, function ( text ) {

			// Fix: removed unused local `baseUrl` (computed but never read).

			// load the object and collect the url for the material file if avaliable
			var object = scope.parse( text, function( mtlfile ) {

				mtlurl = scope.materialBaseUrl + mtlfile;

			} );

			// load the material file and apply it to the object
			var mtlLoader = new THREE.MTLLoader( scope.textureBaseUrl );
			mtlLoader.load( mtlurl, function ( materials ) {

				var materialsCreator = materials;
				materialsCreator.preload();

				object.traverse( function ( object ) {

					if ( object instanceof THREE.Mesh ) {

						if ( object.material.name ) {

							var material = materialsCreator.create( object.material.name );

							if ( material ) {

								object.material = material;

							}

						}

					}

				} );

				onLoad( object );

			} );

		} );

	},

	/**
	 * Parses loaded .obj file
	 * @param data - content of .obj file
	 * @param mtllibCallback - callback to handle mtllib declaration (optional)
	 * @return {THREE.Object3D} - Object3D (with default material)
	 */
	parse: function ( data, mtllibCallback ) {

		function vector( x, y, z ) {

			return new THREE.Vector3( x, y, z );

		}

		function uv( u, v ) {

			return new THREE.Vector2( u, v );

		}

		function face3( a, b, c, normals ) {

			return new THREE.Face3( a, b, c, normals );

		}

		// Running count of vertices already flushed into earlier objects;
		// obj face indices are global, so this offsets them per object.
		var face_offset = 0;

		// Flushes the current geometry into a mesh (if it has vertices) and
		// starts a new mesh; optionally names the new mesh / its material.
		function meshN( meshName, materialName ) {

			if ( vertices.length > 0 ) {

				geometry.vertices = vertices;

				geometry.mergeVertices();
				geometry.computeCentroids();
				geometry.computeFaceNormals();
				geometry.computeBoundingSphere();

				object.add( mesh );

				geometry = new THREE.Geometry();
				mesh = new THREE.Mesh( geometry, material );
				// Fix: removed unused local `verticesCount` (assigned, never read).

			}

			if ( meshName !== undefined ) mesh.name = meshName;

			if ( materialName !== undefined ) {

				material = new THREE.MeshLambertMaterial();
				material.name = materialName;

				mesh.material = material;

			}

		}

		var group = new THREE.Object3D();
		var object = group;

		var geometry = new THREE.Geometry();
		var material = new THREE.MeshLambertMaterial();
		var mesh = new THREE.Mesh( geometry, material );

		var vertices = [];
		var normals = [];
		var uvs = [];

		// Adds one triangular face; indices are 1-based in .obj and global,
		// hence the (face_offset + 1) correction.
		function add_face( a, b, c, normals_inds ) {

			if ( normals_inds === undefined ) {

				geometry.faces.push( face3(
					parseInt( a ) - (face_offset + 1),
					parseInt( b ) - (face_offset + 1),
					parseInt( c ) - (face_offset + 1)
				) );

			} else {

				geometry.faces.push( face3(
					parseInt( a ) - (face_offset + 1),
					parseInt( b ) - (face_offset + 1),
					parseInt( c ) - (face_offset + 1),
					[
						normals[ parseInt( normals_inds[ 0 ] ) - 1 ].clone(),
						normals[ parseInt( normals_inds[ 1 ] ) - 1 ].clone(),
						normals[ parseInt( normals_inds[ 2 ] ) - 1 ].clone()
					]
				) );

			}

		}

		function add_uvs( a, b, c ) {

			geometry.faceVertexUvs[ 0 ].push( [
				uvs[ parseInt( a ) - 1 ].clone(),
				uvs[ parseInt( b ) - 1 ].clone(),
				uvs[ parseInt( c ) - 1 ].clone()
			] );

		}

		// Handles both triangles and quads (a quad is split into two
		// triangles: 0-1-3 and 1-2-3).
		function handle_face_line(faces, uvs, normals_inds) {

			if ( faces[ 3 ] === undefined ) {

				add_face( faces[ 0 ], faces[ 1 ], faces[ 2 ], normals_inds );

				if (!(uvs === undefined) && uvs.length > 0) {

					add_uvs( uvs[ 0 ], uvs[ 1 ], uvs[ 2 ] );

				}

			} else {

				if (!(normals_inds === undefined) && normals_inds.length > 0) {

					add_face( faces[ 0 ], faces[ 1 ], faces[ 3 ], [ normals_inds[ 0 ], normals_inds[ 1 ], normals_inds[ 3 ] ]);
					add_face( faces[ 1 ], faces[ 2 ], faces[ 3 ], [ normals_inds[ 1 ], normals_inds[ 2 ], normals_inds[ 3 ] ]);

				} else {

					add_face( faces[ 0 ], faces[ 1 ], faces[ 3 ]);
					add_face( faces[ 1 ], faces[ 2 ], faces[ 3 ]);

				}

				if (!(uvs === undefined) && uvs.length > 0) {

					add_uvs( uvs[ 0 ], uvs[ 1 ], uvs[ 3 ] );
					add_uvs( uvs[ 1 ], uvs[ 2 ], uvs[ 3 ] );

				}

			}

		}

		// v float float float
		var vertex_pattern = /v( +[\d|\.|\+|\-|e]+)( +[\d|\.|\+|\-|e]+)( +[\d|\.|\+|\-|e]+)/;

		// vn float float float
		var normal_pattern = /vn( +[\d|\.|\+|\-|e]+)( +[\d|\.|\+|\-|e]+)( +[\d|\.|\+|\-|e]+)/;

		// vt float float
		var uv_pattern = /vt( +[\d|\.|\+|\-|e]+)( +[\d|\.|\+|\-|e]+)/;

		// f vertex vertex vertex ...
		var face_pattern1 = /f( +\d+)( +\d+)( +\d+)( +\d+)?/;

		// f vertex/uv vertex/uv vertex/uv ...
		var face_pattern2 = /f( +(\d+)\/(\d+))( +(\d+)\/(\d+))( +(\d+)\/(\d+))( +(\d+)\/(\d+))?/;

		// f vertex/uv/normal vertex/uv/normal vertex/uv/normal ...
		var face_pattern3 = /f( +(\d+)\/(\d+)\/(\d+))( +(\d+)\/(\d+)\/(\d+))( +(\d+)\/(\d+)\/(\d+))( +(\d+)\/(\d+)\/(\d+))?/;

		// f vertex//normal vertex//normal vertex//normal ...
		// Fix: added missing statement-terminating semicolon.
		var face_pattern4 = /f( +(\d+)\/\/(\d+))( +(\d+)\/\/(\d+))( +(\d+)\/\/(\d+))( +(\d+)\/\/(\d+))?/;

		var lines = data.split( "\n" );

		for ( var i = 0; i < lines.length; i ++ ) {

			var line = lines[ i ];
			line = line.trim();

			var result;

			if ( line.length === 0 || line.charAt( 0 ) === '#' ) {

				continue;

			} else if ( ( result = vertex_pattern.exec( line ) ) !== null ) {

				// ["v 1.0 2.0 3.0", "1.0", "2.0", "3.0"]
				vertices.push( vector(
					parseFloat( result[ 1 ] ),
					parseFloat( result[ 2 ] ),
					parseFloat( result[ 3 ] )
				) );

			} else if ( ( result = normal_pattern.exec( line ) ) !== null ) {

				// ["vn 1.0 2.0 3.0", "1.0", "2.0", "3.0"]
				normals.push( vector(
					parseFloat( result[ 1 ] ),
					parseFloat( result[ 2 ] ),
					parseFloat( result[ 3 ] )
				) );

			} else if ( ( result = uv_pattern.exec( line ) ) !== null ) {

				// ["vt 0.1 0.2", "0.1", "0.2"]
				uvs.push( uv(
					parseFloat( result[ 1 ] ),
					parseFloat( result[ 2 ] )
				) );

			} else if ( ( result = face_pattern1.exec( line ) ) !== null ) {

				// ["f 1 2 3", "1", "2", "3", undefined]
				handle_face_line([ result[ 1 ], result[ 2 ], result[ 3 ], result[ 4 ] ]);

			} else if ( ( result = face_pattern2.exec( line ) ) !== null ) {

				// ["f 1/1 2/2 3/3", " 1/1", "1", "1", " 2/2", "2", "2", " 3/3", "3", "3", undefined, undefined, undefined]
				handle_face_line(
					[ result[ 2 ], result[ 5 ], result[ 8 ], result[ 11 ] ], //faces
					[ result[ 3 ], result[ 6 ], result[ 9 ], result[ 12 ] ] //uv
				);

			} else if ( ( result = face_pattern3.exec( line ) ) !== null ) {

				// ["f 1/1/1 2/2/2 3/3/3", " 1/1/1", "1", "1", "1", " 2/2/2", "2", "2", "2", " 3/3/3", "3", "3", "3", undefined, undefined, undefined, undefined]
				handle_face_line(
					[ result[ 2 ], result[ 6 ], result[ 10 ], result[ 14 ] ], //faces
					[ result[ 3 ], result[ 7 ], result[ 11 ], result[ 15 ] ], //uv
					[ result[ 4 ], result[ 8 ], result[ 12 ], result[ 16 ] ] //normal
				);

			} else if ( ( result = face_pattern4.exec( line ) ) !== null ) {

				// ["f 1//1 2//2 3//3", " 1//1", "1", "1", " 2//2", "2", "2", " 3//3", "3", "3", undefined, undefined, undefined]
				handle_face_line(
					[ result[ 2 ], result[ 5 ], result[ 8 ], result[ 11 ] ], //faces
					[ ], //uv
					[ result[ 3 ], result[ 6 ], result[ 9 ], result[ 12 ] ] //normal
				);

			} else if ( /^o /.test( line ) ) {

				// object
				meshN();
				face_offset = face_offset + vertices.length;
				vertices = [];
				object = new THREE.Object3D();
				object.name = line.substring( 2 ).trim();
				group.add( object );

			} else if ( /^g /.test( line ) ) {

				// group
				meshN( line.substring( 2 ).trim(), undefined );

			} else if ( /^usemtl /.test( line ) ) {

				// material
				meshN( undefined, line.substring( 7 ).trim() );

			} else if ( /^mtllib /.test( line ) ) {

				// mtl file
				if ( mtllibCallback ) {

					var mtlfile = line.substring( 7 );
					mtlfile = mtlfile.trim();
					mtllibCallback( mtlfile );

				}

			} else if ( /^s /.test( line ) ) {

				// Smooth shading

			} else {

				console.log( "THREE.OBJMTLLoader: Unhandled line " + line );

			}

		}

		//Add last object
		meshN(undefined, undefined);

		return group;

	}

};

THREE.EventDispatcher.prototype.apply( THREE.OBJMTLLoader.prototype );
apache-2.0
a156845044/BrilliantSDK
trunk/Brilliant.Web.UI/Properties/AssemblyInfo.cs
1338
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("Brilliant.Web.UI")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("Microsoft")]
[assembly: AssemblyProduct("Brilliant.Web.UI")]
[assembly: AssemblyCopyright("Copyright © Microsoft 2015")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly invisible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is the ID of the typelib if this project is exposed to COM.
[assembly: Guid("0dcb6434-8270-45a9-8f20-7319412f6cf0")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all four values, or default the Build and Revision numbers
// by using '*' as shown below:
// [assembly: AssemblyVersion("1.0.*")]
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
apache-2.0
SudhersonV/DotNetRoot
SampleCode/SignalRChat/Properties/AssemblyInfo.cs
1358
// Assembly metadata for the SignalRChat sample application.
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;

// General Information about an assembly is controlled through the following
// set of attributes. Change these attribute values to modify the information
// associated with an assembly.
[assembly: AssemblyTitle("SignalRChat")]
[assembly: AssemblyDescription("")]
[assembly: AssemblyConfiguration("")]
[assembly: AssemblyCompany("")]
[assembly: AssemblyProduct("SignalRChat")]
[assembly: AssemblyCopyright("Copyright © 2016")]
[assembly: AssemblyTrademark("")]
[assembly: AssemblyCulture("")]

// Setting ComVisible to false makes the types in this assembly not visible
// to COM components. If you need to access a type in this assembly from
// COM, set the ComVisible attribute to true on that type.
[assembly: ComVisible(false)]

// The following GUID is for the ID of the typelib if this project is exposed to COM
[assembly: Guid("be58cbaf-c6cd-47be-9c22-32b52072a0ea")]

// Version information for an assembly consists of the following four values:
//
//      Major Version
//      Minor Version
//      Build Number
//      Revision
//
// You can specify all the values or you can default the Revision and Build Numbers
// by using the '*' as shown below:
[assembly: AssemblyVersion("1.0.0.0")]
[assembly: AssemblyFileVersion("1.0.0.0")]
apache-2.0
umuzungu/zipline
tests/pipeline/test_buyback_auth.py
5592
""" Tests for the reference loader for Buyback Authorizations. """ import blaze as bz from blaze.compute.core import swap_resources_into_scope import pandas as pd from six import iteritems from zipline.pipeline.common import( BUYBACK_AMOUNT_FIELD_NAME, BUYBACK_ANNOUNCEMENT_FIELD_NAME, BUYBACK_TYPE_FIELD_NAME, BUYBACK_UNIT_FIELD_NAME, DAYS_SINCE_PREV, PREVIOUS_BUYBACK_AMOUNT, PREVIOUS_BUYBACK_ANNOUNCEMENT, PREVIOUS_BUYBACK_TYPE, PREVIOUS_BUYBACK_UNIT, SID_FIELD_NAME, TS_FIELD_NAME, ) from zipline.pipeline.data import BuybackAuthorizations from zipline.pipeline.factors.events import BusinessDaysSinceBuybackAuth from zipline.pipeline.loaders.buyback_auth import BuybackAuthorizationsLoader from zipline.pipeline.loaders.blaze import BlazeBuybackAuthorizationsLoader from zipline.pipeline.loaders.utils import ( zip_with_dates, zip_with_floats, zip_with_strs ) from zipline.testing.fixtures import ( WithPipelineEventDataLoader, ZiplineTestCase ) date_intervals = [ [['2014-01-01', '2014-01-04'], ['2014-01-05', '2014-01-09'], ['2014-01-10', '2014-01-31']] ] buyback_authorizations_cases = [ pd.DataFrame({ BUYBACK_AMOUNT_FIELD_NAME: [1, 15], BUYBACK_UNIT_FIELD_NAME: ["$M", "Mshares"], BUYBACK_TYPE_FIELD_NAME: ["New", "Additional"], TS_FIELD_NAME: pd.to_datetime(['2014-01-05', '2014-01-10']), BUYBACK_ANNOUNCEMENT_FIELD_NAME: pd.to_datetime(['2014-01-04', '2014-01-09']) }), pd.DataFrame( columns=[BUYBACK_AMOUNT_FIELD_NAME, BUYBACK_UNIT_FIELD_NAME, BUYBACK_TYPE_FIELD_NAME, BUYBACK_ANNOUNCEMENT_FIELD_NAME, TS_FIELD_NAME], dtype='datetime64[ns]' ), ] class BuybackAuthLoaderTestCase(WithPipelineEventDataLoader, ZiplineTestCase): """ Test for cash buyback authorizations dataset. 
""" pipeline_columns = { PREVIOUS_BUYBACK_AMOUNT: BuybackAuthorizations.previous_amount.latest, PREVIOUS_BUYBACK_ANNOUNCEMENT: BuybackAuthorizations.previous_date.latest, PREVIOUS_BUYBACK_UNIT: BuybackAuthorizations.previous_unit.latest, PREVIOUS_BUYBACK_TYPE: BuybackAuthorizations.previous_type.latest, DAYS_SINCE_PREV: BusinessDaysSinceBuybackAuth(), } @classmethod def get_sids(cls): return range(2) @classmethod def get_dataset(cls): return {sid: frame for sid, frame in enumerate(buyback_authorizations_cases)} loader_type = BuybackAuthorizationsLoader def setup(self, dates): cols = { PREVIOUS_BUYBACK_AMOUNT: self.get_sids_to_frames(zip_with_floats, [['NaN', 1, 15]], date_intervals, dates, 'float', 'NaN'), PREVIOUS_BUYBACK_ANNOUNCEMENT: self.get_sids_to_frames( zip_with_dates, [['NaT', '2014-01-04', '2014-01-09']], date_intervals, dates, 'datetime64[ns]', 'NaN' ), PREVIOUS_BUYBACK_UNIT: self.get_sids_to_frames( zip_with_strs, [[None, "$M", "Mshares"]], date_intervals, dates, 'category', None ), PREVIOUS_BUYBACK_TYPE: self.get_sids_to_frames( zip_with_strs, [[None, "New", "Additional"]], date_intervals, dates, 'category', None ) } cols[DAYS_SINCE_PREV] = self._compute_busday_offsets( cols[PREVIOUS_BUYBACK_ANNOUNCEMENT] ) return cols class BlazeBuybackAuthLoaderTestCase(BuybackAuthLoaderTestCase): """ Test case for loading via blaze. 
""" loader_type = BlazeBuybackAuthorizationsLoader def pipeline_event_loader_args(self, dates): _, mapping = super( BlazeBuybackAuthLoaderTestCase, self, ).pipeline_event_loader_args(dates) return (bz.data(pd.concat( pd.DataFrame({ BUYBACK_ANNOUNCEMENT_FIELD_NAME: frame[BUYBACK_ANNOUNCEMENT_FIELD_NAME], BUYBACK_AMOUNT_FIELD_NAME: frame[BUYBACK_AMOUNT_FIELD_NAME], BUYBACK_UNIT_FIELD_NAME: frame[BUYBACK_UNIT_FIELD_NAME], BUYBACK_TYPE_FIELD_NAME: frame[BUYBACK_TYPE_FIELD_NAME], TS_FIELD_NAME: frame[TS_FIELD_NAME], SID_FIELD_NAME: sid, }) for sid, frame in iteritems(mapping) ).reset_index(drop=True)),) class BlazeBuybackAuthLoaderNotInteractiveTestCase( BlazeBuybackAuthLoaderTestCase ): """Test case for passing a non-interactive symbol and a dict of resources. """ def pipeline_event_loader_args(self, dates): (bound_expr,) = super( BlazeBuybackAuthLoaderNotInteractiveTestCase, self, ).pipeline_event_loader_args(dates) return swap_resources_into_scope(bound_expr, {})
apache-2.0
IHTSDO/OTF-User-Module
security/src/main/java/org/ihtsdo/otf/security/UserSecurityModel.java
2612
package org.ihtsdo.otf.security;

import java.util.Collection;
import java.util.List;
import java.util.Map;

import org.ihtsdo.otf.security.dto.OtfAccount;
import org.ihtsdo.otf.security.dto.OtfAccountMin;
import org.ihtsdo.otf.security.dto.OtfApplication;
import org.ihtsdo.otf.security.dto.OtfDirectory;
import org.ihtsdo.otf.security.dto.OtfGroup;
import org.ihtsdo.otf.security.dto.OtfSettings;
import org.ihtsdo.otf.security.dto.UserSecurity;

/**
 * In-memory model of the user-security domain: accounts, applications,
 * directories, groups/members and settings, together with lookup and
 * reset operations over that model.
 *
 * <p>NOTE(review): the exact lifecycle of {@link #buildModel()} versus
 * {@link #buildFullModel()} and what "full" adds is not visible from this
 * interface alone — confirm against the implementing classes.
 */
public interface UserSecurityModel {

	// --- Lifecycle -----------------------------------------------------

	/** Initialises the model (implementation-defined). */
	void init();

	/** Clears the model back to its initial state. */
	void reset();

	UserSecurity getModel();

	void setModel(UserSecurity userSecurityIn);

	UserSecurity getFullModel();

	void buildFullModel();

	void buildModel();

	// --- Account lookups -----------------------------------------------

	/** Looks up an account by its name; contract for a miss (null vs exception) is implementation-defined. */
	OtfAccount getUserAccountByName(final String accnameIn);

	OtfAccount getUserAccountById(final String idIn);

	Collection<OtfAccount> getUsers();

	/** Lightweight (minimal) view of all user accounts. */
	Collection<OtfAccountMin> getUsersMin();

	boolean accountExists(final String accNameIn);

	// String getDirNameForUser(final String accNameIn);

	// --- Applications, directories and settings ------------------------

	OtfApplication getAppbyName(final String appNameIn);

	String getUsersDirName();

	OtfDirectory getDirByName(final String dirName);

	OtfDirectory getMembersDir();

	OtfDirectory getUsersDir();

	OtfSettings getSettings();

	void resetSettings();

	// --- Groups and members --------------------------------------------

	// Should be just the member name or object....
	OtfGroup getMemberByName(final String accNameIn);

	OtfGroup getGroupById(final String idIn);

	OtfGroup getMemberById(final String idIn);

	List<OtfGroup> getGroupsByAppName(final String appnameIn);

	List<OtfGroup> getGroupsByDirName(final String dirnameIn);

	List<String> getDirsByAppName(String appname);

	// --- Name listings -------------------------------------------------

	List<String> getUserNames();

	List<String> getAdminUsers();

	List<String> getMembers();

	List<String> getApps();

	/** Application names excluding the admin application. */
	List<String> getAppsNotAdmin();

	// Map<String, List<String>> getAppsMap();

	// Map<String, OtfAccount> getAllAccounts();

	// --- Handler and well-known application names ----------------------

	void setHandlerAdmin(HandlerAdmin handlerAdmin);

	HandlerAdmin getHandlerAdmin();

	String getAdminApp();

	void setAdminApp(String adminAppIn);

	String getMembersApp();

	void setMembersApp(String membersAppIn);

	String getUsersApp();

	void setUsersApp(String usersAppIn);

	/** Stores an authentication token against the named user. */
	void setUsersToken(String userNameIn, String tokenIn);

	// From model
	boolean appExists(String appname);

	boolean dirExists(String appname);

	Collection<OtfApplication> getOtfApps();

	Collection<OtfDirectory> getOtfDirs();

	OtfApplication getAppById(String idIn);

	OtfDirectory getDirById(String idIn);

	Map<String, List<String>> getAppsMap();

	Map<String, List<String>> getDirsMap();

	Map<String, OtfAccount> getAllAccounts();

	// --- Cache resets ---------------------------------------------------

	void resetAllAccounts();

	void resetAppsMap();

	void resetMembers();

}
apache-2.0
ntt-sic/glance
glance/common/property_utils.py
6833
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2013 Rackspace
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import ConfigParser
import re

from oslo.config import cfg
import webob.exc

import glance.api.policy
from glance.common import exception
from glance.common.ordereddict import OrderedDict
from glance.openstack.common import log as logging
from glance.openstack.common import policy

# NOTE(bourke): The default dict_type is collections.OrderedDict in py27, but
# we must set manually for compatibility with py26
CONFIG = ConfigParser.SafeConfigParser(dict_type=OrderedDict)
LOG = logging.getLogger(__name__)

property_opts = [
    cfg.StrOpt('property_protection_file',
               default=None,
               help=_('The location of the property protection file.')),
    cfg.StrOpt('property_protection_rule_format',
               default='roles',
               help=_('This config value indicates whether "roles" or '
                      '"policies" are used in the property protection file.')),
]

CONF = cfg.CONF
CONF.register_opts(property_opts)


def is_property_protection_enabled():
    """Return True when a property protection file has been configured."""
    if CONF.property_protection_file:
        return True
    return False


class PropertyRules(object):
    """Loads and evaluates image-property protection rules.

    Rules come from the file named by ``property_protection_file``; each
    section name is a regular expression matched against property names and
    each option (create/read/update/delete) lists either roles or policy
    names, depending on ``property_protection_rule_format``.
    """

    def __init__(self, policy_enforcer=None):
        self.rules = []
        self.prop_exp_mapping = {}
        self.policies = []
        self.policy_enforcer = policy_enforcer or glance.api.policy.Enforcer()
        self.prop_prot_rule_format = CONF.property_protection_rule_format
        self.prop_prot_rule_format = self.prop_prot_rule_format.lower()
        self._load_rules()

    def _load_rules(self):
        """Parse the protection file into (compiled-regex, op-dict) rules.

        :raises exception.InvalidPropertyProtectionConfiguration: when the
            file is missing/unreadable, the rule format is invalid, or a
            policy-format rule lists multiple policies for one operation.
        """
        try:
            conf_file = CONF.find_file(CONF.property_protection_file)
            CONFIG.read(conf_file)
        except Exception as e:
            msg = (_("Couldn't find property protection file %s:%s.") %
                   (CONF.property_protection_file, e))
            LOG.error(msg)
            raise exception.InvalidPropertyProtectionConfiguration()

        if self.prop_prot_rule_format not in ['policies', 'roles']:
            # NOTE: interpolation must happen *outside* _() so the raw format
            # string can be looked up in the translation catalog (the original
            # code interpolated inside _(), defeating translation).
            msg = (_("Invalid value '%s' for "
                     "'property_protection_rule_format'. The permitted "
                     "values are 'roles' and 'policies'") %
                   self.prop_prot_rule_format)
            LOG.error(msg)
            raise exception.InvalidPropertyProtectionConfiguration()

        operations = ['create', 'read', 'update', 'delete']
        properties = CONFIG.sections()
        for property_exp in properties:
            property_dict = {}
            compiled_rule = self._compile_rule(property_exp)

            for operation in operations:
                permissions = CONFIG.get(property_exp, operation)
                if permissions:
                    if self.prop_prot_rule_format == 'policies':
                        if ',' in permissions:
                            # Same i18n fix as above: format outside _().
                            msg = (_("Multiple policies '%s' not allowed for "
                                     "a given operation. Policies can be "
                                     "combined in the policy file") %
                                   permissions)
                            LOG.error(msg)
                            raise exception.\
                                InvalidPropertyProtectionConfiguration()

                        self.prop_exp_mapping[compiled_rule] = property_exp
                        self._add_policy_rules(property_exp, operation,
                                               permissions)
                        permissions = [permissions]
                    else:
                        permissions = [permission.strip() for permission in
                                       permissions.split(',')]
                    property_dict[operation] = permissions
                else:
                    # Missing option: no role may perform this operation.
                    property_dict[operation] = []
                    msg = (_('Property protection on operation %s for rule '
                             '%s is not found. No role will be allowed to '
                             'perform this operation.') %
                           (operation, property_exp))
                    LOG.warn(msg)

            self.rules.append((compiled_rule, property_dict))

    def _compile_rule(self, rule):
        """Compile a section name into a regex, wrapping syntax errors."""
        try:
            return re.compile(rule)
        except Exception as e:
            msg = (_("Encountered a malformed property protection rule %s:%s.")
                   % (rule, e))
            LOG.error(msg)
            raise exception.InvalidPropertyProtectionConfiguration()

    def _add_policy_rules(self, property_exp, action, rule):
        """
        Add policy rules to the policy enforcer.

        For example, if the file listed as property_protection_file has:
        [prop_a]
        create = glance_creator
        then the corresponding policy rule would be:
        "prop_a:create": "rule:glance_creator"
        where glance_creator is defined in policy.json. For example:
        "glance:creator": "role:admin or role:glance_create_user"
        """
        rule = "rule:%s" % rule
        rule_name = "%s:%s" % (property_exp, action)
        rule_dict = {}
        rule_dict[rule_name] = policy.parse_rule(rule)
        self.policy_enforcer.add_rules(rule_dict)

    def _check_policy(self, property_exp, action, context):
        """Return True when the enforcer allows <property_exp>:<action>."""
        try:
            target = ":".join([property_exp, action])
            self.policy_enforcer.enforce(context, target, {})
        except exception.Forbidden:
            return False
        return True

    def check_property_rules(self, property_name, action, context):
        """Return True when the context may perform ``action`` on the property.

        The first rule whose regex matches the property name decides;
        with no rules loaded everything is allowed, with an unknown action
        nothing is.
        """
        roles = context.roles
        if not self.rules:
            return True

        if action not in ['create', 'read', 'update', 'delete']:
            return False

        for rule_exp, rule in self.rules:
            if rule_exp.search(str(property_name)):
                rule_roles = rule.get(action)
                if rule_roles:
                    if self.prop_prot_rule_format == 'policies':
                        prop_exp_key = self.prop_exp_mapping[rule_exp]
                        return self._check_policy(prop_exp_key, action,
                                                  context)
                    if set(roles).intersection(set(rule_roles)):
                        return True
        return False
apache-2.0
trycoon/JEL
jel-server/src/main/java/se/liquidbytes/jel/system/package-info.java
208
@ModuleGen(name = "jelservice", groupPackage = "se.liquidbytes.jel") package se.liquidbytes.jel.system; // This file is needed for Vert.x Service-Proxy to work. import io.vertx.codegen.annotations.ModuleGen;
apache-2.0
EnMasseProject/enmasse
vendor/github.com/99designs/gqlgen/graphql/executable_schema.go
4016
//go:generate go run github.com/matryer/moq -out executable_schema_mock.go . ExecutableSchema package graphql import ( "context" "fmt" "github.com/vektah/gqlparser/v2/ast" ) type ExecutableSchema interface { Schema() *ast.Schema Complexity(typeName, fieldName string, childComplexity int, args map[string]interface{}) (int, bool) Exec(ctx context.Context) ResponseHandler } // CollectFields returns the set of fields from an ast.SelectionSet where all collected fields satisfy at least one of the GraphQL types // passed through satisfies. Providing an empty or nil slice for satisfies will return collect all fields regardless of fragment // type conditions. func CollectFields(reqCtx *OperationContext, selSet ast.SelectionSet, satisfies []string) []CollectedField { return collectFields(reqCtx, selSet, satisfies, map[string]bool{}) } func collectFields(reqCtx *OperationContext, selSet ast.SelectionSet, satisfies []string, visited map[string]bool) []CollectedField { groupedFields := make([]CollectedField, 0, len(selSet)) for _, sel := range selSet { switch sel := sel.(type) { case *ast.Field: if !shouldIncludeNode(sel.Directives, reqCtx.Variables) { continue } f := getOrCreateAndAppendField(&groupedFields, sel.Alias, func() CollectedField { return CollectedField{Field: sel} }) f.Selections = append(f.Selections, sel.SelectionSet...) case *ast.InlineFragment: if !shouldIncludeNode(sel.Directives, reqCtx.Variables) { continue } if len(satisfies) > 0 && !instanceOf(sel.TypeCondition, satisfies) { continue } for _, childField := range collectFields(reqCtx, sel.SelectionSet, satisfies, visited) { f := getOrCreateAndAppendField(&groupedFields, childField.Name, func() CollectedField { return childField }) f.Selections = append(f.Selections, childField.Selections...) 
} case *ast.FragmentSpread: if !shouldIncludeNode(sel.Directives, reqCtx.Variables) { continue } fragmentName := sel.Name if _, seen := visited[fragmentName]; seen { continue } visited[fragmentName] = true fragment := reqCtx.Doc.Fragments.ForName(fragmentName) if fragment == nil { // should never happen, validator has already run panic(fmt.Errorf("missing fragment %s", fragmentName)) } if len(satisfies) > 0 && !instanceOf(fragment.TypeCondition, satisfies) { continue } for _, childField := range collectFields(reqCtx, fragment.SelectionSet, satisfies, visited) { f := getOrCreateAndAppendField(&groupedFields, childField.Name, func() CollectedField { return childField }) f.Selections = append(f.Selections, childField.Selections...) } default: panic(fmt.Errorf("unsupported %T", sel)) } } return groupedFields } type CollectedField struct { *ast.Field Selections ast.SelectionSet } func instanceOf(val string, satisfies []string) bool { for _, s := range satisfies { if val == s { return true } } return false } func getOrCreateAndAppendField(c *[]CollectedField, name string, creator func() CollectedField) *CollectedField { for i, cf := range *c { if cf.Alias == name { return &(*c)[i] } } f := creator() *c = append(*c, f) return &(*c)[len(*c)-1] } func shouldIncludeNode(directives ast.DirectiveList, variables map[string]interface{}) bool { if len(directives) == 0 { return true } skip, include := false, true if d := directives.ForName("skip"); d != nil { skip = resolveIfArgument(d, variables) } if d := directives.ForName("include"); d != nil { include = resolveIfArgument(d, variables) } return !skip && include } func resolveIfArgument(d *ast.Directive, variables map[string]interface{}) bool { arg := d.Arguments.ForName("if") if arg == nil { panic(fmt.Sprintf("%s: argument 'if' not defined", d.Name)) } value, err := arg.Value.Value(variables) if err != nil { panic(err) } ret, ok := value.(bool) if !ok { panic(fmt.Sprintf("%s: argument 'if' is not a boolean", d.Name)) } return 
ret }
apache-2.0
CMTelecom/kafka-net
src/Kafka/Kafka.Tests/Integration/AutoOffsetResetTest.cs
4230
using System.Collections.Generic; using System.Linq; using System.Reflection; using System.Text; using Kafka.Client.Consumers; using Kafka.Client.Producers; using Kafka.Client.Serializers; using Kafka.Client.Utils; using Kafka.Tests.Custom.Server; using Kafka.Tests.Utils; using log4net; using Xunit; namespace Kafka.Tests.Integration { public class AutoOffsetResetTest : KafkaServerTestHarness { protected static readonly ILog Logger = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType); public const string Topic = "test_topic"; public const string Group = "default_group"; public const string TestConsumer = "consumer"; public const int NumMessages = 10; public const int LargeOffset = 10000; public const int SmallOffset = -1; protected override List<TempKafkaConfig> CreateConfigs() { return TestUtils.CreateBrokerConfigs(1); } [Fact] public void TestResetToEarliestWhenOffsetTooHigh() { Assert.Equal(NumMessages, this.ResetAndConsume(NumMessages, "smallest", LargeOffset)); } [Fact] public void TestResetToEarliestWhenOffsetTooLow() { Assert.Equal(NumMessages, this.ResetAndConsume(NumMessages, "smallest", SmallOffset)); } [Fact] public void TestResetToLatestWhenOffsetTooHigh() { Assert.Equal(0, this.ResetAndConsume(NumMessages, "largest", LargeOffset)); } [Fact] public void TestResetToLatestWhenOffsetTooLow() { Assert.Equal(0, this.ResetAndConsume(NumMessages, "largest", SmallOffset)); } /// <summary> /// Produce the given number of messages, create a consumer with the given offset policy, /// then reset the offset to the given value and consume until we get no new messages. 
/// </summary> /// <param name="numMessages"></param> /// <param name="resetTo"></param> /// <param name="offset"></param> /// <returns>The count of messages received.</returns> public int ResetAndConsume(int numMessages, string resetTo, long offset) { TestUtils.WaitUntilLeaderIsElectedOrChanged(this.ZkClient, Topic, 0, 1000); var producer = TestUtils.CreateProducer( TestUtils.GetBrokerListFromConfigs(Configs), new DefaultEncoder(), new StringEncoder()); for (var i = 0; i < numMessages; i++) { producer.Send(new KeyedMessage<string, byte[]>(Topic, Topic, Encoding.UTF8.GetBytes("test"))); } TestUtils.WaitUntilMetadataIsPropagated(this.Servers, Topic, 0, 1000); // update offset in zookeeper for consumer to jump "forward" in time var dirs = new ZKGroupTopicDirs(Group, Topic); var consumerConfig = TestUtils.CreateConsumerProperties(ZkConnect, Group, TestConsumer); consumerConfig.AutoOffsetReset = resetTo; consumerConfig.ConsumerTimeoutMs = 2000; consumerConfig.FetchWaitMaxMs = 0; TestUtils.UpdateConsumerOffset(consumerConfig, dirs.ConsumerOffsetDir + "/" + "0", offset); Logger.InfoFormat("Update consumer offset to {0}", offset); var consumerConnector = Consumer.Create(consumerConfig); var messagesStream = consumerConnector.CreateMessageStreams(new Dictionary<string, int> { { Topic, 1 } })[Topic].First(); var received = 0; var iter = messagesStream.GetEnumerator(); try { for (var i = 0; i < numMessages; i++) { iter.MoveNext(); // will throw a timeout exception if the message isn't there received++; } } catch (ConsumerTimeoutException) { Logger.InfoFormat("consumer timeout out after receiving {0} messages", received); } finally { producer.Dispose(); consumerConnector.Shutdown(); } return received; } } }
apache-2.0
vladisav/ignite
modules/core/src/main/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridDhtTxLocalAdapter.java
28018
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal.processors.cache.distributed.dht; import java.io.Externalizable; import java.util.Collection; import java.util.Collections; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.UUID; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.cluster.ClusterNode; import org.apache.ignite.internal.IgniteInternalFuture; import org.apache.ignite.internal.processors.affinity.AffinityTopologyVersion; import org.apache.ignite.internal.processors.cache.GridCacheContext; import org.apache.ignite.internal.processors.cache.GridCacheEntryEx; import org.apache.ignite.internal.processors.cache.GridCacheEntryRemovedException; import org.apache.ignite.internal.processors.cache.GridCacheReturn; import org.apache.ignite.internal.processors.cache.GridCacheSharedContext; import org.apache.ignite.internal.processors.cache.KeyCacheObject; import org.apache.ignite.internal.processors.cache.distributed.GridDistributedTxMapping; import org.apache.ignite.internal.processors.cache.distributed.near.GridNearTxPrepareResponse; import org.apache.ignite.internal.processors.cache.transactions.IgniteInternalTx; import 
org.apache.ignite.internal.processors.cache.transactions.IgniteTxEntry; import org.apache.ignite.internal.processors.cache.transactions.IgniteTxLocalAdapter; import org.apache.ignite.internal.processors.cache.version.GridCacheVersion; import org.apache.ignite.internal.util.F0; import org.apache.ignite.internal.util.GridLeanMap; import org.apache.ignite.internal.util.GridLeanSet; import org.apache.ignite.internal.util.future.GridEmbeddedFuture; import org.apache.ignite.internal.util.future.GridFinishedFuture; import org.apache.ignite.internal.util.tostring.GridToStringBuilder; import org.apache.ignite.internal.util.typedef.CX1; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.CU; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteUuid; import org.apache.ignite.transactions.TransactionConcurrency; import org.apache.ignite.transactions.TransactionIsolation; import org.apache.ignite.transactions.TransactionState; import org.jetbrains.annotations.Nullable; import java.util.concurrent.ConcurrentHashMap; import static org.apache.ignite.internal.processors.cache.GridCacheOperation.NOOP; import static org.apache.ignite.internal.processors.cache.GridCacheOperation.READ; import static org.apache.ignite.transactions.TransactionState.COMMITTED; import static org.apache.ignite.transactions.TransactionState.COMMITTING; import static org.apache.ignite.transactions.TransactionState.PREPARED; import static org.apache.ignite.transactions.TransactionState.PREPARING; import static org.apache.ignite.transactions.TransactionState.ROLLED_BACK; import static org.apache.ignite.transactions.TransactionState.ROLLING_BACK; import static org.apache.ignite.transactions.TransactionState.UNKNOWN; /** * Replicated user transaction. */ public abstract class GridDhtTxLocalAdapter extends IgniteTxLocalAdapter { /** */ private static final long serialVersionUID = 0L; /** Near mappings. 
*/ protected Map<UUID, GridDistributedTxMapping> nearMap = new ConcurrentHashMap<>(); /** DHT mappings. */ protected Map<UUID, GridDistributedTxMapping> dhtMap = new ConcurrentHashMap<>(); /** Mapped flag. */ protected volatile boolean mapped; /** */ protected boolean explicitLock; /** Versions of pending locks for entries of this tx. */ private Collection<GridCacheVersion> pendingVers; /** Flag indicating that originating node has near cache. */ private boolean nearOnOriginatingNode; /** Nodes where transactions were started on lock step. */ private Set<ClusterNode> lockTxNodes; /** * Empty constructor required for {@link Externalizable}. */ protected GridDhtTxLocalAdapter() { // No-op. } /** * @param xidVer Transaction version. * @param implicit Implicit flag. * @param implicitSingle Implicit-with-single-key flag. * @param cctx Cache context. * @param sys System flag. * @param concurrency Concurrency. * @param isolation Isolation. * @param timeout Timeout. * @param txSize Expected transaction size. */ protected GridDhtTxLocalAdapter( GridCacheSharedContext cctx, GridCacheVersion xidVer, boolean implicit, boolean implicitSingle, boolean sys, boolean explicitLock, byte plc, TransactionConcurrency concurrency, TransactionIsolation isolation, long timeout, boolean invalidate, boolean storeEnabled, boolean onePhaseCommit, int txSize, @Nullable UUID subjId, int taskNameHash ) { super( cctx, xidVer, implicit, implicitSingle, sys, plc, concurrency, isolation, timeout, invalidate, storeEnabled, onePhaseCommit, txSize, subjId, taskNameHash ); assert cctx != null; this.explicitLock = explicitLock; threadId = Thread.currentThread().getId(); } /** * @param node Node. */ void addLockTransactionNode(ClusterNode node) { assert node != null; assert !node.isLocal(); if (lockTxNodes == null) lockTxNodes = new HashSet<>(); lockTxNodes.add(node); } /** * Sets flag that indicates that originating node has a near cache that participates in this transaction. 
* * @param hasNear Has near cache flag. */ public void nearOnOriginatingNode(boolean hasNear) { nearOnOriginatingNode = hasNear; } /** * Gets flag that indicates that originating node has a near cache that participates in this transaction. * * @return Has near cache flag. */ boolean nearOnOriginatingNode() { return nearOnOriginatingNode; } /** * @return {@code True} if explicit lock transaction. */ public boolean explicitLock() { return explicitLock; } /** * @param explicitLock Explicit lock flag. */ public void explicitLock(boolean explicitLock) { this.explicitLock = explicitLock; } /** * @return Nodes where transactions were started on lock step. */ @Nullable Set<ClusterNode> lockTransactionNodes() { return lockTxNodes; } /** * @return Near node id. */ protected abstract UUID nearNodeId(); /** * @return Near future ID. */ protected abstract IgniteUuid nearFutureId(); /** * Adds reader to cached entry. * * @param msgId Message ID. * @param cached Cached entry. * @param entry Transaction entry. * @param topVer Topology version. * @return {@code True} if reader was added as a result of this call. */ @Nullable protected abstract IgniteInternalFuture<Boolean> addReader(long msgId, GridDhtCacheEntry cached, IgniteTxEntry entry, AffinityTopologyVersion topVer); /** * @param err Error, if any. */ protected abstract void sendFinishReply(@Nullable Throwable err); /** {@inheritDoc} */ @Override public boolean needsCompletedVersions() { return nearOnOriginatingNode; } /** * @return Versions for all pending locks that were in queue before tx locks were released. */ Collection<GridCacheVersion> pendingVersions() { return pendingVers == null ? Collections.<GridCacheVersion>emptyList() : pendingVers; } /** * @param pendingVers Versions for all pending locks that were in queue before tx locsk were released. */ public void pendingVersions(Collection<GridCacheVersion> pendingVers) { this.pendingVers = pendingVers; } /** * Map explicit locks. 
*/ protected void mapExplicitLocks() { if (!mapped) { // Explicit locks may participate in implicit transactions only. if (!implicit()) { mapped = true; return; } Map<ClusterNode, List<GridDhtCacheEntry>> dhtEntryMap = null; Map<ClusterNode, List<GridDhtCacheEntry>> nearEntryMap = null; for (IgniteTxEntry e : allEntries()) { assert e.cached() != null; GridCacheContext cacheCtx = e.cached().context(); if (cacheCtx.isNear()) continue; if (e.cached().obsolete()) { GridCacheEntryEx cached = cacheCtx.cache().entryEx(e.key(), topologyVersion()); e.cached(cached); } if (e.cached().detached() || e.cached().isLocal()) continue; while (true) { try { // Map explicit locks. if (e.explicitVersion() != null && !e.explicitVersion().equals(xidVer)) { if (dhtEntryMap == null) dhtEntryMap = new GridLeanMap<>(); if (nearEntryMap == null) nearEntryMap = new GridLeanMap<>(); cacheCtx.dhtMap( (GridDhtCacheEntry)e.cached(), e.explicitVersion(), log, dhtEntryMap, nearEntryMap); } break; } catch (GridCacheEntryRemovedException ignore) { GridCacheEntryEx cached = cacheCtx.cache().entryEx(e.key(), topologyVersion()); e.cached(cached); } } } if (!F.isEmpty(dhtEntryMap)) addDhtNodeEntryMapping(dhtEntryMap); if (!F.isEmpty(nearEntryMap)) addNearNodeEntryMapping(nearEntryMap); mapped = true; } } /** * @return DHT map. */ Map<UUID, GridDistributedTxMapping> dhtMap() { mapExplicitLocks(); return dhtMap; } /** * @return Near map. */ Map<UUID, GridDistributedTxMapping> nearMap() { mapExplicitLocks(); return nearMap; } /** * @param mappings Mappings to add. */ private void addDhtNodeEntryMapping(Map<ClusterNode, List<GridDhtCacheEntry>> mappings) { addMapping(mappings, dhtMap); } /** * @param mappings Mappings to add. */ private void addNearNodeEntryMapping(Map<ClusterNode, List<GridDhtCacheEntry>> mappings) { addMapping(mappings, nearMap); } /** * @param nodeId Node ID. * @return {@code True} if mapping was removed. 
*/ public boolean removeMapping(UUID nodeId) { return removeMapping(nodeId, null, dhtMap) | removeMapping(nodeId, null, nearMap); } /** * @param nodeId Node ID. * @param entry Entry to remove. * @return {@code True} if was removed. */ boolean removeDhtMapping(UUID nodeId, GridCacheEntryEx entry) { return removeMapping(nodeId, entry, dhtMap); } /** * @param nodeId Node ID. * @param entry Entry to remove. * @return {@code True} if was removed. */ boolean removeNearMapping(UUID nodeId, GridCacheEntryEx entry) { return removeMapping(nodeId, entry, nearMap); } /** * @param nodeId Node ID. * @param entry Entry to remove. * @param map Map to remove from. * @return {@code True} if was removed. */ private boolean removeMapping(UUID nodeId, @Nullable GridCacheEntryEx entry, Map<UUID, GridDistributedTxMapping> map) { if (entry != null) { if (log.isDebugEnabled()) log.debug("Removing mapping for entry [nodeId=" + nodeId + ", entry=" + entry + ']'); IgniteTxEntry txEntry = entry(entry.txKey()); if (txEntry == null) return false; GridDistributedTxMapping m = map.get(nodeId); boolean ret = m != null && m.removeEntry(txEntry); if (m != null && m.empty()) map.remove(nodeId); return ret; } else return map.remove(nodeId) != null; } /** * @param mappings Entry mappings. * @param dst Transaction mappings. 
*/ private void addMapping( Map<ClusterNode, List<GridDhtCacheEntry>> mappings, Map<UUID, GridDistributedTxMapping> dst ) { for (Map.Entry<ClusterNode, List<GridDhtCacheEntry>> mapping : mappings.entrySet()) { ClusterNode n = mapping.getKey(); GridDistributedTxMapping m = dst.get(n.id()); List<GridDhtCacheEntry> entries = mapping.getValue(); for (GridDhtCacheEntry entry : entries) { IgniteTxEntry txEntry = entry(entry.txKey()); if (txEntry != null) { if (m == null) dst.put(n.id(), m = new GridDistributedTxMapping(n)); m.add(txEntry); } } } } /** {@inheritDoc} */ @Override public void addInvalidPartition(GridCacheContext ctx, int part) { assert false : "DHT transaction encountered invalid partition [part=" + part + ", tx=" + this + ']'; } /** * @param msgId Message ID. * @param e Entry to add. * @return Future for active transactions for the time when reader was added. * @throws IgniteCheckedException If failed. */ @Nullable public IgniteInternalFuture<Boolean> addEntry(long msgId, IgniteTxEntry e) throws IgniteCheckedException { init(); TransactionState state = state(); assert state == PREPARING : "Invalid tx state for " + "adding entry [msgId=" + msgId + ", e=" + e + ", tx=" + this + ']'; e.unmarshal(cctx, false, cctx.deploy().globalLoader()); checkInternal(e.txKey()); GridCacheContext cacheCtx = e.context(); GridDhtCacheAdapter dhtCache = cacheCtx.isNear() ? cacheCtx.near().dht() : cacheCtx.dht(); try { IgniteTxEntry existing = entry(e.txKey()); if (existing != null) { existing.op(e.op()); // Absolutely must set operation, as default is DELETE. 
existing.value(e.value(), e.hasWriteValue(), e.hasReadValue()); existing.entryProcessors(e.entryProcessors()); existing.ttl(e.ttl()); existing.filters(e.filters()); existing.expiry(e.expiry()); existing.conflictExpireTime(e.conflictExpireTime()); existing.conflictVersion(e.conflictVersion()); } else { existing = e; addActiveCache(dhtCache.context(), false); GridDhtCacheEntry cached = dhtCache.entryExx(existing.key(), topologyVersion()); existing.cached(cached); GridCacheVersion explicit = existing.explicitVersion(); if (explicit != null) { GridCacheVersion dhtVer = cctx.mvcc().mappedVersion(explicit); if (dhtVer == null) throw new IgniteCheckedException("Failed to find dht mapping for explicit entry version: " + existing); existing.explicitVersion(dhtVer); } txState.addEntry(existing); if (log.isDebugEnabled()) log.debug("Added entry to transaction: " + existing); } return addReader(msgId, dhtCache.entryExx(existing.key()), existing, topologyVersion()); } catch (GridDhtInvalidPartitionException ex) { throw new IgniteCheckedException(ex); } } /** * @param cacheCtx Cache context. * @param entries Entries to lock. * @param msgId Message ID. * @param read Read flag. * @param createTtl TTL for create operation. * @param accessTtl TTL for read operation. * @param needRetVal Return value flag. * @param skipStore Skip store flag. * @param keepBinary Keep binary flag. * @param nearCache {@code True} if near cache enabled on originating node. * @return Lock future. 
*/ @SuppressWarnings("ForLoopReplaceableByForEach") IgniteInternalFuture<GridCacheReturn> lockAllAsync( GridCacheContext cacheCtx, List<GridCacheEntryEx> entries, long msgId, final boolean read, final boolean needRetVal, long createTtl, long accessTtl, boolean skipStore, boolean keepBinary, boolean nearCache ) { try { checkValid(); } catch (IgniteCheckedException e) { return new GridFinishedFuture<>(e); } final GridCacheReturn ret = new GridCacheReturn(localResult(), false); if (F.isEmpty(entries)) return new GridFinishedFuture<>(ret); init(); onePhaseCommit(onePhaseCommit); try { Set<KeyCacheObject> skipped = null; AffinityTopologyVersion topVer = topologyVersion(); GridDhtCacheAdapter dhtCache = cacheCtx.isNear() ? cacheCtx.near().dht() : cacheCtx.dht(); // Enlist locks into transaction. for (int i = 0; i < entries.size(); i++) { GridCacheEntryEx entry = entries.get(i); KeyCacheObject key = entry.key(); IgniteTxEntry txEntry = entry(entry.txKey()); // First time access. if (txEntry == null) { GridDhtCacheEntry cached; while (true) { try { cached = dhtCache.entryExx(key, topVer); cached.unswap(read); break; } catch (GridCacheEntryRemovedException ignore) { if (log.isDebugEnabled()) log.debug("Get removed entry: " + key); } } addActiveCache(dhtCache.context(), false); txEntry = addEntry(NOOP, null, null, null, cached, null, CU.empty0(), false, -1L, -1L, null, skipStore, keepBinary, nearCache); if (read) txEntry.ttl(accessTtl); txEntry.cached(cached); addReader(msgId, cached, txEntry, topVer); } else { if (skipped == null) skipped = new GridLeanSet<>(); skipped.add(key); } } assert pessimistic(); Collection<KeyCacheObject> keys = F.viewReadOnly(entries, CU.entry2Key()); // Acquire locks only after having added operation to the write set. // Otherwise, during rollback we will not know whether locks need // to be rolled back. // Loose all skipped and previously locked (we cannot reenter locks here). final Collection<KeyCacheObject> passedKeys = skipped != null ? 
F.view(keys, F0.notIn(skipped)) : keys; if (log.isDebugEnabled()) log.debug("Lock keys: " + passedKeys); return obtainLockAsync(cacheCtx, ret, passedKeys, read, needRetVal, createTtl, accessTtl, skipStore, keepBinary); } catch (IgniteCheckedException e) { setRollbackOnly(); return new GridFinishedFuture<>(e); } } /** * @param cacheCtx Context. * @param ret Return value. * @param passedKeys Passed keys. * @param read {@code True} if read. * @param needRetVal Return value flag. * @param createTtl TTL for create operation. * @param accessTtl TTL for read operation. * @param skipStore Skip store flag. * @return Future for lock acquisition. */ private IgniteInternalFuture<GridCacheReturn> obtainLockAsync( final GridCacheContext cacheCtx, GridCacheReturn ret, final Collection<KeyCacheObject> passedKeys, final boolean read, final boolean needRetVal, final long createTtl, final long accessTtl, boolean skipStore, boolean keepBinary) { if (log.isDebugEnabled()) log.debug("Before acquiring transaction lock on keys [keys=" + passedKeys + ']'); if (passedKeys.isEmpty()) return new GridFinishedFuture<>(ret); GridDhtTransactionalCacheAdapter<?, ?> dhtCache = cacheCtx.isNear() ? 
cacheCtx.nearTx().dht() : cacheCtx.dhtTx(); long timeout = remainingTime(); if (timeout == -1) return new GridFinishedFuture<>(timeoutException()); IgniteInternalFuture<Boolean> fut = dhtCache.lockAllAsyncInternal(passedKeys, timeout, this, isInvalidate(), read, needRetVal, isolation, createTtl, accessTtl, CU.empty0(), skipStore, keepBinary); return new GridEmbeddedFuture<>( fut, new PLC1<GridCacheReturn>(ret) { @Override protected GridCacheReturn postLock(GridCacheReturn ret) throws IgniteCheckedException { if (log.isDebugEnabled()) log.debug("Acquired transaction lock on keys: " + passedKeys); postLockWrite(cacheCtx, passedKeys, ret, /*remove*/false, /*retval*/false, /*read*/read, accessTtl, CU.empty0(), /*computeInvoke*/false); return ret; } } ); } /** {@inheritDoc} */ @SuppressWarnings({"CatchGenericClass", "ThrowableInstanceNeverThrown"}) @Override public boolean localFinish(boolean commit, boolean clearThreadMap) throws IgniteCheckedException { if (log.isDebugEnabled()) log.debug("Finishing dht local tx [tx=" + this + ", commit=" + commit + "]"); if (optimistic()) state(PREPARED); if (commit) { if (!state(COMMITTING)) { TransactionState state = state(); if (state != COMMITTING && state != COMMITTED) throw new IgniteCheckedException("Invalid transaction state for commit [state=" + state() + ", tx=" + this + ']'); else { if (log.isDebugEnabled()) log.debug("Invalid transaction state for commit (another thread is committing): " + this); return false; } } } else { if (!state(ROLLING_BACK)) { if (log.isDebugEnabled()) log.debug("Invalid transaction state for rollback [state=" + state() + ", tx=" + this + ']'); return false; } } IgniteCheckedException err = null; // Commit to DB first. This way if there is a failure, transaction // won't be committed. try { if (commit && !isRollbackOnly()) userCommit(); else userRollback(clearThreadMap); } catch (IgniteCheckedException e) { err = e; commit = false; // If heuristic error. 
if (!isRollbackOnly()) { systemInvalidate(true); U.warn(log, "Set transaction invalidation flag to true due to error [tx=" + CU.txString(this) + ", err=" + err + ']'); } } if (err != null) { state(UNKNOWN); throw err; } else { // Committed state will be set in finish future onDone callback. if (commit) { if (!onePhaseCommit()) { if (!state(COMMITTED)) { state(UNKNOWN); throw new IgniteCheckedException("Invalid transaction state for commit: " + this); } } } else { if (!state(ROLLED_BACK)) { state(UNKNOWN); throw new IgniteCheckedException("Invalid transaction state for rollback: " + this); } } } return true; } /** * Removes previously created prepare future from atomic reference. * * @param fut Expected future. */ protected abstract void clearPrepareFuture(GridDhtTxPrepareFuture fut); /** * @return {@code True} if transaction is finished on prepare step. */ public final boolean commitOnPrepare() { return onePhaseCommit() && !near() && !nearOnOriginatingNode; } /** * @param prepFut Prepare future. * @return If transaction if finished on prepare step returns future which is completed after transaction finish. */ @SuppressWarnings("TypeMayBeWeakened") protected final IgniteInternalFuture<GridNearTxPrepareResponse> chainOnePhasePrepare( final GridDhtTxPrepareFuture prepFut) { if (commitOnPrepare()) { return finishFuture().chain(new CX1<IgniteInternalFuture<IgniteInternalTx>, GridNearTxPrepareResponse>() { @Override public GridNearTxPrepareResponse applyx(IgniteInternalFuture<IgniteInternalTx> finishFut) throws IgniteCheckedException { return prepFut.get(); } }); } return prepFut; } /** {@inheritDoc} */ @Override public String toString() { return GridToStringBuilder.toString(GridDhtTxLocalAdapter.class, this, "nearNodes", nearMap.keySet(), "dhtNodes", dhtMap.keySet(), "explicitLock", explicitLock, "super", super.toString()); } }
apache-2.0
btcontract/wallet
app/src/main/java/immortan/sqlite/SQLiteLNUrlPay.scala
2448
package immortan.sqlite import java.lang.{Long => JLong} import immortan.utils.ImplicitJsonFormats._ import immortan.{ChannelMaster, LNUrlDescription, LNUrlPayLink} import spray.json._ class SQLiteLNUrlPay(db: DBInterface) { def updDescription(description: LNUrlDescription, domain: String, pay: String): Unit = db txWrap { val updateDescriptionSqlPQ = db.makePreparedQuery(LNUrlPayTable.updateDescriptionSql) db.change(updateDescriptionSqlPQ, description.toJson.compactPrint, pay) for (label <- description.label) addSearchableLink(label, domain) ChannelMaster.next(ChannelMaster.payMarketDbStream) updateDescriptionSqlPQ.close } def remove(pay: String): Unit = { db.change(LNUrlPayTable.killSql, pay) ChannelMaster.next(ChannelMaster.payMarketDbStream) } def saveLink(info: LNUrlPayLink): Unit = db txWrap { val descriptionString = info.description.toJson.compactPrint val updInfoSqlPQ = db.makePreparedQuery(LNUrlPayTable.updInfoSql) val newSqlPQ = db.makePreparedQuery(LNUrlPayTable.newSql) db.change(newSqlPQ, info.domain, info.payString, info.payMetaString, info.updatedAt: JLong, descriptionString, info.lastNodeIdString, info.lastCommentString) db.change(updInfoSqlPQ, info.payMetaString, info.updatedAt: JLong, descriptionString, info.lastNodeIdString, info.lastCommentString, info.payString) addSearchableLink(info.payMetaData.get.queryText(info.domain), info.domain) ChannelMaster.next(ChannelMaster.payMarketDbStream) updInfoSqlPQ.close newSqlPQ.close } def addSearchableLink(search: String, domain: String): Unit = { val newVirtualSqlPQ = db.makePreparedQuery(LNUrlPayTable.newVirtualSql) db.change(newVirtualSqlPQ, search.toLowerCase, domain) newVirtualSqlPQ.close } def searchLinks(rawSearchQuery: String): RichCursor = db.search(LNUrlPayTable.searchSql, rawSearchQuery.toLowerCase) def listRecentLinks(limit: Int): RichCursor = db.select(LNUrlPayTable.selectRecentSql, limit.toString) def toLinkInfo(rc: RichCursor): LNUrlPayLink = LNUrlPayLink(domain = rc string 
LNUrlPayTable.domain, payString = rc string LNUrlPayTable.pay, payMetaString = rc string LNUrlPayTable.payMeta, updatedAt = rc long LNUrlPayTable.updatedAt, description = to[LNUrlDescription](rc string LNUrlPayTable.description), lastNodeIdString = rc string LNUrlPayTable.lastNodeId, lastCommentString = rc string LNUrlPayTable.lastComment) }
apache-2.0
deleet/couchbase-lite-java-core
src/main/java/com/couchbase/lite/support/RemoteRequestCompletionBlock.java
148
package com.couchbase.lite.support; public interface RemoteRequestCompletionBlock { public void onCompletion(Object result, Throwable e); }
apache-2.0
synyx/minos
archetypes/application/src/main/resources/archetype-resources/src/main/java/SampleLifecycle.java
1484
#set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
package ${package};

import org.springframework.core.annotation.Order;
import org.springframework.util.Assert;
import org.synyx.minos.core.domain.Role;
import org.synyx.minos.core.domain.User;
import org.synyx.minos.core.module.ModuleLifecycleException;
import org.synyx.minos.core.module.SimpleNoOpLifecycle;
import org.synyx.minos.umt.service.UserManagement;

import ${package}.dao.ItemDao;

/**
 * Sample module lifecycle (Maven archetype template; ${package} is substituted
 * at project generation time). Runs first among lifecycles due to {@code @Order(0)}
 * and seeds roles/users on module installation.
 */
@Order(0)
public class SampleLifecycle extends SimpleNoOpLifecycle {

    // Collaborators injected via constructor; only used during install().
    private ItemDao itemDao;

    private UserManagement userManagement;

    /**
     * Creates the lifecycle with its required collaborators.
     */
    public SampleLifecycle(ItemDao itemDao, UserManagement userManagement) {

        this.itemDao = itemDao;
        this.userManagement = userManagement;
    }

    /**
     * Installation hook: grants admins the ITEMS_DELETE permission and creates
     * a default non-admin user if none exists yet.
     *
     * @throws ModuleLifecycleException on installation failure
     */
    @Override
    public void install() throws ModuleLifecycleException {

        Assert.notNull(itemDao, "We need an item dao to work");

        // We can ensure that admins always have the ITEMS_DELETE permission
        Role adminRole = userManagement.getRole("ADMIN");
        adminRole.add(SamplePermissions.ITEMS_DELETE);
        userManagement.save(adminRole);

        // ... or create other non-admin users
        if (userManagement.getUser("user") == null) {

            User user = new User("user", "user@example.com", "user");
            Role userRole = userManagement.getRole("USER");
            user.addRole(userRole);
            userManagement.save(user);
        }
    }
}
apache-2.0
ms123s/simpl4-src
bundles/exporting/src/main/java/org/ms123/common/exporting/BaseExportingServiceImpl.java
7582
/**
 * This file is part of SIMPL4(http://simpl4.org).
 *
 * Copyright [2017] [Manfred Sattler] <manfred@ms123.org>
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.ms123.common.exporting;

import flexjson.JSONDeserializer;
import flexjson.JSONSerializer;
import java.io.*;
import java.text.SimpleDateFormat;
import java.util.*;
import javax.jdo.Extent;
import javax.jdo.JDOObjectNotFoundException;
import javax.jdo.PersistenceManager;
import javax.jdo.Query;
import javax.transaction.UserTransaction;
import javax.xml.transform.stream.StreamSource;
import net.sf.sojo.common.*;
import net.sf.sojo.core.*;
import org.apache.commons.beanutils.PropertyUtils;
import org.apache.tika.Tika;
import org.milyn.container.*;
import org.milyn.*;
import org.milyn.payload.*;
import org.milyn.Smooks;
import org.milyn.SmooksFactory;
import com.Ostermiller.util.*;
import javax.servlet.http.*;
import javax.servlet.ServletOutputStream;
import javax.xml.transform.stream.*;
import org.ms123.common.data.api.DataLayer;
import org.ms123.common.data.api.SessionContext;
import org.ms123.common.entity.api.EntityService;
import org.ms123.common.store.StoreDesc;
import org.ms123.common.permission.api.PermissionException;
import org.ms123.common.libhelper.Inflector;
import org.ms123.common.permission.api.PermissionService;
import org.ms123.common.nucleus.api.NucleusService;
import org.ms123.common.reporting.ReportingService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * BaseExportingService implementation: base class providing entity exports as
 * XML (via Smooks) and tabular reports (CSV/PDF/XLS/HTML via ReportingService),
 * streamed directly into an HttpServletResponse.
 */
@SuppressWarnings("unchecked")
public class BaseExportingServiceImpl implements Constants {

	private static final Logger m_logger = LoggerFactory.getLogger(BaseExportingServiceImpl.class);

	protected Inflector m_inflector = Inflector.getInstance();

	// Collaborators; presumably injected by the OSGi container — confirm wiring.
	protected DataLayer m_dataLayer;

	protected PermissionService m_permissionService;

	protected EntityService m_entityService;

	protected SmooksFactory m_smooksFactory;

	protected NucleusService m_nucleusService;

	protected ReportingService m_reportingService;

	protected JSONDeserializer m_ds = new JSONDeserializer();

	private ObjectUtil m_objUtils = new ObjectUtil();

	protected JSONSerializer m_js = new JSONSerializer();

	public BaseExportingServiceImpl() {
	}

	/**
	 * Exports the result of an entity query as an XML attachment using Smooks.
	 * Swaps the thread-context classloader to the datastore's classloader for
	 * the duration of the query/serialization and restores it in the finally
	 * block, together with closing the Smooks instance.
	 *
	 * @param storeId    identifies the target StoreDesc
	 * @param entityName entity to query
	 * @param filters    query filter map passed to the data layer
	 * @param options    export options; "filename" names the attachment,
	 *                   "withNullValues" is forwarded to the Smooks context
	 * @param response   servlet response the XML is streamed into
	 * @return an empty map (return value carries no information)
	 * @throws Exception on query or serialization failure
	 */
	public Map smooksExport(String storeId, String entityName, Map filters, Map options, HttpServletResponse response) throws Exception {
		StoreDesc sdesc = StoreDesc.get(storeId);
		response.setContentType("application/xml;charset=UTF-8");
		Map filtersMap = filters;
		response.addHeader("Content-Disposition", "attachment;filename=\"" + options.get("filename") + "\"");
		Smooks smooks = m_smooksFactory.createInstance();
		ClassLoader previous = Thread.currentThread().getContextClassLoader();
		Thread.currentThread().setContextClassLoader(m_nucleusService.getClassLoader(sdesc));
		// NOTE(review): debug output goes to stdout rather than m_logger.
		System.out.println("filtersMap:" + filtersMap);
		System.out.println("mainEntity:" + entityName);
		System.out.println("options:" + options);
		try {
			SessionContext sessionContext = m_dataLayer.getSessionContext(sdesc);
			List result = sessionContext.query(entityName, filtersMap);
			System.out.println("result:" + result);
			Class clazz = m_nucleusService.getClass(sdesc, m_inflector.getClassName(entityName));
			List nList = new ArrayList();
			nList.addAll(result);
			// Feed the query result to Smooks as Java objects via BeanReader.
			GenericReaderConfigurator grc = new GenericReaderConfigurator(BeanReader.class);
			smooks.setReaderConfig(grc);
			ExecutionContext executionContext = smooks.createExecutionContext();
			executionContext.setAttribute("sessionContext", sessionContext);
			executionContext.setAttribute("moduleName", entityName);
			executionContext.setAttribute("withNullValues", options.get("withNullValues"));
			JavaSource source = new JavaSource("result", nList);
			ServletOutputStream outputStream = response.getOutputStream();
			// XML declaration is written manually; Smooks emits the document body.
			outputStream.println("<?xml version=\"1.0\" encoding=\"UTF-8\"?>");
			smooks.filterSource(executionContext, source, new StreamResult(outputStream));
			return new HashMap();
		} finally {
			smooks.close();
			Thread.currentThread().setContextClassLoader(previous);
		}
	}

	/**
	 * Renders the given rows as a report in the requested format and streams it
	 * into the response. CSV is handled separately from pdf/xls/html, which are
	 * delegated to ReportingService.generateReport.
	 *
	 * @param rows     data rows to render
	 * @param sdesc    datastore descriptor used to resolve field metadata
	 * @param entity   entity name the fields belong to
	 * @param format   "csv", "pdf", "xls" or "html"
	 * @param soptions optional rendering options; CSV defaults are used if null
	 * @param fields   field names to include; must be non-empty
	 * @param aliases  optional column aliases
	 * @param user     requesting user (currently unused in this method body)
	 * @param response servlet response receiving the rendered report
	 * @return the value returned by the delegated ReportingService call
	 * @throws Exception if fields is empty or rendering fails
	 */
	protected String createReport(List rows, StoreDesc sdesc, String entity, String format, Map soptions, List fields, List aliases, String user, HttpServletResponse response) throws Exception {
		Map options = null;
		if (soptions != null) {
			options = soptions;
		} else {
			// Defaults for CSV rendering when no options were supplied.
			options = new HashMap();
			options.put("rowDelim", "UNIX");
			options.put("columnDelim", ",");
			options.put("quote", "\"");
			options.put("alwaysQuote", false);
		}
		List aliasesArray = null;
		if (aliases != null && aliases.size() > 0) {
			aliasesArray = aliases;
		} else {
			aliasesArray = new ArrayList();
		}
		List fieldsArray = null;
		if (fields != null && fields.size() > 0) {
			fieldsArray = fields;
		} else {
			throw new Exception("DataService.createDownloadFile:fieldsArray_is_empty");
		}
		System.out.println("fields:" + fields);
		System.out.println("aliases:" + aliases);
		System.out.println("fieldsArray:" + fieldsArray);
		System.out.println("aliasesArray:" + aliasesArray);
		Map configForFieldsArray = getConfigForFieldsArray(sdesc, entity, fieldsArray);
		if (!format.equals("csv")) {
			String filename = (String) options.get("filename");
			// Content type depends on the requested binary/report format.
			if (format.equals("pdf")) {
				response.setContentType("application/x-pdf");
				response.addHeader("Content-Disposition", "inline;filename=" + filename);
			} else if (format.equals("xls")) {
				response.setContentType("application/msexcel");
				response.addHeader("Content-Disposition", "inline;filename=" + filename);
			} else if (format.equals("html")) {
				response.setContentType("text/html");
				response.addHeader("Content-Disposition", "inline;filename=" + filename);
			}
			String retx = m_reportingService.generateReport(rows, fieldsArray, aliasesArray, configForFieldsArray, format, options, response.getOutputStream());
			response.flushBuffer();
			return retx;
		} else {
			response.setContentType("text/csv;charset=UTF-8");
			response.addHeader("Content-Disposition", "inline;filename=" + options.get("filename"));
			return m_reportingService.createCSV(rows, fieldsArray, aliasesArray, configForFieldsArray, options, response.getOutputStream());
		}
	}

	/**
	 * Resolves per-field configuration (permitted-field metadata) for each
	 * requested field. Dotted names ("relation.field") are resolved against the
	 * related entity's metadata; plain names against the given entity.
	 */
	protected Map getConfigForFieldsArray(StoreDesc sdesc, String entityName, List<String> fieldsArray) {
		Map retMap = new HashMap();
		for (String field : fieldsArray) {
			int dot = field.indexOf(".");
			if (dot != -1) {
				String[] name = field.split("\\.");
				String mname = name[0];
				String fname = name[1];
				// Translate the relation path segment into its entity name.
				mname = org.ms123.common.utils.TypeUtils.getEntityForPath(m_nucleusService, sdesc, mname);
				Map _configMap = m_entityService.getPermittedFields(sdesc, mname);
				retMap.put(field, _configMap.get(fname));
			} else {
				Map _configMap = m_entityService.getPermittedFields(sdesc, entityName);
				retMap.put(field, _configMap.get(field));
			}
		}
		return retMap;
	}

	/** Returns the user name bound to the current thread context. */
	protected String getUserName() {
		return org.ms123.common.system.thread.ThreadContext.getThreadContext().getUserName();
	}
}
apache-2.0
fjoglar/ETSIT-News
app/src/main/java/com/fjoglar/etsitnoticias/presenter/NewsListPresenter.java
8865
/*
 * Copyright (C) 2016 Felipe Joglar Santos
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.fjoglar.etsitnoticias.presenter;

import android.support.annotation.NonNull;

import com.fjoglar.etsitnoticias.R;
import com.fjoglar.etsitnoticias.domain.UseCase;
import com.fjoglar.etsitnoticias.domain.UseCaseHandler;
import com.fjoglar.etsitnoticias.domain.usecase.DeleteBookmark;
import com.fjoglar.etsitnoticias.domain.usecase.GetAllNews;
import com.fjoglar.etsitnoticias.domain.usecase.GetFilteredNews;
import com.fjoglar.etsitnoticias.domain.usecase.SaveBookmark;
import com.fjoglar.etsitnoticias.domain.usecase.UpdateNews;
import com.fjoglar.etsitnoticias.data.entities.Category;
import com.fjoglar.etsitnoticias.data.entities.NewsItem;
import com.fjoglar.etsitnoticias.data.repository.NewsRepository;
import com.fjoglar.etsitnoticias.data.repository.datasource.NewsSharedPreferences;
import com.fjoglar.etsitnoticias.presenter.contracts.NewsListContract;
import com.fjoglar.etsitnoticias.utils.CategoryUtils;
import com.fjoglar.etsitnoticias.utils.DateUtils;

import java.util.List;

/**
 * MVP presenter for the news-list screen. Orchestrates use cases (load,
 * filter, refresh, bookmark) through the shared UseCaseHandler and pushes
 * results to the attached NewsListContract.View.
 */
public class NewsListPresenter implements NewsListContract.Presenter {

    private NewsListContract.View mNewsListView;
    private final UseCaseHandler mUseCaseHandler;

    // Tracks whether an update (refresh) is in flight so a re-attached view
    // can restore the "updating" indicator.
    private boolean mUpdateStatus;

    public NewsListPresenter(@NonNull NewsListContract.View newsListView) {
        mNewsListView = newsListView;
        mUseCaseHandler = UseCaseHandler.getInstance();
        mNewsListView.setPresenter(this);
    }

    /**
     * Loads news for display: all news when every category filter is active,
     * otherwise only news matching the active categories. Triggers a remote
     * update on first start (unfiltered path only).
     */
    @Override
    public void getNews() {
        mNewsListView.showProgress();
        if (CategoryUtils.areAllCategoriesActive()) {
            GetAllNews getAllNews = new GetAllNews(NewsRepository.getInstance());
            mUseCaseHandler.execute(getAllNews, new GetAllNews.RequestValues(),
                    new UseCase.UseCaseCallback<GetAllNews.ResponseValue>() {
                        @Override
                        public void onSuccess(GetAllNews.ResponseValue response) {
                            mNewsListView.showNews(response.getNewsItemList());
                            mNewsListView.hideProgress();
                            checkForErrors(response.getNewsItemList());
                            updateIfNeeded();
                        }

                        @Override
                        public void onError() {
                            mNewsListView.hideProgress();
                            mNewsListView.showError();
                            updateIfNeeded();
                        }
                    });
        } else {
            GetFilteredNews getFilteredNews = new GetFilteredNews(NewsRepository.getInstance());
            mUseCaseHandler.execute(getFilteredNews,
                    new GetFilteredNews.RequestValues(CategoryUtils.getActiveCategories()),
                    new UseCase.UseCaseCallback<GetFilteredNews.ResponseValue>() {
                        @Override
                        public void onSuccess(GetFilteredNews.ResponseValue response) {
                            mNewsListView.showNews(response.getNewsItemFilteredList());
                            mNewsListView.hideProgress();
                            checkForErrors(response.getNewsItemFilteredList());
                        }

                        @Override
                        public void onError() {
                            mNewsListView.hideProgress();
                            mNewsListView.showError();
                        }
                    });
        }
    }

    /**
     * Refreshes news from the remote source; on success reloads the list,
     * records the refresh time in SharedPreferences and shows it in the view.
     * On failure tells the view there is no connectivity.
     */
    @Override
    public void updateNews() {
        mNewsListView.showUpdating();
        mUpdateStatus = true;
        UpdateNews updateNews = new UpdateNews(NewsRepository.getInstance());
        mUseCaseHandler.execute(updateNews, new UpdateNews.RequestValues(),
                new UseCase.UseCaseCallback<UpdateNews.ResponseValue>() {
                    @Override
                    public void onSuccess(UpdateNews.ResponseValue response) {
                        getNews();
                        mNewsListView.hideUpdating();
                        mUpdateStatus = false;
                        // Update last updated time in SharedPreferences.
                        NewsSharedPreferences newsSharedPreferences =
                                NewsSharedPreferences.getInstance();
                        newsSharedPreferences.putLong(
                                newsSharedPreferences.getStringFromResId(R.string.pref_last_updated_key),
                                System.currentTimeMillis());
                        showLastUpdateTime();
                    }

                    @Override
                    public void onError() {
                        mNewsListView.hideUpdating();
                        mUpdateStatus = false;
                        mNewsListView.showNoInternetMsg();
                    }
                });
    }

    /**
     * Toggles the tapped category filter, reloads the news list and refreshes
     * the filter UI.
     */
    @Override
    public void filterItemClicked(List<Category> categoryList, int position) {
        CategoryUtils.updateCategoryFilterStatus(categoryList.get(position));
        getNews();
        mNewsListView.updateFilterList();
    }

    /**
     * Saves or deletes a bookmark depending on the item's current state
     * (bookmarked == 0 means "not bookmarked yet").
     */
    @Override
    public void manageBookmark(NewsItem newsItem) {
        mNewsListView.showProgress();
        if (newsItem.getBookmarked() == 0) {
            SaveBookmark saveBookmark = new SaveBookmark(NewsRepository.getInstance());
            mUseCaseHandler.execute(saveBookmark,
                    new SaveBookmark.RequestValues(newsItem),
                    new UseCase.UseCaseCallback<SaveBookmark.ResponseValue>() {
                        @Override
                        public void onSuccess(SaveBookmark.ResponseValue response) {
                            mNewsListView.hideProgress();
                            mNewsListView.showMessage("Favorito guardado");
                        }

                        @Override
                        public void onError() {
                            mNewsListView.hideProgress();
                        }
                    });
        } else {
            DeleteBookmark deleteBookmark = new DeleteBookmark(NewsRepository.getInstance());
            mUseCaseHandler.execute(deleteBookmark,
                    new DeleteBookmark.RequestValues(newsItem.getFormattedPubDate()),
                    new UseCase.UseCaseCallback<DeleteBookmark.ResponseValue>() {
                        @Override
                        public void onSuccess(DeleteBookmark.ResponseValue response) {
                            mNewsListView.hideProgress();
                            mNewsListView.showMessage("Favorito borrado");
                        }

                        @Override
                        public void onError() {
                            mNewsListView.hideProgress();
                        }
                    });
        }
    }

    /** Re-attaches a view instance (e.g. after configuration change). */
    @Override
    public void setView(NewsListContract.View newsListView) {
        mNewsListView = newsListView;
    }

    /** Entry point called when the view becomes active. */
    @Override
    public void start() {
        checkUpdatingStatus();
        showLastUpdateTime();
        getNews();
    }

    // Performs a one-time initial refresh, gated by a "first start" flag
    // persisted in SharedPreferences.
    private void updateIfNeeded() {
        NewsSharedPreferences newsSharedPreferences = NewsSharedPreferences.getInstance();
        boolean firstStart = newsSharedPreferences.getBoolean(
                newsSharedPreferences.getStringFromResId(R.string.pref_first_start_key),
                true);
        if (firstStart) {
            updateNews();
            newsSharedPreferences.putBoolean(
                    newsSharedPreferences.getStringFromResId(R.string.pref_first_start_key),
                    false);
        }
    }

    // Shows the error view when the loaded list is null or empty.
    private void checkForErrors(List<NewsItem> newsItemList) {
        if (newsItemList == null || newsItemList.size() == 0) {
            mNewsListView.showError();
        }
    }

    // Restores the "updating" indicator if a refresh was already in flight.
    private void checkUpdatingStatus() {
        if (mUpdateStatus) {
            mNewsListView.showUpdating();
        }
    }

    // Reads the persisted last-refresh timestamp and displays it formatted.
    private void showLastUpdateTime() {
        NewsSharedPreferences newsSharedPreferences = NewsSharedPreferences.getInstance();
        long lastUpdateTimeinMillis = newsSharedPreferences.getLong(
                newsSharedPreferences.getStringFromResId(R.string.pref_last_updated_key), 0L);
        mNewsListView.showLastUpdateTime(DateUtils.formatLastUpdateTime(lastUpdateTimeinMillis));
    }
}
apache-2.0
alien11689/sputnik
src/test/java/pl/touk/sputnik/connector/gerrit/GerritFacadeExceptionTest.java
1166
package pl.touk.sputnik.connector.gerrit;

import com.google.gerrit.extensions.api.GerritApi;
import com.google.gerrit.extensions.api.changes.Changes;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;

import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.catchThrowable;
import static org.mockito.Mockito.when;

/**
 * Verifies that GerritFacade wraps low-level connector failures in its own
 * GerritException rather than letting raw runtime exceptions escape.
 */
@ExtendWith(MockitoExtension.class)
class GerritFacadeExceptionTest {

    @Mock
    private GerritApi gerritApi;
    @Mock
    private Changes changes;

    @Test
    void shouldWrapConnectorException() throws Exception {
        // Stub the API so that looking up change "foo" fails like a network error.
        when(gerritApi.changes()).thenReturn(changes);
        when(changes.id("foo")).thenThrow(new RuntimeException("Connection refused"));
        GerritFacade gerritFacade = new GerritFacade(gerritApi, new GerritPatchset("foo", "bar"));

        Throwable thrown = catchThrowable(gerritFacade::listFiles);

        // The facade must translate the failure into its domain exception
        // with a descriptive message.
        assertThat(thrown)
                .isInstanceOf(GerritException.class)
                .hasMessageContaining("Error when listing files");
    }
}
apache-2.0
vespa-engine/vespa
document/src/main/java/com/yahoo/vespaxmlparser/DocumentUpdateFeedOperation.java
739
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root. package com.yahoo.vespaxmlparser; import com.yahoo.document.DocumentUpdate; import com.yahoo.document.TestAndSetCondition; public class DocumentUpdateFeedOperation extends ConditionalFeedOperation { private final DocumentUpdate update; public DocumentUpdateFeedOperation(DocumentUpdate update) { super(Type.UPDATE); this.update = update; } public DocumentUpdateFeedOperation(DocumentUpdate update, TestAndSetCondition condition) { super(Type.UPDATE, condition); this.update = update; } @Override public DocumentUpdate getDocumentUpdate() { return update; } }
apache-2.0
Contezero74/cloudapp-mp2
PopularityLeague.java
8808
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.*;

/**
 * Two-stage Hadoop MapReduce driver that ranks a fixed "league" of wiki pages
 * by how often they are linked to. Job A counts inbound links for league
 * members; Job B assigns ranks (ties share the same rank).
 */
public class PopularityLeague extends Configured implements Tool {
    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(), new PopularityLeague(), args);
        System.exit(res);
    }

    /**
     * Reads an entire HDFS file into a newline-joined string.
     * NOTE(review): the reader/stream are not closed — confirm acceptable for
     * this assignment context.
     */
    public static String readHDFSFile(String path, Configuration conf) throws IOException{
        Path pt=new Path(path);
        FileSystem fs = FileSystem.get(pt.toUri(), conf);
        FSDataInputStream file = fs.open(pt);
        BufferedReader buffIn=new BufferedReader(new InputStreamReader(file));

        StringBuilder everything = new StringBuilder();
        String line;
        while( (line = buffIn.readLine()) != null) {
            everything.append(line);
            everything.append("\n");
        }
        return everything.toString();
    }

    /** ArrayWritable specialization so Hadoop can (de)serialize int arrays. */
    public static class IntArrayWritable extends ArrayWritable {
        public IntArrayWritable() {
            super(IntWritable.class);
        }

        public IntArrayWritable(Integer[] numbers) {
            super(IntWritable.class);
            IntWritable[] ints = new IntWritable[numbers.length];
            for (int i = 0; i < numbers.length; i++) {
                ints[i] = new IntWritable(numbers[i]);
            }
            set(ints);
        }
    }

    /**
     * Configures and chains the two jobs; Job A's output lands in a temporary
     * path consumed by Job B.
     */
    @Override
    public int run(String[] args) throws Exception {
        Configuration conf = this.getConf();
        FileSystem fs = FileSystem.get(conf);
        Path tmpPath = new Path("/mp2/tmp");
        // Remove stale intermediate output from previous runs.
        fs.delete(tmpPath, true);

        Job jobA = Job.getInstance(conf, "Popularity League Count");
        jobA.setOutputKeyClass(IntWritable.class);
        jobA.setOutputValueClass(IntWritable.class);

        jobA.setMapperClass(LinkCountMap.class);
        jobA.setReducerClass(LinkCountReduce.class);

        FileInputFormat.setInputPaths(jobA, new Path(args[0]));
        FileOutputFormat.setOutputPath(jobA, tmpPath);

        jobA.setJarByClass(PopularityLeague.class);
        jobA.waitForCompletion(true);

        Job jobB = Job.getInstance(conf, "Popularity League Rank");
        jobB.setOutputKeyClass(IntWritable.class);
        jobB.setOutputValueClass(IntWritable.class);

        jobB.setMapOutputKeyClass(NullWritable.class);
        jobB.setMapOutputValueClass(IntArrayWritable.class);

        jobB.setMapperClass(LinkRankMap.class);
        jobB.setReducerClass(LinkRankReduce.class);
        // Single reducer so ranking sees the global ordering.
        jobB.setNumReduceTasks(1);

        FileInputFormat.setInputPaths(jobB, tmpPath);
        FileOutputFormat.setOutputPath(jobB, new Path(args[1]));

        jobB.setInputFormatClass(KeyValueTextInputFormat.class);
        jobB.setOutputFormatClass(TextOutputFormat.class);

        jobB.setJarByClass(PopularityLeague.class);
        return jobB.waitForCompletion(true) ? 0 : 1;
    }

    /**
     * Mapper for Job A: for each "page: link link ..." input line, emits
     * (linkId, 1) for every linked page that belongs to the league file
     * (path supplied via the "league" configuration key).
     */
    public static class LinkCountMap extends Mapper<Object, Text, IntWritable, IntWritable> {
        List<String> pageIds;

        @Override
        protected void setup(Context context) throws IOException,InterruptedException {
            Configuration conf = context.getConfiguration();
            String pageIdsPath = conf.get("league");
            pageIds = Arrays.asList(readHDFSFile(pageIdsPath, conf).split("\n"));
        }

        @Override
        public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
            final String line = value.toString();
            final String pages[] = line.split("[:]");
            final String links[] = pages[1].split("[ ]");
            for(String l : links) {
                if (!l.trim().isEmpty() && pageIds.contains(l)) {
                    Integer linkId = Integer.parseInt(l.trim());
                    context.write(new IntWritable(linkId), new IntWritable(1));
                }
            }
        }
    }

    /** Reducer for Job A: sums the per-link counts. */
    public static class LinkCountReduce extends Reducer<IntWritable, IntWritable, IntWritable, IntWritable> {
        @Override
        public void reduce(IntWritable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
            int numLink = 0;
            for (IntWritable val : values) {
                numLink += val.get();
            }
            context.write(key, new IntWritable(numLink));
        }
    }

    /**
     * Mapper for Job B: buffers all (count, id) pairs sorted by Pair ordering
     * (count first, then id) and in cleanup emits [id, rank, count] triples.
     * Equal counts share the rank of the first pair with that count.
     */
    public static class LinkRankMap extends Mapper<Text, Text, NullWritable, IntArrayWritable> {
        // Sorted ascending by (count, id); iterated once in cleanup.
        TreeSet<Pair<Integer, Integer>> countToRankMap = new TreeSet<Pair<Integer, Integer>>();

        @Override
        public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
            Integer count = Integer.parseInt(value.toString());
            Integer id = Integer.parseInt(key.toString());

            countToRankMap.add(new Pair<Integer, Integer>(count, id));
        }

        @Override
        protected void cleanup(Context context) throws IOException, InterruptedException {
            int rank = -1;
            Integer lastScore = null;
            Integer lastRank = null;
            for (Pair<Integer, Integer> item : countToRankMap) {
                ++rank;

                LOG.info("map: <" + item.second + " : " + item.first + ">; rank: <" + rank + " : " + lastRank + ">; lastScore: " + lastScore + "; item.first ! =lastScore: " + (!item.first.equals(lastScore)));
                // Only advance the emitted rank when the score changes (ties
                // keep the previous rank).
                if (null == lastScore || !item.first.equals(lastScore)) {
                    lastScore = item.first;
                    lastRank = rank;
                }

                Integer[] integers = { item.second, lastRank, item.first };
                IntArrayWritable val = new IntArrayWritable(integers);
                context.write(NullWritable.get(), val);
            }
        }
    }

    /** Reducer for Job B: unpacks [id, rank, score] and writes (id, rank). */
    public static class LinkRankReduce extends Reducer<NullWritable, IntArrayWritable, IntWritable, IntWritable> {
        @Override
        public void reduce(NullWritable key, Iterable<IntArrayWritable> values, Context context) throws IOException, InterruptedException {
            for (IntArrayWritable val: values) {
                IntWritable[] pair = (IntWritable[]) val.toArray();
                IntWritable id = pair[0];
                IntWritable rank = pair[1];
                IntWritable score = pair[2];
                LOG.info("reduce: id: " + id.get() + "; score: " + score.get() + "; rank: " + rank.get());
                context.write(id, rank);
            }
        }
    }

    private static final Log LOG = LogFactory.getLog(PopularityLeague.class);
}

/**
 * Comparable pair ordered by first component, then second; used as the
 * TreeSet element type in LinkRankMap.
 */
class Pair<A extends Comparable<? super A>, B extends Comparable<? super B>> implements Comparable<Pair<A, B>> {
    public final A first;
    public final B second;

    public Pair(A first, B second) {
        this.first = first;
        this.second = second;
    }

    public static <A extends Comparable<? super A>, B extends Comparable<? super B>> Pair<A, B> of(A first, B second) {
        return new Pair<A, B>(first, second);
    }

    // NOTE(review): returning 1 for null instead of throwing NPE deviates from
    // the Comparable contract, but is harmless for TreeSet usage here.
    @Override
    public int compareTo(Pair<A, B> o) {
        int cmp = o == null ? 1 : (this.first).compareTo(o.first);
        return cmp == 0 ? (this.second).compareTo(o.second) : cmp;
    }

    @Override
    public int hashCode() {
        return 31 * hashcode(first) + hashcode(second);
    }

    private static int hashcode(Object o) {
        return o == null ? 0 : o.hashCode();
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof Pair))
            return false;
        if (this == obj)
            return true;
        return equal(first, ((Pair<?, ?>) obj).first) && equal(second, ((Pair<?, ?>) obj).second);
    }

    private boolean equal(Object o1, Object o2) {
        return o1 == o2 || (o1 != null && o1.equals(o2));
    }

    @Override
    public String toString() {
        return "(" + first + ", " + second + ')';
    }
}
apache-2.0
jorik041/plaso
plaso/parsers/mac_securityd.py
8868
# -*- coding: utf-8 -*-
"""This file contains the ASL securityd log plaintext parser."""

import datetime
import logging

import pyparsing

from plaso.events import time_events
from plaso.lib import eventdata
from plaso.lib import timelib
from plaso.parsers import manager
from plaso.parsers import text_parser


__author__ = 'Joaquin Moreno Garijo (Joaquin.MorenoGarijo.2013@live.rhul.ac.uk)'

# INFO:
# http://opensource.apple.com/source/Security/Security-55471/sec/securityd/


class MacSecuritydLogEvent(time_events.TimestampEvent):
  """Convenience class for a ASL securityd line event."""

  DATA_TYPE = u'mac:asl:securityd:line'

  def __init__(
      self, timestamp, structure, sender, sender_pid, security_api, caller,
      message):
    """Initializes the event object.

    Args:
      timestamp: The timestamp time value, epoch.
      structure: Structure with the parse fields.
          level: String with the text representation of the priority level.
          facility: String with the ASL facility.
      sender: String with the name of the sender.
      sender_pid: Process id of the sender.
      security_api: Securityd function name.
      caller: The caller field, a string containing two hex numbers.
      message: String with the ASL message.
    """
    super(MacSecuritydLogEvent, self).__init__(
        timestamp, eventdata.EventTimestamp.ADDED_TIME)
    self.timestamp = timestamp
    self.level = structure.level
    self.sender_pid = sender_pid
    self.facility = structure.facility
    self.sender = sender
    self.security_api = security_api
    self.caller = caller
    self.message = message


class MacSecuritydLogParser(text_parser.PyparsingSingleLineTextParser):
  """Parses the securityd file that contains logs from the security daemon."""

  NAME = u'mac_securityd'
  DESCRIPTION = u'Parser for Mac OS X securityd log files.'

  ENCODING = u'utf-8'

  # Default ASL Securityd log.
  # Grammar: "Month Day HH:MM:SS sender[pid] <level> [facility{api}caller]: msg"
  SECURITYD_LINE = (
      text_parser.PyparsingConstants.MONTH.setResultsName(u'month') +
      text_parser.PyparsingConstants.ONE_OR_TWO_DIGITS.setResultsName(u'day') +
      text_parser.PyparsingConstants.TIME.setResultsName(u'time') +
      pyparsing.CharsNotIn(u'[').setResultsName(u'sender') +
      pyparsing.Literal(u'[').suppress() +
      text_parser.PyparsingConstants.PID.setResultsName(u'sender_pid') +
      pyparsing.Literal(u']').suppress() +
      pyparsing.Literal(u'<').suppress() +
      pyparsing.CharsNotIn(u'>').setResultsName(u'level') +
      pyparsing.Literal(u'>').suppress() +
      pyparsing.Literal(u'[').suppress() +
      pyparsing.CharsNotIn(u'{').setResultsName(u'facility') +
      pyparsing.Literal(u'{').suppress() +
      pyparsing.Optional(pyparsing.CharsNotIn(
          u'}').setResultsName(u'security_api')) +
      pyparsing.Literal(u'}').suppress() +
      pyparsing.Optional(pyparsing.CharsNotIn(u']:').setResultsName(
          u'caller')) + pyparsing.Literal(u']:').suppress() +
      pyparsing.SkipTo(pyparsing.lineEnd).setResultsName(u'message'))

  # Repeated line.
  # Grammar: "Month Day HH:MM:SS --- last message repeated N time ---"
  REPEATED_LINE = (
      text_parser.PyparsingConstants.MONTH.setResultsName(u'month') +
      text_parser.PyparsingConstants.ONE_OR_TWO_DIGITS.setResultsName(u'day') +
      text_parser.PyparsingConstants.TIME.setResultsName(u'time') +
      pyparsing.Literal(u'--- last message repeated').suppress() +
      text_parser.PyparsingConstants.INTEGER.setResultsName(u'times') +
      pyparsing.Literal(u'time ---').suppress())

  # Define the available log line structures.
  LINE_STRUCTURES = [
      (u'logline', SECURITYD_LINE),
      (u'repeated', REPEATED_LINE)]

  def __init__(self):
    """Initializes a parser object."""
    super(MacSecuritydLogParser, self).__init__()
    # Year inferred for the log (securityd lines carry no year), plus state
    # for detecting a December -> January rollover between lines.
    self._year_use = 0
    self._last_month = None
    # Last fully-parsed line; reused to expand "repeated" lines.
    self.previous_structure = None

  def VerifyStructure(self, parser_mediator, line):
    """Verify that this file is a ASL securityd log file.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      line: A single line from the text file.

    Returns:
      True if this is the correct parser, False otherwise.
    """
    try:
      line = self.SECURITYD_LINE.parseString(line)
    except pyparsing.ParseException:
      logging.debug(u'Not a ASL securityd log file')
      return False
    # Check if the day, month and time is valid taking a random year.
    month = timelib.MONTH_DICT.get(line.month.lower())
    if not month:
      return False
    if self._GetTimestamp(line.day, month, 2012, line.time) == 0:
      return False
    return True

  def ParseRecord(self, parser_mediator, key, structure):
    """Parse each record structure and return an EventObject if applicable.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      key: An identification string indicating the name of the parsed
           structure.
      structure: A pyparsing.ParseResults object from a line in the
                 log file.

    Returns:
      An event object (instance of EventObject) or None.
    """
    if key in [u'logline', u'repeated']:
      return self._ParseLogLine(parser_mediator, structure, key)
    else:
      logging.warning(
          u'Unable to parse record, unknown structure: {0:s}'.format(key))

  def _ParseLogLine(self, parser_mediator, structure, key):
    """Parse a logline and store appropriate attributes.

    Args:
      parser_mediator: A parser mediator object (instance of ParserMediator).
      key: An identification string indicating the name of the parsed
           structure.
      structure: A pyparsing.ParseResults object from a line in the
                 log file.

    Returns:
      An event object (instance of EventObject) or None.
    """
    # TODO: improving this to get a valid year.
    if not self._year_use:
      self._year_use = parser_mediator.year
    if not self._year_use:
      # Get from the creation time of the file.
      self._year_use = self._GetYear(
          self.file_entry.GetStat(), parser_mediator.timezone)
      # If fail, get from the current time.
      if not self._year_use:
        self._year_use = timelib.GetCurrentYear()
    # Gap detected between years.
    month = timelib.MONTH_DICT.get(structure.month.lower())
    if not self._last_month:
      self._last_month = month
    # A month smaller than the previous one implies a year rollover.
    if month < self._last_month:
      self._year_use += 1
    timestamp = self._GetTimestamp(
        structure.day, month, self._year_use, structure.time)
    if not timestamp:
      # NOTE(review): structure.timestamp is not a parsed field of this
      # grammar — this debug message likely prints an empty value; confirm.
      logging.debug(u'Invalid timestamp {0:s}'.format(structure.timestamp))
      return
    self._last_month = month
    if key == u'logline':
      self.previous_structure = structure
      message = structure.message
    else:
      # "repeated" lines reuse the previously parsed line's fields.
      times = structure.times
      structure = self.previous_structure
      message = u'Repeated {0:d} times: {1:s}'.format(
          times, structure.message)
    # It uses CarsNotIn structure which leaves whitespaces
    # at the beginning of the sender and the caller.
    sender = structure.sender.strip()
    caller = structure.caller.strip()
    if not caller:
      caller = u'unknown'
    if not structure.security_api:
      security_api = u'unknown'
    else:
      security_api = structure.security_api
    return MacSecuritydLogEvent(
        timestamp, structure, sender, structure.sender_pid, security_api,
        caller, message)

  def _GetTimestamp(self, day, month, year, time):
    """Gets a timestamp from a pyparsing ParseResults timestamp.

    This is a timestamp_string as returned by using
    text_parser.PyparsingConstants structures:
    08, Nov, [20, 36, 37]

    Args:
      day: An integer representing the day.
      month: An integer representing the month.
      year: An integer representing the year.
      time: A list containing the hours, minutes, seconds.

    Returns:
      timestamp: A plaso timestamp.
    """
    hours, minutes, seconds = time
    return timelib.Timestamp.FromTimeParts(
        year, month, day, hours, minutes, seconds)

  def _GetYear(self, stat, zone):
    """Retrieves the year either from the input file or from the settings."""
    # Prefer the file's creation time; fall back to change time.
    time = getattr(stat, u'crtime', 0)
    if not time:
      time = getattr(stat, u'ctime', 0)
    if not time:
      current_year = timelib.GetCurrentYear()
      logging.error((
          u'Unable to determine year of log file.\nDefaulting to: '
          u'{0:d}').format(current_year))
      return current_year
    try:
      timestamp = datetime.datetime.fromtimestamp(time, zone)
    except ValueError:
      current_year = timelib.GetCurrentYear()
      logging.error((
          u'Unable to determine year of log file.\nDefaulting to: '
          u'{0:d}').format(current_year))
      return current_year
    return timestamp.year


manager.ParsersManager.RegisterParser(MacSecuritydLogParser)
apache-2.0
unibas-gravis/scalismo-faces
src/main/scala/scalismo/faces/image/filter/SeparableMorphologicalFilter.scala
2520
/*
 * Copyright University of Basel, Graphics and Vision Research Group
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package scalismo.faces.image.filter

import scalismo.faces.image.AccessMode.Repeat
import scalismo.faces.image.{ColumnMajorImageDomain, PixelImage, RowMajorImageDomain}

import scala.reflect.ClassTag

/**
 * Separable morphological filter: the 2D structuring element is the outer
 * product of a 1-row element and a 1-column element, so filtering is done as
 * two cheap 1D passes instead of one 2D pass.
 *
 * @param rowElement   1D structuring element laid out as a single row (height must be 1)
 * @param colElement   1D structuring element laid out as a single column (width must be 1)
 * @param windowFilter reduction applied to the values under the structuring element
 *                     (e.g. `_.min` for erosion, `_.max` for dilation)
 */
case class SeparableMorphologicalFilter[A: ClassTag](rowElement: PixelImage[Boolean],
                                                     colElement: PixelImage[Boolean],
                                                     windowFilter: (Seq[A]) => A) extends ImageFilter[A, A] {
  require(rowElement.height == 1 && colElement.width == 1, "Structuring elements must be 1D")

  // NOTE: the previous revision cached `rowElement.width` and `rowElement.height`
  // in private vals; `height` was always 1 by the require above (it presumably
  // meant `colElement.height`) and neither value was ever read, so both have
  // been removed.

  // One 1D pass per axis; both reuse the same window reduction.
  private val columnFilter = MorphologicalFilter[A](rowElement, windowFilter)
  private val rowFilter = MorphologicalFilter[A](colElement, windowFilter)

  /**
   * Apply both 1D passes. The pass order follows the image's memory layout;
   * the intermediate image repeats its border pixels (`Repeat`) so the second
   * pass sees well-defined values outside the domain.
   */
  override def filter(image: PixelImage[A]): PixelImage[A] = image.domain match {
    case _: ColumnMajorImageDomain => image.filter(columnFilter).withAccessMode(Repeat()).filter(rowFilter)
    case _: RowMajorImageDomain => image.filter(rowFilter).withAccessMode(Repeat()).filter(columnFilter)
  }
}

object SeparableMorphologicalFilter {

  /**
   * Create a filter whose effective 2D structuring element is
   * `el1d * el1d.t` (outer product of the given 1D element with itself).
   *
   * @param structElement1D 1D structuring element; either a single row or a single column
   */
  def apply[A: ClassTag](structElement1D: PixelImage[Boolean], windowFilter: (Seq[A]) => A): SeparableMorphologicalFilter[A] = {
    require(structElement1D.width == 1 || structElement1D.height == 1, "structuring element needs to be 1D")
    // Normalize to row orientation, then derive the column element by transposition.
    val rowElement = if (structElement1D.height == 1) structElement1D else structElement1D.transposed
    SeparableMorphologicalFilter(rowElement, rowElement.transposed, windowFilter)
  }

  /** 1D line structuring element of the given length, laid out as a single row. */
  def lineElement(size: Int): PixelImage[Boolean] = PixelImage.view(size, 1, (x, y) => x >= 0 && x < size)
}
apache-2.0
FMplus/secminer
src/sellergoodsinfo.php
3203
<?php
// Seller goods listing page: shows the logged-in seller's items with pagination.
session_start();
include_once("goods_info_class.php");

$PAGE_MAX_NUM = 10;  // items per page
$PAGE_ID = 0;        // zero-based current page
$MAX_PAGE_ID = 7;    // pagination widget collapses beyond this many pages
/*Default Page is zero!*/
if(isset($_GET['page'])){
    $PAGE_ID = (integer)($_GET['page']);
}
// Clamp negative page ids so a crafted ?page=-1 cannot produce a negative
// offset for the DB fetch below.
if($PAGE_ID < 0){
    $PAGE_ID = 0;
}

if(isset($_SESSION['id'])){
    $sid = $_SESSION['id'];
    $goods_db = new goods_info_db;
    $goods_db -> open();
    $count = $goods_db -> seller_goods_count($sid);
    $begin = $PAGE_ID*$PAGE_MAX_NUM;
    // Ceiling division: total pages needed for $count items.
    $PAGE_NUM = (integer)($count/$PAGE_MAX_NUM);
    if($count%$PAGE_MAX_NUM > 0){
        $PAGE_NUM++;
    }
    $goods_id_list = $goods_db -> fetch_sellergoods($sid,$begin,$PAGE_MAX_NUM);
    // Chinese numerals used to render the "condition" (成新) grade; index 11 is a sentinel for bad data.
    $constant = array("零","一","二","三","四","五","六","七","八","九","十","错误");
    $goods_info_list = array();
    foreach($goods_id_list as $goods_id){
        array_push($goods_info_list,$goods_db -> fetch_goods_info_asid($goods_id));
    }
    $goods_db -> close();
}else{
    die("请登录!");
}
?>
<html>
<head>
<meta charset="UTF-8"/>
<link rel="stylesheet" type="text/css" href="css/sellerinfocenter.css" >
</head>
<body>
<div id="goodsinfo">
<table width="700px" frame="box" align="center">
<tr>
<th>商品名</th>
<th>展示</th>
<th>数量</th>
<th>新旧程度</th>
<th>原价</th>
<th>现价</th>
<th>状态</th>
<th>操作</th>
</tr>
<?php /* One row per goods record. NOTE(review): DB-sourced fields are echoed
         without htmlspecialchars(); if sellers can set arbitrary names/photo
         paths this is an XSS vector — confirm and escape upstream or here. */ ?>
<?php foreach ($goods_info_list as $value){?>
<tr>
<td><a href = "goodsinfo.php?id=<?=$value->id?>" target="_top"><?= $value -> name ?></a></td>
<td><a href = "goodsinfo.php?id=<?=$value->id?>" target="_top"><img src="<?= $value -> photo[0] ?>" height="100px" width="100px"></img></a></td>
<td><?= $value -> quantity?></td>
<td><?= $constant[(integer)($value -> usingdgr)]?>成新</td>
<td><?= $value -> originalprice ?></td>
<td><?= $value -> currentprice ?></td>
<td><?= $value -> state == "onsell" ? '在售' : '下架' ?></td>
<td><a href = "updategoodsinfo.php?id=<?=$value->id?>">编辑信息</a></td>
</tr>
<?php }?>
</table>
<?php /* Pagination: literal '<' / '>' act as prev/next markers.
         When there are few pages every page number is listed; otherwise the
         first MAX_PAGE_ID-1 pages, an ellipsis, and the last page are shown. */ ?>
&lt;
<?php if($PAGE_NUM <= $MAX_PAGE_ID){
    for($i = 1;$i <= $PAGE_NUM;$i++){
        if($PAGE_ID == $i - 1){?>
<?=$i?>&nbsp;
<?php }else{?>
<a href = "sellergoodsinfo.php?page=<?= $i-1?>"><?=$i?></a>&nbsp;
<?php }
    }?>
&gt;
<?php /* was `<?` (short open tag) here — broke on servers with
         short_open_tag=Off; replaced with the full `<?php` tag. */
}else{
    for($i = 1;$i < $MAX_PAGE_ID;$i++){
        if($PAGE_ID == $i - 1){?>
<?=$i?>&nbsp;
<?php }else{?>
<a href = "sellergoodsinfo.php?page=<?= $i-1?>"><?=$i?></a>&nbsp;
<?php }
    }?>
...
<a href = "sellergoodsinfo.php?page=<?= $PAGE_NUM-1?>"><?=$PAGE_NUM?></a>&nbsp;
&gt;
<?php }?>
&nbsp;
<input type = "text" name = "pageid" id = "pageid" value = '<?=$PAGE_ID+1?>' align = 'right' size = '3'/>/<?=$PAGE_NUM?>&nbsp;
<input type = "button" onclick = "jump('sellergoodsinfo.php?page=',pageid.value - 1)" value = "Go"/>
<script language = "javascript">
// Navigate to the given zero-based page if it is within range.
function jump(page,pageid){
    max_page = <?=$PAGE_NUM?>;
    if(pageid < max_page && pageid >= 0){
        window.location.href=page+pageid;
    }
}
</script>
</div>
</body>
</html>
apache-2.0
StratusLab/use-cases
src/main/python/stratuslab_usecases/api/testBasicVmLifecycleCernVM.py
265
import unittest import BasicVmLifecycleTestBase class testBasicVmLifecycle(BasicVmLifecycleTestBase.BasicVmLifecycleTestBase): vmName = 'cernvm' timeout = 20*60 def suite(): return unittest.TestLoader().loadTestsFromTestCase(testBasicVmLifecycle)
apache-2.0
Mogztter/kafka-manager
app/kafka/manager/KafkaManagerActor.scala
25487
/**
 * Copyright 2015 Yahoo Inc. Licensed under the Apache License, Version 2.0
 * See accompanying LICENSE file.
 */
package kafka.manager

import java.nio.charset.StandardCharsets
import java.util.concurrent.{LinkedBlockingQueue, TimeUnit, ThreadPoolExecutor}
import akka.pattern._
import akka.actor.{Props, ActorPath}
import ActorModel.CMShutdown
import kafka.manager.features.ClusterFeatures
import org.apache.curator.framework.CuratorFramework
import org.apache.curator.framework.recipes.cache.{PathChildrenCacheEvent, PathChildrenCacheListener, PathChildrenCache}
import org.apache.curator.framework.recipes.cache.PathChildrenCache.StartMode
import org.apache.curator.framework.recipes.locks.InterProcessSemaphoreMutex
import org.apache.zookeeper.CreateMode
import scala.concurrent.duration.FiniteDuration
import scala.concurrent.{Future, ExecutionContext}
import scala.util.{Success, Failure, Try}
import scala.util.matching.Regex

/**
 * @author hiral
 */
// Closed set of Kafka broker versions the manager knows how to talk to.
// toString is the canonical version string used for (de)serialization and UI display.
sealed trait KafkaVersion

case object Kafka_0_8_1_1 extends KafkaVersion {
  override def toString = "0.8.1.1"
}

case object Kafka_0_8_2_0 extends KafkaVersion {
  override def toString = "0.8.2.0"
}

case object Kafka_0_8_2_1 extends KafkaVersion {
  override def toString = "0.8.2.1"
}

object KafkaVersion {
  // Accepted version strings; note "0.8.2-beta" maps onto the 0.8.2.0 handling.
  val supportedVersions: Map[String,KafkaVersion] = Map(
    "0.8.1.1" -> Kafka_0_8_1_1,
    "0.8.2-beta" -> Kafka_0_8_2_0,
    "0.8.2.0" -> Kafka_0_8_2_0,
    "0.8.2.1" -> Kafka_0_8_2_1
  )

  // (value, label) pairs for UI dropdowns; beta versions are hidden.
  val formSelectList : IndexedSeq[(String,String)] = supportedVersions.toIndexedSeq.filterNot(_._1.contains("beta")).map(t => (t._1,t._2.toString))

  /** Parse a version string; throws IllegalArgumentException for unsupported versions. */
  def apply(s: String) : KafkaVersion = {
    supportedVersions.get(s) match {
      case Some(v) => v
      case None => throw new IllegalArgumentException(s"Unsupported kafka version : $s")
    }
  }

  /** Extractor returning the canonical version string. */
  def unapply(v: KafkaVersion) : Option[String] = {
    Some(v.toString)
  }
}

/**
 * Companion for ClusterConfig: construction-time validation plus JSON
 * (de)serialization of configs stored as bytes in ZooKeeper.
 */
object ClusterConfig {
  // Character class of legal cluster-name characters (used to build `regex`).
  val legalChars = "[a-zA-Z0-9\\._\\-]"
  private val maxNameLength = 255
  val regex = new Regex(legalChars + "+")

  /**
   * Validate a cluster name: non-empty, not "." or "..", at most 255 chars,
   * and made up entirely of legal characters. Throws via require on violation.
   */
  def validateName(clusterName: String) {
    require(clusterName.length > 0, "cluster name is illegal, can't be empty")
    require(!(clusterName.equals(".") || clusterName.equals("..")), "cluster name cannot be \".\" or \"..\"")
    require(clusterName.length <= maxNameLength,"cluster name is illegal, can't be longer than " + maxNameLength + " characters")
    // The first regex match must cover the whole string, otherwise an illegal
    // character is present somewhere.
    regex.findFirstIn(clusterName) match {
      case Some(t) =>
        require(t.equals(clusterName), ("cluster name " + clusterName + " is illegal, contains a character other than ASCII alphanumerics, '.', '_' and '-'"))
      case None =>
        require(false,"cluster name " + clusterName + " is illegal, contains a character other than ASCII alphanumerics, '.', '_' and '-'")
    }
  }

  /** Validate the ZooKeeper connect string is non-empty. */
  def validateZkHosts(zkHosts: String): Unit = {
    require(zkHosts.length > 0, "cluster zk hosts is illegal, can't be empty!")
  }

  /**
   * Build a validated, enabled ClusterConfig from raw form/user input.
   * Spaces in the zk connect string are stripped.
   */
  def apply(name: String, version : String, zkHosts: String, zkMaxRetry: Int = 100,
            jmxEnabled: Boolean, filterConsumers: Boolean, logkafkaEnabled: Boolean = false,
            activeOffsetCacheEnabled: Boolean = false) : ClusterConfig = {
    val kafkaVersion = KafkaVersion(version)
    //validate cluster name
    validateName(name)
    //validate zk hosts
    validateZkHosts(zkHosts)
    val cleanZkHosts = zkHosts.replaceAll(" ","")
    new ClusterConfig(name, CuratorConfig(cleanZkHosts, zkMaxRetry), true, kafkaVersion, jmxEnabled, filterConsumers, logkafkaEnabled, activeOffsetCacheEnabled)
  }

  /** Flatten a ClusterConfig into the tuple shape expected by form mappings. */
  def customUnapply(cc: ClusterConfig) : Option[(String, String, String, Int, Boolean, Boolean, Boolean, Boolean)] = {
    Some((cc.name, cc.version.toString, cc.curatorConfig.zkConnect, cc.curatorConfig.zkMaxRetry, cc.jmxEnabled, cc.filterConsumers, cc.logkafkaEnabled, cc.activeOffsetCacheEnabled))
  }

  // JSON machinery (json4s + scalaz applicative validation) kept local to this object.
  import scalaz.{Failure,Success}
  import scalaz.syntax.applicative._
  import org.json4s._
  import org.json4s.jackson.JsonMethods._
  import org.json4s.jackson.Serialization
  import org.json4s.scalaz.JsonScalaz._
  import scala.language.reflectiveCalls

  implicit val formats = Serialization.formats(FullTypeHints(List(classOf[ClusterConfig])))

  // Writer for the nested CuratorConfig object inside a serialized ClusterConfig.
  implicit def curatorConfigJSONW: JSONW[CuratorConfig] = new JSONW[CuratorConfig] {
    def write(a: CuratorConfig) =
      makeObj(("zkConnect" -> toJSON(a.zkConnect))
        :: ("zkMaxRetry" -> toJSON(a.zkMaxRetry))
        :: ("baseSleepTimeMs" -> toJSON(a.baseSleepTimeMs))
        :: ("maxSleepTimeMs" -> toJSON(a.maxSleepTimeMs))
        :: Nil)
  }

  // Reader counterpart; all four fields are required.
  implicit def curatorConfigJSONR: JSONR[CuratorConfig] = CuratorConfig.applyJSON(
    field[String]("zkConnect"), field[Int]("zkMaxRetry"), field[Int]("baseSleepTimeMs"), field[Int]("maxSleepTimeMs"))

  /** Serialize a config to compact JSON bytes (UTF-8) for storage in ZooKeeper. */
  def serialize(config: ClusterConfig) : Array[Byte] = {
    val json = makeObj(("name" -> toJSON(config.name))
      :: ("curatorConfig" -> toJSON(config.curatorConfig))
      :: ("enabled" -> toJSON(config.enabled))
      :: ("kafkaVersion" -> toJSON(config.version.toString))
      :: ("jmxEnabled" -> toJSON(config.jmxEnabled))
      :: ("filterConsumers" -> toJSON(config.filterConsumers))
      :: ("logkafkaEnabled" -> toJSON(config.logkafkaEnabled))
      :: ("activeOffsetCacheEnabled" -> toJSON(config.activeOffsetCacheEnabled))
      :: Nil)
    compact(render(json)).getBytes(StandardCharsets.UTF_8)
  }

  /**
   * Deserialize config bytes written by `serialize`. Only name/curatorConfig/enabled
   * are mandatory; newer fields fall back to defaults so configs written by older
   * versions of the app still parse (kafkaVersion defaults to 0.8.1.1).
   */
  def deserialize(ba: Array[Byte]) : Try[ClusterConfig] = {
    Try {
      val json = parse(kafka.manager.utils.deserializeString(ba))
      val result = (field[String]("name")(json) |@| field[CuratorConfig]("curatorConfig")(json) |@| field[Boolean]("enabled")(json)) {
        (name:String,curatorConfig:CuratorConfig,enabled:Boolean) =>
          val versionString = field[String]("kafkaVersion")(json)
          val version = versionString.map(KafkaVersion.apply).getOrElse(Kafka_0_8_1_1)
          val jmxEnabled = field[Boolean]("jmxEnabled")(json)
          val filterConsumers = field[Boolean]("filterConsumers")(json)
          val logkafkaEnabled = field[Boolean]("logkafkaEnabled")(json)
          val activeOffsetCacheEnabled = field[Boolean]("activeOffsetCacheEnabled")(json)
          ClusterConfig.apply(
            name,
            curatorConfig,
            enabled,version,
            jmxEnabled.getOrElse(false),
            filterConsumers.getOrElse(true),
            logkafkaEnabled.getOrElse(false),
            activeOffsetCacheEnabled.getOrElse(false))
      }
      result match {
        case Failure(nel) => throw new IllegalArgumentException(nel.toString())
        case Success(clusterConfig) => clusterConfig
      }
    }
  }
}

// A cluster's feature set paired with its configuration.
case class ClusterContext(clusterFeatures: ClusterFeatures, config: ClusterConfig)

// Persistent per-cluster configuration; serialized to ZooKeeper by ClusterConfig.serialize.
case class ClusterConfig (name: String,
                          curatorConfig : CuratorConfig,
                          enabled: Boolean,
                          version: KafkaVersion,
                          jmxEnabled: Boolean,
                          filterConsumers: Boolean,
                          logkafkaEnabled: Boolean,
                          activeOffsetCacheEnabled: Boolean)

object KafkaManagerActor {
  // Root ZooKeeper path under which all kafka-manager state lives.
  val ZkRoot : String = "/kafka-manager"

  /** ZooKeeper path for a cluster's state subtree. */
  def getClusterPath(config: ClusterConfig) : String = s"$ZkRoot/${config.name}"
}

import scala.collection.JavaConverters._
import scala.concurrent.duration._
import ActorModel._

/** Tunables for KafkaManagerActor; defaults match the values used in production. */
case class KafkaManagerActorConfig(curatorConfig: CuratorConfig,
                                   baseZkPath : String = KafkaManagerActor.ZkRoot,
                                   pinnedDispatcherName : String = "pinned-dispatcher",
                                   brokerViewUpdatePeriod: FiniteDuration = 10 seconds,
                                   startDelayMillis: Long = 1000,
                                   threadPoolSize: Int = 2,
                                   mutexTimeoutMillis: Int = 4000,
                                   maxQueueSize: Int = 100,
                                   kafkaManagerUpdatePeriod: FiniteDuration = 10 seconds,
                                   deleteClusterUpdatePeriod: FiniteDuration = 10 seconds,
                                   deletionBatchSize : Int = 2)

/**
 * Top-level actor: watches cluster configs stored in ZooKeeper, spawns/stops one
 * ClusterManagerActor per enabled cluster, and serializes all mutating commands
 * through an inter-process ZooKeeper mutex so multiple kafka-manager instances
 * can share the same ZK tree.
 */
class KafkaManagerActor(kafkaManagerConfig: KafkaManagerActorConfig)
  extends BaseQueryCommandActor with CuratorAwareActor with BaseZkPath {

  //this is for base zk path trait
  override def baseZkPath : String = kafkaManagerConfig.baseZkPath

  //this is for curator aware actor
  override def curatorConfig: CuratorConfig = kafkaManagerConfig.curatorConfig

  // ZK subtrees: per-cluster runtime state, stored configs, and delete markers.
  private[this] val baseClusterZkPath = zkPath("clusters")
  private[this] val configsZkPath = zkPath("configs")
  private[this] val deleteClustersZkPath = zkPath("deleteClusters")

  log.info(s"zk=${kafkaManagerConfig.curatorConfig.zkConnect}")
  log.info(s"baseZkPath=$baseZkPath")

  // Ensure the ZK layout exists. Creation is wrapped in Try because the node may
  // already exist (another instance created it); the require then verifies it.
  //create kafka manager base path
  Try(curator.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(baseZkPath))
  require(curator.checkExists().forPath(baseZkPath) != null,s"Kafka manager base path not found : $baseZkPath")

  //create kafka manager base clusters path
  Try(curator.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(baseClusterZkPath))
  require(curator.checkExists().forPath(baseClusterZkPath) != null,s"Kafka manager base clusters path not found : $baseClusterZkPath")

  //create kafka manager delete clusters path
  Try(curator.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(deleteClustersZkPath))
  require(curator.checkExists().forPath(deleteClustersZkPath) != null,s"Kafka manager delete clusters path not found : $deleteClustersZkPath")

  //create kafka manager configs path
  Try(curator.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(configsZkPath))
  require(curator.checkExists().forPath(configsZkPath) != null,s"Kafka manager configs path not found : $configsZkPath")

  // Bounded executor for mutating commands; a full queue causes modify() to fail fast.
  private[this] val longRunningExecutor = new ThreadPoolExecutor(
    kafkaManagerConfig.threadPoolSize,
    kafkaManagerConfig.threadPoolSize,
    0L,
    TimeUnit.MILLISECONDS,
    new LinkedBlockingQueue[Runnable](kafkaManagerConfig.maxQueueSize))
  private[this] val longRunningExecutionContext = ExecutionContext.fromExecutor(longRunningExecutor)

  // Live cache of the stored cluster configs (data included).
  private[this] val kafkaManagerPathCache = new PathChildrenCache(curator,configsZkPath,true)

  // Cross-process mutex guarding all config mutations in ZK.
  private[this] val mutex = new InterProcessSemaphoreMutex(curator, zkPath("mutex"))

  // Child actor that performs the actual deletion of clusters marked for delete.
  private[this] val dcProps = {
    val dcConfig = DeleteClusterActorConfig(
      curator,
      deleteClustersZkPath,
      baseClusterZkPath,
      configsZkPath,
      kafkaManagerConfig.deleteClusterUpdatePeriod,
      kafkaManagerConfig.deletionBatchSize)
    Props(classOf[DeleteClusterActor],dcConfig)
  }

  private[this] val deleteClustersActor: ActorPath = context.actorOf(dcProps.withDispatcher(kafkaManagerConfig.pinnedDispatcherName),"delete-cluster").path

  // Cache of delete markers, used to reject operations on clusters pending deletion.
  private[this] val deleteClustersPathCache = new PathChildrenCache(curator,deleteClustersZkPath,true)

  // Translate ZK config-tree changes into self-messages; actual state changes
  // happen on the actor thread, never on the Curator callback thread.
  private[this] val pathCacheListener = new PathChildrenCacheListener {
    override def childEvent(client: CuratorFramework, event: PathChildrenCacheEvent): Unit = {
      log.debug(s"Got event : ${event.getType} path=${Option(event.getData).map(_.getPath)}")
      event.getType match {
        case PathChildrenCacheEvent.Type.CONNECTION_RECONNECTED =>
          self ! KMUpdateState
          self ! KMPruneClusters
        case PathChildrenCacheEvent.Type.CHILD_ADDED | PathChildrenCacheEvent.Type.CHILD_UPDATED =>
          self ! KMUpdateState
        case PathChildrenCacheEvent.Type.CHILD_REMOVED =>
          self ! KMPruneClusters
        case _ =>
        //don't care
      }
    }
  }

  private[this] var lastUpdateMillis: Long = 0L

  // Actor-confined mutable state: running managers, known configs, and configs
  // whose manager restart is still pending.
  private[this] var clusterManagerMap : Map[String,ActorPath] = Map.empty
  private[this] var clusterConfigMap : Map[String,ClusterConfig] = Map.empty
  private[this] var pendingClusterConfigMap : Map[String,ClusterConfig] = Map.empty

  /**
   * Run a mutating action on the long-running executor under the cross-process
   * mutex, piping a KMCommandResult back to the requester. Fails fast when the
   * executor queue is full.
   */
  private[this] def modify(fn: => Any) : Unit = {
    if(longRunningExecutor.getQueue.remainingCapacity() == 0) {
      Future.successful(KMCommandResult(Try(throw new UnsupportedOperationException("Long running executor blocking queue is full!"))))
    } else {
      implicit val ec = longRunningExecutionContext
      Future {
        try {
          log.debug(s"Acquiring kafka manager mutex...")
          mutex.acquire(kafkaManagerConfig.mutexTimeoutMillis,TimeUnit.MILLISECONDS)
          KMCommandResult(Try {
            fn
          })
        } finally {
          // Only release if acquire succeeded in this process.
          if(mutex.isAcquiredInThisProcess) {
            log.debug(s"Releasing kafka manger mutex...")
            mutex.release()
          }
        }
      } pipeTo sender()
    }
  }

  @scala.throws[Exception](classOf[Exception])
  override def preStart() = {
    super.preStart()
    import scala.concurrent.duration._
    log.info("Started actor %s".format(self.path))
    log.info("Starting delete clusters path cache...")
    deleteClustersPathCache.start(StartMode.BUILD_INITIAL_CACHE)
    log.info("Starting kafka manager path cache...")
    kafkaManagerPathCache.start(StartMode.BUILD_INITIAL_CACHE)
    log.info("Adding kafka manager path cache listener...")
    kafkaManagerPathCache.getListenable.addListener(pathCacheListener)
    implicit val ec = longRunningExecutionContext
    //schedule periodic forced update
    context.system.scheduler.schedule(
      Duration(kafkaManagerConfig.startDelayMillis,TimeUnit.MILLISECONDS),kafkaManagerConfig.kafkaManagerUpdatePeriod) {
      self ! KMUpdateState
    }
  }

  @scala.throws[Exception](classOf[Exception])
  override def preRestart(reason: Throwable, message: Option[Any]) {
    log.error(reason, "Restarting due to [{}] when processing [{}]", reason.getMessage, message.getOrElse(""))
    super.preRestart(reason, message)
  }

  @scala.throws[Exception](classOf[Exception])
  override def postStop(): Unit = {
    // Best-effort teardown: each step is wrapped in Try so one failure does
    // not prevent the rest of the cleanup.
    log.info("Stopped actor %s".format(self.path))
    log.info("Removing kafka manager path cache listener...")
    Try(kafkaManagerPathCache.getListenable.removeListener(pathCacheListener))
    log.info("Shutting down long running executor...")
    Try(longRunningExecutor.shutdown())
    log.info("Shutting down kafka manager path cache...")
    Try(kafkaManagerPathCache.close())
    log.info("Shutting down delete clusters path cache...")
    Try(deleteClustersPathCache.close())
    super.postStop()
  }

  override def processActorResponse(response: ActorResponse): Unit = {
    response match {
      case any: Any => log.warning("kma : processActorResponse : Received unknown message: {}", any)
    }
  }

  /** Read-only requests: answered from the in-memory maps or forwarded to the cluster manager. */
  override def processQueryRequest(request: QueryRequest): Unit = {
    request match {
      case KMGetActiveClusters =>
        sender ! KMQueryResult(clusterConfigMap.values.filter(_.enabled).toIndexedSeq)
      case KMGetAllClusters =>
        sender ! KMClusterList(clusterConfigMap.values.toIndexedSeq, pendingClusterConfigMap.values.toIndexedSeq)
      case KMGetClusterConfig(name) =>
        sender ! KMClusterConfigResult(Try {
          val cc = clusterConfigMap.get(name)
          require(cc.isDefined, s"Unknown cluster : $name")
          cc.get
        })
      case KMClusterQueryRequest(clusterName, request) =>
        clusterManagerMap.get(clusterName).fold[Unit] {
          sender ! ActorErrorResponse(s"Unknown cluster : $clusterName")
        } { clusterManagerPath:ActorPath =>
          context.actorSelection(clusterManagerPath).forward(request)
        }
      case any: Any => log.warning("kma : processQueryRequest : Received unknown message: {}", any)
    }
  }

  /**
   * Mutating commands. Each add/update/enable/disable/delete writes to ZK via
   * modify() (under the mutex); the in-memory maps catch up when the path-cache
   * listener fires KMUpdateState / KMPruneClusters.
   */
  override def processCommandRequest(request: CommandRequest): Unit = {
    request match {
      case KMAddCluster(clusterConfig) =>
        modify {
          val data: Array[Byte] = ClusterConfig.serialize(clusterConfig)
          val zkpath: String = getConfigsZkPath(clusterConfig)
          require(kafkaManagerPathCache.getCurrentData(zkpath) == null, s"Cluster already exists : ${clusterConfig.name}")
          require(deleteClustersPathCache.getCurrentData(getDeleteClusterZkPath(clusterConfig.name)) == null, s"Cluster is marked for deletion : ${clusterConfig.name}")
          log.debug(s"Creating new config node $zkpath")
          curator.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(zkpath, data)
        }
      case KMUpdateCluster(clusterConfig) =>
        modify {
          val data: Array[Byte] = ClusterConfig.serialize(clusterConfig)
          val zkpath: String = getConfigsZkPath(clusterConfig)
          require(deleteClustersPathCache.getCurrentData(getDeleteClusterZkPath(clusterConfig.name)) == null, s"Cluster is marked for deletion : ${clusterConfig.name}")
          require(kafkaManagerPathCache.getCurrentData(zkpath) != null, s"Cannot update non-existing cluster : ${clusterConfig.name}")
          curator.setData().forPath(zkpath, data)
        }
      case KMDisableCluster(clusterName) =>
        modify {
          val existingConfigOption = clusterConfigMap.get(clusterName)
          require(existingConfigOption.isDefined, s"Cannot disable non-existing cluster : $clusterName")
          require(deleteClustersPathCache.getCurrentData(getDeleteClusterZkPath(clusterName)) == null, s"Cluster is marked for deletion : $clusterName")
          for {
            existingConfig <- existingConfigOption
          } yield {
            // Persist the config with enabled=false; the watcher tears down the manager.
            val disabledConfig = existingConfig.copy(enabled = false)
            val data: Array[Byte] = ClusterConfig.serialize(disabledConfig)
            val zkpath = getConfigsZkPath(existingConfig)
            require(kafkaManagerPathCache.getCurrentData(zkpath) != null, s"Cannot disable non-existing cluster : $clusterName")
            curator.setData().forPath(zkpath, data)
          }
        }
      case KMEnableCluster(clusterName) =>
        modify {
          val existingManagerOption = clusterManagerMap.get(clusterName)
          require(existingManagerOption.isEmpty, s"Cannot enable already enabled cluster : $clusterName")
          val existingConfigOption = clusterConfigMap.get(clusterName)
          require(existingConfigOption.isDefined, s"Cannot enable non-existing cluster : $clusterName")
          require(deleteClustersPathCache.getCurrentData(getDeleteClusterZkPath(clusterName)) == null, s"Cluster is marked for deletion : $clusterName")
          for {
            existingConfig <- existingConfigOption
          } yield {
            // Persist the config with enabled=true; the watcher spawns the manager.
            val enabledConfig = existingConfig.copy(enabled = true)
            val data: Array[Byte] = ClusterConfig.serialize(enabledConfig)
            val zkpath = getConfigsZkPath(existingConfig)
            require(kafkaManagerPathCache.getCurrentData(zkpath) != null, s"Cannot enable non-existing cluster : $clusterName")
            curator.setData().forPath(zkpath, data)
          }
        }
      case KMDeleteCluster(clusterName) =>
        modify {
          // Deletion is only allowed for known, disabled clusters with no running manager.
          val existingManagerOption = clusterManagerMap.get(clusterName)
          require(existingManagerOption.isEmpty, s"Cannot delete enabled cluster : $clusterName")
          val existingConfigOption = clusterConfigMap.get(clusterName)
          require(existingConfigOption.isDefined, s"Cannot delete non-existing cluster : $clusterName")
          require(existingConfigOption.exists(!_.enabled), s"Cannot delete enabled cluster : $clusterName")
          for {
            existingConfig <- existingConfigOption
          } yield {
            val zkpath = getConfigsZkPath(existingConfig)
            require(kafkaManagerPathCache.getCurrentData(zkpath) != null, s"Cannot delete non-existing cluster : $clusterName")
            //mark for deletion
            val deleteZkPath = getDeleteClusterZkPath(existingConfig.name)
            curator.create().creatingParentsIfNeeded().withMode(CreateMode.PERSISTENT).forPath(deleteZkPath)
          }
        }
      case KMClusterCommandRequest(clusterName, request) =>
        clusterManagerMap.get(clusterName).fold[Unit] {
          sender ! ActorErrorResponse(s"Unknown cluster : $clusterName")
        } { clusterManagerPath:ActorPath =>
          context.actorSelection(clusterManagerPath).forward(request)
        }
      case KMUpdateState => updateState()
      case KMPruneClusters => pruneClusters()
      case KMShutdown =>
        log.info(s"Shutting down kafka manager")
        context.children.foreach(context.stop)
        shutdown = true
      case any: Any => log.warning("kma : processCommandRequest : Received unknown message: {}", any)
    }
  }

  /** ZK path of a cluster's delete marker. */
  private[this] def getDeleteClusterZkPath(clusterName: String) : String = {
    zkPathFrom(deleteClustersZkPath,clusterName)
  }

  /** ZK path of a cluster's stored config node. */
  private[this] def getConfigsZkPath(clusterConfig: ClusterConfig) : String = {
    zkPathFrom(configsZkPath,clusterConfig.name)
  }

  /** ZK path of a cluster's runtime-state subtree. */
  private[this] def getClusterZkPath(clusterConfig: ClusterConfig) : String = {
    zkPathFrom(baseClusterZkPath,clusterConfig.name)
  }

  /** Remember a new config whose manager has not been (re)started yet. */
  private[this] def markPendingClusterManager(clusterConfig: ClusterConfig) : Unit = {
    implicit val ec = context.system.dispatcher
    log.info(s"Mark pending cluster manager $clusterConfig")
    pendingClusterConfigMap += (clusterConfig.name -> clusterConfig)
  }

  /** Shut down a cluster's manager actor (non-blocking) and forget its state. */
  private[this] def removeClusterManager(clusterConfig: ClusterConfig) : Unit = {
    implicit val ec = context.system.dispatcher
    clusterManagerMap.get(clusterConfig.name).foreach { actorPath =>
      log.info(s"Removing cluster manager $clusterConfig")
      val selection = context.actorSelection(actorPath)
      selection.tell(CMShutdown,self)
      //this is non-blocking
      selection.resolveOne(1 seconds).foreach( ref => context.stop(ref) )
    }
    clusterManagerMap -= clusterConfig.name
    clusterConfigMap -= clusterConfig.name
  }

  /**
   * Register a cluster from a stored config: record it, and spawn a
   * ClusterManagerActor if it is enabled. Returns true iff a manager was started.
   */
  private[this] def addCluster(config: ClusterConfig): Try[Boolean] = {
    Try {
      if(!config.enabled) {
        log.info("Not adding cluster manager for disabled cluster : {}", config.name)
        clusterConfigMap += (config.name -> config)
        pendingClusterConfigMap -= config.name
        false
      } else {
        log.info("Adding new cluster manager for cluster : {}", config.name)
        val clusterManagerConfig = ClusterManagerActorConfig(
          kafkaManagerConfig.pinnedDispatcherName,
          getClusterZkPath(config),
          kafkaManagerConfig.curatorConfig,
          config,
          kafkaManagerConfig.brokerViewUpdatePeriod)
        val props = Props(classOf[ClusterManagerActor], clusterManagerConfig)
        val newClusterManager = context.actorOf(props, config.name).path
        clusterConfigMap += (config.name -> config)
        clusterManagerMap += (config.name -> newClusterManager)
        pendingClusterConfigMap += (config.name -> config) match {
          case _ => ()
        }
        pendingClusterConfigMap -= config.name
        true
      }
    }
  }

  /**
   * React to a changed stored config. If any significant field differs, the old
   * manager is torn down and the new config is queued; the next update cycle
   * restarts the manager via addCluster. Returns true iff a change was applied.
   */
  private[this] def updateCluster(currentConfig: ClusterConfig, newConfig: ClusterConfig): Try[Boolean] = {
    Try {
      if(newConfig.curatorConfig.zkConnect == currentConfig.curatorConfig.zkConnect
        && newConfig.enabled == currentConfig.enabled
        && newConfig.version == currentConfig.version
        && newConfig.jmxEnabled == currentConfig.jmxEnabled
        && newConfig.logkafkaEnabled == currentConfig.logkafkaEnabled
        && newConfig.filterConsumers == currentConfig.filterConsumers
        && newConfig.activeOffsetCacheEnabled == currentConfig.activeOffsetCacheEnabled) {
        //nothing changed
        false
      } else {
        //only need to shutdown enabled cluster
        log.info("Updating cluster manager for cluster={} , old={}, new={}",
          currentConfig.name,currentConfig.curatorConfig,newConfig.curatorConfig)
        markPendingClusterManager(newConfig)
        removeClusterManager(currentConfig)
        true
      }
    }
  }

  /** Reconcile in-memory state with the stored configs currently in the path cache. */
  private[this] def updateState(): Unit = {
    log.info("Updating internal state...")
    val result = Try {
      kafkaManagerPathCache.getCurrentData.asScala.foreach { data =>
        ClusterConfig.deserialize(data.getData) match {
          case Failure(t) =>
            log.error("Failed to deserialize cluster config",t)
          case Success(newConfig) =>
            clusterConfigMap.get(newConfig.name).fold(addCluster(newConfig))(updateCluster(_,newConfig))
        }
      }
    }
    result match {
      case Failure(t) => log.error("Failed to update internal state ... ",t)
      case _ =>
    }
    lastUpdateMillis = System.currentTimeMillis()
  }

  /** Drop clusters whose config node has disappeared from ZooKeeper. */
  private[this] def pruneClusters(): Unit = {
    log.info("Pruning clusters...")
    Try {
      // Snapshot the map since removeClusterManager mutates clusterConfigMap.
      val localClusterConfigMap = clusterConfigMap
      localClusterConfigMap.foreach { case (name, clusterConfig) =>
        val zkpath : String = getConfigsZkPath(clusterConfig)
        if(kafkaManagerPathCache.getCurrentData(zkpath) == null) {
          pendingClusterConfigMap -= clusterConfig.name
          removeClusterManager(clusterConfig)
          clusterConfigMap -= name
        }
      }
    }
    lastUpdateMillis = System.currentTimeMillis()
  }
}
joewalnes/idea-community
plugins/git4idea/src/git4idea/changes/ChangeCollector.java
12393
/* * Copyright 2000-2009 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package git4idea.changes; import com.intellij.openapi.project.Project; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.vcs.FilePath; import com.intellij.openapi.vcs.FileStatus; import com.intellij.openapi.vcs.VcsException; import com.intellij.openapi.vcs.changes.Change; import com.intellij.openapi.vcs.changes.ChangeListManager; import com.intellij.openapi.vcs.changes.ContentRevision; import com.intellij.openapi.vcs.changes.VcsDirtyScope; import com.intellij.openapi.vfs.VirtualFile; import com.intellij.vcsUtil.VcsUtil; import git4idea.GitContentRevision; import git4idea.GitRevisionNumber; import git4idea.GitUtil; import git4idea.commands.GitCommand; import git4idea.commands.GitSimpleHandler; import git4idea.commands.StringScanner; import java.io.IOException; import java.util.*; /** * A collector for changes in the Git. It is introduced because changes are not * cannot be got as a sum of stateless operations. 
*/ class ChangeCollector { private final Project myProject; private final ChangeListManager myChangeListManager; private final VcsDirtyScope myDirtyScope; private final VirtualFile myVcsRoot; private final List<VirtualFile> myUnversioned = new ArrayList<VirtualFile>(); // Unversioned files private final Set<String> myUnmergedNames = new HashSet<String>(); // Names of unmerged files private final List<Change> myChanges = new ArrayList<Change>(); // all changes private boolean myIsCollected = false; // indicates that collecting changes has been started private boolean myIsFailed = true; // indicates that collecting changes has been failed. public ChangeCollector(final Project project, ChangeListManager changeListManager, VcsDirtyScope dirtyScope, final VirtualFile vcsRoot) { myChangeListManager = changeListManager; myDirtyScope = dirtyScope; myVcsRoot = vcsRoot; myProject = project; } /** * Get unversioned files */ public Collection<VirtualFile> unversioned() throws VcsException { ensureCollected(); return myUnversioned; } /** * Get changes */ public Collection<Change> changes() throws VcsException { ensureCollected(); return myChanges; } /** * Ensure that changes has been collected. 
*/ private void ensureCollected() throws VcsException { if (myIsCollected) { if (myIsFailed) { throw new IllegalStateException("The method should not be called after after exception has been thrown."); } else { return; } } myIsCollected = true; updateIndex(); collectUnmergedAndUnversioned(); collectDiffChanges(); myIsFailed = false; } private void updateIndex() throws VcsException { GitSimpleHandler handler = new GitSimpleHandler(myProject, myVcsRoot, GitCommand.UPDATE_INDEX); handler.addParameters("--refresh", "--ignore-missing"); handler.setSilent(true); handler.setNoSSH(true); handler.setStdoutSuppressed(true); handler.ignoreErrorCode(1); handler.run(); } /** * Collect dirty file paths * * @param includeChanges if true, previous changes are included in collection * @return the set of dirty paths to check, the paths are automatically collapsed if the summary length more than limit */ private Collection<FilePath> dirtyPaths(boolean includeChanges) { // TODO collapse paths with common prefix ArrayList<FilePath> paths = new ArrayList<FilePath>(); FilePath rootPath = VcsUtil.getFilePath(myVcsRoot.getPath(), true); for (FilePath p : myDirtyScope.getRecursivelyDirtyDirectories()) { addToPaths(rootPath, paths, p); } ArrayList<FilePath> candidatePaths = new ArrayList<FilePath>(); candidatePaths.addAll(myDirtyScope.getDirtyFilesNoExpand()); if (includeChanges) { try { for (Change c : myChangeListManager.getChangesIn(myVcsRoot)) { switch (c.getType()) { case NEW: case DELETED: case MOVED: if (c.getAfterRevision() != null) { addToPaths(rootPath, paths, c.getAfterRevision().getFile()); } if (c.getBeforeRevision() != null) { addToPaths(rootPath, paths, c.getBeforeRevision().getFile()); } case MODIFICATION: default: // do nothing } } } catch (Exception t) { // ignore exceptions } } for (FilePath p : candidatePaths) { addToPaths(rootPath, paths, p); } return paths; } /** * Add path to the collection of the paths to check for this vcs root * * @param root the root path * @param 
paths the existing paths * @param toAdd the path to add */ void addToPaths(FilePath root, Collection<FilePath> paths, FilePath toAdd) { if (GitUtil.getGitRootOrNull(toAdd) != myVcsRoot) { return; } if (root.isUnder(toAdd, true)) { toAdd = root; } for (Iterator<FilePath> i = paths.iterator(); i.hasNext();) { FilePath p = i.next(); if (isAncestor(toAdd, p, true)) { // toAdd is an ancestor of p => adding toAdd instead of p. i.remove(); } if (isAncestor(p, toAdd, false)) { // p is an ancestor of toAdd => no need to add toAdd. return; } } paths.add(toAdd); } /** * Returns true if childCandidate file is located under parentCandidate. * This is an alternative to {@link com.intellij.openapi.vcs.FilePathImpl#isUnder(com.intellij.openapi.vcs.FilePath, boolean)}: * it doesn't check VirtualFile associated with this FilePath. * When we move a file we get a VcsDirtyScope with old and new FilePaths, but unfortunately the virtual file in the FilePath is * refreshed ({@link com.intellij.openapi.vcs.changes.VirtualFileHolder#cleanAndAdjustScope(com.intellij.openapi.vcs.changes.VcsModifiableDirtyScope)} * and thus points to the new position which makes FilePathImpl#isUnder useless. * * @param parentCandidate FilePath which we check to be the parent of childCandidate. * @param childCandidate FilePath which we check to be a child of parentCandidate. * @param strict if false, the method also returns true if files are equal * @return true if childCandidate is a child of parentCandidate. 
*/ private static boolean isAncestor(FilePath parentCandidate, FilePath childCandidate, boolean strict) { try { return FileUtil.isAncestor(parentCandidate.getIOFile(), childCandidate.getIOFile(), strict); } catch (IOException e) { return false; } } /** * Collect diff with head * * @throws VcsException if there is a problem with running git */ private void collectDiffChanges() throws VcsException { Collection<FilePath> dirtyPaths = dirtyPaths(true); if (dirtyPaths.isEmpty()) { return; } GitSimpleHandler handler = new GitSimpleHandler(myProject, myVcsRoot, GitCommand.DIFF); handler.addParameters("--name-status", "--diff-filter=ADCMRUX", "-M", "HEAD"); handler.setNoSSH(true); handler.setSilent(true); handler.setStdoutSuppressed(true); handler.endOptions(); handler.addRelativePaths(dirtyPaths); if (handler.isLargeCommandLine()) { // if there are too much files, just get all changes for the project handler = new GitSimpleHandler(myProject, myVcsRoot, GitCommand.DIFF); handler.addParameters("--name-status", "--diff-filter=ADCMRUX", "-M", "HEAD"); handler.setNoSSH(true); handler.setSilent(true); handler.setStdoutSuppressed(true); handler.endOptions(); } try { String output = handler.run(); GitChangeUtils.parseChanges(myProject, myVcsRoot, null, GitChangeUtils.loadRevision(myProject, myVcsRoot, "HEAD"), output, myChanges, myUnmergedNames); } catch (VcsException ex) { if (!GitChangeUtils.isHeadMissing(ex)) { throw ex; } handler = new GitSimpleHandler(myProject, myVcsRoot, GitCommand.LS_FILES); handler.addParameters("--cached"); handler.setNoSSH(true); handler.setSilent(true); handler.setStdoutSuppressed(true); // During init diff does not works because HEAD // will appear only after the first commit. // In that case added files are cached in index. 
String output = handler.run(); if (output.length() > 0) { StringTokenizer tokenizer = new StringTokenizer(output, "\n\r"); while (tokenizer.hasMoreTokens()) { final String s = tokenizer.nextToken(); Change ch = new Change(null, GitContentRevision.createRevision(myVcsRoot, s, null, myProject, false, false), FileStatus.ADDED); myChanges.add(ch); } } } } /** * Collect unversioned and unmerged files * * @throws VcsException if there is a problem with running git */ private void collectUnmergedAndUnversioned() throws VcsException { Collection<FilePath> dirtyPaths = dirtyPaths(false); if (dirtyPaths.isEmpty()) { return; } // prepare handler GitSimpleHandler handler = new GitSimpleHandler(myProject, myVcsRoot, GitCommand.LS_FILES); handler.addParameters("-v", "--unmerged"); handler.setSilent(true); handler.setNoSSH(true); handler.setStdoutSuppressed(true); // run handler and collect changes parseFiles(handler.run()); // prepare handler handler = new GitSimpleHandler(myProject, myVcsRoot, GitCommand.LS_FILES); handler.addParameters("-v", "--others", "--exclude-standard"); handler.setSilent(true); handler.setNoSSH(true); handler.setStdoutSuppressed(true); handler.endOptions(); handler.addRelativePaths(dirtyPaths); if(handler.isLargeCommandLine()) { handler = new GitSimpleHandler(myProject, myVcsRoot, GitCommand.LS_FILES); handler.addParameters("-v", "--others", "--exclude-standard"); handler.setSilent(true); handler.setNoSSH(true); handler.setStdoutSuppressed(true); handler.endOptions(); } // run handler and collect changes parseFiles(handler.run()); } private void parseFiles(String list) throws VcsException { for (StringScanner sc = new StringScanner(list); sc.hasMoreData();) { if (sc.isEol()) { sc.nextLine(); continue; } char status = sc.peek(); sc.skipChars(2); if ('?' 
== status) { VirtualFile file = myVcsRoot.findFileByRelativePath(GitUtil.unescapePath(sc.line())); if (GitUtil.gitRootOrNull(file) == myVcsRoot) { myUnversioned.add(file); } } else { //noinspection HardCodedStringLiteral if ('M' == status) { sc.boundedToken('\t'); String file = GitUtil.unescapePath(sc.line()); VirtualFile vFile = myVcsRoot.findFileByRelativePath(file); if (GitUtil.gitRootOrNull(vFile) != myVcsRoot) { continue; } if (!myUnmergedNames.add(file)) { continue; } // TODO handle conflict rename-modify // TODO handle conflict copy-modify // TODO handle conflict delete-modify // TODO handle conflict rename-delete // assume modify-modify conflict ContentRevision before = GitContentRevision.createRevision(myVcsRoot, file, new GitRevisionNumber("orig_head"), myProject, false, true); ContentRevision after = GitContentRevision.createRevision(myVcsRoot, file, null, myProject, false, false); myChanges.add(new Change(before, after, FileStatus.MERGED_WITH_CONFLICTS)); } else { throw new VcsException("Unsupported type of the merge conflict detected: " + status); } } } } }
apache-2.0
jkdcdlly/zipkin
zipkin-hbase/src/test/scala/com/twitter/zipkin/storage/hbase/HBaseSpanStoreSpec.scala
6570
package com.twitter.zipkin.storage.hbase import com.twitter.zipkin.Constants import com.twitter.zipkin.common.{Annotation, Span, Endpoint} import com.twitter.zipkin.hbase.TableLayouts import com.twitter.zipkin.storage.hbase.mapping.ServiceMapper import com.twitter.zipkin.storage.hbase.utils.{HBaseTable, IDGenerator} /** * This isn't really a great unit test but it's a good starting * point until I have a mock HBaseTable. */ class HBaseSpanStoreSpec extends ZipkinHBaseSpecification { val tablesNeeded = TableLayouts.tables.keys.toSeq val traceId = 100L val spanId = 567L val span = Span(traceId, "span.methodCall()", spanId) val spanStore = new HBaseSpanStore(_conf) after { spanStore.close() } val endOfTime = Long.MaxValue def before(ts: Long) = ts - 1 val traceIdOne = 100 val spanOneStart = 90000L val serviceNameOne = "HBase.Client" val endpointOne = new Endpoint(0, 0, serviceNameOne) val annoOneList = List( new Annotation(spanOneStart, Constants.ClientSend, Some(endpointOne)), new Annotation(spanOneStart + 100, Constants.ClientRecv, Some(endpointOne)) ) val spanOneId: Long = 32003 val spanOneName = "startingSpan" val spanOne = Span(traceIdOne, spanOneName, spanOneId, None, annoOneList, Seq()) val spanTwoStart = spanOneStart + 100 val serviceNameTwo = "HBase.RegionServer" val endPointTwo = new Endpoint(0, 0, serviceNameTwo) val annoTwoList = List(new Annotation(spanTwoStart, Constants.ServerRecv, Some(endPointTwo))) val spanTwo = Span(traceIdOne, "secondSpan", 45006, Some(spanOneId), annoTwoList, Seq()) val spanThreeStart = spanTwoStart + 100 val annoThreeList = List(new Annotation(spanThreeStart, Constants.ServerRecv, Some(endPointTwo))) val spanThree = Span(traceIdOne, "spanThree", 45007, Some(spanOneId), annoThreeList, Seq()) val traceIdFour = 103 val spanFourStart = spanThreeStart + 100 val annoFourList = List(new Annotation(spanFourStart, Constants.ServerRecv, Some(endPointTwo))) val spanFour = Span(traceIdFour, "spanThree", 45008, None, annoFourList, Seq()) val 
spanFiveStart = spanFourStart + 100 val annoFiveValue = "CustomANNO" val annoFiveList = List(new Annotation(spanFiveStart, annoFiveValue, Some(endPointTwo))) val spanFive = Span(traceIdFour, "spanThree", 45009, Some(45006), annoFiveList, Seq()) test("indexServiceName") { val serviceTable = new HBaseTable(_conf, TableLayouts.idxServiceTableName) val mappingTable = new HBaseTable(_conf, TableLayouts.mappingTableName) val idGenTable = new HBaseTable(_conf, TableLayouts.idGenTableName) val idGen = new IDGenerator(idGenTable) val serviceMapper = new ServiceMapper(mappingTable, idGen) Await.result(spanStore.indexServiceName(spanOne)) val results = Await.result(serviceTable.scan(new Scan(), 100)) results.size should be(1) val result = results.head result.getRow.size should be(Bytes.SIZEOF_LONG * 2) val serviceNameFromSpan = spanOne.serviceName.get val serviceMapping = Await.result(serviceMapper.get(serviceNameFromSpan)) Bytes.toLong(result.getRow) should be(serviceMapping.id) Bytes.toLong(result.getRow.slice(Bytes.SIZEOF_LONG, Bytes.SIZEOF_LONG * 2)) should be(Long.MaxValue - spanOneStart) } test("indexTraceIdByServiceAndName") { val serviceSpanNameTable = new HBaseTable(_conf, TableLayouts.idxServiceSpanNameTableName) Await.result(spanStore.indexTraceIdByServiceAndName(spanOne)) val scan = new Scan() val results = Await.result(serviceSpanNameTable.scan(scan, 100)) results.size should be(1) } test("indexSpanByAnnotations") { val annoTable = new HBaseTable(_conf, TableLayouts.idxServiceAnnotationTableName) Await.result(spanStore.indexSpanByAnnotations(spanFive)) val result = Await.result(annoTable.scan(new Scan(), 1000)) result.size should be(1) } test("getTraceIdsByName") { Await.result(spanStore.indexServiceName(spanOne)) Await.result(spanStore.indexServiceName(spanTwo)) Await.result(spanStore.indexServiceName(spanThree)) Await.result(spanStore.indexServiceName(spanFour)) Await.result(spanStore.indexTraceIdByServiceAndName(spanOne)) 
Await.result(spanStore.indexTraceIdByServiceAndName(spanTwo)) Await.result(spanStore.indexTraceIdByServiceAndName(spanThree)) Await.result(spanStore.indexTraceIdByServiceAndName(spanFour)) val emptyResult = Await.result(spanStore.getTraceIdsByName(serviceNameOne, None, before(spanOneStart), 1)) emptyResult should be(Seq()) // Try and get the first trace from the first service name val t1 = Await.result(spanStore.getTraceIdsByName(serviceNameOne, None, before(endOfTime), 1)) t1.map { _.traceId } should contain(traceIdOne) t1.map { _.timestamp } should contain(spanOneStart) t1.size should be(1) // Try and get the first two traces from the second service name val t2 = Await.result(spanStore.getTraceIdsByName(serviceNameTwo, None, before(endOfTime), 100)) t2.map { _.traceId } should contain(traceIdOne) t2.map { _.traceId } should contain(traceIdFour) t2.map { _.timestamp } should contain(spanTwoStart) t2.map { _.timestamp } should contain(spanThreeStart) // Try and get the first trace from the first service name and the first span name val t3 = Await.result(spanStore.getTraceIdsByName(serviceNameOne, Some(spanOne.name), before(endOfTime), 1)) t3.map { _.traceId } should contain(traceIdOne) t3.map { _.timestamp } should contain(spanOneStart) t3.size should be(1) } test("getTraceIdsByAnnotation") { Await.result(spanStore.indexSpanByAnnotations(spanFive)) val idf = spanStore.getTraceIdsByAnnotation(spanFive.annotations.head.serviceName, spanFive.annotations.head.value, None, before(endOfTime), 100) val ids = Await.result(idf) ids.size should be(1) ids.map { _.traceId } should contain(spanFive.traceId) } test("storeSpan") { Await.result(spanStore.apply(Seq(span))) // The data should be there by now. 
val htable = new HTable(_conf, TableLayouts.storageTableName) val result = htable.get(new Get(Bytes.toBytes(traceId))) result.size shouldEqual 1 } test("getTracesByIds") { Await.result(spanStore.apply(Seq(span))) val spansFoundFuture = spanStore.getTracesByIds(Seq(traceId, 302L)) val spansFound = Await.result(spansFoundFuture).flatten spansFound should contain(span) spansFound.size should be(1) } }
apache-2.0
n2hsu/Nii-Launcher
src/com/google/protobuf/FieldSet.java
29379
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // http://code.google.com/p/protobuf/ // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. package com.google.protobuf; import com.google.protobuf.LazyField.LazyIterator; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; /** * A class which represents an arbitrary set of fields of some message type. 
* This is used to implement {@link DynamicMessage}, and also to represent * extensions in {@link GeneratedMessage}. This class is package-private, since * outside users should probably be using {@link DynamicMessage}. * * @author kenton@google.com Kenton Varda */ final class FieldSet<FieldDescriptorType extends FieldSet.FieldDescriptorLite<FieldDescriptorType>> { /** * Interface for a FieldDescriptor or lite extension descriptor. This * prevents FieldSet from depending on {@link Descriptors.FieldDescriptor}. */ public interface FieldDescriptorLite<T extends FieldDescriptorLite<T>> extends Comparable<T> { int getNumber(); WireFormat.FieldType getLiteType(); WireFormat.JavaType getLiteJavaType(); boolean isRepeated(); boolean isPacked(); Internal.EnumLiteMap<?> getEnumType(); // If getLiteJavaType() == MESSAGE, this merges a message object of the // type into a builder of the type. Returns {@code to}. MessageLite.Builder internalMergeFrom(MessageLite.Builder to, MessageLite from); } private final SmallSortedMap<FieldDescriptorType, Object> fields; private boolean isImmutable; private boolean hasLazyField = false; /** Construct a new FieldSet. */ private FieldSet() { this.fields = SmallSortedMap.newFieldMap(16); } /** * Construct an empty FieldSet. This is only used to initialize * DEFAULT_INSTANCE. */ private FieldSet(final boolean dummy) { this.fields = SmallSortedMap.newFieldMap(0); makeImmutable(); } /** Construct a new FieldSet. */ public static <T extends FieldSet.FieldDescriptorLite<T>> FieldSet<T> newFieldSet() { return new FieldSet<T>(); } /** Get an immutable empty FieldSet. */ @SuppressWarnings("unchecked") public static <T extends FieldSet.FieldDescriptorLite<T>> FieldSet<T> emptySet() { return DEFAULT_INSTANCE; } @SuppressWarnings("rawtypes") private static final FieldSet DEFAULT_INSTANCE = new FieldSet(true); /** Make this FieldSet immutable from this point forward. 
*/ @SuppressWarnings("unchecked") public void makeImmutable() { if (isImmutable) { return; } fields.makeImmutable(); isImmutable = true; } /** * Returns whether the FieldSet is immutable. This is true if it is the * {@link #emptySet} or if {@link #makeImmutable} were called. * * @return whether the FieldSet is immutable. */ public boolean isImmutable() { return isImmutable; } /** * Clones the FieldSet. The returned FieldSet will be mutable even if the * original FieldSet was immutable. * * @return the newly cloned FieldSet */ @Override public FieldSet<FieldDescriptorType> clone() { // We can't just call fields.clone because List objects in the map // should not be shared. FieldSet<FieldDescriptorType> clone = FieldSet.newFieldSet(); for (int i = 0; i < fields.getNumArrayEntries(); i++) { Map.Entry<FieldDescriptorType, Object> entry = fields.getArrayEntryAt(i); FieldDescriptorType descriptor = entry.getKey(); clone.setField(descriptor, entry.getValue()); } for (Map.Entry<FieldDescriptorType, Object> entry : fields.getOverflowEntries()) { FieldDescriptorType descriptor = entry.getKey(); clone.setField(descriptor, entry.getValue()); } clone.hasLazyField = hasLazyField; return clone; } // ================================================================= /** See {@link Message.Builder#clear()}. */ public void clear() { fields.clear(); hasLazyField = false; } /** * Get a simple map containing all the fields. */ public Map<FieldDescriptorType, Object> getAllFields() { if (hasLazyField) { SmallSortedMap<FieldDescriptorType, Object> result = SmallSortedMap.newFieldMap(16); for (int i = 0; i < fields.getNumArrayEntries(); i++) { cloneFieldEntry(result, fields.getArrayEntryAt(i)); } for (Map.Entry<FieldDescriptorType, Object> entry : fields.getOverflowEntries()) { cloneFieldEntry(result, entry); } if (fields.isImmutable()) { result.makeImmutable(); } return result; } return fields.isImmutable() ? 
fields : Collections.unmodifiableMap(fields); } private void cloneFieldEntry(Map<FieldDescriptorType, Object> map, Map.Entry<FieldDescriptorType, Object> entry) { FieldDescriptorType key = entry.getKey(); Object value = entry.getValue(); if (value instanceof LazyField) { map.put(key, ((LazyField) value).getValue()); } else { map.put(key, value); } } /** * Get an iterator to the field map. This iterator should not be leaked out * of the protobuf library as it is not protected from mutation when fields * is not immutable. */ public Iterator<Map.Entry<FieldDescriptorType, Object>> iterator() { if (hasLazyField) { return new LazyIterator<FieldDescriptorType>(fields.entrySet().iterator()); } return fields.entrySet().iterator(); } /** * Useful for implementing * {@link Message#hasField(Descriptors.FieldDescriptor)}. */ public boolean hasField(final FieldDescriptorType descriptor) { if (descriptor.isRepeated()) { throw new IllegalArgumentException("hasField() can only be called on non-repeated fields."); } return fields.get(descriptor) != null; } /** * Useful for implementing * {@link Message#getField(Descriptors.FieldDescriptor)}. This method * returns {@code null} if the field is not set; in this case it is up to * the caller to fetch the field's default value. */ public Object getField(final FieldDescriptorType descriptor) { Object o = fields.get(descriptor); if (o instanceof LazyField) { return ((LazyField) o).getValue(); } return o; } /** * Useful for implementing * {@link Message.Builder#setField(Descriptors.FieldDescriptor,Object)}. */ @SuppressWarnings({ "unchecked", "rawtypes" }) public void setField(final FieldDescriptorType descriptor, Object value) { if (descriptor.isRepeated()) { if (!(value instanceof List)) { throw new IllegalArgumentException("Wrong object type used with protocol message reflection."); } // Wrap the contents in a new list so that the caller cannot change // the list's contents after setting it. 
final List newList = new ArrayList(); newList.addAll((List) value); for (final Object element : newList) { verifyType(descriptor.getLiteType(), element); } value = newList; } else { verifyType(descriptor.getLiteType(), value); } if (value instanceof LazyField) { hasLazyField = true; } fields.put(descriptor, value); } /** * Useful for implementing * {@link Message.Builder#clearField(Descriptors.FieldDescriptor)}. */ public void clearField(final FieldDescriptorType descriptor) { fields.remove(descriptor); if (fields.isEmpty()) { hasLazyField = false; } } /** * Useful for implementing * {@link Message#getRepeatedFieldCount(Descriptors.FieldDescriptor)}. */ public int getRepeatedFieldCount(final FieldDescriptorType descriptor) { if (!descriptor.isRepeated()) { throw new IllegalArgumentException("getRepeatedField() can only be called on repeated fields."); } final Object value = getField(descriptor); if (value == null) { return 0; } else { return ((List<?>) value).size(); } } /** * Useful for implementing * {@link Message#getRepeatedField(Descriptors.FieldDescriptor,int)}. */ public Object getRepeatedField(final FieldDescriptorType descriptor, final int index) { if (!descriptor.isRepeated()) { throw new IllegalArgumentException("getRepeatedField() can only be called on repeated fields."); } final Object value = getField(descriptor); if (value == null) { throw new IndexOutOfBoundsException(); } else { return ((List<?>) value).get(index); } } /** * Useful for implementing * {@link Message.Builder#setRepeatedField(Descriptors.FieldDescriptor,int,Object)} * . 
*/ @SuppressWarnings("unchecked") public void setRepeatedField(final FieldDescriptorType descriptor, final int index, final Object value) { if (!descriptor.isRepeated()) { throw new IllegalArgumentException("getRepeatedField() can only be called on repeated fields."); } final Object list = getField(descriptor); if (list == null) { throw new IndexOutOfBoundsException(); } verifyType(descriptor.getLiteType(), value); ((List<Object>) list).set(index, value); } /** * Useful for implementing * {@link Message.Builder#addRepeatedField(Descriptors.FieldDescriptor,Object)} * . */ @SuppressWarnings("unchecked") public void addRepeatedField(final FieldDescriptorType descriptor, final Object value) { if (!descriptor.isRepeated()) { throw new IllegalArgumentException("addRepeatedField() can only be called on repeated fields."); } verifyType(descriptor.getLiteType(), value); final Object existingValue = getField(descriptor); List<Object> list; if (existingValue == null) { list = new ArrayList<Object>(); fields.put(descriptor, list); } else { list = (List<Object>) existingValue; } list.add(value); } /** * Verifies that the given object is of the correct type to be a valid value * for the given field. (For repeated fields, this checks if the object is * the right type to be one element of the field.) * * @throws IllegalArgumentException * The value is not of the right type. 
*/ private static void verifyType(final WireFormat.FieldType type, final Object value) { if (value == null) { throw new NullPointerException(); } boolean isValid = false; switch (type.getJavaType()) { case INT: isValid = value instanceof Integer; break; case LONG: isValid = value instanceof Long; break; case FLOAT: isValid = value instanceof Float; break; case DOUBLE: isValid = value instanceof Double; break; case BOOLEAN: isValid = value instanceof Boolean; break; case STRING: isValid = value instanceof String; break; case BYTE_STRING: isValid = value instanceof ByteString; break; case ENUM: // TODO(kenton): Caller must do type checking here, I guess. isValid = value instanceof Internal.EnumLite; break; case MESSAGE: // TODO(kenton): Caller must do type checking here, I guess. isValid = (value instanceof MessageLite) || (value instanceof LazyField); break; } if (!isValid) { // TODO(kenton): When chaining calls to setField(), it can be hard // to // tell from the stack trace which exact call failed, since the // whole // chain is considered one line of code. It would be nice to print // more information here, e.g. naming the field. We used to do that. // But we can't now that FieldSet doesn't use descriptors. Maybe // this // isn't a big deal, though, since it would only really apply when // using // reflection and generally people don't chain reflection setters. throw new IllegalArgumentException("Wrong object type used with protocol message reflection."); } } // ================================================================= // Parsing and serialization /** * See {@link Message#isInitialized()}. Note: Since {@code FieldSet} itself * does not have any way of knowing about required fields that aren't * actually present in the set, it is up to the caller to check that all * required fields are present. 
*/ public boolean isInitialized() { for (int i = 0; i < fields.getNumArrayEntries(); i++) { if (!isInitialized(fields.getArrayEntryAt(i))) { return false; } } for (final Map.Entry<FieldDescriptorType, Object> entry : fields.getOverflowEntries()) { if (!isInitialized(entry)) { return false; } } return true; } @SuppressWarnings("unchecked") private boolean isInitialized(final Map.Entry<FieldDescriptorType, Object> entry) { final FieldDescriptorType descriptor = entry.getKey(); if (descriptor.getLiteJavaType() == WireFormat.JavaType.MESSAGE) { if (descriptor.isRepeated()) { for (final MessageLite element : (List<MessageLite>) entry.getValue()) { if (!element.isInitialized()) { return false; } } } else { Object value = entry.getValue(); if (value instanceof MessageLite) { if (!((MessageLite) value).isInitialized()) { return false; } } else if (value instanceof LazyField) { return true; } else { throw new IllegalArgumentException( "Wrong object type used with protocol message reflection."); } } } return true; } /** * Given a field type, return the wire type. * * @returns One of the {@code WIRETYPE_} constants defined in * {@link WireFormat}. */ static int getWireFormatForFieldType(final WireFormat.FieldType type, boolean isPacked) { if (isPacked) { return WireFormat.WIRETYPE_LENGTH_DELIMITED; } else { return type.getWireType(); } } /** * Like {@link Message.Builder#mergeFrom(Message)}, but merges from another * {@link FieldSet}. 
*/ public void mergeFrom(final FieldSet<FieldDescriptorType> other) { for (int i = 0; i < other.fields.getNumArrayEntries(); i++) { mergeFromField(other.fields.getArrayEntryAt(i)); } for (final Map.Entry<FieldDescriptorType, Object> entry : other.fields.getOverflowEntries()) { mergeFromField(entry); } } @SuppressWarnings({ "unchecked", "rawtypes" }) private void mergeFromField(final Map.Entry<FieldDescriptorType, Object> entry) { final FieldDescriptorType descriptor = entry.getKey(); Object otherValue = entry.getValue(); if (otherValue instanceof LazyField) { otherValue = ((LazyField) otherValue).getValue(); } if (descriptor.isRepeated()) { Object value = getField(descriptor); if (value == null) { // Our list is empty, but we still need to make a defensive copy // of // the other list since we don't know if the other FieldSet is // still // mutable. fields.put(descriptor, new ArrayList((List) otherValue)); } else { // Concatenate the lists. ((List) value).addAll((List) otherValue); } } else if (descriptor.getLiteJavaType() == WireFormat.JavaType.MESSAGE) { Object value = getField(descriptor); if (value == null) { fields.put(descriptor, otherValue); } else { // Merge the messages. fields.put( descriptor, descriptor.internalMergeFrom(((MessageLite) value).toBuilder(), (MessageLite) otherValue).build()); } } else { fields.put(descriptor, otherValue); } } // TODO(kenton): Move static parsing and serialization methods into some // other class. Probably WireFormat. /** * Read a field of any primitive type from a CodedInputStream. Enums, * groups, and embedded messages are not handled by this method. * * @param input * The stream from which to read. * @param type * Declared type of the field. * @return An object representing the field's value, of the exact type which * would be returned by * {@link Message#getField(Descriptors.FieldDescriptor)} for this * field. 
*/ public static Object readPrimitiveField(CodedInputStream input, final WireFormat.FieldType type) throws IOException { switch (type) { case DOUBLE: return input.readDouble(); case FLOAT: return input.readFloat(); case INT64: return input.readInt64(); case UINT64: return input.readUInt64(); case INT32: return input.readInt32(); case FIXED64: return input.readFixed64(); case FIXED32: return input.readFixed32(); case BOOL: return input.readBool(); case STRING: return input.readString(); case BYTES: return input.readBytes(); case UINT32: return input.readUInt32(); case SFIXED32: return input.readSFixed32(); case SFIXED64: return input.readSFixed64(); case SINT32: return input.readSInt32(); case SINT64: return input.readSInt64(); case GROUP: throw new IllegalArgumentException("readPrimitiveField() cannot handle nested groups."); case MESSAGE: throw new IllegalArgumentException("readPrimitiveField() cannot handle embedded messages."); case ENUM: // We don't handle enums because we don't know what to do if the // value is not recognized. throw new IllegalArgumentException("readPrimitiveField() cannot handle enums."); } throw new RuntimeException("There is no way to get here, but the compiler thinks otherwise."); } /** See {@link Message#writeTo(CodedOutputStream)}. */ public void writeTo(final CodedOutputStream output) throws IOException { for (int i = 0; i < fields.getNumArrayEntries(); i++) { final Map.Entry<FieldDescriptorType, Object> entry = fields.getArrayEntryAt(i); writeField(entry.getKey(), entry.getValue(), output); } for (final Map.Entry<FieldDescriptorType, Object> entry : fields.getOverflowEntries()) { writeField(entry.getKey(), entry.getValue(), output); } } /** * Like {@link #writeTo} but uses MessageSet wire format. 
*/ public void writeMessageSetTo(final CodedOutputStream output) throws IOException { for (int i = 0; i < fields.getNumArrayEntries(); i++) { writeMessageSetTo(fields.getArrayEntryAt(i), output); } for (final Map.Entry<FieldDescriptorType, Object> entry : fields.getOverflowEntries()) { writeMessageSetTo(entry, output); } } private void writeMessageSetTo(final Map.Entry<FieldDescriptorType, Object> entry, final CodedOutputStream output) throws IOException { final FieldDescriptorType descriptor = entry.getKey(); if (descriptor.getLiteJavaType() == WireFormat.JavaType.MESSAGE && !descriptor.isRepeated() && !descriptor.isPacked()) { output.writeMessageSetExtension(entry.getKey().getNumber(), (MessageLite) entry.getValue()); } else { writeField(descriptor, entry.getValue(), output); } } /** * Write a single tag-value pair to the stream. * * @param output * The output stream. * @param type * The field's type. * @param number * The field's number. * @param value * Object representing the field's value. Must be of the exact * type which would be returned by * {@link Message#getField(Descriptors.FieldDescriptor)} for this * field. */ private static void writeElement(final CodedOutputStream output, final WireFormat.FieldType type, final int number, final Object value) throws IOException { // Special case for groups, which need a start and end tag; other fields // can just use writeTag() and writeFieldNoTag(). if (type == WireFormat.FieldType.GROUP) { output.writeGroup(number, (MessageLite) value); } else { output.writeTag(number, getWireFormatForFieldType(type, false)); writeElementNoTag(output, type, value); } } /** * Write a field of arbitrary type, without its tag, to the stream. * * @param output * The output stream. * @param type * The field's type. * @param value * Object representing the field's value. Must be of the exact * type which would be returned by * {@link Message#getField(Descriptors.FieldDescriptor)} for this * field. 
*/ private static void writeElementNoTag(final CodedOutputStream output, final WireFormat.FieldType type, final Object value) throws IOException { switch (type) { case DOUBLE: output.writeDoubleNoTag((Double) value); break; case FLOAT: output.writeFloatNoTag((Float) value); break; case INT64: output.writeInt64NoTag((Long) value); break; case UINT64: output.writeUInt64NoTag((Long) value); break; case INT32: output.writeInt32NoTag((Integer) value); break; case FIXED64: output.writeFixed64NoTag((Long) value); break; case FIXED32: output.writeFixed32NoTag((Integer) value); break; case BOOL: output.writeBoolNoTag((Boolean) value); break; case STRING: output.writeStringNoTag((String) value); break; case GROUP: output.writeGroupNoTag((MessageLite) value); break; case MESSAGE: output.writeMessageNoTag((MessageLite) value); break; case BYTES: output.writeBytesNoTag((ByteString) value); break; case UINT32: output.writeUInt32NoTag((Integer) value); break; case SFIXED32: output.writeSFixed32NoTag((Integer) value); break; case SFIXED64: output.writeSFixed64NoTag((Long) value); break; case SINT32: output.writeSInt32NoTag((Integer) value); break; case SINT64: output.writeSInt64NoTag((Long) value); break; case ENUM: output.writeEnumNoTag(((Internal.EnumLite) value).getNumber()); break; } } /** Write a single field. */ public static void writeField(final FieldDescriptorLite<?> descriptor, final Object value, final CodedOutputStream output) throws IOException { WireFormat.FieldType type = descriptor.getLiteType(); int number = descriptor.getNumber(); if (descriptor.isRepeated()) { final List<?> valueList = (List<?>) value; if (descriptor.isPacked()) { output.writeTag(number, WireFormat.WIRETYPE_LENGTH_DELIMITED); // Compute the total data size so the length can be written. int dataSize = 0; for (final Object element : valueList) { dataSize += computeElementSizeNoTag(type, element); } output.writeRawVarint32(dataSize); // Write the data itself, without any tags. 
for (final Object element : valueList) { writeElementNoTag(output, type, element); } } else { for (final Object element : valueList) { writeElement(output, type, number, element); } } } else { if (value instanceof LazyField) { writeElement(output, type, number, ((LazyField) value).getValue()); } else { writeElement(output, type, number, value); } } } /** * See {@link Message#getSerializedSize()}. It's up to the caller to cache * the resulting size if desired. */ public int getSerializedSize() { int size = 0; for (int i = 0; i < fields.getNumArrayEntries(); i++) { final Map.Entry<FieldDescriptorType, Object> entry = fields.getArrayEntryAt(i); size += computeFieldSize(entry.getKey(), entry.getValue()); } for (final Map.Entry<FieldDescriptorType, Object> entry : fields.getOverflowEntries()) { size += computeFieldSize(entry.getKey(), entry.getValue()); } return size; } /** * Like {@link #getSerializedSize} but uses MessageSet wire format. */ public int getMessageSetSerializedSize() { int size = 0; for (int i = 0; i < fields.getNumArrayEntries(); i++) { size += getMessageSetSerializedSize(fields.getArrayEntryAt(i)); } for (final Map.Entry<FieldDescriptorType, Object> entry : fields.getOverflowEntries()) { size += getMessageSetSerializedSize(entry); } return size; } private int getMessageSetSerializedSize(final Map.Entry<FieldDescriptorType, Object> entry) { final FieldDescriptorType descriptor = entry.getKey(); Object value = entry.getValue(); if (descriptor.getLiteJavaType() == WireFormat.JavaType.MESSAGE && !descriptor.isRepeated() && !descriptor.isPacked()) { if (value instanceof LazyField) { return CodedOutputStream.computeLazyFieldMessageSetExtensionSize(entry.getKey().getNumber(), (LazyField) value); } else { return CodedOutputStream.computeMessageSetExtensionSize(entry.getKey().getNumber(), (MessageLite) value); } } else { return computeFieldSize(descriptor, value); } } /** * Compute the number of bytes that would be needed to encode a single * tag/value pair of 
arbitrary type. * * @param type * The field's type. * @param number * The field's number. * @param value * Object representing the field's value. Must be of the exact * type which would be returned by * {@link Message#getField(Descriptors.FieldDescriptor)} for this * field. */ private static int computeElementSize(final WireFormat.FieldType type, final int number, final Object value) { int tagSize = CodedOutputStream.computeTagSize(number); if (type == WireFormat.FieldType.GROUP) { tagSize *= 2; } return tagSize + computeElementSizeNoTag(type, value); } /** * Compute the number of bytes that would be needed to encode a particular * value of arbitrary type, excluding tag. * * @param type * The field's type. * @param value * Object representing the field's value. Must be of the exact * type which would be returned by * {@link Message#getField(Descriptors.FieldDescriptor)} for this * field. */ private static int computeElementSizeNoTag(final WireFormat.FieldType type, final Object value) { switch (type) { // Note: Minor violation of 80-char limit rule here because this would // actually be harder to read if we wrapped the lines. 
case DOUBLE: return CodedOutputStream.computeDoubleSizeNoTag((Double) value); case FLOAT: return CodedOutputStream.computeFloatSizeNoTag((Float) value); case INT64: return CodedOutputStream.computeInt64SizeNoTag((Long) value); case UINT64: return CodedOutputStream.computeUInt64SizeNoTag((Long) value); case INT32: return CodedOutputStream.computeInt32SizeNoTag((Integer) value); case FIXED64: return CodedOutputStream.computeFixed64SizeNoTag((Long) value); case FIXED32: return CodedOutputStream.computeFixed32SizeNoTag((Integer) value); case BOOL: return CodedOutputStream.computeBoolSizeNoTag((Boolean) value); case STRING: return CodedOutputStream.computeStringSizeNoTag((String) value); case GROUP: return CodedOutputStream.computeGroupSizeNoTag((MessageLite) value); case BYTES: return CodedOutputStream.computeBytesSizeNoTag((ByteString) value); case UINT32: return CodedOutputStream.computeUInt32SizeNoTag((Integer) value); case SFIXED32: return CodedOutputStream.computeSFixed32SizeNoTag((Integer) value); case SFIXED64: return CodedOutputStream.computeSFixed64SizeNoTag((Long) value); case SINT32: return CodedOutputStream.computeSInt32SizeNoTag((Integer) value); case SINT64: return CodedOutputStream.computeSInt64SizeNoTag((Long) value); case MESSAGE: if (value instanceof LazyField) { return CodedOutputStream.computeLazyFieldSizeNoTag((LazyField) value); } else { return CodedOutputStream.computeMessageSizeNoTag((MessageLite) value); } case ENUM: return CodedOutputStream.computeEnumSizeNoTag(((Internal.EnumLite) value).getNumber()); } throw new RuntimeException("There is no way to get here, but the compiler thinks otherwise."); } /** * Compute the number of bytes needed to encode a particular field. 
*/ public static int computeFieldSize(final FieldDescriptorLite<?> descriptor, final Object value) { WireFormat.FieldType type = descriptor.getLiteType(); int number = descriptor.getNumber(); if (descriptor.isRepeated()) { if (descriptor.isPacked()) { int dataSize = 0; for (final Object element : (List<?>) value) { dataSize += computeElementSizeNoTag(type, element); } return dataSize + CodedOutputStream.computeTagSize(number) + CodedOutputStream.computeRawVarint32Size(dataSize); } else { int size = 0; for (final Object element : (List<?>) value) { size += computeElementSize(type, number, element); } return size; } } else { return computeElementSize(type, number, value); } } }
apache-2.0
SoulBeaver/i-hate-rectangles
IHateRectangles/IHateRectangles/Components/ColorComponent.cs
295
using Artemis;
using Microsoft.Xna.Framework;

namespace IHateRectangles.Components
{
    /// <summary>
    /// Poolable Artemis component that carries the render colour of an entity.
    /// </summary>
    public class ColorComponent : ComponentPoolable
    {
        /// <summary>The colour assigned to the owning entity.</summary>
        public Color Color { get; private set; }

        /// <summary>Creates the component holding the given colour.</summary>
        /// <param name="color">Colour to store on this component.</param>
        public ColorComponent(Color color)
        {
            this.Color = color;
        }
    }
}
apache-2.0
kryptnostic/rhizome
src/test/java/com/geekbeast/rhizome/tests/bootstrap/RhizomeTests.java
7781
package com.geekbeast.rhizome.tests.bootstrap;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.geekbeast.rhizome.hazelcast.pods.TestSharedStreamSerializersPod;
import com.geekbeast.rhizome.tests.authentication.Auth0SecurityTestPod;
import com.geekbeast.rhizome.tests.configurations.TestConfiguration;
import com.geekbeast.rhizome.tests.controllers.SimpleControllerAPI;
import com.geekbeast.rhizome.tests.pods.DispatcherServletsPod;
import com.google.common.base.Optional;
import com.google.common.net.HttpHeaders;
import com.kryptnostic.rhizome.core.Cutting;
import com.kryptnostic.rhizome.core.Rhizome;
import com.kryptnostic.rhizome.pods.ConfigurationLoaderPod;
import com.kryptnostic.rhizome.pods.hazelcast.RegistryBasedHazelcastInstanceConfigurationPod;
import com.openlattice.auth0.Auth0Pod;
import com.openlattice.authentication.AuthenticationTest;
import com.openlattice.retrofit.RhizomeByteConverterFactory;
import com.openlattice.retrofit.RhizomeCallAdapterFactory;
import com.openlattice.retrofit.RhizomeJacksonConverterFactory;
import com.openlattice.retrofit.RhizomeRetrofitCallException;
import okhttp3.OkHttpClient;
import okhttp3.Response;
import org.apache.commons.lang3.RandomUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.text.RandomStringGenerator;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.BeansException;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import retrofit2.Retrofit;

import java.io.IOException;

/**
 * End-to-end integration tests for the Rhizome microservice stack.
 *
 * <p>{@link #plant()} boots a full Rhizome instance (Jetty + Spring pods) once per class and
 * builds a Retrofit client against http://localhost:8081/rhizome/api/; {@link #plow()} tears it
 * down. Individual tests exercise {@link SimpleControllerAPI} endpoints over real HTTP, including
 * gzip handling, configuration read/write, and Auth0-secured endpoints.
 *
 * <p>Thread-safety: all shared state is static and initialized/torn down by the
 * JUnit class-lifecycle hooks; tests assume sequential execution.
 */
public class RhizomeTests {
    // 4 KiB of random payload -- large enough to trigger Jetty's gzip encoding (see plant()).
    public static final byte[] TEST_BYTES = RandomUtils.nextBytes( 1 << 12 );

    private final static Logger logger = LoggerFactory.getLogger( RhizomeTests.class );

    // Retrofit client pointed at the locally started service; built in plant().
    private static Retrofit adapter;
    protected static Rhizome rhizome = null;
    // Configuration written by plant(); read-back tests compare against it.
    private static TestConfiguration expected = null;

    /** The started context must expose a {@link Cutting} bean. */
    @Test
    public void getCutting() {
        Assert.assertNotNull( "Cuttings of the rhizome must always be present.",
                rhizome.getContext().getBean( Cutting.class ) );
    }

    /** Round-trips the configuration written during {@link #plant()}. */
    @Test
    public void testReadWriteConfiguration() {
        // We're assuming that data is not persisted across runs here.
        SimpleControllerAPI api = adapter.create( SimpleControllerAPI.class );
        TestConfiguration actual = api.getTestConfiguration();
        Assert.assertEquals( expected, actual );
    }

    /** The gzip endpoint must return TEST_BYTES intact (decompression verified by interceptor). */
    @Test
    public void testByteArray() throws IOException {
        SimpleControllerAPI api = adapter.create( SimpleControllerAPI.class );
        byte[] response = api.gzipTest();
        Assert.assertNotNull( response );
        Assert.assertArrayEquals( TEST_BYTES, response );
    }

    /** HTTP 418 responses are rewritten to 200 by an interceptor in plant(); body must survive. */
    @Test
    public void teapotTest() throws IOException {
        SimpleControllerAPI api = adapter.create( SimpleControllerAPI.class );
        Assert.assertEquals( "I AM A TEAPOT!", api.teapot() );
    }

    /** Smoke test: context and Jetty configurations are retrievable and serializable. */
    @Test
    public void testSimpleControllerGets() throws Exception {
        SimpleControllerAPI api = adapter.create( SimpleControllerAPI.class );
        ObjectMapper mapper = new ObjectMapper();
        logger.info( "Context configuration: {}", mapper.writeValueAsString( api.getContextConfiguration() ) );
        logger.info( "Jetty configuration: {}", mapper.writeValueAsString( api.getJettyConfiguration() ) );
    }

    /** Admin-secured endpoint must be reachable with the test JWT (call succeeding is the assertion). */
    @Test
    public void testGetAdminEndpoint() {
        SimpleControllerAPI api = adapter.create( SimpleControllerAPI.class );
        TestConfiguration actual = api.getTestConfigurationSecuredAdmin();
    }

    /** The "foo"-role endpoint must reject the test user with HTTP 403. */
    @Test(expected = RhizomeRetrofitCallException.class )
    public void testGetFooEndpoint() {
        SimpleControllerAPI api = adapter.create( SimpleControllerAPI.class );
        try {
            TestConfiguration actual = api.getTestConfigurationSecuredFoo();
        } catch ( RhizomeRetrofitCallException ex) {
            Assert.assertEquals( HttpStatus.FORBIDDEN.value(), ex.getCode());
            throw ex; // rethrow so the @Test(expected=...) contract is satisfied
        }
    }

    /** User-secured endpoint must return the configuration written in plant(). */
    @Test
    public void testGetUserEndpoint() {
        SimpleControllerAPI api = adapter.create( SimpleControllerAPI.class );
        TestConfiguration actual = api.getTestConfigurationSecuredUser();
        Assert.assertEquals( expected, actual );
    }

    /**
     * Class-level setup: authenticates against Auth0, boots the Rhizome service with the test
     * pods, and builds the shared Retrofit {@link #adapter} with verification interceptors.
     *
     * @throws Exception if authentication or service startup fails.
     */
    @BeforeClass
    public static void plant() throws Exception {
        final String jwtToken = (String) AuthenticationTest.authenticate().getCredentials();
        rhizome = new Rhizome( ConfigurationLoaderPod.class,
                Auth0Pod.class,
                Auth0SecurityTestPod.class,
                DispatcherServletsPod.class,
                RegistryBasedHazelcastInstanceConfigurationPod.class,
                TestSharedStreamSerializersPod.class);
        rhizome.sprout();
        logger.info( "Successfully started Rhizome microservice." );
        /*
         * These interceptor are finicky and order dependent. Jetty doesn't do gzip unless content is long enough so
         * only verify gzip is enabled if contentLength > 2K. Exact value is tough since it is post compression.
         */
        OkHttpClient httpClient = new OkHttpClient.Builder()
                // Interceptor 1: on any sufficiently large 2xx response, assert gzip was applied.
                .addInterceptor( chain -> {
                    Response response = chain.proceed( chain.request() );
                    int responseCode = response.code();
                    if ( responseCode >= 200 && responseCode < 300 && response.body().contentLength() > 2048 ) {
                        Assert.assertTrue( "Content encoding header must be present",
                                response.headers().names().contains( HttpHeaders.CONTENT_ENCODING ) );
                        Assert.assertEquals( "gzip", response.headers().get( HttpHeaders.CONTENT_ENCODING ) );
                    }
                    return response;
                } )
                // Interceptor 2: map 418 I'm-a-teapot to 200 so Retrofit deserializes the body.
                .addInterceptor( chain -> {
                    Response response = chain.proceed( chain.request() );
                    if ( response.code() == HttpStatus.I_AM_A_TEAPOT.value() ) {
                        Assert.assertTrue( StringUtils.startsWith( response.body().contentType().toString(),
                                MediaType.TEXT_PLAIN_VALUE ) );
                        return response.newBuilder().code( 200 ).build();
                    }
                    return response;
                } )
                // Interceptor 3: attach the Auth0 bearer token to every request.
                .addInterceptor( chain -> chain
                        .proceed( chain.request().newBuilder().addHeader( "Authorization", "Bearer " + jwtToken )
                                .build() ) )
                .build();
        adapter = new Retrofit.Builder().baseUrl( "http://localhost:8081/rhizome/api/" ).client( httpClient )
                .addConverterFactory( new RhizomeByteConverterFactory() )
                .addConverterFactory( new RhizomeJacksonConverterFactory() )
                .addCallAdapterFactory( new RhizomeCallAdapterFactory() ).build();
        SimpleControllerAPI api = adapter.create( SimpleControllerAPI.class );
        // Service starts empty; write a random configuration that read-back tests will verify.
        TestConfiguration configuration = api.getTestConfiguration();
        Assert.assertNull( configuration );
        expected = new TestConfiguration(
                new RandomStringGenerator.Builder().build().generate( 10 ),
                Optional.<String>absent() );
        TestConfiguration actual = api.setTestConfiguration( expected );
        Assert.assertEquals( expected, actual );
    }

    /** Class-level teardown: shuts down the Rhizome service and embedded Jetty. */
    @AfterClass
    public static void plow() throws BeansException, Exception {
        logger.info( "Finished testing loading servlet pod." );
        rhizome.wilt();
        logger.info( "Successfully shutdown Jetty, exiting main thread" );
    }
}
apache-2.0
kunalkushwaha/containerd
vendor/github.com/containerd/cri/pkg/server/image_status.go
3209
/*
Copyright 2017 The Kubernetes Authors.

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/

package server

import (
	"encoding/json"

	"github.com/pkg/errors"
	"github.com/sirupsen/logrus"
	"golang.org/x/net/context"
	runtime "k8s.io/kubernetes/pkg/kubelet/apis/cri/runtime/v1alpha2"

	imagestore "github.com/containerd/cri/pkg/store/image"
	imagespec "github.com/opencontainers/image-spec/specs-go/v1"
)

// ImageStatus returns the status of the image, returns nil if the image isn't present.
// TODO(random-liu): We should change CRI to distinguish image id and image spec. (See
// kubernetes/kubernetes#46255)
func (c *criService) ImageStatus(ctx context.Context, r *runtime.ImageStatusRequest) (*runtime.ImageStatusResponse, error) {
	// Resolve the requested reference against the local image store only;
	// this handler never pulls.
	image, err := c.localResolve(ctx, r.GetImage().GetImage())
	if err != nil {
		return nil, errors.Wrapf(err, "can not resolve %q locally", r.GetImage().GetImage())
	}
	if image == nil {
		// return empty without error when image not found.
		return &runtime.ImageStatusResponse{}, nil
	}
	// TODO(random-liu): [P0] Make sure corresponding snapshot exists. What if snapshot
	// doesn't exist?

	runtimeImage := toCRIRuntimeImage(image)
	// Extra info (chain ID, full image spec) is only computed when the caller
	// asked for verbose output.
	info, err := c.toCRIImageInfo(ctx, image, r.GetVerbose())
	if err != nil {
		return nil, errors.Wrap(err, "failed to generate image info")
	}

	return &runtime.ImageStatusResponse{
		Image: runtimeImage,
		Info:  info,
	}, nil
}

// toCRIRuntimeImage converts internal image object to CRI runtime.Image.
func toCRIRuntimeImage(image *imagestore.Image) *runtime.Image {
	runtimeImage := &runtime.Image{
		Id:          image.ID,
		RepoTags:    image.RepoTags,
		RepoDigests: image.RepoDigests,
		Size_:       uint64(image.Size),
	}
	// The image config "User" field may encode a uid or a username; only one
	// of Uid/Username ends up populated.
	uid, username := getUserFromImage(image.ImageSpec.Config.User)
	if uid != nil {
		runtimeImage.Uid = &runtime.Int64Value{Value: *uid}
	}
	runtimeImage.Username = username

	return runtimeImage
}

// verboseImageInfo is the payload serialized into the "info" map entry of a
// verbose ImageStatus response.
// TODO (mikebrow): discuss moving this struct and / or constants for info map for some or all of these fields to CRI
type verboseImageInfo struct {
	ChainID   string          `json:"chainID"`
	ImageSpec imagespec.Image `json:"imageSpec"`
}

// toCRIImageInfo converts internal image object information to CRI image status response info map.
// Returns (nil, nil) when verbose output was not requested.
func (c *criService) toCRIImageInfo(ctx context.Context, image *imagestore.Image, verbose bool) (map[string]string, error) {
	if !verbose {
		return nil, nil
	}

	info := make(map[string]string)

	imi := &verboseImageInfo{
		ChainID:   image.ChainID,
		ImageSpec: image.ImageSpec,
	}

	m, err := json.Marshal(imi)
	if err == nil {
		info["info"] = string(m)
	} else {
		// Marshalling failure is logged but not fatal: the error text is
		// surfaced in the info map instead of failing the whole request.
		logrus.WithError(err).Errorf("failed to marshal info %v", imi)
		info["info"] = err.Error()
	}

	return info, nil
}
apache-2.0
landeeyo/PizzaOrdering
Landeeyo.Pizza.DataAccess/Migrations/201405041219279_AddedSoftDeleteToSomeEntities.cs
1114
namespace Landeeyo.Pizza.DataAccessLayer.Migrations
{
    using System;
    using System.Data.Entity.Migrations;

    /// <summary>
    /// Entity Framework Code First migration that replaces the boolean
    /// IsActive flag on dbo.Pizza and dbo.Restaurant with nullable
    /// soft-delete timestamps (CreateDate / DeactivationDate).
    /// </summary>
    public partial class AddedSoftDeleteToSomeEntities : DbMigration
    {
        /// <summary>
        /// Applies the migration: adds the nullable timestamp columns, then
        /// drops IsActive. Existing IsActive values are discarded, so a later
        /// rollback cannot restore them.
        /// </summary>
        public override void Up()
        {
            AddColumn("dbo.Pizza", "CreateDate", c => c.DateTime());
            AddColumn("dbo.Pizza", "DeactivationDate", c => c.DateTime());
            AddColumn("dbo.Restaurant", "CreateDate", c => c.DateTime());
            AddColumn("dbo.Restaurant", "DeactivationDate", c => c.DateTime());
            DropColumn("dbo.Pizza", "IsActive");
            DropColumn("dbo.Restaurant", "IsActive");
        }

        /// <summary>
        /// Reverts the migration: re-adds the non-nullable IsActive columns
        /// and drops the timestamp columns (in reverse order of Up).
        /// </summary>
        public override void Down()
        {
            AddColumn("dbo.Restaurant", "IsActive", c => c.Boolean(nullable: false));
            AddColumn("dbo.Pizza", "IsActive", c => c.Boolean(nullable: false));
            DropColumn("dbo.Restaurant", "DeactivationDate");
            DropColumn("dbo.Restaurant", "CreateDate");
            DropColumn("dbo.Pizza", "DeactivationDate");
            DropColumn("dbo.Pizza", "CreateDate");
        }
    }
}
apache-2.0
termsuite/termsuite-core
src/main/java/fr/univnantes/termsuite/engines/splitter/CompostIndexEntry.java
2153
/*******************************************************************************
 * Copyright 2015-2016 - CNRS (Centre National de Recherche Scientifique)
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 *
 *******************************************************************************/
package fr.univnantes.termsuite.engines.splitter;

import com.google.common.base.MoreObjects;

/**
 * One entry of the Compost index: a candidate component string together with
 * flags recording where it was observed (dictionary, corpus, neo-classical
 * prefix list). Plain mutable bean; all flags default to {@code false}.
 */
public class CompostIndexEntry {

	/** The indexed component string. */
	private String text;

	/** Whether the string appears in the dictionary. */
	private boolean inDico = false;

	/** Whether the string appears in the corpus. */
	private boolean inCorpus = false;

	/** Whether the string is a known neo-classical prefix. */
	private boolean inNeoClassicalPrefix = false;

	public CompostIndexEntry() {
		super();
	}

	public String getText() {
		return text;
	}

	public void setText(String text) {
		this.text = text;
	}

	public boolean isInDico() {
		return inDico;
	}

	public void setInDico(boolean inDico) {
		this.inDico = inDico;
	}

	public boolean isInCorpus() {
		return inCorpus;
	}

	public void setInCorpus(boolean inCorpus) {
		this.inCorpus = inCorpus;
	}

	public boolean isInNeoClassicalPrefix() {
		return inNeoClassicalPrefix;
	}

	public void setInNeoClassicalPrefix(boolean inNeoClassicalPrefix) {
		this.inNeoClassicalPrefix = inNeoClassicalPrefix;
	}

	@Override
	public String toString() {
		// Same rendering as before: labeled text, then the three unlabeled flags.
		return MoreObjects.toStringHelper(this)
				.add("text", text)
				.addValue(inDico)
				.addValue(inCorpus)
				.addValue(inNeoClassicalPrefix)
				.toString();
	}
}
apache-2.0
UltraCart/rest_api_v2_sdk_csharp
src/com.ultracart.admin.v2/Model/AffiliateLedgerQuery.cs
8478
/*
 * UltraCart Rest API V2
 *
 * UltraCart REST API Version 2
 *
 * OpenAPI spec version: 2.0.0
 * Contact: support@ultracart.com
 * Generated by: https://github.com/swagger-api/swagger-codegen.git
 *
 * NOTE: this file is generated by swagger-codegen (see header above); manual
 * edits will be overwritten on regeneration.
 */

using System;
using System.Linq;
using System.IO;
using System.Text;
using System.Text.RegularExpressions;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Runtime.Serialization;
using Newtonsoft.Json;
using Newtonsoft.Json.Converters;
using System.ComponentModel.DataAnnotations;
using SwaggerDateConverter = com.ultracart.admin.v2.Client.SwaggerDateConverter;

namespace com.ultracart.admin.v2.Model
{
    /// <summary>
    /// AffiliateLedgerQuery
    /// Search criteria for affiliate ledger entries; every property is optional
    /// (null means "do not filter on this field").
    /// </summary>
    [DataContract]
    public partial class AffiliateLedgerQuery :  IEquatable<AffiliateLedgerQuery>, IValidatableObject
    {
        /// <summary>
        /// Initializes a new instance of the <see cref="AffiliateLedgerQuery" /> class.
        /// </summary>
        /// <param name="affiliateOid">Affiliate ID associated with the ledger.</param>
        /// <param name="itemId">Item id associated with the ledger entry.</param>
        /// <param name="orderId">Order ID associated with the ledger entries.</param>
        /// <param name="subId">Sub ID value passed on the click that generated the ledger.</param>
        /// <param name="transactionDtsBegin">Minimum transaction date/time to return.</param>
        /// <param name="transactionDtsEnd">Maximum transaction date/time to return.</param>
        public AffiliateLedgerQuery(int? affiliateOid = default(int?), string itemId = default(string), string orderId = default(string), string subId = default(string), string transactionDtsBegin = default(string), string transactionDtsEnd = default(string))
        {
            this.AffiliateOid = affiliateOid;
            this.ItemId = itemId;
            this.OrderId = orderId;
            this.SubId = subId;
            this.TransactionDtsBegin = transactionDtsBegin;
            this.TransactionDtsEnd = transactionDtsEnd;
        }

        /// <summary>
        /// Affiliate ID associated with the ledger
        /// </summary>
        /// <value>Affiliate ID associated with the ledger</value>
        [DataMember(Name="affiliate_oid", EmitDefaultValue=false)]
        public int? AffiliateOid { get; set; }

        /// <summary>
        /// Item id associated with the ledger entry
        /// </summary>
        /// <value>Item id associated with the ledger entry</value>
        [DataMember(Name="item_id", EmitDefaultValue=false)]
        public string ItemId { get; set; }

        /// <summary>
        /// Order ID associated with the ledger entries
        /// </summary>
        /// <value>Order ID associated with the ledger entries</value>
        [DataMember(Name="order_id", EmitDefaultValue=false)]
        public string OrderId { get; set; }

        /// <summary>
        /// Sub ID value passed on the click that generated the ledger
        /// </summary>
        /// <value>Sub ID value passed on the click that generated the ledger</value>
        [DataMember(Name="sub_id", EmitDefaultValue=false)]
        public string SubId { get; set; }

        /// <summary>
        /// Minimum transaction date/time to return
        /// </summary>
        /// <value>Minimum transaction date/time to return</value>
        [DataMember(Name="transaction_dts_begin", EmitDefaultValue=false)]
        public string TransactionDtsBegin { get; set; }

        /// <summary>
        /// Maximum transaction date/time to return
        /// </summary>
        /// <value>Maximum transaction date/time to return</value>
        [DataMember(Name="transaction_dts_end", EmitDefaultValue=false)]
        public string TransactionDtsEnd { get; set; }

        /// <summary>
        /// Returns the string presentation of the object
        /// </summary>
        /// <returns>String presentation of the object</returns>
        public override string ToString()
        {
            var sb = new StringBuilder();
            sb.Append("class AffiliateLedgerQuery {\n");
            sb.Append("  AffiliateOid: ").Append(AffiliateOid).Append("\n");
            sb.Append("  ItemId: ").Append(ItemId).Append("\n");
            sb.Append("  OrderId: ").Append(OrderId).Append("\n");
            sb.Append("  SubId: ").Append(SubId).Append("\n");
            sb.Append("  TransactionDtsBegin: ").Append(TransactionDtsBegin).Append("\n");
            sb.Append("  TransactionDtsEnd: ").Append(TransactionDtsEnd).Append("\n");
            sb.Append("}\n");
            return sb.ToString();
        }

        /// <summary>
        /// Returns the JSON string presentation of the object
        /// </summary>
        /// <returns>JSON string presentation of the object</returns>
        public virtual string ToJson()
        {
            return JsonConvert.SerializeObject(this, Formatting.Indented);
        }

        /// <summary>
        /// Returns true if objects are equal
        /// </summary>
        /// <param name="input">Object to be compared</param>
        /// <returns>Boolean</returns>
        public override bool Equals(object input)
        {
            return this.Equals(input as AffiliateLedgerQuery);
        }

        /// <summary>
        /// Returns true if AffiliateLedgerQuery instances are equal
        /// (field-by-field null-safe comparison of all six properties)
        /// </summary>
        /// <param name="input">Instance of AffiliateLedgerQuery to be compared</param>
        /// <returns>Boolean</returns>
        public bool Equals(AffiliateLedgerQuery input)
        {
            if (input == null)
                return false;

            return 
                (
                    this.AffiliateOid == input.AffiliateOid ||
                    (this.AffiliateOid != null &&
                    this.AffiliateOid.Equals(input.AffiliateOid))
                ) && 
                (
                    this.ItemId == input.ItemId ||
                    (this.ItemId != null &&
                    this.ItemId.Equals(input.ItemId))
                ) && 
                (
                    this.OrderId == input.OrderId ||
                    (this.OrderId != null &&
                    this.OrderId.Equals(input.OrderId))
                ) && 
                (
                    this.SubId == input.SubId ||
                    (this.SubId != null &&
                    this.SubId.Equals(input.SubId))
                ) && 
                (
                    this.TransactionDtsBegin == input.TransactionDtsBegin ||
                    (this.TransactionDtsBegin != null &&
                    this.TransactionDtsBegin.Equals(input.TransactionDtsBegin))
                ) && 
                (
                    this.TransactionDtsEnd == input.TransactionDtsEnd ||
                    (this.TransactionDtsEnd != null &&
                    this.TransactionDtsEnd.Equals(input.TransactionDtsEnd))
                );
        }

        /// <summary>
        /// Gets the hash code (standard swagger-codegen 41/59 accumulation
        /// over all non-null properties; consistent with Equals)
        /// </summary>
        /// <returns>Hash code</returns>
        public override int GetHashCode()
        {
            unchecked // Overflow is fine, just wrap
            {
                int hashCode = 41;
                if (this.AffiliateOid != null)
                    hashCode = hashCode * 59 + this.AffiliateOid.GetHashCode();
                if (this.ItemId != null)
                    hashCode = hashCode * 59 + this.ItemId.GetHashCode();
                if (this.OrderId != null)
                    hashCode = hashCode * 59 + this.OrderId.GetHashCode();
                if (this.SubId != null)
                    hashCode = hashCode * 59 + this.SubId.GetHashCode();
                if (this.TransactionDtsBegin != null)
                    hashCode = hashCode * 59 + this.TransactionDtsBegin.GetHashCode();
                if (this.TransactionDtsEnd != null)
                    hashCode = hashCode * 59 + this.TransactionDtsEnd.GetHashCode();
                return hashCode;
            }
        }

        /// <summary>
        /// To validate all properties of the instance (no constraints: always valid)
        /// </summary>
        /// <param name="validationContext">Validation context</param>
        /// <returns>Validation Result</returns>
        IEnumerable<System.ComponentModel.DataAnnotations.ValidationResult> IValidatableObject.Validate(ValidationContext validationContext)
        {
            yield break;
        }
    }

}
apache-2.0
afaucher/see-the-dark
core/src/com/beanfarmergames/seethedark/game/Emission.java
443
package com.beanfarmergames.seethedark.game; import com.badlogic.gdx.math.Vector2; //Immutable public class Emission { public Vector2 source = null; public Vector2 dest = null; public float power = 0; // TODO: In the future we would want things like 'spectrum' here public Emission(Vector2 source, Vector2 dest, float power) { this.source = source; this.dest = dest; this.power = power; } }
apache-2.0
rjptegelaar/liquid-os
src/main/java/com/pte/liquid/Liquid.java
2745
//Copyright 2015 Paul Tegelaar // //Licensed under the Apache License, Version 2.0 (the "License"); //you may not use this file except in compliance with the License. //You may obtain a copy of the License at // //http://www.apache.org/licenses/LICENSE-2.0 // //Unless required by applicable law or agreed to in writing, software //distributed under the License is distributed on an "AS IS" BASIS, //WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. //See the License for the specific language governing permissions and //limitations under the License. package com.pte.liquid; import org.apache.commons.daemon.Daemon; import org.apache.commons.daemon.DaemonContext; import org.apache.commons.daemon.DaemonInitException; import org.apache.log4j.Logger; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.context.ConfigurableApplicationContext; import org.springframework.context.annotation.ImportResource; import org.springframework.context.support.AbstractApplicationContext; @SpringBootApplication @ImportResource({"broker-application-context.xml","storage-application-context.xml"}) public class Liquid implements Daemon{ private static ConfigurableApplicationContext ctx; private final static Logger logger = Logger.getLogger(Liquid.class); public static void main(String[] args) { buildUp(args); } @Override public void destroy() { tearDown(); } @Override public void init(DaemonContext context) throws DaemonInitException, Exception { logger.info("Initializing liquid daemon"); buildUp(context.getArguments()); } @Override public void start() throws Exception { logger.info("Start liquid daemon"); } @Override public void stop() throws Exception { logger.info("Stopped liquid daemon"); tearDown(); } public static void startService(String[] args) throws Exception { logger.info("Start liquid serivce"); buildUp(args); } public static void stopService(String[] args) throws 
Exception { logger.info("Stopped liquid serivce"); tearDown(); } private static void buildUp(String[] args) { logger.info("Starting context..."); logger.info("Working Directory = " + System.getProperty("user.dir")); ctx = SpringApplication.run(Liquid.class, args); logger.info("Done starting context..."); } private static void tearDown() { logger.info("Stopping context..."); ((AbstractApplicationContext) ctx).stop(); ((AbstractApplicationContext) ctx).close(); ((AbstractApplicationContext) ctx).destroy(); logger.info("Context stopped..."); } }
apache-2.0
markjolah/MexIFace
test/VMC_IFace.cpp
6735
/** @file TestArmadilloIFace.cpp * @author Mark J. Olah (mjo\@cs.unm.edu) * @date 2018-2019 */ #include <omp.h> #include <functional> #include "TestArmadillo.h" #include "MexIFace/MexIFace.h" /* vector, matrix, cube test */ class TestVMC { public: using VecT = arma::Col<double>; using MatT = arma::Mat<double>; using CubeT = arma::Cube<double>; using StatsT = std::map<std::string,double>; TestVMC(VecT v, MatT m, CubeT c) : v(v), m(m), c(c) {} void set_vec(const VecT &v_) { v = v_; } void set_mat(const MatT &m_) { m = m_; } void set_cube(const CubeT &c_) { c = c_; } const VecT& get_vec() { return v; } const MatT& get_mat() { return m; } const CubeT& get_cube() { return c; } void add_vec(const VecT &v_) { v+=v_; } void add_mat(const MatT &m_) { m+=m_; } void add_cube(const CubeT &c_) { c+=c_; } MatT solve_mat(const MatT &B) { return arma::solve(m,B); } void svd_mat(MatT &U, VecT &s, MatT &V) const { arma::svd(U,s,V,m); } StatsT get_stats() { StatsT stats; stats["v.n_elem"]=v.n_elem; stats["m.n_rows"]=m.n_rows; stats["m.n_cols"]=m.n_cols; stats["c.n_rows"]=c.n_rows; stats["c.n_cols"]=c.n_cols; stats["c.n_slices"]=c.n_slices; return stats; } private: VecT v; MatT m; CubeT c; }; /* Testing interface * Add more methods as needed to achieve full testing coverage. 
* Currently tests * - Object constriction and data storage * - vector/matric/cube argument passing * - Dictionary (std::map) arguments * - Static methods */ class VMC_IFace : public mexiface::MexIFace, public mexiface::MexIFaceHandler<TestVMC> { public: VMC_IFace(); private: using VecT=typename TestVMC::VecT; using MatT=typename TestVMC::MatT; using CubeT=typename TestVMC::CubeT; void objConstruct(); void objGetVec(); void objGetMat(); void objGetCube(); void objGet(); void objSetVec(); void objSetMat(); void objSetCube(); void objSet(); void objAdd(); void objSolve(); void objSolveOMP(); void objSvd(); void objGetStats(); /* static methods */ void staticVecSum(); void staticMatProd(); }; VMC_IFace::VMC_IFace() { methodmap["getVec"] = std::bind(&VMC_IFace::objGetVec, this); methodmap["getMat"] = std::bind(&VMC_IFace::objGetMat, this); methodmap["getCube"] = std::bind(&VMC_IFace::objGetCube, this); methodmap["get"] = std::bind(&VMC_IFace::objGet, this); methodmap["setVec"] = std::bind(&VMC_IFace::objGetVec, this); methodmap["setMat"] = std::bind(&VMC_IFace::objGetMat, this); methodmap["setCube"] = std::bind(&VMC_IFace::objGetCube, this); methodmap["set"] = std::bind(&VMC_IFace::objGet, this); methodmap["add"] = std::bind(&VMC_IFace::objAdd, this); methodmap["solve"] = std::bind(&VMC_IFace::objSolve, this); methodmap["solveOMP"] = std::bind(&VMC_IFace::objSolveOMP, this); methodmap["svd"] = std::bind(&VMC_IFace::objSvd, this); methodmap["getStats"] = std::bind(&VMC_IFace::objGetStats, this); staticmethodmap["vecSum"] = std::bind(&VMC_IFace::staticVecSum, this); staticmethodmap["matProd"] = std::bind(&VMC_IFace::staticMatProd, this); } void VMC_IFace::objConstruct() { checkNumArgs(1,3); //(#out, #in) auto v = getVec(); auto m = getMat(); auto c = getCube(); outputHandle(new TestVMC(v,m,c)); } void VMC_IFace::objGetVec() { checkNumArgs(1,0); //(#out, #in) output(obj->get_vec()); } void VMC_IFace::objGetMat() { checkNumArgs(1,0); //(#out, #in) output(obj->get_mat()); } 
void VMC_IFace::objGetCube() { checkNumArgs(1,0); //(#out, #in) output(obj->get_cube()); } void VMC_IFace::objGet() { checkMaxNumArgs(3,0); //(#out, #in) if(nlhs>0) output(obj->get_vec()); if(nlhs>1) output(obj->get_mat()); if(nlhs>2) output(obj->get_cube()); } void VMC_IFace::objSetVec() { checkNumArgs(0,1); //(#out, #in) obj->set_vec(getVec()); } void VMC_IFace::objSetMat() { checkNumArgs(0,1); //(#out, #in) obj->set_mat(getMat()); } void VMC_IFace::objSetCube() { checkNumArgs(0,1); //(#out, #in) obj->set_cube(getCube()); } void VMC_IFace::objSet() { checkMinNumArgs(0,1); //(#out, #in) checkMaxNumArgs(0,3); //(#out, #in) obj->set_vec(getVec()); if(nrhs>1) obj->set_mat(getMat()); if(nrhs>2) obj->set_cube(getCube()); } void VMC_IFace::objAdd() { checkMinNumArgs(0,1); //(#out, #in) checkMaxNumArgs(3,3); //(#out, #in) obj->add_vec(getVec()); if(nrhs>1) obj->add_mat(getMat()); if(nrhs>2) obj->add_cube(getCube()); //output if(nlhs>0) output(obj->get_vec()); if(nlhs>1) output(obj->get_mat()); if(nlhs>2) output(obj->get_cube()); } void VMC_IFace::objSolve() { checkNumArgs(1,1); //(#out, #in) const auto &m = obj->get_mat(); auto N = m.n_rows; auto B = getMat(); if(N!=B.n_rows) error("svd","BadShape","m and B must have same number of rows"); output(arma::solve(m,B).eval()); } void VMC_IFace::objSolveOMP() { checkNumArgs(1,1); //(#out, #in) const auto &m = obj->get_mat(); auto N = m.n_rows; auto B = getCube(); auto X = makeOutputArray(B.n_rows,B.n_cols,B.n_slices); if(N!=B.n_rows) error("svd","BadShape","m and B must have same number of rows"); #pragma omp parallel { MatT x; #pragma omp for for(arma::uword i=0; i<B.n_slices; i++){ arma::solve(x,m,B.slice(i)); if(x.is_empty()) X.slice(i).zeros(); else X.slice(i) = x; } } } void VMC_IFace::objSvd() { checkNumArgs(3,1); //(#out, #in) const auto &m = obj->get_mat(); auto N = m.n_rows; if(m.n_cols != N) error("svd","BadShape","m is not square"); MatT U(N,N), V(N,N); VecT s(N); arma::svd(U,s,V,m); if(U.is_empty()) 
error("svd","NumericalErrror","SVD failure"); output(U); output(s); output(V); } void VMC_IFace::objGetStats() { output(obj->get_stats()); } void VMC_IFace::staticVecSum() { checkNumArgs(1,2); //(#out, #in) auto a = getVec(); auto b = getVec(); if(a.n_elem!=b.n_elem) error("vecSum","BadSize","#elem must match"); output((a+b).eval()); } void VMC_IFace::staticMatProd() { checkNumArgs(1,2); //(#out, #in) auto A = getMat(); auto B = getMat(); if(A.n_cols!=B.n_rows) error("matProd","BadSize","#cols must match #rows"); auto C = makeOutputArray(A.n_rows,A.n_cols); C=A*B; } VMC_IFace iface; /**< Global iface object provides a iface.mexFunction */ void mexFunction(int nlhs, mxArray *lhs[], int nrhs, const mxArray *rhs[]) { iface.mexFunction(nlhs, lhs, nrhs, rhs); }
apache-2.0
UXAspects/UXAspects
docs/app/pages/components/components-sections/input-controls/checkbox/snippets/app.ts
327
// Demo component backing the UX Aspects checkbox documentation snippet.
import { Component } from '@angular/core';

@Component({
    selector: 'app',
    templateUrl: './app.component.html'
})
export class AppComponent {

    // Checked state for each checkbox shown in the template,
    // keyed by option name.
    checkModel = {
        option1: true,
        option2: false,
        option3: false,
        option4: false
    };

    // Sentinel value for the indeterminate (mixed) checkbox state —
    // presumably compared against the bound model value; TODO confirm
    // against app.component.html.
    indeterminateValue = -1;

    // When true the checkboxes are disabled — NOTE(review): assumed to be
    // bound to a disable toggle in the template; verify against
    // app.component.html.
    disableCheck = false;
}
apache-2.0
dheraclio/dependometer
dependometer-core/src/main/java/com/valtech/source/dependometer/app/controller/layer/DispatcherLayerCycleParticipationCollectedEvent.java
683
// GENERATED FILE! // VERSION-INFO: com.valtech.source.ag.evf.codegen.Dispatcher // DATE/TIME-INFO: 17.05.04 09:36:43 package com.valtech.source.dependometer.app.controller.layer; import com.valtech.source.ag.evf.Dispatcher; import com.valtech.source.ag.evf.EventIf; final class DispatcherLayerCycleParticipationCollectedEvent extends Dispatcher { protected void dispatch(Object handler, EventIf event) { assert handler != null; assert event != null; ((HandleLayerCycleParticipationCollectedEventIf)handler) .handleEvent((com.valtech.source.dependometer.app.controller.layer.LayerCycleParticipationCollectedEvent)event); } }
apache-2.0