code stringlengths 3 1.05M | repo_name stringlengths 4 116 | path stringlengths 4 991 | language stringclasses 9
values | license stringclasses 15
values | size int32 3 1.05M |
|---|---|---|---|---|---|
/*
* SessionDirty.cpp
*
* Copyright (C) 2009-12 by RStudio, Inc.
*
* Unless you have received this program directly from RStudio pursuant
* to the terms of a commercial license agreement with RStudio, then
* this program is licensed to you under the terms of version 3 of the
* GNU Affero General Public License. This program is distributed WITHOUT
* ANY EXPRESS OR IMPLIED WARRANTY, INCLUDING THOSE OF NON-INFRINGEMENT,
* MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. Please refer to the
* AGPL (http://www.gnu.org/licenses/agpl-3.0.txt) for more details.
*
*/
#include "SessionDirty.hpp"
#include <algorithm>
#include <boost/bind.hpp>
#include <boost/format.hpp>
#include <boost/utility.hpp>
#include <core/Error.hpp>
#include <core/Log.hpp>
#include <core/Exec.hpp>
#include <core/FilePath.hpp>
#include <core/json/JsonRpc.hpp>
#include <r/RExec.hpp>
#include <r/RRoutines.hpp>
#include <r/RErrorCategory.hpp>
#include <r/session/RSession.hpp>
#include <session/SessionModuleContext.hpp>
#include <session/SessionUserSettings.hpp>
#include <session/SessionSourceDatabase.hpp>
using namespace rstudio::core ;
using namespace rstudio::r::sexp;
using namespace rstudio::r::exec;
namespace rstudio {
namespace session {
namespace modules {
namespace dirty {
namespace {
// last save action.
// NOTE: we don't persist this (or the workspace dirty state) during suspends in
// server mode. this means that if you are ever suspended then you will always
// end up with a 'dirty' workspace. not a big deal considering how infrequently
// quit occurs in server mode.
// TODO: this now affects switching projects after a suspend. we should try
// to figure out how to preserve dirty state of the workspace across suspend
int s_lastSaveAction = r::session::kSaveActionAsk;
// list of dirty documents (if it's empty then no document save is required)
std::set<std::string> s_dirtyDocuments;
// settings keys used by onSuspend/onResume to persist the last save action
// and the image dirty flag across a suspend/resume cycle
const char * const kSaveActionState = "saveActionState";
const char * const kImageDirtyState = "imageDirtyState";
// recompute whether quitting should prompt the user to save, based on the
// workspace save action and the set of dirty source documents, and push
// the result into the active session
void updateSavePromptRequired()
{
   // prompt when the workspace save action is still "ask the user"...
   const bool workspacePrompt =
         (s_lastSaveAction == r::session::kSaveActionAsk);

   // ...or when at least one open document has unsaved changes
   const bool documentPrompt = !s_dirtyDocuments.empty();

   module_context::activeSession().setSavePromptRequired(
         workspacePrompt || documentPrompt);
}
// source database callback: a document changed -- track its dirty state
void onDocUpdated(boost::shared_ptr<source_database::SourceDocument> pDoc)
{
   // documents with no path are not tracked for save prompts
   if (pDoc->path().empty())
      return;

   // add the id when dirty, remove it when clean; insert().second /
   // erase() tell us whether set membership actually changed (equivalent
   // to the size-before/size-after comparison)
   bool membershipChanged = pDoc->dirty()
         ? s_dirtyDocuments.insert(pDoc->id()).second
         : (s_dirtyDocuments.erase(pDoc->id()) > 0);

   // only recompute the prompt state when something actually changed
   if (membershipChanged)
      updateSavePromptRequired();
}
// source database callback: a document was removed/closed -- it can no
// longer require saving, so drop it from the dirty set
void onDocRemoved(const std::string& id)
{
s_dirtyDocuments.erase(id);
// recompute unconditionally (erase may or may not have changed the set)
updateSavePromptRequired();
}
// source database callback: all documents removed -- reset dirty tracking
void onRemoveAll()
{
s_dirtyDocuments.clear();
updateSavePromptRequired();
}
void handleSaveActionChanged()
{
// update savePromptRequired
updateSavePromptRequired();
// enque event to client
json::Object saveAction;
saveAction["action"] = s_lastSaveAction;
ClientEvent event(client_events::kSaveActionChanged, saveAction);
module_context::enqueClientEvent(event);
}
void checkForSaveActionChanged()
{
// compute current save action
int currentSaveAction = r::session::imageIsDirty() ?
module_context::saveWorkspaceAction() :
r::session::kSaveActionNoSave;
// compare and fire event if necessary
if (s_lastSaveAction != currentSaveAction)
{
s_lastSaveAction = currentSaveAction;
handleSaveActionChanged();
}
}
// suspend handler: snapshot the last save action and the image dirty flag
// into settings so onResume can restore them
void onSuspend(const r::session::RSuspendOptions&, Settings* pSettings)
{
pSettings->set(kSaveActionState, s_lastSaveAction);
pSettings->set(kImageDirtyState, r::session::imageIsDirty());
}
// resume handler: restore persisted state; defaults are conservative when
// no persisted values exist (prompt the user, assume the image is dirty)
void onResume(const Settings& settings)
{
s_lastSaveAction = settings.getInt(kSaveActionState,
r::session::kSaveActionAsk);
r::session::setImageDirty(settings.getBool(kImageDirtyState, true));
// propagate the restored state to the session and the client
handleSaveActionChanged();
}
// client init: send the current save action so a (re)connecting client
// starts with the correct state
void onClientInit()
{
// enque save action changed
handleSaveActionChanged();
}
// change-detection hook: any change may have altered the R image's dirty
// state, so re-evaluate the save action. The change source itself is not
// consulted; the parameter is left unnamed to avoid an unused-parameter
// warning while keeping the signature required by the event signal.
void onDetectChanges(module_context::ChangeSource)
{
   // check for save action changed
   checkForSaveActionChanged();
}
} // anonymous namespace
// module entry point: wire up suspend/resume persistence, client/module
// events, and the source database events that drive dirty-doc tracking.
// Returns Success() unconditionally.
Error initialize()
{
// add suspend handler
using namespace session::module_context;
addSuspendHandler(SuspendHandler(onSuspend, onResume));
// subscribe to events
using boost::bind;
module_context::events().onClientInit.connect(bind(onClientInit));
module_context::events().onDetectChanges.connect(bind(onDetectChanges, _1));
source_database::events().onDocUpdated.connect(onDocUpdated);
source_database::events().onDocRemoved.connect(onDocRemoved);
source_database::events().onRemoveAll.connect(onRemoveAll);
return Success();
}
} // namespace dirty
} // namespace modules
} // namespace session
} // namespace rstudio
| piersharding/rstudio | src/cpp/session/modules/SessionDirty.cpp | C++ | agpl-3.0 | 5,263 |
<?php
if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point');
/*********************************************************************************
* SugarCRM is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004-2010 SugarCRM Inc.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo. If the display of the logo is not reasonably feasible for
* technical reasons, the Appropriate Legal Notices must display the words
* "Powered by SugarCRM".
********************************************************************************/
require_once('include/generic/SugarWidgets/SugarWidgetSubPanelTopButton.php');
require_once('include/generic/SugarWidgets/SugarWidgetReportField.php');
require_once('include/database/DBHelper.php');
/**
 * LayoutManager
 *
 * Factory/dispatcher that maps generic widget definition arrays
 * ($widget_def) to SugarWidget classes and delegates rendering
 * (display / query / input display) to the resolved widget instance.
 */
class LayoutManager
{
// shared attribute bag passed between caller and widgets (e.g. 'context')
var $defs = array();
// prefix prepended to every resolved widget class name
var $widget_prefix = 'SugarWidget';
// default widget suffix -> "SugarWidgetField" when nothing else matches
var $default_widget_name = 'Field';
// DB helper used to derive a field type for default widget resolution
var $DBHelper;
/**
 * PHP4-style (class-name) constructor, per this codebase's convention.
 */
function LayoutManager()
{
// set a sane default for context
$this->defs['context'] = 'Detail';
$this->DBHelper = $GLOBALS['db']->getHelper();
}
/**
 * Store a named attribute (by value).
 * @param string $key
 * @param mixed $value
 */
function setAttribute($key,$value)
{
$this->defs[$key] = $value;
}
/**
 * Store a named attribute from a by-reference parameter.
 * NOTE(review): the assignment below copies the value, so the stored
 * attribute is not kept as a live reference despite the method name --
 * confirm callers don't rely on pointer semantics here.
 * @param string $key
 * @param mixed $value
 */
function setAttributePtr($key,&$value)
{
$this->defs[$key] = $value;
}
/**
 * Fetch a named attribute, or null when it was never set.
 * @param string $key
 * @return mixed|null
 */
function getAttribute($key)
{
if ( isset($this->defs[$key]))
{
return $this->defs[$key];
} else {
return null;
}
}
// Take the class name from the widget definition and use the class to look it up
// $use_default will default classes to SugarWidgetFieldxxxxx
function getClassFromWidgetDef($widget_def, $use_default = false)
{
// map of well-known subpanel "top button" pseudo class names to the
// constructor-argument arrays used to instantiate them
static $class_map = array(
'SugarWidgetSubPanelTopCreateButton' => array(
'widget_class'=>'SugarWidgetSubPanelTopButton',
'title'=>'LBL_NEW_BUTTON_TITLE',
'access_key'=>'LBL_NEW_BUTTON_KEY',
'form_value'=>'LBL_NEW_BUTTON_LABEL',
'ACL'=>'edit',
),
'SugarWidgetSubPanelTopButtonQuickCreate' => array(
'widget_class'=>'SugarWidgetSubPanelTopButtonQuickCreate',
'title'=>'LBL_NEW_BUTTON_TITLE',
'access_key'=>'LBL_NEW_BUTTON_KEY',
'form_value'=>'LBL_NEW_BUTTON_LABEL',
'ACL'=>'edit',
),
'SugarWidgetSubPanelTopScheduleMeetingButton' => array(
'widget_class'=>'SugarWidgetSubPanelTopScheduleMeetingButton',
'module'=>'Meetings',
'title'=>'LBL_NEW_BUTTON_TITLE',
'access_key'=>'LBL_NEW_BUTTON_KEY',
'form_value'=>'LNK_NEW_MEETING',
'ACL'=>'edit',
),
'SugarWidgetSubPanelTopScheduleCallButton' => array(
'widget_class'=>'SugarWidgetSubPanelTopScheduleCallButton',
'module'=>'Calls',
'title'=>'LBL_NEW_BUTTON_TITLE',
'access_key'=>'LBL_NEW_BUTTON_KEY',
'form_value'=>'LNK_NEW_CALL',
'ACL'=>'edit',
),
'SugarWidgetSubPanelTopCreateTaskButton' => array(
'widget_class'=>'SugarWidgetSubPanelTopCreateTaskButton',
'module'=>'Tasks',
'title'=>'LBL_NEW_BUTTON_TITLE',
'access_key'=>'LBL_NEW_BUTTON_KEY',
'form_value'=>'LNK_NEW_TASK',
'ACL'=>'edit',
),
'SugarWidgetSubPanelTopCreateNoteButton' => array(
'widget_class'=>'SugarWidgetSubPanelTopCreateNoteButton',
'module'=>'Notes',
'title'=>'LBL_NEW_BUTTON_TITLE',
'access_key'=>'LBL_NEW_BUTTON_KEY',
'form_value'=>'LNK_NEW_NOTE',
'ACL'=>'edit',
),
'SugarWidgetSubPanelTopCreateContactAccountButton' => array(
'widget_class'=>'SugarWidgetSubPanelTopButton',
'module'=>'Contacts',
'title'=>'LBL_NEW_BUTTON_TITLE',
'access_key'=>'LBL_NEW_BUTTON_KEY',
'form_value'=>'LBL_NEW_BUTTON_LABEL',
'additional_form_fields' => array(
'primary_address_street' => 'shipping_address_street',
'primary_address_city' => 'shipping_address_city',
'primary_address_state' => 'shipping_address_state',
'primary_address_country' => 'shipping_address_country',
'primary_address_postalcode' => 'shipping_address_postalcode',
'to_email_addrs' => 'email1'
),
'ACL'=>'edit',
),
'SugarWidgetSubPanelTopCreateContact' => array(
'widget_class'=>'SugarWidgetSubPanelTopButton',
'module'=>'Contacts',
'title'=>'LBL_NEW_BUTTON_TITLE',
'access_key'=>'LBL_NEW_BUTTON_KEY',
'form_value'=>'LBL_NEW_BUTTON_LABEL',
'additional_form_fields' => array(
'account_id' => 'account_id',
'account_name' => 'account_name',
),
'ACL'=>'edit',
),
'SugarWidgetSubPanelTopCreateRevisionButton'=> array(
'widget_class'=>'SugarWidgetSubPanelTopButton',
'module'=>'DocumentRevisions',
'title'=>'LBL_NEW_BUTTON_TITLE',
'access_key'=>'LBL_NEW_BUTTON_KEY',
'form_value'=>'LBL_NEW_BUTTON_LABEL',
'additional_form_fields' => array(
'parent_name'=>'document_name',
'document_name' => 'document_name',
'document_revision' => 'latest_revision',
'document_filename' => 'filename',
'document_revision_id' => 'document_revision_id',
),
'ACL'=>'edit',
),
'SugarWidgetSubPanelTopCreateDirectReport' => array(
'widget_class'=>'SugarWidgetSubPanelTopButton',
'module'=>'Contacts',
'title'=>'LBL_NEW_BUTTON_TITLE',
'access_key'=>'LBL_NEW_BUTTON_KEY',
'form_value'=>'LBL_NEW_BUTTON_LABEL',
'additional_form_fields' => array(
'reports_to_name' => 'name',
'reports_to_id' => 'id',
),
'ACL'=>'edit',
),
'SugarWidgetSubPanelTopSelectFromReportButton' => array(
'widget_class'=>'SugarWidgetSubPanelTopSelectButton',
'module'=>'Reports',
'title'=>'LBL_SELECT_REPORTS_BUTTON_LABEL',
'access_key'=>'LBL_SELECT_BUTTON_KEY',
'form_value'=>'LBL_SELECT_REPORTS_BUTTON_LABEL',
'ACL'=>'edit',
'add_to_passthru_data'=>array (
'return_type'=>'report',
)
),
'SugarWidgetSubPanelAddToProspectListButton' => array(
'widget_class'=>'SugarWidgetSubPanelTopSelectButton',
'module'=>'ProspectLists',
'title'=>'LBL_ADD_TO_PROSPECT_LIST_BUTTON_LABEL',
'access_key'=>'LBL_ADD_TO_PROSPECT_LIST_BUTTON_KEY',
'form_value'=>'LBL_ADD_TO_PROSPECT_LIST_BUTTON_LABEL',
'ACL'=>'edit',
'add_to_passthru_data'=>array (
'return_type'=>'addtoprospectlist',
'parent_module'=>'ProspectLists',
'parent_type'=>'ProspectList',
'child_id'=>'target_id',
'link_attribute'=>'target_type',
'link_type'=>'polymorphic', //polymorphic or default
)
),
);
// bug 27426: force specific widget classes for multienum/bool field types
$fieldDef = $this->getFieldDef($widget_def);
if(!empty($fieldDef) && !empty($fieldDef['type']) && strtolower(trim($fieldDef['type'])) == 'multienum'){
$widget_def['widget_class'] = 'Fieldmultienum';
}
if(!empty($fieldDef) && !empty($fieldDef['type']) && strtolower(trim($fieldDef['type'])) == 'bool'){
$widget_def['widget_class'] = 'Fieldbool';
}
// when defaulting, derive the widget class from the DB field type
// (assigned_user_id is special-cased to the user-name widget)
if($use_default) {
switch($widget_def['name']) {
case 'assigned_user_id':
$widget_def['widget_class'] = 'Fielduser_name';
break;
default:
$widget_def['widget_class'] = 'Field' . $this->DBHelper->getFieldType($widget_def);
}
}
if(!empty($widget_def['name']) && $widget_def['name'] == 'team_set_id'){
$widget_def['widget_class'] = 'Fieldteam_set_id';
}
if(empty($widget_def['widget_class']))
{
// Default the class to SugarWidgetField
$class_name = $this->widget_prefix.$this->default_widget_name;
}
else
{
# --- tracy: intended to override field type of customized phone fields;
# this is to fix Report issues when custom phone field is used
if ($widget_def['widget_class'] == 'Fieldfunction') {
$class_name = $this->widget_prefix.'Fieldphone';
} else {
$class_name = $this->widget_prefix.$widget_def['widget_class'];
}
# ---
}
// Check to see if this is one of the known class mappings.
if(!empty($class_map[$class_name]))
{
if (empty($class_map[$class_name]['widget_class'])) {
$widget = new SugarWidgetSubPanelTopButton($class_map[$class_name]);
} else {
if (!class_exists($class_map[$class_name]['widget_class'])) {
require_once('include/generic/SugarWidgets/'.$class_map[$class_name]['widget_class'].'.php');
}
$widget = new $class_map[$class_name]['widget_class']($class_map[$class_name]);
}
return $widget;
}
// At this point, we have a class name and we do not have a valid class defined.
if(!class_exists($class_name))
{
// The class does not exist. Try including it.
// custom/ takes precedence over the stock widget directory
if (file_exists('custom/include/generic/SugarWidgets/'.$class_name.'.php'))
require_once('custom/include/generic/SugarWidgets/'.$class_name.'.php');
else if (file_exists('include/generic/SugarWidgets/'.$class_name.'.php'))
require_once('include/generic/SugarWidgets/'.$class_name.'.php');
if(!class_exists($class_name))
{
// If we still do not have a class, oops....
die("LayoutManager: Class not found:".$class_name);
}
}
$widget = new $class_name($this); // cache disabled $this->getClassFromCache($class_name);
return $widget;
}
// bug 27426: resolve the vardef entry for the widget's field, caching one
// bean instance per module for the lifetime of the request
function getFieldDef($widget_def){
static $beanCache;
if(!empty($widget_def['module']) &&!empty($GLOBALS['beanList'][$widget_def['module']]) && !empty($GLOBALS['beanFiles'][$GLOBALS['beanList'][$widget_def['module']]])){
if (!isset($beanCache[$widget_def['module']])){
$beanCache[$widget_def['module']] = new $GLOBALS['beanList'][$widget_def['module']]();
}
$bean = $beanCache[$widget_def['module']];
if(!empty($widget_def['name']) && !empty($bean->field_name_map) &&!empty($bean->field_name_map[$widget_def['name']]) ){
return $bean->field_name_map[$widget_def['name']];
}
}
return null;
}
/**
 * Resolve the widget for $widget_def and return its display() output.
 * @param array $widget_def
 * @param bool $use_default default the widget class from the DB field type
 * @return string rendered HTML
 */
function widgetDisplay($widget_def, $use_default = false)
{
$theclass = $this->getClassFromWidgetDef($widget_def, $use_default);
$label = isset($widget_def['module']) ? $widget_def['module'] : '';
if (is_subclass_of($theclass, 'SugarWidgetSubPanelTopButton')) {
$label = $theclass->get_subpanel_relationship_name($widget_def);
}
$theclass->setWidgetId($label);
//#27426: multienum widgets need the merged field def and module name
$fieldDef = $this->getFieldDef($widget_def);
if(!empty($fieldDef) && !empty($fieldDef['type']) && strtolower(trim($fieldDef['type'])) == 'multienum'){
$widget_def['fields'] = sugarArrayMerge($widget_def['fields'] , $fieldDef);
$widget_def['fields']['module'] = $label;
}
//end
return $theclass->display($widget_def);
}
/**
 * Resolve the widget for $widget_def and return its query() output.
 * @param array $widget_def
 * @param bool $use_default
 * @return mixed
 */
function widgetQuery($widget_def, $use_default = false)
{
$theclass = $this->getClassFromWidgetDef($widget_def, $use_default);
// _pp($theclass);
return $theclass->query($widget_def);
}
// display an input field
// module is the parent module of the def
function widgetDisplayInput($widget_def, $use_default = false)
{
$theclass = $this->getClassFromWidgetDef($widget_def, $use_default);
return $theclass->displayInput($widget_def);
}
}
?>
| MarStan/sugar_work | include/generic/LayoutManager.php | PHP | agpl-3.0 | 12,373 |
<?php
/*-------------------------------------------------------+
| PHP-Fusion Content Management System
| Copyright (C) PHP-Fusion Inc
| https://www.php-fusion.co.uk/
+--------------------------------------------------------+
| Filename: gallery.php
| Author: PHP-Fusion Development Team
| Co-Author: PHP-Fusion Development Team
+--------------------------------------------------------+
| This program is released as free software under the
| Affero GPL license. You can redistribute it and/or
| modify it under the terms of this license which you
| can read by viewing the included agpl.txt or online
| at www.gnu.org/licenses/agpl.html. Removal of this
| copyright header is strictly prohibited without
| written permission from the original author(s).
+--------------------------------------------------------*/
/*
 * Gallery front controller. Depending on request parameters this renders:
 *  - a single photo page  (?photo_id=N)
 *  - a single album page  (?album_id=N)
 *  - the album index      (no parameters)
 */
require_once file_exists('maincore.php') ? 'maincore.php' : __DIR__."/../../maincore.php";
if (!db_exists(DB_PHOTO_ALBUMS)) {
redirect(BASEDIR."error.php?code=404");
}
require_once THEMES."templates/header.php";
$locale = fusion_get_locale('', GALLERY_LOCALE);
include INFUSIONS."gallery/templates/gallery.php";
require_once INCLUDES."infusions_include.php";
$gallery_settings = get_settings("gallery");
if (!defined('SAFEMODE')) {
define("SAFEMODE", @ini_get("safe_mode") ? TRUE : FALSE);
}
/* View Photo */
if (isset($_GET['photo_id']) && isnum($_GET['photo_id'])) {
include INCLUDES."comments_include.php";
include INCLUDES."ratings_include.php";
add_to_jquery("$('a.photogallery_photo_link').colorbox({width:'80%', height:'80%', photo:true});");
// photo + album + owner + aggregated ratings/comment count, gated by
// the album's access group
$result = dbquery("SELECT tp.*, ta.album_id, ta.album_title, ta.album_access, ta.album_keywords,
tu.user_id, tu.user_name, tu.user_status,
SUM(tr.rating_vote) AS sum_rating, COUNT(tr.rating_item_id) AS count_votes,
count(tc.comment_id) AS comment_count
FROM ".DB_PHOTOS." tp
LEFT JOIN ".DB_PHOTO_ALBUMS." ta USING (album_id)
LEFT JOIN ".DB_USERS." tu ON tp.photo_user=tu.user_id
LEFT JOIN ".DB_RATINGS." tr ON tr.rating_item_id = tp.photo_id AND tr.rating_type='P'
LEFT JOIN ".DB_COMMENTS." tc ON tc.comment_item_id=tp.photo_id AND comment_type='P'
WHERE ".groupaccess('album_access')." AND photo_id='".intval($_GET['photo_id'])."' GROUP BY tp.photo_id");
$info = array();
if (dbrows($result) > 0) {
$data = dbarray($result);
/* Declaration */
// bump the view counter (result intentionally unused, overwrites $result)
$result = dbquery("UPDATE ".DB_PHOTOS." SET photo_views=(photo_views+1) WHERE photo_id='".$_GET['photo_id']."'");
// prev/next/first/last photo ids within this album, by photo_order
$pres = dbquery("SELECT photo_id FROM ".DB_PHOTOS." WHERE photo_order='".($data['photo_order'] - 1)."' AND album_id='".$data['album_id']."'");
$nres = dbquery("SELECT photo_id FROM ".DB_PHOTOS." WHERE photo_order='".($data['photo_order'] + 1)."' AND album_id='".$data['album_id']."'");
$fres = dbquery("SELECT photo_id FROM ".DB_PHOTOS." WHERE photo_order='1' AND album_id='".$data['album_id']."'");
$lastres = dbresult(dbquery("SELECT MAX(photo_order) FROM ".DB_PHOTOS." WHERE album_id='".$data['album_id']."'"), 0);
$lres = dbquery("SELECT photo_id FROM ".DB_PHOTOS." WHERE photo_order>='".$lastres."' AND album_id='".$data['album_id']."'");
if (dbrows($pres)) {
$prev = dbarray($pres);
}
if (dbrows($nres)) {
$next = dbarray($nres);
}
if (dbrows($fres)) {
$first = dbarray($fres);
}
if (dbrows($lres)) {
$last = dbarray($lres);
}
add_to_head("<link rel='stylesheet' href='".INCLUDES."jquery/colorbox/colorbox.css' type='text/css' media='screen' />");
add_to_head("<script type='text/javascript' src='".INCLUDES."jquery/colorbox/jquery.colorbox.js'></script>");
set_title($data['photo_title'].$locale['global_201']);
add_to_title(\PHPFusion\SiteLinks::get_current_SiteLinks(INFUSIONS.'gallery/gallery.php', "link_name"));
\PHPFusion\BreadCrumbs::getInstance()->addBreadCrumb([
'link' => INFUSIONS."gallery/gallery.php?album_id=".$data['album_id'],
'title' => $data['album_title']
]);
// album keywords take precedence; photo keywords are appended/used alone
if ($data['album_keywords'] !== "") {
set_meta("keywords", $data['album_keywords']);
if ($data['photo_keywords'] !== "") {
add_to_meta("keywords", $data['photo_keywords']);
}
} else {
if ($data['photo_keywords'] !== "") {
set_meta("keywords", $data['photo_keywords']);
}
}
\PHPFusion\BreadCrumbs::getInstance()->addBreadCrumb([
'link' => INFUSIONS."gallery/gallery.php?photo_id=".$data['photo_id'],
'title' => $data['photo_title']
]);
// NOTE(review): watermarking support below looks incomplete/broken per
// the original author's own comments -- verify before relying on it
if ($gallery_settings['photo_watermark']) {
// serve pre-rendered watermarked files when they exist, otherwise
// route through photo.php which watermarks on the fly
if ($gallery_settings['photo_watermark_save']) {
$parts = explode(".", $data['photo_filename']);
$wm_file1 = $parts[0]."_w1.".$parts[1];
$wm_file2 = $parts[0]."_w2.".$parts[1];
if (!file_exists(IMAGES_G_T.$wm_file1)) {
if ($data['photo_thumb2']) {
$info['photo_thumb'] = INFUSIONS."gallery/photo.php?photo_id=".$_GET['photo_id'];
}
$info['photo_filename'] = INFUSIONS."gallery/photo.php?photo_id=".$_GET['photo_id']."&full";
} else {
if ($data['photo_thumb2']) {
$info['photo_thumb'] = IMAGES_G."/".$wm_file1;
}
$info['photo_filename'] = IMAGES_G."/".$wm_file2;
}
} else {
if ($data['photo_thumb2']) {
$info['photo_thumb'] = INFUSIONS."gallery/photo.php?photo_id=".$_GET['photo_id'];
}
$info['photo_filename'] = INFUSIONS."gallery/photo.php?photo_id=".$_GET['photo_id']."&full";
}
$info['photo_size'] = @getimagesize(IMAGES_G.$data['photo_filename']);
} else {
// no watermarking: serve stored thumbs/original directly
$info += array(
"photo_thumb2" => $data['photo_thumb2'] ? IMAGES_G_T.$data['photo_thumb2'] : "",
"photo_thumb1" => $data['photo_thumb1'] ? IMAGES_G_T.$data['photo_thumb1'] : "",
"photo_filename" => IMAGES_G.$data['photo_filename'],
"photo_size" => getimagesize(IMAGES_G.$data['photo_filename'])
);
}
// NOTE(review): both branches of the photo_byte ternary below are
// identical -- presumably one was meant to use the watermarked file
$info += array(
"photo_description" => $data['photo_description'] ? nl2br(parse_textarea($data['photo_description'], FALSE, FALSE, TRUE, FALSE)) : '',
"photo_byte" => parsebytesize($gallery_settings['photo_watermark'] ? filesize(IMAGES_G.$data['photo_filename']) : filesize(IMAGES_G.$data['photo_filename'])),
"photo_comment" => $data['photo_allow_comments'] ? number_format($data['comment_count']) : 0,
"photo_ratings" => $data['photo_allow_ratings'] && $data['count_votes'] > 0 ? number_format(ceil($data['sum_rating'] / $data['count_votes'])) : '0',
);
if (defined('IN_PERMALINK')) {
$info['photo_description'] = strtr($info['photo_description'], [fusion_get_settings('site_path') => '']);
}
// first/prev/next/last navigation links (only when neighbors exist)
if ((isset($prev['photo_id']) && isnum($prev['photo_id'])) || (isset($next['photo_id']) && isnum($next['photo_id']))) {
if (isset($prev) && isset($first)) {
$info['nav']['first'] = array(
'link' => INFUSIONS."gallery/gallery.php?photo_id=".$first['photo_id'],
'name' => $locale['459']
);
}
if (isset($prev)) {
$info['nav']['prev'] = array(
'link' => INFUSIONS."gallery/gallery.php?photo_id=".$prev['photo_id'],
'name' => $locale['451']
);
}
if (isset($next)) {
$info['nav']['next'] = array(
'link' => INFUSIONS."gallery/gallery.php?photo_id=".$next['photo_id'],
'name' => $locale['452']
);
}
if (isset($next) && isset($last)) {
$info['nav']['last'] = array(
'link' => INFUSIONS."gallery/gallery.php?photo_id=".$last['photo_id'],
'name' => $locale['460']
);
}
}
$data['photo_show_comments'] = get_photo_comments($data);
$data['photo_show_ratings'] = get_photo_ratings($data);
$info += $data;
render_photo($info);
} else {
// photo not found or not accessible
redirect(INFUSIONS.'gallery/gallery.php');
}
} else {
if (isset($_GET['album_id']) && isnum($_GET['album_id'])) {
/* View Album */
$result = dbquery("SELECT album_title, album_description, album_keywords, album_image, album_thumb1, album_thumb2, album_access
FROM ".DB_PHOTO_ALBUMS." WHERE ".groupaccess('album_access')." AND album_id='".intval($_GET['album_id'])."'
");
if (dbrows($result) > 0) {
$info = dbarray($result);
set_title($info['album_title'].$locale['global_201']);
add_to_title(\PHPFusion\SiteLinks::get_current_SiteLinks(INFUSIONS.'gallery/gallery.php', "link_name"));
\PHPFusion\BreadCrumbs::getInstance()->addBreadCrumb([
'link' => INFUSIONS.'gallery/gallery.php',
'title' => \PHPFusion\SiteLinks::get_current_SiteLinks("", "link_name")
]);
\PHPFusion\BreadCrumbs::getInstance()->addBreadCrumb([
'link' => INFUSIONS.'gallery/gallery.php?album_id='.$_GET['album_id'],
'title' => $info['album_title']
]);
if ($info['album_keywords'] !== "") {
add_to_meta("keywords", $info['album_keywords']);
}
/* Category Info */
$info['album_thumb'] = displayAlbumImage($info['album_image'], $info['album_thumb2'], $info['album_thumb1'], "");
$info['album_link'] = array(
'link' => INFUSIONS.'gallery/gallery.php?album_id='.$_GET['album_id'],
'name' => $info['album_title']
);
$info['max_rows'] = dbcount("(photo_id)", DB_PHOTOS, "album_id='".$_GET['album_id']."'");
// clamp rowstart for pagination
$_GET['rowstart'] = isset($_GET['rowstart']) && isnum($_GET['rowstart']) && $_GET['rowstart'] <= $info['max_rows'] ? $_GET['rowstart'] : 0;
if ($info['max_rows'] > 0) {
// Album stats
$latest_update = dbarray(dbquery("
SELECT tp.photo_datestamp, tu.user_id, tu.user_name, tu.user_status
FROM ".DB_PHOTOS." tp
LEFT JOIN ".DB_USERS." tu ON tp.photo_user=tu.user_id
WHERE album_id='".intval($_GET['album_id'])."'
ORDER BY photo_datestamp DESC LIMIT 1"));
$info['album_stats'] = $locale['422']." ".$info['max_rows']."<br />\n";
$info['album_stats'] .= $locale['423']." ".profile_link($latest_update['user_id'], $latest_update['user_name'], $latest_update['user_status'])." ".$locale['424']." ".showdate("longdate", $latest_update['photo_datestamp'])."\n";
// current page of photos with aggregated ratings
$result = dbquery("SELECT tp.*,
tu.user_id, tu.user_name, tu.user_status, tu.user_avatar,
SUM(tr.rating_vote) 'sum_rating',
COUNT(tr.rating_vote) 'count_rating',
COUNT(tr.rating_item_id) 'count_votes'
FROM ".DB_PHOTOS." tp
LEFT JOIN ".DB_USERS." tu ON tp.photo_user=tu.user_id
LEFT JOIN ".DB_RATINGS." tr ON tr.rating_item_id = tp.photo_id AND tr.rating_type='P'
WHERE album_id='".intval($_GET['album_id'])."'
GROUP BY photo_id ORDER BY photo_order
limit ".intval($_GET['rowstart']).",".intval($gallery_settings['gallery_pagination']));
$info['photo_rows'] = dbrows($result);
$info['page_nav'] = $info['max_rows'] > $gallery_settings['gallery_pagination'] ? makepagenav($_GET['rowstart'],
$gallery_settings['gallery_pagination'],
$info['max_rows'], 3,
INFUSIONS."gallery/gallery.php?album_id=".$_GET['album_id']."&") : '';
if ($info['photo_rows'] > 0) {
// this is photo
while ($data = dbarray($result)) {
// data manipulation
$data += array(
"photo_link" => array(
'link' => INFUSIONS."gallery/gallery.php?photo_id=".$data['photo_id'],
'name' => $data['photo_title']
),
"image" => displayPhotoImage($data['photo_id'], $data['photo_filename'], $data['photo_thumb1'], $data['photo_thumb2'], INFUSIONS."gallery/gallery.php?photo_id=".$data['photo_id']),
"title" => ($data['photo_title']) ? $data['photo_title'] : $data['image'],
"description" => ($data['photo_description']) ? nl2br(parse_textarea($data['photo_description'])) : '',
"photo_views" => format_word($data['photo_views'], $locale['fmt_views']),
);
// admin-only edit/delete links
if (iADMIN && checkrights("PH")) {
global $aidlink;
$data['photo_edit'] = array(
"link" => INFUSIONS."gallery/gallery_admin.php".$aidlink."&section=photo_form&action=edit&photo_id=".$data['photo_id'],
"name" => $locale['edit']
);
$data['photo_delete'] = array(
"link" => INFUSIONS."gallery/gallery_admin.php".$aidlink."&section=actions&action=delete&photo_id=".$data['photo_id'],
"name" => $locale['delete']
);
}
if ($data['photo_allow_comments']) {
$data += array(
"photo_votes" => $data['count_votes'] > 0 ? $data['count_votes'] : '0',
"photo_comments" => array(
'link' => $data['photo_link']['link'].'#comments',
'name' => $data['count_votes'],
'word' => format_word($data['count_votes'], $locale['fmt_comment'])
)
);
}
if ($data['photo_allow_ratings']) {
$data += array(
"sum_rating" => $data['sum_rating'] > 0 ? $data['sum_rating'] : '0',
"photo_ratings" => array(
'link' => $data['photo_link']['link'].'#ratings',
'name' => $data['sum_rating'],
'word' => ($data['sum_rating'] > 0) ? ($data['sum_rating'] / $data['count_rating'] * 10)."/10" : "0/10",
)
);
}
$info['item'][] = $data;
}
}
}
render_photo_album($info);
} else {
// album not found or not accessible
redirect(INFUSIONS.'gallery/gallery.php');
}
} else {
/* Main Index */
set_title(\PHPFusion\SiteLinks::get_current_SiteLinks('infusions/gallery/gallery.php', "link_name"));
\PHPFusion\BreadCrumbs::getInstance()->addBreadCrumb([
'link' => INFUSIONS.'gallery/gallery.php',
'title' => \PHPFusion\SiteLinks::get_current_SiteLinks(INFUSIONS.'gallery/gallery.php', "link_name")
]);
$info['max_rows'] = dbcount("(album_id)", DB_PHOTO_ALBUMS, groupaccess('album_access'));
$_GET['rowstart'] = isset($_GET['rowstart']) && isnum($_GET['rowstart']) && $_GET['rowstart'] <= $info['max_rows'] ? $_GET['rowstart'] : 0;
if ($info['max_rows'] > 0) {
$info['page_nav'] = ($info['max_rows'] > $gallery_settings['gallery_pagination']) ? makepagenav($_GET['rowstart'],
$gallery_settings['gallery_pagination'],
$info['max_rows'], 3) : '';
// album list, language-filtered when the table is multilingual
$result = dbquery("SELECT ta.album_id, ta.album_title, ta.album_description, ta.album_image, ta.album_thumb1, ta.album_thumb2, ta.album_datestamp,
tu.user_id, tu.user_name, tu.user_status
FROM ".DB_PHOTO_ALBUMS." ta
LEFT JOIN ".DB_USERS." tu ON ta.album_user=tu.user_id
".(multilang_table("PG") ? "WHERE album_language='".LANGUAGE."' AND" : "WHERE")."
".groupaccess('album_access')." ORDER BY album_order
LIMIT ".$_GET['rowstart'].", ".$gallery_settings['gallery_pagination']);
while ($data = dbarray($result)) {
$data['album_link'] = array(
"link" => INFUSIONS."gallery/gallery.php?album_id=".$data['album_id'],
"name" => $data['album_title']
);
if (iADMIN && checkrights("PH")) {
global $aidlink;
$data['album_edit'] = array(
"link" => INFUSIONS."gallery/gallery_admin.php".$aidlink."&section=album_form&action=edit&cat_id=".$data['album_id'],
"name" => $locale['edit']
);
$data['album_delete'] = array(
"link" => INFUSIONS."gallery/gallery_admin.php".$aidlink."&section=album_form&action=delete&cat_id=".$data['album_id'],
"name" => $locale['delete']
);
}
// NOTE(review): $photo_directory is computed but never used below
$photo_directory = !SAFEMODE ? "album_".$data['album_id'] : '';
// if ($data['album_image']) {
$data['image'] = displayAlbumImage($data['album_image'], $data['album_thumb1'], $data['album_thumb2'],
INFUSIONS."gallery/gallery.php?album_id=".$data['album_id']);
//}
$data['title'] = $data['album_title'] ? $data['album_title'] : $locale['402'];
$data['description'] = $data['album_description'] ? nl2br(parse_textarea($data['album_description'])) : '';
// distinct contributing users for this album (keyed by user_id)
$_photo = dbquery("SELECT pp.photo_user, u.user_id, u.user_name, u.user_status, u.user_avatar
FROM ".DB_PHOTOS." pp
LEFT JOIN ".DB_USERS." u on u.user_id=pp.photo_user
WHERE album_id='".intval($data['album_id'])."'
ORDER BY photo_datestamp
");
$data['photo_rows'] = dbrows($_photo);
$user = array();
if ($data['photo_rows'] > 0) {
while ($_photo_data = dbarray($_photo)) {
$user[$_photo_data['user_id']] = $_photo_data;
} // distinct value.
}
$data['photo_user'] = $user;
$info['item'][] = $data;
}
}
render_gallery($info);
}
}
/**
 * Renders a single photo thumbnail panel for the album photo listing:
 * image link, photo title, star rating, owner link, date, comment count
 * and view count. Output is echoed directly.
 *
 * @param array $data photo row joined with its owner's user record
 *                    (photo_*, user_id, user_name, user_status, ...)
 */
function photo_thumbnail($data) {
    $locale = fusion_get_locale();
    echo "<div class='panel panel-default tbl-border'>\n";
    echo "<div class='p-0'>\n";
    echo "<!--photogallery_album_photo_".$data['photo_id']."-->";
    echo "<a href='".INFUSIONS."gallery/gallery.php?photo_id=".$data['photo_id']."' class='photogallery_album_photo_link'>\n";
    // Fall back to the generic "no image" picture when the thumb file is missing.
    $thumb_img = ($data['photo_thumb1'] && file_exists(IMAGES_G.$data['photo_thumb1'])) ? IMAGES_G.$data['photo_thumb1'] : DOWNLOADS."images/no_image.jpg";
    // NOTE(review): the title falls back to the album thumb *filename* (checked
    // against the PHOTOS path) rather than the photo title — looks suspicious;
    // confirm whether photo_title was intended here.
    $title = ($data['album_thumb1'] && file_exists(PHOTOS.$data['album_thumb1'])) ? $data['album_thumb1'] : $locale['402'];
    echo "<img class='photogallery_album_photo img-responsive' style='min-width: 100%;' src='".$thumb_img."' title='$title' alt='$title' />\n";
    echo "</a>\n";
    echo "</div>\n<div class='panel-body photogallery_album_photo_info'>\n";
    echo "<a href='".INFUSIONS."gallery/gallery.php?photo_id=".$data['photo_id']."' class='photogallery_album_photo_link'><strong>".$data['photo_title']."</strong></a>\n";
    echo "</div>\n<div class='panel-body photogallery_album_photo_info' style='border-top:1px solid #ddd'>\n";
    echo "<!--photogallery_album_photo_info-->\n";
    echo "<span class='display-inline-block'>\n";
    // Star rating: ceil(sum_rating / count_votes) stars, only when ratings are allowed.
    echo($data['photo_allow_ratings'] ? $locale['437'].($data['count_votes'] > 0 ? str_repeat("<i class='fa fa-star'></i>", ceil($data['sum_rating'] / $data['count_votes'])) : $locale['438'])."<br />\n" : "");
    echo "</span>\n<br/>\n";
    echo "</div>\n<div class='panel-body photogallery_album_photo_info' style='border-top:1px solid #ddd'>\n";
    echo "<span> ".$locale['434'].profile_link($data['user_id'], $data['user_name'], $data['user_status'])." </span>";
    echo "</div>\n<div class='panel-body photogallery_album_photo_info' style='border-top:1px solid #ddd'>\n";
    echo "<span class='m-r-10'><abbr title='".$locale['403'].showdate("shortdate", $data['photo_datestamp'])."'><i title='".$locale['403'].showdate("shortdate", $data['photo_datestamp'])."' class='entypo calendar text-lighter'></i></abbr></span>";
    $photo_comments = dbcount("(comment_id)", DB_COMMENTS, "comment_type='P' AND comment_item_id='".$data['photo_id']."'");
    $comments_text = ($data['photo_allow_comments'] ? ($photo_comments == 1 ? $locale['436b'] : $locale['436']).$photo_comments : "");
    // FIX: removed a stray trailing </abbr> that produced invalid markup.
    echo "<span class='m-r-10'><abbr title='".$comments_text."'><i class='entypo icomment text-lighter'></i></abbr> $photo_comments</span>";
    // FIX: the <abbr> wrapping the user icon was never closed.
    echo "<span class='m-r-10'><abbr title='".$locale['434'].$data['user_name']."'><i class='entypo user text-lighter'></i></abbr></span>";
    echo "<span><abbr title='".$locale['435'].$data['photo_views']."'><i class='entypo eye text-lighter'></i></abbr> ".$data['photo_views']."</span>";
    echo "</div></div>\n";
}
require_once THEMES."templates/footer.php";
/**
 * Resolves and renders the album cover thumbnail.
 *
 * Resolution order: thumb1 (checking both possible locations since v7),
 * then thumb2, then the full album image, then the stock default image.
 * Watermark generation is skipped here: it needs a photo_id and albums
 * have none.
 *
 * @param string $album_image  full-size album image file name
 * @param string $album_thumb1 primary thumb file name
 * @param string $album_thumb2 secondary thumb file name
 * @param string $link         URL the thumbnail links to
 *
 * @return string rendered thumbnail HTML
 */
function displayAlbumImage($album_image, $album_thumb1, $album_thumb2, $link) {
    global $gallery_settings;
    // Default unless a real image can be found below.
    $source = IMAGES_G."album_default.jpg";
    if (!empty($album_thumb1) && file_exists(IMAGES_G.$album_thumb1)) {
        // uncommon location first
        $source = IMAGES_G.$album_thumb1;
    } elseif (!empty($album_thumb1) && file_exists(IMAGES_G_T.$album_thumb1)) {
        // usual location when the image exceeds the thumb threshold
        $source = IMAGES_G_T.$album_thumb1;
    } elseif (!empty($album_thumb2) && file_exists(IMAGES_G.$album_thumb2)) {
        $source = IMAGES_G.$album_thumb2;
    } elseif (!empty($album_image) && file_exists(IMAGES_G.$album_image)) {
        $source = IMAGES_G.$album_image;
    }
    return thumbnail($source, $gallery_settings['thumb_w']."px", $link, FALSE, FALSE, "cropfix");
}
/**
 * Resolves and renders a photo thumbnail (with Colorbox on non-thumb1 sources).
 *
 * Resolution order: thumb1 (checking both possible locations since v7),
 * then thumb2, then the full photo file, then the stock default image.
 * NOTE: the former watermark handling — which was the only consumer of
 * $photo_id — has been removed from thumbnail rendering; the parameter is
 * kept for signature compatibility.
 *
 * @param int    $photo_id       photo id (currently unused, see note above)
 * @param string $photo_filename full-size photo file name
 * @param string $photo_thumb1   primary thumb file name
 * @param string $photo_thumb2   secondary thumb file name
 * @param string $link           URL the thumbnail links to
 *
 * @return string rendered thumbnail HTML
 */
function displayPhotoImage($photo_id, $photo_filename, $photo_thumb1, $photo_thumb2, $link) {
    global $gallery_settings;
    $width = $gallery_settings['thumb_w']."px";
    if (!empty($photo_thumb1)) {
        if (file_exists(IMAGES_G.$photo_thumb1)) {
            // uncommon location first
            return thumbnail(IMAGES_G.$photo_thumb1, $width, $link, FALSE, FALSE, "cropfix");
        }
        if (file_exists(IMAGES_G_T.$photo_thumb1)) {
            // usual location when the image exceeds the thumb threshold
            return thumbnail(IMAGES_G_T.$photo_thumb1, $width, $link, FALSE, FALSE, "cropfix");
        }
    }
    if (!empty($photo_thumb2) && file_exists(IMAGES_G.$photo_thumb2)) {
        return thumbnail(IMAGES_G.$photo_thumb2, $width, $link, TRUE, FALSE, "cropfix");
    }
    if (!empty($photo_filename) && file_exists(IMAGES_G.$photo_filename)) {
        return thumbnail(IMAGES_G.$photo_filename, $width, $link, TRUE, FALSE, "cropfix");
    }
    // Nothing usable on disk: stock image, no link.
    return thumbnail(IMAGES_G."album_default.jpg", $width, "", FALSE, FALSE, "cropfix");
}
/**
 * Captures the rendered comments widget for a photo.
 *
 * Returns an empty string when sitewide comments are disabled or the photo
 * does not allow comments.
 *
 * @param array $data photo row (needs photo_id and photo_allow_comments)
 *
 * @return string buffered HTML produced by showcomments()
 */
function get_photo_comments($data) {
    $html = "";
    if (fusion_get_settings('comments_enabled') && $data['photo_allow_comments']) {
        ob_start();
        showcomments("P", DB_PHOTOS, "photo_id", $data['photo_id'], BASEDIR."infusions/gallery/gallery.php?photo_id=".$data['photo_id'], FALSE);
        // ob_get_clean() = ob_get_contents() + ob_end_clean(); matches the
        // sibling get_photo_ratings() implementation.
        $html = ob_get_clean();
    }
    return (string)$html;
}
/**
 * Captures the rendered ratings widget for a photo.
 *
 * Returns an empty string when sitewide ratings are disabled or the photo
 * does not allow ratings.
 *
 * @param array $data photo row (needs photo_id and photo_allow_ratings)
 *
 * @return string buffered HTML produced by showratings()
 */
function get_photo_ratings($data) {
    if (!fusion_get_settings('ratings_enabled') || !$data['photo_allow_ratings']) {
        return '';
    }
    ob_start();
    showratings("P", $data['photo_id'], BASEDIR."infusions/gallery/gallery.php?photo_id=".$data['photo_id']);
    return (string)ob_get_clean();
}
/*
* This file is part of ELKI:
* Environment for Developing KDD-Applications Supported by Index-Structures
*
* Copyright (C) 2019
* ELKI Development Team
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package elki.index.tree.spatial.rstarvariants.xtree;
import elki.data.NumberVector;
import elki.database.relation.Relation;
import elki.persistent.PageFile;
import elki.persistent.PageFileFactory;
/**
* Factory for an xtree.
*
* @author Erich Schubert
* @since 0.7.5
*/
public class XTreeFactory<O extends NumberVector> extends AbstractXTreeFactory<O, XTreeNode> {
  /**
   * Constructor.
   *
   * @param pageFileFactory factory producing the backing page file
   * @param settings X-tree settings to use
   */
  public XTreeFactory(PageFileFactory<?> pageFileFactory, XTreeSettings settings) {
    super(pageFileFactory, settings);
  }

  @Override
  public XTreeIndex<O> instantiate(Relation<O> relation) {
    // Build the page file for our node type and wrap it into an index instance.
    return new XTreeIndex<>(relation, makePageFile(getNodeClass()), settings);
  }

  /**
   * Node class handled by this factory.
   *
   * @return the {@link XTreeNode} class object
   */
  protected Class<XTreeNode> getNodeClass() {
    return XTreeNode.class;
  }

  /**
   * Parameterization class, producing configured factory instances.
   */
  public static class Par<O extends NumberVector> extends AbstractXTreeFactory.Par<O> {
    @Override
    public XTreeFactory<O> make() {
      return new XTreeFactory<>(pageFileFactory, settings);
    }
  }
}
| elki-project/elki | addons/xtree/src/main/java/elki/index/tree/spatial/rstarvariants/xtree/XTreeFactory.java | Java | agpl-3.0 | 1,853 |
#
#
# Copyright (C) 2011 Instructure, Inc.
#
# This file is part of Canvas.
#
# Canvas is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation, version 3 of the License.
#
# Canvas is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
# details.
#
# You should have received a copy of the GNU Affero General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
#
require File.expand_path(File.dirname(__FILE__) + '/../api_spec_helper')
describe EnrollmentsApiController, :type => :integration do
describe "enrollment creation" do
context "an admin user" do
before do
course_with_student(:active_all => true)
Account.site_admin.add_user(@student)
@unenrolled_user = user_with_pseudonym
@section = @course.course_sections.create
@path = "/api/v1/courses/#{@course.id}/enrollments"
@path_options = { :controller => 'enrollments_api', :action => 'create', :format => 'json', :course_id => @course.id.to_s }
@user = @student
end
it "should create a new student enrollment" do
json = api_call :post, @path, @path_options,
{
:enrollment => {
:user_id => @unenrolled_user.id,
:type => 'StudentEnrollment',
:enrollment_state => 'active',
:course_section_id => @section.id,
:limit_privileges_to_course_section => true
}
}
new_enrollment = Enrollment.find(json['id'])
json.should == {
'root_account_id' => @course.account.id,
'id' => new_enrollment.id,
'user_id' => @unenrolled_user.id,
'course_section_id' => @section.id,
'limit_privileges_to_course_section' => true,
'enrollment_state' => 'active',
'course_id' => @course.id,
'type' => 'StudentEnrollment'
}
new_enrollment.root_account_id.should eql @course.account.id
new_enrollment.user_id.should eql @unenrolled_user.id
new_enrollment.course_section_id.should eql @section.id
new_enrollment.limit_privileges_to_course_section.should eql true
new_enrollment.workflow_state.should eql 'active'
new_enrollment.course_id.should eql @course.id
new_enrollment.should be_an_instance_of StudentEnrollment
end
it "should create a new teacher enrollment" do
json = api_call :post, @path, @path_options,
{
:enrollment => {
:user_id => @unenrolled_user.id,
:type => 'TeacherEnrollment',
:enrollment_state => 'active',
:course_section_id => @section.id,
:limit_privileges_to_course_section => true
}
}
Enrollment.find(json['id']).should be_an_instance_of TeacherEnrollment
end
it "should create a new ta enrollment" do
json = api_call :post, @path, @path_options,
{
:enrollment => {
:user_id => @unenrolled_user.id,
:type => 'TaEnrollment',
:enrollment_state => 'active',
:course_section_id => @section.id,
:limit_privileges_to_course_section => true
}
}
Enrollment.find(json['id']).should be_an_instance_of TaEnrollment
end
it "should create a new observer enrollment" do
json = api_call :post, @path, @path_options,
{
:enrollment => {
:user_id => @unenrolled_user.id,
:type => 'ObserverEnrollment',
:enrollment_state => 'active',
:course_section_id => @section.id,
:limit_privileges_to_course_section => true
}
}
Enrollment.find(json['id']).should be_an_instance_of ObserverEnrollment
end
it "should default new enrollments to the 'invited' state" do
json = api_call :post, @path, @path_options,
{
:enrollment => {
:user_id => @unenrolled_user.id,
:type => 'StudentEnrollment'
}
}
Enrollment.find(json['id']).workflow_state.should eql 'invited'
end
it "should throw an error if no params are given" do
raw_api_call :post, @path, @path_options, { :enrollment => { } }
response.code.should eql '403'
JSON.parse(response.body).should == {
'message' => 'No parameters given'
}
end
it "should assume a StudentEnrollment if no type is given" do
api_call :post, @path, @path_options, { :enrollment => { :user_id => @unenrolled_user.id } }
JSON.parse(response.body)['type'].should eql 'StudentEnrollment'
end
it "should return an error if an invalid type is given" do
raw_api_call :post, @path, @path_options, { :enrollment => { :user_id => @unenrolled_user.id, :type => 'PandaEnrollment' } }
JSON.parse(response.body)['message'].should eql 'Invalid type'
end
it "should return an error if no user_id is given" do
raw_api_call :post, @path, @path_options, { :enrollment => { :type => 'StudentEnrollment' } }
response.code.should eql '403'
JSON.parse(response.body).should == {
'message' => "Can't create an enrollment without a user. Include enrollment[user_id] to create an enrollment"
}
end
end
context "a teacher" do
before do
course_with_teacher(:active_all => true)
@course_with_teacher = @course
@course_wo_teacher = course
@course = @course_with_teacher
@unenrolled_user = user_with_pseudonym
@section = @course.course_sections.create
@path = "/api/v1/courses/#{@course.id}/enrollments"
@path_options = { :controller => 'enrollments_api', :action => 'create', :format => 'json', :course_id => @course.id.to_s }
@user = @teacher
end
it "should create enrollments for its own class" do
json = api_call :post, @path, @path_options,
{
:enrollment => {
:user_id => @unenrolled_user.id,
:type => 'StudentEnrollment',
:enrollment_state => 'active',
:course_section_id => @section.id,
:limit_privileges_to_course_section => true
}
}
new_enrollment = Enrollment.find(json['id'])
json.should == {
'root_account_id' => @course.account.id,
'id' => new_enrollment.id,
'user_id' => @unenrolled_user.id,
'course_section_id' => @section.id,
'limit_privileges_to_course_section' => true,
'enrollment_state' => 'active',
'course_id' => @course.id,
'type' => 'StudentEnrollment'
}
new_enrollment.root_account_id.should eql @course.account.id
new_enrollment.user_id.should eql @unenrolled_user.id
new_enrollment.course_section_id.should eql @section.id
new_enrollment.limit_privileges_to_course_section.should eql true
new_enrollment.workflow_state.should eql 'active'
new_enrollment.course_id.should eql @course.id
new_enrollment.should be_an_instance_of StudentEnrollment
end
it "should not create an enrollment for another class" do
raw_api_call :post, "/api/v1/courses/#{@course_wo_teacher.id}/enrollments", @path_options.merge(:course_id => @course_wo_teacher.id.to_s),
{
:enrollment => {
:user_id => @unenrolled_user.id,
:type => 'StudentEnrollment'
}
}
response.code.should eql '401'
end
end
context "a student" do
before do
course_with_student(:active_all => true)
@unenrolled_user = user_with_pseudonym
@path = "/api/v1/courses/#{@course.id}/enrollments"
@path_options = { :controller => 'enrollments_api', :action => 'create', :format => 'json', :course_id => @course.id.to_s }
@user = @student
end
it "should return 401 Unauthorized" do
raw_api_call :post, @path, @path_options,
{
:enrollment => {
:user_id => @unenrolled_user,
:type => 'StudentEnrollment'
}
}
response.code.should eql '401'
end
end
end
describe "enrollment listing" do
before do
course_with_student(:active_all => true, :user => user_with_pseudonym)
@teacher = User.create(:name => 'Señor Chang')
@teacher.pseudonyms.create(:unique_id => 'chang@example.com')
@course.enroll_teacher(@teacher)
User.all.each { |u| u.destroy unless u.pseudonym.present? }
@path = "/api/v1/courses/#{@course.id}/enrollments"
@params = { :controller => "enrollments_api", :action => "index", :course_id => @course.id.to_param, :format => "json" }
end
context "a student" do
it "should list all members of a course" do
json = api_call(:get, @path, @params)
enrollments = %w{observer student ta teacher}.inject([]) do |res, type|
res = res + @course.send("#{type}_enrollments").scoped(:include => :user, :order => 'users.sortable_name ASC')
end
json.should == enrollments.map { |e|
{
'root_account_id' => e.root_account_id,
'limit_privileges_to_course_section' => e.limit_privileges_to_course_section,
'enrollment_state' => e.workflow_state,
'id' => e.id,
'user_id' => e.user_id,
'type' => e.type,
'course_section_id' => e.course_section_id,
'course_id' => e.course_id,
'user' => {
'name' => e.user.name,
'sortable_name' => e.user.sortable_name,
'short_name' => e.user.short_name,
'id' => e.user.id
}
}
}
end
it "should filter by enrollment workflow_state" do
@teacher.enrollments.first.update_attribute(:workflow_state, 'completed')
json = api_call(:get, "#{@path}?state[]=completed", @params.merge(:state => %w{completed}))
json.each { |e| e['enrollment_state'].should eql 'completed' }
end
it "should not include the users' sis and login ids" do
json = api_call(:get, @path, @params)
json.each do |res|
%w{sis_user_id sis_login_id login_id}.each { |key| res['user'].should_not include(key) }
end
end
end
context "a teacher" do
it "should include users' sis and login ids" do
@user = @teacher
json = api_call(:get, @path, @params)
enrollments = %w{observer student ta teacher}.inject([]) do |res, type|
res = res + @course.send("#{type}_enrollments").scoped(:include => :user)
end
json.should == enrollments.map do |e|
user_json = {
'name' => e.user.name,
'sortable_name' => e.user.sortable_name,
'short_name' => e.user.short_name,
'id' => e.user.id,
'login_id' => e.user.pseudonym ? e.user.pseudonym.unique_id : nil
}
user_json.merge!({
'sis_user_id' => e.user.pseudonym.sis_user_id,
'sis_login_id' => e.user.pseudonym.unique_id,
}) if e.user.pseudonym && e.user.pseudonym.sis_user_id
{
'root_account_id' => e.root_account_id,
'limit_privileges_to_course_section' => e.limit_privileges_to_course_section,
'enrollment_state' => e.workflow_state,
'id' => e.id,
'user_id' => e.user_id,
'type' => e.type,
'course_section_id' => e.course_section_id,
'course_id' => e.course_id,
'user' => user_json
}
end
end
end
context "a user without roster permissions" do
it "should return 401 unauthorized" do
@user = user_with_pseudonym(:name => 'Don Draper', :username => 'ddraper@sterling-cooper.com')
raw_api_call(:get, "/api/v1/courses/#{@course.id}/enrollments", @params.merge(:course_id => @course.id.to_param))
response.code.should eql "401"
end
end
describe "pagination" do
it "should properly paginate" do
json = api_call(:get, "#{@path}?page=1&per_page=1", @params.merge(:page => 1.to_param, :per_page => 1.to_param))
enrollments = %w{observer student ta teacher}.inject([]) { |res, type|
res = res + @course.send("#{type}_enrollments").scoped(:include => :user)
}.map do |e|
{
'root_account_id' => e.root_account_id,
'limit_privileges_to_course_section' => e.limit_privileges_to_course_section,
'enrollment_state' => e.workflow_state,
'id' => e.id,
'user_id' => e.user_id,
'type' => e.type,
'course_section_id' => e.course_section_id,
'course_id' => e.course_id,
'user' => {
'name' => e.user.name,
'sortable_name' => e.user.sortable_name,
'short_name' => e.user.short_name,
'id' => e.user.id
}
}
end
link_header = response.headers['Link'].split(',')
link_header[0].should match /page=2&per_page=1/ # next page
link_header[1].should match /page=1&per_page=1/ # first page
link_header[2].should match /page=2&per_page=1/ # last page
json.should eql [enrollments[0]]
json = api_call(:get, "#{@path}?page=2&per_page=1", @params.merge(:page => 2.to_param, :per_page => 1.to_param))
link_header = response.headers['Link'].split(',')
link_header[0].should match /page=1&per_page=1/ # prev page
link_header[1].should match /page=1&per_page=1/ # first page
link_header[2].should match /page=2&per_page=1/ # last page
json.should eql [enrollments[1]]
end
end
describe "filters" do
it "should properly filter by a single enrollment type" do
json = api_call(:get, "#{@path}?type[]=StudentEnrollment", @params.merge(:type => %w{StudentEnrollment}))
json.should eql @course.student_enrollments.map { |e|
{
'root_account_id' => e.root_account_id,
'limit_privileges_to_course_section' => e.limit_privileges_to_course_section,
'enrollment_state' => e.workflow_state,
'id' => e.id,
'user_id' => e.user_id,
'type' => e.type,
'course_section_id' => e.course_section_id,
'course_id' => e.course_id,
'user' => {
'name' => e.user.name,
'sortable_name' => e.user.sortable_name,
'short_name' => e.user.short_name,
'id' => e.user.id
}
}
}
end
it "should properly filter by multiple enrollment types" do
# set up some enrollments that shouldn't be returned by the api
request_user = @user
@new_user = user_with_pseudonym(:name => 'Zombo', :username => 'nobody2@example.com')
@course.enroll_user(@new_user, 'TaEnrollment', 'active')
@course.enroll_user(@new_user, 'ObserverEnrollment', 'active')
@user = request_user
json = api_call(:get, "#{@path}?type[]=StudentEnrollment&type[]=TeacherEnrollment", @params.merge(:type => %w{StudentEnrollment TeacherEnrollment}))
json.should == (@course.student_enrollments + @course.teacher_enrollments).map { |e|
{
'root_account_id' => e.root_account_id,
'limit_privileges_to_course_section' => e.limit_privileges_to_course_section,
'enrollment_state' => e.workflow_state,
'id' => e.id,
'user_id' => e.user_id,
'type' => e.type,
'course_section_id' => e.course_section_id,
'course_id' => e.course_id,
'user' => {
'name' => e.user.name,
'sortable_name' => e.user.sortable_name,
'short_name' => e.user.short_name,
'id' => e.user.id
}
}
}
end
end
end
end
| ottenhoff/canvas-lms | spec/apis/v1/enrollments_api_spec.rb | Ruby | agpl-3.0 | 17,367 |
/*
* Copyright (C) 2016 OpenMotics BV
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import {inject, customElement, bindable, bindingMode} from 'aurelia-framework';
import 'bootstrap';
import 'bootstrap-toggle';
import {Base} from '../base';
import {Toolbox} from '../../components/toolbox';
import Shared from '../../components/shared';
// Aurelia custom element <global-thermostat> with a two-way bindable
// `thermostat` property; the host DOM element is injected via @inject(Element).
@bindable({
    name: 'thermostat',
    defaultBindingMode: bindingMode.twoWay
})
@customElement('global-thermostat')
@inject(Element)
export class GlobalThermostat extends Base {
    constructor(element, ...rest) {
        super(...rest);
        this.element = element;
        this.bool = false; // NOTE(review): not read in this class — presumably bound from the view; confirm
    }
    // Returns the widget width as a CSS px string: wide on 'lg' viewports
    // (plus 20px of extra room for non-English locale labels), narrow otherwise.
    thermostatWidth() {
        let offset = Shared === undefined || Shared.locale === 'en' ? 0 : 20;
        let width = Toolbox.getDeviceViewport() === 'lg' ? 110 + offset : 40;
        return `${width}px`;
    }
}
| openmotics/gateway-frontend | src/resources/globalthermostat/thermostat.js | JavaScript | agpl-3.0 | 1,490 |
/*******************************************************************************
* gvGeoportal is sponsored by the General Directorate for Information
* Technologies (DGTI) of the Regional Ministry of Finance and Public
* Administration of the Generalitat Valenciana (Valencian Community,
* Spain), managed by gvSIG Association and led by DISID Corporation.
*
* Copyright (C) 2016 DGTI - Generalitat Valenciana
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package es.gva.dgti.gvgeoportal.security;
import java.io.Serializable;
import org.gvnix.addon.gva.security.providers.safe.GvNIXUserSAFE;
import org.springframework.roo.addon.javabean.RooJavaBean;
import org.springframework.security.core.userdetails.UserDetails;
@GvNIXUserSAFE
@RooJavaBean
public class SafeUser implements UserDetails, Serializable {
    // NOTE(review): intentionally empty — the fields, accessors and the
    // UserDetails contract are presumably introduced at build time by the
    // AspectJ ITDs generated for @GvNIXUserSAFE / @RooJavaBean; confirm
    // against the generated *.aj files.
}
| gvSIGAssociation/gvsig-web | src/main/java/es/gva/dgti/gvgeoportal/security/SafeUser.java | Java | agpl-3.0 | 1,528 |
<?php
/**
 * HMVC widget controller that renders the unread-notifications dropdown for
 * the currently logged-in user.
 */
class Controller_Notifications_Dropdown extends Controller_Core_Lang {

	public function before() {
		if (!\Request::is_hmvc()) {
			// Only honour internal (HMVC) requests; on direct HTTP access we
			// bail out before parent::before() runs any of its setup.
			// NOTE(review): confirm skipping parent setup here is intentional.
			return;
		}
		parent::before();
	}

	/**
	 * Builds the dropdown view with the user's unread count and unread
	 * notification list.
	 *
	 * @return \View the notifications/dropdown view
	 */
	public function get_index() {
		$user_id = $this->current_user->id;
		$count = \Model_Notification::get_unread_count($user_id);
		$notifications = \Model_Notification::get_unread($user_id) ? : [];
		// Give other modules a chance to contribute before rendering.
		\Event::trigger('gather_notifications');
		// An empty string hides the badge when there is nothing unread.
		return \View::forge('notifications/dropdown',
			['unread_count' => $count > 0 ? $count : '', 'notifications' => $notifications]);
	}
}
| MelcherSt/HTweb | fuel/app/classes/controller/notifications/dropdown.php | PHP | agpl-3.0 | 572 |
/*
* Tanaguru - Automated webpage assessment
* Copyright (C) 2008-2017 Tanaguru.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contact us by mail: tanaguru AT tanaguru DOT org
*/
package org.tanaguru.rules.rgaa32017;
import org.tanaguru.ruleimplementation.AbstractNotTestedRuleImplementation;
/**
* Implementation of the rule 11.10.6 of the referential Rgaa 3-2017.
*
* For more details about the implementation, refer to <a href="http://tanaguru-rules-rgaa3.readthedocs.org/en/latest/Rule-11-10-6">the rule 11.10.6 design page.</a>
* @see <a href="http://references.modernisation.gouv.fr/referentiel-technique-0#test-11-10-6"> 11.10.6 rule specification</a>
*/
public class Rgaa32017Rule111006 extends AbstractNotTestedRuleImplementation {
    /**
     * Default constructor.
     * No per-rule logic is defined here: the whole behaviour is inherited
     * from AbstractNotTestedRuleImplementation (the rule is not automatable,
     * per the class name of the parent).
     */
    public Rgaa32017Rule111006 () {
        super();
    }
}
/*
* Tanaguru - Automated webpage assessment
* Copyright (C) 2008-2016 Tanaguru.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
* Contact us by mail: tanaguru AT tanaguru DOT org
*/
package org.tanaguru.rules.rgaa32016;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.jsoup.nodes.Element;
import static org.tanaguru.entity.audit.TestSolution.*;
import org.tanaguru.processor.SSPHandler;
import org.tanaguru.ruleimplementation.AbstractMarkerPageRuleImplementation;
import org.tanaguru.ruleimplementation.ElementHandler;
import org.tanaguru.ruleimplementation.ElementHandlerImpl;
import org.tanaguru.ruleimplementation.TestSolutionHandler;
import org.tanaguru.rules.elementchecker.ElementChecker;
import org.tanaguru.rules.elementchecker.element.ElementPresenceChecker;
import org.tanaguru.rules.elementselector.SimpleElementSelector;
import static org.tanaguru.rules.keystore.AttributeStore.*;
import static org.tanaguru.rules.keystore.CssLikeQueryStore.*;
import static org.tanaguru.rules.keystore.HtmlElementStore.*;
import static org.tanaguru.rules.keystore.MarkerStore.*;
import static org.tanaguru.rules.keystore.RemarkMessageStore.*;
/**
* Implementation of the rule 1.2.4 of the referential Rgaa 3-2016.
*
* For more details about the implementation, refer to <a href="http://tanaguru-rules-rgaa3.readthedocs.org/en/latest/Rule-1-2-4">the rule 1.2.4 design page.</a>
* @see <a href="http://references.modernisation.gouv.fr/referentiel-technique-0#test-1-2-4"> 1.2.4 rule specification</a>
*/
public class Rgaa32016Rule010204 extends AbstractMarkerPageRuleImplementation {

    /**
     * Contains all the decorative SVG elements without the "img" role
     */
    private final ElementHandler<Element> decorativeSvgElementsWithoutRoleImage
            = new ElementHandlerImpl();

    /**
     * Contains all the SVG elements without the "img" role.
     */
    private final ElementHandler<Element> svgElementsWithoutRoleImage
            = new ElementHandlerImpl();

    /**
     * Contains all the SVG elements with a title attribute on the element
     * itself or on one of its children.
     */
    private final ElementHandler<Element> titleAttrOnSvgOrChild
            = new ElementHandlerImpl();

    /**
     * Contains all the decorative SVG elements with a title attribute on the
     * element itself or on one of its children.
     */
    private final ElementHandler<Element> titleAttrOnDecorativeSvgOrChild
            = new ElementHandlerImpl();

    /**
     * Contains the decorative SVG elements that match none of the malformed
     * patterns below (i.e. well-formed decorative SVG); they yield PASSED in
     * check().
     */
    private final ElementHandler<Element> decorativeSvgElements
            = new ElementHandlerImpl();

    /**
     * Contains the suspected decorative SVG elements: no marker and no
     * malformed pattern detected.
     */
    private final ElementHandler<Element> suspectedDecorativeSvgElements
            = new ElementHandlerImpl();

    /**
     * Contains SVG elements with a non-empty desc or title child tag.
     */
    private final ElementHandler<Element> svgElementsWithDescOrTitleChild
            = new ElementHandlerImpl();

    /**
     * Contains decorative SVG elements with a non-empty desc or title child tag.
     */
    private final ElementHandler<Element> decorativeSvgElementsWithDescOrTitleChild
            = new ElementHandlerImpl();

    /**
     * Contains SVG elements with an aria attribute on the element or on a child.
     */
    private final ElementHandler<Element> ariaAttrOnSvgOrChild
            = new ElementHandlerImpl();

    /**
     * Contains decorative SVG elements with an aria attribute on the element
     * or on a child.
     */
    private final ElementHandler<Element> ariaAttrOnDecorativeSvgOrChild
            = new ElementHandlerImpl();

    /**
     * Default constructor: selects SVG elements that are not inside a link,
     * partitioned by the decorative / informative image markers.
     */
    public Rgaa32016Rule010204 () {
        super(DECORATIVE_IMAGE_MARKER, INFORMATIVE_IMAGE_MARKER);
        setElementSelector(new SimpleElementSelector(SVG_NOT_IN_LINK_CSS_LIKE_QUERY));
    }

    @Override
    protected void select(SSPHandler sspHandler) {
        super.select(sspHandler);
        extractMalformedPatternDetectedElements(
                getSelectionWithMarkerHandler(), // all svg identified as decorative by marker
                decorativeSvgElementsWithoutRoleImage,
                ariaAttrOnDecorativeSvgOrChild,
                decorativeSvgElementsWithDescOrTitleChild,
                titleAttrOnDecorativeSvgOrChild,
                decorativeSvgElements);

        extractMalformedPatternDetectedElements( // all svg that are neither decorative, nor informative by marker
                getSelectionWithoutMarkerHandler(),
                svgElementsWithoutRoleImage,
                ariaAttrOnSvgOrChild,
                svgElementsWithDescOrTitleChild,
                titleAttrOnSvgOrChild,
                suspectedDecorativeSvgElements);
    }

    /**
     * Dispatches each element of svgElements into the handler of every
     * malformed pattern it matches; elements matching no pattern at all are
     * collected into wellFormedSvgElements (when that handler is not null).
     *
     * @param svgElements the elements to inspect
     * @param svgElementsWithoutRoleImage receives elements whose role attribute is not "img"
     * @param ariaAttrOnSvgOrChild receives elements with an aria-label/labelledby/describedby attribute on the element or a child
     * @param svgElementsWithDescOrTitleChild receives elements with a non-empty title or desc child tag
     * @param titleAttrOnSvgOrChild receives elements with a title attribute on the element or a child
     * @param wellFormedSvgElements receives elements matching none of the patterns; may be null
     */
    private void extractMalformedPatternDetectedElements (
            ElementHandler<Element> svgElements,
            ElementHandler<Element> svgElementsWithoutRoleImage,
            ElementHandler<Element> ariaAttrOnSvgOrChild,
            ElementHandler<Element> svgElementsWithDescOrTitleChild,
            ElementHandler<Element> titleAttrOnSvgOrChild,
            ElementHandler<Element> wellFormedSvgElements) {

        for(Element element : svgElements.get()) {
            boolean patternDetected= false;

            // pattern 1: the role attribute is not "img"
            if (!StringUtils.equalsIgnoreCase(element.attr(ROLE_ATTR), "img")) {
                svgElementsWithoutRoleImage.add(element);
                patternDetected= true;
            }

            // pattern 2: an aria attribute is set on the element or on one of its children
            if (element.hasAttr(ARIA_LABEL_ATTR) ||
                    element.hasAttr(ARIA_LABELLEDBY_ATTR) ||
                    element.hasAttr(ARIA_DESCRIBEDBY_ATTR) ||
                    !element.select(ARIA_DESCRIBEDBY_CSS_LIKE_QUERY+","+ ARIA_LABEL_CSS_LIKE_QUERY+","+ARIA_LABELLEDBY_CSS_LIKE_QUERY).isEmpty()) {
                ariaAttrOnSvgOrChild.add(element);
                patternDetected= true;
            }

            // pattern 3: a non-empty title or desc child tag is present
            if (!element.select(NOT_EMPTY_ARIA_TITLE_CSS_LIKE_QUERY+","+NOT_EMPTY_ARIA_DESC_CSS_LIKE_QUERY).isEmpty()) {
                svgElementsWithDescOrTitleChild.add(element);
                patternDetected= true;
            }

            // pattern 4: a title attribute is set on the element or on one of its children
            if (element.hasAttr(TITLE_ELEMENT) ||
                    !element.select("[title]").isEmpty()) {
                titleAttrOnSvgOrChild.add(element);
                patternDetected= true;
            }

            if (wellFormedSvgElements != null && !patternDetected) {
                wellFormedSvgElements.add(element);
            }
        }
    }

    @Override
    protected void check(SSPHandler sspHandler, TestSolutionHandler testSolutionHandler) {
        // no SVG on the page at all: the test is not applicable
        if (getSelectionWithMarkerHandler().isEmpty() &&
                getSelectionWithoutMarkerHandler().isEmpty()) {
            testSolutionHandler.addTestSolution(NOT_APPLICABLE);
            return;
        }

        ElementChecker ec = new ElementPresenceChecker(
                new ImmutablePair(FAILED, DECORATIVE_SVG_WITHOUT_ROLE_IMG_ATTRIBUTE),
                new ImmutablePair(PASSED,""));
        if (!decorativeSvgElementsWithoutRoleImage.isEmpty()) {
            // result is failed for sure
            ec.check(sspHandler, decorativeSvgElementsWithoutRoleImage, testSolutionHandler);
        }

        ec = new ElementPresenceChecker(
                new ImmutablePair(FAILED, DECORATIVE_SVG_OR_CHILDREN_WITH_ARWIA_ATTRIBUTE),
                new ImmutablePair(PASSED,""));
        if (!ariaAttrOnDecorativeSvgOrChild.isEmpty()) {
            ec.check(sspHandler, ariaAttrOnDecorativeSvgOrChild, testSolutionHandler);
        }

        ec = new ElementPresenceChecker(
                new ImmutablePair(FAILED, DECORATIVE_SVG_WITH_NOT_EMPTY_TITLE_OR_DESC_TAGS),
                new ImmutablePair(PASSED,""));
        if (!decorativeSvgElementsWithDescOrTitleChild.isEmpty()) {
            ec.check(sspHandler, decorativeSvgElementsWithDescOrTitleChild, testSolutionHandler);
        }

        ec = new ElementPresenceChecker(
                new ImmutablePair(FAILED, DECORATIVE_SVG_OR_CHILD_WITH_TITLE_ATTRIBUTE),
                new ImmutablePair(PASSED,""));
        if (!titleAttrOnDecorativeSvgOrChild.isEmpty()) {
            // ec.check(sspHandler, titleAttrOnSvgOrChild, testSolutionHandler);
            ec.check(sspHandler, titleAttrOnDecorativeSvgOrChild, testSolutionHandler);
        }

        // marker-decorative SVG with no malformed pattern at all: PASSED
        if (!decorativeSvgElements.isEmpty()) {
            testSolutionHandler.addTestSolution(PASSED);
        }

        // This control is inhibited to avoid throwing same error as 1.3.5
        // Thus, each vectorial image not determined with marker without role="img"
        // will raise a Failed, only once, in 1.3.5
        // ec = new ElementPresenceChecker(
        // new ImmutablePair(TestSolution.NEED_MORE_INFO, SUSPECTED_INFORMATIVE_SVG_ROLE_IMAGE_MISSING_ON_SVG),
        // new ImmutablePair(TestSolution.PASSED,""));
        //
        // if (!svgElementsWithoutRoleImage.isEmpty()) {
        // // result is failed for sure
        // ec.check(sspHandler, svgElementsWithoutRoleImage, testSolutionHandler);
        // }

        ec = new ElementPresenceChecker(
                new ImmutablePair(NEED_MORE_INFO, SVG_WITHOUT_ROLE_IMAGE_MSG),
                new ImmutablePair(PASSED, ""));
        if (!svgElementsWithoutRoleImage.isEmpty()) {
            // result is failed for sure
            ec.check(sspHandler, svgElementsWithoutRoleImage, testSolutionHandler);
        }

        ec = new ElementPresenceChecker(
                new ImmutablePair(NEED_MORE_INFO, SUSPECTED_INFORMATIVE_SVG_WITH_ARIA_ATTRIBUTE_DETECTED_ON_ELEMENT_OR_CHILD),
                new ImmutablePair(PASSED,""));
        if (!ariaAttrOnSvgOrChild.isEmpty()) {
            ec.check(sspHandler, ariaAttrOnSvgOrChild, testSolutionHandler);
        }

        ec = new ElementPresenceChecker(
                new ImmutablePair(NEED_MORE_INFO, SUSPECTED_INFORMATIVE_SVG_WITH_DESC_OR_TITLE_CHILD_TAG),
                new ImmutablePair(PASSED,""));
        if (!svgElementsWithDescOrTitleChild.isEmpty()) {
            ec.check(sspHandler, svgElementsWithDescOrTitleChild, testSolutionHandler);
        }

        ec = new ElementPresenceChecker(
                new ImmutablePair(NEED_MORE_INFO, SUSPECTED_INFORMATIVE_SVG_WITH_TITLE_ATTRIBUTE_ON_ELEMENT_OR_CHILD),
                new ImmutablePair(PASSED,""));
        if (!titleAttrOnSvgOrChild.isEmpty()) {
            ec.check(sspHandler, titleAttrOnSvgOrChild, testSolutionHandler);
        }

        ec = new ElementPresenceChecker(
                new ImmutablePair(NEED_MORE_INFO, SUSPECTED_WELL_FORMATED_DECORATIVE_SVG),
                new ImmutablePair(PASSED,""));
        if (!suspectedDecorativeSvgElements.isEmpty()) {
            ec.check(sspHandler, suspectedDecorativeSvgElements, testSolutionHandler);
        }
    }
} | Tanaguru/Tanaguru | rules/rgaa3-2016/src/main/java/org/tanaguru/rules/rgaa32016/Rgaa32016Rule010204.java | Java | agpl-3.0 | 11,818 |
// Copyright 2015 by Paulo Augusto Peccin. See license.txt distributed with this file.
// Implements the 64K "X07" AtariAge format
// 64K "X07" cartridge: sixteen 4K banks selected by monitored bus accesses.
jt.Cartridge64K_X07 = function(rom, format) {
    "use strict";

    // Copies the constructor arguments onto the instance and aliases the ROM
    // content into the module-private `bytes` buffer used by read().
    function init(self) {
        self.rom = rom;
        self.format = format;
        bytes = rom.content; // uses the content of the ROM directly
        self.bytes = bytes;
    }

    // Reads one byte from the currently selected 4K bank.
    this.read = function(address) {
        // Always add the correct offset to access bank selected
        return bytes[bankAddressOffset + (address & ADDRESS_MASK)];
    };

    // Bank switching is triggered by bus addresses, not by explicit writes.
    this.performBankSwitchOnMonitoredAccess = function(address) {
        if ((address & 0x180f) === 0x080d)                                            // Method 1
            bankAddressOffset = ((address & 0x00f0) >> 4) * BANK_SIZE;                // Pick bank from bits 7-4
        else if (bankAddressOffset >= BANK_14_ADDRESS && (address & 0x1880) === 0x0000)   // Method 2, only if at bank 14 or 15
            bankAddressOffset = ((address & 0x0040) === 0 ? 14 : 15) * BANK_SIZE;     // Pick bank 14 or 15 from bit 6
    };


    // Savestate -------------------------------------------

    // Serializes format name, ROM state, ROM bytes (base64) and bank offset.
    this.saveState = function() {
        return {
            f: this.format.name,
            r: this.rom.saveState(),
            b: jt.Util.compressInt8BitArrayToStringBase64(bytes),
            bo: bankAddressOffset
        };
    };

    // Restores the state produced by saveState().
    this.loadState = function(state) {
        this.format = jt.CartridgeFormats[state.f];
        this.rom = jt.ROM.loadState(state.r);
        bytes = jt.Util.uncompressStringBase64ToInt8BitArray(state.b, bytes);
        this.bytes = bytes;
        bankAddressOffset = state.bo;
    };


    var bytes;                      // raw cartridge content
    var bankAddressOffset = 0;      // byte offset of the selected bank within `bytes`

    var ADDRESS_MASK = 0x0fff;      // 4K addressing inside a bank
    var BANK_SIZE = 4096;
    var BANK_14_ADDRESS = 14 * BANK_SIZE;


    if (rom) init(this);

};
jt.Cartridge64K_X07.prototype = jt.CartridgeBankedByBusMonitoring.base;

// Rebuilds a cartridge from a savestate, reusing the previous instance when
// one is supplied; otherwise a fresh (empty) cartridge is created first.
jt.Cartridge64K_X07.recreateFromSaveState = function(state, prevCart) {
    var cart = prevCart;
    if (!cart) cart = new jt.Cartridge64K_X07();
    cart.loadState(state);
    return cart;
};
| ppeccin/javatari.js | src/main/atari/cartridge/formats/Cartridge64K_X07.js | JavaScript | agpl-3.0 | 2,125 |
// Copyright (C) 2003 salvatore orlando <salvatore.orlando@unive.it>
// University of Venice, Ca' Foscari, Dipartimento di Informatica, (VE) Italy
// Istituto di Scienza e Tecnologia dell'Informazione, ISTI - CNR, (PI) Italy
//
// This program is free software; you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#include <stdio.h>
#include <iostream>
using namespace std;
#include "utils.hh"
// Program-wide stopwatch instance shared by the whole application.
Chronos GLOBAL_CLOCK;
// Starts (or resumes) the stopwatch: records the current wall-clock time as
// the reference point used by StopChronos()/ReadChronos(). Time already
// accumulated by previous Start/Stop cycles is kept.
void Chronos::StartChronos()
{
    /*
    if(!( (State==READY_CHRONOS || Time==0) ||
    (State==STOPPED_CHRONOS) ) )
    ResetChronos();
    */

    State=RUNNING_CHRONOS;
    gettimeofday(&tv,&tz);
    sec1=tv.tv_sec;
    usec1=tv.tv_usec;
}
// Stops the stopwatch and adds the interval elapsed since the last
// StartChronos() to the accumulated Time (seconds, microsecond resolution).
void Chronos::StopChronos()
{
    State=STOPPED_CHRONOS;
    gettimeofday(&tv,&tz);
    sec2=tv.tv_sec;
    usec2=tv.tv_usec;

    Time += (double)((sec2-sec1)) +
        (double) ((usec2-usec1))/1000000.0;

    // if ((usec2-usec1) == 0)
    // Time += 0.0005;
}
// Returns the elapsed time in seconds without stopping the stopwatch.
// When stopped or ready, the accumulated Time is returned unchanged.
// NOTE(review): while running, Time is OVERWRITTEN with (now - last start),
// discarding intervals accumulated by earlier Start/Stop cycles — unlike
// StopChronos(), which adds to Time. Confirm whether reading during a
// resumed run is expected to behave this way.
double Chronos::ReadChronos()
{
    if(State==STOPPED_CHRONOS || State==READY_CHRONOS)
        return Time;
    else {
        gettimeofday(&tv,&tz);
        sec3=tv.tv_sec;
        usec3=tv.tv_usec;
        Time = (double)((sec3-sec1)) +
            (double) ((usec3-usec1))/1000000.0;
        return Time;
    }
}
// Clears the accumulated time and marks the stopwatch ready to start again.
void Chronos::ResetChronos()
{
    Time=0;
    State=READY_CHRONOS;
}
// Convenience helper: reset the accumulated time to zero and start timing.
void Chronos::RestartChronos()
{
    ResetChronos();
    StartChronos();
}
| examachine/bitdrill | aux/dci-closed/utils.cc | C++ | agpl-3.0 | 1,939 |
package org.cbioportal.persistence.mybatis;
import org.cbioportal.model.GeneMolecularAlteration;
import org.cbioportal.model.GenericAssayMolecularAlteration;
import org.cbioportal.model.GenesetMolecularAlteration;
import org.cbioportal.model.MolecularProfileSamples;
import java.util.List;
import org.apache.ibatis.cursor.Cursor;
/**
 * MyBatis mapper for reading molecular alteration data (gene, gene set and
 * generic assay based) of molecular profiles.
 */
public interface MolecularDataMapper {

    /**
     * Returns, for each requested molecular profile, the comma-separated list
     * of its sample ids.
     */
    List<MolecularProfileSamples> getCommaSeparatedSampleIdsOfMolecularProfiles(List<String> molecularProfileIds);

    /**
     * Returns gene-based alterations of one molecular profile, optionally
     * filtered by entrez gene ids; {@code projection} controls the detail level.
     */
    List<GeneMolecularAlteration> getGeneMolecularAlterations(String molecularProfileId, List<Integer> entrezGeneIds,
                                                              String projection);

    /**
     * Streaming variant of {@link #getGeneMolecularAlterations}: returns a
     * MyBatis cursor so rows can be consumed without loading all of them.
     */
    Cursor<GeneMolecularAlteration> getGeneMolecularAlterationsIter(String molecularProfileId, List<Integer> entrezGeneIds,
                                                                    String projection);

    /**
     * Returns gene-based alterations across several molecular profiles.
     */
    List<GeneMolecularAlteration> getGeneMolecularAlterationsInMultipleMolecularProfiles(List<String> molecularProfileIds,
                                                                                         List<Integer> entrezGeneIds, String projection);

    /**
     * Returns gene-set-based alterations of one molecular profile.
     */
    List<GenesetMolecularAlteration> getGenesetMolecularAlterations(String molecularProfileId, List<String> genesetIds,
                                                                    String projection);

    /**
     * Returns generic-assay-based alterations of one molecular profile,
     * filtered by stable ids.
     */
    List<GenericAssayMolecularAlteration> getGenericAssayMolecularAlterations(String molecularProfileId, List<String> stableIds,
                                                                              String projection);
}
| mandawilson/cbioportal | persistence/persistence-mybatis/src/main/java/org/cbioportal/persistence/mybatis/MolecularDataMapper.java | Java | agpl-3.0 | 1,595 |
import { Collection } from '../../utilities'
import Packet from './Packet'
import * as Types from '../types'
/**
* ScriptControlChange Packet
*/
/**
 * ScriptControlChange Packet
 */
class ScriptControlChange extends Packet {
  /**
   * Packet ID, this value is only unique per-frequency range, see key get
   * method of Packet, plus the buffer helper of the network namespace for
   * generating a lookup codes.
   *
   * @type {number}
   */
  public static id: number = 189

  /**
   * Packet frequency. This value determines whether the message ID is 8, 16, or
   * 32 bits. There can be 254 unique message IDs in the "High" or "Medium"
   * frequencies and 32,000 in "Low". A message with a "Fixed" frequency also
   * defines its own ID and is considered to be a signal.
   *
   * @type {number}
   */
  public static frequency: number = 0

  /**
   * If this value is true, the client cannot send this packet as circuits only
   * accept trusted packets from internal connections (to utility servers etc).
   *
   * @type {boolean}
   */
  public static trusted: boolean = true

  /**
   * States if this packet should use or be using zerocoding, to attempt to
   * compress the sequences of zeros in the message in order to reduce network
   * load.
   *
   * @type {boolean}
   */
  public static compression: boolean = false

  /**
   * Determines the blocks that are contained in the message and its
   * required parameters.
   *
   * @see {@link http://wiki.secondlife.com/wiki/Message_Layout}
   * @type {Collection}
   */
  public static format: Collection<string, any> = new Collection([
    // tslint:disable-next-line:max-line-length
    ['data', { parameters: new Collection<string, any>([['takeControls', Types.Boolean], ['controls', Types.U32], ['passToAgent', Types.Boolean]]) }]
  ])

  /**
   * ScriptControlChange constructor, can be passed either a fully
   * initialized Packet Buffer or an object containing this Object's required
   * parameters from {@link ScriptControlChange.format}. Note that
   * "agentData" blocks may be excluded if {@link build} is able to fetch the
   * requirements itself.
   *
   * @param {object|Buffer} [data] Packet block data to be serialized, may be optional
   * @param {boolean} [data.data.takeControls] TakeControls
   * @param {U32} [data.data.controls] Controls
   * @param {boolean} [data.data.passToAgent] PassToAgent
   */
  constructor(data = {}) {
    super(data)
  }
}

export default ScriptControlChange
| gwigz/sljs | src/network/packets/ScriptControlChange.ts | TypeScript | agpl-3.0 | 2,450 |
# Production environment configuration.
Rails.application.configure do
  # Verifies that versions and hashed value of the package contents in the project's package.json
  config.webpacker.check_yarn_integrity = false
  # Settings specified here will take precedence over those in config/application.rb.

  # Use lograge for logging to production
  config.lograge.enabled = true
  config.log_level = :info
  config.lograge.formatter = Lograge::Formatters::Logstash.new # Use logstash format
  config.lograge.custom_options = lambda do |event|
    {
      remote_ip: event.payload[:ip],
      params: event.payload[:params].except("controller", "action", "format", "id")
    }
  end

  # Log times where people pass non-permitted params.
  # FIX: this config option must be ASSIGNED. The previous form
  # `action_on_unpermitted_parameters :log` invoked the OrderedOptions getter
  # with an (ignored) argument, so the setting was silently never applied.
  config.action_controller.action_on_unpermitted_parameters = :log

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both threaded web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local = false
  config.action_controller.perform_caching = true
  config.cache_store = :redis_cache_store, {url: ENV["REDIS_CACHE_URL"]}

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like
  # NGINX, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable serving static files from the `/public` folder by default since
  # Apache or NGINX already handles this.
  config.public_file_server.enabled = ENV["RAILS_SERVE_STATIC_FILES"].present?

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb

  # Asset digests allow you to set far-future HTTP expiration dates on all assets,
  # yet still be able to expire them through the digest params.
  config.assets.digest = true

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX

  # Store uploaded files on the local file system (see config/storage.yml for options)
  config.active_storage.service = :local

  # Mount Action Cable outside main process or domain
  # config.action_cable.mount_path = nil
  # config.action_cable.url = 'wss://example.com/cable'
  # config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  config.force_ssl = true

  # Prepend all log lines with the following tags.
  config.log_tags = [:request_id]

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = 'http://assets.example.com'

  config.action_mailer.perform_caching = false

  # Ignore bad email addresses and do not raise email delivery errors.
  # Set this to true and configure the email server for immediate delivery to raise delivery errors.
  # config.action_mailer.raise_delivery_errors = false

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation cannot be found).
  config.i18n.fallbacks = [I18n.default_locale]

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new

  # Use a different logger for distributed setups.
  # require 'syslog/logger'
  # config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')

  if ENV["RAILS_LOG_TO_STDOUT"].present?
    logger = ActiveSupport::Logger.new($stdout)
    logger.formatter = config.log_formatter
    config.logger = ActiveSupport::TaggedLogging.new(logger)
  end

  # Do not dump schema after migrations.
  config.active_record.dump_schema_after_migration = false

  config.action_mailer.default_url_options = {protocol: "https", host: "bikeindex.org"}
  config.action_mailer.delivery_method = :postmark
  config.action_mailer.postmark_settings = {
    api_token: ENV["POSTMARK_API_TOKEN"]
  }
end
| bikeindex/bike_index | config/environments/production.rb | Ruby | agpl-3.0 | 4,855 |
# du7f - a simple web app for simple polls
# Copyright (C) 2011 Adrian Friedli <adi@koalatux.ch>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
require 'test_helper'
class ParticipantTest < ActiveSupport::TestCase
test 'valid participant' do
participant = Participant.new(name: 'joey')
participant.poll = polls(:alices_poll)
participant.entries = polls(:alices_poll).choices.map { |c| Entry.new(answer: 1) { |e| e.participant = participant; e.choice = c } }
assert participant.save
end
test 'participant with no answers should fail' do
participant = Participant.new(name: 'john')
participant.poll = polls(:alices_poll)
assert !participant.save
end
test 'participant with duplicate answers should fail' do
participant = Participant.new(name: 'john')
participant.poll = polls(:alices_poll)
participant.entries = (polls(:alices_poll).choices[0...-1] + polls(:alices_poll).choices[0...1]).map { |c| Entry.new(answer: 1) { |e| e.participant = participant; e.choice = c } }
assert !participant.save
end
end
| koalatux/du7f | test/unit/participant_test.rb | Ruby | agpl-3.0 | 1,672 |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* Copyright 2017 ScyllaDB
*
* Modified by ScyllaDB
*/
/*
* This file is part of Scylla.
*
* Scylla is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Scylla is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with Scylla. If not, see <http://www.gnu.org/licenses/>.
*/
#pragma once
#include <seastar/core/sstring.hh>
#include "cql3/statements/authentication_statement.hh"
#include "cql3/role_name.hh"
#include "cql3/role_options.hh"
namespace cql3 {
namespace statements {
/// CQL "ALTER ROLE" statement: applies new option values (as parsed into
/// role_options) to an existing role identified by name.
class alter_role_statement final : public authentication_statement {
    sstring _role;           // name of the role being altered
    role_options _options;   // option values to apply

public:
    alter_role_statement(const cql3::role_name& name, const role_options& options)
            : _role(name.to_string())
            // NOTE(review): `options` is a const lvalue reference, so this
            // std::move degenerates to a copy — confirm whether the parameter
            // was intended to be taken by value.
            , _options(std::move(options)) {
    }

    std::unique_ptr<prepared_statement> prepare(database& db, cql_stats& stats) override;

    void validate(service::storage_proxy&, const service::client_state&) const override;

    virtual future<> check_access(service::storage_proxy& proxy, const service::client_state&) const override;

    virtual future<::shared_ptr<cql_transport::messages::result_message>>
    execute(service::storage_proxy&, service::query_state&, const query_options&) const override;
};
}
}
| avikivity/scylla | cql3/statements/alter_role_statement.hh | C++ | agpl-3.0 | 2,536 |
#include <iostream>
#include <string>
using namespace std;
int main() {
string x;
x = "Hello, world";
cout << x << endl;
return 0;
} | cawaltrip/compilers | stubs/8.cpp | C++ | agpl-3.0 | 139 |
"""
BigchainDB TYMLEZ Consensus Plugin
"""
from setuptools import setup
tests_require = [
'pytest',
'pep8',
'pylint',
'pytest',
]
dev_require = [
'ipdb',
'ipython',
]
docs_require = [
]
setup(
name='BigchainDB TYMLEZ Consensus Plugin',
version='0.0.2',
description='BigchainDB TYMLEZ Consensus Plugin',
long_description=__doc__,
url='https://github.com/tymlez/consensus',
zip_safe=False,
classifiers=[
'Development Status :: 3 - Alpha'
],
packages=[
'consensus_template'
],
entry_points={
'bigchaindb.consensus': [
'tymlezconsensus=consensus_template.consensus:ConsensusRulesTemplate'
]
},
install_requires=[
'bigchaindb>=0.10.0.dev'
],
setup_requires=['pytest-runner'],
tests_require=tests_require,
extras_require={
'test': tests_require,
'dev': dev_require + tests_require + docs_require,
'docs': docs_require,
},
)
| tymlez/consensus | setup.py | Python | agpl-3.0 | 1,000 |
<?php
/*
* Copyright 2004-2017, AfterLogic Corp.
* Licensed under AGPLv3 license or AfterLogic license
* if commercial version of the product was purchased.
* See the LICENSE file for a full license statement.
*/
namespace MailSo\Sieve;
/**
* @category MailSo
* @package Sieve
*/
class ManageSieveClient extends \MailSo\Net\NetClient
{
	/**
	 * Whether a user has been successfully authenticated on this connection.
	 *
	 * @var bool
	 */
	private $bIsLoggined;

	/**
	 * Capabilities reported by the server (upper-cased name => value).
	 *
	 * @var array
	 */
	private $aCapa;

	/**
	 * Wall-clock time (microtime float) when the connection was initiated.
	 *
	 * @var int
	 */
	private $iRequestTime;
	/**
	 * Initializes default state. Instances are created via NewInstance().
	 *
	 * @access protected
	 */
	protected function __construct()
	{
		parent::__construct();

		$this->bIsLoggined = false;
		$this->iRequestTime = 0;
		$this->aCapa = array();
		$this->aModules = array();
	}
	/**
	 * Factory method for creating client instances.
	 *
	 * @return \MailSo\Sieve\ManageSieveClient
	 */
	public static function NewInstance()
	{
		return new self();
	}
	/**
	 * Tells whether the server advertised the given capability.
	 *
	 * @param string $sCapa
	 *
	 * @return bool
	 */
	public function IsSupported($sCapa)
	{
		// capability names are stored upper-cased by parseStartupResponse()
		return isset($this->aCapa[\strtoupper($sCapa)]);
	}
	/**
	 * Tells whether a Sieve extension module (from the SIEVE capability list)
	 * is supported by the server.
	 *
	 * @param string $sModule
	 *
	 * @return bool
	 */
	public function IsModuleSupported($sModule)
	{
		return $this->IsSupported('SIEVE') && \in_array(\strtoupper($sModule), $this->aModules);
	}
	/**
	 * Tells whether a SASL mechanism (from the SASL capability list) is
	 * supported by the server.
	 *
	 * @param string $sAuth
	 *
	 * @return bool
	 */
	public function IsAuthSupported($sAuth)
	{
		return $this->IsSupported('SASL') && \in_array(\strtoupper($sAuth), $this->aAuth);
	}
	/**
	 * Opens the ManageSieve connection, reads the server greeting/capabilities
	 * and, when requested or auto-detected, upgrades the socket via STARTTLS
	 * (re-reading capabilities afterwards, as required by the protocol).
	 *
	 * @param string $sServerName
	 * @param int $iPort
	 * @param int $iSecurityType = \MailSo\Net\Enumerations\ConnectionSecurityType::AUTO_DETECT
	 * @param bool $bVerifySsl = false
	 *
	 * @return \MailSo\Sieve\ManageSieveClient
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Base\Exceptions\InvalidArgumentException
	 * @throws \MailSo\Sieve\Exceptions\ResponseException
	 */
	public function Connect($sServerName, $iPort,
		$iSecurityType = \MailSo\Net\Enumerations\ConnectionSecurityType::AUTO_DETECT, $bVerifySsl = false)
	{
		$this->iRequestTime = microtime(true);

		parent::Connect($sServerName, $iPort, $iSecurityType, $bVerifySsl);

		// server greeting carries the capability list
		$mResponse = $this->parseResponse();
		$this->validateResponse($mResponse);
		$this->parseStartupResponse($mResponse);

		if (\MailSo\Net\Enumerations\ConnectionSecurityType::UseStartTLS(
			$this->IsSupported('STARTTLS'), $this->iSecurityType))
		{
			$this->sendRequestWithCheck('STARTTLS');
			$this->EnableCrypto();

			// capabilities must be re-read after the TLS upgrade
			$mResponse = $this->parseResponse();
			$this->validateResponse($mResponse);
			$this->parseStartupResponse($mResponse);
		}
		else if (\MailSo\Net\Enumerations\ConnectionSecurityType::STARTTLS === $this->iSecurityType)
		{
			// STARTTLS was explicitly required but the server does not offer it
			$this->writeLogException(
				new \MailSo\Net\Exceptions\SocketUnsuppoterdSecureConnectionException('STARTTLS is not supported'),
				\MailSo\Log\Enumerations\Type::ERROR, true);
		}

		return $this;
	}
	/**
	 * Authenticates against the server using SASL PLAIN when available,
	 * falling back to SASL LOGIN. Bad credentials are reported as
	 * LoginBadCredentialsException; absence of a usable mechanism as
	 * LoginBadMethodException.
	 *
	 * @param string $sLogin
	 * @param string $sPassword
	 * @param string $sLoginAuthKey = '' authorization identity for PLAIN (usually empty)
	 *
	 * @return \MailSo\Sieve\ManageSieveClient
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Base\Exceptions\InvalidArgumentException
	 * @throws \MailSo\Sieve\Exceptions\LoginException
	 */
	public function Login($sLogin, $sPassword, $sLoginAuthKey = '')
	{
		if (!\MailSo\Base\Validator::NotEmptyString($sLogin, true) ||
			!\MailSo\Base\Validator::NotEmptyString($sPassword, true))
		{
			$this->writeLogException(
				new \MailSo\Base\Exceptions\InvalidArgumentException(),
				\MailSo\Log\Enumerations\Type::ERROR, true);
		}

		if ($this->IsSupported('SASL'))
		{
			$bAuth = false;
			try
			{
				if ($this->IsAuthSupported('PLAIN'))
				{
					// PLAIN: single base64 blob "authzid\0authcid\0password"
					$sAuth = base64_encode($sLoginAuthKey."\0".$sLogin."\0".$sPassword);

					$this->sendRequest('AUTHENTICATE "PLAIN" {'.strlen($sAuth).'+}');
					$this->sendRequest($sAuth);

					$mResponse = $this->parseResponse();
					$this->validateResponse($mResponse);
					$this->parseStartupResponse($mResponse);

					$bAuth = true;
				}
				else if ($this->IsAuthSupported('LOGIN'))
				{
					// LOGIN: login and password sent as separate base64 literals
					$sLogin = base64_encode($sLogin);
					$sPassword = base64_encode($sPassword);

					$this->sendRequest('AUTHENTICATE "LOGIN"');
					$this->sendRequest('{'.strlen($sLogin).'+}');
					$this->sendRequest($sLogin);
					$this->sendRequest('{'.strlen($sPassword).'+}');
					$this->sendRequest($sPassword);

					$mResponse = $this->parseResponse();
					$this->validateResponse($mResponse);
					$this->parseStartupResponse($mResponse);

					$bAuth = true;
				}
			}
			catch (\MailSo\Sieve\Exceptions\NegativeResponseException $oException)
			{
				// server rejected the credentials
				$this->writeLogException(
					new \MailSo\Sieve\Exceptions\LoginBadCredentialsException(
						$oException->GetResponses(), '', 0, $oException),
					\MailSo\Log\Enumerations\Type::ERROR, true);
			}

			if (!$bAuth)
			{
				// no mutually supported SASL mechanism
				$this->writeLogException(
					new \MailSo\Sieve\Exceptions\LoginBadMethodException(),
					\MailSo\Log\Enumerations\Type::ERROR, true);
			}
		}
		else
		{
			$this->writeLogException(
				new \MailSo\Sieve\Exceptions\LoginException(),
				\MailSo\Log\Enumerations\Type::ERROR, true);
		}

		$this->bIsLoggined = true;

		return $this;
	}
	/**
	 * Sends LOGOUT when a user is authenticated; no-op otherwise.
	 *
	 * @return \MailSo\Sieve\ManageSieveClient
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Sieve\Exceptions\NegativeResponseException
	 */
	public function Logout()
	{
		if ($this->bIsLoggined)
		{
			$this->sendRequestWithCheck('LOGOUT');
			$this->bIsLoggined = false;
		}

		return $this;
	}
	/**
	 * Lists the scripts stored on the server.
	 *
	 * @return array map of script name => bool (true when the script is ACTIVE)
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Sieve\Exceptions\NegativeResponseException
	 */
	public function ListScripts()
	{
		$this->sendRequest('LISTSCRIPTS');
		$mResponse = $this->parseResponse();
		$this->validateResponse($mResponse);

		$aResult = array();
		if (is_array($mResponse))
		{
			foreach ($mResponse as $sLine)
			{
				$aTokens = $this->parseLine($sLine);
				if (false === $aTokens)
				{
					continue;
				}

				// a line ending in "ACTIVE" marks the currently active script
				$aResult[$aTokens[0]] = 'ACTIVE' === substr($sLine, -6);
			}
		}

		return $aResult;
	}
	/**
	 * Re-queries and re-parses the server capabilities.
	 *
	 * @return array capability name => value
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Sieve\Exceptions\NegativeResponseException
	 */
	public function Capability()
	{
		$this->sendRequest('CAPABILITY');
		$mResponse = $this->parseResponse();
		$this->validateResponse($mResponse);
		$this->parseStartupResponse($mResponse);

		return $this->aCapa;
	}
	/**
	 * Sends NOOP (keep-alive / connection check).
	 *
	 * @return \MailSo\Sieve\ManageSieveClient
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Sieve\Exceptions\NegativeResponseException
	 */
	public function Noop()
	{
		$this->sendRequestWithCheck('NOOP');

		return $this;
	}
/**
* @param string $sScriptName
*
* @return string
*
* @throws \MailSo\Net\Exceptions\Exception
* @throws \MailSo\Sieve\Exceptions\NegativeResponseException
*/
public function GetScript($sScriptName)
{
$this->sendRequest('GETSCRIPT "'.$sScriptName.'"');
$mResponse = $this->parseResponse();
$this->validateResponse($mResponse);
$sScript = '';
if (is_array($mResponse) && 0 < count($mResponse))
{
if ('{' === $mResponse[0]{0})
{
array_shift($mResponse);
}
if (\in_array(\substr($mResponse[\count($mResponse) - 1], 0, 2), array('OK', 'NO')))
{
array_pop($mResponse);
}
$sScript = \implode("\n", $mResponse);
}
return $sScript;
}
	/**
	 * Uploads (creates or replaces) a script on the server.
	 *
	 * @param string $sScriptName
	 * @param string $sScriptSource
	 *
	 * @return \MailSo\Sieve\ManageSieveClient
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Sieve\Exceptions\NegativeResponseException
	 */
	public function PutScript($sScriptName, $sScriptSource)
	{
		// NOTE(review): stripslashes() assumes the source arrives slash-escaped
		// — confirm against callers.
		$sScriptSource = stripslashes($sScriptSource);
		// "{N+}" is a non-synchronizing literal announcing N bytes of script
		$this->sendRequest('PUTSCRIPT "'.$sScriptName.'" {'.strlen($sScriptSource).'+}');
		$this->sendRequestWithCheck($sScriptSource);

		return $this;
	}
	/**
	 * Asks the server to validate a script without storing it.
	 *
	 * @param string $sScriptSource
	 *
	 * @return \MailSo\Sieve\ManageSieveClient
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Sieve\Exceptions\NegativeResponseException
	 */
	public function CheckScript($sScriptSource)
	{
		// NOTE(review): stripslashes() assumes the source arrives slash-escaped
		// — confirm against callers.
		$sScriptSource = stripslashes($sScriptSource);
		$this->sendRequest('CHECKSCRIPT {'.strlen($sScriptSource).'+}');
		$this->sendRequestWithCheck($sScriptSource);

		return $this;
	}
	/**
	 * Marks the named script as the active one on the server.
	 *
	 * @param string $sScriptName
	 *
	 * @return \MailSo\Sieve\ManageSieveClient
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Sieve\Exceptions\NegativeResponseException
	 */
	public function SetActiveScript($sScriptName)
	{
		$this->sendRequestWithCheck('SETACTIVE "'.$sScriptName.'"');

		return $this;
	}
	/**
	 * Deletes the named script from the server.
	 *
	 * @param string $sScriptName
	 *
	 * @return \MailSo\Sieve\ManageSieveClient
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Sieve\Exceptions\NegativeResponseException
	 */
	public function DeleteScript($sScriptName)
	{
		$this->sendRequestWithCheck('DELETESCRIPT "'.$sScriptName.'"');

		return $this;
	}
	/**
	 * Returns the name of the currently active script, or '' when none is active.
	 *
	 * @return string
	 *
	 * @throws \MailSo\Net\Exceptions\Exception
	 * @throws \MailSo\Sieve\Exceptions\NegativeResponseException
	 */
	public function GetActiveScriptName()
	{
		$aList = $this->ListScripts();
		if (is_array($aList) && 0 < count($aList))
		{
			foreach ($aList as $sName => $bIsActive)
			{
				if ($bIsActive)
				{
					return $sName;
				}
			}
		}

		return '';
	}
/**
* @param string $sScriptName
*
* @return bool
*
* @throws \MailSo\Net\Exceptions\Exception
* @throws \MailSo\Sieve\Exceptions\NegativeResponseException
*/
public function IsActiveScript($sScriptName)
{
return $sScriptName === $this->GetActiveScriptName();
}
/**
* @param string $sLine
* @return array|false
*/
private function parseLine($sLine)
{
if (false === $sLine || null === $sLine || \in_array(\substr($sLine, 0, 2), array('OK', 'NO')))
{
return false;
}
$iStart = -1;
$iIndex = 0;
$aResult = false;
for ($iPos = 0; $iPos < \strlen($sLine); $iPos++)
{
if ('"' === $sLine[$iPos] && '\\' !== $sLine[$iPos])
{
if (-1 === $iStart)
{
$iStart = $iPos;
}
else
{
$aResult = \is_array($aResult) ? $aResult : array();
$aResult[$iIndex++] = \substr($sLine, $iStart + 1, $iPos - $iStart - 1);
$iStart = -1;
}
}
}
return \is_array($aResult) && isset($aResult[0]) ? $aResult : false;
}
/**
* @param string $sCommand
*
* @return void
*
* @throws \MailSo\Base\Exceptions\InvalidArgumentException
* @throws \MailSo\Net\Exceptions\Exception
*/
private function parseStartupResponse($mResponse)
{
foreach ($mResponse as $sLine)
{
$aTokens = $this->parseLine($sLine);
if (false === $aTokens || !isset($aTokens[0]) ||
\in_array(\substr($sLine, 0, 2), array('OK', 'NO')))
{
continue;
}
$sToken = \strtoupper($aTokens[0]);
$this->aCapa[$sToken] = isset($aTokens[1]) ? $aTokens[1] : '';
if (isset($aTokens[1]))
{
switch ($sToken) {
case 'SASL':
$this->aAuth = \explode(' ', \strtoupper($aTokens[1]));
break;
case 'SIEVE':
$this->aModules = \explode(' ', \strtoupper($aTokens[1]));
break;
}
}
}
}
/**
* @param string $sRequest
*
* @return void
*
* @throws \MailSo\Base\Exceptions\InvalidArgumentException
* @throws \MailSo\Net\Exceptions\Exception
*/
private function sendRequest($sRequest)
{
if (!\MailSo\Base\Validator::NotEmptyString($sRequest, true))
{
$this->writeLogException(
new \MailSo\Base\Exceptions\InvalidArgumentException(),
\MailSo\Log\Enumerations\Type::ERROR, true);
}
$this->IsConnected(true);
$sRequest = \trim($sRequest);
$this->sendRaw($sRequest);
}
/**
* @param string $sRequest
*
* @return void
*
* @throws \MailSo\Base\Exceptions\InvalidArgumentException
* @throws \MailSo\Net\Exceptions\Exception
* @throws \MailSo\Sieve\Exceptions\NegativeResponseException
*/
private function sendRequestWithCheck($sRequest)
{
$this->sendRequest($sRequest);
$this->validateResponse($this->parseResponse());
}
	/**
	 * Resolves a response line that ends with a literal marker ("{N}") by
	 * reading the next N bytes from the connection and splicing them into
	 * the line in place of the marker.
	 *
	 * @param string $sLine
	 *
	 * @return string the line with any trailing literal expanded
	 */
	private function convertEndOfLine($sLine)
	{
		$sLine = \trim($sLine);
		if ('}' === \substr($sLine, -1))
		{
			$iPos = \strrpos($sLine, '{');
			if (false !== $iPos)
			{
				// Text between the braces is the announced literal length.
				$sSunLine = \substr($sLine, $iPos + 1, -1);
				if (\is_numeric($sSunLine) && 0 < (int) $sSunLine)
				{
					$iLen = (int) $sSunLine;
					// Pull exactly $iLen further bytes into sResponseBuffer.
					$this->getNextBuffer($iLen, true);
					// Splice only when the full announced byte count arrived.
					if (strlen($this->sResponseBuffer) === $iLen)
					{
						// Replace from the '{' to the end of line with the literal data.
						$sLine = \trim(\substr_replace($sLine, $this->sResponseBuffer, $iPos));
					}
				}
			}
		}
		return $sLine;
	}
/**
* @return array|bool
*/
private function parseResponse()
{
$this->iRequestTime = \microtime(true);
$aResult = array();
do
{
$this->getNextBuffer();
$sLine = $this->sResponseBuffer;
if (false === $sLine)
{
break;
}
else if (\in_array(\substr($sLine, 0, 2), array('OK', 'NO')))
{
$aResult[] = $this->convertEndOfLine($sLine);
break;
}
else
{
$aResult[] = $this->convertEndOfLine($sLine);
}
}
while (true);
$this->writeLog((\microtime(true) - $this->iRequestTime),
\MailSo\Log\Enumerations\Type::TIME);
return $aResult;
}
/**
* @throws \MailSo\Sieve\Exceptions\NegativeResponseException
*/
private function validateResponse($aResponse)
{
if (!\is_array($aResponse) || 0 === \count($aResponse) ||
'OK' !== \substr($aResponse[\count($aResponse) - 1], 0, 2))
{
$this->writeLogException(
new \MailSo\Sieve\Exceptions\NegativeResponseException($aResponse),
\MailSo\Log\Enumerations\Type::WARNING, true);
}
}
	/**
	 * Label used to tag entries produced by this client in log output.
	 *
	 * @return string
	 */
	protected function getLogName()
	{
		return 'MANAGE-SIEVE';
	}
	/**
	 * Attaches a logger; delegates to the parent implementation and returns
	 * $this to keep the fluent interface.
	 *
	 * @param \MailSo\Log\Logger $oLogger
	 *
	 * @return \MailSo\Sieve\ManageSieveClient
	 *
	 * @throws \MailSo\Base\Exceptions\InvalidArgumentException
	 */
	public function SetLogger($oLogger)
	{
		parent::SetLogger($oLogger);
		return $this;
	}
}
| afterlogic/webmail-lite | libraries/MailSo/Sieve/ManageSieveClient.php | PHP | agpl-3.0 | 14,564 |
package com.ice.tar;
import java.math.BigInteger;
import java.util.ArrayList;
/**
* These are the standard static helpers for parsing/writing the header data.
* This could probably be cleaned up further.
*
* This library is under the Apache License Version 2.0
*
* Authors:
*
* @author Jeremy Lucier
*
* Thanks to Thomas Ledoux for his many contributions all over this code.
* He fixed the byte shifts and discrepancies between this and the official format.
*
*/
public class TarFileUtil {

	// Mask to treat a (signed) byte as an unsigned value when summing.
	private static final int BYTE_MASK = 255;

	/**
	 * Write the checksum field of a tar header as an octal value.
	 * Unlike the other numeric fields, the checksum field is padded with
	 * trailing spacer bytes (see getPostfixOctalBytes), matching the layout
	 * GNU/POSIX tar produces.
	 *
	 * NOTE(review): despite its "get...Bytes" name (and the original javadoc,
	 * which described parsing), this method WRITES into the buffer.
	 *
	 * @param value The checksum value to write.
	 * @param buf The header buffer to write into.
	 * @param offset The offset into the buffer at which to write.
	 * @param length The number of header bytes the field occupies.
	 * @return The new offset (offset + length).
	 */
	public static int getCheckSumOctalBytes(long value, byte[] buf, int offset, int length) {
		getPostfixOctalBytes(value, buf, offset, length, TarConstants.SPACER_BYTE);
		return offset + length;
	}

	/**
	 * Compute the checksum of a tar entry header: the sum of all header
	 * bytes, each taken as an unsigned value.
	 *
	 * @param buf The tar entry's header buffer.
	 * @return The computed checksum.
	 */
	public static long computeCheckSum(byte[] buf) {
		long sum = 0;
		for (int i = 0; i < buf.length; ++i) {
			sum += BYTE_MASK & buf[i];
		}
		return sum;
	}

	/**
	 * This method, like getNameBytes(), is intended to place a name into a
	 * TarHeader's buffer. However, this method is sophisticated enough to
	 * recognize long names (name.length() > NAMELEN). In these cases, the
	 * method will break the name into a prefix and suffix and place the name
	 * in the header in 'ustar' format. It is up to the TarEntry to manage the
	 * "entry header format". This method assumes the name is valid for the
	 * type of archive being generated.
	 *
	 * @param newName
	 *            The new name to place into the header buffer.
	 * @param outbuf
	 *            The buffer containing the entry header to modify.
	 * @param isGNUTar
	 *            When true, long names are written as-is (GNU tar handles
	 *            them differently); when false, names over 100 chars are
	 *            split into ustar prefix + name.
	 * @return The current offset in the tar header (always
	 *         TarConstants.NAMELEN).
	 * @throws InvalidHeaderException
	 *             If the name will not fit in the header.
	 */
	public static int getFileNameBytes(String newName, byte[] outbuf,
			boolean isGNUTar) throws InvalidHeaderException {

		if (isGNUTar == false && newName.length() > 100) {

			// Locate a pathname "break" prior to the maximum name length...
			int index = newName.indexOf("/", newName.length() - 100);
			if (index == -1) {
				throw new InvalidHeaderException(
						"file name is greater than 100 characters, " + newName);
			}

			// Get the "suffix subpath" of the name.
			String name = newName.substring(index + 1);

			// Get the "prefix subpath", or "prefix", of the name.
			String prefix = newName.substring(0, index);
			if (prefix.length() > TarConstants.PREFIXLEN) {
				throw new InvalidHeaderException(
						"file prefix is greater than 155 characters");
			}

			getNameBytes(name, outbuf, TarConstants.NAMEOFFSET,
					TarConstants.NAMELEN);

			getNameBytes(prefix, outbuf, TarConstants.PREFIXOFFSET,
					TarConstants.PREFIXLEN);

		} else {
			getNameBytes(newName, outbuf, TarConstants.NAMEOFFSET,
					TarConstants.NAMELEN);
		}

		// The offset, regardless of the format, is now the end of the
		// original name field.
		//
		return TarConstants.NAMELEN;
	}

	/**
	 * Write a long value into a header field as octal, left-padded with
	 * '0' bytes and NUL-terminated (see getOctalBytes).
	 *
	 * NOTE(review): the original javadoc described parsing; this method
	 * writes into the buffer.
	 *
	 * @param value The value to write.
	 * @param buf The header buffer to write into.
	 * @param offset The offset into the buffer at which to write.
	 * @param length The number of header bytes the field occupies.
	 * @return The new offset (offset + length).
	 */
	public static int getLongOctalBytes(long value, byte[] buf, int offset,
			int length) {
		getOctalBytes(value, buf, offset, length, TarConstants.ZERO_BYTE);
		return offset + length;
	}

	/**
	 * Write an octal long value for GNU multi-volume (-M) archives. GNU tar
	 * does not accept the spacer byte (32) here, so the value is rendered
	 * one byte wider, spacers are rewritten to '0' (48), and the digits are
	 * shifted right by one position with a leading '0'.
	 *
	 * @param value The value to write.
	 * @param buf The header buffer to write into.
	 * @param offset The offset into the buffer at which to write.
	 * @param length The number of header bytes the field occupies.
	 * @return The new offset (offset + length).
	 */
	public static int getLongOctalBytesMulti(long value, byte[] buf, int offset,
			int length) {

		byte[] temp = new byte[length + 1];
		getOctalBytes(value, temp, 0, length + 1, TarConstants.ZERO_BYTE);

		// Replace any spacer bytes (32) with ASCII zero (48).
		for (int m = 0; m < length + 1; m++) {
			if (temp[m] == 32) {
				temp[m] = (byte) 48;
			}
		}

		// Shift right one position, forcing a leading '0'.
		for (int i = 0; i < length; i++) {
			if (i > 0) {
				buf[offset + i] = temp[i - 1];
			} else {
				buf[offset + i] = (byte) 48;
			}
		}

		/*
		 * for(int i=0;i<length;i++){
		 *
		 * if(i>0){ buf[offset+i]=temp[i]; }else if(i==length+2) //break;
		 * buf[offset+i]=(byte) 0; else buf[offset+i]=(byte) 48; }
		 */

		// Free memory
		temp = null;

		return offset + length;
	}

	/**
	 * Move the bytes of a name string into the header's buffer, padding the
	 * remainder of the field with NUL bytes. Characters are narrowed to
	 * single bytes (no charset handling).
	 *
	 * @param name
	 *            The name to copy into the header.
	 * @param buf
	 *            The header buffer into which to copy the name.
	 * @param offset
	 *            The offset into the buffer at which to store.
	 * @param length
	 *            The number of header bytes to store.
	 * @return The new offset (offset + length).
	 */
	public static int getNameBytes(String name, byte[] buf, int offset,
			int length) {
		int i = 0;
		int nameLen = name.length();
		for (i = 0; i < length && i < nameLen; ++i) {
			buf[offset + i] = (byte) name.charAt(i);
		}

		// Leave as a prefix for loop...
		for (; i < length; ++i) {
			buf[offset + i] = 0;
		}
		return offset + length;
	}

	/**
	 * Write an octal value into a header field: NUL terminator in the last
	 * byte, octal digits right-aligned before it, and the remaining leading
	 * bytes filled with the given prefix byte.
	 *
	 * NOTE(review): the original javadoc described parsing; this method
	 * writes into the buffer.
	 *
	 * @param value The value to write.
	 * @param buf The header buffer to write into.
	 * @param offset The offset into the buffer at which to write.
	 * @param length The number of header bytes the field occupies.
	 * @param prefix The byte used to pad unused leading positions.
	 * @return The new offset (offset + length).
	 */
	public static int getOctalBytes(long value, byte[] buf, int offset, int length, byte prefix) {

		// Leave the prefix calls
		int idx = length - 1;

		// Set the last byte null
		buf[offset + idx] = 0;
		--idx;

		if (value == 0) {
			// Set the last value to zero
			buf[offset + idx] = TarConstants.ZERO_BYTE;
			--idx;
		} else {
			// Emit octal digits right-to-left, three bits at a time.
			for (long val = value; idx >= 0 && val > 0; --idx) {
				buf[offset + idx] = (byte) (TarConstants.ZERO_BYTE + (byte) (val & 7));
				val = val >> 3;
			}
		}

		// Leave for loop a a prefix iterator
		for (; idx >= 0; --idx) {
			buf[offset + idx] = prefix; // Was a spacer byte
		}

		return offset + length;
	}

	/**
	 * Write an octal value into a header field with POSTFIX padding: digits
	 * first, then a NUL terminator, then the given postfix byte to the end
	 * of the field. This matches GNU/Linux tar's checksum field layout.
	 *
	 * @param value The value to write.
	 * @param buf The header buffer to write into.
	 * @param offset The offset into the buffer at which to write.
	 * @param length The number of header bytes the field occupies.
	 * @param postfix The byte used to pad unused trailing positions.
	 * @return The new offset (offset + length).
	 */
	public static int getPostfixOctalBytes(long value, byte[] buf, int offset, int length, byte postfix) {

		int leftIdx = 0;

		if(value == 0) {
			buf[offset + leftIdx] = TarConstants.ZERO_BYTE;
			leftIdx++;
		} else {

			// We're going to shove all the digits of the long into a byte ArrayList,
			// then we're going to put them back in rev order into the buffer.
			// This isn't efficient and should probably be rewritten when I find some
			// time.
			//TODO: Optimize

			ArrayList<Byte> vals = new ArrayList<Byte>();
			long val = value;
			while(val > 0) {
				vals.add((byte) (TarConstants.ZERO_BYTE + (byte) (val & 7)));
				val = val >> 3;
			}

			// Now let's iterate in reverse through it and put it on the buffer
			for(int x = vals.size() - 1; x >= 0; x--) {
				buf[offset + leftIdx] = vals.get(x);
				leftIdx++;
			}

			vals.clear();
		}

		// NUL terminate after sequence
		buf[offset + leftIdx] = 0;
		leftIdx++;

		// Postfix iterator
		while(leftIdx < length) {
			buf[offset + leftIdx] = postfix; // Was a spacer byte
			leftIdx++;
		}

		return offset + length;
	}

	/**
	 * Move the bytes from the offset StringBuffer into the header's buffer,
	 * padding the remainder of the field with NUL bytes.
	 *
	 * @param offVal
	 *            The value to copy into the header.
	 * @param buf
	 *            The header buffer into which to copy.
	 * @param offset
	 *            The offset into the buffer at which to store.
	 * @param length
	 *            The number of header bytes to store.
	 * @return The new offset (offset + length).
	 */
	public static int getOffBytes(StringBuffer offVal, byte[] buf, int offset,
			int length) {
		int i;
		for (i = 0; i < length && i < offVal.length(); ++i) {
			buf[offset + i] = (byte) offVal.charAt(i);
		}

		// Leave prefix loop alone...
		for (; i < length; ++i) {
			buf[offset + i] = 0;
		}

		return offset + length;
	}

	/**
	 * Gets the real size as binary data for files that are larger than 8GB.
	 * GNU tar stores such sizes in base-256 with the high bit of the first
	 * byte set (0x80); otherwise the field is plain octal.
	 */
	public static long getSize(byte[] header, int offset, int length) {

		long test = parseOctal(header, offset, length);
		if (test <= 0 && header[offset] == (byte) 128) {
			// Base-256 encoding: strip the marker byte and decode the rest.
			byte[] last = new byte[length];

			for (int i = 0; i < length; i++) {
				last[i] = header[offset + i];
			}
			last[0] = (byte) 0;

			long rSize = new BigInteger(last).longValue();

			// Free memory
			last = null;

			return rSize;
		}

		return test;
	}

	/**
	 * Parse a file name from a header buffer. This is different from
	 * parseName() in that it recognizes 'ustar' names and will handle adding
	 * on the "prefix" field to the name.
	 *
	 * Contributed by Dmitri Tikhonov <dxt2431@yahoo.com>
	 *
	 * @param header
	 *            The full header buffer from which to parse (fixed ustar
	 *            offsets: name at 0..99, prefix at 345..499).
	 * @return The header's entry name.
	 */
	public static String parseFileName(byte[] header) {
		StringBuilder result = new StringBuilder(256);

		// If header[345] is not equal to zero, then it is the "prefix"
		// that 'ustar' defines. It must be prepended to the "normal"
		// name field. We are responsible for the separating '/'.
		//
		if (header[345] != 0) {
			for (int i = 345; i < 500 && header[i] != 0; ++i) {
				result.append((char) header[i]);
			}

			result.append("/");
		}

		for (int i = 0; i < 100 && header[i] != 0; ++i) {
			result.append((char) header[i]);
		}

		return result.toString();
	}

	/**
	 * Parse an entry name from a header buffer, stopping at the first NUL
	 * byte or the end of the field.
	 *
	 * @param header
	 *            The header buffer from which to parse.
	 * @param offset
	 *            The offset into the buffer from which to parse.
	 * @param length
	 *            The number of header bytes to parse.
	 * @return The header's entry name.
	 */
	public static String parseName(byte[] header, int offset, int length) {
		StringBuilder result = new StringBuilder(length);

		int end = offset + length;
		for (int i = offset; i < end; ++i) {
			if (header[i] == 0) {
				break;
			}
			result.append((char) header[i]);
		}

		return result.toString();
	}

	/**
	 * Parse an octal string from a header buffer. This is used for the file
	 * permission mode value. Leading spacer/'0' padding is skipped; the
	 * value ends at a NUL byte or at a spacer following the digits.
	 *
	 * @param header
	 *            The header buffer from which to parse.
	 * @param offset
	 *            The offset into the buffer from which to parse.
	 * @param length
	 *            The number of header bytes to parse.
	 * @return The long value of the octal string.
	 */
	public static long parseOctal(byte[] header, int offset, int length) {
		long result = 0;
		boolean stillPadding = true;

		int end = offset + length;
		for (int i = offset; i < end; ++i) {
			if (header[i] == 0) {
				break;
			}

			if (header[i] == TarConstants.SPACER_BYTE
					|| header[i] == TarConstants.ZERO_BYTE) { // == '0'
				if (stillPadding) {
					continue;
				}

				if (header[i] == TarConstants.SPACER_BYTE) {
					break;
				}
			}

			stillPadding = false;

			result = (result << 3) + (header[i] - TarConstants.ZERO_BYTE); // -
			// '0'
		}

		return result;
	}

	/**
	 * Sets the real size as binary data for files that are larger than 8GB:
	 * base-256 (two's-complement big-endian) with marker byte 0x80 in the
	 * first position. Counterpart of getSize().
	 */
	public static int setRealSize(long value, byte[] buf, int offset, int length) {

		BigInteger Rsize = new BigInteger("" + value);
		byte[] last = new byte[12];
		byte[] copier = Rsize.toByteArray();

		// Right-align the big-endian magnitude within the 12-byte field.
		for (int i = 0; i < copier.length; i++) {
			last[last.length - copier.length + i] = copier[i];
		}

		last[0] = (byte) 128;

		/*
		 * for(int i=0;i<last.length;i++){ //System.out.print(i+":");
		 * //printBinary(last[i]); }
		 */

		int lastLen = last.length;
		for (int i = 0; i < lastLen; i++) {
			buf[offset + i] = last[i];
		}

		last = null;
		copier = null;

		return offset + length;
	}
}
| ISCPIF/PSEExperiments | openmole-src/openmole/third-parties/com.ice.tar/src/main/java/com/ice/tar/TarFileUtil.java | Java | agpl-3.0 | 13,023 |
# -*- coding: utf-8 -*-
###############################################################################
#
# ODOO (ex OpenERP)
# Open Source Management Solution
# Copyright (C) 2001-2015 Micronaet S.r.l. (<http://www.micronaet.it>)
# Developer: Nicola Riolini @thebrush (<https://it.linkedin.com/in/thebrush>)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###############################################################################
# Odoo/OpenERP addon manifest for the `product_extra_photo` customization.
# Keys follow the legacy (pre-8.0) manifest schema — note the deprecated
# *_xml keys; presumably kept for compatibility with the targeted Odoo
# version — TODO confirm against the installation.
{
    'name': 'product_extra_photo',
    'version': '0.1',
    'category': 'Customization',
    'author': 'Micronaet s.r.l.',
    'website': 'http://www.micronaet.it',
    'license': 'AGPL-3',
    # Modules that must be installed before this one.
    'depends': ['base', 'product'],
    'init_xml': [],
    'demo_xml': [],
    # Data files loaded on install/update; view files are currently disabled.
    'update_xml': [
        'security/ir.model.access.csv',
        #'product.xml',
        #'wizard/wizard_import_view.xml',
        ],
    'active': False,
    'installable': True,
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Micronaet/micronaet-migration | __UNPORTED__/product_extra_photo/__openerp__.py | Python | agpl-3.0 | 1,570 |
/**
* Copyright (C) 2001-2017 by RapidMiner and the contributors
*
* Complete list of developers available at our web site:
*
* http://rapidminer.com
*
* This program is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either version 3
* of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
* even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License along with this program.
* If not, see http://www.gnu.org/licenses/.
*/
package com.rapidminer.generator;
import com.rapidminer.example.Attribute;
import com.rapidminer.example.ExampleSet;
import com.rapidminer.example.Statistics;
import com.rapidminer.operator.OperatorException;
import com.rapidminer.tools.math.*;
import java.util.Collections;
import java.util.LinkedList;
import java.util.List;
import java.util.Random;
/**
* Factory class to produce new attributes based on the fourier synthesis of the label mapped on an
* attribute dimension.
*
* @author Ingo Mierswa
*/
public class SinusFactory {

	/** Indicates the min evidence factor: peaks whose evidence falls below
	 *  this fraction of the strongest peak's evidence are discarded. */
	private static final double MIN_EVIDENCE = 0.2d;

	/**
	 * The constant ADAPTION_TYPES. Names for the frequency adaption
	 * strategies, indexed by the UNIFORMLY/UNIFORMLY_WITHOUT_NU/GAUSSIAN
	 * constants below.
	 */
	public static final String[] ADAPTION_TYPES = { "uniformly", "uniformly_without_nu", "gaussian" };

	/**
	 * The constant UNIFORMLY: spread frequencies uniformly in an
	 * epsilon * frequency band around the peak frequency.
	 */
	public static final int UNIFORMLY = 0;

	/**
	 * The constant UNIFORMLY_WITHOUT_NU: spread frequencies uniformly in an
	 * absolute epsilon band around the peak frequency.
	 */
	public static final int UNIFORMLY_WITHOUT_NU = 1;

	/**
	 * The constant GAUSSIAN: draw frequencies from a gaussian centered on
	 * the peak frequency with standard deviation epsilon.
	 */
	public static final int GAUSSIAN = 2;

	/**
	 * Generates this number of peaks in a range of <code>epsilon * frequency</code>. Necessary
	 * because the FT does not deliver the correct frequency (aliasing, leakage) in all cases. In
	 * later releases this should be replaced by a gradient search or a evolutionary search for the
	 * correct value.
	 */
	private int attributesPerPeak = 3;

	/**
	 * Generates this <code>peaksPerPeak</code> peaks in the range of
	 * <code>epsilon * frequency</code>. Necessary because the FT does not deliver the correct
	 * frequency (aliasing, leakage) in all cases. In later releases this should be replaced by a
	 * gradient search or a evolutionary search for the correct value.
	 */
	private double epsilon = 0.1;

	/** Indicates the type of frequency adaption. */
	private int adaptionType = UNIFORMLY;

	/**
	 * The maximal number of generated attributes for each possible attribute. Corresponds to the
	 * highest peaks in the frequency spectrum of the label in the source attribute's space.
	 */
	private int maxPeaks = 5;

	/** The fast fourier transformation calculator. */
	private FastFourierTransform fft = null;

	/**
	 * The spectrum filter type which should be applied on the spectrum after the fourier
	 * transformation.
	 */
	private SpectrumFilter filter = null;

	/** The algorithm to find the peaks in the frequency spectrum. */
	private PeakFinder peakFinder = null;

	/**
	 * Creates a new sinus factory which creates <code>maxPeaks</code> new peaks. Uses
	 * Blackman-Harris window function and no spectrum filter as default. The adaption type is
	 * gaussian with an epsilon of 0.1. The factory produces three attributes for each highest peak
	 * as default.
	 *
	 * NOTE(review): the field default is actually UNIFORMLY, not gaussian as
	 * this javadoc claims — confirm intended default before relying on it.
	 *
	 * @param maxPeaks the max peaks
	 */
	public SinusFactory(int maxPeaks) {
		this.maxPeaks = maxPeaks;
		this.fft = new FastFourierTransform(WindowFunction.BLACKMAN_HARRIS);
		this.filter = new SpectrumFilter(SpectrumFilter.NONE);
		this.peakFinder = new BinaryPeakFinder();
	}

	/**
	 * Sets adaption type. Expected to be one of UNIFORMLY,
	 * UNIFORMLY_WITHOUT_NU, GAUSSIAN.
	 *
	 * @param type the type
	 */
	public void setAdaptionType(int type) {
		this.adaptionType = type;
	}

	/**
	 * Sets epsilon, the width of the frequency band (or gaussian std-dev)
	 * used when adapting frequencies around a peak.
	 *
	 * @param epsilon the epsilon
	 */
	public void setEpsilon(double epsilon) {
		this.epsilon = epsilon;
	}

	/**
	 * Must be bigger than 2! @param attributesPerPeak the attributes per peak
	 *
	 * @param attributesPerPeak the attributes per peak
	 */
	public void setAttributePerPeak(int attributesPerPeak) {
		this.attributesPerPeak = attributesPerPeak;
	}

	/**
	 * Calculates the fourier transformation from the first attribute on the second and delivers the
	 * <code>maxPeaks</code> highest peaks. Returns a list with the highest attribute peaks.
	 *
	 * @param exampleSet the example set
	 * @param first the first
	 * @param second the second
	 * @return the attribute peaks
	 * @throws OperatorException the operator exception
	 */
	public List<AttributePeak> getAttributePeaks(ExampleSet exampleSet, Attribute first, Attribute second)
			throws OperatorException {
		exampleSet.recalculateAllAttributeStatistics();
		Complex[] result = fft.getFourierTransform(exampleSet, first, second);
		Peak[] spectrum = filter.filter(result, exampleSet.size());

		// Mean magnitude over the whole spectrum; used to normalize evidence.
		double average = 0.0d;
		for (int k = 0; k < spectrum.length; k++) {
			average += spectrum[k].getMagnitude();
		}
		average /= spectrum.length;

		List<Peak> peaks = peakFinder.getPeaks(spectrum);
		Collections.sort(peaks);
		if (maxPeaks < peaks.size()) {
			peaks = peaks.subList(0, maxPeaks);
		}

		// remember highest peaks
		// Relative standard deviation of the source attribute, used to damp
		// evidence for very noisy inputs.
		double inputDeviation = Math.sqrt(exampleSet.getStatistics(second, Statistics.VARIANCE))
				/ (exampleSet.getStatistics(second, Statistics.MAXIMUM)
						- exampleSet.getStatistics(second, Statistics.MINIMUM));
		double maxEvidence = Double.NaN;
		List<AttributePeak> attributes = new LinkedList<AttributePeak>();
		for (Peak peak : peaks) {
			double evidence = peak.getMagnitude() / average * (1.0d / inputDeviation);
			// First peak seen defines the reference evidence — assumes the
			// sort above orders peaks by descending relevance; confirm
			// Peak.compareTo if this matters.
			if (Double.isNaN(maxEvidence)) {
				maxEvidence = evidence;
			}
			if (evidence > MIN_EVIDENCE * maxEvidence) {
				attributes.add(new AttributePeak(second, peak.getIndex(), evidence));
			}
		}

		return attributes;
	}

	/**
	 * Generates a new sinus function attribute for all given attribute peaks. Since the frequency
	 * cannot be calculated exactly (leakage, aliasing), several new attribute may be added for each
	 * peak. These additional attributes are randomly chosen (uniformly in epsilon range, uniformly
	 * without nu, gaussian with epsilon as standard deviation)
	 *
	 * @param exampleSet the example set
	 * @param attributes the attributes
	 * @param random the random
	 * @throws GenerationException the generation exception
	 */
	public void generateSinusFunctions(ExampleSet exampleSet, List<AttributePeak> attributes, Random random)
			throws GenerationException {
		if (attributes.isEmpty()) {
			return;
		}
		Collections.sort(attributes);
		double totalMaxEvidence = attributes.get(0).getEvidence();
		for (AttributePeak ae : attributes) {
			if (ae.getEvidence() > MIN_EVIDENCE * totalMaxEvidence) {
				for (int i = 0; i < attributesPerPeak; i++) {
					double frequency = ae.getFrequency();
					// Spread candidate frequencies around the detected peak.
					switch (adaptionType) {
						case UNIFORMLY:
							if (attributesPerPeak != 1) {
								frequency = (double) i / (double) (attributesPerPeak - 1) * 2.0d * epsilon * frequency
										+ (frequency - epsilon * frequency);
							}
							break;
						case UNIFORMLY_WITHOUT_NU:
							if (attributesPerPeak != 1) {
								frequency = (double) i / (double) (attributesPerPeak - 1) * 2.0d * epsilon
										+ (frequency - epsilon);
							}
							break;
						case GAUSSIAN:
							frequency = random.nextGaussian() * epsilon + frequency;
							break;
					}

					// frequency constant
					List<Attribute> frequencyResult = generateAttribute(exampleSet, new ConstantGenerator(frequency));

					// scaling with frequency
					FeatureGenerator scale = new BasicArithmeticOperationGenerator(
							BasicArithmeticOperationGenerator.PRODUCT);
					scale.setArguments(new Attribute[] { frequencyResult.get(0), ae.getAttribute() });
					List<Attribute> scaleResult = generateAttribute(exampleSet, scale);

					// calc sin
					FeatureGenerator sin = new TrigonometricFunctionGenerator(TrigonometricFunctionGenerator.SINUS);
					sin.setArguments(new Attribute[] { scaleResult.get(0) });
					List<Attribute> sinResult = generateAttribute(exampleSet, sin);
					for (Attribute attribute : sinResult) {
						exampleSet.getAttributes().addRegular(attribute);
					}
				}
			}
		}
	}

	// Runs a single feature generator over the example table and returns the
	// attributes it produced.
	private List<Attribute> generateAttribute(ExampleSet exampleSet, FeatureGenerator generator) throws GenerationException {
		List<FeatureGenerator> generators = new LinkedList<FeatureGenerator>();
		generators.add(generator);
		return FeatureGenerator.generateAll(exampleSet.getExampleTable(), generators);
	}
}
| cm-is-dog/rapidminer-studio-core | src/main/java/com/rapidminer/generator/SinusFactory.java | Java | agpl-3.0 | 9,198 |
'use strict'
exports.up = function(knex) {
return knex.schema.createTable('comments', function(table) {
table.increments()
table.string('text').notNullable()
table.integer('userId')
table.integer('pageId')
table.integer('parentId')
table.timestamp('createdAt').defaultTo(knex.raw('CURRENT_TIMESTAMP'))
table.timestamp('modifiedAt').defaultTo(knex.raw('CURRENT_TIMESTAMP'))
table.timestamp('deletedAt')
})
}
exports.down = function(knex) {
return knex.schema.dropTable('comments')
}
| fiddur/some-comments | migrations/20150923143009_comments.js | JavaScript | agpl-3.0 | 524 |
<?php /* Authenticated-user page layout: <head> metadata, top header (app menu,
search, contacts, settings), sudo re-auth form, and the app content area.
Rendered by the OC template engine; $_ holds the template parameters and
$theme the active theme. Inserted PHP comment lines emit no output. */ ?>
<!DOCTYPE html>
<html class="ng-csp" data-placeholder-focus="false" lang="<?php p($_['language']); ?>" >
<head data-user="<?php p($_['user_uid']); ?>" data-user-displayname="<?php p($_['user_displayname']); ?>" data-requesttoken="<?php p($_['requesttoken']); ?>">
<meta charset="utf-8">
<title>
<?php
// Title: "<app> - <theme title>" when an application is set.
p(!empty($_['application'])?$_['application'].' - ':'');
p($theme->getTitle());
?>
</title>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="referrer" content="no-referrer">
<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=1.0">
<meta name="apple-itunes-app" content="app-id=<?php p($theme->getiTunesAppId()); ?>">
<meta name="apple-mobile-web-app-capable" content="yes">
<meta name="apple-mobile-web-app-status-bar-style" content="black">
<meta name="apple-mobile-web-app-title" content="<?php p((!empty($_['application']) && $_['appid']!='files')? $_['application']:$theme->getTitle()); ?>">
<meta name="mobile-web-app-capable" content="yes">
<meta name="theme-color" content="<?php p($theme->getColorPrimary()); ?>">
<link rel="icon" href="<?php print_unescaped(image_path($_['appid'], 'favicon.ico')); /* IE11+ supports png */ ?>">
<link rel="apple-touch-icon-precomposed" href="<?php print_unescaped(image_path($_['appid'], 'favicon-touch.png')); ?>">
<link rel="mask-icon" sizes="any" href="<?php print_unescaped(image_path($_['appid'], 'favicon-mask.svg')); ?>" color="<?php p($theme->getColorPrimary()); ?>">
<link rel="manifest" href="<?php print_unescaped(image_path($_['appid'], 'manifest.json')); ?>">
<?php emit_css_loading_tags($_); ?>
<?php emit_script_loading_tags($_); ?>
<?php print_unescaped($_['headers']); ?>
</head>
<body id="<?php p($_['bodyid']);?>">
<?php include 'layout.noscript.warning.php'; ?>
<div id="notification-container">
<div id="notification"></div>
</div>
<?php /* Top navigation bar: logo, per-app entries, and the overflow
("More apps") menu filled from $_['navigation']. */ ?>
<header role="banner"><div id="header">
<div class="header-left">
<a href="<?php print_unescaped(link_to('', 'index.php')); ?>"
id="nextcloud">
<div class="logo logo-icon">
<h1 class="hidden-visually">
<?php p($theme->getName()); ?> <?php p(!empty($_['application'])?$_['application']: $l->t('Apps')); ?>
</h1>
</div>
</a>
<ul id="appmenu" <?php if ($_['themingInvertMenu']) { ?>class="inverted"<?php } ?>>
<?php foreach ($_['navigation'] as $entry): ?>
<li data-id="<?php p($entry['id']); ?>" class="hidden">
<a href="<?php print_unescaped($entry['href']); ?>"
<?php if ($entry['active']): ?> class="active"<?php endif; ?>>
<svg width="20" height="20" viewBox="0 0 20 20">
<?php if ($_['themingInvertMenu']) { /* color-invert filter for dark-on-light themes */ ?>
<defs><filter id="invertMenuMain-<?php p($entry['id']); ?>"><feColorMatrix in="SourceGraphic" type="matrix" values="-1 0 0 0 1 0 -1 0 0 1 0 0 -1 0 1 0 0 0 1 0" /></filter></defs>
<?php } ?>
<image x="0" y="0" width="20" height="20" preserveAspectRatio="xMinYMin meet"<?php if ($_['themingInvertMenu']) { ?> filter="url(#invertMenuMain-<?php p($entry['id']); ?>)"<?php } ?> xlink:href="<?php print_unescaped($entry['icon'] . '?v=' . $_['versionHash']); ?>" class="app-icon" />
</svg>
<div class="icon-loading-small-dark"
style="display:none;"></div>
</a>
<span>
<?php p($entry['name']); ?>
</span>
</li>
<?php endforeach; ?>
<li id="more-apps" class="menutoggle">
<a href="#">
<div class="icon-more-white"></div>
<span><?php p($l->t('More apps')); ?></span>
</a>
</li>
</ul>
<nav role="navigation">
<div id="navigation" style="display: none;">
<div id="apps">
<ul>
<?php foreach($_['navigation'] as $entry): ?>
<li data-id="<?php p($entry['id']); ?>">
<a href="<?php print_unescaped($entry['href']); ?>"
<?php if( $entry['active'] ): ?> class="active"<?php endif; ?>>
<svg width="16" height="16" viewBox="0 0 16 16">
<defs><filter id="invertMenuMore-<?php p($entry['id']); ?>"><feColorMatrix in="SourceGraphic" type="matrix" values="-1 0 0 0 1 0 -1 0 0 1 0 0 -1 0 1 0 0 0 1 0"></feColorMatrix></filter></defs>
<image x="0" y="0" width="16" height="16" preserveAspectRatio="xMinYMin meet" filter="url(#invertMenuMore-<?php p($entry['id']); ?>)" xlink:href="<?php print_unescaped($entry['icon'] . '?v=' . $_['versionHash']); ?>" class="app-icon"></image>
</svg>
<div class="icon-loading-small-dark" style="display:none;"></div>
<span><?php p($entry['name']); ?></span>
</a>
</li>
<?php endforeach; ?>
</ul>
</div>
</div>
</nav>
</div>
<?php /* Right side of the header: search box, contacts menu, and the
user/settings dropdown (avatar + entries from $_['settingsnavigation']). */ ?>
<div class="header-right">
<form class="searchbox" action="#" method="post" role="search" novalidate>
<label for="searchbox" class="hidden-visually">
<?php p($l->t('Search'));?>
</label>
<input id="searchbox" type="search" name="query"
value="" required
autocomplete="off">
<button class="icon-close-white" type="reset"><span class="hidden-visually"><?php p($l->t('Reset search'));?></span></button>
</form>
<div id="contactsmenu">
<div class="icon-contacts menutoggle" tabindex="0" role="link"></div>
<div class="menu"></div>
</div>
<div id="settings">
<div id="expand" tabindex="0" role="link" class="menutoggle">
<div class="avatardiv<?php if ($_['userAvatarSet']) { print_unescaped(' avatardiv-shown'); } else { print_unescaped('" style="display: none'); } ?>">
<?php if ($_['userAvatarSet']): ?>
<img alt="" width="32" height="32"
src="<?php p(\OC::$server->getURLGenerator()->linkToRoute('core.avatar.getAvatar', ['userId' => $_['user_uid'], 'size' => 32, 'v' => $_['userAvatarVersion']]));?>"
srcset="<?php p(\OC::$server->getURLGenerator()->linkToRoute('core.avatar.getAvatar', ['userId' => $_['user_uid'], 'size' => 64, 'v' => $_['userAvatarVersion']]));?> 2x, <?php p(\OC::$server->getURLGenerator()->linkToRoute('core.avatar.getAvatar', ['userId' => $_['user_uid'], 'size' => 128, 'v' => $_['userAvatarVersion']]));?> 4x"
>
<?php endif; ?>
</div>
<div id="expandDisplayName" class="icon-settings-white"></div>
</div>
<div id="expanddiv" style="display:none;">
<ul>
<?php foreach($_['settingsnavigation'] as $entry):?>
<li>
<a href="<?php print_unescaped($entry['href']); ?>"
<?php if( $entry["active"] ): ?> class="active"<?php endif; ?>>
<img alt="" src="<?php print_unescaped($entry['icon'] . '?v=' . $_['versionHash']); ?>">
<?php p($entry['name']) ?>
</a>
</li>
<?php endforeach; ?>
</ul>
</div>
</div>
</div>
</div></header>
<?php /* Hidden password-confirmation dialog used for actions that require
sudo mode; toggled by client-side script. */ ?>
<div id="sudo-login-background" class="hidden"></div>
<form id="sudo-login-form" class="hidden">
<label>
<?php p($l->t('This action requires you to confirm your password')); ?><br/>
<input type="password" class="question" autocomplete="new-password" name="question" value=" <?php /* Hack against browsers ignoring autocomplete="off" */ ?>"
placeholder="<?php p($l->t('Confirm your password')); ?>" />
</label>
<input class="confirm" value="<?php p($l->t('Confirm')); ?>" type="submit">
</form>
<div id="content-wrapper">
<div id="content" class="app-<?php p($_['appid']) ?>" role="main">
<?php print_unescaped($_['content']); ?>
</div>
</div>
</body>
</html>
| michaelletzgus/nextcloud-server | core/templates/layout.user.php | PHP | agpl-3.0 | 7,561 |
/**
*
*/
package org.aksw.iguana.dg.generator;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.StringReader;

import org.aksw.iguana.commons.rabbit.RabbitMQUtils;
import org.aksw.iguana.dp.loader.LoaderManager;

import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
/**
* This abstract class will implement all the underlying methods
* to send the data to the loader Manager in well sized snippets
*
* @author f.conrads
*
*/
public abstract class AbstractDataGenerator implements DataGenerator {

    /** Maximum number of statements (resp. N-Triples lines) per snippet. */
    protected int maxSize = 100;

    /** ID of the dataset the generated data belongs to. */
    protected String datasetID;

    /** Model a concrete generator may fill inside {@link #generate()}. */
    protected Model data = ModelFactory.createDefaultModel();

    /** Manager that receives the generated snippets. */
    private LoaderManager lmanager;

    /**
     * Calls {@link #generate()} and afterwards sends the inner model
     * {@link #data} to the loader manager.
     *
     * @throws Exception if generation or upload fails
     */
    public void generateData() throws Exception {
        generate();
        sendData(data);
    }

    /**
     * This method should generate your data.
     *
     * To make use of the underlying abstract class, add your data to the
     * {@link #data} model or use the sendDataSnippet(byte[]) /
     * sendData(Model) methods directly. Data added to {@link #data} does not
     * have to be sent manually: it is sent as soon as generate() returns.
     *
     * If your model is too big, you can send snippets of the dataset while
     * generating, using sendData(Model): it automatically splits the model
     * into blocks of at most {@link #maxSize} statements.
     *
     * @throws Exception if generation fails
     */
    public abstract void generate() throws Exception;

    /**
     * Sends the byte array to the loader manager.
     *
     * @param data serialized snippet to upload
     * @throws Exception if the upload fails
     */
    public void sendDataSnippet(byte[] data) throws Exception {
        lmanager.upload(data, datasetID);
    }

    /**
     * Sends a {@link org.apache.jena.rdf.model.Model} in snippets of at most
     * {@link #maxSize} statements each.
     *
     * Fixes over the previous implementation: the statement counter is reset
     * for every snippet (the old code stopped filling snippets after the
     * first one and looped forever), and the iterator is only advanced while
     * it still has statements (the old code threw NoSuchElementException for
     * models whose size was not a multiple of maxSize).
     *
     * @param m model to split and send
     * @throws Exception if the upload fails
     */
    public void sendData(Model m) throws Exception {
        StmtIterator iterator = m.listStatements();
        while (iterator.hasNext()) {
            Model snippet = ModelFactory.createDefaultModel();
            snippet.setNsPrefixes(m.getNsPrefixMap());
            int count = 0;
            while (count < maxSize && iterator.hasNext()) {
                snippet.add(iterator.next());
                count++;
            }
            sendDataSnippet(RabbitMQUtils.getData(snippet));
        }
    }

    /**
     * Sends an N-Triples file in well sized snippets.
     *
     * Fixes over the previous implementation: each line is parsed into the
     * model as N-Triples data via a Reader (Model.read(String) would have
     * interpreted the line as a URL to dereference), and the line counter is
     * actually incremented and reset so snippets are bounded by
     * {@link #maxSize}.
     *
     * @param ntripleFile file containing one triple per line
     * @throws Exception if reading or uploading fails
     */
    public void sendData(File ntripleFile) throws Exception {
        Model snippet = ModelFactory.createDefaultModel();
        int count = 0;
        try (BufferedReader reader = new BufferedReader(new FileReader(ntripleFile))) {
            String line;
            while (null != (line = reader.readLine())) {
                // parse the single N-Triples line into the snippet model
                snippet.read(new StringReader(line), null, "N-TRIPLE");
                count++;
                if (count >= maxSize) {
                    sendDataSnippet(RabbitMQUtils.getData(snippet));
                    snippet = ModelFactory.createDefaultModel();
                    count = 0;
                }
            }
            // if the snippet still holds data, send it and close the model
            if (!snippet.isEmpty()) {
                sendDataSnippet(RabbitMQUtils.getData(snippet));
                snippet.close();
            }
        }
    }

    /**
     * Sets the loader manager which should handle the uploads.
     *
     * @param lmanager manager receiving the snippets
     */
    public void setLoaderManager(LoaderManager lmanager) {
        this.lmanager = lmanager;
    }
}
| AKSW/IGUANA | iguana.datagenerator/src/main/java/org/aksw/iguana/dg/generator/AbstractDataGenerator.java | Java | agpl-3.0 | 3,299 |
/* Copyright (C) 2013 Marco Heisig
This file is part of Rabenstein.
Rabenstein is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License as published by the Free
Software Foundation, either version 3 of the License, or (at your option) any
later version.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
details.
You should have received a copy of the GNU Affero General Public License along
with this program. If not, see <http://www.gnu.org/licenses/>. */
#ifndef RABENSTEIN__ABOUT_WINDOW_HPP
#define RABENSTEIN__ABOUT_WINDOW_HPP
#include <QtGui/QDialog>
namespace Rabenstein {
/// Modal "About" dialog; construction details live in the .cpp file.
class AboutWindow : public QDialog
{
    Q_OBJECT
public:
    /// \param parent owning widget, forwarded to the QDialog constructor.
    /// `explicit` prevents an accidental implicit QWidget* -> AboutWindow
    /// conversion through this single-argument constructor.
    explicit AboutWindow(QWidget* parent);
    virtual ~AboutWindow();
};
}
#endif // RABENSTEIN__ABOUT_WINDOW_HPP
| te42kyfo/Pirates-of-Rabenstein | include/AboutWindow.hpp | C++ | agpl-3.0 | 996 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#.apidoc title: Object Relational Mapping
#.apidoc module-mods: member-order: bysource
"""
  Object-relational mapping to the database (PostgreSQL) module
     * Hierarchical structure
     * Constraint consistency and validations
     * Object metadata depending on its status
     * Optimised processing by complex queries (multiple actions at once)
     * Default field values
     * Permissions optimisation
     * Persistent object: PostgreSQL database
     * Data conversions
     * Multi-level caching system
     * Two different inheritance mechanisms
     * Fields:
          - classical (varchar, integer, boolean, ...)
          - relational (one2many, many2one, many2many)
          - functions
"""
import babel.dates
import calendar
import collections
import copy
import datetime
import itertools
import logging
import operator
import pickle
import re
import simplejson
import time
import traceback
import types
import psycopg2
from lxml import etree
import fields
import openerp
import openerp.netsvc as netsvc
import openerp.tools as tools
from openerp.tools.config import config
from openerp.tools.misc import CountingStream
from openerp.tools.safe_eval import safe_eval as eval
from openerp.tools.translate import _
from openerp import SUPERUSER_ID
from query import Query
_logger = logging.getLogger(__name__)
_schema = logging.getLogger(__name__ + '.schema')
# List of etree._Element subclasses that we choose to ignore when parsing XML.
from openerp.tools import SKIPPED_ELEMENT_TYPES
regex_order = re.compile('^(([a-z0-9_]+|"[a-z0-9_]+")( *desc| *asc)?( *, *|))+$', re.I)
regex_object_name = re.compile(r'^[a-z0-9_.]+$')
def transfer_field_to_modifiers(field, modifiers):
    """Fill `modifiers` from a field descriptor (fields_get() dict).

    For each of 'invisible', 'readonly' and 'required', the field's static
    value is the default; states listed in field['states'] that override the
    default become a [('state', 'in'/'not in', [...])] domain instead.
    """
    states = field.get("states", {})
    for attr in ('invisible', 'readonly', 'required'):
        default = bool(field.get(attr))
        # states whose modifier value differs from the static default
        exceptions = [state
                      for state, modifs in states.items()
                      for modif in modifs
                      if modif[0] == attr and modif[1] != default]
        if exceptions:
            modifiers[attr] = [("state", "not in" if default else "in", exceptions)]
        else:
            modifiers[attr] = default
# Don't deal with groups, it is done by check_group().
# Need the context to evaluate the invisible attribute on tree views.
# For non-tree views, the context shouldn't be given.
def transfer_node_to_modifiers(node, modifiers, context=None, in_tree_view=False):
    """Merge a view node's attrs/states/invisible/readonly/required
    attributes into `modifiers`.

    The context is only needed to evaluate the invisible attribute on tree
    views; for non-tree views it shouldn't be given. Groups are not handled
    here (check_group() does that).
    """
    attrs = node.get('attrs')
    if attrs:
        modifiers.update(eval(attrs))

    states = node.get('states')
    if states:
        state_domain = ('state', 'not in', states.split(','))
        if isinstance(modifiers.get('invisible'), list):
            # TODO combine with AND or OR, use implicit AND for now.
            modifiers['invisible'].append(state_domain)
        else:
            modifiers['invisible'] = [state_domain]

    for attr in ('invisible', 'readonly', 'required'):
        value = node.get(attr)
        if not value:
            continue
        v = bool(eval(value, {'context': context or {}}))
        if in_tree_view and attr == 'invisible':
            # Invisible in a tree view has a specific meaning, make it a
            # new key in the modifiers attribute.
            modifiers['tree_invisible'] = v
        elif v or not isinstance(modifiers.get(attr), list):
            # Don't set the attribute to False if a dynamic value was
            # provided (i.e. a domain from attrs or states).
            modifiers[attr] = v
def simplify_modifiers(modifiers):
    """Drop falsy invisible/readonly/required entries (they are the
    client-side defaults, so serializing them is pointless)."""
    for key in ('invisible', 'readonly', 'required'):
        if not modifiers.get(key, True):
            del modifiers[key]
def transfer_modifiers_to_node(modifiers, node):
    """Serialize a non-empty `modifiers` dict (simplified first) into the
    node's "modifiers" attribute as JSON; empty dicts leave the node alone."""
    if not modifiers:
        return
    simplify_modifiers(modifiers)
    node.set('modifiers', simplejson.dumps(modifiers))
def setup_modifiers(node, field=None, context=None, in_tree_view=False):
    """Compute the ``modifiers`` attribute of ``node`` from the node's own
    attributes and (when given) the field descriptor, then store it on the
    node. Alters its first argument in-place.

    :param node: ``field`` node from an OpenERP view
    :type node: lxml.etree._Element
    :param dict field: field descriptor corresponding to the provided node
    :param dict context: execution context used to evaluate node attributes
    :param bool in_tree_view: triggers the ``tree_invisible`` code path:
        in tree views there are two levels of invisibility -- cell content
        (``invisible``: the column exists but the cell is blank) and whole
        column invisibility (``tree_invisible``).
    :returns: nothing
    """
    modifiers = {}
    if field is not None:
        transfer_field_to_modifiers(field, modifiers)
    transfer_node_to_modifiers(node, modifiers,
                               context=context, in_tree_view=in_tree_view)
    transfer_modifiers_to_node(modifiers, node)
def test_modifiers(what, expected):
    """Self-test helper: build modifiers from `what` (an XML string fed to
    transfer_node_to_modifiers, or a fields_get()-style dict fed to
    transfer_field_to_modifiers) and assert their JSON form equals
    `expected`. Any other type is silently ignored."""
    modifiers = {}
    if isinstance(what, basestring):
        transfer_node_to_modifiers(etree.fromstring(what), modifiers)
    elif isinstance(what, dict):
        transfer_field_to_modifiers(what, modifiers)
    else:
        return
    simplify_modifiers(modifiers)
    json = simplejson.dumps(modifiers)
    assert json == expected, "%s != %s" % (json, expected)
# To use this test:
# import openerp
# openerp.osv.orm.modifiers_tests()
def modifiers_tests():
    """Run the modifiers machinery self-tests (see test_modifiers)."""
    cases = [
        ('<field name="a"/>', '{}'),
        ('<field name="a" invisible="1"/>', '{"invisible": true}'),
        ('<field name="a" readonly="1"/>', '{"readonly": true}'),
        ('<field name="a" required="1"/>', '{"required": true}'),
        ('<field name="a" invisible="0"/>', '{}'),
        ('<field name="a" readonly="0"/>', '{}'),
        ('<field name="a" required="0"/>', '{}'),
        # TODO order is not guaranteed
        ('<field name="a" invisible="1" required="1"/>',
         '{"invisible": true, "required": true}'),
        ('<field name="a" invisible="1" required="0"/>', '{"invisible": true}'),
        ('<field name="a" invisible="0" required="1"/>', '{"required": true}'),
        ("""<field name="a" attrs="{'invisible': [('b', '=', 'c')]}"/>""",
         '{"invisible": [["b", "=", "c"]]}'),
        # The dictionary is supposed to be the result of fields_get().
        ({}, '{}'),
        ({"invisible": True}, '{"invisible": true}'),
        ({"invisible": False}, '{}'),
    ]
    for what, expected in cases:
        test_modifiers(what, expected)
def check_object_name(name):
    """ Check if the given name is a valid openerp object name.

    The _name attribute in osv and osv_memory objects is subject to
    some restrictions; this returns whether the given name is allowed.

    TODO: this is an approximation. The goal of the approximation is to
    disallow uppercase characters (in some places we quote table/column
    names and in others not, which leads to errors like:
    psycopg2.ProgrammingError: relation "xxx" does not exist).

    The same restriction should apply to both osv and osv_memory
    objects for consistency.
    """
    return regex_object_name.match(name) is not None
def raise_on_invalid_object_name(name):
    """Log and raise except_orm when `name` is not a valid model name."""
    if check_object_name(name):
        return
    msg = "The _name attribute %s is not valid." % name
    _logger.error(msg)
    raise except_orm('ValueError', msg)
# Map SQL "ON DELETE" action names to the single-letter codes PostgreSQL
# stores in pg_constraint.confdeltype.
# NOTE(review): the consumer is outside this chunk -- presumably the
# foreign-key introspection in the schema-update code; confirm there.
POSTGRES_CONFDELTYPES = {
    'RESTRICT': 'r',
    'NO ACTION': 'a',
    'CASCADE': 'c',
    'SET NULL': 'n',
    'SET DEFAULT': 'd',
}
def intersect(la, lb):
    """Return the elements of `la` that also occur in `lb`, as a list,
    preserving `la`'s order (duplicates included)."""
    return [x for x in la if x in lb]
def fix_import_export_id_paths(fieldname):
    """
    Fixes the id fields in import and exports, and splits field paths
    on '/'.

    ``foo.id`` becomes the path ``['foo', '.id']`` (database id) and
    ``foo:id`` becomes ``['foo', 'id']`` (external id).

    :param str fieldname: name of the field to import/export
    :return: split field name
    :rtype: list of str
    """
    normalized = re.sub(r'([^/]):id', r'\1/id',
                        re.sub(r'([^/])\.id', r'\1/.id', fieldname))
    return normalized.split('/')
class except_orm(Exception):
    """ORM-level error carrying a title (``name``) and a message
    (``value``); both are also exposed through ``args``."""

    def __init__(self, name, value):
        # Exception.__init__ stores (name, value) in self.args
        super(except_orm, self).__init__(name, value)
        self.name = name
        self.value = value
class BrowseRecordError(Exception):
    # Error type for invalid browse_record construction/usage; currently only
    # referenced by the commented-out sanity checks in browse_record.__init__.
    pass
class browse_null(object):
    """ Readonly null row: stands in for an empty relational value.

    Any attribute or item access yields None, its id is False, and it is
    falsy / empty when converted (bool, int, str, unicode).
    """

    def __init__(self):
        self.id = False

    def __getattr__(self, name):
        return None # XXX: return self ?

    def __getitem__(self, name):
        return None

    def __nonzero__(self):
        return False

    def __int__(self):
        return False

    def __str__(self):
        return ''

    def __unicode__(self):
        return u''
#
# TODO: execute an object method on browse_record_list
#
class browse_record_list(list):
    """ Collection of browse objects.

    Such an instance is returned by ``browse([ids..])``; it behaves as a
    plain list of browse() records but also carries the evaluation context.
    """

    def __init__(self, lst, context=None):
        super(browse_record_list, self).__init__(lst)
        # fall back to an empty context when none (or a falsy one) is given
        self.context = context or {}
class browse_record(object):
    """ An object that behaves like a row of an object's table.
        It has attributes after the columns of the corresponding object.

        Examples::

            uobj = pool.get('res.users')
            user_rec = uobj.browse(cr, uid, 104)
            name = user_rec.name
    """

    def __init__(self, cr, uid, id, table, cache, context=None,
                 list_class=browse_record_list, fields_process=None):
        """
        :param table: the browsed object (inherited from orm)
        :param dict cache: a dictionary of model->field->data to be shared
                           across browse objects, thus reducing the SQL
                           read()s. It can speed up things a lot, but also be
                           disastrous if not discarded after write()/unlink()
                           operations
        :param dict context: dictionary with an optional context
        """
        if fields_process is None:
            fields_process = {}
        if context is None:
            context = {}
        self._list_class = list_class
        self._cr = cr
        self._uid = uid
        self._id = id
        self._table = table # deprecated, use _model!
        self._model = table
        self._table_name = self._table._name
        self.__logger = logging.getLogger('openerp.osv.orm.browse_record.' + self._table_name)
        self._context = context
        self._fields_process = fields_process

        # per-model slice of the shared cache; rows are stored as
        # {id: {field_name: value}}
        cache.setdefault(table._name, {})
        self._data = cache[table._name]

#        if not (id and isinstance(id, (int, long,))):
#            raise BrowseRecordError(_('Wrong ID for the browse record, got %r, expected an integer.') % (id,))
#        if not table.exists(cr, uid, id, context):
#            raise BrowseRecordError(_('Object %s does not exists') % (self,))

        if id not in self._data:
            self._data[id] = {'id': id}

        self._cache = cache

    def __getitem__(self, name):
        # 'id' is always known locally and never triggers a read()
        if name == 'id':
            return self._id

        if name not in self._data[self._id]:
            # build the list of fields we will fetch

            # fetch the definition of the field which was asked for
            if name in self._table._columns:
                col = self._table._columns[name]
            elif name in self._table._inherit_fields:
                col = self._table._inherit_fields[name][2]
            elif hasattr(self._table, str(name)):
                # not a column: expose model attributes; methods are wrapped
                # so they receive (cr, uid, [id], ..., context=...)
                attr = getattr(self._table, name)
                if isinstance(attr, (types.MethodType, types.LambdaType, types.FunctionType)):
                    def function_proxy(*args, **kwargs):
                        if 'context' not in kwargs and self._context:
                            kwargs.update(context=self._context)
                        return attr(self._cr, self._uid, [self._id], *args, **kwargs)
                    return function_proxy
                else:
                    return attr
            else:
                error_msg = "Field '%s' does not exist in object '%s'" % (name, self)
                self.__logger.warning(error_msg)
                if self.__logger.isEnabledFor(logging.DEBUG):
                    self.__logger.debug(''.join(traceback.format_stack()))
                raise KeyError(error_msg)

            # if the field is a classic one or a many2one, we'll fetch all classic and many2one fields
            if col._prefetch and not col.groups:
                # gen the list of "local" (ie not inherited) fields which are classic or many2one
                field_filter = lambda x: x[1]._classic_write and x[1]._prefetch and not x[1].groups
                fields_to_fetch = filter(field_filter, self._table._columns.items())
                # gen the list of inherited fields
                inherits = map(lambda x: (x[0], x[1][2]), self._table._inherit_fields.items())
                # complete the field list with the inherited fields which are classic or many2one
                fields_to_fetch += filter(field_filter, inherits)
            # otherwise we fetch only that field
            else:
                fields_to_fetch = [(name, col)]
            # prefetch for every cached id still missing this field, not just ours
            ids = filter(lambda id: name not in self._data[id], self._data.keys())
            # read the results
            field_names = map(lambda x: x[0], fields_to_fetch)
            field_values = self._table.read(self._cr, self._uid, ids, field_names, context=self._context, load="_classic_write")

            # TODO: improve this, very slow for reports
            if self._fields_process:
                lang = self._context.get('lang', 'en_US') or 'en_US'
                lang_obj_ids = self.pool.get('res.lang').search(self._cr, self._uid, [('code', '=', lang)])
                if not lang_obj_ids:
                    raise Exception(_('Language with code "%s" is not defined in your system !\nDefine it through the Administration menu.') % (lang,))
                lang_obj = self.pool.get('res.lang').browse(self._cr, self._uid, lang_obj_ids[0])

                for field_name, field_column in fields_to_fetch:
                    if field_column._type in self._fields_process:
                        for result_line in field_values:
                            result_line[field_name] = self._fields_process[field_column._type](result_line[field_name])
                            if result_line[field_name]:
                                result_line[field_name].set_value(self._cr, self._uid, result_line[field_name], self, field_column, lang_obj)

            if not field_values:
                # Where did those ids come from? Perhaps old entries in ir_model_dat?
                _logger.warning("No field_values found for ids %s in %s", ids, self)
                raise KeyError('Field %s not found in %s'%(name, self))
            # create browse records for 'remote' objects
            for result_line in field_values:
                new_data = {}
                for field_name, field_column in fields_to_fetch:
                    if field_column._type == 'many2one':
                        if result_line[field_name]:
                            obj = self._table.pool.get(field_column._obj)
                            if isinstance(result_line[field_name], (list, tuple)):
                                value = result_line[field_name][0]
                            else:
                                value = result_line[field_name]
                            if value:
                                # FIXME: this happen when a _inherits object
                                #        overwrite a field of it parent. Need
                                #        testing to be sure we got the right
                                #        object and not the parent one.
                                if not isinstance(value, browse_record):
                                    if obj is None:
                                        # In some cases the target model is not available yet, so we must ignore it,
                                        # which is safe in most cases, this value will just be loaded later when needed.
                                        # This situation can be caused by custom fields that connect objects with m2o without
                                        # respecting module dependencies, causing relationships to be connected to soon when
                                        # the target is not loaded yet.
                                        continue
                                    new_data[field_name] = browse_record(self._cr,
                                        self._uid, value, obj, self._cache,
                                        context=self._context,
                                        list_class=self._list_class,
                                        fields_process=self._fields_process)
                                else:
                                    new_data[field_name] = value
                            else:
                                new_data[field_name] = browse_null()
                        else:
                            new_data[field_name] = browse_null()
                    elif field_column._type in ('one2many', 'many2many') and len(result_line[field_name]):
                        new_data[field_name] = self._list_class([browse_record(self._cr, self._uid, id, self._table.pool.get(field_column._obj), self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process) for id in result_line[field_name]], self._context)
                    elif field_column._type == 'reference':
                        if result_line[field_name]:
                            if isinstance(result_line[field_name], browse_record):
                                new_data[field_name] = result_line[field_name]
                            else:
                                # reference values come back as 'model,id' strings
                                ref_obj, ref_id = result_line[field_name].split(',')
                                ref_id = long(ref_id)
                                if ref_id:
                                    obj = self._table.pool.get(ref_obj)
                                    new_data[field_name] = browse_record(self._cr, self._uid, ref_id, obj, self._cache, context=self._context, list_class=self._list_class, fields_process=self._fields_process)
                                else:
                                    new_data[field_name] = browse_null()
                        else:
                            new_data[field_name] = browse_null()
                    else:
                        new_data[field_name] = result_line[field_name]
                self._data[result_line['id']].update(new_data)

        if not name in self._data[self._id]:
            # How did this happen? Could be a missing model due to custom fields used too soon, see above.
            self.__logger.error("Fields to fetch: %s, Field values: %s", field_names, field_values)
            self.__logger.error("Cached: %s, Table: %s", self._data[self._id], self._table)
            raise KeyError(_('Unknown attribute %s in %s ') % (name, self))
        return self._data[self._id][name]

    def __getattr__(self, name):
        try:
            return self[name]
        except KeyError, e:
            raise AttributeError(e)

    def __contains__(self, name):
        return (name in self._table._columns) or (name in self._table._inherit_fields) or hasattr(self._table, name)

    def __iter__(self):
        raise NotImplementedError("Iteration is not allowed on %s" % self)

    def __hasattr__(self, name):
        return name in self

    def __int__(self):
        return self._id

    def __str__(self):
        return "browse_record(%s, %d)" % (self._table_name, self._id)

    def __eq__(self, other):
        # records are equal when they point at the same (model, id) pair
        if not isinstance(other, browse_record):
            return False
        return (self._table_name, self._id) == (other._table_name, other._id)

    def __ne__(self, other):
        if not isinstance(other, browse_record):
            return True
        return (self._table_name, self._id) != (other._table_name, other._id)

    # we need to define __unicode__ even though we've already defined __str__
    # because we have overridden __getattr__
    def __unicode__(self):
        return unicode(str(self))

    def __hash__(self):
        return hash((self._table_name, self._id))

    __repr__ = __str__

    def refresh(self):
        """Force refreshing this browse_record's data and all the data of the
        records that belong to the same cache, by emptying the cache completely,
        preserving only the record identifiers (for prefetching optimizations).
        """
        for model, model_cache in self._cache.iteritems():
            # only preserve the ids of the records that were in the cache
            cached_ids = dict([(i, {'id': i}) for i in model_cache.keys()])
            self._cache[model].clear()
            self._cache[model].update(cached_ids)
def pg_varchar(size=0):
    """ Returns the VARCHAR declaration for the provided size:

    * no size, or a zero/negative size -> an 'infinite' ``VARCHAR``
    * a positive int -> ``VARCHAR(n)``

    A non-int truthy size raises TypeError.

    :type size: int
    :rtype: str
    """
    if not size:
        return 'VARCHAR'
    if not isinstance(size, int):
        raise TypeError("VARCHAR parameter should be an int, got %s"
                        % type(size))
    if size > 0:
        return 'VARCHAR(%d)' % size
    return 'VARCHAR'
# Direct mapping from ORM field classes to the PostgreSQL type name used both
# as the identification type and the column specification (see get_pg_type()).
FIELDS_TO_PGTYPES = {
    fields.boolean: 'bool',
    fields.integer: 'int4',
    fields.text: 'text',
    fields.html: 'text',
    fields.date: 'date',
    fields.datetime: 'timestamp',
    fields.binary: 'bytea',
    fields.many2one: 'int4',
    fields.serialized: 'text',
}
def get_pg_type(f, type_override=None):
    """
    :param fields._column f: field to get a Postgres type for
    :param type type_override: use the provided type for dispatching instead of the field's own type
    :returns: (postgres_identification_type, postgres_type_specification),
              or None when the field type is not supported (a warning is logged)
    :rtype: (str, str)
    """
    field_type = type_override or type(f)

    if field_type in FIELDS_TO_PGTYPES:
        tname = FIELDS_TO_PGTYPES[field_type]
        return (tname, tname)

    if issubclass(field_type, fields.float):
        # NUMERIC when a fixed precision is requested, float8 otherwise
        return ('numeric', 'NUMERIC') if f.digits else ('float8', 'DOUBLE PRECISION')

    if issubclass(field_type, (fields.char, fields.reference)):
        return ('varchar', pg_varchar(f.size))

    if issubclass(field_type, fields.selection):
        # integer-keyed selections (or an explicit size of -1) use INTEGER
        if (isinstance(f.selection, list) and isinstance(f.selection[0][0], int)) \
                or getattr(f, 'size', None) == -1:
            return ('int4', 'INTEGER')
        return ('varchar', pg_varchar(getattr(f, 'size', None)))

    if issubclass(field_type, fields.function):
        if f._type == 'selection':
            return ('varchar', pg_varchar())
        # re-dispatch on the field type the function field emulates
        return get_pg_type(f, getattr(fields, f._type))

    _logger.warning('%s type not supported!', field_type)
    return None
class MetaModel(type):
    """ Metaclass for the Model.

    This class is used as the metaclass for the Model class to discover
    the models defined in a module (i.e. without instanciating them).
    If the automatic discovery is not needed, it is possible to set the
    model's _register attribute to False.

    """

    # Mapping: OpenERP module name -> list of model classes that module defines.
    module_to_models = {}

    def __init__(self, name, bases, attrs):
        if not self._register:
            # _register=False classes are skipped, but the flag is re-armed
            # so their subclasses are registered normally.
            self._register = True
            super(MetaModel, self).__init__(name, bases, attrs)
            return

        # The (OpenERP) module name can be in the `openerp.addons` namespace
        # or not. For instance module `sale` can be imported as
        # `openerp.addons.sale` (the good way) or `sale` (for backward
        # compatibility).
        module_parts = self.__module__.split('.')
        if len(module_parts) > 2 and module_parts[0] == 'openerp' and \
                module_parts[1] == 'addons':
            module_name = self.__module__.split('.')[2]
        else:
            module_name = self.__module__.split('.')[0]
        if not hasattr(self, '_module'):
            self._module = module_name

        # Remember which models to instanciate for this module.
        if not self._custom:
            self.module_to_models.setdefault(self._module, []).append(self)
# Definition of log access columns, automatically added to models if
# self._log_access is True
# (column name -> SQL column definition used at table-creation time)
LOG_ACCESS_COLUMNS = {
    'create_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'create_date': 'TIMESTAMP',
    'write_uid': 'INTEGER REFERENCES res_users ON DELETE SET NULL',
    'write_date': 'TIMESTAMP'
}
# special columns automatically created by the ORM
# (the primary key plus the audit columns above)
MAGIC_COLUMNS = ['id'] + LOG_ACCESS_COLUMNS.keys()
class BaseModel(object):
    """ Base class for OpenERP models.

    OpenERP models are created by inheriting from this class' subclasses:

    * Model: for regular database-persisted models
    * TransientModel: for temporary data, stored in the database but automatically
      vacuumed every so often
    * AbstractModel: for abstract super classes meant to be shared by multiple
      _inheriting classes (usually Models or TransientModels)

    The system will later instantiate the class once per database (on
    which the class' module is installed).

    To create a class that should not be instantiated, the _register class attribute
    may be set to False.
    """
    __metaclass__ = MetaModel
    _auto = True # create database backend
    _register = False # Set to false if the model shouldn't be automatically discovered.
    _name = None # technical model name, e.g. 'res.partner' (see check_object_name)
    _columns = {} # field name -> fields._column instance
    _constraints = []
    _custom = False
    _defaults = {} # field name -> default value for create()
    _rec_name = None # field used as the record label -- presumably falls back to 'name'; confirm in name_get
    _parent_name = 'parent_id' # m2o field used for hierarchies (see _parent_store)
    _parent_store = False
    _parent_order = False
    _date_name = 'date'
    _order = 'id' # default ORDER BY for search(); must match regex_order
    _sequence = None
    _description = None # human-readable model name, stored in ir_model by _field_create
    _needaction = False

    # dict of {field:method}, with method returning the (name_get of records, {id: fold})
    # to include in the _read_group, if grouped on this field
    _group_by_full = {}

    # Transience
    _transient = False # True in a TransientModel

    # structure:
    #  { 'parent_model': 'm2o_field', ... }
    _inherits = {}

    # Mapping from inherits'd field name to triple (m, r, f, n) where m is the
    # model from which it is inherits'd, r is the (local) field towards m, f
    # is the _column object itself, and n is the original (i.e. top-most)
    # parent model.
    # Example:
    #  { 'field_name': ('parent_model', 'm2o_field_to_reach_parent',
    #                   field_column_obj, origina_parent_model), ... }
    _inherit_fields = {}

    # Mapping field name/column_info object
    # This is similar to _inherit_fields but:
    # 1. includes self fields,
    # 2. uses column_info instead of a triple.
    _all_columns = {}

    _table = None # database table name -- presumably derived from _name when left None
    _invalids = set()
    _log_create = False
    _sql_constraints = []
    # method names that must not be exposed/overridden freely -- confirm usage with callers
    _protected = ['read', 'write', 'create', 'default_get', 'perm_read', 'unlink', 'fields_get', 'fields_view_get', 'search', 'name_get', 'distinct_field_get', 'name_search', 'copy', 'import_data', 'search_count', 'exists']

    # pseudo-field used by clients for optimistic concurrency checking
    CONCURRENCY_CHECK_FIELD = '__last_update'
    def log(self, cr, uid, id, message, secondary=False, context=None):
        # Deprecated no-op kept for backward compatibility: the res.log
        # mechanism was replaced by OpenChatter; calling this only logs a
        # warning and ignores all arguments.
        return _logger.warning("log() is deprecated. Please use OpenChatter notification system instead of the res.log mechanism.")
    def view_init(self, cr, uid, fields_list, context=None):
        """Override this method to do specific things when a view on the object is opened.

        :param cr: database cursor
        :param uid: current user id
        :param fields_list: presumably the field names shown by the view -- confirm with callers
        :param context: optional context dictionary
        """
        pass
    def _field_create(self, cr, context=None):
        """ Create entries in ir_model_fields for all the model's fields.

        If necessary, also create an entry in ir_model, and if called from the
        modules loading scheme (by receiving 'module' in the context), also
        create entries in ir_model_data (for the model and the fields).

        - create an entry in ir_model (if there is not already one),
        - create an entry in ir_model_data (if there is not already one, and if
          'module' is in the context),
        - update ir_model_fields with the fields found in _columns
          (TODO there is some redundancy as _columns is updated from
          ir_model_fields in __init__).

        """
        if context is None:
            context = {}

        # ensure the ir_model row for this model exists; reuse it otherwise
        cr.execute("SELECT id FROM ir_model WHERE model=%s", (self._name,))
        if not cr.rowcount:
            cr.execute('SELECT nextval(%s)', ('ir_model_id_seq',))
            model_id = cr.fetchone()[0]
            cr.execute("INSERT INTO ir_model (id,model, name, info,state) VALUES (%s, %s, %s, %s, %s)", (model_id, self._name, self._description, self.__doc__, 'base'))
        else:
            model_id = cr.fetchone()[0]
        # when loading from a module, also record the model in ir_model_data
        if 'module' in context:
            name_id = 'model_'+self._name.replace('.', '_')
            cr.execute('select * from ir_model_data where name=%s and module=%s', (name_id, context['module']))
            if not cr.rowcount:
                cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                    (name_id, context['module'], 'ir.model', model_id)
                )
        cr.commit()

        # snapshot of the ir_model_fields rows already stored for this model
        cr.execute("SELECT * FROM ir_model_fields WHERE model=%s", (self._name,))
        cols = {}
        for rec in cr.dictfetchall():
            cols[rec['name']] = rec

        ir_model_fields_obj = self.pool.get('ir.model.fields')

        # sparse field should be created at the end, as it depends on its serialized field already existing
        model_fields = sorted(self._columns.items(), key=lambda x: 1 if x[1]._type == 'sparse' else 0)
        for (k, f) in model_fields:
            # values the ir_model_fields row should hold for this column
            vals = {
                'model_id': model_id,
                'model': self._name,
                'name': k,
                'field_description': f.string,
                'ttype': f._type,
                'relation': f._obj or '',
                'view_load': (f.view_load and 1) or 0,
                'select_level': tools.ustr(f.select or 0),
                'readonly': (f.readonly and 1) or 0,
                'required': (f.required and 1) or 0,
                'selectable': (f.selectable and 1) or 0,
                'translate': (f.translate and 1) or 0,
                'relation_field': f._fields_id if isinstance(f, fields.one2many) else '',
                'serialization_field_id': None,
            }
            if getattr(f, 'serialization_field', None):
                # resolve link to serialization_field if specified by name
                serialization_field_id = ir_model_fields_obj.search(cr, SUPERUSER_ID, [('model','=',vals['model']), ('name', '=', f.serialization_field)])
                if not serialization_field_id:
                    raise except_orm(_('Error'), _("Serialization field `%s` not found for sparse field `%s`!") % (f.serialization_field, k))
                vals['serialization_field_id'] = serialization_field_id[0]

            # When its a custom field,it does not contain f.select
            if context.get('field_state', 'base') == 'manual':
                if context.get('field_name', '') == k:
                    vals['select_level'] = context.get('select', '0')
                #setting value to let the problem NOT occur next time
                elif k in cols:
                    vals['select_level'] = cols[k]['select_level']

            if k not in cols:
                # unknown field: insert a brand new ir_model_fields row
                cr.execute('select nextval(%s)', ('ir_model_fields_id_seq',))
                id = cr.fetchone()[0]
                vals['id'] = id
                cr.execute("""INSERT INTO ir_model_fields (
                    id, model_id, model, name, field_description, ttype,
                    relation,view_load,state,select_level,relation_field, translate, serialization_field_id
                ) VALUES (
                    %s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s
                )""", (
                    id, vals['model_id'], vals['model'], vals['name'], vals['field_description'], vals['ttype'],
                    vals['relation'], bool(vals['view_load']), 'base',
                    vals['select_level'], vals['relation_field'], bool(vals['translate']), vals['serialization_field_id']
                ))
                if 'module' in context:
                    # register an xml id for the new field, deduplicating on name
                    name1 = 'field_' + self._table + '_' + k
                    cr.execute("select name from ir_model_data where name=%s", (name1,))
                    if cr.fetchone():
                        name1 = name1 + "_" + str(id)
                    cr.execute("INSERT INTO ir_model_data (name,date_init,date_update,module,model,res_id) VALUES (%s, (now() at time zone 'UTC'), (now() at time zone 'UTC'), %s, %s, %s)", \
                        (name1, context['module'], 'ir.model.fields', id)
                    )
            else:
                # known field: if any stored value differs, rewrite the whole row
                for key, val in vals.items():
                    if cols[k][key] != vals[key]:
                        cr.execute('update ir_model_fields set field_description=%s where model=%s and name=%s', (vals['field_description'], vals['model'], vals['name']))
                        cr.commit()
                        cr.execute("""UPDATE ir_model_fields SET
                            model_id=%s, field_description=%s, ttype=%s, relation=%s,
                            view_load=%s, select_level=%s, readonly=%s ,required=%s, selectable=%s, relation_field=%s, translate=%s, serialization_field_id=%s
                        WHERE
                            model=%s AND name=%s""", (
                                vals['model_id'], vals['field_description'], vals['ttype'],
                                vals['relation'], bool(vals['view_load']),
                                vals['select_level'], bool(vals['readonly']), bool(vals['required']), bool(vals['selectable']), vals['relation_field'], bool(vals['translate']), vals['serialization_field_id'], vals['model'], vals['name']
                            ))
                        break
        cr.commit()
#
# Goal: try to apply inheritance at the instantiation level and
# put objects in the pool var
#
@classmethod
def create_instance(cls, pool, cr):
    """ Instantiate a given model.

    This class method instantiates the class of some model (i.e. a class
    deriving from osv or osv_memory). The class might be the class passed
    in argument or, if it inherits from another class, a class constructed
    by combining the two classes.

    The ``attributes`` list below specifies which parent class attributes
    have to be combined.

    TODO: the creation of the combined class is repeated at each call of
    this method. This is probably unnecessary.
    """
    # Class-level attributes that must be merged with the parent model's
    # when this class extends an existing model through _inherit.
    attributes = ['_columns', '_defaults', '_inherits', '_constraints',
        '_sql_constraints']
    parent_names = getattr(cls, '_inherit', None)
    if parent_names:
        # _inherit may be a single model name or a list of names.
        if isinstance(parent_names, (str, unicode)):
            name = cls._name or parent_names
            parent_names = [parent_names]
        else:
            name = cls._name
        if not name:
            raise TypeError('_name is mandatory in case of multiple inheritance')
        for parent_name in ((type(parent_names)==list) and parent_names or [parent_names]):
            parent_model = pool.get(parent_name)
            if not parent_model:
                raise TypeError('The model "%s" specifies an unexisting parent class "%s"\n'
                    'You may need to add a dependency on the parent class\' module.' % (name, parent_name))
            if not getattr(cls, '_original_module', None) and name == parent_model._name:
                # Extending an existing model in place: remember the module
                # that originally defined it.
                cls._original_module = parent_model._original_module
            parent_class = parent_model.__class__
            nattr = {}
            for s in attributes:
                new = copy.copy(getattr(parent_model, s, {}))
                if s == '_columns':
                    # Don't _inherit custom fields.
                    for c in new.keys():
                        if new[c].manual:
                            del new[c]
                    # Duplicate float fields because they have a .digits
                    # cache (which must be per-registry, not server-wide).
                    for c in new.keys():
                        if new[c]._type == 'float':
                            new[c] = copy.copy(new[c])
                if hasattr(new, 'update'):
                    # dict-like attribute: the child's entries override the
                    # parent's on key collision.
                    new.update(cls.__dict__.get(s, {}))
                elif s=='_constraints':
                    for c in cls.__dict__.get(s, []):
                        exist = False
                        for c2 in range(len(new)):
                            #For _constraints, we should check field and methods as well
                            if new[c2][2]==c[2] and (new[c2][0] == c[0] \
                                    or getattr(new[c2][0],'__name__', True) == \
                                        getattr(c[0],'__name__', False)):
                                # If new class defines a constraint with
                                # same function name, we let it override
                                # the old one.
                                new[c2] = c
                                exist = True
                                break
                        if not exist:
                            new.append(c)
                else:
                    # plain list attribute (e.g. _sql_constraints): concatenate.
                    new.extend(cls.__dict__.get(s, []))
                nattr[s] = new
            # Keep links to non-inherited constraints, e.g. useful when exporting translations
            nattr['_local_constraints'] = cls.__dict__.get('_constraints', [])
            nattr['_local_sql_constraints'] = cls.__dict__.get('_sql_constraints', [])
            # Build the combined class; _register=False keeps the synthetic
            # class itself out of the module -> models registration.
            cls = type(name, (cls, parent_class), dict(nattr, _register=False))
    else:
        cls._local_constraints = getattr(cls, '_constraints', [])
        cls._local_sql_constraints = getattr(cls, '_sql_constraints', [])
    if not getattr(cls, '_original_module', None):
        cls._original_module = cls._module
    # object.__new__ bypasses the model's own __new__ (which returns None
    # and only registers the class); __init__ wires the instance into the
    # registry against this cursor.
    obj = object.__new__(cls)
    obj.__init__(pool, cr)
    return obj
def __new__(cls):
    """Register this model class with the module that defines it.

    No instance is created: returning None means Python will not call
    __init__. The class is only recorded in MetaModel's
    module -> models registry (custom models are excluded).
    """
    # The owning module is the first component of the dotted module path
    # (e.g. 'base', 'sale', 'accounting', ...).
    if not hasattr(cls, '_module'):
        cls._module = cls.__module__.split('.')[0]
    # Append to the per-module model list, avoiding duplicates and
    # skipping custom models.
    registered = MetaModel.module_to_models.setdefault(cls._module, [])
    if cls not in registered and not cls._custom:
        registered.append(cls)
    # Since we don't return an instance here, __init__ won't be called.
    return None
def __init__(self, pool, cr):
    """ Initialize a model and make it part of the given registry.

    - copy the stored fields' functions in the osv_pool,
    - update the _columns with the fields found in ir_model_fields,
    - ensure there is a many2one for each _inherits'd parent,
    - update the children's _columns,
    - give a chance to each field to initialize itself.
    """
    # Register this instance in the registry immediately so recursive
    # lookups (e.g. through _inherits) can already find it.
    pool.add(self._name, self)
    self.pool = pool
    if not self._name and not hasattr(self, '_inherit'):
        name = type(self).__name__.split('.')[0]
        msg = "The class %s has to have a _name attribute" % name
        _logger.error(msg)
        raise except_orm('ValueError', msg)
    # Derive sensible defaults for the descriptive attributes.
    if not self._description:
        self._description = self._name
    if not self._table:
        self._table = self._name.replace('.', '_')
    if not hasattr(self, '_log_access'):
        # If _log_access is not specified, it is the same value as _auto.
        self._log_access = getattr(self, "_auto", True)
    # Shallow copy: the manual-field additions below must stay local to
    # this registry, not mutate the class-level _columns dict.
    self._columns = self._columns.copy()
    for store_field in self._columns:
        f = self._columns[store_field]
        if hasattr(f, 'digits_change'):
            # Let float-like fields refresh their per-registry digits cache.
            f.digits_change(cr)
        def not_this_field(stored_func):
            x, y, z, e, f, l = stored_func
            return x != self._name or y != store_field
        # Drop any stale store-trigger entry for this exact (model, field)
        # pair before possibly re-adding it below.
        self.pool._store_function[self._name] = filter(not_this_field, self.pool._store_function.get(self._name, []))
        if not isinstance(f, fields.function):
            continue
        if not f.store:
            continue
        # store=True is shorthand for: recompute for the same records,
        # no field filter, priority 10, no time window.
        sm = f.store
        if sm is True:
            sm = {self._name: (lambda self, cr, uid, ids, c={}: ids, None, 10, None)}
        for object, aa in sm.items():
            if len(aa) == 4:
                (fnct, fields2, order, length) = aa
            elif len(aa) == 3:
                (fnct, fields2, order) = aa
                length = None
            else:
                raise except_orm('Error',
                    ('Invalid function definition %s in object %s !\nYou must use the definition: store={object:(fnct, fields, priority, time length)}.' % (store_field, self._name)))
            self.pool._store_function.setdefault(object, [])
            # Register the trigger, keeping each list sorted by priority
            # (element 4 of the tuple).
            t = (self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length)
            if not t in self.pool._store_function[object]:
                self.pool._store_function[object].append((self._name, store_field, fnct, tuple(fields2) if fields2 else None, order, length))
                self.pool._store_function[object].sort(lambda x, y: cmp(x[4], y[4]))
    # Map SQL constraint names to their human-readable error messages.
    for (key, _, msg) in self._sql_constraints:
        self.pool._sql_error[self._table+'_'+key] = msg
    # Load manual fields
    # Check the query is already done for all modules of if we need to
    # do it ourselves.
    if self.pool.fields_by_model is not None:
        manual_fields = self.pool.fields_by_model.get(self._name, [])
    else:
        cr.execute('SELECT * FROM ir_model_fields WHERE model=%s AND state=%s', (self._name, 'manual'))
        manual_fields = cr.dictfetchall()
    for field in manual_fields:
        if field['name'] in self._columns:
            continue
        attrs = {
            'string': field['field_description'],
            'required': bool(field['required']),
            'readonly': bool(field['readonly']),
            # NOTE(review): eval() of DB-stored domain/selection strings —
            # assumes ir_model_fields content is trusted (admin-managed).
            'domain': eval(field['domain']) if field['domain'] else None,
            'size': field['size'] or None,
            'ondelete': field['on_delete'],
            'translate': (field['translate']),
            'manual': True,
            #'select': int(field['select_level'])
        }
        if field['serialization_field_id']:
            # Sparse field: its value lives inside a serialized container
            # field, resolved here by id.
            cr.execute('SELECT name FROM ir_model_fields WHERE id=%s', (field['serialization_field_id'],))
            attrs.update({'serialization_field': cr.fetchone()[0], 'type': field['ttype']})
            if field['ttype'] in ['many2one', 'one2many', 'many2many']:
                attrs.update({'relation': field['relation']})
            self._columns[field['name']] = fields.sparse(**attrs)
        elif field['ttype'] == 'selection':
            self._columns[field['name']] = fields.selection(eval(field['selection']), **attrs)
        elif field['ttype'] == 'reference':
            self._columns[field['name']] = fields.reference(selection=eval(field['selection']), **attrs)
        elif field['ttype'] == 'many2one':
            self._columns[field['name']] = fields.many2one(field['relation'], **attrs)
        elif field['ttype'] == 'one2many':
            self._columns[field['name']] = fields.one2many(field['relation'], field['relation_field'], **attrs)
        elif field['ttype'] == 'many2many':
            # Synthesize the relation table name for custom m2m fields.
            _rel1 = field['relation'].replace('.', '_')
            _rel2 = field['model'].replace('.', '_')
            _rel_name = 'x_%s_%s_%s_rel' % (_rel1, _rel2, field['name'])
            self._columns[field['name']] = fields.many2many(field['relation'], _rel_name, 'id1', 'id2', **attrs)
        else:
            # Fall back to the plain field class named by ttype.
            self._columns[field['name']] = getattr(fields, field['ttype'])(**attrs)
    self._inherits_check()
    self._inherits_reload()
    if not self._sequence:
        self._sequence = self._table + '_id_seq'
    # Every default must correspond to an actual (own or inherited) field.
    for k in self._defaults:
        assert (k in self._columns) or (k in self._inherit_fields), 'Default function defined in %s but field %s does not exist !' % (self._name, k,)
    # Give each field a chance to reset its per-registry state.
    for f in self._columns:
        self._columns[f].restart()
    # Transience
    if self.is_transient():
        self._transient_check_count = 0
        self._transient_max_count = config.get('osv_memory_count_limit')
        self._transient_max_hours = config.get('osv_memory_age_limit')
        assert self._log_access, "TransientModels must have log_access turned on, "\
            "in order to implement their access rights policy"
    # Validate rec_name
    if self._rec_name is not None:
        assert self._rec_name in self._all_columns.keys() + ['id'], "Invalid rec_name %s for model %s" % (self._rec_name, self._name)
    else:
        self._rec_name = 'name'
def __export_row(self, cr, uid, row, fields, context=None):
    """Export one browse_record as a matrix of cells.

    `fields` is a list of field paths (each a list of components, e.g.
    ['order_line', 'name']). Returns [data] + extra_lines where `data`
    has one cell per path and extra_lines are continuation rows produced
    by x2many sub-records.
    """
    if context is None:
        context = {}
    def check_type(field_type):
        # Type-appropriate "empty" placeholder for falsy values.
        if field_type == 'float':
            return 0.0
        elif field_type == 'integer':
            return 0
        elif field_type == 'boolean':
            return 'False'
        return ''
    def selection_field(in_field):
        # Look for column f[i] on the first _inherits parent, recursing
        # through that parent's own _inherits.
        # NOTE(review): closes over the outer loop's f and i, and the
        # recursive branch does not return its result — confirm intended.
        col_obj = self.pool.get(in_field.keys()[0])
        if f[i] in col_obj._columns.keys():
            return col_obj._columns[f[i]]
        elif f[i] in col_obj._inherits.keys():
            selection_field(col_obj._inherits)
        else:
            return False
    def _get_xml_id(self, cr, uid, r):
        # Return the record's external ID, minting a '__export__.' one
        # (with a uniquifying numeric postfix) when none exists yet.
        model_data = self.pool.get('ir.model.data')
        data_ids = model_data.search(cr, uid, [('model', '=', r._table_name), ('res_id', '=', r['id'])])
        if len(data_ids):
            d = model_data.read(cr, uid, data_ids, ['name', 'module'])[0]
            if d['module']:
                r = '%s.%s' % (d['module'], d['name'])
            else:
                r = d['name']
        else:
            postfix = 0
            while True:
                n = self._table+'_'+str(r['id']) + (postfix and ('_'+str(postfix)) or '' )
                if not model_data.search(cr, uid, [('name', '=', n)]):
                    break
                postfix += 1
            model_data.create(cr, SUPERUSER_ID, {
                'name': n,
                'model': self._name,
                'res_id': r['id'],
                'module': '__export__',
            })
            r = '__export__.'+n
        return r
    lines = []
    data = map(lambda x: '', range(len(fields)))
    done = []
    for fpos in range(len(fields)):
        f = fields[fpos]
        if f:
            r = row
            i = 0
            # Walk the field path, dereferencing one component at a time.
            while i < len(f):
                cols = False
                if f[i] == '.id':
                    r = r['id']
                elif f[i] == 'id':
                    r = _get_xml_id(self, cr, uid, r)
                else:
                    r = r[f[i]]
                    # To display external name of selection field when its exported
                    if f[i] in self._columns.keys():
                        cols = self._columns[f[i]]
                    elif f[i] in self._inherit_fields.keys():
                        cols = selection_field(self._inherits)
                    if cols and cols._type == 'selection':
                        sel_list = cols.selection
                        if r and type(sel_list) == type([]):
                            # Map the stored key to its display label.
                            r = [x[1] for x in sel_list if r==x[0]]
                            r = r and r[0] or False
                if not r:
                    # Falsy value: emit a type-appropriate placeholder and
                    # stop following this path.
                    if f[i] in self._columns:
                        r = check_type(self._columns[f[i]]._type)
                    elif f[i] in self._inherit_fields:
                        r = check_type(self._inherit_fields[f[i]][2]._type)
                    data[fpos] = r or False
                    break
                if isinstance(r, (browse_record_list, list)):
                    # x2many value: recurse into sub-records with the
                    # remaining path components.
                    first = True
                    fields2 = map(lambda x: (x[:i+1]==f[:i+1] and x[i+1:]) \
                            or [], fields)
                    if fields2 in done:
                        if [x for x in fields2 if x]:
                            break
                    done.append(fields2)
                    if cols and cols._type=='many2many' and len(fields[fpos])>(i+1) and (fields[fpos][i+1]=='id'):
                        # m2m exported by external id: comma-joined list.
                        data[fpos] = ','.join([_get_xml_id(self, cr, uid, x) for x in r])
                        break
                    for row2 in r:
                        # NOTE(review): private-name mangling resolves this
                        # to _BaseModel__export_row inside the class body.
                        lines2 = row2._model.__export_row(cr, uid, row2, fields2,
                                context)
                        if first:
                            # First sub-record fills the current row's cells.
                            for fpos2 in range(len(fields)):
                                if lines2 and lines2[0][fpos2]:
                                    data[fpos2] = lines2[0][fpos2]
                            if not data[fpos]:
                                # No subfield values: fall back to a
                                # comma-separated list of display names.
                                dt = ''
                                for rr in r:
                                    name_relation = self.pool.get(rr._table_name)._rec_name
                                    if isinstance(rr[name_relation], browse_record):
                                        rr = rr[name_relation]
                                    rr_name = self.pool.get(rr._table_name).name_get(cr, uid, [rr.id], context=context)
                                    rr_name = rr_name and rr_name[0] and rr_name[0][1] or ''
                                    dt += tools.ustr(rr_name or '') + ','
                                data[fpos] = dt[:-1]
                                break
                            lines += lines2[1:]
                            first = False
                        else:
                            # Later sub-records become continuation lines.
                            lines += lines2
                    break
                i += 1
            if i == len(f):
                # Path fully consumed: render the final scalar/record.
                if isinstance(r, browse_record):
                    r = self.pool.get(r._table_name).name_get(cr, uid, [r.id], context=context)
                    r = r and r[0] and r[0][1] or ''
                data[fpos] = tools.ustr(r or '')
    return [data] + lines
def export_data(self, cr, uid, ids, fields_to_export, context=None):
    """
    Export fields for selected objects

    :param cr: database cursor
    :param uid: current user id
    :param ids: list of ids
    :param fields_to_export: list of fields (``"a/b/c"`` style paths)
    :param context: context arguments, like lang, time zone
    :rtype: dictionary with a *datas* matrix

    This method is used when exporting data via client menu
    """
    if context is None:
        context = {}
    # FIX: removed the dead `cols` dict that was built from _columns and
    # _inherit_fields but never read anywhere in this method.
    # Normalize "a/b/c" export paths into lists of path components.
    fields_to_export = map(fix_import_export_id_paths, fields_to_export)
    datas = []
    # One or more output rows per record (x2many sub-records append
    # continuation lines).
    for row in self.browse(cr, uid, ids, context):
        datas += self.__export_row(cr, uid, row, fields_to_export, context)
    return {'datas': datas}
def import_data(self, cr, uid, fields, datas, mode='init', current_module='', noupdate=False, context=None, filename=None):
    """
    .. deprecated:: 7.0
        Use :meth:`~load` instead

    Import given data in given module

    This method is used when importing data via client menu.

    Example of fields to import for a sale.order::

        .id,            (=database_id)
        partner_id,     (=name_search)
        order_line/.id, (=database_id)
        order_line/name,
        order_line/product_id/id,   (=xml id)
        order_line/price_unit,
        order_line/product_uom_qty,
        order_line/product_uom/id   (=xml_id)

    This method returns a 4-tuple with the following structure::

        (return_code, errored_resource, error_message, unused)

    * The first item is a return code, it is ``-1`` in case of
      import error, or the last imported row number in case of success
    * The second item contains the record data dict that failed to import
      in case of error, otherwise it's 0
    * The third item contains an error message string in case of error,
      otherwise it's 0
    * The last item is currently unused, with no specific semantics

    :param fields: list of fields to import
    :param datas: data to import
    :param mode: 'init' or 'update' for record creation
    :param current_module: module name
    :param noupdate: flag for record creation
    :param filename: optional file to store partial import state for recovery
    :returns: 4-tuple in the form (return_code, errored_resource, error_message, unused)
    :rtype: (int, dict or 0, str or 0, str or 0)
    """
    context = dict(context) if context is not None else {}
    context['_import_current_module'] = current_module

    fields = map(fix_import_export_id_paths, fields)
    ir_model_data_obj = self.pool.get('ir.model.data')

    def log(m):
        # Any error aborts the import; warnings are silently ignored.
        if m['type'] == 'error':
            raise Exception(m['message'])

    # FIX: start at 0 and let the partial-import state file override it.
    # The original assigned `position = 0` AFTER loading the saved
    # position, unconditionally discarding the recorded recovery state.
    # (The loaded position only influences the reported error line
    # number; rows are not skipped.)
    position = 0
    if config.get('import_partial') and filename:
        with open(config.get('import_partial'), 'rb') as partial_import_file:
            data = pickle.load(partial_import_file)
            position = data.get(filename, 0)

    try:
        for res_id, xml_id, res, info in self._convert_records(cr, uid,
                self._extract_records(cr, uid, fields, datas,
                                      context=context, log=log),
                context=context, log=log):
            ir_model_data_obj._update(cr, uid, self._name,
                 current_module, res, mode=mode, xml_id=xml_id,
                 noupdate=noupdate, res_id=res_id, context=context)
            position = info.get('rows', {}).get('to', 0) + 1
            if config.get('import_partial') and filename and (not (position % 100)):
                # Checkpoint progress every 100 rows so an interrupted
                # import can be resumed from the partial-state file.
                with open(config.get('import_partial'), 'rb') as partial_import:
                    data = pickle.load(partial_import)
                data[filename] = position
                with open(config.get('import_partial'), 'wb') as partial_import:
                    pickle.dump(data, partial_import)
            if context.get('defer_parent_store_computation'):
                self._parent_store_compute(cr)
            cr.commit()
    except Exception as e:
        cr.rollback()
        return -1, {}, 'Line %d : %s' % (position + 1, tools.ustr(e)), ''

    if context.get('defer_parent_store_computation'):
        self._parent_store_compute(cr)
    return position, 0, 0, 0
def load(self, cr, uid, fields, data, context=None):
    """
    Attempts to load the data matrix, and returns a list of ids (or
    ``False`` if there was an error and no id could be generated) and a
    list of messages.

    The ids are those of the records created and saved (in database), in
    the same order they were extracted from the file. They can be passed
    directly to :meth:`~read`

    :param fields: list of fields to import, at the same index as the corresponding data
    :type fields: list(str)
    :param data: row-major matrix of data to import
    :type data: list(list(str))
    :param dict context:
    :returns: {ids: list(int)|False, messages: [Message]}
    """
    cr.execute('SAVEPOINT model_load')
    messages = []

    fields = map(fix_import_export_id_paths, fields)
    # FIX: clear_caches() performs the cache reset and returns None, so
    # its result must not be bound as the model reference (the original
    # chained the calls, leaving ModelData = None before _update below).
    ModelData = self.pool['ir.model.data']
    ModelData.clear_caches()
    fg = self.fields_get(cr, uid, context=context)
    mode = 'init'
    current_module = ''
    noupdate = False

    ids = []
    for id, xid, record, info in self._convert_records(cr, uid,
            self._extract_records(cr, uid, fields, data,
                                  context=context, log=messages.append),
            context=context, log=messages.append):
        try:
            # Per-record savepoint: one bad record must not poison the
            # whole transaction.
            cr.execute('SAVEPOINT model_load_save')
        except psycopg2.InternalError as e:
            # broken transaction, exit and hope the source error was
            # already logged
            if not any(message['type'] == 'error' for message in messages):
                messages.append(dict(info, type='error', message=
                    u"Unknown database error: '%s'" % e))
            break
        try:
            ids.append(ModelData._update(cr, uid, self._name,
                 current_module, record, mode=mode, xml_id=xid,
                 noupdate=noupdate, res_id=id, context=context))
            cr.execute('RELEASE SAVEPOINT model_load_save')
        except psycopg2.Warning as e:
            messages.append(dict(info, type='warning', message=str(e)))
            cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
        except psycopg2.Error as e:
            # Map the Postgres error code to a user-readable message.
            messages.append(dict(
                info, type='error',
                **PGERROR_TO_OE[e.pgcode](self, fg, info, e)))
            # Failed to write, log to messages, rollback savepoint (to
            # avoid broken transaction) and keep going
            cr.execute('ROLLBACK TO SAVEPOINT model_load_save')
    # Any error rolls back everything done since the outer savepoint.
    if any(message['type'] == 'error' for message in messages):
        cr.execute('ROLLBACK TO SAVEPOINT model_load')
        ids = False
    return {'ids': ids, 'messages': messages}
def _extract_records(self, cr, uid, fields_, data,
        context=None, log=lambda a: None):
    """ Generates record dicts from the data sequence.

    The result is a generator of dicts mapping field names to raw
    (unconverted, unvalidated) values.

    For relational fields, if sub-fields were provided the value will be
    a list of sub-records

    The following sub-fields may be set on the record (by key):
    * None is the name_get for the record (to use with name_create/name_search)
    * "id" is the External ID for the record
    * ".id" is the Database ID for the record

    Each yielded item is ``(record, {'rows': {'from': i, 'to': j}})``,
    the row span giving which input rows produced the record.
    """
    columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
    # Fake columns to avoid special cases in extractor
    columns[None] = fields.char('rec_name')
    columns['id'] = fields.char('External ID')
    columns['.id'] = fields.integer('Database ID')
    # m2o fields can't be on multiple lines so exclude them from the
    # is_relational field rows filter, but special-case it later on to
    # be handled with relational fields (as it can have subfields)
    is_relational = lambda field: columns[field]._type in ('one2many', 'many2many', 'many2one')
    get_o2m_values = itemgetter_tuple(
        [index for index, field in enumerate(fields_)
              if columns[field[0]]._type == 'one2many'])
    get_nono2m_values = itemgetter_tuple(
        [index for index, field in enumerate(fields_)
              if columns[field[0]]._type != 'one2many'])
    # Checks if the provided row has any non-empty non-relational field
    def only_o2m_values(row, f=get_nono2m_values, g=get_o2m_values):
        return any(g(row)) and not any(f(row))
    index = 0
    while True:
        if index >= len(data): return
        row = data[index]
        # copy non-relational fields to record dict
        record = dict((field[0], value)
            for field, value in itertools.izip(fields_, row)
            if not is_relational(field[0]))
        # Get all following rows which have relational values attached to
        # the current record (no non-relational values)
        record_span = itertools.takewhile(
            only_o2m_values, itertools.islice(data, index + 1, None))
        # stitch record row back on for relational fields
        record_span = list(itertools.chain([row], record_span))
        for relfield in set(
                field[0] for field in fields_
                         if is_relational(field[0])):
            column = columns[relfield]
            # FIXME: how to not use _obj without relying on fields_get?
            Model = self.pool[column._obj]
            # get only cells for this sub-field, should be strictly
            # non-empty, field path [None] is for name_get column
            indices, subfields = zip(*((index, field[1:] or [None])
                                       for index, field in enumerate(fields_)
                                       if field[0] == relfield))
            # return all rows which have at least one value for the
            # subfields of relfield
            relfield_data = filter(any, map(itemgetter_tuple(indices), record_span))
            # Recursively extract the sub-records for this relational field.
            record[relfield] = [subrecord
                for subrecord, _subinfo in Model._extract_records(
                    cr, uid, subfields, relfield_data,
                    context=context, log=log)]
        yield record, {'rows': {
            'from': index,
            'to': index + len(record_span) - 1
        }}
        index += len(record_span)
def _convert_records(self, cr, uid, records,
        context=None, log=lambda a: None):
    """ Converts records from the source iterable (recursive dicts of
    strings) into forms which can be written to the database (via
    self.create or (ir.model.data)._update)

    :returns: a list of triplets of (id, xid, record)
    :rtype: list((int|None, str|None, dict))
    """
    if context is None: context = {}
    Converter = self.pool['ir.fields.converter']
    columns = dict((k, v.column) for k, v in self._all_columns.iteritems())
    Translation = self.pool['ir.translation']
    # Translated (when available) human-readable label for each field,
    # used when formatting conversion messages.
    field_names = dict(
        (f, (Translation._get_source(cr, uid, self._name + ',' + f, 'field',
                context.get('lang'))
             or column.string))
        for f, column in columns.iteritems())
    convert = Converter.for_model(cr, uid, self, context=context)
    def _log(base, field, exception):
        # Warnings stay warnings; anything else is reported as an error.
        type = 'warning' if isinstance(exception, Warning) else 'error'
        # logs the logical (not human-readable) field name for automated
        # processing of response, but injects human readable in message
        record = dict(base, type=type, field=field,
                      message=unicode(exception.args[0]) % base)
        if len(exception.args) > 1 and exception.args[1]:
            record.update(exception.args[1])
        log(record)
    # CountingStream exposes .index so messages can cite the source row.
    stream = CountingStream(records)
    for record, extras in stream:
        dbid = False
        xid = False
        # name_get/name_create
        if None in record: pass
        # xid
        if 'id' in record:
            xid = record['id']
        # dbid
        if '.id' in record:
            try:
                dbid = int(record['.id'])
            except ValueError:
                # in case of overridden id column
                dbid = record['.id']
            # A database id that matches no record is reported and dropped.
            if not self.search(cr, uid, [('id', '=', dbid)], context=context):
                log(dict(extras,
                    type='error',
                    record=stream.index,
                    field='.id',
                    message=_(u"Unknown database identifier '%s'") % dbid))
                dbid = False
        converted = convert(record, lambda field, err:\
            _log(dict(extras, record=stream.index, field=field_names[field]), field, err))
        yield dbid, xid, converted, dict(extras, record=stream.index)
def get_invalid_fields(self, cr, uid):
    """Return the names of the fields that failed the last validation."""
    return [field_name for field_name in self._invalids]
def _validate(self, cr, uid, ids, context=None):
    """Run every entry of self._constraints against the given records.

    Collects one translated message per failing constraint, records the
    offending field names in self._invalids, and raises a single
    'ValidateError' except_orm listing all failures; when everything
    passes, self._invalids is cleared.
    """
    context = context or {}
    lng = context.get('lang')
    trans = self.pool.get('ir.translation')
    error_msgs = []
    for constraint in self._constraints:
        fun, msg, fields = constraint
        # We don't pass around the context here: validation code
        # must always yield the same results.
        if not fun(self, cr, uid, ids):
            # Check presence of __call__ directly instead of using
            # callable() because it will be deprecated as of Python 3.0
            if hasattr(msg, '__call__'):
                # Callable message: may return a plain string or a
                # (format_string, params) tuple.
                tmp_msg = msg(self, cr, uid, ids, context=context)
                if isinstance(tmp_msg, tuple):
                    tmp_msg, params = tmp_msg
                    translated_msg = tmp_msg % params
                else:
                    translated_msg = tmp_msg
            else:
                # Static message: translate through ir.translation.
                translated_msg = trans._get_source(cr, uid, self._name, 'constraint', lng, msg)
            error_msgs.append(
                    _("Error occurred while validating the field(s) %s: %s") % (','.join(fields), translated_msg)
            )
            self._invalids.update(fields)
    if error_msgs:
        raise except_orm('ValidateError', '\n'.join(error_msgs))
    else:
        self._invalids.clear()
def default_get(self, cr, uid, fields_list, context=None):
    """
    Returns default values for the fields in fields_list.

    :param fields_list: list of fields to get the default values for (example ['field1', 'field2',])
    :type fields_list: list
    :param context: optional context dictionary - it may contains keys for specifying certain options
                    like ``context_lang`` (language) or ``context_tz`` (timezone) to alter the results of the call.
                    It may contain keys in the form ``default_XXX`` (where XXX is a field name), to set
                    or override a default value for a field.
                    A special ``bin_size`` boolean flag may also be passed in the context to request the
                    value of all fields.binary columns to be returned as the size of the binary instead of its
                    contents. This can also be selectively overriden by passing a field-specific flag
                    in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                    Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
    :return: dictionary of the default values (set on the object model class, through user preferences, or in the context)
    """
    # trigger view init hook
    self.view_init(cr, uid, fields_list, context)
    if not context:
        context = {}
    defaults = {}
    # get the default values for the inherited fields
    for t in self._inherits.keys():
        defaults.update(self.pool.get(t).default_get(cr, uid, fields_list,
            context))
    # get the default values defined in the object
    for f in fields_list:
        if f in self._defaults:
            # Defaults may be callables (evaluated per call) or constants.
            if callable(self._defaults[f]):
                defaults[f] = self._defaults[f](self, cr, uid, context)
            else:
                defaults[f] = self._defaults[f]
        # Resolve the column definition (own column or inherited one).
        fld_def = ((f in self._columns) and self._columns[f]) \
                or ((f in self._inherit_fields) and self._inherit_fields[f][2]) \
                or False
        if isinstance(fld_def, fields.property):
            # Property fields take their default from ir.property.
            property_obj = self.pool.get('ir.property')
            prop_value = property_obj.get(cr, uid, f, self._name, context=context)
            if prop_value:
                if isinstance(prop_value, (browse_record, browse_null)):
                    defaults[f] = prop_value.id
                else:
                    defaults[f] = prop_value
            else:
                if f not in defaults:
                    defaults[f] = False
    # get the default values set by the user and override the default
    # values defined in the object
    ir_values_obj = self.pool.get('ir.values')
    res = ir_values_obj.get(cr, uid, 'default', False, [self._name])
    for id, field, field_value in res:
        if field in fields_list:
            fld_def = (field in self._columns) and self._columns[field] or self._inherit_fields[field][2]
            if fld_def._type == 'many2one':
                # Drop user defaults pointing at records that no longer exist.
                obj = self.pool.get(fld_def._obj)
                if not obj.search(cr, uid, [('id', '=', field_value or False)]):
                    continue
            if fld_def._type == 'many2many':
                # Keep only the target ids that still exist.
                obj = self.pool.get(fld_def._obj)
                field_value2 = []
                for i in range(len(field_value or [])):
                    if not obj.search(cr, uid, [('id', '=',
                        field_value[i])]):
                        continue
                    field_value2.append(field_value[i])
                field_value = field_value2
            if fld_def._type == 'one2many':
                # Filter each sub-record's m2o values against existing rows.
                obj = self.pool.get(fld_def._obj)
                field_value2 = []
                for i in range(len(field_value or [])):
                    field_value2.append({})
                    for field2 in field_value[i]:
                        if field2 in obj._columns.keys() and obj._columns[field2]._type == 'many2one':
                            obj2 = self.pool.get(obj._columns[field2]._obj)
                            if not obj2.search(cr, uid,
                                    [('id', '=', field_value[i][field2])]):
                                continue
                        elif field2 in obj._inherit_fields.keys() and obj._inherit_fields[field2][2]._type == 'many2one':
                            obj2 = self.pool.get(obj._inherit_fields[field2][2]._obj)
                            if not obj2.search(cr, uid,
                                    [('id', '=', field_value[i][field2])]):
                                continue
                        # TODO add test for many2many and one2many
                        field_value2[i][field2] = field_value[i][field2]
                field_value = field_value2
            defaults[field] = field_value
    # get the default values from the context
    # (context defaults have the highest priority)
    for key in context or {}:
        if key.startswith('default_') and (key[8:] in fields_list):
            defaults[key[8:]] = context[key]
    return defaults
def fields_get_keys(self, cr, user, context=None):
    """Return the names of all fields of this model, including the
    fields brought in through each _inherits parent."""
    field_names = list(self._columns)
    # TODO I believe this loop can be replace by
    # res.extend(self._inherit_fields.key())
    for parent_model in self._inherits:
        field_names.extend(self.pool.get(parent_model).fields_get_keys(cr, user, context))
    return field_names
def _rec_name_fallback(self, cr, uid, context=None):
rec_name = self._rec_name
if rec_name not in self._columns:
rec_name = self._columns.keys()[0] if len(self._columns.keys()) > 0 else "id"
return rec_name
#
# Override this method if you need a window title that depends on the context
#
def view_header_get(self, cr, user, view_id=None, view_type='form', context=None):
    # Default implementation: no custom window title. Subclasses may
    # override this to return a context-dependent header string.
    return False
def user_has_groups(self, cr, uid, groups, context=None):
    """Return True if the user is at least member of one of the groups
    in groups_str. Typically used to resolve ``groups`` attribute
    in view and model definitions.

    :param str groups: comma-separated list of fully-qualified group
                       external IDs, e.g.: ``base.group_user,base.group_system``
    :return: True if the current user is a member of one of the
             given groups
    """
    users_model = self.pool.get('res.users')
    membership = [users_model.has_group(cr, uid, group_ext_id)
                  for group_ext_id in groups.split(',')]
    return any(membership)
def __view_look_dom(self, cr, user, node, view_id, in_tree_view, model_fields, context=None):
    """Return the description of the fields in the node.

    In a normal call to this method, node is a complete view architecture
    but it is actually possible to give some sub-node (this is used so
    that the method can call itself recursively).

    Originally, the field descriptions are drawn from the node itself.
    But there is now some code calling fields_get() in order to merge some
    of those information in the architecture.

    Side effects: mutates ``node`` in place (removes nodes the user may
    not see, sets ``invisible``/``string``/``modifiers`` attributes) and
    returns a dict mapping field names to their description dicts.
    """
    if context is None:
        context = {}
    result = False
    fields = {}
    children = True
    modifiers = {}

    def encode(s):
        # etree.fromstring wants bytes for non-ascii input (Python 2)
        if isinstance(s, unicode):
            return s.encode('utf8')
        return s

    def check_group(node):
        """Apply group restrictions, may be set at view level or model level::
           * at view level this means the element should be made invisible to
             people who are not members
           * at model level (exclusively for fields, obviously), this means
             the field should be completely removed from the view, as it is
             completely unavailable for non-members

        :return: True if field should be included in the result of fields_view_get
        """
        if node.tag == 'field' and node.get('name') in self._all_columns:
            column = self._all_columns[node.get('name')].column
            if column.groups and not self.user_has_groups(cr, user,
                                                          groups=column.groups,
                                                          context=context):
                # model-level restriction: drop the node entirely
                node.getparent().remove(node)
                fields.pop(node.get('name'), None)
                # no point processing view-level ``groups`` anymore, return
                return False
        if node.get('groups'):
            can_see = self.user_has_groups(cr, user,
                                           groups=node.get('groups'),
                                           context=context)
            if not can_see:
                # view-level restriction: keep the node but hide it
                node.set('invisible', '1')
                modifiers['invisible'] = True
                if 'attrs' in node.attrib:
                    del(node.attrib['attrs']) #avoid making field visible later
            # the ``groups`` attribute is consumed either way
            del(node.attrib['groups'])
        return True

    if node.tag in ('field', 'node', 'arrow'):
        if node.get('object'):
            # diagram node/arrow pointing at another model: build an
            # embedded form view from its child <field> elements
            attrs = {}
            views = {}
            xml = "<form>"
            for f in node:
                if f.tag == 'field':
                    xml += etree.tostring(f, encoding="utf-8")
            xml += "</form>"
            new_xml = etree.fromstring(encode(xml))
            ctx = context.copy()
            ctx['base_model_name'] = self._name
            xarch, xfields = self.pool.get(node.get('object')).__view_look_dom_arch(cr, user, new_xml, view_id, ctx)
            views['form'] = {
                'arch': xarch,
                'fields': xfields
            }
            attrs = {'views': views}
            fields = xfields
        if node.get('name'):
            attrs = {}
            try:
                # column may live on the model itself or come through _inherits
                if node.get('name') in self._columns:
                    column = self._columns[node.get('name')]
                else:
                    column = self._inherit_fields[node.get('name')][2]
            except Exception:
                column = False

            if column:
                relation = self.pool.get(column._obj)

                children = False
                views = {}
                # inline subviews (<form>/<tree>/...) of a relational field are
                # processed against the co-model and detached from this arch
                for f in node:
                    if f.tag in ('form', 'tree', 'graph', 'kanban'):
                        node.remove(f)
                        ctx = context.copy()
                        ctx['base_model_name'] = self._name
                        xarch, xfields = relation.__view_look_dom_arch(cr, user, f, view_id, ctx)
                        views[str(f.tag)] = {
                            'arch': xarch,
                            'fields': xfields
                        }
                attrs = {'views': views}
                if node.get('widget') and node.get('widget') == 'selection':
                    # Prepare the cached selection list for the client. This needs to be
                    # done even when the field is invisible to the current user, because
                    # other events could need to change its value to any of the selectable ones
                    # (such as on_change events, refreshes, etc.)

                    # If domain and context are strings, we keep them for client-side, otherwise
                    # we evaluate them server-side to consider them when generating the list of
                    # possible values
                    # TODO: find a way to remove this hack, by allow dynamic domains
                    dom = []
                    if column._domain and not isinstance(column._domain, basestring):
                        dom = list(column._domain)
                    # NOTE(review): ``domain`` comes from the stored view arch, not
                    # end-user input, but it is still eval()'d server-side
                    dom += eval(node.get('domain', '[]'), {'uid': user, 'time': time})
                    search_context = dict(context)
                    if column._context and not isinstance(column._context, basestring):
                        search_context.update(column._context)
                    attrs['selection'] = relation._name_search(cr, user, '', dom, context=search_context, limit=None, name_get_uid=1)
                    # add an empty choice unless the field is required
                    if (node.get('required') and not int(node.get('required'))) or not column.required:
                        attrs['selection'].append((False, ''))
            fields[node.get('name')] = attrs

            field = model_fields.get(node.get('name'))
            if field:
                transfer_field_to_modifiers(field, modifiers)

    elif node.tag in ('form', 'tree'):
        result = self.view_header_get(cr, user, False, node.tag, context)
        if result:
            node.set('string', result)
        in_tree_view = node.tag == 'tree'

    elif node.tag == 'calendar':
        # calendar attributes reference field names that must be loaded too
        for additional_field in ('date_start', 'date_delay', 'date_stop', 'color'):
            if node.get(additional_field):
                fields[node.get(additional_field)] = {}

    if not check_group(node):
        # node must be removed, no need to proceed further with its children
        return fields

    # The view architeture overrides the python model.
    # Get the attrs before they are (possibly) deleted by check_group below
    transfer_node_to_modifiers(node, modifiers, context, in_tree_view)

    # TODO remove attrs couterpart in modifiers when invisible is true ?

    # translate view
    if 'lang' in context:
        if node.text and node.text.strip():
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.text.strip())
            if trans:
                node.text = node.text.replace(node.text.strip(), trans)
        if node.tail and node.tail.strip():
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.tail.strip())
            if trans:
                node.tail = node.tail.replace(node.tail.strip(), trans)

        if node.get('string') and not result:
            trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], node.get('string'))
            if trans == node.get('string') and ('base_model_name' in context):
                # If translation is same as source, perhaps we'd have more luck with the alternative model name
                # (in case we are in a mixed situation, such as an inherited view where parent_view.model != model
                trans = self.pool.get('ir.translation')._get_source(cr, user, context['base_model_name'], 'view', context['lang'], node.get('string'))
            if trans:
                node.set('string', trans)

        for attr_name in ('confirm', 'sum', 'avg', 'help', 'placeholder'):
            attr_value = node.get(attr_name)
            if attr_value:
                trans = self.pool.get('ir.translation')._get_source(cr, user, self._name, 'view', context['lang'], attr_value)
                if trans:
                    node.set(attr_name, trans)

    # recurse into children, except under relational fields whose subviews
    # were already handled above (``children`` False), where only
    # filter/separator elements are still of interest
    for f in node:
        if children or (node.tag == 'field' and f.tag in ('filter','separator')):
            fields.update(self.__view_look_dom(cr, user, f, view_id, in_tree_view, model_fields, context))

    transfer_modifiers_to_node(modifiers, node)
    return fields
def _disable_workflow_buttons(self, cr, user, node):
""" Set the buttons in node to readonly if the user can't activate them. """
if user == 1:
# admin user can always activate workflow buttons
return node
# TODO handle the case of more than one workflow for a model or multiple
# transitions with different groups and same signal
usersobj = self.pool.get('res.users')
buttons = (n for n in node.getiterator('button') if n.get('type') != 'object')
for button in buttons:
user_groups = usersobj.read(cr, user, [user], ['groups_id'])[0]['groups_id']
cr.execute("""SELECT DISTINCT t.group_id
FROM wkf
INNER JOIN wkf_activity a ON a.wkf_id = wkf.id
INNER JOIN wkf_transition t ON (t.act_to = a.id)
WHERE wkf.osv = %s
AND t.signal = %s
AND t.group_id is NOT NULL
""", (self._name, button.get('name')))
group_ids = [x[0] for x in cr.fetchall() if x[0]]
can_click = not group_ids or bool(set(user_groups).intersection(group_ids))
button.set('readonly', str(int(not can_click)))
return node
def __view_look_dom_arch(self, cr, user, node, view_id, context=None):
    """ Return an architecture and a description of all the fields.

    The field description combines the result of fields_get() and
    __view_look_dom().

    :param node: the architecture as as an etree
    :return: a tuple (arch, fields) where arch is the given node as a
        string and fields is the description of all the fields.
    """
    fields = {}
    if node.tag == 'diagram':
        # diagram views aggregate the fields of the node and arrow models
        if node.getchildren()[0].tag == 'node':
            node_model = self.pool.get(node.getchildren()[0].get('object'))
            node_fields = node_model.fields_get(cr, user, None, context)
            fields.update(node_fields)
            if not node.get("create") and not node_model.check_access_rights(cr, user, 'create', raise_exception=False):
                node.set("create", 'false')
        if node.getchildren()[1].tag == 'arrow':
            arrow_fields = self.pool.get(node.getchildren()[1].get('object')).fields_get(cr, user, None, context)
            fields.update(arrow_fields)
    else:
        fields = self.fields_get(cr, user, None, context)
    # fields_def: fields actually referenced by the architecture
    fields_def = self.__view_look_dom(cr, user, node, view_id, False, fields, context=context)
    node = self._disable_workflow_buttons(cr, user, node)
    # reflect missing CRUD access rights as create/delete/edit="false"
    if node.tag in ('kanban', 'tree', 'form', 'gantt'):
        for action, operation in (('create', 'create'), ('delete', 'unlink'), ('edit', 'write')):
            if not node.get(action) and not self.check_access_rights(cr, user, operation, raise_exception=False):
                node.set(action, 'false')
    arch = etree.tostring(node, encoding="utf-8").replace('\t', '')
    # keep only the fields referenced by the view
    for k in fields.keys():
        if k not in fields_def:
            del fields[k]
    for field in fields_def:
        if field == 'id':
            # sometime, the view may contain the (invisible) field 'id' needed for a domain (when 2 objects have cross references)
            fields['id'] = {'readonly': True, 'type': 'integer', 'string': 'ID'}
        elif field in fields:
            fields[field].update(fields_def[field])
        else:
            # the view references a field unknown to the model: build a
            # diagnostic listing the view parts that mention it, then fail
            cr.execute('select name, model from ir_ui_view where (id=%s or inherit_id=%s) and arch like %s', (view_id, view_id, '%%%s%%' % field))
            res = cr.fetchall()[:]
            model = res[0][1]
            res.insert(0, ("Can't find field '%s' in the following view parts composing the view of object model '%s':" % (field, model), None))
            msg = "\n * ".join([r[0] for r in res])
            msg += "\n\nEither you wrongly customized this view, or some modules bringing those views are not compatible with your current data model"
            _logger.error(msg)
            raise except_orm('View error', msg)
    return arch, fields
def _get_default_form_view(self, cr, user, context=None):
""" Generates a default single-line form view using all fields
of the current model except the m2m and o2m ones.
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a form view as an lxml document
:rtype: etree._Element
"""
view = etree.Element('form', string=self._description)
# TODO it seems fields_get can be replaced by _all_columns (no need for translation)
for field, descriptor in self.fields_get(cr, user, context=context).iteritems():
if descriptor['type'] in ('one2many', 'many2many'):
continue
etree.SubElement(view, 'field', name=field)
if descriptor['type'] == 'text':
etree.SubElement(view, 'newline')
return view
def _get_default_search_view(self, cr, user, context=None):
""" Generates a single-field search view, based on _rec_name.
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a tree view as an lxml document
:rtype: etree._Element
"""
view = etree.Element('search', string=self._description)
etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
return view
def _get_default_tree_view(self, cr, user, context=None):
""" Generates a single-field tree view, based on _rec_name.
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a tree view as an lxml document
:rtype: etree._Element
"""
view = etree.Element('tree', string=self._description)
etree.SubElement(view, 'field', name=self._rec_name_fallback(cr, user, context))
return view
def _get_default_calendar_view(self, cr, user, context=None):
""" Generates a default calendar view by trying to infer
calendar fields from a number of pre-set attribute names
:param cr: database cursor
:param int user: user id
:param dict context: connection context
:returns: a calendar view
:rtype: etree._Element
"""
def set_first_of(seq, in_, to):
"""Sets the first value of ``seq`` also found in ``in_`` to
the ``to`` attribute of the view being closed over.
Returns whether it's found a suitable value (and set it on
the attribute) or not
"""
for item in seq:
if item in in_:
view.set(to, item)
return True
return False
view = etree.Element('calendar', string=self._description)
etree.SubElement(view, 'field', self._rec_name_fallback(cr, user, context))
if self._date_name not in self._columns:
date_found = False
for dt in ['date', 'date_start', 'x_date', 'x_date_start']:
if dt in self._columns:
self._date_name = dt
date_found = True
break
if not date_found:
raise except_orm(_('Invalid Object Architecture!'), _("Insufficient fields for Calendar View!"))
view.set('date_start', self._date_name)
set_first_of(["user_id", "partner_id", "x_user_id", "x_partner_id"],
self._columns, 'color')
if not set_first_of(["date_stop", "date_end", "x_date_stop", "x_date_end"],
self._columns, 'date_stop'):
if not set_first_of(["date_delay", "planned_hours", "x_date_delay", "x_planned_hours"],
self._columns, 'date_delay'):
raise except_orm(
_('Invalid Object Architecture!'),
_("Insufficient fields to generate a Calendar View for %s, missing a date_stop or a date_delay" % self._name))
return view
#
# if view_id, view_type is not required
#
def fields_view_get(self, cr, user, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
    """
    Get the detailed composition of the requested view like fields, model, view architecture

    :param cr: database cursor
    :param user: current user id
    :param view_id: id of the view or None
    :param view_type: type of the view to return if view_id is None ('form', tree', ...)
    :param context: context arguments, like lang, time zone
    :param toolbar: true to include contextual actions
    :param submenu: deprecated
    :return: dictionary describing the composition of the requested view (including inherited views and extensions)
    :raise AttributeError:
                        * if the inherited view has unknown position to work with other than 'before', 'after', 'inside', 'replace'
                        * if some tag other than 'position' is found in parent view
    :raise Invalid ArchitectureError: if there is view type other than form, tree, calendar, search etc defined on the structure
    """
    if context is None:
        context = {}

    def encode(s):
        # etree.fromstring wants bytes for non-ascii input (Python 2)
        if isinstance(s, unicode):
            return s.encode('utf8')
        return s

    def raise_view_error(error_msg, child_view_id):
        view, child_view = self.pool.get('ir.ui.view').browse(cr, user, [view_id, child_view_id], context)
        error_msg = error_msg % {'parent_xml_id': view.xml_id}
        raise AttributeError("View definition error for inherited view '%s' on model '%s': %s"
                             % (child_view.xml_id, self._name, error_msg))

    def locate(source, spec):
        """ Locate a node in a source (parent) architecture.

        Given a complete source (parent) architecture (i.e. the field
        `arch` in a view), and a 'spec' node (a node in an inheriting
        view that specifies the location in the source view of what
        should be changed), return (if it exists) the node in the
        source view matching the specification.

        :param source: a parent architecture to modify
        :param spec: a modifying node in an inheriting view
        :return: a node in the source matching the spec
        """
        if spec.tag == 'xpath':
            nodes = source.xpath(spec.get('expr'))
            return nodes[0] if nodes else None
        elif spec.tag == 'field':
            # Only compare the field name: a field can be only once in a given view
            # at a given level (and for multilevel expressions, we should use xpath
            # inheritance spec anyway).
            for node in source.getiterator('field'):
                if node.get('name') == spec.get('name'):
                    return node
            return None
        # generic tag: match on every attribute of the spec except the
        # inheritance-control ones
        for node in source.getiterator(spec.tag):
            if isinstance(node, SKIPPED_ELEMENT_TYPES):
                continue
            if all(node.get(attr) == spec.get(attr) \
                    for attr in spec.attrib
                    if attr not in ('position','version')):
                # Version spec should match parent's root element's version
                if spec.get('version') and spec.get('version') != source.get('version'):
                    return None
                return node
        return None

    def apply_inheritance_specs(source, specs_arch, inherit_id=None):
        """ Apply an inheriting view.

        Apply to a source architecture all the spec nodes (i.e. nodes
        describing where and what changes to apply to some parent
        architecture) given by an inheriting view.

        :param source: a parent architecture to modify
        :param specs_arch: a modifying architecture in an inheriting view
        :param inherit_id: the database id of the inheriting view
        :return: a modified source where the specs are applied
        """
        specs_tree = etree.fromstring(encode(specs_arch))
        # Queue of specification nodes (i.e. nodes describing where and
        # changes to apply to some parent architecture).
        specs = [specs_tree]

        while len(specs):
            spec = specs.pop(0)
            if isinstance(spec, SKIPPED_ELEMENT_TYPES):
                continue
            if spec.tag == 'data':
                # NOTE(review): this enqueues the children of the root
                # (specs_tree), not of ``spec``; equivalent when the only
                # <data> element is the root, presumably the intent —
                # confirm before relying on nested <data> wrappers.
                specs += [ c for c in specs_tree ]
                continue
            node = locate(source, spec)
            if node is not None:
                pos = spec.get('position', 'inside')
                if pos == 'replace':
                    if node.getparent() is None:
                        # replacing the root: the first child of the spec
                        # becomes the new architecture
                        source = copy.deepcopy(spec[0])
                    else:
                        for child in spec:
                            node.addprevious(child)
                        node.getparent().remove(node)
                elif pos == 'attributes':
                    for child in spec.getiterator('attribute'):
                        attribute = (child.get('name'), child.text and child.text.encode('utf8') or None)
                        if attribute[1]:
                            node.set(attribute[0], attribute[1])
                        else:
                            # empty <attribute> removes the attribute
                            del(node.attrib[attribute[0]])
                else:
                    sib = node.getnext()
                    for child in spec:
                        if pos == 'inside':
                            node.append(child)
                        elif pos == 'after':
                            if sib is None:
                                node.addnext(child)
                                node = child
                            else:
                                sib.addprevious(child)
                        elif pos == 'before':
                            node.addprevious(child)
                        else:
                            raise_view_error("Invalid position value: '%s'" % pos, inherit_id)
            else:
                # spec target not found in the parent: build a readable
                # representation of the spec tag for the error message
                attrs = ''.join([
                    ' %s="%s"' % (attr, spec.get(attr))
                    for attr in spec.attrib
                    if attr != 'position'
                ])
                tag = "<%s%s>" % (spec.tag, attrs)
                if spec.get('version') and spec.get('version') != source.get('version'):
                    raise_view_error("Mismatching view API version for element '%s': %r vs %r in parent view '%%(parent_xml_id)s'" % \
                                        (tag, spec.get('version'), source.get('version')), inherit_id)
                raise_view_error("Element '%s' not found in parent view '%%(parent_xml_id)s'" % tag, inherit_id)
        return source

    def apply_view_inheritance(cr, user, source, inherit_id):
        """ Apply all the (directly and indirectly) inheriting views.

        :param source: a parent architecture to modify (with parent
            modifications already applied)
        :param inherit_id: the database view_id of the parent view
        :return: a modified source where all the modifying architecture
            are applied
        """
        sql_inherit = self.pool.get('ir.ui.view').get_inheriting_views_arch(cr, user, inherit_id, self._name, context=context)
        for (view_arch, view_id) in sql_inherit:
            source = apply_inheritance_specs(source, view_arch, view_id)
            source = apply_view_inheritance(cr, user, source, view_id)
        return source

    result = {'type': view_type, 'model': self._name}

    sql_res = False
    parent_view_model = None
    view_ref = context.get(view_type + '_view_ref')
    # Search for a root (i.e. without any parent) view.
    while True:
        if view_ref and not view_id:
            # resolve a '<module>.<xml_id>' view reference from the context
            if '.' in view_ref:
                module, view_ref = view_ref.split('.', 1)
                cr.execute("SELECT res_id FROM ir_model_data WHERE model='ir.ui.view' AND module=%s AND name=%s", (module, view_ref))
                view_ref_res = cr.fetchone()
                if view_ref_res:
                    view_id = view_ref_res[0]

        if view_id:
            cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                          FROM ir_ui_view
                          WHERE id=%s""", (view_id,))
        else:
            cr.execute("""SELECT arch,name,field_parent,id,type,inherit_id,model
                          FROM ir_ui_view
                          WHERE model=%s AND type=%s AND inherit_id IS NULL
                          ORDER BY priority""", (self._name, view_type))
        sql_res = cr.dictfetchone()

        if not sql_res:
            break

        # climb to the root of the inheritance chain
        view_id = sql_res['inherit_id'] or sql_res['id']
        parent_view_model = sql_res['model']
        if not sql_res['inherit_id']:
            break

    # if a view was found
    if sql_res:
        source = etree.fromstring(encode(sql_res['arch']))
        result.update(
            arch=apply_view_inheritance(cr, user, source, sql_res['id']),
            type=sql_res['type'],
            view_id=sql_res['id'],
            name=sql_res['name'],
            field_parent=sql_res['field_parent'] or False)
    else:
        # otherwise, build some kind of default view
        try:
            view = getattr(self, '_get_default_%s_view' % view_type)(
                cr, user, context)
        except AttributeError:
            # what happens here, graph case?
            raise except_orm(_('Invalid Architecture!'), _("There is no view of type '%s' defined for the structure!") % view_type)
        result.update(
            arch=view,
            name='default',
            field_parent=False,
            view_id=0)

    # when the root view belongs to another model (inherited view whose
    # parent targets a parent model), record that model for translations
    if parent_view_model != self._name:
        ctx = context.copy()
        ctx['base_model_name'] = parent_view_model
    else:
        ctx = context
    xarch, xfields = self.__view_look_dom_arch(cr, user, result['arch'], view_id, context=ctx)
    result['arch'] = xarch
    result['fields'] = xfields

    if toolbar:
        def clean(x):
            # strip the heavy report payloads from the action record
            x = x[2]
            for key in ('report_sxw_content', 'report_rml_content',
                    'report_sxw', 'report_rml',
                    'report_sxw_content_data', 'report_rml_content_data'):
                if key in x:
                    del x[key]
            return x
        ir_values_obj = self.pool.get('ir.values')
        resprint = ir_values_obj.get(cr, user, 'action',
                'client_print_multi', [(self._name, False)], False,
                context)
        resaction = ir_values_obj.get(cr, user, 'action',
                'client_action_multi', [(self._name, False)], False,
                context)
        resrelate = ir_values_obj.get(cr, user, 'action',
                'client_action_relate', [(self._name, False)], False,
                context)
        resaction = [clean(action) for action in resaction
                     if view_type == 'tree' or not action[2].get('multi')]
        resprint = [clean(print_) for print_ in resprint
                    if view_type == 'tree' or not print_[2].get('multi')]
        #When multi="True" set it will display only in More of the list view
        resrelate = [clean(action) for action in resrelate
                     if (action[2].get('multi') and view_type == 'tree') or (not action[2].get('multi') and view_type == 'form')]

        for x in itertools.chain(resprint, resaction, resrelate):
            x['string'] = x['name']

        result['toolbar'] = {
            'print': resprint,
            'action': resaction,
            'relate': resrelate
        }
    return result
# Public (non-name-mangled) alias of __view_look_dom_arch so that
# subclasses and external modules can reach the implementation.
_view_look_dom_arch = __view_look_dom_arch
def search_count(self, cr, user, args, context=None):
    """Return the number of records matching the ``args`` search domain.

    :param cr: database cursor
    :param user: current user id
    :param args: search domain (see :meth:`~.search`)
    :param dict context: optional context arguments
    :return: count of matching records
    """
    if not context:
        context = {}
    matched = self.search(cr, user, args, context=context, count=True)
    # defensive: some search() overrides ignore ``count`` and return ids
    return len(matched) if isinstance(matched, list) else matched
def search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False):
    """Search for records matching a search domain.

    :param cr: database cursor
    :param user: current user id
    :param args: search domain: a list of ``('field_name', 'operator',
        value)`` tuples, possibly combined with the prefix operators
        ``'&'`` (AND, implicit default), ``'|'`` (OR) and ``'!'`` (NOT).
        ``field_name`` may follow many2one relationships with dot
        notation (e.g. ``'partner_id.country'``); operators include
        ``=, !=, >, >=, <, <=, like, ilike, in, not in, child_of``.
        Pass an empty list to match all records.
    :param int offset: optional number of results to skip (default: 0)
    :param limit: optional max number of records to return (default: None)
    :param order: optional columns to sort by (default: self._order=id)
    :param dict context: optional context arguments, like lang, time zone
    :param bool count: if True, return only the number of matching
        records instead of their ids (default: False)
    :return: id or list of ids of matching records, or a count
    :rtype: integer or list of integers
    :raise AccessError: if the user tries to bypass access rules for
        read on the requested object
    """
    # all the work happens in the private implementation
    return self._search(cr, user, args, offset=offset, limit=limit, order=order, context=context, count=count)
def name_get(self, cr, user, ids, context=None):
    """Return the preferred display value (text representation) for the
    records with the given ``ids``.

    By default this is the value of the ``_rec_name`` column, rendered
    through the column's ``as_display_name``; models may override this
    for custom behavior. Can sometimes be seen as the inverse of
    :meth:`~.name_search`, but it is not guaranteed to be.

    :rtype: list(tuple)
    :return: list of pairs ``(id, text_repr)`` for all records with the
             given ``ids``.
    """
    if not ids:
        return []
    if isinstance(ids, (int, long)):
        ids = [ids]
    if self._rec_name not in self._all_columns:
        # no usable display column: fall back to "model,id" placeholders
        return [(record_id, "%s,%s" % (self._name, record_id)) for record_id in ids]
    display_column = self._all_columns[self._rec_name].column
    records = self.read(cr, user, ids, [self._rec_name],
                        load='_classic_write', context=context)
    return [(record['id'],
             display_column.as_display_name(cr, user, self, record[self._rec_name], context=context))
            for record in records]
def name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100):
    """Search records whose display name matches ``name`` under
    ``operator``, further restricted by the optional domain ``args``.

    Used for example to provide suggestions based on a partial value of
    a relational field. Sometimes seen as the inverse of
    :meth:`~.name_get`, but not guaranteed to be. Equivalent to calling
    :meth:`~.search` with a ``name``-based domain and then
    :meth:`~.name_get` on the result.

    :param list args: optional search domain (see :meth:`~.search`)
    :param str operator: domain operator for matching ``name``, such as
        ``'like'`` or ``'='``
    :param int limit: optional max number of records to return
    :rtype: list
    :return: list of pairs ``(id, text_repr)`` for all matching records.
    """
    # delegate to the private implementation (which additionally allows a
    # dedicated uid for the name_get step)
    return self._name_search(cr, user, name, args, operator, context, limit)
def name_create(self, cr, uid, name, context=None):
    """Create a record by providing only its display name.

    Calls :meth:`~.create` with a single value, the ``_rec_name`` field;
    all other fields take their default values (from the model or the
    context). The usual behavior of :meth:`~.create` applies, so this
    may raise when other required fields have no default.

    :param name: name of the record to create
    :rtype: tuple
    :return: the :meth:`~.name_get` pair of the newly-created record.
    """
    new_id = self.create(cr, uid, {self._rec_name: name}, context)
    return self.name_get(cr, uid, [new_id], context)[0]
# private implementation of name_search, allows passing a dedicated user for the name_get part to
# solve some access rights issues
def _name_search(self, cr, user, name='', args=None, operator='ilike', context=None, limit=100, name_get_uid=None):
if args is None:
args = []
if context is None:
context = {}
args = args[:]
# optimize out the default criterion of ``ilike ''`` that matches everything
if not (name == '' and operator == 'ilike'):
args += [(self._rec_name, operator, name)]
access_rights_uid = name_get_uid or user
ids = self._search(cr, user, args, limit=limit, context=context, access_rights_uid=access_rights_uid)
res = self.name_get(cr, access_rights_uid, ids, context)
return res
def read_string(self, cr, uid, id, langs, fields=None, context=None):
    """Return, per language in ``langs``, the (possibly translated)
    labels of the given fields, falling back to the column's ``string``
    when no translation exists. Labels of ``_inherits`` parents are
    merged in (last parent wins for duplicated languages/fields).

    :return: dict ``{lang: {'code': lang, field_name: label, ...}, ...}``
    """
    translations = {}
    inherited = {}
    translation_obj = self.pool.get('ir.translation')
    translation_obj.check_access_rights(cr, uid, 'read')
    if not fields:
        fields = self._columns.keys() + self._inherit_fields.keys()
    #FIXME: collect all calls to _get_source into one SQL call.
    for lang in langs:
        translations[lang] = {'code': lang}
        for field_name in fields:
            if field_name in self._columns:
                translated = translation_obj._get_source(cr, uid, self._name + ',' + field_name, 'field', lang)
                if translated:
                    translations[lang][field_name] = translated
                else:
                    translations[lang][field_name] = self._columns[field_name].string
    for table in self._inherits:
        cols = intersect(self._inherit_fields.keys(), fields)
        inherited = self.pool.get(table).read_string(cr, uid, id, langs, cols, context)
    # merge the (last) parent's labels over ours
    for lang in inherited:
        if lang in translations:
            translations[lang]['code'] = lang
        for field_name in inherited[lang]:
            translations[lang][field_name] = inherited[lang][field_name]
    return translations
def write_string(self, cr, uid, id, langs, vals, context=None):
    """Store translated field labels for every language in ``langs``.

    ``vals`` maps field names to their translated label; labels of
    fields inherited through ``_inherits`` are forwarded to the parent
    models. Always returns True.
    """
    translation_obj = self.pool.get('ir.translation')
    translation_obj.check_access_rights(cr, uid, 'write')
    #FIXME: try to only call the translation in one SQL
    for lang in langs:
        for field_name in vals:
            if field_name in self._columns:
                source_label = self._columns[field_name].string
                translation_obj._set_ids(cr, uid, self._name + ',' + field_name, 'field', lang, [0], vals[field_name], source_label)
    for table in self._inherits:
        cols = intersect(self._inherit_fields.keys(), vals)
        if cols:
            self.pool.get(table).write_string(cr, uid, id, langs, vals, context)
    return True
def _add_missing_default_values(self, cr, uid, values, context=None):
    # Complete ``values`` with default values for every field (own or
    # inherited) not already provided, normalizing x2many defaults into
    # the (6,0,ids)/(0,0,vals) command format. Provided values always
    # win over defaults.
    missing_defaults = []
    avoid_tables = [] # avoid overriding inherited values when parent is set
    for tables, parent_field in self._inherits.items():
        if parent_field in values:
            avoid_tables.append(tables)
    for field in self._columns.keys():
        if not field in values:
            missing_defaults.append(field)
    # inherited fields are skipped when their parent record is passed in
    for field in self._inherit_fields.keys():
        if (field not in values) and (self._inherit_fields[field][0] not in avoid_tables):
            missing_defaults.append(field)

    if len(missing_defaults):
        # override defaults with the provided values, never allow the other way around
        defaults = self.default_get(cr, uid, missing_defaults, context)
        for dv in defaults:
            # many2many default given as a plain list of ids -> wrap in a
            # single (6, 0, ids) replace command
            if ((dv in self._columns and self._columns[dv]._type == 'many2many') \
                 or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'many2many')) \
                    and defaults[dv] and isinstance(defaults[dv][0], (int, long)):
                defaults[dv] = [(6, 0, defaults[dv])]
            # one2many default given as a list of value dicts -> one
            # (0, 0, vals) create command per dict
            # NOTE(review): relies on `and` binding tighter than `or` in
            # the first line of this condition — do not re-parenthesize
            if (dv in self._columns and self._columns[dv]._type == 'one2many' \
                    or (dv in self._inherit_fields and self._inherit_fields[dv][2]._type == 'one2many')) \
                    and isinstance(defaults[dv], (list, tuple)) and defaults[dv] and isinstance(defaults[dv][0], dict):
                defaults[dv] = [(0, 0, x) for x in defaults[dv]]
        defaults.update(values)
        values = defaults
    return values
def clear_caches(self):
    """ Clear the caches

    This clears the caches associated to methods decorated with
    ``tools.ormcache`` or ``tools.ormcache_multi``.

    No-op when the model has no cache (or no registry pool): the
    AttributeError from either probe is deliberately swallowed.
    """
    try:
        # probe: raises AttributeError when no cache was ever created
        getattr(self, '_ormcache')
        self._ormcache = {}
        # tell the registry that caches must be invalidated cross-worker
        self.pool._any_cache_cleared = True
    except AttributeError:
        pass
def _read_group_fill_results(self, cr, uid, domain, groupby, groupby_list, aggregated_fields,
                             read_group_result, read_group_order=None, context=None):
    """Helper method for filling in empty groups for all possible values of
       the field being grouped by"""

    # self._group_by_full should map groupable fields to a method that returns
    # a list of all aggregated values that we want to display for this field,
    # in the form of a m2o-like pair (key,label).
    # This is useful to implement kanban views for instance, where all columns
    # should be displayed even if they don't contain any record.

    # Grab the list of all groups that should be displayed, including all present groups
    present_group_ids = [x[groupby][0] for x in read_group_result if x[groupby]]
    all_groups,folded = self._group_by_full[groupby](self, cr, uid, present_group_ids, domain,
                                                     read_group_order=read_group_order,
                                                     access_rights_uid=openerp.SUPERUSER_ID,
                                                     context=context)

    # template for a group line with no records: all aggregates False,
    # count 0, and the remaining group_by levels pushed into __context
    result_template = dict.fromkeys(aggregated_fields, False)
    result_template[groupby + '_count'] = 0
    if groupby_list and len(groupby_list) > 1:
        result_template['__context'] = {'group_by': groupby_list[1:]}

    # Merge the left_side (current results as dicts) with the right_side (all
    # possible values as m2o pairs). Both lists are supposed to be using the
    # same ordering, and can be merged in one pass.
    result = []
    known_values = {}
    def append_left(left_side):
        # add an actual read_group line; merge counts on duplicates
        grouped_value = left_side[groupby] and left_side[groupby][0]
        if not grouped_value in known_values:
            result.append(left_side)
            known_values[grouped_value] = left_side
        else:
            count_attr = groupby + '_count'
            known_values[grouped_value].update({count_attr: left_side[count_attr]})
    def append_right(right_side):
        # add an empty group line built from the template
        grouped_value = right_side[0]
        if not grouped_value in known_values:
            line = dict(result_template)
            line[groupby] = right_side
            line['__domain'] = [(groupby,'=',grouped_value)] + domain
            result.append(line)
            known_values[grouped_value] = line
    # classic two-pointer merge over the two equally-ordered lists
    while read_group_result or all_groups:
        left_side = read_group_result[0] if read_group_result else None
        right_side = all_groups[0] if all_groups else None
        assert left_side is None or left_side[groupby] is False \
             or isinstance(left_side[groupby], (tuple,list)), \
            'M2O-like pair expected, got %r' % left_side[groupby]
        assert right_side is None or isinstance(right_side, (tuple,list)), \
            'M2O-like pair expected, got %r' % right_side
        if left_side is None:
            append_right(all_groups.pop(0))
        elif right_side is None:
            append_left(read_group_result.pop(0))
        elif left_side[groupby] == right_side:
            append_left(read_group_result.pop(0))
            all_groups.pop(0) # discard right_side
        elif not left_side[groupby] or not left_side[groupby][0]:
            # left side == "Undefined" entry, not present on right_side
            append_left(read_group_result.pop(0))
        else:
            append_right(all_groups.pop(0))

    # propagate the fold state supplied by the _group_by_full callback
    if folded:
        for r in result:
            r['__fold'] = folded.get(r[groupby] and r[groupby][0], False)
    return result
def read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False):
    """
    Get the list of records in list view grouped by the given ``groupby`` fields

    :param cr: database cursor
    :param uid: current user id
    :param domain: list specifying search criteria [['field_name', 'operator', 'value'], ...]
    :param list fields: list of fields present in the list view specified on the object
    :param list groupby: fields by which the records will be grouped
    :param int offset: optional number of records to skip
    :param int limit: optional max number of records to return
    :param dict context: context arguments, like lang, time zone
    :param list orderby: optional ``order by`` specification, for
                         overriding the natural sort ordering of the
                         groups, see also :py:meth:`~osv.osv.osv.search`
                         (supported only for many2one fields currently)
    :return: list of dictionaries(one dictionary for each record) containing:

                * the values of fields grouped by the fields in ``groupby`` argument
                * __domain: list of tuples specifying the search criteria
                * __context: dictionary with argument like ``groupby``
    :rtype: [{'field_name_1': value, ...}, ...]
    :raise AccessError: * if user has no read rights on the requested object
                        * if user tries to bypass access rules for read on the requested object
    """
    context = context or {}
    self.check_access_rights(cr, uid, 'read')
    if not fields:
        fields = self._columns.keys()

    # Build the base query with record rules applied before grouping.
    query = self._where_calc(cr, uid, domain, context=context)
    self._apply_ir_rules(cr, uid, query, 'read', context=context)

    # Take care of adding join(s) if groupby is an '_inherits'ed field
    groupby_list = groupby
    qualified_groupby_field = groupby
    if groupby:
        if isinstance(groupby, list):
            # Only the first grouping level is computed server-side; deeper
            # levels are delegated to the client via __context below.
            groupby = groupby[0]
        qualified_groupby_field = self._inherits_join_calc(groupby, query)

    if groupby:
        assert not groupby or groupby in fields, "Fields in 'groupby' must appear in the list of fields to read (perhaps it's missing in the list view?)"
        groupby_def = self._columns.get(groupby) or (self._inherit_fields.get(groupby) and self._inherit_fields.get(groupby)[2])
        assert groupby_def and groupby_def._classic_write, "Fields in 'groupby' must be regular database-persisted fields (no function or related fields), or function fields with store=True"

    # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
    fget = self.fields_get(cr, uid, fields)
    flist = ''
    group_count = group_by = groupby
    if groupby:
        if fget.get(groupby):
            groupby_type = fget[groupby]['type']
            if groupby_type in ('date', 'datetime'):
                # Group dates by month: the SELECT expression becomes a
                # 'yyyy-mm' string instead of the raw column.
                qualified_groupby_field = "to_char(%s,'yyyy-mm')" % qualified_groupby_field
                flist = "%s as %s " % (qualified_groupby_field, groupby)
            elif groupby_type == 'boolean':
                # NULL and false belong to the same group for booleans.
                qualified_groupby_field = "coalesce(%s,false)" % qualified_groupby_field
                flist = "%s as %s " % (qualified_groupby_field, groupby)
            else:
                flist = qualified_groupby_field
        else:
            # Don't allow arbitrary values, as this would be a SQL injection vector!
            raise except_orm(_('Invalid group_by'),
                             _('Invalid group_by specification: "%s".\nA group_by specification must be a list of valid fields.')%(groupby,))

    # Only classic-write numeric columns can be aggregated directly in SQL.
    aggregated_fields = [
        f for f in fields
        if f not in ('id', 'sequence')
        if fget[f]['type'] in ('integer', 'float')
        if (f in self._columns and getattr(self._columns[f], '_classic_write'))]
    for f in aggregated_fields:
        group_operator = fget[f].get('group_operator', 'sum')
        if flist:
            flist += ', '
        qualified_field = '"%s"."%s"' % (self._table, f)
        flist += "%s(%s) AS %s" % (group_operator, qualified_field, f)

    gb = groupby and (' GROUP BY ' + qualified_groupby_field) or ''

    from_clause, where_clause, where_clause_params = query.get_sql()
    where_clause = where_clause and ' WHERE ' + where_clause
    limit_str = limit and ' limit %d' % limit or ''
    offset_str = offset and ' offset %d' % offset or ''
    if len(groupby_list) < 2 and context.get('group_by_no_leaf'):
        # Leafless mode with a single level: alias the count as '__count'.
        group_count = '_'
    # min(id) gives a representative record per group; it is used below to
    # fetch the group's display value through read().
    cr.execute('SELECT min(%s.id) AS id, count(%s.id) AS %s_count' % (self._table, self._table, group_count) + (flist and ',') + flist + ' FROM ' + from_clause + where_clause + gb + limit_str + offset_str, where_clause_params)
    alldata = {}
    groupby = group_by
    for r in cr.dictfetchall():
        # Normalize SQL NULLs to False, the ORM convention.
        for fld, val in r.items():
            if val is None: r[fld] = False
        alldata[r['id']] = r
        del r['id']

    order = orderby or groupby
    data_ids = self.search(cr, uid, [('id', 'in', alldata.keys())], order=order, context=context)

    # the IDs of records that have groupby field value = False or '' should be included too
    data_ids += set(alldata.keys()).difference(data_ids)

    if groupby:
        data = self.read(cr, uid, data_ids, [groupby], context=context)
        # restore order of the search as read() uses the default _order (this is only for groups, so the footprint of data should be small):
        data_dict = dict((d['id'], d[groupby] ) for d in data)
        result = [{'id': i, groupby: data_dict[i]} for i in data_ids]
    else:
        result = [{'id': i} for i in data_ids]

    for d in result:
        if groupby:
            d['__domain'] = [(groupby, '=', alldata[d['id']][groupby] or False)] + domain
            if not isinstance(groupby_list, (str, unicode)):
                if groupby or not context.get('group_by_no_leaf', False):
                    d['__context'] = {'group_by': groupby_list[1:]}
        if groupby and groupby in fget:
            if d[groupby] and fget[groupby]['type'] in ('date', 'datetime'):
                # Turn the 'yyyy-mm' group key into a localized month label
                # and a whole-month __domain.
                dt = datetime.datetime.strptime(alldata[d['id']][groupby][:7], '%Y-%m')
                days = calendar.monthrange(dt.year, dt.month)[1]
                date_value = datetime.datetime.strptime(d[groupby][:10], '%Y-%m-%d')
                d[groupby] = babel.dates.format_date(
                    date_value, format='MMMM yyyy', locale=context.get('lang', 'en_US'))
                d['__domain'] = [(groupby, '>=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-01', '%Y-%m-%d').strftime('%Y-%m-%d') or False),\
                                 (groupby, '<=', alldata[d['id']][groupby] and datetime.datetime.strptime(alldata[d['id']][groupby][:7] + '-' + str(days), '%Y-%m-%d').strftime('%Y-%m-%d') or False)] + domain
            del alldata[d['id']][groupby]
        d.update(alldata[d['id']])
        del d['id']

    # Let the model inject its empty groups (e.g. empty kanban columns).
    if groupby and groupby in self._group_by_full:
        result = self._read_group_fill_results(cr, uid, domain, groupby, groupby_list,
                                               aggregated_fields, result, read_group_order=order,
                                               context=context)
    return result
def _inherits_join_add(self, current_model, parent_model_name, query):
"""
Add missing table SELECT and JOIN clause to ``query`` for reaching the parent table (no duplicates)
:param current_model: current model object
:param parent_model_name: name of the parent model for which the clauses should be added
:param query: query object on which the JOIN should be added
"""
inherits_field = current_model._inherits[parent_model_name]
parent_model = self.pool.get(parent_model_name)
parent_alias, parent_alias_statement = query.add_join((current_model._table, parent_model._table, inherits_field, 'id', inherits_field), implicit=True)
return parent_alias
def _inherits_join_calc(self, field, query):
"""
Adds missing table select and join clause(s) to ``query`` for reaching
the field coming from an '_inherits' parent table (no duplicates).
:param field: name of inherited field to reach
:param query: query object on which the JOIN should be added
:return: qualified name of field, to be used in SELECT clause
"""
current_table = self
parent_alias = '"%s"' % current_table._table
while field in current_table._inherit_fields and not field in current_table._columns:
parent_model_name = current_table._inherit_fields[field][0]
parent_table = self.pool.get(parent_model_name)
parent_alias = self._inherits_join_add(current_table, parent_model_name, query)
current_table = parent_table
return '%s."%s"' % (parent_alias, field)
def _parent_store_compute(self, cr):
    """Recompute parent_left/parent_right for the whole table.

    Performs a depth-first traversal of the parent/child hierarchy and
    assigns nested-set interval bounds to each record, so subtree lookups
    can be done with simple range comparisons. No-op when the model does
    not use the parent store. Returns True on completion.
    """
    if not self._parent_store:
        return
    _logger.info('Computing parent left and right for table %s...', self._table)
    def browse_rec(root, pos=0):
        # TODO: set order
        # Children of `root`; root ids come from our own id column, so the
        # string concatenation below is not user-controlled input.
        where = self._parent_name+'='+str(root)
        if not root:
            where = self._parent_name+' IS NULL'
        if self._parent_order:
            where += ' order by '+self._parent_order
        cr.execute('SELECT id FROM '+self._table+' WHERE '+where)
        pos2 = pos + 1
        # Recurse into each child first; pos2 ends up just past the subtree.
        for id in cr.fetchall():
            pos2 = browse_rec(id[0], pos2)
        cr.execute('update '+self._table+' set parent_left=%s, parent_right=%s where id=%s', (pos, pos2, root))
        return pos2 + 1
    # Roots are the records without a parent.
    query = 'SELECT id FROM '+self._table+' WHERE '+self._parent_name+' IS NULL'
    if self._parent_order:
        query += ' order by ' + self._parent_order
    pos = 0
    cr.execute(query)
    for (root,) in cr.fetchall():
        pos = browse_rec(root, pos)
    return True
def _update_store(self, cr, f, k):
    """Recompute and persist the values of stored function field ``k``.

    Called after schema migration for fields.function columns with
    store=True: fetches all record ids, recomputes the field in slices
    and writes each non-False value back with the column's symbol_set
    conversion.
    """
    _logger.info("storing computed values of fields.function '%s'", k)
    symbol_set = self._columns[k]._symbol_set
    update_query = 'UPDATE "%s" SET "%s"=%s WHERE id=%%s' % (self._table, k, symbol_set[0])
    cr.execute('select id from '+self._table)
    remaining = map(lambda x: x[0], cr.fetchall())
    # Recompute in slices of 40 ids to bound memory usage per batch.
    while remaining:
        batch, remaining = remaining[:40], remaining[40:]
        computed = f.get(cr, self, batch, k, SUPERUSER_ID, {})
        for record_id, value in computed.items():
            if f._multi:
                # Multi-function fields return one sub-dict per field name.
                value = value[k]
            # if val is a many2one, just write the ID
            if type(value) == tuple:
                value = value[0]
            if value is not False:
                cr.execute(update_query, (symbol_set[1](value), record_id))
def _check_selection_field_value(self, cr, uid, field, value, context=None):
"""Raise except_orm if value is not among the valid values for the selection field"""
if self._columns[field]._type == 'reference':
val_model, val_id_str = value.split(',', 1)
val_id = False
try:
val_id = long(val_id_str)
except ValueError:
pass
if not val_id:
raise except_orm(_('ValidateError'),
_('Invalid value for reference field "%s.%s" (last part must be a non-zero integer): "%s"') % (self._table, field, value))
val = val_model
else:
val = value
if isinstance(self._columns[field].selection, (tuple, list)):
if val in dict(self._columns[field].selection):
return
elif val in dict(self._columns[field].selection(self, cr, uid, context=context)):
return
raise except_orm(_('ValidateError'),
_('The value "%s" for the field "%s.%s" is not in the selection') % (value, self._table, field))
def _check_removed_columns(self, cr, log=False):
    """Relax constraints left behind by columns removed from the model.

    Scans the live database columns of ``self._table`` and, for any
    column that is no longer declared in ``self._columns`` (nor a magic
    column), drops its NOT NULL constraint so inserts keep working.

    :param cr: database cursor
    :param bool log: when True, debug-log each orphaned column found
    """
    # iterate on the database columns to drop the NOT NULL constraints
    # of fields which were required but have been removed (or will be added by another module)
    columns = [c for c in self._columns if not (isinstance(self._columns[c], fields.function) and not self._columns[c].store)]
    columns += MAGIC_COLUMNS
    # BUGFIX: the original statement carried a stray trailing comma, which
    # wrapped the execute() call in a discarded 1-tuple expression.
    cr.execute("SELECT a.attname, a.attnotnull"
               " FROM pg_class c, pg_attribute a"
               " WHERE c.relname=%s"
               " AND c.oid=a.attrelid"
               " AND a.attisdropped=%s"
               " AND pg_catalog.format_type(a.atttypid, a.atttypmod) NOT IN ('cid', 'tid', 'oid', 'xid')"
               " AND a.attname NOT IN %s", (self._table, False, tuple(columns)))

    for column in cr.dictfetchall():
        if log:
            _logger.debug("column %s is in the table %s but not in the corresponding object %s",
                          column['attname'], self._table, self._name)
        if column['attnotnull']:
            cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, column['attname']))
            _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                          self._table, column['attname'])
def _save_constraint(self, cr, constraint_name, type):
"""
Record the creation of a constraint for this model, to make it possible
to delete it later when the module is uninstalled. Type can be either
'f' or 'u' depending on the constraint being a foreign key or not.
"""
if not self._module:
# no need to save constraints for custom models as they're not part
# of any module
return
assert type in ('f', 'u')
cr.execute("""
SELECT 1 FROM ir_model_constraint, ir_module_module
WHERE ir_model_constraint.module=ir_module_module.id
AND ir_model_constraint.name=%s
AND ir_module_module.name=%s
""", (constraint_name, self._module))
if not cr.rowcount:
cr.execute("""
INSERT INTO ir_model_constraint
(name, date_init, date_update, module, model, type)
VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
(SELECT id FROM ir_module_module WHERE name=%s),
(SELECT id FROM ir_model WHERE model=%s), %s)""",
(constraint_name, self._module, self._name, type))
def _save_relation_table(self, cr, relation_table):
"""
Record the creation of a many2many for this model, to make it possible
to delete it later when the module is uninstalled.
"""
cr.execute("""
SELECT 1 FROM ir_model_relation, ir_module_module
WHERE ir_model_relation.module=ir_module_module.id
AND ir_model_relation.name=%s
AND ir_module_module.name=%s
""", (relation_table, self._module))
if not cr.rowcount:
cr.execute("""INSERT INTO ir_model_relation (name, date_init, date_update, module, model)
VALUES (%s, now() AT TIME ZONE 'UTC', now() AT TIME ZONE 'UTC',
(SELECT id FROM ir_module_module WHERE name=%s),
(SELECT id FROM ir_model WHERE model=%s))""",
(relation_table, self._module, self._name))
# checked version: for direct m2o starting from `self`
def _m2o_add_foreign_key_checked(self, source_field, dest_model, ondelete):
    """Queue a foreign key from self._table.source_field to dest_model,
    after validating the transient/non-transient pairing. The FK is only
    recorded in self._foreign_keys; _auto_end actually creates it."""
    assert self.is_transient() or not dest_model.is_transient(), \
        'Many2One relationships from non-transient Model to TransientModel are forbidden'
    action = ondelete
    if self.is_transient() and not dest_model.is_transient():
        # TransientModel relationships to regular Models are annoying
        # usually because they could block deletion due to the FKs.
        # So unless stated otherwise we default them to ondelete=cascade.
        action = action or 'cascade'
    entry = (self._table, source_field, dest_model._table, action or 'set null')
    self._foreign_keys.add(entry)
    _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *entry)
# unchecked version: for custom cases, such as m2m relationships
def _m2o_add_foreign_key_unchecked(self, source_table, source_field, dest_model, ondelete):
    """Queue a foreign key from an arbitrary source table (e.g. an m2m
    relation table) without any transient-model validation."""
    entry = (source_table, source_field, dest_model._table, ondelete or 'set null')
    self._foreign_keys.add(entry)
    _schema.debug("Table '%s': added foreign key '%s' with definition=REFERENCES \"%s\" ON DELETE %s", *entry)
def _drop_constraint(self, cr, source_table, constraint_name):
cr.execute("ALTER TABLE %s DROP CONSTRAINT %s" % (source_table,constraint_name))
def _m2o_fix_foreign_key(self, cr, source_table, source_field, dest_model, ondelete):
# Find FK constraint(s) currently established for the m2o field,
# and see whether they are stale or not
cr.execute("""SELECT confdeltype as ondelete_rule, conname as constraint_name,
cl2.relname as foreign_table
FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
pg_attribute as att1, pg_attribute as att2
WHERE con.conrelid = cl1.oid
AND cl1.relname = %s
AND con.confrelid = cl2.oid
AND array_lower(con.conkey, 1) = 1
AND con.conkey[1] = att1.attnum
AND att1.attrelid = cl1.oid
AND att1.attname = %s
AND array_lower(con.confkey, 1) = 1
AND con.confkey[1] = att2.attnum
AND att2.attrelid = cl2.oid
AND att2.attname = %s
AND con.contype = 'f'""", (source_table, source_field, 'id'))
constraints = cr.dictfetchall()
if constraints:
if len(constraints) == 1:
# Is it the right constraint?
cons, = constraints
if cons['ondelete_rule'] != POSTGRES_CONFDELTYPES.get((ondelete or 'set null').upper(), 'a')\
or cons['foreign_table'] != dest_model._table:
# Wrong FK: drop it and recreate
_schema.debug("Table '%s': dropping obsolete FK constraint: '%s'",
source_table, cons['constraint_name'])
self._drop_constraint(cr, source_table, cons['constraint_name'])
else:
# it's all good, nothing to do!
return
else:
# Multiple FKs found for the same field, drop them all, and re-create
for cons in constraints:
_schema.debug("Table '%s': dropping duplicate FK constraints: '%s'",
source_table, cons['constraint_name'])
self._drop_constraint(cr, source_table, cons['constraint_name'])
# (re-)create the FK
self._m2o_add_foreign_key_checked(source_field, dest_model, ondelete)
def _auto_init(self, cr, context=None):
    """

    Call _field_create and, unless _auto is False:

    - create the corresponding table in database for the model,
    - possibly add the parent columns in database,
    - possibly add the columns 'create_uid', 'create_date', 'write_uid',
      'write_date' in database if _log_access is True (the default),
    - report on database columns no more existing in _columns,
    - remove no more existing not null constraints,
    - alter existing database columns to match _columns,
    - create database tables to match _columns,
    - add database indices to match _columns,
    - save in self._foreign_keys a list a foreign keys to create (see
      _auto_end).

    NOTE: intermediate cr.commit() calls are sprinkled throughout so a
    failure mid-migration keeps the work done so far; do not reorder them.
    """
    self._foreign_keys = set()
    raise_on_invalid_object_name(self._name)
    if context is None:
        context = {}
    store_compute = False
    todo_end = []
    update_custom_fields = context.get('update_custom_fields', False)
    self._field_create(cr, context=context)
    # `create` is True when the table does not exist yet
    create = not self._table_exist(cr)
    if getattr(self, '_auto', True):

        if create:
            self._create_table(cr)

        cr.commit()
        if self._parent_store:
            if not self._parent_columns_exist(cr):
                self._create_parent_columns(cr)
                # freshly added nested-set columns must be computed at the end
                store_compute = True

        # Create the create_uid, create_date, write_uid, write_date, columns if desired.
        if self._log_access:
            self._add_log_columns(cr)

        self._check_removed_columns(cr, log=False)

        # iterate on the "object columns"
        column_data = self._select_column_data(cr)

        for k, f in self._columns.iteritems():
            if k in MAGIC_COLUMNS:
                continue
            # Don't update custom (also called manual) fields
            if f.manual and not update_custom_fields:
                continue

            if isinstance(f, fields.one2many):
                self._o2m_raise_on_missing_reference(cr, f)

            elif isinstance(f, fields.many2many):
                self._m2m_raise_or_create_relation(cr, f)

            else:
                res = column_data.get(k)

                # The field is not found as-is in database, try if it
                # exists with an old name.
                if not res and hasattr(f, 'oldname'):
                    res = column_data.get(f.oldname)
                    if res:
                        cr.execute('ALTER TABLE "%s" RENAME "%s" TO "%s"' % (self._table, f.oldname, k))
                        res['attname'] = k
                        column_data[k] = res
                        _schema.debug("Table '%s': renamed column '%s' to '%s'",
                                      self._table, f.oldname, k)

                # The field already exists in database. Possibly
                # change its type, rename it, drop it or change its
                # constraints.
                if res:
                    f_pg_type = res['typname']
                    f_pg_size = res['size']
                    f_pg_notnull = res['attnotnull']
                    # A column backing a non-stored function field is dead
                    # weight: drop it (unless the field opts out via nodrop).
                    if isinstance(f, fields.function) and not f.store and\
                            not getattr(f, 'nodrop', False):
                        _logger.info('column %s (%s) in table %s removed: converted to a function !\n',
                                     k, f.string, self._table)
                        cr.execute('ALTER TABLE "%s" DROP COLUMN "%s" CASCADE' % (self._table, k))
                        cr.commit()
                        _schema.debug("Table '%s': dropped column '%s' with cascade",
                                      self._table, k)
                        f_obj_type = None
                    else:
                        f_obj_type = get_pg_type(f) and get_pg_type(f)[0]

                    if f_obj_type:
                        ok = False
                        # Known safe in-place conversions:
                        # (current pg type, field type, new pg type, cast suffix)
                        casts = [
                            ('text', 'char', pg_varchar(f.size), '::%s' % pg_varchar(f.size)),
                            ('varchar', 'text', 'TEXT', ''),
                            ('int4', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                            ('date', 'datetime', 'TIMESTAMP', '::TIMESTAMP'),
                            ('timestamp', 'date', 'date', '::date'),
                            ('numeric', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                            ('float8', 'float', get_pg_type(f)[1], '::'+get_pg_type(f)[1]),
                        ]
                        # varchar whose declared size changed: rebuild the
                        # column through a temporary one to apply the new size.
                        if f_pg_type == 'varchar' and f._type == 'char' and ((f.size is None and f_pg_size) or f_pg_size < f.size):
                            cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                            cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, pg_varchar(f.size)))
                            cr.execute('UPDATE "%s" SET "%s"=temp_change_size::%s' % (self._table, k, pg_varchar(f.size)))
                            cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                            cr.commit()
                            _schema.debug("Table '%s': column '%s' (type varchar) changed size from %s to %s",
                                          self._table, k, f_pg_size or 'unlimited', f.size or 'unlimited')
                        for c in casts:
                            if (f_pg_type==c[0]) and (f._type==c[1]):
                                if f_pg_type != f_obj_type:
                                    ok = True
                                    cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO temp_change_size' % (self._table, k))
                                    cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, c[2]))
                                    cr.execute(('UPDATE "%s" SET "%s"=temp_change_size'+c[3]) % (self._table, k))
                                    cr.execute('ALTER TABLE "%s" DROP COLUMN temp_change_size CASCADE' % (self._table,))
                                    cr.commit()
                                    _schema.debug("Table '%s': column '%s' changed type from %s to %s",
                                                  self._table, k, c[0], c[1])
                                break

                        if f_pg_type != f_obj_type:
                            if not ok:
                                # No safe conversion known: park the old data
                                # in a fresh '<k>_movedN' column and recreate
                                # the column with the right type.
                                i = 0
                                while True:
                                    newname = k + '_moved' + str(i)
                                    cr.execute("SELECT count(1) FROM pg_class c,pg_attribute a " \
                                               "WHERE c.relname=%s " \
                                               "AND a.attname=%s " \
                                               "AND c.oid=a.attrelid ", (self._table, newname))
                                    if not cr.fetchone()[0]:
                                        break
                                    i += 1
                                if f_pg_notnull:
                                    cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                                cr.execute('ALTER TABLE "%s" RENAME COLUMN "%s" TO "%s"' % (self._table, k, newname))
                                cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                                cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                                _schema.debug("Table '%s': column '%s' has changed type (DB=%s, def=%s), data moved to column %s !",
                                              self._table, k, f_pg_type, f._type, newname)

                        # if the field is required and hasn't got a NOT NULL constraint
                        if f.required and f_pg_notnull == 0:
                            # set the field to the default value if any
                            if k in self._defaults:
                                if callable(self._defaults[k]):
                                    default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                                else:
                                    default = self._defaults[k]

                                if default is not None:
                                    ss = self._columns[k]._symbol_set
                                    query = 'UPDATE "%s" SET "%s"=%s WHERE "%s" is NULL' % (self._table, k, ss[0], k)
                                    cr.execute(query, (ss[1](default),))
                            # add the NOT NULL constraint
                            cr.commit()
                            try:
                                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                                cr.commit()
                                _schema.debug("Table '%s': column '%s': added NOT NULL constraint",
                                              self._table, k)
                            except Exception:
                                # remaining NULLs prevent the constraint: warn
                                # the admin instead of aborting the migration
                                msg = "Table '%s': unable to set a NOT NULL constraint on column '%s' !\n"\
                                    "If you want to have it, you should update the records and execute manually:\n"\
                                    "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                                _schema.warning(msg, self._table, k, self._table, k)
                            cr.commit()
                        elif not f.required and f_pg_notnull == 1:
                            cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" DROP NOT NULL' % (self._table, k))
                            cr.commit()
                            _schema.debug("Table '%s': column '%s': dropped NOT NULL constraint",
                                          self._table, k)
                        # Verify index
                        indexname = '%s_%s_index' % (self._table, k)
                        cr.execute("SELECT indexname FROM pg_indexes WHERE indexname = %s and tablename = %s", (indexname, self._table))
                        res2 = cr.dictfetchall()
                        if not res2 and f.select:
                            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                            cr.commit()
                            if f._type == 'text':
                                # FIXME: for fields.text columns we should try creating GIN indexes instead (seems most suitable for an ERP context)
                                msg = "Table '%s': Adding (b-tree) index for %s column '%s'."\
                                    "This is probably useless (does not work for fulltext search) and prevents INSERTs of long texts"\
                                    " because there is a length limit for indexable btree values!\n"\
                                    "Use a search view instead if you simply want to make the field searchable."
                                _schema.warning(msg, self._table, f._type, k)
                        if res2 and not f.select:
                            cr.execute('DROP INDEX "%s_%s_index"' % (self._table, k))
                            cr.commit()
                            msg = "Table '%s': dropping index for column '%s' of type '%s' as it is not required anymore"
                            _schema.debug(msg, self._table, k, f._type)

                        if isinstance(f, fields.many2one):
                            dest_model = self.pool.get(f._obj)
                            if dest_model._table != 'ir_actions':
                                self._m2o_fix_foreign_key(cr, self._table, k, dest_model, f.ondelete)

                # The field doesn't exist in database. Create it if necessary.
                else:
                    if not isinstance(f, fields.function) or f.store:
                        # add the missing field
                        cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, k, get_pg_type(f)[1]))
                        cr.execute("COMMENT ON COLUMN %s.\"%s\" IS %%s" % (self._table, k), (f.string,))
                        _schema.debug("Table '%s': added column '%s' with definition=%s",
                                      self._table, k, get_pg_type(f)[1])

                        # initialize it
                        if not create and k in self._defaults:
                            if callable(self._defaults[k]):
                                default = self._defaults[k](self, cr, SUPERUSER_ID, context)
                            else:
                                default = self._defaults[k]

                            ss = self._columns[k]._symbol_set
                            query = 'UPDATE "%s" SET "%s"=%s' % (self._table, k, ss[0])
                            cr.execute(query, (ss[1](default),))
                            cr.commit()
                            _logger.debug("Table '%s': setting default value of new column %s", self._table, k)

                        # remember the functions to call for the stored fields
                        if isinstance(f, fields.function):
                            order = 10
                            if f.store is not True: # i.e. if f.store is a dict
                                order = f.store[f.store.keys()[0]][2]
                            todo_end.append((order, self._update_store, (f, k)))

                        # and add constraints if needed
                        if isinstance(f, fields.many2one):
                            if not self.pool.get(f._obj):
                                raise except_orm('Programming Error', 'There is no reference available for %s' % (f._obj,))
                            dest_model = self.pool.get(f._obj)
                            ref = dest_model._table
                            # ir_actions is inherited so foreign key doesn't work on it
                            if ref != 'ir_actions':
                                self._m2o_add_foreign_key_checked(k, dest_model, f.ondelete)
                        if f.select:
                            cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (self._table, k, self._table, k))
                        if f.required:
                            try:
                                cr.commit()
                                cr.execute('ALTER TABLE "%s" ALTER COLUMN "%s" SET NOT NULL' % (self._table, k), log_exceptions=False)
                                _schema.debug("Table '%s': column '%s': added a NOT NULL constraint",
                                              self._table, k)
                            except Exception:
                                msg = "WARNING: unable to set column %s of table %s not null !\n"\
                                    "Try to re-run: openerp-server --update=module\n"\
                                    "If it doesn't work, update records and execute manually:\n"\
                                    "ALTER TABLE %s ALTER COLUMN %s SET NOT NULL"
                                _logger.warning(msg, k, self._table, self._table, k)
                        cr.commit()

    else:
        # _auto is False: the table is managed elsewhere (e.g. a SQL view);
        # only detect whether it exists so init SQL can run below.
        cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
        create = not bool(cr.fetchone())

    cr.commit()     # start a new transaction

    self._add_sql_constraints(cr)

    if create:
        self._execute_sql(cr)

    if store_compute:
        self._parent_store_compute(cr)
        cr.commit()

    return todo_end
def _auto_end(self, cr, context=None):
""" Create the foreign keys recorded by _auto_init. """
for t, k, r, d in self._foreign_keys:
cr.execute('ALTER TABLE "%s" ADD FOREIGN KEY ("%s") REFERENCES "%s" ON DELETE %s' % (t, k, r, d))
self._save_constraint(cr, "%s_%s_fkey" % (t, k), 'f')
cr.commit()
del self._foreign_keys
def _table_exist(self, cr):
cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (self._table,))
return cr.rowcount
def _create_table(self, cr):
    """Create the model's table with only its serial primary key, and
    attach the model description as the table comment."""
    table = self._table
    cr.execute('CREATE TABLE "%s" (id SERIAL NOT NULL, PRIMARY KEY(id))' % (table,))
    # the description goes through a bind parameter, not interpolation
    cr.execute(('COMMENT ON TABLE "%s" IS %%s' % table), (self._description,))
    _schema.debug("Table '%s': created", table)
def _parent_columns_exist(self, cr):
cr.execute("""SELECT c.relname
FROM pg_class c, pg_attribute a
WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
""", (self._table, 'parent_left'))
return cr.rowcount
def _create_parent_columns(self, cr):
    """Add the parent_left/parent_right integer columns used by the
    nested-set parent store, then sanity-check that the model declares
    matching indexed fields and a safe ondelete rule on the parent field.
    The checks only log errors; they never abort."""
    cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_left" INTEGER' % (self._table,))
    cr.execute('ALTER TABLE "%s" ADD COLUMN "parent_right" INTEGER' % (self._table,))
    if 'parent_left' not in self._columns:
        # the DB column exists now, but the model should declare it too
        _logger.error('create a column parent_left on object %s: fields.integer(\'Left Parent\', select=1)',
                      self._table)
        _schema.debug("Table '%s': added column '%s' with definition=%s",
                      self._table, 'parent_left', 'INTEGER')
    elif not self._columns['parent_left'].select:
        # nested-set queries filter on parent_left: it must be indexed
        _logger.error('parent_left column on object %s must be indexed! Add select=1 to the field definition)',
                      self._table)
    if 'parent_right' not in self._columns:
        _logger.error('create a column parent_right on object %s: fields.integer(\'Right Parent\', select=1)',
                      self._table)
        _schema.debug("Table '%s': added column '%s' with definition=%s",
                      self._table, 'parent_right', 'INTEGER')
    elif not self._columns['parent_right'].select:
        _logger.error('parent_right column on object %s must be indexed! Add select=1 to the field definition)',
                      self._table)
    if self._columns[self._parent_name].ondelete not in ('cascade', 'restrict'):
        # other ondelete rules would leave the nested-set intervals corrupted
        _logger.error("The column %s on object %s must be set as ondelete='cascade' or 'restrict'",
                      self._parent_name, self._name)
    cr.commit()
def _add_log_columns(self, cr):
    """Ensure the audit-trail columns listed in LOG_ACCESS_COLUMNS
    (create_uid/create_date/write_uid/write_date) exist on the table,
    adding and committing any that are missing."""
    for column_name, definition in LOG_ACCESS_COLUMNS.iteritems():
        cr.execute("""
            SELECT c.relname
              FROM pg_class c, pg_attribute a
             WHERE c.relname=%s AND a.attname=%s AND c.oid=a.attrelid
            """, (self._table, column_name))
        if cr.rowcount:
            continue  # already present
        cr.execute('ALTER TABLE "%s" ADD COLUMN "%s" %s' % (self._table, column_name, definition))
        cr.commit()
        _schema.debug("Table '%s': added column '%s' with definition=%s",
                      self._table, column_name, definition)
def _select_column_data(self, cr):
# attlen is the number of bytes necessary to represent the type when
# the type has a fixed size. If the type has a varying size attlen is
# -1 and atttypmod is the size limit + 4, or -1 if there is no limit.
cr.execute("SELECT c.relname,a.attname,a.attlen,a.atttypmod,a.attnotnull,a.atthasdef,t.typname,CASE WHEN a.attlen=-1 THEN (CASE WHEN a.atttypmod=-1 THEN 0 ELSE a.atttypmod-4 END) ELSE a.attlen END as size " \
"FROM pg_class c,pg_attribute a,pg_type t " \
"WHERE c.relname=%s " \
"AND c.oid=a.attrelid " \
"AND a.atttypid=t.oid", (self._table,))
return dict(map(lambda x: (x['attname'], x),cr.dictfetchall()))
def _o2m_raise_on_missing_reference(self, cr, f):
# TODO this check should be a method on fields.one2many.
other = self.pool.get(f._obj)
if other:
# TODO the condition could use fields_get_keys().
if f._fields_id not in other._columns.keys():
if f._fields_id not in other._inherit_fields.keys():
raise except_orm('Programming Error', "There is no reference field '%s' found for '%s'" % (f._fields_id, f._obj,))
def _m2m_raise_or_create_relation(self, cr, f):
    """Create the many2many relation table for field ``f`` if missing.

    Registers the table in ir_model_relation (for uninstall cleanup),
    then creates it with its two NOT NULL integer columns, a UNIQUE
    pair constraint, foreign keys towards both sides (skipped when the
    target is a SQL view) and one index per column. Raises except_orm
    when the destination model does not exist.
    """
    m2m_tbl, col1, col2 = f._sql_names(self)
    # always track the relation table, even if it already exists
    self._save_relation_table(cr, m2m_tbl)
    cr.execute("SELECT relname FROM pg_class WHERE relkind IN ('r','v') AND relname=%s", (m2m_tbl,))
    if not cr.dictfetchall():
        if not self.pool.get(f._obj):
            raise except_orm('Programming Error', 'Many2Many destination model does not exist: `%s`' % (f._obj,))
        dest_model = self.pool.get(f._obj)
        ref = dest_model._table
        cr.execute('CREATE TABLE "%s" ("%s" INTEGER NOT NULL, "%s" INTEGER NOT NULL, UNIQUE("%s","%s"))' % (m2m_tbl, col1, col2, col1, col2))

        # create foreign key references with ondelete=cascade, unless the targets are SQL views
        cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (ref,))
        if not cr.fetchall():
            self._m2o_add_foreign_key_unchecked(m2m_tbl, col2, dest_model, 'cascade')
        cr.execute("SELECT relkind FROM pg_class WHERE relkind IN ('v') AND relname=%s", (self._table,))
        if not cr.fetchall():
            self._m2o_add_foreign_key_unchecked(m2m_tbl, col1, self, 'cascade')

        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col1, m2m_tbl, col1))
        cr.execute('CREATE INDEX "%s_%s_index" ON "%s" ("%s")' % (m2m_tbl, col2, m2m_tbl, col2))
        cr.execute("COMMENT ON TABLE \"%s\" IS 'RELATION BETWEEN %s AND %s'" % (m2m_tbl, self._table, ref))
        cr.commit()
        _schema.debug("Create table '%s': m2m relation between '%s' and '%s'", m2m_tbl, self._table, ref)
def _add_sql_constraints(self, cr):
    """
    Modify this model's database table constraints so they match the one in
    _sql_constraints.

    For each (key, definition, message) in ``_sql_constraints``:
    - add the constraint if it does not exist yet;
    - drop then re-add it if its definition in the database differs.
    Failures are logged (with a hint how to apply the DDL manually) and
    rolled back, so one bad constraint does not abort the whole upgrade.
    """
    def unify_cons_text(txt):
        # Normalize whitespace so textual comparison with pg_get_constraintdef works.
        return txt.lower().replace(', ',',').replace(' (','(')

    for (key, con, _) in self._sql_constraints:
        conname = '%s_%s' % (self._table, key)
        self._save_constraint(cr, conname, 'u')
        cr.execute("SELECT conname, pg_catalog.pg_get_constraintdef(oid, true) as condef FROM pg_constraint where conname=%s", (conname,))
        existing_constraints = cr.dictfetchall()
        sql_actions = {
            'drop': {
                'execute': False,
                'query': 'ALTER TABLE "%s" DROP CONSTRAINT "%s"' % (self._table, conname, ),
                'msg_ok': "Table '%s': dropped constraint '%s'. Reason: its definition changed from '%%s' to '%s'" % (
                    self._table, conname, con),
                'msg_err': "Table '%s': unable to drop \'%s\' constraint !" % (self._table, con),
                'order': 1,
            },
            'add': {
                'execute': False,
                'query': 'ALTER TABLE "%s" ADD CONSTRAINT "%s" %s' % (self._table, conname, con,),
                'msg_ok': "Table '%s': added constraint '%s' with definition=%s" % (self._table, conname, con),
                'msg_err': "Table '%s': unable to add \'%s\' constraint !\n If you want to have it, you should update the records and execute manually:\n%%s" % (
                    self._table, con),
                'order': 2,
            },
        }

        if not existing_constraints:
            # constraint does not exists:
            sql_actions['add']['execute'] = True
            sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )
        elif unify_cons_text(con) not in [unify_cons_text(item['condef']) for item in existing_constraints]:
            # constraint exists but its definition has changed:
            sql_actions['drop']['execute'] = True
            sql_actions['drop']['msg_ok'] = sql_actions['drop']['msg_ok'] % (existing_constraints[0]['condef'].lower(), )
            sql_actions['add']['execute'] = True
            sql_actions['add']['msg_err'] = sql_actions['add']['msg_err'] % (sql_actions['add']['query'], )

        # we need to add the constraint:
        sql_actions = list(sql_actions.values())
        sql_actions.sort(key=lambda x: x['order'])
        for sql_action in [action for action in sql_actions if action['execute']]:
            try:
                cr.execute(sql_action['query'])
                cr.commit()
                _schema.debug(sql_action['msg_ok'])
            # FIX: was a bare `except:` which also swallowed KeyboardInterrupt
            # and SystemExit; only database/application errors should be
            # logged and rolled back here.
            except Exception:
                _schema.warning(sql_action['msg_err'])
                cr.rollback()
def _execute_sql(self, cr):
    """Run each ';'-separated statement from the optional ``_sql`` attribute."""
    # NOTE(review): the split is naive — a literal ';' inside a string
    # constant would break a statement; presumably _sql never contains one.
    if not hasattr(self, "_sql"):
        return
    for raw in self._sql.split(';'):
        statement = raw.replace('\n', '').strip()
        if statement:
            cr.execute(statement)
            cr.commit()
#
# Update objects that use this one, so they refresh their _inherits fields
#
def _inherits_reload_src(self):
    """Recompute the _inherit_fields mapping on every model that declares
    this model in its _inherits."""
    for model in self.pool.models.values():
        if self._name not in model._inherits:
            continue
        model._inherits_reload()
def _inherits_reload(self):
    """Recompute the _inherit_fields mapping.
    This will also call itself on each inherits'd child model.
    """
    mapping = {}
    for parent_name, link_field in self._inherits.iteritems():
        parent = self.pool.get(parent_name)
        # Direct columns of the parent...
        for col_name, col in parent._columns.iteritems():
            mapping[col_name] = (parent_name, link_field, col, parent_name)
        # ...then the columns the parent itself inherits, keeping the
        # original owner model in the 4th slot.
        for col_name, info in parent._inherit_fields.iteritems():
            mapping[col_name] = (parent_name, link_field, info[2], info[3])
    self._inherit_fields = mapping
    self._all_columns = self._get_column_infos()
    self._inherits_reload_src()
def _get_column_infos(self):
    """Return a dict mapping every field name (direct columns and fields
    inherited via _inherits) to a ``fields.column_info`` struct."""
    infos = {}
    for name, (parent, m2o, col, original_parent) in self._inherit_fields.iteritems():
        infos[name] = fields.column_info(name, col, parent, m2o, original_parent)
    # Local columns are inserted second so they shadow inherited ones.
    for name, col in self._columns.iteritems():
        infos[name] = fields.column_info(name, col)
    return infos
def _inherits_check(self):
    """Validate the _inherits declarations: every referenced link field must
    exist as a required many2one with ondelete cascade/restrict, creating or
    fixing the column definition in place when it does not comply."""
    for table, field_name in self._inherits.items():
        if field_name not in self._columns:
            # Link column missing entirely: auto-create a compliant many2one.
            _logger.info('Missing many2one field definition for _inherits reference "%s" in "%s", using default one.', field_name, self._name)
            self._columns[field_name] = fields.many2one(table, string="Automatically created field to link to parent %s" % table,
                 required=True, ondelete="cascade")
        elif not self._columns[field_name].required or self._columns[field_name].ondelete.lower() not in ("cascade", "restrict"):
            # Column exists but is too lax: force required + cascade so the
            # parent record cannot be deleted out from under the child.
            _logger.warning('Field definition for _inherits reference "%s" in "%s" must be marked as "required" with ondelete="cascade" or "restrict", forcing it to required + cascade.', field_name, self._name)
            self._columns[field_name].required = True
            self._columns[field_name].ondelete = "cascade"
#def __getattr__(self, name):
# """
# Proxies attribute accesses to the `inherits` parent so we can call methods defined on the inherited parent
# (though inherits doesn't use Python inheritance).
# Handles translating between local ids and remote ids.
# Known issue: doesn't work correctly when using python's own super(), don't involve inherit-based inheritance
# when you have inherits.
# """
# for model, field in self._inherits.iteritems():
# proxy = self.pool.get(model)
# if hasattr(proxy, name):
# attribute = getattr(proxy, name)
# if not hasattr(attribute, '__call__'):
# return attribute
# break
# else:
# return super(orm, self).__getattr__(name)
# def _proxy(cr, uid, ids, *args, **kwargs):
# objects = self.browse(cr, uid, ids, kwargs.get('context', None))
# lst = [obj[field].id for obj in objects if obj[field]]
# return getattr(proxy, name)(cr, uid, lst, *args, **kwargs)
# return _proxy
def fields_get(self, cr, user, allfields=None, context=None, write_access=True):
    """ Return the definition of each field.

    The returned value is a dictionary (indiced by field name) of
    dictionaries. The _inherits'd fields are included. The string, help,
    and selection (if present) attributes are translated.

    :param cr: database cursor
    :param user: current user id
    :param allfields: list of fields
    :param context: context arguments, like lang, time zone
    :return: dictionary of field dictionaries, each one describing a field of the business object
    :raise AccessError: * if user has no create/write rights on the requested object

    """
    if context is None:
        context = {}

    # NOTE(review): the ``write_access`` parameter is unconditionally
    # overwritten here, so callers cannot actually force it.
    write_access = self.check_access_rights(cr, user, 'write', raise_exception=False) \
        or self.check_access_rights(cr, user, 'create', raise_exception=False)

    res = {}

    translation_obj = self.pool.get('ir.translation')
    # Inherited fields first, so local definitions below take precedence.
    for parent in self._inherits:
        res.update(self.pool.get(parent).fields_get(cr, user, allfields, context))

    for f, field in self._columns.iteritems():
        # Skip fields not requested, or hidden from the user by group ACLs.
        if (allfields and f not in allfields) or \
            (field.groups and not self.user_has_groups(cr, user, groups=field.groups, context=context)):
            continue

        res[f] = fields.field_to_dict(self, cr, user, field, context=context)

        # Without write/create rights everything is presented read-only.
        if not write_access:
            res[f]['readonly'] = True
            res[f]['states'] = {}

        # Translate label, help text and selection values when a language
        # is set in the context.
        if 'lang' in context:
            if 'string' in res[f]:
                res_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'field', context['lang'])
                if res_trans:
                    res[f]['string'] = res_trans
            if 'help' in res[f]:
                help_trans = translation_obj._get_source(cr, user, self._name + ',' + f, 'help', context['lang'])
                if help_trans:
                    res[f]['help'] = help_trans
            if 'selection' in res[f]:
                if isinstance(field.selection, (tuple, list)):
                    sel = field.selection
                    sel2 = []
                    for key, val in sel:
                        val2 = None
                        if val:
                            val2 = translation_obj._get_source(cr, user, self._name + ',' + f, 'selection', context['lang'], val)
                        sel2.append((key, val2 or val))
                    res[f]['selection'] = sel2

    return res
def check_field_access_rights(self, cr, user, operation, fields, context=None):
    """
    Check the user access rights on the given fields. This raises Access
    Denied if the user does not have the rights. Otherwise it returns the
    fields (as is if the fields is not falsy, or the readable/writable
    fields if fields is falsy).
    """
    def accessible(field_name):
        """Predicate: may the user access this field name?"""
        # Ignore requested field if it doesn't exist. This is ugly but
        # it seems to happen at least with 'name_alias' on res.partner.
        if field_name not in self._all_columns:
            return True
        field = self._all_columns[field_name].column
        if user != SUPERUSER_ID and field.groups:
            return self.user_has_groups(cr, user, groups=field.groups, context=context)
        return True

    if not fields:
        # No explicit list: return all the fields the user may access.
        fields = [f for f in self._all_columns.keys() if accessible(f)]
    else:
        denied = [f for f in fields if not accessible(f)]
        if denied:
            _logger.warning('Access Denied by ACLs for operation: %s, uid: %s, model: %s, fields: %s', operation, user, self._name, ', '.join(denied))
            raise except_orm(
                _('Access Denied'),
                _('The requested operation cannot be completed due to security restrictions. '
                  'Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
                (self._description, operation))
    return fields
def read(self, cr, user, ids, fields=None, context=None, load='_classic_read'):
    """ Read records with given ids with the given fields

    :param cr: database cursor
    :param user: current user id
    :param ids: id or list of the ids of the records to read
    :param fields: optional list of field names to return (default: all fields would be returned)
    :type fields: list (example ['field_name_1', ...])
    :param context: optional context dictionary - it may contains keys for specifying certain options
                    like ``context_lang``, ``context_tz`` to alter the results of the call.
                    A special ``bin_size`` boolean flag may also be passed in the context to request the
                    value of all fields.binary columns to be returned as the size of the binary instead of its
                    contents. This can also be selectively overriden by passing a field-specific flag
                    in the form ``bin_size_XXX: True/False`` where ``XXX`` is the name of the field.
                    Note: The ``bin_size_XXX`` form is new in OpenERP v6.0.
    :return: list of dictionaries((dictionary per record asked)) with requested field values
    :rtype: [{'name_of_the_field': value, ...}, ...]
    :raise AccessError: * if user has no read rights on the requested object
                        * if user tries to bypass access rules for read on the requested object

    """
    if not context:
        context = {}
    self.check_access_rights(cr, user, 'read')
    # ACL-filter the requested fields (or compute the readable set).
    fields = self.check_field_access_rights(cr, user, 'read', fields)
    # Normalize ids to a list; a scalar id means "return a single dict".
    if isinstance(ids, (int, long)):
        select = [ids]
    else:
        select = ids
    # Accept dict entries of the form {'id': ...} as well as bare ids.
    select = map(lambda x: isinstance(x, dict) and x['id'] or x, select)
    result = self._read_flat(cr, user, select, fields, context, load)

    # SQL NULLs become False in the returned dicts.
    for r in result:
        for key, v in r.items():
            if v is None:
                r[key] = False

    if isinstance(ids, (int, long, dict)):
        # Scalar input: unwrap to a single record dict (or False if missing).
        return result and result[0] or False
    return result
def _read_flat(self, cr, user, ids, fields_to_read, context=None, load='_classic_read'):
    """Low-level read: fetch stored columns with one SQL query (applying
    ir.rule record rules), then layer on translations, _inherits'd fields,
    symbol_get conversions, function fields and per-field group masking.

    Returns a list of dicts, one per readable record id.
    """
    if not context:
        context = {}
    if not ids:
        return []
    if fields_to_read is None:
        fields_to_read = self._columns.keys()

    # Construct a clause for the security rules.
    # 'tables' hold the list of tables necessary for the SELECT including the ir.rule clauses,
    # or will at least contain self._table.
    rule_clause, rule_params, tables = self.pool.get('ir.rule').domain_get(cr, user, self._name, 'read', context=context)

    # all inherited fields + all non inherited fields for which the attribute whose name is in load is True
    fields_pre = [f for f in fields_to_read if
                  f == self.CONCURRENCY_CHECK_FIELD
                  or (f in self._columns and getattr(self._columns[f], '_classic_write'))
                  ] + self._inherits.values()

    res = []
    if len(fields_pre):
        def convert_field(f):
            """Render one column as a SELECT expression, with special cases
            for dates, the concurrency pseudo-field and bin_size mode."""
            f_qual = '%s."%s"' % (self._table, f) # need fully-qualified references in case len(tables) > 1
            if f in ('create_date', 'write_date'):
                return "date_trunc('second', %s) as %s" % (f_qual, f)
            if f == self.CONCURRENCY_CHECK_FIELD:
                if self._log_access:
                    return "COALESCE(%s.write_date, %s.create_date, (now() at time zone 'UTC'))::timestamp AS %s" % (self._table, self._table, f,)
                return "(now() at time zone 'UTC')::timestamp AS %s" % (f,)
            if isinstance(self._columns[f], fields.binary) and context.get('bin_size', False):
                return 'length(%s) as "%s"' % (f_qual, f)
            return f_qual

        fields_pre2 = map(convert_field, fields_pre)
        order_by = self._parent_order or self._order
        select_fields = ','.join(fields_pre2 + ['%s.id' % self._table])
        query = 'SELECT %s FROM %s WHERE %s.id IN %%s' % (select_fields, ','.join(tables), self._table)
        if rule_clause:
            query += " AND " + (' OR '.join(rule_clause))
        query += " ORDER BY " + order_by
        # Fetch in id-batches to stay under the SQL parameter limits.
        for sub_ids in cr.split_for_in_conditions(ids):
            cr.execute(query, [tuple(sub_ids)] + rule_params)
            results = cr.dictfetchall()
            result_ids = [x['id'] for x in results]
            # Raise Access Denied / Missing document if rules filtered rows out.
            self._check_record_rules_result_count(cr, user, sub_ids, result_ids, 'read', context=context)
            res.extend(results)
    else:
        # No stored column requested: just echo the ids back.
        res = map(lambda x: {'id': x}, ids)

    # Overlay stored translations for translatable columns.
    if context.get('lang'):
        for f in fields_pre:
            if f == self.CONCURRENCY_CHECK_FIELD:
                continue
            if self._columns[f].translate:
                ids = [x['id'] for x in res]
                #TODO: optimize out of this loop
                res_trans = self.pool.get('ir.translation')._get_ids(cr, user, self._name+','+f, 'model', context['lang'], ids)
                for r in res:
                    r[f] = res_trans.get(r['id'], False) or r[f]

    # Pull the _inherits'd fields by reading each parent model through its
    # link column, then merging the parent values into our records.
    for table in self._inherits:
        col = self._inherits[table]
        cols = [x for x in intersect(self._inherit_fields.keys(), fields_to_read) if x not in self._columns.keys()]
        if not cols:
            continue
        res2 = self.pool.get(table).read(cr, user, [x[col] for x in res], cols, context, load)

        res3 = {}
        for r in res2:
            res3[r['id']] = r
            del r['id']

        for record in res:
            if not record[col]: # if the record is deleted from _inherits table?
                continue
            record.update(res3[record[col]])
            if col not in fields_to_read:
                del record[col]

    # all fields which need to be post-processed by a simple function (symbol_get)
    fields_post = filter(lambda x: x in self._columns and self._columns[x]._symbol_get, fields_to_read)
    if fields_post:
        for r in res:
            for f in fields_post:
                r[f] = self._columns[f]._symbol_get(r[f])
    ids = [x['id'] for x in res]

    # all non inherited fields for which the attribute whose name is in load is False
    fields_post = filter(lambda x: x in self._columns and not getattr(self._columns[x], load), fields_to_read)

    # Compute POST fields: group function fields by their _multi key so a
    # multi-field function is evaluated only once for all its fields.
    todo = {}
    for f in fields_post:
        todo.setdefault(self._columns[f]._multi, [])
        todo[self._columns[f]._multi].append(f)
    for key, val in todo.items():
        if key:
            res2 = self._columns[val[0]].get(cr, self, ids, val, user, context=context, values=res)
            assert res2 is not None, \
                'The function field "%s" on the "%s" model returned None\n' \
                '(a dictionary was expected).' % (val[0], self._name)
            for pos in val:
                for record in res:
                    # SECURITY NOTE(review): eval() on a function-field result;
                    # only safe if the field implementation is trusted.
                    if isinstance(res2[record['id']], str): res2[record['id']] = eval(res2[record['id']]) #TOCHECK : why got string instend of dict in python2.6
                    multi_fields = res2.get(record['id'],{})
                    if multi_fields:
                        record[pos] = multi_fields.get(pos,[])
        else:
            for f in val:
                res2 = self._columns[f].get(cr, self, ids, f, user, context=context, values=res)
                for record in res:
                    if res2:
                        record[f] = res2[record['id']]
                    else:
                        record[f] = []

    # Warn about deprecated fields now that fields_pre and fields_post are computed
    # Explicitly use list() because we may receive tuples
    for f in list(fields_pre) + list(fields_post):
        field_column = self._all_columns.get(f) and self._all_columns.get(f).column
        if field_column and field_column.deprecated:
            _logger.warning('Field %s.%s is deprecated: %s', self._name, f, field_column.deprecated)

    # Per-field 'read' group restriction: blank out values the user's groups
    # do not grant (one membership query per group; candidate for caching).
    readonly = None
    for vals in res:
        for field in vals.copy():
            fobj = None
            if field in self._columns:
                fobj = self._columns[field]
            if not fobj:
                continue
            groups = fobj.read
            if groups:
                edit = False
                for group in groups:
                    module = group.split(".")[0]
                    grp = group.split(".")[1]
                    cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
                               (grp, module, 'res.groups', user))
                    readonly = cr.fetchall()
                    if readonly[0][0] >= 1:
                        edit = True
                        break
                    elif readonly[0][0] == 0:
                        edit = False
                    else:
                        edit = False

                if not edit:
                    # Replace the value with a type-appropriate placeholder.
                    if type(vals[field]) == type([]):
                        vals[field] = []
                    elif type(vals[field]) == type(0.0):
                        vals[field] = 0
                    elif type(vals[field]) == type(''):
                        vals[field] = '=No Permission='
                    else:
                        vals[field] = False
    return res
# TODO check READ access
def perm_read(self, cr, user, ids, context=None, details=True):
    """
    Returns some metadata about the given records.

    :param details: if True, \*_uid fields are replaced with the name of the user
    :return: list of ownership dictionaries for each requested record
    :rtype: list of dictionaries with the following keys:

                * id: object id
                * create_uid: user who created the record
                * create_date: date when the record was created
                * write_uid: last user who changed the record
                * write_date: date of the last change to the record
                * xmlid: XML ID to use to refer to this record (if there is one), in format ``module.name``
    """
    if not context:
        context = {}
    if not ids:
        return []
    uniq = isinstance(ids, (int, long))
    if uniq:
        ids = [ids]
    fields = ['id']
    if self._log_access:
        fields += ['create_uid', 'create_date', 'write_uid', 'write_date']
    quoted_table = '"%s"' % self._table
    fields_str = ",".join('%s.%s'%(quoted_table, field) for field in fields)
    query = '''SELECT %s, __imd.module, __imd.name
               FROM %s LEFT JOIN ir_model_data __imd
                   ON (__imd.model = %%s and __imd.res_id = %s.id)
               WHERE %s.id IN %%s''' % (fields_str, quoted_table, quoted_table, quoted_table)
    cr.execute(query, (self._name, tuple(ids)))
    res = cr.dictfetchall()
    for r in res:
        for key in r:
            r[key] = r[key] or False
            if details and key in ('write_uid', 'create_uid') and r[key]:
                try:
                    # Replace the numeric uid with the user's name_get pair.
                    r[key] = self.pool.get('res.users').name_get(cr, user, [r[key]])[0]
                except Exception:
                    pass # Leave the numeric uid there
        r['xmlid'] = ("%(module)s.%(name)s" % r) if r['name'] else False
        del r['name'], r['module']
    if uniq:
        # BUG FIX: was ``res[ids[0]]`` — indexing the result *list* with a
        # database id, which raised IndexError (or returned the wrong row)
        # for any id != 0. A scalar input yields a single-row result.
        return res[0]
    return res
def _check_concurrency(self, cr, ids, context):
    """Optimistic-locking check: raise ConcurrencyException if any of the
    given records was modified in the database after the timestamp the
    client stored in the context (under CONCURRENCY_CHECK_FIELD)."""
    if not context:
        return
    if not (context.get(self.CONCURRENCY_CHECK_FIELD) and self._log_access):
        return
    check_clause = "(id = %s AND %s < COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp)"
    for sub_ids in cr.split_for_in_conditions(ids):
        ids_to_check = []
        for id in sub_ids:
            # Client supplies timestamps keyed by "model,id"; pop so each
            # check is consumed only once.
            id_ref = "%s,%s" % (self._name, id)
            update_date = context[self.CONCURRENCY_CHECK_FIELD].pop(id_ref, None)
            if update_date:
                ids_to_check.extend([id, update_date])
        if not ids_to_check:
            continue
        # ids_to_check holds (id, timestamp) pairs flattened, hence the /2
        # (integer division in Python 2) for the number of OR'd clauses.
        cr.execute("SELECT id FROM %s WHERE %s" % (self._table, " OR ".join([check_clause]*(len(ids_to_check)/2))), tuple(ids_to_check))
        res = cr.fetchone()
        if res:
            # mention the first one only to keep the error message readable
            raise except_orm('ConcurrencyException', _('A document was modified since you last viewed it (%s:%d)') % (self._description, res[0]))
def _check_record_rules_result_count(self, cr, uid, ids, result_ids, operation, context=None):
    """Verify the returned rows after applying record rules matches
    the length of `ids`, and raise an appropriate exception if it does not.
    """
    ids, result_ids = set(ids), set(result_ids)
    missing_ids = ids - result_ids
    if missing_ids:
        # Attempt to distinguish record rule restriction vs deleted records,
        # to provide a more specific error message - check if the missing
        # ids still exist in the database.
        cr.execute('SELECT id FROM ' + self._table + ' WHERE id IN %s', (tuple(missing_ids),))
        if cr.rowcount:
            # the missing ids are (at least partially) hidden by access rules
            if uid == SUPERUSER_ID:
                return
            _logger.warning('Access Denied by record rules for operation: %s, uid: %s, model: %s', operation, uid, self._name)
            raise except_orm(_('Access Denied'),
                             _('The requested operation cannot be completed due to security restrictions. Please contact your system administrator.\n\n(Document type: %s, Operation: %s)') % \
                             (self._description, operation))
        else:
            # If we get here, the missing_ids are not in the database
            if operation in ('read','unlink'):
                # No need to warn about deleting an already deleted record.
                # And no error when reading a record that was deleted, to prevent spurious
                # errors for non-transactional search/read sequences coming from clients
                return
            _logger.warning('Failed operation on deleted record(s): %s, uid: %s, model: %s', operation, uid, self._name)
            raise except_orm(_('Missing document(s)'),
                             _('One of the documents you are trying to access has been deleted, please try again after refreshing.'))
def check_access_rights(self, cr, uid, operation, raise_exception=True): # no context on purpose.
    """Check the model-level ACL (ir.model.access) for ``operation``;
    depending on ``raise_exception`` this either raises or returns the
    boolean verdict."""
    access_model = self.pool.get('ir.model.access')
    return access_model.check(cr, uid, self._name, operation, raise_exception)
def check_access_rule(self, cr, uid, ids, operation, context=None):
    """Verifies that the operation given by ``operation`` is allowed for the user
    according to ir.rules.

    :param operation: one of ``write``, ``unlink``
    :raise except_orm: * if current ir.rules do not permit this operation.
    :return: None if the operation is allowed
    """
    if uid == SUPERUSER_ID:
        # Superuser bypasses record rules entirely.
        return

    if self.is_transient():
        # Only one single implicit access rule for transient models: owner only!
        # This is ok to hardcode because we assert that TransientModels always
        # have log_access enabled so that the create_uid column is always there.
        # And even with _inherits, these fields are always present in the local
        # table too, so no need for JOINs.
        cr.execute("""SELECT distinct create_uid
                      FROM %s
                      WHERE id IN %%s""" % self._table, (tuple(ids),))
        uids = [x[0] for x in cr.fetchall()]
        if len(uids) != 1 or uids[0] != uid:
            raise except_orm(_('Access Denied'),
                             _('For this kind of document, you may only access records you created yourself.\n\n(Document type: %s)') % (self._description,))
    else:
        # Regular models: build the rule WHERE clause and verify every
        # requested id survives it (batch by batch).
        where_clause, where_params, tables = self.pool.get('ir.rule').domain_get(cr, uid, self._name, operation, context=context)
        if where_clause:
            where_clause = ' and ' + ' and '.join(where_clause)
            for sub_ids in cr.split_for_in_conditions(ids):
                cr.execute('SELECT ' + self._table + '.id FROM ' + ','.join(tables) +
                           ' WHERE ' + self._table + '.id IN %s' + where_clause,
                           [sub_ids] + where_params)
                returned_ids = [x['id'] for x in cr.dictfetchall()]
                # Raises Access Denied / Missing document on any shortfall.
                self._check_record_rules_result_count(cr, uid, sub_ids, returned_ids, operation, context=context)
def _workflow_trigger(self, cr, uid, ids, trigger, context=None):
    """Fire the named workflow trigger once per record id, as a side effect
    of a CRUD operation."""
    workflow = netsvc.LocalService("workflow")
    trigger_fn = getattr(workflow, trigger)
    for res_id in ids:
        trigger_fn(uid, self._name, res_id, cr)
def _workflow_signal(self, cr, uid, ids, signal, context=None):
    """Send the given workflow signal to each record and return a dict
    mapping record id -> workflow validation result."""
    workflow = netsvc.LocalService("workflow")
    return dict((res_id, workflow.trg_validate(uid, self._name, res_id, signal, cr))
                for res_id in ids)
def unlink(self, cr, uid, ids, context=None):
    """
    Delete records with given ids

    :param cr: database cursor
    :param uid: current user id
    :param ids: id or list of ids
    :param context: (optional) context arguments, like lang, time zone
    :return: True
    :raise AccessError: * if user has no unlink rights on the requested object
                        * if user tries to bypass access rules for unlink on the requested object
    :raise UserError: if the record is default property for other records

    """
    if not ids:
        return True
    if isinstance(ids, (int, long)):
        ids = [ids]

    # Snapshot which stored (computed) fields must be recomputed on other
    # models BEFORE the rows disappear.
    result_store = self._store_get_values(cr, uid, ids, self._all_columns.keys(), context)

    self._check_concurrency(cr, ids, context)

    self.check_access_rights(cr, uid, 'unlink')

    ir_property = self.pool.get('ir.property')

    # Check if the records are used as default properties.
    domain = [('res_id', '=', False),
              ('value_reference', 'in', ['%s,%s' % (self._name, i) for i in ids]),
             ]
    if ir_property.search(cr, uid, domain, context=context):
        raise except_orm(_('Error'), _('Unable to delete this document because it is used as a default property'))

    # Delete the records' properties.
    property_ids = ir_property.search(cr, uid, [('res_id', 'in', ['%s,%s' % (self._name, i) for i in ids])], context=context)
    ir_property.unlink(cr, uid, property_ids, context=context)

    # Fire the delete trigger before the rows are actually removed.
    self._workflow_trigger(cr, uid, ids, 'trg_delete', context=context)

    self.check_access_rule(cr, uid, ids, 'unlink', context=context)
    pool_model_data = self.pool.get('ir.model.data')
    ir_values_obj = self.pool.get('ir.values')
    for sub_ids in cr.split_for_in_conditions(ids):
        cr.execute('delete from ' + self._table + ' ' \
                   'where id IN %s', (sub_ids,))

        # Removing the ir_model_data reference if the record being deleted is a record created by xml/csv file,
        # as these are not connected with real database foreign keys, and would be dangling references.
        # Note: following steps performed as admin to avoid access rights restrictions, and with no context
        #       to avoid possible side-effects during admin calls.
        # Step 1. Calling unlink of ir_model_data only for the affected IDS
        reference_ids = pool_model_data.search(cr, SUPERUSER_ID, [('res_id','in',list(sub_ids)),('model','=',self._name)])
        # Step 2. Marching towards the real deletion of referenced records
        if reference_ids:
            pool_model_data.unlink(cr, SUPERUSER_ID, reference_ids)

        # For the same reason, removing the record relevant to ir_values
        ir_value_ids = ir_values_obj.search(cr, uid,
                ['|',('value','in',['%s,%s' % (self._name, sid) for sid in sub_ids]),'&',('res_id','in',list(sub_ids)),('model','=',self._name)],
                context=context)
        if ir_value_ids:
            ir_values_obj.unlink(cr, uid, ir_value_ids, context=context)

    # Trigger recomputation of stored fields on OTHER models that depended
    # on the deleted rows (only ids that still exist are recomputed).
    for order, object, store_ids, fields in result_store:
        if object != self._name:
            obj = self.pool.get(object)
            cr.execute('select id from '+obj._table+' where id IN %s', (tuple(store_ids),))
            rids = map(lambda x: x[0], cr.fetchall())
            if rids:
                obj._store_set_values(cr, uid, rids, fields, context)

    return True
#
# TODO: Validate
#
def write(self, cr, user, ids, vals, context=None):
"""
Update records with given ids with the given field values
:param cr: database cursor
:param user: current user id
:type user: integer
:param ids: object id or list of object ids to update according to **vals**
:param vals: field values to update, e.g {'field_name': new_field_value, ...}
:type vals: dictionary
:param context: (optional) context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
:type context: dictionary
:return: True
:raise AccessError: * if user has no write rights on the requested object
* if user tries to bypass access rules for write on the requested object
:raise ValidateError: if user tries to enter invalid value for a field that is not in selection
:raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
**Note**: The type of field values to pass in ``vals`` for relationship fields is specific:
+ For a many2many field, a list of tuples is expected.
Here is the list of tuple that are accepted, with the corresponding semantics ::
(0, 0, { values }) link to a new record that needs to be created with the given values dictionary
(1, ID, { values }) update the linked record with id = ID (write *values* on it)
(2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
(3, ID) cut the link to the linked record with id = ID (delete the relationship between the two objects but does not delete the target object itself)
(4, ID) link to existing record with id = ID (adds a relationship)
(5) unlink all (like using (3,ID) for all linked records)
(6, 0, [IDs]) replace the list of linked IDs (like using (5) then (4,ID) for each ID in the list of IDs)
Example:
[(6, 0, [8, 5, 6, 4])] sets the many2many to ids [8, 5, 6, 4]
+ For a one2many field, a lits of tuples is expected.
Here is the list of tuple that are accepted, with the corresponding semantics ::
(0, 0, { values }) link to a new record that needs to be created with the given values dictionary
(1, ID, { values }) update the linked record with id = ID (write *values* on it)
(2, ID) remove and delete the linked record with id = ID (calls unlink on ID, that will delete the object completely, and the link to it as well)
Example:
[(0, 0, {'field_name':field_value_record1, ...}), (0, 0, {'field_name':field_value_record2, ...})]
+ For a many2one field, simply use the ID of target record, which must already exist, or ``False`` to remove the link.
+ For a reference field, use a string with the model name, a comma, and the target object id (example: ``'product.product, 5'``)
"""
readonly = None
self.check_field_access_rights(cr, user, 'write', vals.keys())
for field in vals.copy():
fobj = None
if field in self._columns:
fobj = self._columns[field]
elif field in self._inherit_fields:
fobj = self._inherit_fields[field][2]
if not fobj:
continue
groups = fobj.write
if groups:
edit = False
for group in groups:
module = group.split(".")[0]
grp = group.split(".")[1]
cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name=%s and module=%s and model=%s) and uid=%s", \
(grp, module, 'res.groups', user))
readonly = cr.fetchall()
if readonly[0][0] >= 1:
edit = True
break
if not edit:
vals.pop(field)
if not context:
context = {}
if not ids:
return True
if isinstance(ids, (int, long)):
ids = [ids]
self._check_concurrency(cr, ids, context)
self.check_access_rights(cr, user, 'write')
result = self._store_get_values(cr, user, ids, vals.keys(), context) or []
# No direct update of parent_left/right
vals.pop('parent_left', None)
vals.pop('parent_right', None)
parents_changed = []
parent_order = self._parent_order or self._order
if self._parent_store and (self._parent_name in vals):
# The parent_left/right computation may take up to
# 5 seconds. No need to recompute the values if the
# parent is the same.
# Note: to respect parent_order, nodes must be processed in
# order, so ``parents_changed`` must be ordered properly.
parent_val = vals[self._parent_name]
if parent_val:
query = "SELECT id FROM %s WHERE id IN %%s AND (%s != %%s OR %s IS NULL) ORDER BY %s" % \
(self._table, self._parent_name, self._parent_name, parent_order)
cr.execute(query, (tuple(ids), parent_val))
else:
query = "SELECT id FROM %s WHERE id IN %%s AND (%s IS NOT NULL) ORDER BY %s" % \
(self._table, self._parent_name, parent_order)
cr.execute(query, (tuple(ids),))
parents_changed = map(operator.itemgetter(0), cr.fetchall())
upd0 = []
upd1 = []
upd_todo = []
updend = []
direct = []
totranslate = context.get('lang', False) and (context['lang'] != 'en_US')
for field in vals:
field_column = self._all_columns.get(field) and self._all_columns.get(field).column
if field_column and field_column.deprecated:
_logger.warning('Field %s.%s is deprecated: %s', self._name, field, field_column.deprecated)
if field in self._columns:
if self._columns[field]._classic_write and not (hasattr(self._columns[field], '_fnct_inv')):
if (not totranslate) or not self._columns[field].translate:
upd0.append('"'+field+'"='+self._columns[field]._symbol_set[0])
upd1.append(self._columns[field]._symbol_set[1](vals[field]))
direct.append(field)
else:
upd_todo.append(field)
else:
updend.append(field)
if field in self._columns \
and hasattr(self._columns[field], 'selection') \
and vals[field]:
self._check_selection_field_value(cr, user, field, vals[field], context=context)
if self._log_access:
upd0.append('write_uid=%s')
upd0.append("write_date=(now() at time zone 'UTC')")
upd1.append(user)
if len(upd0):
self.check_access_rule(cr, user, ids, 'write', context=context)
for sub_ids in cr.split_for_in_conditions(ids):
cr.execute('update ' + self._table + ' set ' + ','.join(upd0) + ' ' \
'where id IN %s', upd1 + [sub_ids])
if cr.rowcount != len(sub_ids):
raise except_orm(_('AccessError'),
_('One of the records you are trying to modify has already been deleted (Document type: %s).') % self._description)
if totranslate:
# TODO: optimize
for f in direct:
if self._columns[f].translate:
src_trans = self.pool.get(self._name).read(cr, user, ids, [f])[0][f]
if not src_trans:
src_trans = vals[f]
# Inserting value to DB
self.write(cr, user, ids, {f: vals[f]})
self.pool.get('ir.translation')._set_ids(cr, user, self._name+','+f, 'model', context['lang'], ids, vals[f], src_trans)
# call the 'set' method of fields which are not classic_write
upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
# default element in context must be removed when call a one2many or many2many
rel_context = context.copy()
for c in context.items():
if c[0].startswith('default_'):
del rel_context[c[0]]
for field in upd_todo:
for id in ids:
result += self._columns[field].set(cr, self, id, field, vals[field], user, context=rel_context) or []
unknown_fields = updend[:]
for table in self._inherits:
col = self._inherits[table]
nids = []
for sub_ids in cr.split_for_in_conditions(ids):
cr.execute('select distinct "'+col+'" from "'+self._table+'" ' \
'where id IN %s', (sub_ids,))
nids.extend([x[0] for x in cr.fetchall()])
v = {}
for val in updend:
if self._inherit_fields[val][0] == table:
v[val] = vals[val]
unknown_fields.remove(val)
if v:
self.pool.get(table).write(cr, user, nids, v, context)
if unknown_fields:
_logger.warning(
'No such field(s) in model %s: %s.',
self._name, ', '.join(unknown_fields))
self._validate(cr, user, ids, context)
# TODO: use _order to set dest at the right position and not first node of parent
# We can't defer parent_store computation because the stored function
# fields that are computer may refer (directly or indirectly) to
# parent_left/right (via a child_of domain)
if parents_changed:
if self.pool._init:
self.pool._init_parent[self._name] = True
else:
order = self._parent_order or self._order
parent_val = vals[self._parent_name]
if parent_val:
clause, params = '%s=%%s' % (self._parent_name,), (parent_val,)
else:
clause, params = '%s IS NULL' % (self._parent_name,), ()
for id in parents_changed:
cr.execute('SELECT parent_left, parent_right FROM %s WHERE id=%%s' % (self._table,), (id,))
pleft, pright = cr.fetchone()
distance = pright - pleft + 1
# Positions of current siblings, to locate proper insertion point;
# this can _not_ be fetched outside the loop, as it needs to be refreshed
# after each update, in case several nodes are sequentially inserted one
# next to the other (i.e computed incrementally)
cr.execute('SELECT parent_right, id FROM %s WHERE %s ORDER BY %s' % (self._table, clause, parent_order), params)
parents = cr.fetchall()
# Find Position of the element
position = None
for (parent_pright, parent_id) in parents:
if parent_id == id:
break
position = parent_pright + 1
# It's the first node of the parent
if not position:
if not parent_val:
position = 1
else:
cr.execute('select parent_left from '+self._table+' where id=%s', (parent_val,))
position = cr.fetchone()[0] + 1
if pleft < position <= pright:
raise except_orm(_('UserError'), _('Recursivity Detected.'))
if pleft < position:
cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
cr.execute('update '+self._table+' set parent_left=parent_left+%s, parent_right=parent_right+%s where parent_left>=%s and parent_left<%s', (position-pleft, position-pleft, pleft, pright))
else:
cr.execute('update '+self._table+' set parent_left=parent_left+%s where parent_left>=%s', (distance, position))
cr.execute('update '+self._table+' set parent_right=parent_right+%s where parent_right>=%s', (distance, position))
cr.execute('update '+self._table+' set parent_left=parent_left-%s, parent_right=parent_right-%s where parent_left>=%s and parent_left<%s', (pleft-position+distance, pleft-position+distance, pleft+distance, pright+distance))
result += self._store_get_values(cr, user, ids, vals.keys(), context)
result.sort()
done = {}
for order, object, ids_to_update, fields_to_recompute in result:
key = (object, tuple(fields_to_recompute))
done.setdefault(key, {})
# avoid to do several times the same computation
todo = []
for id in ids_to_update:
if id not in done[key]:
done[key][id] = True
todo.append(id)
self.pool.get(object)._store_set_values(cr, user, todo, fields_to_recompute, context)
self._workflow_trigger(cr, user, ids, 'trg_write', context=context)
return True
#
# TODO: Should set perm to user.xxx
#
def create(self, cr, user, vals, context=None):
"""
Create a new record for the model.
The values for the new record are initialized using the ``vals``
argument, and if necessary the result of ``default_get()``.
:param cr: database cursor
:param user: current user id
:type user: integer
:param vals: field values for new record, e.g {'field_name': field_value, ...}
:type vals: dictionary
:param context: optional context arguments, e.g. {'lang': 'en_us', 'tz': 'UTC', ...}
:type context: dictionary
:return: id of new record created
:raise AccessError: * if user has no create rights on the requested object
* if user tries to bypass access rules for create on the requested object
:raise ValidateError: if user tries to enter invalid value for a field that is not in selection
:raise UserError: if a loop would be created in a hierarchy of objects a result of the operation (such as setting an object as its own parent)
**Note**: The type of field values to pass in ``vals`` for relationship fields is specific.
Please see the description of the :py:meth:`~osv.osv.osv.write` method for details about the possible values and how
to specify them.
"""
if not context:
context = {}
if self.is_transient():
self._transient_vacuum(cr, user)
self.check_access_rights(cr, user, 'create')
if self._log_access:
for f in LOG_ACCESS_COLUMNS:
if vals.pop(f, None) is not None:
_logger.warning(
'Field `%s` is not allowed when creating the model `%s`.',
f, self._name)
vals = self._add_missing_default_values(cr, user, vals, context)
tocreate = {}
for v in self._inherits:
if self._inherits[v] not in vals:
tocreate[v] = {}
else:
tocreate[v] = {'id': vals[self._inherits[v]]}
(upd0, upd1, upd2) = ('', '', [])
upd_todo = []
unknown_fields = []
for v in vals.keys():
if v in self._inherit_fields and v not in self._columns:
(table, col, col_detail, original_parent) = self._inherit_fields[v]
tocreate[table][v] = vals[v]
del vals[v]
else:
if (v not in self._inherit_fields) and (v not in self._columns):
del vals[v]
unknown_fields.append(v)
if unknown_fields:
_logger.warning(
'No such field(s) in model %s: %s.',
self._name, ', '.join(unknown_fields))
# Try-except added to filter the creation of those records whose filds are readonly.
# Example : any dashboard which has all the fields readonly.(due to Views(database views))
try:
cr.execute("SELECT nextval('"+self._sequence+"')")
except:
raise except_orm(_('UserError'),
_('You cannot perform this operation. New Record Creation is not allowed for this object as this object is for reporting purpose.'))
id_new = cr.fetchone()[0]
for table in tocreate:
if self._inherits[table] in vals:
del vals[self._inherits[table]]
record_id = tocreate[table].pop('id', None)
# When linking/creating parent records, force context without 'no_store_function' key that
# defers stored functions computing, as these won't be computed in batch at the end of create().
parent_context = dict(context)
parent_context.pop('no_store_function', None)
if record_id is None or not record_id:
record_id = self.pool.get(table).create(cr, user, tocreate[table], context=parent_context)
else:
self.pool.get(table).write(cr, user, [record_id], tocreate[table], context=parent_context)
upd0 += ',' + self._inherits[table]
upd1 += ',%s'
upd2.append(record_id)
#Start : Set bool fields to be False if they are not touched(to make search more powerful)
bool_fields = [x for x in self._columns.keys() if self._columns[x]._type=='boolean']
for bool_field in bool_fields:
if bool_field not in vals:
vals[bool_field] = False
#End
for field in vals.copy():
fobj = None
if field in self._columns:
fobj = self._columns[field]
else:
fobj = self._inherit_fields[field][2]
if not fobj:
continue
groups = fobj.write
if groups:
edit = False
for group in groups:
module = group.split(".")[0]
grp = group.split(".")[1]
cr.execute("select count(*) from res_groups_users_rel where gid IN (select res_id from ir_model_data where name='%s' and module='%s' and model='%s') and uid=%s" % \
(grp, module, 'res.groups', user))
readonly = cr.fetchall()
if readonly[0][0] >= 1:
edit = True
break
elif readonly[0][0] == 0:
edit = False
else:
edit = False
if not edit:
vals.pop(field)
for field in vals:
if self._columns[field]._classic_write:
upd0 = upd0 + ',"' + field + '"'
upd1 = upd1 + ',' + self._columns[field]._symbol_set[0]
upd2.append(self._columns[field]._symbol_set[1](vals[field]))
#for the function fields that receive a value, we set them directly in the database
#(they may be required), but we also need to trigger the _fct_inv()
if (hasattr(self._columns[field], '_fnct_inv')) and not isinstance(self._columns[field], fields.related):
#TODO: this way to special case the related fields is really creepy but it shouldn't be changed at
#one week of the release candidate. It seems the only good way to handle correctly this is to add an
#attribute to make a field `really readonly´ and thus totally ignored by the create()... otherwise
#if, for example, the related has a default value (for usability) then the fct_inv is called and it
#may raise some access rights error. Changing this is a too big change for now, and is thus postponed
#after the release but, definitively, the behavior shouldn't be different for related and function
#fields.
upd_todo.append(field)
else:
#TODO: this `if´ statement should be removed because there is no good reason to special case the fields
#related. See the above TODO comment for further explanations.
if not isinstance(self._columns[field], fields.related):
upd_todo.append(field)
if field in self._columns \
and hasattr(self._columns[field], 'selection') \
and vals[field]:
self._check_selection_field_value(cr, user, field, vals[field], context=context)
if self._log_access:
upd0 += ',create_uid,create_date,write_uid,write_date'
upd1 += ",%s,(now() at time zone 'UTC'),%s,(now() at time zone 'UTC')"
upd2.extend((user, user))
cr.execute('insert into "'+self._table+'" (id'+upd0+") values ("+str(id_new)+upd1+')', tuple(upd2))
upd_todo.sort(lambda x, y: self._columns[x].priority-self._columns[y].priority)
if self._parent_store and not context.get('defer_parent_store_computation'):
if self.pool._init:
self.pool._init_parent[self._name] = True
else:
parent = vals.get(self._parent_name, False)
if parent:
cr.execute('select parent_right from '+self._table+' where '+self._parent_name+'=%s order by '+(self._parent_order or self._order), (parent,))
pleft_old = None
result_p = cr.fetchall()
for (pleft,) in result_p:
if not pleft:
break
pleft_old = pleft
if not pleft_old:
cr.execute('select parent_left from '+self._table+' where id=%s', (parent,))
pleft_old = cr.fetchone()[0]
pleft = pleft_old
else:
cr.execute('select max(parent_right) from '+self._table)
pleft = cr.fetchone()[0] or 0
cr.execute('update '+self._table+' set parent_left=parent_left+2 where parent_left>%s', (pleft,))
cr.execute('update '+self._table+' set parent_right=parent_right+2 where parent_right>%s', (pleft,))
cr.execute('update '+self._table+' set parent_left=%s,parent_right=%s where id=%s', (pleft+1, pleft+2, id_new))
# default element in context must be remove when call a one2many or many2many
rel_context = context.copy()
for c in context.items():
if c[0].startswith('default_'):
del rel_context[c[0]]
result = []
for field in upd_todo:
result += self._columns[field].set(cr, self, id_new, field, vals[field], user, rel_context) or []
self._validate(cr, user, [id_new], context)
if not context.get('no_store_function', False):
result += self._store_get_values(cr, user, [id_new], vals.keys(), context)
result.sort()
done = []
for order, object, ids, fields2 in result:
if not (object, ids, fields2) in done:
self.pool.get(object)._store_set_values(cr, user, ids, fields2, context)
done.append((object, ids, fields2))
if self._log_create and not (context and context.get('no_store_function', False)):
message = self._description + \
" '" + \
self.name_get(cr, user, [id_new], context=context)[0][1] + \
"' " + _("created.")
self.log(cr, user, id_new, message, True, context=context)
self.check_access_rule(cr, user, [id_new], 'create', context=context)
self._workflow_trigger(cr, user, [id_new], 'trg_create', context=context)
return id_new
def browse(self, cr, uid, select, context=None, list_class=None, fields_process=None):
"""Fetch records as objects allowing to use dot notation to browse fields and relations
:param cr: database cursor
:param uid: current user id
:param select: id or list of ids.
:param context: context arguments, like lang, time zone
:rtype: object or list of objects requested
"""
self._list_class = list_class or browse_record_list
cache = {}
# need to accepts ints and longs because ids coming from a method
# launched by button in the interface have a type long...
if isinstance(select, (int, long)):
return browse_record(cr, uid, select, self, cache, context=context, list_class=self._list_class, fields_process=fields_process)
elif isinstance(select, list):
return self._list_class([browse_record(cr, uid, id, self, cache, context=context, list_class=self._list_class, fields_process=fields_process) for id in select], context=context)
else:
return browse_null()
    def _store_get_values(self, cr, uid, ids, fields, context):
        """Returns an ordered list of fields.functions to call due to
           an update operation on ``fields`` of records with ``ids``,
           obtained by calling the 'store' functions of these fields,
           as setup by their 'store' attribute.

           :param fields: list of field names that were written to (None is
                          treated as an empty list)
           :return: [(priority, model_name, [record_ids,], [function_fields,])]
        """
        if fields is None: fields = []
        stored_functions = self.pool._store_function.get(self._name, [])

        # use indexed names for the details of the stored_functions:
        model_name_, func_field_to_compute_, id_mapping_fnct_, trigger_fields_, priority_ = range(5)

        # only keep functions that should be triggered for the ``fields``
        # being written to (an empty trigger-field set means "always trigger")
        to_compute = [f for f in stored_functions \
                if ((not f[trigger_fields_]) or set(fields).intersection(f[trigger_fields_]))]

        mapping = {}
        fresults = {}
        for function in to_compute:
            # cache the id-mapping function's result by its identity, so each
            # distinct mapping function is evaluated at most once for ``ids``
            fid = id(function[id_mapping_fnct_])
            if not fid in fresults:
                # use admin user for accessing objects having rules defined on store fields
                fresults[fid] = [id2 for id2 in function[id_mapping_fnct_](self, cr, SUPERUSER_ID, ids, context) if id2]
            target_ids = fresults[fid]

            # the compound key must consider the priority and model name
            key = (function[priority_], function[model_name_])
            for target_id in target_ids:
                mapping.setdefault(key, {}).setdefault(target_id,set()).add(tuple(function))

        # Here mapping looks like:
        # { (10, 'model_a') : { target_id1: [ (function_1_tuple, function_2_tuple) ], ... }
        #   (20, 'model_a') : { target_id2: [ (function_3_tuple, function_4_tuple) ], ... }
        #   (99, 'model_a') : { target_id1: [ (function_5_tuple, function_6_tuple) ], ... }
        # }

        # Now we need to generate the batch function calls list
        # call_map =
        #   { (10, 'model_a') : [(10, 'model_a', [record_ids,], [function_fields,])] }
        call_map = {}
        for ((priority,model), id_map) in mapping.iteritems():
            functions_ids_maps = {}
            # function_ids_maps =
            # { (function_1_tuple, function_2_tuple) : [target_id1, target_id2, ..] }
            for fid, functions in id_map.iteritems():
                functions_ids_maps.setdefault(tuple(functions), []).append(fid)
            # one batch entry per distinct set of functions sharing target ids
            for functions, ids in functions_ids_maps.iteritems():
                call_map.setdefault((priority,model),[]).append((priority, model, ids,
                                                                [f[func_field_to_compute_] for f in functions]))
        # flatten the batches into one list, ordered by (priority, model)
        ordered_keys = call_map.keys()
        ordered_keys.sort()
        result = []
        if ordered_keys:
            result = reduce(operator.add, (call_map[k] for k in ordered_keys))
        return result
    def _store_set_values(self, cr, uid, ids, fields, context):
        """Calls the fields.function's "implementation function" for all ``fields``, on records with ``ids`` (taking care of
           respecting ``multi`` attributes), and stores the resulting values in the database directly.

           :param fields: names of the function fields to recompute
           :return: True
        """
        if not ids:
            return True
        field_flag = False
        field_dict = {}
        if self._log_access:
            # Honour the optional freshness delay (6th element, i[5], of each
            # store-function tuple, presumably expressed in hours — TODO confirm):
            # fields whose write_date is still within that delay are collected in
            # field_dict[record_id] and skipped below.
            cr.execute('select id,write_date from '+self._table+' where id IN %s', (tuple(ids),))
            res = cr.fetchall()
            for r in res:
                if r[1]:
                    field_dict.setdefault(r[0], [])
                    res_date = time.strptime((r[1])[:19], '%Y-%m-%d %H:%M:%S')
                    write_date = datetime.datetime.fromtimestamp(time.mktime(res_date))
                    for i in self.pool._store_function.get(self._name, []):
                        if i[5]:
                            up_write_date = write_date + datetime.timedelta(hours=i[5])
                            if datetime.datetime.now() < up_write_date:
                                if i[1] in fields:
                                    field_dict[r[0]].append(i[1])
                                    if not field_flag:
                                        field_flag = True
        # group fields by their '_multi' attribute, so that multi-fields are
        # recomputed with a single get() call per group
        todo = {}
        keys = []
        for f in fields:
            if self._columns[f]._multi not in keys:
                keys.append(self._columns[f]._multi)
            todo.setdefault(self._columns[f]._multi, [])
            todo[self._columns[f]._multi].append(f)
        for key in keys:
            val = todo[key]
            if key:
                # multi-field group: one get() call returns all values per record
                # use admin user for accessing objects having rules defined on store fields
                result = self._columns[val[0]].get(cr, self, ids, val, SUPERUSER_ID, context=context)
                for id, value in result.items():
                    if field_flag:
                        # drop values for fields that are still "fresh enough"
                        for f in value.keys():
                            if f in field_dict[id]:
                                value.pop(f)
                    upd0 = []
                    upd1 = []
                    for v in value:
                        if v not in val:
                            continue
                        if self._columns[v]._type == 'many2one':
                            try:
                                # presumably a (id, name) pair from name_get —
                                # keep only the id; best-effort on other shapes
                                value[v] = value[v][0]
                            except:
                                pass
                        upd0.append('"'+v+'"='+self._columns[v]._symbol_set[0])
                        upd1.append(self._columns[v]._symbol_set[1](value[v]))
                    upd1.append(id)
                    if upd0 and upd1:
                        cr.execute('update "' + self._table + '" set ' + \
                            ','.join(upd0) + ' where id = %s', upd1)
            else:
                # non-multi fields: one get() call and one UPDATE per field
                for f in val:
                    # use admin user for accessing objects having rules defined on store fields
                    result = self._columns[f].get(cr, self, ids, f, SUPERUSER_ID, context=context)
                    for r in result.keys():
                        if field_flag:
                            if r in field_dict.keys():
                                if f in field_dict[r]:
                                    result.pop(r)
                    for id, value in result.items():
                        if self._columns[f]._type == 'many2one':
                            try:
                                # same (id, name) unpacking as above, best-effort
                                value = value[0]
                            except:
                                pass
                        cr.execute('update "' + self._table + '" set ' + \
                            '"'+f+'"='+self._columns[f]._symbol_set[0] + ' where id = %s', (self._columns[f]._symbol_set[1](value), id))
        return True
#
# TODO: Validate
#
def perm_write(self, cr, user, ids, fields, context=None):
raise NotImplementedError(_('This method does not exist anymore'))
# TODO: ameliorer avec NULL
def _where_calc(self, cr, user, domain, active_test=True, context=None):
"""Computes the WHERE clause needed to implement an OpenERP domain.
:param domain: the domain to compute
:type domain: list
:param active_test: whether the default filtering of records with ``active``
field set to ``False`` should be applied.
:return: the query expressing the given domain as provided in domain
:rtype: osv.query.Query
"""
if not context:
context = {}
domain = domain[:]
# if the object has a field named 'active', filter out all inactive
# records unless they were explicitely asked for
if 'active' in self._all_columns and (active_test and context.get('active_test', True)):
if domain:
# the item[0] trick below works for domain items and '&'/'|'/'!'
# operators too
if not any(item[0] == 'active' for item in domain):
domain.insert(0, ('active', '=', 1))
else:
domain = [('active', '=', 1)]
if domain:
e = expression.expression(cr, user, domain, self, context)
tables = e.get_tables()
where_clause, where_params = e.to_sql()
where_clause = where_clause and [where_clause] or []
else:
where_clause, where_params, tables = [], [], ['"%s"' % self._table]
return Query(tables, where_clause, where_params)
def _check_qorder(self, word):
if not regex_order.match(word):
raise except_orm(_('AccessError'), _('Invalid "order" specified. A valid "order" specification is a comma-separated list of valid field names (optionally followed by asc/desc for the direction)'))
return True
    def _apply_ir_rules(self, cr, uid, query, mode='read', context=None):
        """Add what's missing in ``query`` to implement all appropriate ir.rules
          (using the ``model_name``'s rules or the current model's rules if ``model_name`` is None)

           :param query: the current query object (mutated in place: where
                         clause, parameters and tables may be extended)
           :param mode: access mode the rules are fetched for (e.g. 'read')
        """
        def apply_rule(added_clause, added_params, added_tables, parent_model=None, child_object=None):
            """ :param string parent_model: string of the parent model
                :param model child_object: model object, base of the rule application
                :return: True if anything was added to ``query``, else False
            """
            if added_clause:
                if parent_model and child_object:
                    # as inherited rules are being applied, we need to add the missing JOIN
                    # to reach the parent table (if it was not JOINed yet in the query)
                    parent_alias = child_object._inherits_join_add(child_object, parent_model, query)
                    # inherited rules are applied on the external table -> need to get the alias and replace
                    parent_table = self.pool.get(parent_model)._table
                    added_clause = [clause.replace('"%s"' % parent_table, '"%s"' % parent_alias) for clause in added_clause]
                    # change references to parent_table to parent_alias, because we now use the alias to refer to the table
                    new_tables = []
                    for table in added_tables:
                        # table is just a table name -> switch to the full alias
                        if table == '"%s"' % parent_table:
                            new_tables.append('"%s" as "%s"' % (parent_table, parent_alias))
                        # table is already a full statement -> replace reference to the table to its alias, is correct with the way aliases are generated
                        else:
                            new_tables.append(table.replace('"%s"' % parent_table, '"%s"' % parent_alias))
                    added_tables = new_tables
                query.where_clause += added_clause
                query.where_clause_params += added_params
                # only append tables not already referenced by the query
                for table in added_tables:
                    if table not in query.tables:
                        query.tables.append(table)
                return True
            return False

        # apply main rules on the object
        rule_obj = self.pool.get('ir.rule')
        rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, self._name, mode, context=context)
        apply_rule(rule_where_clause, rule_where_clause_params, rule_tables)

        # apply ir.rules from the parents (through _inherits)
        for inherited_model in self._inherits:
            rule_where_clause, rule_where_clause_params, rule_tables = rule_obj.domain_get(cr, uid, inherited_model, mode, context=context)
            apply_rule(rule_where_clause, rule_where_clause_params, rule_tables,
                        parent_model=inherited_model, child_object=self)
    def _generate_m2o_order_by(self, order_field, query):
        """
        Add possibly missing JOIN to ``query`` and generate the ORDER BY clause for m2o fields,
        either native m2o fields or function/related fields that are stored, including
        intermediate JOINs for inheritance if required.

        :param order_field: name of the many2one field to sort by
        :param query: query object the JOIN is added to (mutated in place)
        :return: the qualified field name to use in an ORDER BY clause to sort by ``order_field``
                 (a list of such names when the target model's _order has several parts),
                 or None when the field cannot be used for sorting
        """
        if order_field not in self._columns and order_field in self._inherit_fields:
            # also add missing joins for reaching the table containing the m2o field
            qualified_field = self._inherits_join_calc(order_field, query)
            order_field_column = self._inherit_fields[order_field][2]
        else:
            qualified_field = '"%s"."%s"' % (self._table, order_field)
            order_field_column = self._columns[order_field]

        assert order_field_column._type == 'many2one', 'Invalid field passed to _generate_m2o_order_by()'
        if not order_field_column._classic_write and not getattr(order_field_column, 'store', False):
            # non-stored function/related m2o: no DB column to sort on
            _logger.debug("Many2one function/related fields must be stored " \
                "to be used as ordering fields! Ignoring sorting for %s.%s",
                self._name, order_field)
            return

        # figure out the applicable order_by for the m2o
        dest_model = self.pool.get(order_field_column._obj)
        m2o_order = dest_model._order
        if not regex_order.match(m2o_order):
            # _order is complex, can't use it here, so we default to _rec_name
            m2o_order = dest_model._rec_name
        else:
            # extract the field names, to be able to qualify them and add desc/asc
            m2o_order_list = []
            for order_part in m2o_order.split(","):
                m2o_order_list.append(order_part.strip().split(" ", 1)[0].strip())
            m2o_order = m2o_order_list

        # Join the dest m2o table if it's not joined yet. We use [LEFT] OUTER join here
        # as we don't want to exclude results that have NULL values for the m2o
        src_table, src_field = qualified_field.replace('"', '').split('.', 1)
        dst_alias, dst_alias_statement = query.add_join((src_table, dest_model._table, src_field, 'id', src_field), implicit=False, outer=True)
        qualify = lambda field: '"%s"."%s"' % (dst_alias, field)
        return map(qualify, m2o_order) if isinstance(m2o_order, list) else qualify(m2o_order)
    def _generate_order_by(self, order_spec, query):
        """
        Attempt to construct an appropriate ORDER BY clause based on order_spec, which must be
        a comma-separated list of valid field names, optionally followed by an ASC or DESC direction.

        :param order_spec: the requested ordering; falls back to self._order when falsy
        :param query: query object, passed along so m2o/inherited sorting can add JOINs
        :return: ' ORDER BY ... ' fragment (with surrounding spaces) or '' when nothing to sort by
        :raise except_orm: in case order_spec is malformed
        :raise ValueError: when a requested sort field does not exist on the model
        """
        order_by_clause = ''
        order_spec = order_spec or self._order
        if order_spec:
            order_by_elements = []
            self._check_qorder(order_spec)
            for order_part in order_spec.split(','):
                order_split = order_part.strip().split(' ')
                order_field = order_split[0].strip()
                order_direction = order_split[1].strip() if len(order_split) == 2 else ''
                inner_clause = None
                if order_field == 'id' or (self._log_access and order_field in LOG_ACCESS_COLUMNS.keys()):
                    # plain columns of this table: qualify directly
                    order_by_elements.append('"%s"."%s" %s' % (self._table, order_field, order_direction))
                elif order_field in self._columns:
                    order_column = self._columns[order_field]
                    if order_column._classic_read:
                        inner_clause = '"%s"."%s"' % (self._table, order_field)
                    elif order_column._type == 'many2one':
                        # may add a JOIN to query and return one or several clauses
                        inner_clause = self._generate_m2o_order_by(order_field, query)
                    else:
                        continue # ignore non-readable or "non-joinable" fields
                elif order_field in self._inherit_fields:
                    parent_obj = self.pool.get(self._inherit_fields[order_field][3])
                    order_column = parent_obj._columns[order_field]
                    if order_column._classic_read:
                        inner_clause = self._inherits_join_calc(order_field, query)
                    elif order_column._type == 'many2one':
                        inner_clause = self._generate_m2o_order_by(order_field, query)
                    else:
                        continue # ignore non-readable or "non-joinable" fields
                else:
                    raise ValueError( _("Sorting field %s not found on model %s") %( order_field, self._name))
                if inner_clause:
                    if isinstance(inner_clause, list):
                        for clause in inner_clause:
                            order_by_elements.append("%s %s" % (clause, order_direction))
                    else:
                        order_by_elements.append("%s %s" % (inner_clause, order_direction))
            if order_by_elements:
                order_by_clause = ",".join(order_by_elements)

        return order_by_clause and (' ORDER BY %s ' % order_by_clause) or ''
def _search(self, cr, user, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
"""
Private implementation of search() method, allowing specifying the uid to use for the access right check.
This is useful for example when filling in the selection list for a drop-down and avoiding access rights errors,
by specifying ``access_rights_uid=1`` to bypass access rights check, but not ir.rules!
This is ok at the security level because this method is private and not callable through XML-RPC.
:param access_rights_uid: optional user ID to use when checking access rights
(not for ir.rules, this is only for ir.model.access)
"""
if context is None:
context = {}
self.check_access_rights(cr, access_rights_uid or user, 'read')
# For transient models, restrict acces to the current user, except for the super-user
if self.is_transient() and self._log_access and user != SUPERUSER_ID:
args = expression.AND(([('create_uid', '=', user)], args or []))
query = self._where_calc(cr, user, args, context=context)
self._apply_ir_rules(cr, user, query, 'read', context=context)
order_by = self._generate_order_by(order, query)
from_clause, where_clause, where_clause_params = query.get_sql()
limit_str = limit and ' limit %d' % limit or ''
offset_str = offset and ' offset %d' % offset or ''
where_str = where_clause and (" WHERE %s" % where_clause) or ''
if count:
cr.execute('SELECT count("%s".id) FROM ' % self._table + from_clause + where_str + limit_str + offset_str, where_clause_params)
res = cr.fetchall()
return res[0][0]
cr.execute('SELECT "%s".id FROM ' % self._table + from_clause + where_str + order_by + limit_str + offset_str, where_clause_params)
res = cr.fetchall()
# TDE note: with auto_join, we could have several lines about the same result
# i.e. a lead with several unread messages; we uniquify the result using
# a fast way to do it while preserving order (http://www.peterbe.com/plog/uniqifiers-benchmark)
def _uniquify_list(seq):
seen = set()
return [x for x in seq if x not in seen and not seen.add(x)]
return _uniquify_list([x[0] for x in res])
# returns the different values ever entered for one field
# this is used, for example, in the client when the user hits enter on
# a char field
def distinct_field_get(self, cr, uid, field, value, args=None, offset=0, limit=None):
if not args:
args = []
if field in self._inherit_fields:
return self.pool.get(self._inherit_fields[field][0]).distinct_field_get(cr, uid, field, value, args, offset, limit)
else:
return self._columns[field].search(cr, self, args, field, value, offset, limit, uid)
    def copy_data(self, cr, uid, id, default=None, context=None):
        """
        Copy given record's data with all its fields values

        :param cr: database cursor
        :param uid: current user id
        :param id: id of the record to copy
        :param default: field values to override in the original values of the copied record
        :type default: dictionary
        :param context: context arguments, like lang, time zone
        :type context: dictionary
        :return: dictionary containing all the field values, or None when
                 ``id`` was already copied in this call chain
        """
        if context is None:
            context = {}

        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_data_seen',{})
        if id in seen_map.setdefault(self._name,[]):
            # already copied: return None so one2many copies below skip it
            return
        seen_map[self._name].append(id)

        if default is None:
            default = {}
        if 'state' not in default:
            if 'state' in self._defaults:
                # 'state' defaults may be plain values or callables
                if callable(self._defaults['state']):
                    default['state'] = self._defaults['state'](self, cr, uid, context)
                else:
                    default['state'] = self._defaults['state']

        # read without 'lang' so master (untranslated) values are copied;
        # translations are handled separately by copy_translations()
        context_wo_lang = context.copy()
        if 'lang' in context:
            del context_wo_lang['lang']
        data = self.read(cr, uid, [id,], context=context_wo_lang)
        if data:
            data = data[0]
        else:
            raise IndexError( _("Record #%d of %s not found, cannot copy!") %( id, self._name))

        # build a black list of fields that should not be copied
        blacklist = set(MAGIC_COLUMNS + ['parent_left', 'parent_right'])
        def blacklist_given_fields(obj):
            # blacklist the fields that are given by inheritance
            for other, field_to_other in obj._inherits.items():
                blacklist.add(field_to_other)
                if field_to_other in default:
                    # all the fields of 'other' are given by the record: default[field_to_other],
                    # except the ones redefined in self
                    blacklist.update(set(self.pool.get(other)._all_columns) - set(self._columns))
                else:
                    blacklist_given_fields(self.pool.get(other))
        blacklist_given_fields(self)

        res = dict(default)
        for f, colinfo in self._all_columns.items():
            field = colinfo.column
            if f in default:
                pass
            elif f in blacklist:
                pass
            elif isinstance(field, fields.function):
                # function fields are recomputed, never copied
                pass
            elif field._type == 'many2one':
                # read() returns (id, name) tuples for m2o; keep only the id
                res[f] = data[f] and data[f][0]
            elif field._type == 'one2many':
                other = self.pool.get(field._obj)
                # duplicate following the order of the ids because we'll rely on
                # it later for copying translations in copy_translation()!
                lines = [other.copy_data(cr, uid, line_id, context=context) for line_id in sorted(data[f])]
                # the lines are duplicated using the wrong (old) parent, but then
                # are reassigned to the correct one thanks to the (0, 0, ...)
                res[f] = [(0, 0, line) for line in lines if line]
            elif field._type == 'many2many':
                res[f] = [(6, 0, data[f])]
            else:
                res[f] = data[f]

        return res
    def copy_translations(self, cr, uid, old_id, new_id, context=None):
        """Duplicate the ir.translation entries of record ``old_id`` onto the
        freshly copied record ``new_id``, recursing through one2many
        children (relying on the id ordering produced by copy_data()).
        """
        if context is None:
            context = {}

        # avoid recursion through already copied records in case of circular relationship
        seen_map = context.setdefault('__copy_translations_seen',{})
        if old_id in seen_map.setdefault(self._name,[]):
            return
        seen_map[self._name].append(old_id)

        trans_obj = self.pool.get('ir.translation')
        # TODO it seems fields_get can be replaced by _all_columns (no need for translation)
        fields = self.fields_get(cr, uid, context=context)

        for field_name, field_def in fields.items():
            # we must recursively copy the translations for o2o and o2m
            if field_def['type'] == 'one2many':
                target_obj = self.pool.get(field_def['relation'])
                old_record, new_record = self.read(cr, uid, [old_id, new_id], [field_name], context=context)
                # here we rely on the order of the ids to match the translations
                # as foreseen in copy_data()
                old_children = sorted(old_record[field_name])
                new_children = sorted(new_record[field_name])
                for (old_child, new_child) in zip(old_children, new_children):
                    target_obj.copy_translations(cr, uid, old_child, new_child, context=context)
            # and for translatable fields we keep them for copy
            elif field_def.get('translate'):
                if field_name in self._columns:
                    # field owned by this model: translate against new_id directly
                    trans_name = self._name + "," + field_name
                    res_id = new_id
                elif field_name in self._inherit_fields:
                    trans_name = self._inherit_fields[field_name][0] + "," + field_name
                    # get the id of the parent record to set the translation
                    inherit_field_name = self._inherit_fields[field_name][1]
                    res_id = self.read(cr, uid, [new_id], [inherit_field_name], context=context)[0][inherit_field_name][0]
                else:
                    continue

                trans_ids = trans_obj.search(cr, uid, [
                        ('name', '=', trans_name),
                        ('res_id', '=', old_id)
                ])
                records = trans_obj.read(cr, uid, trans_ids, context=context)
                for record in records:
                    # drop the id so create() makes a new row for the copy
                    del record['id']
                    # remove source to avoid triggering _set_src
                    del record['source']
                    record.update({'res_id': res_id})
                    trans_obj.create(cr, uid, record, context=context)
def copy(self, cr, uid, id, default=None, context=None):
"""
Duplicate record with given id updating it with default values
:param cr: database cursor
:param uid: current user id
:param id: id of the record to copy
:param default: dictionary of field values to override in the original values of the copied record, e.g: ``{'field_name': overriden_value, ...}``
:type default: dictionary
:param context: context arguments, like lang, time zone
:type context: dictionary
:return: id of the newly created record
"""
if context is None:
context = {}
context = context.copy()
data = self.copy_data(cr, uid, id, default, context)
new_id = self.create(cr, uid, data, context)
self.copy_translations(cr, uid, id, new_id, context)
return new_id
def exists(self, cr, uid, ids, context=None):
"""Checks whether the given id or ids exist in this model,
and return the list of ids that do. This is simple to use for
a truth test on a browse_record::
if record.exists():
pass
:param ids: id or list of ids to check for existence
:type ids: int or [int]
:return: the list of ids that currently exist, out of
the given `ids`
"""
if type(ids) in (int, long):
ids = [ids]
query = 'SELECT id FROM "%s"' % self._table
cr.execute(query + "WHERE ID IN %s", (tuple(ids),))
return [x[0] for x in cr.fetchall()]
    def check_recursion(self, cr, uid, ids, context=None, parent=None):
        """Deprecated public wrapper around :meth:`_check_recursion`.

        Kept for backward compatibility; logs a warning on every call.
        """
        _logger.warning("You are using deprecated %s.check_recursion(). Please use the '_check_recursion()' instead!" % \
                        self._name)
        assert parent is None or parent in self._columns or parent in self._inherit_fields,\
            "The 'parent' parameter passed to check_recursion() must be None or a valid field name"
        return self._check_recursion(cr, uid, ids, context, parent)
def _check_recursion(self, cr, uid, ids, context=None, parent=None):
"""
Verifies that there is no loop in a hierarchical structure of records,
by following the parent relationship using the **parent** field until a loop
is detected or until a top-level record is found.
:param cr: database cursor
:param uid: current user id
:param ids: list of ids of records to check
:param parent: optional parent field name (default: ``self._parent_name = parent_id``)
:return: **True** if the operation can proceed safely, or **False** if an infinite loop is detected.
"""
if not parent:
parent = self._parent_name
# must ignore 'active' flag, ir.rules, etc. => direct SQL query
query = 'SELECT "%s" FROM "%s" WHERE id = %%s' % (parent, self._table)
for id in ids:
current_id = id
while current_id is not None:
cr.execute(query, (current_id,))
result = cr.fetchone()
current_id = result[0] if result else None
if current_id == id:
return False
return True
def _get_external_ids(self, cr, uid, ids, *args, **kwargs):
"""Retrieve the External ID(s) of any database record.
**Synopsis**: ``_get_xml_ids(cr, uid, ids) -> { 'id': ['module.xml_id'] }``
:return: map of ids to the list of their fully qualified External IDs
in the form ``module.key``, or an empty list when there's no External
ID for a record, e.g.::
{ 'id': ['module.ext_id', 'module.ext_id_bis'],
'id2': [] }
"""
ir_model_data = self.pool.get('ir.model.data')
data_ids = ir_model_data.search(cr, uid, [('model', '=', self._name), ('res_id', 'in', ids)])
data_results = ir_model_data.read(cr, uid, data_ids, ['module', 'name', 'res_id'])
result = {}
for id in ids:
# can't use dict.fromkeys() as the list would be shared!
result[id] = []
for record in data_results:
result[record['res_id']].append('%(module)s.%(name)s' % record)
return result
def get_external_id(self, cr, uid, ids, *args, **kwargs):
"""Retrieve the External ID of any database record, if there
is one. This method works as a possible implementation
for a function field, to be able to add it to any
model object easily, referencing it as ``Model.get_external_id``.
When multiple External IDs exist for a record, only one
of them is returned (randomly).
:return: map of ids to their fully qualified XML ID,
defaulting to an empty string when there's none
(to be usable as a function field),
e.g.::
{ 'id': 'module.ext_id',
'id2': '' }
"""
results = self._get_xml_ids(cr, uid, ids)
for k, v in results.iteritems():
if results[k]:
results[k] = v[0]
else:
results[k] = ''
return results
# backwards compatibility
get_xml_id = get_external_id
_get_xml_ids = _get_external_ids
    # Transience
    def is_transient(self):
        """ Return whether the model is transient.

        See :class:`TransientModel`.
        """
        # class-level flag set by the Model / TransientModel subclasses below
        return self._transient
def _transient_clean_rows_older_than(self, cr, seconds):
assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
# Never delete rows used in last 5 minutes
seconds = max(seconds, 300)
query = ("SELECT id FROM " + self._table + " WHERE"
" COALESCE(write_date, create_date, (now() at time zone 'UTC'))::timestamp"
" < ((now() at time zone 'UTC') - interval %s)")
cr.execute(query, ("%s seconds" % seconds,))
ids = [x[0] for x in cr.fetchall()]
self.unlink(cr, SUPERUSER_ID, ids)
    def _transient_clean_old_rows(self, cr, max_count):
        """Trigger a cleanup once the table holds more than ``max_count`` rows.

        NOTE(review): despite the name, this does not delete down to
        ``max_count`` rows -- it delegates to the age-based cleanup with a
        5-minute threshold. Confirm this matches the intended count policy.
        """
        # Check how many rows we have in the table
        cr.execute("SELECT count(*) AS row_count FROM " + self._table)
        res = cr.fetchall()
        if res[0][0] <= max_count:
            return  # max not reached, nothing to do
        self._transient_clean_rows_older_than(cr, 300)
    def _transient_vacuum(self, cr, uid, force=False):
        """Clean the transient records.

        This unlinks old records from the transient model tables whenever the
        "_transient_max_count" or "_max_age" conditions (if any) are reached.
        Actual cleaning will happen only once every "_transient_check_time" calls.
        This means this method can be called frequently (e.g. whenever
        a new record is created).

        Example with both max_hours and max_count active:

        Suppose max_hours = 0.2 (e.g. 12 minutes), max_count = 20, there are 55 rows in the
        table, 10 created/changed in the last 5 minutes, an additional 12 created/changed between
        5 and 10 minutes ago, the rest created/changed more then 12 minutes ago.

        - age based vacuum will leave the 22 rows created/changed in the last 12 minutes
        - count based vacuum will wipe out another 12 rows. Not just 2, otherwise each addition
          would immediately cause the maximum to be reached again.
        - the 10 rows that have been created/changed the last 5 minutes will NOT be deleted
        """
        assert self._transient, "Model %s is not transient, it cannot be vacuumed!" % self._name
        _transient_check_time = 20          # arbitrary limit on vacuum executions
        # throttle: only every _transient_check_time-th call actually vacuums
        self._transient_check_count += 1
        if not force and (self._transient_check_count < _transient_check_time):
            return True  # no vacuum cleaning this time
        self._transient_check_count = 0

        # Age-based expiration
        if self._transient_max_hours:
            self._transient_clean_rows_older_than(cr, self._transient_max_hours * 60 * 60)

        # Count-based expiration
        if self._transient_max_count:
            self._transient_clean_old_rows(cr, self._transient_max_count)

        return True
    def resolve_2many_commands(self, cr, uid, field_name, commands, fields=None, context=None):
        """ Serializes one2many and many2many commands into record dictionaries
            (as if all the records came from the database via a read()). This
            method is aimed at onchange methods on one2many and many2many fields.

            Because commands might be creation commands, not all record dicts
            will contain an ``id`` field. Commands matching an existing record
            will have an ``id``.

            :param field_name: name of the one2many or many2many field matching the commands
            :type field_name: str
            :param commands: one2many or many2many commands to execute on ``field_name``
            :type commands: list((int|False, int|False, dict|False))
            :param fields: list of fields to read from the database, when applicable
            :type fields: list(str)
            :returns: records in a shape similar to that returned by ``read()``
                (except records may be missing the ``id`` field if they don't exist in db)
            :rtype: list(dict)
        """
        result = []             # result (list of dict)
        record_ids = []         # ids of records to read
        updates = {}            # {id: dict} of updates on particular records

        for command in commands:
            if not isinstance(command, (list, tuple)):
                # bare id: read the record as-is
                record_ids.append(command)
            elif command[0] == 0:
                # CREATE: values are inline, no database read needed
                result.append(command[2])
            elif command[0] == 1:
                # UPDATE: read the record, then overlay the given values
                record_ids.append(command[1])
                updates.setdefault(command[1], {}).update(command[2])
            elif command[0] in (2, 3):
                # DELETE / UNLINK: drop the id from the pending reads
                record_ids = [id for id in record_ids if id != command[1]]
            elif command[0] == 4:
                # LINK: read the existing record
                record_ids.append(command[1])
            elif command[0] == 5:
                # UNLINK ALL: reset everything accumulated so far
                result, record_ids = [], []
            elif command[0] == 6:
                # SET: replace with exactly the given id list
                result, record_ids = [], list(command[2])

        # read the records and apply the updates
        other_model = self.pool.get(self._all_columns[field_name].column._obj)
        for record in other_model.read(cr, uid, record_ids, fields=fields, context=context):
            record.update(updates.get(record['id'], {}))
            result.append(record)

        return result

    # for backward compatibility
    resolve_o2m_commands_to_record_dicts = resolve_2many_commands
    def _register_hook(self, cr):
        """ stuff to do right after the registry is built """
        # default implementation is a no-op; models override this to perform
        # post-registry initialization
        pass
# keep this import here, at top it will cause dependency cycle errors
import expression
class Model(BaseModel):
    """Main super-class for regular database-persisted OpenERP models.

    OpenERP models are created by inheriting from this class::

        class user(Model):
            ...

    The system will later instantiate the class once per database (on
    which the class' module is installed).
    """
    _auto = True                # automatically create the database table
    _register = False           # not visible in ORM registry, meant to be python-inherited only
    _transient = False          # True in a TransientModel
class TransientModel(BaseModel):
    """Model super-class for transient records, meant to be temporarily
    persisted, and regularly vacuum-cleaned.

    A TransientModel has a simplified access rights management,
    all users can create new records, and may only access the
    records they created. The super-user has unrestricted access
    to all TransientModel records.
    """
    _auto = True                # transient models still get a database table
    _register = False           # not visible in ORM registry, meant to be python-inherited only
    _transient = True           # enables the _transient_vacuum machinery
class AbstractModel(BaseModel):
    """Abstract Model super-class for creating an abstract class meant to be
    inherited by regular models (Models or TransientModels) but not meant to
    be usable on its own, or persisted.

    Technical note: we don't want to make AbstractModel the super-class of
    Model or BaseModel because it would not make sense to put the main
    definition of persistence methods such as create() in it, and still we
    should be able to override them within an AbstractModel.
    """
    _auto = False               # don't create any database backend for AbstractModels
    _register = False           # not visible in ORM registry, meant to be python-inherited only
    _transient = False
def itemgetter_tuple(items):
    """Return a callable mapping ``obj`` to a tuple of ``obj[i]`` for every
    index in ``items``.

    Unlike :func:`operator.itemgetter`, the result is always an n-tuple with
    n = len(items), even for zero or one index.
    """
    if not items:
        return lambda _obj: ()
    if len(items) == 1:
        index = items[0]
        return lambda obj: (obj[index],)
    return operator.itemgetter(*items)
class ImportWarning(Warning):
    """ Used to send warnings upwards the stack during the import process

    NOTE(review): this shadows Python's built-in ImportWarning within this
    module -- confirm the shadowing is intentional before star-importing.
    """
    pass
def convert_pgerror_23502(model, fields, info, e):
    """Convert a PostgreSQL not_null_violation (SQLSTATE 23502) into a
    user-readable error dict ``{'message': ..., 'field': ...}``.

    Falls back to the raw error text when the message does not match the
    expected format or names an unknown field.
    """
    m = re.match(r'^null value in column "(?P<field>\w+)" violates '
                 r'not-null constraint\n',
                 str(e))
    # check the match BEFORE calling m.group(): the previous code called
    # m.group('field') first and crashed with AttributeError when the
    # error text did not match the pattern
    if not m or m.group('field') not in fields:
        return {'message': unicode(e)}
    field_name = m.group('field')
    message = _(u"Missing required value for the field '%s'.") % field_name
    field = fields.get(field_name)
    if field:
        message = _(u"%s This might be '%s' in the current model, or a field "
                    u"of the same name in an o2m.") % (message, field['string'])
    return {
        'message': message,
        'field': field_name,
    }
def convert_pgerror_23505(model, fields, info, e):
    """Convert a PostgreSQL unique_violation (SQLSTATE 23505) into a
    user-readable error dict ``{'message': ..., 'field': ...}``.

    Falls back to the raw error text when the message does not match the
    expected format or names an unknown field.
    """
    m = re.match(r'^duplicate key (?P<field>\w+) violates unique constraint',
                 str(e))
    # check the match BEFORE calling m.group(): the previous code called
    # m.group('field') first and crashed with AttributeError when the
    # error text did not match the pattern
    if not m or m.group('field') not in fields:
        return {'message': unicode(e)}
    field_name = m.group('field')
    message = _(u"The value for the field '%s' already exists.") % field_name
    field = fields.get(field_name)
    if field:
        message = _(u"%s This might be '%s' in the current model, or a field "
                    u"of the same name in an o2m.") % (message, field['string'])
    return {
        'message': message,
        'field': field_name,
    }
# Map PostgreSQL SQLSTATE codes to user-friendly error converters; unknown
# codes fall back to wrapping the raw error message.
PGERROR_TO_OE = collections.defaultdict(
    # shape of mapped converters
    lambda: (lambda model, fvg, info, pgerror: {'message': unicode(pgerror)}), {
    # not_null_violation
    '23502': convert_pgerror_23502,
    # unique constraint error
    '23505': convert_pgerror_23505,
})
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| xxshutong/openerp-7.0 | openerp/osv/orm.py | Python | agpl-3.0 | 265,245 |
var utils = require('../../utils');
var discoverHelper = require('../../helpers/discover-helper');
var chai = require('chai');
var chaiAsPromised = require('chai-as-promised');
chai.use(chaiAsPromised);
var expect = chai.expect;
// e2e coverage of the discover/search page: top-bar filters, text search,
// and the "most liked" / "most active" orderings.
describe('discover search', () => {
    before(async () => {
        browser.get(browser.params.glob.host + 'discover/search');

        await utils.common.waitLoader();
    });

    it('screenshot', async () => {
        await utils.common.takeScreenshot("discover", "discover-search");
    });

    // NOTE: describe callbacks must be synchronous -- mocha ignores the
    // promise returned by an async describe, so the previous `async () =>`
    // was misleading; all awaiting happens inside hooks/specs.
    describe('top bar', () => {
        after(async () => {
            browser.get(browser.params.glob.host + 'discover/search');

            await utils.common.waitLoader();
        });

        it('filters', async () => {
            let htmlChanges = await utils.common.outerHtmlChanges(discoverHelper.searchProjectsList());

            discoverHelper.searchFilter(3);

            await htmlChanges();

            let url = await browser.getCurrentUrl();
            let projects = discoverHelper.searchProjects();

            expect(await projects.count()).to.be.above(0);
            expect(url).to.be.equal(browser.params.glob.host + 'discover/search?filter=people');
        });

        it('search by text', async () => {
            discoverHelper.searchInput().sendKeys('Project Example 0');
            discoverHelper.sendSearch();

            let projects = discoverHelper.searchProjects();
            expect(await projects.count()).to.be.equal(1);
        });
    });

    describe('most liked', () => {
        after(async () => {
            browser.get(browser.params.glob.host + 'discover/search');

            await utils.common.waitLoader();
        });

        it('default', async () => {
            discoverHelper.mostLiked();

            // was fire-and-forget: await so the screenshot completes before
            // the spec (and browser state) moves on
            await utils.common.takeScreenshot("discover", "discover-search-filter");

            let url = await browser.getCurrentUrl();
            expect(url).to.be.equal(browser.params.glob.host + 'discover/search?order_by=-total_fans_last_week');
        });

        it('filter', async () => {
            discoverHelper.searchOrder(3);

            let projects = discoverHelper.searchProjects();
            let url = await browser.getCurrentUrl();

            expect(await projects.count()).to.be.above(0);
            expect(url).to.be.equal(browser.params.glob.host + 'discover/search?order_by=-total_fans');
        });

        it('clear', async () => {
            discoverHelper.clearOrder();

            let orderSelector = discoverHelper.orderSelectorWrapper();
            expect(await orderSelector.isPresent()).to.be.equal(false);
        });
    });

    describe('most active', () => {
        after(async () => {
            browser.get(browser.params.glob.host + 'discover/search');

            await utils.common.waitLoader();
        });

        it('default', async () => {
            discoverHelper.mostActived();

            // was fire-and-forget: await so the screenshot completes before
            // the spec (and browser state) moves on
            await utils.common.takeScreenshot("discover", "discover-search-filter");

            let url = await browser.getCurrentUrl();
            expect(url).to.be.equal(browser.params.glob.host + 'discover/search?order_by=-total_activity_last_week');
        });

        it('filter', async () => {
            discoverHelper.searchOrder(3);

            let projects = discoverHelper.searchProjects();
            let url = await browser.getCurrentUrl();

            expect(await projects.count()).to.be.above(0);
            expect(url).to.be.equal(browser.params.glob.host + 'discover/search?order_by=-total_activity');
        });

        it('clear', async () => {
            discoverHelper.clearOrder();

            let orderSelector = discoverHelper.orderSelectorWrapper();
            expect(await orderSelector.isPresent()).to.be.equal(false);
        });
    });
});
| Rademade/taiga-front | e2e/suites/discover/discover-search.e2e.js | JavaScript | agpl-3.0 | 3,834 |
/*
* MANDY is a simple webapp to track man-day consumption on activities.
*
* Copyright 2014, rpatriarche
*
* This file is part of MANDY software.
*
* MANDY is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* MANDY is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
/**
* Provides domain model objets (entities including JPA annotations).
*/
package org.softdays.mandy.core.model;
| softdays/mandy | mandy-service/src/main/java/org/softdays/mandy/core/model/package-info.java | Java | agpl-3.0 | 957 |
import { action, observable } from "mobx";
import { ApiClient } from "api/ApiClient";
import { IRankingJson } from "api/schema/IRankingJson";
import { Ranking } from "ranking/Ranking";
// MobX store holding the currently displayed ranking and its loading state.
export class RankingStore {
    // True while a ranking request is in flight; drives loading indicators.
    @observable
    public isLoading = true;

    // Latest fetched ranking; replaced wholesale, hence observable.ref.
    @observable.ref
    public ranking: Ranking;

    // Date of the ranking the user last viewed (not necessarily today's).
    @observable.ref
    public lastViewedRankingDate: Date;

    public constructor(private apiClient: ApiClient) {}

    // Fetch the ranking for `date`, or the latest ranking when omitted.
    // NOTE(review): isLoading stays true if the request rejects -- confirm
    // callers handle fetch failures.
    public async fetchData(date?: Date) {
        this.isLoading = true;
        const rankingJson = await this.apiClient.fetchRankingJson({ date });
        this.handleFetchedData(rankingJson, date);
    }

    // Apply the fetched payload in a single MobX action.
    @action
    private handleFetchedData(json: IRankingJson, date?: Date) {
        this.ranking = Ranking.fromJson(json);
        // when no explicit date was requested, remember the ranking's own date
        this.lastViewedRankingDate = date ? date : this.ranking.date;
        this.isLoading = false;
    }
}
| fauu/natrank | frontend/src/ranking/RankingStore.ts | TypeScript | agpl-3.0 | 827 |
/*
XOWA: the XOWA Offline Wiki Application
Copyright (C) 2012 gnosygnu@gmail.com
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package gplx.gfui; import gplx.*;
/** Event glue for tab selection in a TabBox: publishes, routes and applies
 *  "tab selected" notifications through the GfoEvMgr_ event bus. */
public class TabBoxEvt_tabSelect {
	// event key used when publishing; NOTE(review): non-final mutable static --
	// confirm it is never reassigned elsewhere before relying on its value
	public static String Key = "TabBoxEvt_tabSelect";
	public static final String SelectedChanged_evt = "SelectedChanged_evt";
	/** Publish a tab-select event carrying the old and new tab as a pair. */
	public static void Send(TabBoxMgr tabBoxMgr, TabPnlItm oldTab, TabPnlItm newTab) {
		GfoEvMgr_.PubVal(tabBoxMgr, Key, new TabPnlItm[] {oldTab, newTab});
	}
	/** Message handler: unpack the {old, new} pair and perform the selection. */
	@gplx.Internal protected static void Select(TabBox tabBox, GfsCtx ctx, GfoMsg m) {
		TabPnlItm[] ary = (TabPnlItm[])m.CastObj("v");
		Select(tabBox, ary[0], ary[1]);
	}
	/** Update the panel and button areas, then notify listeners of the new index. */
	@gplx.Internal protected static void Select(TabBox tabBox, TabPnlItm curTabItm, TabPnlItm newTabItm) {
		TabPnlAreaMgr.Select(tabBox, curTabItm, newTabItm);
		TabBtnAreaMgr.Select(tabBox, curTabItm, newTabItm);
		GfoEvMgr_.PubVal(tabBox, SelectedChanged_evt, newTabItm.Idx());
	}
	/** Read the selected tab index carried by the message. */
	public static int Handle(GfsCtx ctx, GfoMsg m) {
		return m.ReadInt("v");
	}
}
| crosslink/xowa | dev/150_gfui/src_500_tab/gplx/gfui/TabBoxEvt_tabSelect.java | Java | agpl-3.0 | 1,621 |
#include "LightsData.h"
namespace
{
    // Bit masks for the packed light-status byte (one flag per bit).
    // NOTE(review): bit assignments presumably mirror the telemetry/CAN
    // protocol -- confirm against the protocol spec before changing them.
    const unsigned char LOW_BEAMS_OFFSET = 0x1;
    const unsigned char HIGH_BEAMS_OFFSET = 0x2;
    const unsigned char BRAKES_OFFSET = 0x4;
    const unsigned char LEFT_SIGNAL_OFFSET = 0x8;
    const unsigned char RIGHT_SIGNAL_OFFSET = 0x10;
    const unsigned char BMS_STROBE_LIGHT_OFFSET = 0x20;
}
// Default-construct with no data received yet: not alive, all lights off.
LightsData::LightsData()
    : alive_(false)
    , lightStatus_(0)
{
    // Initialize to 0
}
// Nothing to release: LightsData owns no resources.
LightsData::~LightsData()
{
}
/* LightsData Gets */

// Returns the alive flag last recorded via setAlive().
bool LightsData::getAlive() const
{
    return alive_;
}
// True when the low-beams bit is set in the packed status byte.
bool LightsData::getLowBeams() const
{
    return (lightStatus_ & LOW_BEAMS_OFFSET) != 0;
}
// True when the high-beams bit is set in the packed status byte.
bool LightsData::getHighBeams() const
{
    return (lightStatus_ & HIGH_BEAMS_OFFSET) != 0;
}
// True when the brake-lights bit is set in the packed status byte.
bool LightsData::getBrakes() const
{
    return (lightStatus_ & BRAKES_OFFSET) != 0;
}
// True when the left turn-signal bit is set in the packed status byte.
bool LightsData::getLeftSignal() const
{
    return (lightStatus_ & LEFT_SIGNAL_OFFSET) != 0;
}
// True when the right turn-signal bit is set in the packed status byte.
bool LightsData::getRightSignal() const
{
    return (lightStatus_ & RIGHT_SIGNAL_OFFSET) != 0;
}
// True when the BMS strobe-light bit is set in the packed status byte.
bool LightsData::getBmsStrobeLight() const
{
    return (lightStatus_ & BMS_STROBE_LIGHT_OFFSET) != 0;
}
/* LightsData Sets */

// Record whether lights data is currently being received.
void LightsData::setAlive(const bool& alive)
{
    alive_ = alive;
}
// Replace the packed light-status byte (see the *_OFFSET masks above).
void LightsData::setLightStatus(const unsigned char& lightStatus)
{
    lightStatus_ = lightStatus;
}
| UCSolarCarTeam/Epsilon-Hermes | src/DataLayer/LightsData/LightsData.cpp | C++ | agpl-3.0 | 1,389 |
'use strict';

// $resource wrapper for the dashboard endpoint (GET urlApi + 'dashboard').
app.
    factory("DashboardResource", function ($resource, urlApi) {
        return $resource(urlApi + 'dashboard', {}, {
            // custom action: GET the dashboard payload
            dashboard: {
                method: 'GET'
            }
        });
    });
<?php

/**
 * Default content entity; inherits all behaviour from the base
 * implementation.
 */
class Content extends Base\Content
{
    /**
     * Instantiate a new Content
     */
    public function __construct()
    {
        // Constructors have no usable return value in PHP (the value is
        // discarded by `new`), so the previous `return parent::__construct()`
        // was misleading; just delegate.
        parent::__construct();
    }
}
/*
* Copyright (C) 2011-2013 The Animo Project
* http://animotron.org
*
* This file is part of Animotron.
*
* Animotron is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of
* the License, or (at your option) any later version.
*
* Animotron is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of
* the GNU Affero General Public License along with Animotron.
* If not, see <http://www.gnu.org/licenses/>.
*/
package org.animotron.games.whouse;
import org.animotron.ATest;
import org.animotron.expression.AnimoExpression;
import org.animotron.expression.Expression;
import org.junit.Ignore;
import org.junit.Test;
/**
* @author <a href="mailto:shabanovd@gmail.com">Dmitriy Shabanov</a>
*
*/
// Exploratory tests sketching a warehouse domain model in the animo DSL.
// Both annotated tests are @Ignore'd works-in-progress.
public class WHouseTest extends ATest {

	@Test
	@Ignore
	public void test_00() throws Throwable {
		//party: person & organization
		// + receipt or issue
		tAnimo("def party.");
		// NOTE(review): "receipty" looks like a typo for "receipt" -- confirm
		tAnimo("def receipt-party (party) (receipty).");
		tAnimo("def issue-party (party) (issue).");
		tAnimo("def person party.");
		tAnimo("def organization party.");
		tAnimo("def ORG-1 organization.");
		tAnimo("def ORG-2 organization.");
		tAnimo("def I person.");

		//unit of measure
		tAnimo("def UoM.");
		tAnimo("def kilo number Q N1000.");
		//kg -> kilo + gramm
		tAnimo("def gram UoM.");
		tAnimo("def kilogram (kilo) (gram).");

		//currency
		tAnimo("def USD currency.");

		//Stock Keeping Unit
		tAnimo("def SKU (reference) (qty) (price) (cost).");

		//documents structure
		tAnimo("def document date.");
		tAnimo("def whouse-document (document) (issue-party) (receipt-party) (SKU).");
		// NOTE(review): self-reference "whouse-receipt (whouse-receipt)" is
		// probably meant to be "(whouse-document)" -- confirm
		tAnimo("def whouse-receipt (whouse-receipt) (receipt).");
		tAnimo("def whouse-issue (whouse-document) (issue).");
		tAnimo("def whouse-transfer (receipt) (issue).");

		//documents
		// NOTE(review): "ORG1" does not match the "ORG-1" defined above -- confirm
		tAnimo("def R01 (whouse-document) (date 'T2011-08-07') (issue-party ORG1) (receipt-party I) (SKU item01).");
		tAnimo("def item01 reference 'item01'.");

		Expression a = tAnimo("def a all whouse-receive with party I.");
		assertAnimoResult(a, "a.");

		//TODO: how to answer "what do I have?" ("SKU") (answer "item01")
		//How may of "item01" I have?

		Expression f = tAnimo("def f form R01.");
		assertAnimoResult(f, "<form id='R01'>" +
			"<input id='date' expression='T2011-08-07'>07 August 2011</input>" +
			"<input id='issue-party' expression='an:ORG-01'>Organization 01</input>" +
			"<input id='receipt-party' expression='an:I'>I</input>" +
			"<table>" +
				"<head>" +
					"<col id='reference'>reference</col>" +
					"<col id='qty'>qty</col>" +
					"<col id='price'>price</col>" +
				"<head>" +
				"<row>" +
					"<col><input id='item01*reference' expression='item01'>item01</input></col>" +
					"<col><input id='item01*qty' expression='have:number Q:N2; have:UoM an:KG'>2 kg</input></col>" +
					"<col><input id='item01*price' expression='have:number Q:N2; have:UoM an:KG'>5 USD per gram</input></col>" +
				"<row>" +
			"</table>" +
		"</form>");
	}

	@Test
	@Ignore
	public void test_01() throws Throwable {
		__(
			"def kilo number 1000.", //* 1000
			"def UoM.",
			"def gram.",
			"def kg (kilo, gram).", //the base unit of mass in the International System of Units
			"def measument1 qty (number 1000) (UoM gram)."
		);
		Expression e = new AnimoExpression("get qty (measument1) (UoM kg)."); //???
		assertStringResult(e, "have qty (number 1) (UoM kg)");
	}

	// NOTE(review): no @Test annotation -- this method is never run by
	// JUnit 4; confirm whether that is intentional or an omission
	public void test_02() throws Throwable {
		__(
			"def SKU " +
				"(word " +
					"(lang-en \"stock-keeping unit\") " +
					"(lang-ru \"единица учета запасов\") " +
				") " +
				"(goods, qty, price, cost).",

			"def qty" +
				"(word " +
					"(lang-en \"quantity\") " +
					"(lang-ru \"количество\") " +
				") " +
				"(/ (get cost) (get price))" +
				"(number, UoM).",

			"def price" +
				"(/ (get cost) (get qty)).",

			"def cost" +
				"(* (get qty) (get price))" +
				"(number, currency)."
		);

		assertAnimoResult("html-widget qty", "<label>quantity<input id=\"\" name=\"\" expression=\"\"/></label>");

		__(
			"def whouse-receive " +
				"(word " +
					"(lang-en \"ware house receive\") " +
					"(lang-ru \"складской приход\") " +
				") " +
				"(receive-party, issue-party, (goods, qty, price, cost)).",

			"def receiptsForWhouse " +
				"(D2012-01-29)" +
				"(issue companyA) "+
				"(receive whouse) "+
				"(paper (qty 10,kg) (cost 50,USD)) "+
				"(pen (qty 3,box10) (cost 15,USD)). "
		);

		assertAnimoResult("get price whouse,paper", "/ (5,USD) (kg).");
		assertAnimoResult("get qty whouse,paper,D2012-01-30", "(10,kg).");

		__(
			"def issueForWhouse " +
				"(D2012-01-30)" +
				"(issue whouse) "+
				"(paper (qty 1,kg))."
		);

		assertAnimoResult("get price whouse,paper", "/ (5,USD) (kg).");
		assertAnimoResult("get qty whouse,paper,D2012-01-31", "(9,kg).");
	}
}
| animotron/core | src/test/java/org/animotron/games/whouse/WHouseTest.java | Java | agpl-3.0 | 5,744 |
<?php
/*
* Copyright (C) 2015 Julien Fastré <julien.fastre@champs-libres.coop>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
namespace Chill\MainBundle\Security;
/**
*
*
* @author Julien Fastré <julien.fastre@champs-libres.coop>
*/
/**
 * Aggregates the roles advertised by all registered role providers.
 *
 * @author Julien Fastré <julien.fastre@champs-libres.coop>
 */
class RoleProvider
{
    /**
     * Registered role providers, filled by the dependency injector.
     *
     * @var ProvideRoleInterface[]
     */
    private $providers = array();

    /**
     * Register an additional role provider.
     *
     * @internal This function is called by the dependency injector: it inject provider
     * @param \Chill\MainBundle\Security\ProvideRoleInterface $provider
     */
    public function addProvider(ProvideRoleInterface $provider)
    {
        $this->providers[] = $provider;
    }

    /**
     * Collect every role declared by the registered providers.
     *
     * @return string[] the roles as string
     */
    public function getRoles()
    {
        $roles = array();
        foreach ($this->providers as $provider) {
            $provided = $provider->getRoles();
            // providers may return NULL to signal "no roles"
            if ($provided !== NULL) {
                $roles = array_merge($roles, $provided);
            }
        }

        return $roles;
    }

    /**
     * Collect every role that is not attached to a scope.
     *
     * @return string[] the roles as string
     */
    public function getRolesWithoutScopes()
    {
        $roles = array();
        foreach ($this->providers as $provider) {
            $provided = $provider->getRolesWithoutScope();
            // providers may return NULL to signal "no roles"
            if ($provided !== NULL) {
                $roles = array_merge($roles, $provided);
            }
        }

        return $roles;
    }
}
| Chill-project/Main | Security/RoleProvider.php | PHP | agpl-3.0 | 2,095 |
/**
*/
package model.impl;
import java.util.Collection;
import model.ConfiguredImage;
import model.ModelPackage;
import model.StandardImage;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.impl.ENotificationImpl;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Standard Image</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* <ul>
* <li>{@link model.impl.StandardImageImpl#getConfiguredImage <em>Configured Image</em>}</li>
* <li>{@link model.impl.StandardImageImpl#getId <em>Id</em>}</li>
* <li>{@link model.impl.StandardImageImpl#getLogin <em>Login</em>}</li>
* <li>{@link model.impl.StandardImageImpl#getJsonDescription <em>Json Description</em>}</li>
* </ul>
* </p>
*
* @generated
*/
public class StandardImageImpl extends ImageImpl implements StandardImage {
    // NOTE: EMF-generated implementation class. Every member tagged @generated
    // is produced by the EMF code generator; hand edits will be overwritten on
    // regeneration unless the tag is changed to "@generated NOT".

    /**
     * The cached value of the '{@link #getConfiguredImage() <em>Configured Image</em>}' containment reference list.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getConfiguredImage()
     * @generated
     * @ordered
     */
    protected EList configuredImage;

    /**
     * The default value of the '{@link #getId() <em>Id</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getId()
     * @generated
     * @ordered
     */
    protected static final String ID_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getId() <em>Id</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getId()
     * @generated
     * @ordered
     */
    protected String id = ID_EDEFAULT;

    /**
     * The default value of the '{@link #getLogin() <em>Login</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getLogin()
     * @generated
     * @ordered
     */
    protected static final String LOGIN_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getLogin() <em>Login</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getLogin()
     * @generated
     * @ordered
     */
    protected String login = LOGIN_EDEFAULT;

    /**
     * The default value of the '{@link #getJsonDescription() <em>Json Description</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getJsonDescription()
     * @generated
     * @ordered
     */
    protected static final String JSON_DESCRIPTION_EDEFAULT = null;

    /**
     * The cached value of the '{@link #getJsonDescription() <em>Json Description</em>}' attribute.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @see #getJsonDescription()
     * @generated
     * @ordered
     */
    protected String jsonDescription = JSON_DESCRIPTION_EDEFAULT;

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected StandardImageImpl() {
        super();
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    protected EClass eStaticClass() {
        return ModelPackage.Literals.STANDARD_IMAGE;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public EList getConfiguredImage() {
        // Lazily create the containment list on first access.
        if (configuredImage == null) {
            configuredImage = new EObjectContainmentEList(ConfiguredImage.class, this, ModelPackage.STANDARD_IMAGE__CONFIGURED_IMAGE);
        }
        return configuredImage;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getId() {
        return id;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setId(String newId) {
        String oldId = id;
        id = newId;
        // Notify EMF adapters (observers) of the attribute change.
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.STANDARD_IMAGE__ID, oldId, id));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getLogin() {
        return login;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setLogin(String newLogin) {
        String oldLogin = login;
        login = newLogin;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.STANDARD_IMAGE__LOGIN, oldLogin, login));
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String getJsonDescription() {
        return jsonDescription;
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void setJsonDescription(String newJsonDescription) {
        String oldJsonDescription = jsonDescription;
        jsonDescription = newJsonDescription;
        if (eNotificationRequired())
            eNotify(new ENotificationImpl(this, Notification.SET, ModelPackage.STANDARD_IMAGE__JSON_DESCRIPTION, oldJsonDescription, jsonDescription));
    }

    /**
     * Reflective removal hook used by the EMF framework for containment lists.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
        switch (featureID) {
            case ModelPackage.STANDARD_IMAGE__CONFIGURED_IMAGE:
                return ((InternalEList)getConfiguredImage()).basicRemove(otherEnd, msgs);
        }
        return super.eInverseRemove(otherEnd, featureID, msgs);
    }

    /**
     * Reflective feature getter used by the EMF framework.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public Object eGet(int featureID, boolean resolve, boolean coreType) {
        switch (featureID) {
            case ModelPackage.STANDARD_IMAGE__CONFIGURED_IMAGE:
                return getConfiguredImage();
            case ModelPackage.STANDARD_IMAGE__ID:
                return getId();
            case ModelPackage.STANDARD_IMAGE__LOGIN:
                return getLogin();
            case ModelPackage.STANDARD_IMAGE__JSON_DESCRIPTION:
                return getJsonDescription();
        }
        return super.eGet(featureID, resolve, coreType);
    }

    /**
     * Reflective feature setter used by the EMF framework.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void eSet(int featureID, Object newValue) {
        switch (featureID) {
            case ModelPackage.STANDARD_IMAGE__CONFIGURED_IMAGE:
                getConfiguredImage().clear();
                getConfiguredImage().addAll((Collection)newValue);
                return;
            case ModelPackage.STANDARD_IMAGE__ID:
                setId((String)newValue);
                return;
            case ModelPackage.STANDARD_IMAGE__LOGIN:
                setLogin((String)newValue);
                return;
            case ModelPackage.STANDARD_IMAGE__JSON_DESCRIPTION:
                setJsonDescription((String)newValue);
                return;
        }
        super.eSet(featureID, newValue);
    }

    /**
     * Reflective reset-to-default used by the EMF framework.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public void eUnset(int featureID) {
        switch (featureID) {
            case ModelPackage.STANDARD_IMAGE__CONFIGURED_IMAGE:
                getConfiguredImage().clear();
                return;
            case ModelPackage.STANDARD_IMAGE__ID:
                setId(ID_EDEFAULT);
                return;
            case ModelPackage.STANDARD_IMAGE__LOGIN:
                setLogin(LOGIN_EDEFAULT);
                return;
            case ModelPackage.STANDARD_IMAGE__JSON_DESCRIPTION:
                setJsonDescription(JSON_DESCRIPTION_EDEFAULT);
                return;
        }
        super.eUnset(featureID);
    }

    /**
     * Reflective "is set to a non-default value" check used by the EMF framework.
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public boolean eIsSet(int featureID) {
        switch (featureID) {
            case ModelPackage.STANDARD_IMAGE__CONFIGURED_IMAGE:
                return configuredImage != null && !configuredImage.isEmpty();
            case ModelPackage.STANDARD_IMAGE__ID:
                return ID_EDEFAULT == null ? id != null : !ID_EDEFAULT.equals(id);
            case ModelPackage.STANDARD_IMAGE__LOGIN:
                return LOGIN_EDEFAULT == null ? login != null : !LOGIN_EDEFAULT.equals(login);
            case ModelPackage.STANDARD_IMAGE__JSON_DESCRIPTION:
                return JSON_DESCRIPTION_EDEFAULT == null ? jsonDescription != null : !JSON_DESCRIPTION_EDEFAULT.equals(jsonDescription);
        }
        return super.eIsSet(featureID);
    }

    /**
     * <!-- begin-user-doc -->
     * <!-- end-user-doc -->
     * @generated
     */
    public String toString() {
        // Proxies delegate to the default representation rather than touching
        // possibly-unresolved attribute values.
        if (eIsProxy()) return super.toString();
        StringBuffer result = new StringBuffer(super.toString());
        result.append(" (id: ");
        result.append(id);
        result.append(", login: ");
        result.append(login);
        result.append(", jsonDescription: ");
        result.append(jsonDescription);
        result.append(')');
        return result.toString();
    }

} //StandardImageImpl
| alexlenk/CloudStandby | org/cloudstandby/model/src/model/impl/StandardImageImpl.java | Java | agpl-3.0 | 8,275 |
<?php // vim: expandtab sw=4 ts=4 sts=4:
/**
* Data input class with filters callback for validation
*
* @version 2.0
* @copyright 2001-2012 Universite catholique de Louvain (UCL)
* @author Frederic Minne <frederic.minne@uclouvain.be>
* @license http://www.fsf.org/licensing/licenses/agpl-3.0.html
* GNU AFFERO GENERAL PUBLIC LICENSE version 3
*/
class Input_Validator implements Input
{
    /**
     * Validator callbacks registered per variable name.
     * @var array
     */
    protected $validators;

    /**
     * Validator callbacks applied to every variable.
     * @var array
     */
    protected $validatorsForAll;

    /**
     * The wrapped (decorated) input source.
     * @var Input
     */
    protected $input;

    /**
     * @param Input $input input source to decorate with validation
     */
    public function __construct( Input $input )
    {
        $this->validators = array();
        $this->validatorsForAll = array();
        $this->input = $input;
    }

    /**
     * Render a callback as a human readable string for error messages.
     *
     * FIX: this helper was referenced by setValidator()/setValidatorForAll()
     * but never defined anywhere in the class (and the class has no parent),
     * so reaching the error path caused a fatal "undefined method" error.
     *
     * @param mixed $callback the callback to describe
     * @return string
     */
    protected function getFilterCallbackString( $callback )
    {
        if ( is_array( $callback ) )
        {
            $target = is_object( $callback[0] )
                ? get_class( $callback[0] )
                : (string) $callback[0];

            return $target . '::' . $callback[1];
        }

        if ( is_string( $callback ) )
        {
            return $callback;
        }

        // Closures and other exotic callables: fall back to a dump.
        return print_r( $callback, true );
    }

    /**
     * Set a validator for the given variable
     * @param string $name variable name
     * @param Validator $validator validator object
     * @throws Input_Exception if the validator callback is not callable
     */
    public function setValidator( $name, Validator $validator )
    {
        if ( ! array_key_exists( $name, $this->validators ) )
        {
            $this->validators[$name] = array();
        }

        $validatorCallback = array( $validator, 'isValid' );

        if ( ! is_callable( $validatorCallback ) )
        {
            throw new Input_Exception ("Invalid validator callback : "
                . $this->getFilterCallbackString($validatorCallback));
        }

        $this->validators[$name][] = $validatorCallback;

        return $this;
    }

    /**
     * Set a validator applied to all variables
     * @param Validator $validator validator object
     * @throws Input_Exception if the validator callback is not callable
     */
    public function setValidatorForAll( Validator $validator )
    {
        $validatorCallback = array( $validator, 'isValid' );

        if ( ! is_callable( $validatorCallback ) )
        {
            throw new Input_Exception ("Invalid validator callback : "
                . $this->getFilterCallbackString($validatorCallback));
        }

        $this->validatorsForAll[] = $validatorCallback;

        return $this;
    }

    /**
     * @see Input
     * @throws Input_Exception if $value does not pass the validator
     */
    public function get( $name, $default = null )
    {
        $tainted = $this->input->get( $name, $default );

        // Values equal to the default (loose comparison, as in the wrapped
        // Input contract) are returned as-is without validation.
        if ( ( is_null( $default ) && is_null( $tainted ) )
            || $tainted == $default )
        {
            return $default;
        }
        else
        {
            return $this->validate( $name, $tainted );
        }
    }

    /**
     * @see Input
     * @throws Input_Exception if $value does not pass the validator
     */
    public function getMandatory( $name )
    {
        $tainted = $this->input->getMandatory( $name );

        return $this->validate( $name, $tainted );
    }

    /**
     * Run all matching validators against a value.
     *
     * @param string $name variable name
     * @param mixed $tainted value
     * @return mixed the value, unchanged, when every validator accepts it
     * @throws Validator_Exception if $value does not pass the
     *  filter for $name
     */
    public function validate( $name, $tainted )
    {
        // validators for all variables if any
        if ( !empty ($this->validatorsForAll ) )
        {
            foreach ( $this->validatorsForAll as $validatorForAllCallback )
            {
                if ( ! call_user_func( $validatorForAllCallback, $tainted ) )
                {
                    throw new Validator_Exception(
                        get_class( $validatorForAllCallback[0] )
                        . " : {$name} does not pass the validator !" );
                }
            }
        }

        // validators for the requested variable
        if ( array_key_exists( $name, $this->validators ) )
        {
            foreach ( $this->validators[$name] as $validatorCallback )
            {
                if ( ! call_user_func( $validatorCallback, $tainted ) )
                {
                    throw new Validator_Exception(
                        get_class( $validatorCallback[0] )
                        . " : {$name} does not pass the validator !" );
                }
            }
        }

        return $tainted;
    }
}
| zefredz/form-ipm | include/classes/Input/Validator.php | PHP | agpl-3.0 | 4,324 |
package florian_haas.lucas.web;
import java.util.*;
import javax.ejb.EJB;
import javax.faces.view.ViewScoped;
import javax.inject.Named;
import javax.validation.constraints.*;
import org.hibernate.validator.constraints.NotBlank;
import org.primefaces.model.DualListModel;
import florian_haas.lucas.business.*;
import florian_haas.lucas.model.*;
import florian_haas.lucas.persistence.*;
import florian_haas.lucas.security.EnumPermission;
import florian_haas.lucas.util.Utils;
import florian_haas.lucas.validation.*;
import florian_haas.lucas.web.converter.*;
import florian_haas.lucas.web.util.WebUtils;
/**
 * JSF backing bean for the login-user administration screen. Holds the
 * dynamic-search form state (value / use-flag / comparator triples per
 * criterion) and the state of the create-default, create-bound, edit and
 * change-password dialogs.
 */
@Named
@ViewScoped
public class LoginUserBean extends BaseBean<ReadOnlyLoginUser> {

    public LoginUserBean() {
        // 4 = number of search criteria exposed by this bean (id, username,
        // bound user, roles) — passed to BaseBean; confirm against BaseBean's
        // constructor contract.
        super(BASE_NAME, 4);
    }

    public static final String BASE_NAME = "loginUser";

    private static final long serialVersionUID = -3674051794597830546L;

    @EJB
    private LoginBeanLocal loginBean;

    @EJB
    private LoginUserRoleBeanLocal loginUserRoleBean;

    // NOTE(review): entityBean and userBean are injected but not referenced in
    // this class as shown — possibly used from the facelet via EL; confirm
    // before removing.
    @EJB
    private EntityBeanLocal entityBean;

    @EJB
    private UserBeanLocal userBean;

    // ---- search criterion: login user id ----
    @NotNull
    @Min(0)
    private Long searchLoginUserId = 0l;
    @NotNull
    private Boolean useSearchLoginUserId = Boolean.FALSE;
    @QueryComparator(category = EnumQueryComparatorCategory.NUMERIC)
    private EnumQueryComparator searchLoginUserIdComparator = EnumQueryComparator.EQUAL;

    // ---- search criterion: username ----
    @NotNull
    private String searchLoginUserUsername = "";
    @NotNull
    private Boolean useSearchLoginUserUsername = Boolean.FALSE;
    @QueryComparator(category = EnumQueryComparatorCategory.TEXT)
    private EnumQueryComparator searchLoginUserUsernameComparator = EnumQueryComparator.EQUAL;

    // ---- search criterion: bound user ----
    private ReadOnlyUser searchLoginUserUser = null;
    @NotNull
    private Boolean useSearchLoginUserUserId = Boolean.FALSE;
    @QueryComparator(category = EnumQueryComparatorCategory.LOGIC)
    private EnumQueryComparator searchLoginUserUserIdComparator = EnumQueryComparator.EQUAL;

    // ---- search criterion: role ids ----
    private List<@TypeNotNull @TypeMin(0) Long> searchLoginUserRoles = new ArrayList<>();
    @NotNull
    private Boolean useSearchLoginUserRoles = Boolean.FALSE;
    @QueryComparator(category = EnumQueryComparatorCategory.LOGIC)
    private EnumQueryComparator searchLoginUserRolesComparator = EnumQueryComparator.EQUAL;

    @Override
    public EnumPermission getFindDynamicPermission() {
        return EnumPermission.LOGIN_USER_FIND_DYNAMIC;
    }

    @Override
    public EnumPermission getPrintPermission() {
        return EnumPermission.LOGIN_USER_PRINT;
    }

    @Override
    public EnumPermission getExportPermission() {
        return EnumPermission.LOGIN_USER_EXPORT;
    }

    /**
     * Runs the dynamic search with the current criterion values, use-flags and
     * comparators.
     */
    @Override
    protected List<? extends ReadOnlyLoginUser> searchEntities() {
        return loginBean.findLoginUsers(searchLoginUserId, searchLoginUserUsername, searchLoginUserUser != null ? searchLoginUserUser.getId() : null,
                searchLoginUserRoles, useSearchLoginUserId, useSearchLoginUserUsername, useSearchLoginUserUserId, useSearchLoginUserRoles,
                searchLoginUserIdComparator, searchLoginUserUsernameComparator, searchLoginUserUserIdComparator, searchLoginUserRolesComparator);
    }

    @Override
    protected ReadOnlyLoginUser entityGetter(Long entityId) {
        return loginBean.findLoginUserById(entityId);
    }

    public Long getSearchLoginUserId() {
        return searchLoginUserId;
    }

    public void setSearchLoginUserId(Long searchLoginUserId) {
        this.searchLoginUserId = searchLoginUserId;
    }

    public Boolean getUseSearchLoginUserId() {
        return useSearchLoginUserId;
    }

    public void setUseSearchLoginUserId(Boolean useSearchLoginUserId) {
        this.useSearchLoginUserId = useSearchLoginUserId;
    }

    public EnumQueryComparator getSearchLoginUserIdComparator() {
        return searchLoginUserIdComparator;
    }

    public void setSearchLoginUserIdComparator(EnumQueryComparator searchLoginUserIdComparator) {
        this.searchLoginUserIdComparator = searchLoginUserIdComparator;
    }

    public String getSearchLoginUserUsername() {
        return searchLoginUserUsername;
    }

    public void setSearchLoginUserUsername(String searchLoginUserUsername) {
        this.searchLoginUserUsername = searchLoginUserUsername;
    }

    public Boolean getUseSearchLoginUserUsername() {
        return useSearchLoginUserUsername;
    }

    public void setUseSearchLoginUserUsername(Boolean useSearchLoginUserUsername) {
        this.useSearchLoginUserUsername = useSearchLoginUserUsername;
    }

    public EnumQueryComparator getSearchLoginUserUsernameComparator() {
        return searchLoginUserUsernameComparator;
    }

    public void setSearchLoginUserUsernameComparator(EnumQueryComparator searchLoginUserUsernameComparator) {
        this.searchLoginUserUsernameComparator = searchLoginUserUsernameComparator;
    }

    public ReadOnlyUser getSearchLoginUserUser() {
        return searchLoginUserUser;
    }

    public void setSearchLoginUserUser(ReadOnlyUser searchLoginUserUser) {
        this.searchLoginUserUser = searchLoginUserUser;
    }

    public Boolean getUseSearchLoginUserUserId() {
        return useSearchLoginUserUserId;
    }

    public void setUseSearchLoginUserUserId(Boolean useSearchLoginUserUserId) {
        this.useSearchLoginUserUserId = useSearchLoginUserUserId;
    }

    public EnumQueryComparator getSearchLoginUserUserIdComparator() {
        return searchLoginUserUserIdComparator;
    }

    public void setSearchLoginUserUserIdComparator(EnumQueryComparator searchLoginUserUserIdComparator) {
        this.searchLoginUserUserIdComparator = searchLoginUserUserIdComparator;
    }

    public List<@TypeNotNull @TypeMin(0) Long> getSearchLoginUserRoles() {
        return searchLoginUserRoles;
    }

    public void setSearchLoginUserRoles(List<@TypeNotNull @TypeMin(0) Long> searchLoginUserRoles) {
        this.searchLoginUserRoles = searchLoginUserRoles;
    }

    public Boolean getUseSearchLoginUserRoles() {
        return useSearchLoginUserRoles;
    }

    public void setUseSearchLoginUserRoles(Boolean useSearchLoginUserRoles) {
        this.useSearchLoginUserRoles = useSearchLoginUserRoles;
    }

    public EnumQueryComparator getSearchLoginUserRolesComparator() {
        return searchLoginUserRolesComparator;
    }

    public void setSearchLoginUserRolesComparator(EnumQueryComparator searchLoginUserRolesComparator) {
        this.searchLoginUserRolesComparator = searchLoginUserRolesComparator;
    }

    /*
     * -------------------- Create Default Login User Dialog Start --------------------
     */

    @NotBlank
    private String createDefaultLoginUserUsername = null;

    @ValidUnhashedPassword
    private char[] createDefaultLoginUserPassword = null;

    // PickList model: source = all roles, target = roles to assign.
    private DualListModel<ReadOnlyLoginUserRole> createDefaultLoginUserRolesListModel;

    public String getCreateDefaultLoginUserUsername() {
        return createDefaultLoginUserUsername;
    }

    public void setCreateDefaultLoginUserUsername(String createDefaultLoginUserUsername) {
        this.createDefaultLoginUserUsername = createDefaultLoginUserUsername;
    }

    public char[] getCreateDefaultLoginUserPassword() {
        return createDefaultLoginUserPassword;
    }

    public void setCreateDefaultLoginUserPassword(char[] createDefaultLoginUserPassword) {
        this.createDefaultLoginUserPassword = createDefaultLoginUserPassword;
    }

    public DualListModel<ReadOnlyLoginUserRole> getCreateDefaultLoginUserRolesListModel() {
        return createDefaultLoginUserRolesListModel;
    }

    public void setCreateDefaultLoginUserRolesListModel(DualListModel<ReadOnlyLoginUserRole> createDefaultLoginUserRolesListModel) {
        this.createDefaultLoginUserRolesListModel = createDefaultLoginUserRolesListModel;
    }

    /** Resets the create-default-login-user dialog to a pristine state. */
    public void initCreateDefaultLoginUserDialog() {
        createDefaultLoginUserUsername = null;
        createDefaultLoginUserPassword = null;
        createDefaultLoginUserRolesListModel = new DualListModel<>(new ArrayList<>(loginUserRoleBean.findAll()),
                new ArrayList<ReadOnlyLoginUserRole>());
    }

    /** Creates a login user that is not bound to a physical user. */
    public void createDefaultLoginUser() {
        WebUtils.executeTask(params -> {
            List<Long> ids = new ArrayList<>();
            createDefaultLoginUserRolesListModel.getTarget().forEach(role -> ids.add(role.getId()));
            params.add(WebUtils.getAsString(
                    loginBean.findLoginUserById(loginBean.newLoginUser(createDefaultLoginUserUsername, createDefaultLoginUserPassword, ids)),
                    LoginUserConverter.CONVERTER_ID));
            // Scrub the cleartext password from memory; the fill character 'c'
            // is arbitrary.
            Arrays.fill(createDefaultLoginUserPassword, 'c');
            return true;
        }, "lucas.application.loginUserScreen.createDefaultLoginUser", (exception, params) -> {
            return WebUtils.getTranslatedMessage("lucas.application.loginUserScreen.createDefaultLoginUser.notUniqueUsername",
                    params.toArray(new Object[params.size()]));
        }, Utils.asList(createDefaultLoginUserUsername));
    }

    /*
     * -------------------- Create Default Login User Dialog End --------------------
     */

    /*
     * -------------------- Create Bound Login User Dialog Start --------------------
     */

    @NotNull
    private ReadOnlyUser createBoundLoginUserBoundUser = null;

    @ValidUnhashedPassword
    private char[] createBoundLoginUserPassword = null;

    private DualListModel<ReadOnlyLoginUserRole> createBoundLoginUserRolesListModel;

    public ReadOnlyUser getCreateBoundLoginUserBoundUser() {
        return createBoundLoginUserBoundUser;
    }

    public void setCreateBoundLoginUserBoundUser(ReadOnlyUser createBoundLoginUserBoundUser) {
        this.createBoundLoginUserBoundUser = createBoundLoginUserBoundUser;
    }

    public char[] getCreateBoundLoginUserPassword() {
        return createBoundLoginUserPassword;
    }

    public void setCreateBoundLoginUserPassword(char[] createBoundLoginUserPassword) {
        this.createBoundLoginUserPassword = createBoundLoginUserPassword;
    }

    public DualListModel<ReadOnlyLoginUserRole> getCreateBoundLoginUserRolesListModel() {
        return createBoundLoginUserRolesListModel;
    }

    public void setCreateBoundLoginUserRolesListModel(DualListModel<ReadOnlyLoginUserRole> createBoundLoginUserRolesListModel) {
        this.createBoundLoginUserRolesListModel = createBoundLoginUserRolesListModel;
    }

    /** Resets the create-bound-login-user dialog to a pristine state. */
    public void initCreateBoundLoginUserDialog() {
        createBoundLoginUserBoundUser = null;
        createBoundLoginUserPassword = null;
        createBoundLoginUserRolesListModel = new DualListModel<>(new ArrayList<>(loginUserRoleBean.findAll()),
                new ArrayList<ReadOnlyLoginUserRole>());
    }

    /** Creates a login user bound to an existing physical user. */
    public void createBoundLoginUser() {
        WebUtils.executeTask(params -> {
            List<Long> ids = new ArrayList<>();
            createBoundLoginUserRolesListModel.getTarget().forEach(role -> ids.add(role.getId()));
            params.add(WebUtils.getAsString(
                    loginBean.findLoginUserById(loginBean.newLoginUser(createBoundLoginUserBoundUser.getId(), createBoundLoginUserPassword, ids)),
                    LoginUserConverter.CONVERTER_ID));
            // Scrub the cleartext password from memory after use.
            Arrays.fill(createBoundLoginUserPassword, 'c');
            return true;
        }, "lucas.application.loginUserScreen.createBoundLoginUser", (exception, params) -> {
            // Distinguish "user already has a login" from "username taken" via
            // the exception marker.
            return WebUtils.getTranslatedMessage(
                    (exception.getMark().equals(florian_haas.lucas.business.LoginBeanLocal.USER_NOT_UNIQUE_EXCEPTION_MARKER)
                            ? "lucas.application.loginUserScreen.createBoundLoginUser.notUniqueUser"
                            : "lucas.application.loginUserScreen.createBoundLoginUser.notUniqueUsername"),
                    params.toArray(new Object[params.size()]));
        }, Utils.asList(WebUtils.getAsString(createBoundLoginUserBoundUser, UserConverter.CONVERTER_ID)));
    }

    /*
     * -------------------- Create Bound Login User Dialog End --------------------
     */

    /*
     * -------------------- Edit Login User Dialog Start --------------------
     */

    private ReadOnlyLoginUser editLoginUserSelectedUser = null;

    @NotBlank
    private String editLoginUserUsername = null;

    private DualListModel<ReadOnlyLoginUserRole> editLoginUserRolesListModel;

    public ReadOnlyLoginUser getEditLoginUserSelectedUser() {
        return this.editLoginUserSelectedUser;
    }

    public String getEditLoginUserUsername() {
        return this.editLoginUserUsername;
    }

    public void setEditLoginUserUsername(String editLoginUserUsername) {
        this.editLoginUserUsername = editLoginUserUsername;
    }

    public DualListModel<ReadOnlyLoginUserRole> getEditLoginUserRolesListModel() {
        return this.editLoginUserRolesListModel;
    }

    public void setEditLoginUserRolesListModel(DualListModel<ReadOnlyLoginUserRole> editLoginUserRolesListModel) {
        this.editLoginUserRolesListModel = editLoginUserRolesListModel;
    }

    // A "default" login user is one without a bound physical user.
    public Boolean getEditLoginUserIsDefaultLoginUser() {
        return editLoginUserSelectedUser.getUser() == null;
    }

    /**
     * Prepares the edit dialog from the first selected entity. The role
     * pick-list is only populated when the current user holds all role-related
     * permissions.
     */
    public void initEditLoginUserDialog() {
        if (!selectedEntities.isEmpty()) {
            editLoginUserSelectedUser = selectedEntities.get(0);
            editLoginUserUsername = editLoginUserSelectedUser.getUsername();
            if (WebUtils.isPermitted(EnumPermission.LOGIN_USER_GET_ROLES, EnumPermission.LOGIN_USER_ADD_ROLE,
                    EnumPermission.LOGIN_USER_REMOVE_ROLE)) {
                List<ReadOnlyLoginUserRole> rolesOfUser = new ArrayList<>(editLoginUserSelectedUser.getRoles());
                List<ReadOnlyLoginUserRole> roles = new ArrayList<>();
                List<? extends ReadOnlyLoginUserRole> allRoles = loginUserRoleBean.findAll();
                allRoles.forEach(role -> {
                    if (!rolesOfUser.contains(role)) {
                        roles.add(role);
                    }
                });
                editLoginUserRolesListModel = new DualListModel<>(roles, rolesOfUser);
            } else {
                editLoginUserRolesListModel = new DualListModel<>();
            }
        }
    }

    /**
     * Applies the edit dialog: optionally renames a default login user, then
     * diffs the pick-list target against the persisted roles, adding and
     * removing as needed.
     */
    public void editLoginUser() {
        WebUtils.executeTask(params -> {
            Long id = editLoginUserSelectedUser.getId();
            if (getEditLoginUserIsDefaultLoginUser() && WebUtils.isPermitted(EnumPermission.LOGIN_USER_CHANGE_USERNAME)) {
                loginBean.changeUsername(id, editLoginUserUsername);
            }
            if (WebUtils.isPermitted(EnumPermission.LOGIN_USER_GET_ROLES, EnumPermission.LOGIN_USER_ADD_ROLE,
                    EnumPermission.LOGIN_USER_REMOVE_ROLE)) {
                List<ReadOnlyLoginUserRole> roles = new ArrayList<>(loginBean.getLoginUserRoles(id));
                editLoginUserRolesListModel.getTarget().forEach(role -> {
                    if (!roles.contains(role)) {
                        loginBean.addLoginUserRoleToUser(id, role.getId());
                    }
                });
                roles.forEach(role -> {
                    if (!editLoginUserRolesListModel.getTarget().contains(role)) {
                        loginBean.removeLoginUserRoleFromUser(id, role.getId());
                    }
                });
            }
            ReadOnlyLoginUser newUser = loginBean.findLoginUserById(id);
            params.add(WebUtils.getAsString(newUser, LoginUserConverter.CONVERTER_ID));
            WebUtils.refreshEntities(ReadOnlyLoginUser.class, searchResults, selectedEntities, newUser, loginBean::findLoginUserById, true);
            return true;
        }, "lucas.application.loginUserScreen.editLoginUser", (exception, params) -> {
            // NOTE(review): this reuses the createBoundLoginUser message key;
            // looks like a copy-paste — confirm whether an
            // editLoginUser-specific key exists in the message bundle.
            return WebUtils.getTranslatedMessage("lucas.application.loginUserScreen.createBoundLoginUser.notUniqueUsername",
                    params.toArray(new Object[params.size()]));
        }, Utils.asList(editLoginUserUsername));
    }

    /*
     * -------------------- Edit Login User Dialog End --------------------
     */

    /*
     * -------------------- New Password Dialog Start --------------------
     */

    private ReadOnlyLoginUser changePasswordDialogSelectedUser = null;

    @ValidUnhashedPassword
    private char[] changePasswordDialogPassword = null;

    public char[] getChangePasswordDialogPassword() {
        return this.changePasswordDialogPassword;
    }

    public void setChangePasswordDialogPassword(char[] changePasswordDialogPassword) {
        this.changePasswordDialogPassword = changePasswordDialogPassword;
    }

    /** Remembers the first selected entity as the password-change target. */
    public void initChangePasswordDialog() {
        if (!selectedEntities.isEmpty()) {
            changePasswordDialogSelectedUser = selectedEntities.get(0);
        }
    }

    public ReadOnlyLoginUser getChangePasswordDialogSelectedUser() {
        return this.changePasswordDialogSelectedUser;
    }

    /** Sets a new password for the selected user, then scrubs it from memory. */
    public void changePassword() {
        WebUtils.executeTask(params -> {
            loginBean.newPassword(changePasswordDialogSelectedUser.getId(), changePasswordDialogPassword);
            Arrays.fill(changePasswordDialogPassword, 'c');
            return true;
        }, "lucas.application.loginUserScreen.changePassword",
                Utils.asList(WebUtils.getAsString(changePasswordDialogSelectedUser, LoginUserConverter.CONVERTER_ID)));
    }

    /*
     * -------------------- New Password Dialog End --------------------
     */

}
| Listopia-Official/listopia-user-and-company-administration-system | lucas-web/src/florian_haas/lucas/web/LoginUserBean.java | Java | agpl-3.0 | 16,146 |
class SubmissionProcessor < ApplicationProcessor
  subscribes_to :submission

  # Handles a message from the :submission queue: re-establishes any stale
  # connections, then decodes the JSON payload.
  #
  # FIX: the decoded payload was previously assigned to an unused local
  # (`json`); the dead assignment is removed. The decode call itself is kept
  # so malformed payloads still raise as before.
  # NOTE(review): nothing is done with the decoded payload — confirm whether
  # follow-up processing was intended here.
  def on_message(message)
    verify_connections!
    ActiveSupport::JSON.decode(message)
  end
end | water/mainline | app/processors/submission_process.rb | Ruby | agpl-3.0 | 183
/*
* Copyright (c) 2012 - 2020 Splice Machine, Inc.
*
* This file is part of Splice Machine.
* Splice Machine is free software: you can redistribute it and/or modify it under the terms of the
* GNU Affero General Public License as published by the Free Software Foundation, either
* version 3, or (at your option) any later version.
* Splice Machine is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
* without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
* See the GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License along with Splice Machine.
* If not, see <http://www.gnu.org/licenses/>.
*/
package com.splicemachine.derby.stream.output.insert;
import com.splicemachine.SpliceKryoRegistry;
import com.splicemachine.EngineDriver;
import com.splicemachine.db.iapi.error.StandardException;
import com.splicemachine.db.iapi.sql.execute.ExecRow;
import com.splicemachine.db.iapi.types.RowLocation;
import com.splicemachine.derby.iapi.sql.execute.SpliceOperation;
import com.splicemachine.derby.impl.sql.execute.operations.InsertOperation;
import com.splicemachine.derby.impl.sql.execute.operations.TriggerHandler;
import com.splicemachine.derby.impl.sql.execute.sequence.SpliceSequence;
import com.splicemachine.derby.stream.iapi.OperationContext;
import com.splicemachine.derby.stream.output.PermissiveInsertWriteConfiguration;
import com.splicemachine.derby.stream.output.AbstractPipelineWriter;
import com.splicemachine.pipeline.config.RollforwardWriteConfiguration;
import com.splicemachine.pipeline.config.UnsafeWriteConfiguration;
import com.splicemachine.derby.utils.marshall.*;
import com.splicemachine.derby.utils.marshall.dvd.DescriptorSerializer;
import com.splicemachine.derby.utils.marshall.dvd.VersionedSerializers;
import com.splicemachine.kvpair.KVPair;
import com.splicemachine.pipeline.Exceptions;
import com.splicemachine.pipeline.config.WriteConfiguration;
import com.splicemachine.primitives.Bytes;
import com.splicemachine.si.api.txn.TxnView;
import com.splicemachine.si.impl.driver.SIDriver;
import com.splicemachine.storage.Partition;
import com.splicemachine.utils.IntArrays;
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings;
import java.io.IOException;
import java.util.Iterator;
/**
* Created by jleach on 5/5/15.
*/
public class InsertPipelineWriter extends AbstractPipelineWriter<ExecRow>{
protected int[] pkCols;
protected RowLocation[] autoIncrementRowLocationArray;
protected KVPair.Type dataType;
protected SpliceSequence[] spliceSequences;
protected PairEncoder encoder;
protected InsertOperation insertOperation;
protected boolean isUpsert;
private Partition table;
@Override
public String toString() {
    // Describe the writer's configuration for logs and debugging.
    StringBuilder description = new StringBuilder("InsertPipelineWriter {insertOperation=");
    description.append(insertOperation == null ? "null" : insertOperation);
    description.append(", conglom=").append(heapConglom);
    description.append(", isUpsert=").append(Boolean.toString(isUpsert));
    description.append(", loadReplaceMode=").append(Boolean.toString(loadReplaceMode));
    description.append(", rollforward=").append(Boolean.toString(rollforward));
    description.append("}");
    return description.toString();
}
@SuppressFBWarnings(value="EI_EXPOSE_REP2", justification="Intentional")
public InsertPipelineWriter(int[] pkCols,
                            String tableVersion,
                            ExecRow execRowDefinition,
                            RowLocation[] autoIncrementRowLocationArray,
                            SpliceSequence[] spliceSequences,
                            long heapConglom,
                            long tempConglomID,
                            TxnView txn,
                            byte[] token, OperationContext operationContext,
                            boolean isUpsert, boolean loadReplaceMode) {
    // Delegate shared pipeline state to the base writer first.
    super(txn,token,heapConglom,tempConglomID,tableVersion, execRowDefinition, operationContext, loadReplaceMode);
    assert txn != null : "txn not supplied";
    this.pkCols = pkCols;
    this.autoIncrementRowLocationArray = autoIncrementRowLocationArray;
    this.spliceSequences = spliceSequences;
    this.destinationTable = Bytes.toBytes(Long.toString(heapConglom));
    this.isUpsert = isUpsert;
    // The KV write type drives conflict handling downstream.
    if (isUpsert) {
        this.dataType = KVPair.Type.UPSERT;
    } else {
        this.dataType = KVPair.Type.INSERT;
    }
    if (operationContext != null) {
        this.insertOperation = (InsertOperation) operationContext.getOperation();
    }
}
/** Opens the writer using the insert operation's trigger handler, if any. */
public void open() throws StandardException {
    TriggerHandler triggers = insertOperation == null ? null : insertOperation.getTriggerHandler();
    open(triggers, insertOperation, loadReplaceMode);
}
/**
 * Opens the write pipeline: builds the row encoder, assembles the write
 * configuration (decorator chain), resolves the destination table and
 * allocates the write buffer.
 *
 * Decorator order matters: permissive-insert wrapping, then unsafe
 * (skip-conflict/skip-WAL) wrapping, then rollforward wrapping.
 */
public void open(TriggerHandler triggerHandler, SpliceOperation operation, boolean loadReplaceMode) throws StandardException {
    super.open(triggerHandler, operation, loadReplaceMode);
    try {
        encoder = new PairEncoder(getKeyEncoder(), getRowHash(), dataType);
        WriteConfiguration writeConfiguration = writeCoordinator.newDefaultWriteConfiguration();
        if (insertOperation != null) {
            // Permissive mode records bad rows instead of failing the write.
            if (operationContext != null && operationContext.isPermissive())
                writeConfiguration = new PermissiveInsertWriteConfiguration(writeConfiguration,
                        operationContext,
                        encoder, execRowDefinition);
            if (insertOperation.skipConflictDetection() || insertOperation.skipWAL()) {
                writeConfiguration = new UnsafeWriteConfiguration(writeConfiguration, insertOperation.skipConflictDetection(), insertOperation.skipWAL());
            }
        }
        if(rollforward)
            writeConfiguration = new RollforwardWriteConfiguration(writeConfiguration);
        writeConfiguration.setRecordingContext(operationContext);
        writeConfiguration.setLoadReplaceMode(loadReplaceMode); // only necessary for FK
        this.table =SIDriver.driver().getTableFactory().getTable(Long.toString(heapConglom));
        writeBuffer = writeCoordinator.writeBuffer(table,txn,token,writeConfiguration);
        // Let the operation flush/close this writer later.
        if (insertOperation != null)
            insertOperation.tableWriter = this;
        flushCallback = triggerHandler == null ? null : TriggerHandler.flushCallback(writeBuffer);
    }catch(Exception e){
        throw Exceptions.parseException(e);
    }
}
/**
 * Encodes one row and adds it to the write buffer, firing row triggers
 * around the write. In permissive mode a failing row is recorded as a bad
 * record instead of aborting the statement.
 */
public void insert(ExecRow execRow) throws StandardException {
    try {
        // Once the context has failed, silently drop any further rows.
        if (operationContext!=null && operationContext.isFailed())
            return;
        beforeRow(execRow);
        KVPair encode = encoder.encode(execRow);
        writeBuffer.add(encode);
        if (triggerRowsEncoder != null) {
            // Keep a separately encoded copy for the triggering result set.
            KVPair encodeTriggerRow = triggerRowsEncoder.encode(execRow);
            addRowToTriggeringResultSet(execRow, encodeTriggerRow);
        }
        TriggerHandler.fireAfterRowTriggers(triggerHandler, execRow, flushCallback);
    } catch (Exception e) {
        if (operationContext!=null && operationContext.isPermissive()) {
            operationContext.recordBadRecord(e.getLocalizedMessage() + execRow.toString(), e);
            return;
        }
        throw Exceptions.parseException(e);
    }
}
/**
 * Inserts every row produced by the iterator, delegating each one to
 * {@link #insert(ExecRow)}.
 */
public void insert(Iterator<ExecRow> execRows) throws StandardException {
    for (Iterator<ExecRow> remaining = execRows; remaining.hasNext(); ) {
        insert(remaining.next());
    }
}
// write() is a synonym for insert() for this pipeline writer.
public void write(ExecRow execRow) throws StandardException {
    insert(execRow);
}
// Bulk variant of write(); simply delegates to insert(Iterator).
public void write(Iterator<ExecRow> execRows) throws StandardException {
    insert(execRows);
}
/**
 * Builds the row-key encoder for the target table. Tables without a primary
 * key get a random salted prefix (spreading writes across regions) and no
 * key data; tables with a primary key encode the PK columns as the key.
 */
public KeyEncoder getKeyEncoder() throws StandardException {
    HashPrefix prefix;
    DataHash dataHash;
    KeyPostfix postfix = NoOpPostfix.INSTANCE;
    if(pkCols==null){
        prefix = new SaltedPrefix(EngineDriver.driver().newUUIDGenerator(100));
        dataHash = NoOpDataHash.INSTANCE;
    }else{
        // pkCols holds 1-based column positions; convert to 0-based.
        int[] keyColumns = new int[pkCols.length];
        for(int i=0;i<keyColumns.length;i++){
            keyColumns[i] = pkCols[i] -1;
        }
        prefix = NoOpPrefix.INSTANCE;
        DescriptorSerializer[] serializers = VersionedSerializers.forVersion(tableVersion, true).getSerializers(execRowDefinition);
        dataHash = BareKeyHash.encoder(keyColumns,null, SpliceKryoRegistry.getInstance(),serializers);
    }
    return new KeyEncoder(prefix,dataHash,postfix);
}
/**
 * Builds the hash that encodes the row value: every column except those
 * already stored in the row key (the primary-key columns).
 */
public DataHash getRowHash() throws StandardException {
    //get all columns that are being set
    int[] columns = getEncodingColumns(execRowDefinition.nColumns(),pkCols);
    DescriptorSerializer[] serializers = VersionedSerializers.forVersion(tableVersion,true).getSerializers(execRowDefinition);
    return new EntryDataHash(columns,null,serializers);
}
/**
 * Closes the writer (flushing via the superclass) and releases the
 * underlying table handle.
 */
@Override
public void close() throws StandardException{
    super.close();
    if(table!=null){
        try{
            table.close();
        }catch(IOException e){
            throw Exceptions.parseException(e);
        }
    }
}
/**
 * Builds the list of 0-based column positions to encode in the row value.
 * Primary-key columns are marked with -1 (skipped) because they are already
 * encoded in the row key and storing them again would waste space.
 */
public static int[] getEncodingColumns(int n, int[] pkCols) {
    int[] encodingColumns = IntArrays.count(n);
    if (pkCols != null) {
        // pkCols positions are 1-based; blank out each PK column slot.
        for (int i = 0; i < pkCols.length; i++) {
            encodingColumns[pkCols[i] - 1] = -1;
        }
    }
    return encodingColumns;
}
}
| splicemachine/spliceengine | splice_machine/src/main/java/com/splicemachine/derby/stream/output/insert/InsertPipelineWriter.java | Java | agpl-3.0 | 9,600 |
/*
Classe gerada automaticamente pelo MSTech Code Creator
*/
namespace MSTech.GestaoEscolar.Entities
{
using System;
using MSTech.GestaoEscolar.Entities.Abstracts;
using MSTech.Validation;
/// <summary>
/// Description: .
/// </summary>
[Serializable]
public class ORC_NivelAprendizado : AbstractORC_NivelAprendizado
{
    /// <summary>
    /// Learning-level description. Required; MSValidRange(200) presumably
    /// caps the length at 200 characters — confirm against the validator.
    /// </summary>
    [MSValidRange(200)]
    [MSNotNullOrEmpty("Descrição do nível de aprendizado é obrigatório.")]
    public override string nap_descricao { get; set; }

    /// <summary>
    /// Learning-level abbreviation. Required; MSValidRange(10) presumably
    /// caps the length at 10 characters.
    /// </summary>
    [MSValidRange(10)]
    [MSNotNullOrEmpty("Sigla do nível de aprendizado é obrigatório.")]
    public override string nap_sigla { get; set; }

    /// <summary>Creation timestamp of the record.</summary>
    public override DateTime nap_dataCriacao { get; set; }

    /// <summary>Last modification timestamp of the record.</summary>
    public override DateTime nap_dataAlteracao { get; set; }
}
} | prefeiturasp/SME-SGP | Src/MSTech.GestaoEscolar.Entities/ORC_NivelAprendizado.cs | C# | agpl-3.0 | 797 |
import unittest
from labonneboite.common import locations
class CityLocationTest(unittest.TestCase):
    """Checks CityLocation name/location resolution for zipcode+slug input."""

    def test_hyphenated_city_name(self):
        city = locations.CityLocation('19100', 'brive-la-gaillarde')
        self.assertEqual(city.name, 'Brive-la-Gaillarde')

    def test_unicode_city_name(self):
        # An unaccented slug must still resolve the accented city name.
        city = locations.CityLocation('05100', 'briancon')
        self.assertEqual(city.name, 'Briançon')

    def test_no_slug(self):
        # The slug is optional: the zipcode alone must resolve the city.
        city = locations.CityLocation('05100')
        self.assertEqual(city.name, 'Briançon')

    def test_accented_city_name(self):
        city = locations.CityLocation('05100', 'Cervières')
        self.assertEqual(city.name, 'Cervières')
        # Argument order normalized to (actual, expected) for consistency
        # with the other assertions in this class.
        self.assertEqual(city.location.longitude, 6.756570896485574)
        self.assertEqual(city.location.latitude, 44.86053112144938)
| StartupsPoleEmploi/labonneboite | labonneboite/tests/app/test_locations.py | Python | agpl-3.0 | 833 |
#
# Bold - more than just blogging.
# Copyright (C) 2015-2016 Jens Krämer <jk@jkraemer.net>
#
# This file is part of Bold.
#
# Bold is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# Bold is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Bold. If not, see <http://www.gnu.org/licenses/>.
#
require 'builder'
class RpcPingJob < ActiveJob::Base
  queue_as :default

  # Sends a weblogUpdate.ping XML-RPC notification for a freshly published
  # post to every configured ping endpoint. Returns an array of
  # [url, result] pairs (result is the HTTP status or an error message).
  def perform(post)
    # we only ping for posts that are published
    if post.is_a?(Post) && post.published?
      Bold.with_site(post.site) do
        Bold::Config.rpc_ping_urls.map{|url| [ url, ping(url, post) ] }
      end
    end
  end

  private

  # POSTs the ping payload to url. Returns the HTTP status code as a
  # string, or the exception message if the request raised.
  def ping(url, post)
    r = HTTParty.post url, body: payload(post),
      headers: { 'Content-Type' => 'application/xml' }
    unless 200 == r.code
      Rails.logger.warn "Ping result from #{url}: #{r.code}\n#{r.body}"
    end
    r.code.to_s
  rescue StandardError => e
    # Rescue StandardError rather than Exception so signals, SystemExit and
    # other critical errors still propagate; use the captured exception
    # object instead of the $! global.
    Rails.logger.error "RPC Ping to #{url} failed: #{e}"
    e.to_s
  end

  # Builds the weblogUpdate.ping XML-RPC request body for post.
  def payload(post)
    xml = Builder::XmlMarkup.new
    xml.instruct!
    xml.methodCall do
      xml.methodName 'weblogUpdate.ping'
      xml.params do
        xml.param do
          xml.value post.site.name
        end
        xml.param do
          xml.value post.site.external_url
        end
      end
    end
  end
end
| bold-app/bold | app/jobs/rpc_ping_job.rb | Ruby | agpl-3.0 | 1,779 |
/*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
// Registers the Dutch ('nl') UI strings for the CKEditor "undo" plugin.
CKEDITOR.plugins.setLang( 'undo', 'nl', {
	redo: 'Opnieuw uitvoeren',
	undo: 'Ongedaan maken'
} );
| astrobin/astrobin | astrobin/static/astrobin/ckeditor/plugins/undo/lang/nl.js | JavaScript | agpl-3.0 | 265 |
import * as Personal from "../../Contents/IndexedDB/Personal";
import Sender from "../../Base/Container/Sender";
/**
*
*/
/**
 * Sender container that transports a personal Guide record.
 */
export default class GuideSender extends Sender {

    /** Container id used to identify this sender type. */
    public static ID = "Guide";

    /** The guide payload being sent (assigned by the caller). */
    public guide: Personal.Guide;

    constructor() {
        super(GuideSender.ID);
    }
} | iwatendo/skybeje | src/Contents/Sender/GuideSender.ts | TypeScript | agpl-3.0 | 303 |
const setup = require('../../setup')
const SessionController = require('../../../api/controllers/SessionController')
var factories = require('../../setup/factories')
var passport = require('passport')
// findUser resolves a user from an OAuth (provider, email, provider-id)
// triple: a directly linked account wins over an email-address match.
describe('SessionController.findUser', () => {
  var u1, u2
  var findUser = SessionController.findUser

  before(() => {
    u1 = factories.user()
    u2 = factories.user()
    return Promise.all([u1.save(), u2.save()])
  })

  describe('with no directly linked user', () => {
    it('picks a user with matching email address', () => {
      return findUser('facebook', u2.get('email'), 'foo')
      .then(user => {
        expect(user.id).to.equal(u2.id)
      })
    })
  })

  describe('with a directly linked user', () => {
    before(() => {
      // Link provider id 'foo' to u1 so the lookup should prefer the link.
      return LinkedAccount.create(u1.id, {type: 'facebook', profile: {id: 'foo'}})
    })

    after(() => {
      return LinkedAccount.query().where('user_id', u1.id).del()
    })

    it('returns that user, not one with a matching email address', () => {
      return findUser('facebook', u2.get('email'), 'foo')
      .then(user => {
        expect(user.id).to.equal(u1.id)
      })
    })
  })
})
// upsertLinkedAccount keeps the user's facebook_url in sync with the
// profile link reported by the OAuth provider.
describe('SessionController.upsertLinkedAccount', () => {
  var user, req, profile
  const upsertLinkedAccount = SessionController.upsertLinkedAccount
  const facebookUrl = 'http://facebook.com/foo'

  before(() => {
    profile = {
      id: 'foo',
      _json: {
        link: facebookUrl
      }
    }
    user = factories.user()
    return user.save()
    .then(() => {
      req = {session: {userId: user.id}}
    })
  })

  describe('with a directly linked user ', () => {
    before(() => {
      return LinkedAccount.create(user.id, {type: 'facebook', profile: {id: profile.id}})
    })

    after(() => {
      return LinkedAccount.query().where('user_id', user.id).del()
    })

    it('updates the user facebook_url', () => {
      return upsertLinkedAccount(req, 'facebook', profile)
      .then(() => user.refresh())
      .then(() => {
        expect(user.get('facebook_url')).to.equal(facebookUrl)
      })
    })
  })
})
// End-to-end specs for SessionController's login endpoints: password
// login (.create), one-time token login, JWT login, and Facebook OAuth.
describe('SessionController', function () {
  var req, res, cat

  before(() => {
    req = factories.mock.request()
    res = factories.mock.response()
  })

  describe('.create', function () {
    before(() => {
      _.extend(req, {
        params: {
          email: 'iam@cat.org',
          password: 'password'
        }
      })
      // The linked account stores the bcrypt hash of 'password'.
      cat = new User({name: 'Cat', email: 'iam@cat.org', active: true})
      return cat.save().then(() =>
        new LinkedAccount({
          provider_user_id: '$2a$10$UPh85nJvMSrm6gMPqYIS.OPhLjAMbZiFnlpjq1xrtoSBTyV6fMdJS',
          provider_key: 'password',
          user_id: cat.id
        }).save())
    })

    it('works with a valid username and password', function () {
      return SessionController.create(req, res)
      .then(() => User.find(cat.id))
      .then(user => {
        expect(res.status).not.to.have.been.called()
        expect(res.ok).to.have.been.called()
        expect(req.session.userId).to.equal(cat.id)
        expect(user.get('last_login_at').getTime()).to.be.closeTo(new Date().getTime(), 2000)
      })
    })
  })

  describe('.createWithToken', () => {
    var user, token

    before(() => {
      UserSession.login = spy(UserSession.login)
      user = factories.user()
      return user.save({created_at: new Date()})
      .then(() => user.generateToken())
      .then(t => token = t)
    })

    it('logs a user in and redirects (Web/GET request)', () => {
      _.extend(req.params, {u: user.id, t: token})
      req.method = 'GET'
      return SessionController.createWithToken(req, res)
      .then(() => {
        expect(UserSession.login).to.have.been.called()
        expect(res.redirect).to.have.been.called()
        expect(res.redirected).to.equal(Frontend.Route.evo.passwordSetting())
      })
    })

    it("logs a user in doesn't redirect (API/POST request)", () => {
      _.extend(req.params, {u: user.id, t: token})
      req.method = 'POST'
      res = factories.mock.response()
      return SessionController.createWithToken(req, res)
      .then(() => {
        expect(UserSession.login).to.have.been.called()
        expect(res.redirect).not.to.have.been.called()
        expect(res.ok).to.have.been.called()
      })
    })

    it('rejects an invalid token', () => {
      var error
      _.extend(req.params, {u: user.id, t: token + 'x'})
      res.send = spy(function (msg) { error = msg })
      return SessionController.createWithToken(req, res)
      .then(() => {
        expect(res.send).to.have.been.called()
        expect(error).to.equal('Link expired')
      })
    })
  })

  describe('.createWithJWT', () => {
    var user, token

    before(async () => {
      user = factories.user()
      await user.save({created_at: new Date()})
      .then(() => user.generateJWT())
      .then(t => token = t)
      req.url = `https://hylo.com?u=${user.id}&token=${token}`
    })

    it('for valid JWT and GET it will redirect', () => {
      _.extend(req.params, {u: user.id, token})
      req.method = 'GET'
      req.session.authenticated = true
      return SessionController.createWithJWT(req, res)
      .then(() => {
        expect(res.redirect).to.have.been.called()
        expect(res.redirected).to.equal(Frontend.Route.evo.passwordSetting())
      })
    })

    it("for valid JWT and POST returns success", () => {
      _.extend(req.params, {u: user.id, token})
      req.method = 'POST'
      req.session.authenticated = true
      return SessionController.createWithJWT(req, res)
      .then(() => {
        expect(res.ok).to.have.been.called()
      })
    })

    it('for invalid token and GET it will still redirect', () => {
      req.method = 'GET'
      req.session.authenticated = false
      return SessionController.createWithJWT(req, res)
      .then(() => {
        expect(res.redirect).to.have.been.called()
        expect(res.redirected).to.equal(Frontend.Route.evo.passwordSetting())
      })
    })

    it('for invalid token and POST it returns error', () => {
      let error
      res.send = spy(function (msg) { error = msg })
      req.method = 'POST'
      req.session.authenticated = false
      return SessionController.createWithJWT(req, res)
      .then(() => {
        expect(res.send).to.have.been.called()
        expect(error).to.equal('Invalid link, please try again')
      })
    })
  })

  describe('.finishFacebookOAuth', () => {
    var req, res, origPassportAuthenticate
    var mockProfile = {
      displayName: 'Lawrence Wang',
      email: 'l@lw.io',
      emails: [ { value: 'l@lw.io' } ],
      gender: 'male',
      id: '100101',
      name: 'Lawrence Wang',
      profileUrl: 'http://www.facebook.com/100101',
      provider: 'facebook'
    }

    // Asserts that the user now has a facebook linked account matching
    // mockProfile; resolves to the loaded user for further checks.
    const expectMatchMockProfile = userId => {
      return User.find(userId, {withRelated: 'linkedAccounts'})
      .then(user => {
        var account = user.relations.linkedAccounts.first()
        expect(account).to.exist
        expect(account.get('provider_key')).to.equal('facebook')
        expect(user.get('facebook_url')).to.equal(mockProfile.profileUrl)
        expect(user.get('avatar_url')).to.equal('https://graph.facebook.com/100101/picture?type=large&access_token=186895474801147|zzzzzz')
        return user
      })
    }

    before(() => {
      origPassportAuthenticate = passport.authenticate
    })

    beforeEach(() => {
      req = factories.mock.request()
      res = factories.mock.response()
      UserSession.login = spy(UserSession.login)
      User.create = spy(User.create)
      // Stub passport so the OAuth callback receives our canned profile.
      passport.authenticate = spy(function (strategy, callback) {
        return () => callback(null, mockProfile)
      })
      return setup.clearDb()
    })

    afterEach(() => {
      passport.authenticate = origPassportAuthenticate
    })

    it('creates a new user', () => {
      return SessionController.finishFacebookOAuth(req, res)
      .then(() => {
        expect(UserSession.login).to.have.been.called()
        expect(User.create).to.have.been.called()
        expect(res.view).to.have.been.called()
        expect(res.viewTemplate).to.equal('popupDone')
        expect(res.viewAttrs.error).not.to.exist
        return User.find('l@lw.io', {withRelated: 'linkedAccounts'})
      })
      .then(user => {
        expect(user).to.exist
        expect(user.get('facebook_url')).to.equal('http://www.facebook.com/100101')
        var account = user.relations.linkedAccounts.find(a => a.get('provider_key') === 'facebook')
        expect(account).to.exist
      })
    })

    describe('with no email in the auth response', () => {
      beforeEach(() => {
        var profile = _.merge(_.cloneDeep(mockProfile), {email: null, emails: null})
        passport.authenticate = spy((strategy, callback) => () => callback(null, profile))
      })

      afterEach(() => {
        passport.authenticate = origPassportAuthenticate
      })

      it('sets an error in the view parameters', () => {
        return SessionController.finishFacebookOAuth(req, res)
        .then(() => {
          expect(UserSession.login).not.to.have.been.called()
          expect(res.view).to.have.been.called()
          expect(res.viewTemplate).to.equal('popupDone')
          expect(res.viewAttrs.error).to.equal('no email')
        })
      })
    })

    describe('with no user in the auth response', () => {
      beforeEach(() => {
        passport.authenticate = spy(function (strategy, callback) {
          return () => callback(null, null)
        })
      })

      afterEach(() => {
        passport.authenticate = origPassportAuthenticate
      })

      it('sets an error in the view parameters', () => {
        return SessionController.finishFacebookOAuth(req, res)
        .then(() => {
          expect(res.view).to.have.been.called()
          expect(res.viewAttrs.error).to.equal('no user')
        })
      })
    })

    describe('for an existing user', () => {
      var user

      beforeEach(() => {
        user = factories.user()
        mockProfile.email = user.get('email')
        return user.save()
      })

      it.skip('creates a new linked account', () => {
        return SessionController.finishFacebookOAuth(req, res)
        .then(() => expectMatchMockProfile(user.id))
      })

      describe('with an existing Facebook account', () => {
        beforeEach(() => LinkedAccount.create(user.id, {type: 'facebook', profile: {id: 'foo'}}))

        it('leaves the existing account unchanged', () => {
          return SessionController.finishFacebookOAuth(req, res)
          .then(() => user.load('linkedAccounts'))
          .then(user => {
            expect(user.relations.linkedAccounts.length).to.equal(2)
            var account = user.relations.linkedAccounts.first()
            expect(account.get('provider_user_id')).to.equal('foo')
          })
        })
      })
    })

    describe('for a logged-in user', () => {
      var user

      beforeEach(() => {
        user = factories.user()
        return user.save().then(() => req.login(user.id))
      })

      it('creates a new linked account even if the email does not match', () => {
        return SessionController.finishFacebookOAuth(req, res)
        .then(() => expectMatchMockProfile(user.id))
      })

      describe('with a linked account that belongs to a different user', () => {
        var account

        beforeEach(() => {
          return factories.user().save()
          .then(u2 => LinkedAccount.create(u2.id, {type: 'facebook', profile: {id: mockProfile.id}}))
          .then(a => { account = a; return a})
        })

        it('changes ownership', () => {
          return SessionController.finishFacebookOAuth(req, res)
          .then(() => expectMatchMockProfile(user.id))
          .then(user => expect(user.relations.linkedAccounts.first().id).to.equal(account.id))
        })
      })
    })
  })
})
| Hylozoic/hylo-node | test/unit/controllers/SessionController.test.js | JavaScript | agpl-3.0 | 11,949 |
Ext.ns('CMDBuild');
CMDBuild.FormPlugin = function(config) {
Ext.apply(this, config);
};
Ext.extend(CMDBuild.FormPlugin, Ext.util.Observable, {
init: function(formPanel) {
var basicForm = formPanel.getForm();
/**
*
* clears the form,
* to use when trackResetOnLoad = true; in these case the reset() function
* set the form with the last loaded values. If you want to clear completely
* the form call clearForm()
*
*/
formPanel.clearForm = function() {
var blankValues = {};
basicForm.items.each(function(f){
blankValues[f.getName()]="";
});
basicForm.setValues(blankValues);
};
/**
*
* Keeps in sync two fields, usually name and description. If the
* master field changes and the slave is empty, or it has the same
* value as the old value of the master, its value is updated with
* the new one.
*
* These function has to be used with the change listener,
* example:
*
* name.on('change', function(field, newValue, oldValue) {
* formPanel.autoComplete(fieldToComplete, newValue, oldValue)
* })
*
*/
formPanel.autoComplete = function(fieldToComplete, newValue, oldValue) {
var actualValue = fieldToComplete.getValue();
if ( actualValue == "" || actualValue == oldValue )
fieldToComplete.setValue(newValue);
};
}
}); | jzinedine/CMDBuild | cmdbuild/src/main/webapp/javascripts/cmdbuild/form/FormPlugin.js | JavaScript | agpl-3.0 | 1,392 |
/***************************************************************************
modifyconstraintsubactivitiespreferredtimeslotsform.cpp - description
-------------------
begin : 2008
copyright : (C) 2008 by Lalescu Liviu
email : Please see https://lalescu.ro/liviu/ for details about contacting Liviu Lalescu (in particular, you can find here the e-mail address)
***************************************************************************/
/***************************************************************************
* *
* This program is free software: you can redistribute it and/or modify *
* it under the terms of the GNU Affero General Public License as *
* published by the Free Software Foundation, either version 3 of the *
* License, or (at your option) any later version. *
* *
***************************************************************************/
#include <QMessageBox>
#include "tablewidgetupdatebug.h"
#include "modifyconstraintsubactivitiespreferredtimeslotsform.h"
#include "timeconstraint.h"
#include <QHeaderView>
#include <QTableWidget>
#include <QTableWidgetItem>
#include <QBrush>
#include <QColor>
#define YES (QString(" "))
#define NO (QString("X"))
// Builds the dialog for editing an existing "subactivities preferred time
// slots" constraint: wires the UI signals, fills the filter combo boxes
// from the rules, and loads the constraint's slots into the hours x days
// table (YES = allowed slot, NO = not allowed).
ModifyConstraintSubactivitiesPreferredTimeSlotsForm::ModifyConstraintSubactivitiesPreferredTimeSlotsForm(QWidget* parent, ConstraintSubactivitiesPreferredTimeSlots* ctr): QDialog(parent)
{
	setupUi(this);

	okPushButton->setDefault(true);

	connect(preferredTimesTable, SIGNAL(itemClicked(QTableWidgetItem*)), this, SLOT(itemClicked(QTableWidgetItem*)));
	connect(cancelPushButton, SIGNAL(clicked()), this, SLOT(cancel()));
	connect(okPushButton, SIGNAL(clicked()), this, SLOT(ok()));
	connect(setAllAllowedPushButton, SIGNAL(clicked()), this, SLOT(setAllSlotsAllowed()));
	connect(setAllNotAllowedPushButton, SIGNAL(clicked()), this, SLOT(setAllSlotsNotAllowed()));

	centerWidgetOnScreen(this);
	restoreFETDialogGeometry(this);

	// Force size-hint computation for the combo boxes (results unused).
	QSize tmp1=teachersComboBox->minimumSizeHint();
	Q_UNUSED(tmp1);
	QSize tmp2=studentsComboBox->minimumSizeHint();
	Q_UNUSED(tmp2);
	QSize tmp3=subjectsComboBox->minimumSizeHint();
	Q_UNUSED(tmp3);
	QSize tmp4=activityTagsComboBox->minimumSizeHint();
	Q_UNUSED(tmp4);

	this->_ctr=ctr;

	updateTeachersComboBox();
	updateStudentsComboBox(parent);
	updateSubjectsComboBox();
	updateActivityTagsComboBox();

	componentNumberSpinBox->setMinimum(1);
	componentNumberSpinBox->setMaximum(MAX_SPLIT_OF_AN_ACTIVITY);
	componentNumberSpinBox->setValue(this->_ctr->componentNumber);

	// One row per hour, one column per day.
	preferredTimesTable->setRowCount(gt.rules.nHoursPerDay);
	preferredTimesTable->setColumnCount(gt.rules.nDaysPerWeek);

	for(int j=0; j<gt.rules.nDaysPerWeek; j++){
		QTableWidgetItem* item=new QTableWidgetItem(gt.rules.daysOfTheWeek[j]);
		preferredTimesTable->setHorizontalHeaderItem(j, item);
	}
	for(int i=0; i<gt.rules.nHoursPerDay; i++){
		QTableWidgetItem* item=new QTableWidgetItem(gt.rules.hoursOfTheDay[i]);
		preferredTimesTable->setVerticalHeaderItem(i, item);
	}

	// Translate the constraint's (hour, day) slot list into a boolean matrix.
	Matrix2D<bool> currentMatrix;
	currentMatrix.resize(gt.rules.nHoursPerDay, gt.rules.nDaysPerWeek);
	//bool currentMatrix[MAX_HOURS_PER_DAY][MAX_DAYS_PER_WEEK];
	for(int i=0; i<gt.rules.nHoursPerDay; i++)
		for(int j=0; j<gt.rules.nDaysPerWeek; j++)
			currentMatrix[i][j]=false;
	for(int k=0; k<ctr->p_nPreferredTimeSlots_L; k++){
		if(ctr->p_hours_L[k]==-1 || ctr->p_days_L[k]==-1)
			assert(0);
		int i=ctr->p_hours_L[k];
		int j=ctr->p_days_L[k];
		// Slots outside the current week/day bounds are silently ignored.
		if(i>=0 && i<gt.rules.nHoursPerDay && j>=0 && j<gt.rules.nDaysPerWeek)
			currentMatrix[i][j]=true;
	}

	for(int i=0; i<gt.rules.nHoursPerDay; i++)
		for(int j=0; j<gt.rules.nDaysPerWeek; j++){
			QTableWidgetItem* item= new QTableWidgetItem();
			item->setTextAlignment(Qt::AlignCenter);
			item->setFlags(Qt::ItemIsSelectable|Qt::ItemIsEnabled);
			if(SHOW_TOOLTIPS_FOR_CONSTRAINTS_WITH_TABLES)
				item->setToolTip(gt.rules.daysOfTheWeek[j]+QString("\n")+gt.rules.hoursOfTheDay[i]);
			preferredTimesTable->setItem(i, j, item);

			if(!currentMatrix[i][j])
				item->setText(NO);
			else
				item->setText(YES);
			colorItem(item);
		}

	preferredTimesTable->resizeRowsToContents();

	weightLineEdit->setText(CustomFETString::number(ctr->weightPercentage));

	// Clicking a header toggles the whole row/column.
	connect(preferredTimesTable->horizontalHeader(), SIGNAL(sectionClicked(int)), this, SLOT(horizontalHeaderClicked(int)));
	connect(preferredTimesTable->verticalHeader(), SIGNAL(sectionClicked(int)), this, SLOT(verticalHeaderClicked(int)));

	preferredTimesTable->setSelectionMode(QAbstractItemView::NoSelection);

	tableWidgetUpdateBug(preferredTimesTable);

	setStretchAvailabilityTableNicely(preferredTimesTable);
}
// Persists the dialog geometry so it is restored next time it opens.
ModifyConstraintSubactivitiesPreferredTimeSlotsForm::~ModifyConstraintSubactivitiesPreferredTimeSlotsForm()
{
	saveFETDialogGeometry(this);
}
// Applies the optional GUI color scheme to one slot cell: allowed (YES)
// cells get a dark green background, forbidden ones dark red; text is
// light gray in both cases. No-op when GUI colors are disabled.
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::colorItem(QTableWidgetItem* item)
{
	if(!USE_GUI_COLORS)
		return;

	const bool allowed=(item->text()==YES);
	item->setBackground(QBrush(allowed ? Qt::darkGreen : Qt::darkRed));
	item->setForeground(QBrush(Qt::lightGray));
}
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::horizontalHeaderClicked(int col)
{
if(col>=0 && col<gt.rules.nDaysPerWeek){
QString s=preferredTimesTable->item(0, col)->text();
if(s==YES)
s=NO;
else{
assert(s==NO);
s=YES;
}
for(int row=0; row<gt.rules.nHoursPerDay; row++){
preferredTimesTable->item(row, col)->setText(s);
colorItem(preferredTimesTable->item(row,col));
}
tableWidgetUpdateBug(preferredTimesTable);
}
}
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::verticalHeaderClicked(int row)
{
if(row>=0 && row<gt.rules.nHoursPerDay){
QString s=preferredTimesTable->item(row, 0)->text();
if(s==YES)
s=NO;
else{
assert(s==NO);
s=YES;
}
for(int col=0; col<gt.rules.nDaysPerWeek; col++){
preferredTimesTable->item(row, col)->setText(s);
colorItem(preferredTimesTable->item(row,col));
}
tableWidgetUpdateBug(preferredTimesTable);
}
}
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::setAllSlotsAllowed()
{
for(int i=0; i<gt.rules.nHoursPerDay; i++)
for(int j=0; j<gt.rules.nDaysPerWeek; j++){
preferredTimesTable->item(i, j)->setText(YES);
colorItem(preferredTimesTable->item(i,j));
}
tableWidgetUpdateBug(preferredTimesTable);
}
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::setAllSlotsNotAllowed()
{
for(int i=0; i<gt.rules.nHoursPerDay; i++)
for(int j=0; j<gt.rules.nDaysPerWeek; j++){
preferredTimesTable->item(i, j)->setText(NO);
colorItem(preferredTimesTable->item(i,j));
}
tableWidgetUpdateBug(preferredTimesTable);
}
// Flips the clicked slot between allowed (YES) and not allowed (NO).
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::itemClicked(QTableWidgetItem* item)
{
	const QString current=item->text();
	assert(current==YES || current==NO);

	item->setText(current==YES ? NO : YES);
	colorItem(item);

	tableWidgetUpdateBug(preferredTimesTable);
}
// Fills the teacher filter combo box (first entry empty = "any teacher")
// and selects the constraint's current teacher.
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::updateTeachersComboBox(){
	int i=0, j=-1;
	teachersComboBox->clear();
	teachersComboBox->addItem("");
	if(this->_ctr->p_teacherName=="")
		j=i;
	i++;
	for(int k=0; k<gt.rules.teachersList.size(); k++){
		Teacher* t=gt.rules.teachersList[k];
		teachersComboBox->addItem(t->name);
		if(t->name==this->_ctr->p_teacherName)
			j=i;
		i++;
	}
	// The constraint's teacher must exist in the rules.
	assert(j>=0);
	teachersComboBox->setCurrentIndex(j);
}
// Fills the students filter combo box with years, groups and (optionally)
// subgroups, and selects the constraint's current students set. If the set
// is an invisible subgroup (subgroups hidden in combo boxes), a warning is
// shown instead.
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::updateStudentsComboBox(QWidget* parent){
	int i=0, j=-1;
	studentsComboBox->clear();
	studentsComboBox->addItem("");
	if(this->_ctr->p_studentsName=="")
		j=i;
	i++;
	for(int m=0; m<gt.rules.yearsList.size(); m++){
		StudentsYear* sty=gt.rules.yearsList[m];
		studentsComboBox->addItem(sty->name);
		if(sty->name==this->_ctr->p_studentsName)
			j=i;
		i++;
		for(int n=0; n<sty->groupsList.size(); n++){
			StudentsGroup* stg=sty->groupsList[n];
			studentsComboBox->addItem(stg->name);
			if(stg->name==this->_ctr->p_studentsName)
				j=i;
			i++;
			if(SHOW_SUBGROUPS_IN_COMBO_BOXES) for(int p=0; p<stg->subgroupsList.size(); p++){
				StudentsSubgroup* sts=stg->subgroupsList[p];
				studentsComboBox->addItem(sts->name);
				if(sts->name==this->_ctr->p_studentsName)
					j=i;
				i++;
			}
		}
	}
	// NOTE(review): the else-branch assert(j>=0) is trivially true here.
	if(j<0)
		showWarningForInvisibleSubgroupConstraint(parent, this->_ctr->p_studentsName);
	else
		assert(j>=0);
	studentsComboBox->setCurrentIndex(j);
}
// Fills the subject filter combo box (first entry empty = "any subject")
// and selects the constraint's current subject.
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::updateSubjectsComboBox(){
	int i=0, j=-1;
	subjectsComboBox->clear();
	subjectsComboBox->addItem("");
	if(this->_ctr->p_subjectName=="")
		j=i;
	i++;
	for(int k=0; k<gt.rules.subjectsList.size(); k++){
		Subject* s=gt.rules.subjectsList[k];
		subjectsComboBox->addItem(s->name);
		if(s->name==this->_ctr->p_subjectName)
			j=i;
		i++;
	}
	// The constraint's subject must exist in the rules.
	assert(j>=0);
	subjectsComboBox->setCurrentIndex(j);
}
// Fills the activity-tag filter combo box (first entry empty = "any tag")
// and selects the constraint's current activity tag.
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::updateActivityTagsComboBox(){
	int i=0, j=-1;
	activityTagsComboBox->clear();
	activityTagsComboBox->addItem("");
	if(this->_ctr->p_activityTagName=="")
		j=i;
	i++;
	for(int k=0; k<gt.rules.activityTagsList.size(); k++){
		ActivityTag* s=gt.rules.activityTagsList[k];
		activityTagsComboBox->addItem(s->name);
		if(s->name==this->_ctr->p_activityTagName)
			j=i;
		i++;
	}
	// The constraint's activity tag must exist in the rules.
	assert(j>=0);
	activityTagsComboBox->setCurrentIndex(j);
}
// Validates the dialog input and, on success, writes weight, component
// number, filters and the selected (day, hour) slots back into the
// constraint, marks the rules as modified, and closes the dialog.
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::ok()
{
	// A -1 index means the students set is an invisible subgroup; refuse.
	if(studentsComboBox->currentIndex()<0){
		showWarningCannotModifyConstraintInvisibleSubgroupConstraint(this, this->_ctr->p_studentsName);
		return;
	}

	double weight;
	QString tmp=weightLineEdit->text();
	weight_sscanf(tmp, "%lf", &weight);
	if(weight<0.0 || weight>100.0){
		QMessageBox::warning(this, tr("FET information"),
			tr("Invalid weight (percentage)"));
		return;
	}

	// Each selected filter must reference an existing rules entity.
	QString teacher=teachersComboBox->currentText();
	if(teacher!="")
		assert(gt.rules.searchTeacher(teacher)>=0);

	QString students=studentsComboBox->currentText();
	if(students!="")
		assert(gt.rules.searchStudentsSet(students)!=NULL);

	QString subject=subjectsComboBox->currentText();
	if(subject!="")
		assert(gt.rules.searchSubject(subject)>=0);

	QString activityTag=activityTagsComboBox->currentText();
	if(activityTag!="")
		assert(gt.rules.searchActivityTag(activityTag)>=0);

	// Collect every cell marked YES as a (day, hour) preferred slot.
	QList<int> days_L;
	QList<int> hours_L;
	//int days[MAX_N_CONSTRAINT_SUBACTIVITIES_PREFERRED_TIME_SLOTS];
	//int hours[MAX_N_CONSTRAINT_SUBACTIVITIES_PREFERRED_TIME_SLOTS];
	int n=0;
	for(int j=0; j<gt.rules.nDaysPerWeek; j++)
		for(int i=0; i<gt.rules.nHoursPerDay; i++)
			if(preferredTimesTable->item(i, j)->text()==YES){
				days_L.append(j);
				hours_L.append(i);
				n++;
			}

	// Zero selected slots is suspicious but allowed after confirmation.
	if(n<=0){
		int t=QMessageBox::question(this, tr("FET question"),
		 tr("Warning: 0 slots selected. Are you sure?"),
		 QMessageBox::Yes, QMessageBox::Cancel);

		if(t==QMessageBox::Cancel)
				return;
	}

	this->_ctr->weightPercentage=weight;
	this->_ctr->componentNumber=componentNumberSpinBox->value();
	this->_ctr->p_teacherName=teacher;
	this->_ctr->p_studentsName=students;
	this->_ctr->p_subjectName=subject;
	this->_ctr->p_activityTagName=activityTag;
	this->_ctr->p_nPreferredTimeSlots_L=n;
	this->_ctr->p_days_L=days_L;
	this->_ctr->p_hours_L=hours_L;

	gt.rules.internalStructureComputed=false;
	setRulesModifiedAndOtherThings(&gt.rules);

	this->close();
}
// Discards any changes and closes the dialog.
void ModifyConstraintSubactivitiesPreferredTimeSlotsForm::cancel()
{
	this->close();
}
#undef YES
#undef NO
| karandit/fet | src/interface/modifyconstraintsubactivitiespreferredtimeslotsform.cpp | C++ | agpl-3.0 | 11,702 |
# Overrides Devise's password-reset request: a reset may only be requested
# for an email address that is attached to a user (user_id NOT NULL).
class Auth::PasswordsController < Devise::PasswordsController
  layout 'blank'

  # POST /password — looks up the submitted email, sends reset instructions
  # to its owner, and redirects accordingly.
  def create
    email= EmailAddress.where('user_id IS NOT NULL').find_by(:email => params[resource_name][:email])
    if email
      self.resource = email.user
      resource.send_reset_password_instructions
    else
      # NOTE(review): flashing "Invalid email!" reveals whether an address
      # exists in the system (user enumeration) — confirm this is intended.
      flash[:error]='Invalid email!'
      redirect_to new_user_password_path
      return
    end
    if resource.errors.empty?
      set_flash_message :success, :send_instructions
      redirect_to new_session_path(resource_name)
    else
      # render_with_scope is a legacy Devise helper — presumably this app
      # pins an older Devise version; verify before upgrading.
      render_with_scope :new
    end
  end
end
| ari/jobsworth | app/controllers/auth/passwords_controller.rb | Ruby | agpl-3.0 | 588 |
/**
* Copyright (C) 2009-2014 BIMserver.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package cn.dlb.bim.models.ifc4.impl;
import org.eclipse.emf.ecore.EClass;
import cn.dlb.bim.models.ifc4.Ifc4Package;
import cn.dlb.bim.models.ifc4.IfcUShapeProfileDef;
/**
* <!-- begin-user-doc -->
* An implementation of the model object '<em><b>Ifc UShape Profile Def</b></em>'.
* <!-- end-user-doc -->
* <p>
* The following features are implemented:
* </p>
* <ul>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getDepth <em>Depth</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getDepthAsString <em>Depth As String</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getFlangeWidth <em>Flange Width</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getFlangeWidthAsString <em>Flange Width As String</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getWebThickness <em>Web Thickness</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getWebThicknessAsString <em>Web Thickness As String</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getFlangeThickness <em>Flange Thickness</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getFlangeThicknessAsString <em>Flange Thickness As String</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getFilletRadius <em>Fillet Radius</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getFilletRadiusAsString <em>Fillet Radius As String</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getEdgeRadius <em>Edge Radius</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getEdgeRadiusAsString <em>Edge Radius As String</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getFlangeSlope <em>Flange Slope</em>}</li>
* <li>{@link cn.dlb.bim.models.ifc4.impl.IfcUShapeProfileDefImpl#getFlangeSlopeAsString <em>Flange Slope As String</em>}</li>
* </ul>
*
* @generated
*/
public class IfcUShapeProfileDefImpl extends IfcParameterizedProfileDefImpl implements IfcUShapeProfileDef {
	// NOTE: EMF-generated implementation. Every accessor below delegates to
	// the reflective eGet/eSet/eUnset/eIsSet API keyed by the corresponding
	// Ifc4Package literal. Manual edits are overwritten on regeneration
	// unless the method's @generated tag is changed to "@generated NOT".
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected IfcUShapeProfileDefImpl() {
		super();
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		// Identifies this object's EClass for the EMF reflective framework.
		return Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public double getDepth() {
		return (Double) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__DEPTH, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setDepth(double newDepth) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__DEPTH, newDepth);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getDepthAsString() {
		return (String) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__DEPTH_AS_STRING, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setDepthAsString(String newDepthAsString) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__DEPTH_AS_STRING, newDepthAsString);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public double getFlangeWidth() {
		return (Double) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_WIDTH, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setFlangeWidth(double newFlangeWidth) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_WIDTH, newFlangeWidth);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getFlangeWidthAsString() {
		return (String) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_WIDTH_AS_STRING, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setFlangeWidthAsString(String newFlangeWidthAsString) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_WIDTH_AS_STRING, newFlangeWidthAsString);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public double getWebThickness() {
		return (Double) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__WEB_THICKNESS, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setWebThickness(double newWebThickness) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__WEB_THICKNESS, newWebThickness);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getWebThicknessAsString() {
		return (String) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__WEB_THICKNESS_AS_STRING, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setWebThicknessAsString(String newWebThicknessAsString) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__WEB_THICKNESS_AS_STRING, newWebThicknessAsString);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public double getFlangeThickness() {
		return (Double) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_THICKNESS, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setFlangeThickness(double newFlangeThickness) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_THICKNESS, newFlangeThickness);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getFlangeThicknessAsString() {
		return (String) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_THICKNESS_AS_STRING, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setFlangeThicknessAsString(String newFlangeThicknessAsString) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_THICKNESS_AS_STRING, newFlangeThicknessAsString);
	}
	// The remaining features (FilletRadius, EdgeRadius, FlangeSlope and their
	// *AsString twins) are optional in the IFC4 schema, hence the additional
	// unset/isSet accessors generated for them.
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public double getFilletRadius() {
		return (Double) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FILLET_RADIUS, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setFilletRadius(double newFilletRadius) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FILLET_RADIUS, newFilletRadius);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void unsetFilletRadius() {
		eUnset(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FILLET_RADIUS);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean isSetFilletRadius() {
		return eIsSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FILLET_RADIUS);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getFilletRadiusAsString() {
		return (String) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FILLET_RADIUS_AS_STRING, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setFilletRadiusAsString(String newFilletRadiusAsString) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FILLET_RADIUS_AS_STRING, newFilletRadiusAsString);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void unsetFilletRadiusAsString() {
		eUnset(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FILLET_RADIUS_AS_STRING);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean isSetFilletRadiusAsString() {
		return eIsSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FILLET_RADIUS_AS_STRING);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public double getEdgeRadius() {
		return (Double) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__EDGE_RADIUS, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setEdgeRadius(double newEdgeRadius) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__EDGE_RADIUS, newEdgeRadius);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void unsetEdgeRadius() {
		eUnset(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__EDGE_RADIUS);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean isSetEdgeRadius() {
		return eIsSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__EDGE_RADIUS);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getEdgeRadiusAsString() {
		return (String) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__EDGE_RADIUS_AS_STRING, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setEdgeRadiusAsString(String newEdgeRadiusAsString) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__EDGE_RADIUS_AS_STRING, newEdgeRadiusAsString);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void unsetEdgeRadiusAsString() {
		eUnset(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__EDGE_RADIUS_AS_STRING);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean isSetEdgeRadiusAsString() {
		return eIsSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__EDGE_RADIUS_AS_STRING);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public double getFlangeSlope() {
		return (Double) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_SLOPE, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setFlangeSlope(double newFlangeSlope) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_SLOPE, newFlangeSlope);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void unsetFlangeSlope() {
		eUnset(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_SLOPE);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean isSetFlangeSlope() {
		return eIsSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_SLOPE);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public String getFlangeSlopeAsString() {
		return (String) eGet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_SLOPE_AS_STRING, true);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setFlangeSlopeAsString(String newFlangeSlopeAsString) {
		eSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_SLOPE_AS_STRING, newFlangeSlopeAsString);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void unsetFlangeSlopeAsString() {
		eUnset(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_SLOPE_AS_STRING);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public boolean isSetFlangeSlopeAsString() {
		return eIsSet(Ifc4Package.Literals.IFC_USHAPE_PROFILE_DEF__FLANGE_SLOPE_AS_STRING);
	}
} //IfcUShapeProfileDefImpl
| shenan4321/BIMplatform | generated/cn/dlb/bim/models/ifc4/impl/IfcUShapeProfileDefImpl.java | Java | agpl-3.0 | 11,953 |
#ifndef BOOST_MPL_IS_PLACEHOLDER_HPP_INCLUDED
#define BOOST_MPL_IS_PLACEHOLDER_HPP_INCLUDED
// Copyright Aleksey Gurtovoy 2001-2004
//
// Distributed under the Boost Software License, Version 1.0.
// (See accompanying file LICENSE_1_0.txt or copy at
// http://www.boost.org/LICENSE_1_0.txt)
//
// See http://www.boost.org/libs/mpl for documentation.
// $Id: is_placeholder.hpp 49267 2008-10-11 06:19:02Z agurtovoy $
// $Date: 2008-10-11 02:19:02 -0400 (Sat, 11 Oct 2008) $
// $Revision: 49267 $
#include <boost/mpl/arg_fwd.hpp>
#include <boost/mpl/bool.hpp>
#include <boost/mpl/aux_/yes_no.hpp>
#include <boost/mpl/aux_/type_wrapper.hpp>
#include <boost/mpl/aux_/nttp_decl.hpp>
#include <boost/mpl/aux_/config/ctps.hpp>
#include <boost/mpl/aux_/config/static_constant.hpp>
namespace abt_boost{} namespace boost = abt_boost; namespace abt_boost{ namespace mpl {
#if !defined(BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION)
// Primary template: by default a type T is not a placeholder.
template< typename T >
struct is_placeholder
    : bool_<false>
{
};
// Partial specialization: the placeholder types arg<N> (_1, _2, ...) are
// placeholders, so the metafunction yields true for them.
template< BOOST_MPL_AUX_NTTP_DECL(int, N) >
struct is_placeholder< arg<N> >
    : bool_<true>
{
};
#else
namespace aux {
// Overload-resolution trick for compilers without partial template
// specialization: only a pointer to type_wrapper<arg<N>> selects the
// yes_tag overload; everything else falls through to the ellipsis.
aux::no_tag is_placeholder_helper(...);
template< BOOST_MPL_AUX_NTTP_DECL(int, N) >
aux::yes_tag is_placeholder_helper(aux::type_wrapper< arg<N> >*);
} // namespace aux
// Fallback implementation: detect placeholders by checking which
// is_placeholder_helper overload would be selected for type_wrapper<T>*
// (sizeof of the returned tag distinguishes the two overloads).
template< typename T >
struct is_placeholder
{
    static aux::type_wrapper<T>* get();
    BOOST_STATIC_CONSTANT(bool, value =
          sizeof(aux::is_placeholder_helper(get())) == sizeof(aux::yes_tag)
        );
    typedef bool_<value> type;
};
#endif // BOOST_NO_TEMPLATE_PARTIAL_SPECIALIZATION
}}
#endif // BOOST_MPL_IS_PLACEHOLDER_HPP_INCLUDED
| jbruestle/aggregate_btree | tiny_boost/boost/mpl/is_placeholder.hpp | C++ | agpl-3.0 | 1,650 |
#include "augs/ensure.h"
// Called when an ensure() check fails: flushes the accumulated in-memory
// log to disk so the failure context survives, then breaks into the
// debugger on Windows builds.
void cleanup_proc() {
	global_log::save_complete_log("generated/logs/ensure_failed_debug_log.txt");
#ifdef PLATFORM_WINDOWS
	__debugbreak();
#endif
}
| geneotech/Mechanical-keyboard-simulator | augs/ensure.cpp | C++ | agpl-3.0 | 186 |
# frozen_string_literal: true
module Mutations
  module Repository
    module Git
      # GraphQL field (legacy graphql-ruby define-style API) that creates a
      # new branch in a repository identified by its slug.
      CreateBranchMutation = GraphQL::Field.define do
        type Types::Git::BranchType
        description 'Creates a new branch'
        argument :repositoryId, !types.ID do
          description 'The repository to create the branch in'
        end
        argument :name, !types.String do
          description 'The name of the branch'
        end
        argument :revision, !types.ID do
          description 'The revision the branch shall point to'
        end
        # Load the target repository by slug; respond with "not found"
        # unless the viewer may :show it, and require :write permission
        # before the resolver runs.
        resource!(lambda do |_root, arguments, _context|
          RepositoryCompound.first(slug: arguments['repositoryId'])
        end)
        not_found_unless :show
        authorize! :write, policy: :repository
        resolve CreateBranchResolver.new
      end
      # GraphQL mutation to create a new branch
      class CreateBranchResolver
        # Creates the branch via the git backend. Invalid branch names or
        # unknown revisions are reported as GraphQL execution errors rather
        # than propagated as exceptions.
        def call(repository, arguments, context)
          repository.git.create_branch(arguments['name'], arguments['revision'])
        rescue Bringit::Repository::InvalidRef,
               Bringit::InvalidRefName => e
          context.add_error(GraphQL::ExecutionError.new(e.message))
        end
      end
    end
  end
end
| ontohub/ontohub-backend | app/graphql/mutations/repository/git/create_branch_mutation.rb | Ruby | agpl-3.0 | 1,238 |
<?php
namespace Qcm\QuestionnairesBundle\Form;
use Symfony\Component\Form\AbstractType;
use Symfony\Component\Form\FormBuilderInterface;
use Symfony\Component\OptionsResolver\OptionsResolverInterface;
class CorrectionType extends AbstractType {
    /**
     * Builds the grading form: one numeric field per grading outcome,
     * all sharing the same options and differing only in their label.
     *
     * @param FormBuilderInterface $builder
     * @param array $options
     */
    public function buildForm(FormBuilderInterface $builder, array $options) {
        // Field name => French label shown to the user.
        $baremes = array(
            'baremeCorrect' => 'Réponse correcte',
            'baremeIncorrect' => 'Réponse incorrecte',
            'baremeMixte' => 'Réponse partiellement correcte',
            'baremeAbsence' => 'Absence de réponse',
        );
        foreach ($baremes as $field => $label) {
            $builder->add($field, null, array('error_bubbling' => false, 'label' => $label));
        }
    }
    /**
     * Binds the form to the Correction entity and cascades validation
     * into embedded objects.
     *
     * @param OptionsResolverInterface $resolver
     */
    public function setDefaultOptions(OptionsResolverInterface $resolver) {
        $resolver->setDefaults(array(
            'cascade_validation' => true,
            'data_class' => 'Qcm\QuestionnairesBundle\Entity\Correction',
        ));
    }
    /**
     * @return string
     */
    public function getName() {
        return 'qcm_questionnairesbundle_correction';
    }
}
| YoureInTrouble/YoureInTrouble | src/Qcm/QuestionnairesBundle/Form/CorrectionType.php | PHP | agpl-3.0 | 1,498 |
package com.x.cms.assemble.control.jaxrs.document;
import com.x.base.core.project.exception.LanguagePromptException;
// Raised when a CMS category has no import view bound to it, so a document
// import cannot proceed. The message is user-facing Chinese; translation:
// "The category is not bound to an import list; the import operation
// cannot be performed."
class ExceptionImportViewIdEmpty extends LanguagePromptException {
	private static final long serialVersionUID = 1859164370743532895L;
	ExceptionImportViewIdEmpty() {
		super("分类信息未绑定导入的列表,无法进行导入操作。" );
	}
}
| o2oa/o2oa | o2server/x_cms_assemble_control/src/main/java/com/x/cms/assemble/control/jaxrs/document/ExceptionImportViewIdEmpty.java | Java | agpl-3.0 | 373 |
<?php
/**
* ResetPasswordController.php
* Copyright (c) 2019 james@firefly-iii.org
*
* This file is part of Firefly III (https://github.com/firefly-iii).
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <https://www.gnu.org/licenses/>.
*/
declare(strict_types=1);
namespace FireflyIII\Http\Controllers\Auth;
use FireflyIII\Exceptions\FireflyException;
use FireflyIII\Http\Controllers\Controller;
use FireflyIII\User;
use Illuminate\Contracts\View\Factory;
use Illuminate\Foundation\Auth\ResetsPasswords;
use Illuminate\Http\JsonResponse;
use Illuminate\Http\RedirectResponse;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\Password;
use Illuminate\Validation\ValidationException;
use Illuminate\View\View;
/**
* Class ResetPasswordController
*
* This controller is responsible for handling password reset requests
* and uses a simple trait to include this behavior. You're free to
* explore this trait and override any methods you wish to tweak.
*
* @codeCoverageIgnore
*/
class ResetPasswordController extends Controller
{
    use ResetsPasswords;
    /**
     * Where to redirect users after resetting their password.
     *
     * @var string
     */
    protected $redirectTo = '/home';
    /**
     * Create a new controller instance.
     */
    public function __construct()
    {
        parent::__construct();
        $this->middleware('guest');
        // Password resets only make sense for the local (eloquent) login
        // provider with the standard web guard; refuse to boot otherwise.
        $loginProvider = config('firefly.login_provider');
        $authGuard = config('firefly.authentication_guard');
        if ('eloquent' !== $loginProvider || 'web' !== $authGuard) {
            throw new FireflyException('Using external identity provider. Cannot continue.');
        }
    }
    /**
     * Reset the given user's password.
     *
     * @param Request $request
     *
     * @return Factory|JsonResponse|RedirectResponse|View
     * @throws ValidationException
     *
     */
    public function reset(Request $request)
    {
        $loginProvider = config('firefly.login_provider');
        if ('eloquent' !== $loginProvider) {
            $message = sprintf('Cannot reset password when authenticating over "%s".', $loginProvider);
            return view('error', compact('message'));
        }
        // Note the stricter-than-default password policy: minimum 16
        // characters plus the custom secure_password validation rule.
        $rules = [
            'token' => 'required',
            'email' => 'required|email',
            'password' => 'required|confirmed|min:16|secure_password',
        ];
        $this->validate($request, $rules, $this->validationErrorMessages());
        // Here we will attempt to reset the user's password. If it is successful we
        // will update the password on an actual user model and persist it to the
        // database. Otherwise we will parse the error and return the response.
        $response = $this->broker()->reset(
            $this->credentials($request),
            function ($user, $password) {
                $this->resetPassword($user, $password);
            }
        );
        // If the password was successfully reset, we will redirect the user back to
        // the application's home authenticated view. If there is an error we can
        // redirect them back to where they came from with their error message.
        return $response === Password::PASSWORD_RESET
            ? $this->sendResetResponse($request, $response)
            : $this->sendResetFailedResponse($request, $response);
    }
    /**
     * Display the password reset view for the given token.
     *
     * If no token is present, display the link request form.
     *
     * @param Request $request
     * @param null $token
     *
     * @return Factory|View
     * @throws FireflyException
     */
    public function showResetForm(Request $request, $token = null)
    {
        $loginProvider = config('firefly.login_provider');
        if ('eloquent' !== $loginProvider) {
            $message = sprintf('Cannot reset password when authenticating over "%s".', $loginProvider);
            return view('error', compact('message'));
        }
        // is allowed to register?
        // In single-user mode, registration links are hidden once the
        // first (and only) account exists.
        $singleUserMode = app('fireflyconfig')->get('single_user_mode', config('firefly.configuration.single_user_mode'))->data;
        $userCount = User::count();
        $allowRegistration = true;
        $pageTitle = (string)trans('firefly.reset_pw_page_title');
        if (true === $singleUserMode && $userCount > 0) {
            $allowRegistration = false;
        }
        /** @noinspection PhpUndefinedFieldInspection */
        return view('auth.passwords.reset')->with(
            ['token' => $token, 'email' => $request->email, 'allowRegistration' => $allowRegistration, 'pageTitle' => $pageTitle]
        );
    }
}
| firefly-iii/firefly-iii | app/Http/Controllers/Auth/ResetPasswordController.php | PHP | agpl-3.0 | 5,272 |
'''
Driver file
To do:
- allow searching of a greater number of common players
- "vanity search" - highest damage differential:
- incorporate STEAMID conversion (shouldn't require the API)
- impose order on results
- start to store player aliases, associated with IDs
- write test
Tools:
Couple of urls:
http://logs.tf/profile/76561198055233348
http://logs.tf/profile/76561197993593754
'''
import player
# constants:
ID64_LENGTH = 17
MAX_PLAYERS = 12


# print title and link for logs common to players
def print_common_logs(players):
    """Print the title and logs.tf URL of every log shared by all players.

    Args:
        players: list of Player-like objects exposing ``data['logs']``,
            a mapping of log key -> {'title': ..., 'id': ...}.

    Raises:
        RuntimeError: if more than MAX_PLAYERS players are given.
    """
    num_of_players = len(players)
    if num_of_players > MAX_PLAYERS:
        raise RuntimeError("Too many players for now!")
    elif num_of_players == 0:
        print("No players, no logs.")
    else:
        # Bug fix: the original read players[i] after the inner loop, which
        # raised a NameError for a single player (the loop variable was
        # never bound) and otherwise depended on a leftover loop index.
        # Read log data from players[0], whose keys drive the iteration.
        for key in players[0].data['logs']:
            if all(key in p.data['logs'] for p in players[1:]):
                log = players[0].data['logs'][key]
                print(log['title'])
                print("http://logs.tf/" + str(log['id']))
def main():
    """Interactively collect logs.tf profile URLs, then print common logs.

    Prompts until the user enters 's'/'S'; each other input must contain
    a "/profile/<17-digit SteamID64>" segment. Valid IDs become Player
    objects handed to print_common_logs().
    """
    print("Hi! This script finds urls of logs common to up to 12 players.")
    print("Enter their logs.tf profiles, then enter 's' to search.")
    players = []
    # get and validate user input for ID
    while True:
        url_input = input("Enter player URL: ")
        if url_input in ('s', 'S'):
            break
        # the SteamID64 follows this signature in profile URLs:
        id_index = url_input.find("/profile/")
        if id_index == -1:
            print("Input not recognized. Please try again.")
            continue
        # grab the ID64_LENGTH digits following "/profile/"; use the module
        # constant instead of the magic numbers 17/26 the original used.
        start = id_index + len("/profile/")
        steam_id_64 = url_input[start:start + ID64_LENGTH]
        if len(steam_id_64) != ID64_LENGTH:
            print("Input not recognized. Please try again.")
            continue
        players.append(player.Player(steam_id_64))
    print_common_logs(players)
# Entry point: run the interactive prompt only when executed as a script.
if __name__ == "__main__":
    main()
| triested/logs-search | main.py | Python | agpl-3.0 | 2,193 |
# Mailer for administrative notifications sent to the site owners.
class AdminMailer < ActionMailer::Base
  default from: 'info@carbondiet.org',
          to: 'info@carbondiet.org'

  # Notify the admins that a new user has registered.
  # @name is rendered by the corresponding mailer view.
  def new_signup(name, sent_at = Time.now)
    @name = name
    mail date: sent_at, subject: 'Carbon Diet: New user signed up!'
  end

  # Notify the admins that a user requested support for a country.
  # @login and @country are rendered by the corresponding mailer view.
  def country_request(user, country, sent_at = Time.now)
    @login = user.login
    @country = country
    mail date: sent_at, subject: 'Carbon Diet: Country request!'
  end
end
| Floppy/carbon-diet | app/mailers/admin_mailer.rb | Ruby | agpl-3.0 | 453 |
// Clover code-coverage report data (generated) — do not edit by hand.
var clover = new Object();
// JSON: {classes : [{name, id, sl, el, methods : [{sl, el}, ...]}, ...]}
clover.pageData = {"classes":[{"el":53,"id":68288,"methods":[{"el":39,"sc":2,"sl":34},{"el":44,"sc":2,"sl":41},{"el":48,"sc":2,"sl":46},{"el":52,"sc":2,"sl":50}],"name":"DoubleArrayPointer","sl":29}]}
// JSON: {test_ID : {"methods": [ID1, ID2, ID3...], "name" : "testXXX() void"}, ...};
clover.testTargets = {}
// JSON: { lines : [{tests : [testid1, testid2, testid3, ...]}, ...]};
clover.srcFileLines = [[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []]
| cm-is-dog/rapidminer-studio-core | report/html/de/bwaldvogel/liblinear/DoubleArrayPointer.js | JavaScript | agpl-3.0 | 726 |
<?php
if(!defined('sugarEntry') || !sugarEntry) die('Not A Valid Entry Point');
/*********************************************************************************
* The contents of this file are subject to the SugarCRM Master Subscription
* Agreement ("License") which can be viewed at
* http://www.sugarcrm.com/crm/en/msa/master_subscription_agreement_11_April_2011.pdf
* By installing or using this file, You have unconditionally agreed to the
* terms and conditions of the License, and You may not use this file except in
* compliance with the License. Under the terms of the license, You shall not,
* among other things: 1) sublicense, resell, rent, lease, redistribute, assign
* or otherwise transfer Your rights to the Software, and 2) use the Software
* for timesharing or service bureau purposes such as hosting the Software for
* commercial gain and/or for the benefit of a third party. Use of the Software
* may be subject to applicable fees and any use of the Software without first
* paying applicable fees is strictly prohibited. You do not have the right to
* remove SugarCRM copyrights from the source code or user interface.
*
* All copies of the Covered Code must include on each user interface screen:
* (i) the "Powered by SugarCRM" logo and
* (ii) the SugarCRM copyright notice
* in the same form as they appear in the distribution. See full license for
* requirements.
*
* Your Warranty, Limitations of liability and Indemnity are expressly stated
* in the License. Please refer to the License for the specific language
* governing these rights and limitations under the License. Portions created
* by SugarCRM are Copyright (C) 2004-2011 SugarCRM, Inc.; All Rights Reserved.
********************************************************************************/
/*********************************************************************************
* Description: Contains a variety of utility functions specific to this module.
* Portions created by SugarCRM are Copyright (C) SugarCRM, Inc.
* All Rights Reserved.
* Contributor(s): ______________________________________..
********************************************************************************/
// Returns inline JavaScript that validates required fields on the Holidays
// edit form before submission. The heredoc below is emitted verbatim into
// the rendered page; $err_missing_required_fields is interpolated from the
// application string table.
function get_validate_record_js () {
	global $mod_strings;
	global $app_strings;
	$err_missing_required_fields = $app_strings['ERR_MISSING_REQUIRED_FIELDS'];
	$the_script = <<<EOQ
<script type="text/javascript" language="Javascript">
function verify_data(form) {
	var isError = false;
	var errorMessage = "";
	if (isError == true) {
		alert("$err_missing_required_fields" + errorMessage);
		return false;
	}
	return true;
}
</script>
EOQ;
	return $the_script;
}
// Returns inline JavaScript for the tab-chooser widget: set_chooser()
// serializes the options of the display_tabs / hide_tabs multi-selects
// into ":::"-delimited strings stored in hidden EditView form fields.
// The heredoc is emitted verbatim into the rendered page.
function get_chooser_js()
{
	$the_script = <<<EOQ
<script type="text/javascript" language="Javascript">
<!-- to hide script contents from old browsers
function set_chooser()
{
	var display_tabs_def = '';
	var hide_tabs_def = '';
	for(i=0; i < object_refs['display_tabs'].options.length ;i++)
	{
		display_tabs_def += object_refs['display_tabs'].options[i].value+":::";
	}
	for(i=0; i < object_refs['hide_tabs'].options.length ;i++)
	{
		hide_tabs_def += object_refs['hide_tabs'].options[i].value+":::";
	}
	document.EditView.display_tabs_def.value = display_tabs_def;
	document.EditView.hide_tabs_def.value = hide_tabs_def;
}
// end hiding contents from old browsers -->
</script>
EOQ;
	return $the_script;
}
?> | harish-patel/ecrm | modules/Holidays/Forms.php | PHP | agpl-3.0 | 3,408 |
"""
Copyright (c) 2002 Intel Corporation
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the Intel Corporation nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE INTEL OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
EXPORT LAWS: THIS LICENSE ADDS NO RESTRICTIONS TO THE EXPORT LAWS OF
YOUR JURISDICTION. It is licensee's responsibility to comply with any
export regulations applicable in licensee's jurisdiction. Under
CURRENT (May 2000) U.S. export regulations this software is eligible
for export from the U.S. and can be downloaded by or otherwise
exported or reexported worldwide EXCEPT to U.S. embargoed destinations
which include Cuba, Iraq, Libya, North Korea, Iran, Syria, Sudan,
Afghanistan and any other country to which the U.S. has embargoed
goods and services.
DESCRIPTION: Agent HTTP thread that handles HTTP requests to view the
agent's current state.
AUTHOR: Brent Chun (bnc@intel-research.net)
$Id: agenthttpsvr.py,v 1.1 2003-08-19 17:17:19 aclement Exp $
"""
import SimpleHTTPServer
import SocketServer
import threading
import agent
class agenthttphandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    """Request handler that renders the agent's state as an HTML page.

    Every GET returns the same content: an HTML rendering of the agent
    attached to the owning server (see agenthttpsvr.agent).
    """
    def do_GET(self):
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()
        self.wfile.write("%s" % agent.agenthtml(self.server.agent))
class agenthttpsvr(SocketServer.ThreadingTCPServer):
    """Threaded TCP server exposing the agent's state over HTTP.

    Binding is retried (10 attempts, 1 second apart) via hacks.retryapply
    to tolerate lingering sockets; SO_REUSEADDR is enabled for the same
    reason. Raises RuntimeError if the port cannot be bound.
    """
    def __init__(self, server_address, RequestHandlerClass, agent):
        import hacks
        SocketServer.ThreadingTCPServer.allow_reuse_address = 1
        try:
            method = SocketServer.ThreadingTCPServer.__init__
            args = [ self, server_address, RequestHandlerClass ]
            hacks.retryapply(method, args, 10, 1)
        except:
            # Bug fix: the original raised a bare string ("raise '...'"),
            # which string-exception form is a TypeError on Python >= 2.6.
            # Raise a real exception object instead.
            raise RuntimeError("Could not bind to TCP port %d" % server_address[1])
        self.agent = agent
class agenthttpsvrthr(threading.Thread):
    """Background thread that runs the agent HTTP server forever."""
    def __init__(self, agent):
        threading.Thread.__init__(self)
        # Bind to all interfaces on the port from the agent's configuration.
        self.server = agenthttpsvr(("", agent.conf.port), agenthttphandler, agent)
    def run(self):
        self.server.serve_forever()
| nmc-probe/emulab-nome | tbsetup/plab/libdslice/dslice/agenthttpsvr.py | Python | agpl-3.0 | 3,426 |
/*
* Claudia Project
* http://claudia.morfeo-project.org
*
* (C) Copyright 2010 Telefonica Investigacion y Desarrollo
* S.A.Unipersonal (Telefonica I+D)
*
* See CREDITS file for info about members and contributors.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the Affero GNU General Public License (AGPL) as
* published by the Free Software Foundation; either version 3 of the License,
* or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the Affero GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*
* If you want to use this software an plan to distribute a
* proprietary application in any way, and you are not licensing and
* distributing your source code under AGPL, you probably need to
* purchase a commercial license of the product. Please contact
* claudia-support@lists.morfeo-project.org for more information.
*/
package com.telefonica.claudia.smi.provisioning;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.StringTokenizer;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.FactoryConfigurationError;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.xmlrpc.XmlRpcException;
import org.apache.xmlrpc.client.XmlRpcClient;
import org.apache.xmlrpc.client.XmlRpcClientConfigImpl;
import org.dmtf.schemas.ovf.envelope._1.ContentType;
import org.dmtf.schemas.ovf.envelope._1.DiskSectionType;
import org.dmtf.schemas.ovf.envelope._1.EnvelopeType;
import org.dmtf.schemas.ovf.envelope._1.FileType;
import org.dmtf.schemas.ovf.envelope._1.ProductSectionType;
import org.dmtf.schemas.ovf.envelope._1.RASDType;
import org.dmtf.schemas.ovf.envelope._1.ReferencesType;
import org.dmtf.schemas.ovf.envelope._1.VirtualDiskDescType;
import org.dmtf.schemas.ovf.envelope._1.VirtualHardwareSectionType;
import org.dmtf.schemas.ovf.envelope._1.VirtualSystemCollectionType;
import org.dmtf.schemas.ovf.envelope._1.VirtualSystemType;
import org.dmtf.schemas.ovf.envelope._1.ProductSectionType.Property;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import com.abiquo.ovf.OVFEnvelopeUtils;
import com.abiquo.ovf.exceptions.EmptyEnvelopeException;
import com.abiquo.ovf.section.OVFProductUtils;
import com.abiquo.ovf.xml.OVFSerializer;
import com.telefonica.claudia.smi.DataTypesUtils;
import com.telefonica.claudia.smi.Main;
import com.telefonica.claudia.smi.TCloudConstants;
import com.telefonica.claudia.smi.URICreation;
import com.telefonica.claudia.smi.task.Task;
import com.telefonica.claudia.smi.task.TaskManager;
/**
 * ProvisioningDriver implementation that talks to an OpenNebula (ONE) front-end
 * over its XML-RPC interface to deploy/undeploy virtual machines and networks.
 */
public class ONEProvisioningDriver implements ProvisioningDriver {

    /** Control actions accepted by ONE's one.vm.action call. */
    public static enum ControlActionType {shutdown, hold, release, stop, suspend, resume, finalize};

    //public static enum VmStateType {INIT, PENDING, HOLD, ACTIVE, STOPPED, SUSPENDED, DONE, FAILED};

    // Numeric VM states as reported by ONE in the STATE element of one.vm.info.
    private final static int INIT_STATE = 0;
    private final static int PENDING_STATE = 1;
    private final static int HOLD_STATE = 2;
    private final static int ACTIVE_STATE = 3;
    private final static int STOPPED_STATE = 4;
    private final static int SUSPENDED_STATE = 5;
    private final static int DONE_STATE = 6;
    private final static int FAILED_STATE = 7;

    // LCM_INIT, PROLOG, BOOT, RUNNING, MIGRATE, SAVE_STOP, SAVE_SUSPEND, SAVE_MIGRATE, PROLOG_MIGRATE, EPILOG_STOP, EPILOG, SHUTDOWN, CANCEL
    // Sub-states reported in the LCM_STATE element while the VM is ACTIVE.
    private final static int INIT_SUBSTATE = 0;
    private final static int PROLOG_SUBSTATE = 1;
    private final static int BOOT_SUBSTATE = 2;
    private final static int RUNNING_SUBSTATE = 3;
    private final static int MIGRATE_SUBSTATE = 4;
    private final static int SAVE_STOP_SUBSTATE = 5;
    private final static int SAVE_SUSPEND_SUBSTATE = 6;
    private final static int SAVE_MIGRATE_SUBSTATE = 7;
    private final static int PROLOG_MIGRATE_SUBSTATE = 8;
    private final static int PROLOG_RESUME_SUBSTATE = 9;
    private final static int EPILOG_STOP_SUBSTATE = 10;
    private final static int EPILOG_SUBSTATE = 11;
    // NOTE(review): name is misspelled ("SHUDTOWN"); kept as-is because it may
    // be referenced elsewhere in this class beyond this chunk.
    private final static int SHUDTOWN_SUBSTATE = 12;
    private final static int CANCEL_SUBSTATE = 13;

    // Fixed: was a raw "new HashMap()", which compiles only with an unchecked warning.
    private HashMap<String, String> text_migrability = new HashMap<String, String>();

    private static org.apache.log4j.Logger log = org.apache.log4j.Logger.getLogger("com.telefonica.claudia.smi.provisioning.ONEProvisioningDriver");
    // Tag names of the returning info doc for Virtual machines
    public static final String VM_STATE = "STATE";        // numeric state, see *_STATE constants
    public static final String VM_SUBSTATE = "LCM_STATE"; // numeric sub-state, see *_SUBSTATE constants

    // XMLRPC commands to access OpenNebula features
    private final static String VM_ALLOCATION_COMMAND = "one.vm.allocate";
    private final static String VM_UPDATE_COMMAND = "one.vm.action";
    private final static String VM_GETINFO_COMMAND = "one.vm.info";
    private final static String VM_GETALL_COMMAND = "one.vmpool.info";
    private final static String VM_DELETE_COMMAND = "one.vm.delete";
    private final static String NET_ALLOCATION_COMMAND = "one.vn.allocate";
    private final static String NET_GETINFO_COMMAND = "one.vn.info";
    private final static String NET_GETALL_COMMAND = "one.vnpool.info";
    private final static String NET_DELETE_COMMAND = "one.vn.delete";

    //private final static String DEBUGGING_CONSOLE = "RAW = [ type =\"kvm\", data =\"<devices><serial type='pty'><source path='/dev/pts/5'/><target port='0'/></serial><console type='pty' tty='/dev/pts/5'><source path='/dev/pts/5'/><target port='0'/></console></devices>\" ]";

    /**
     * Connection URL for OpenNebula. It defaults to localhost, but can be
     * overridden with the property oneUrl of the server configuration file.
     */
    private String oneURL = "http://localhost:2633/RPC2";

    /**
     * Server configuration file property identifiers (read in the constructor).
     */
    private final static String URL_PROPERTY = "oneUrl";
    private final static String USER_PROPERTY = "oneUser";
    private final static String PASSWORD_PROPERTY = "onePassword";
    private static final String KERNEL_PROPERTY = "oneKernel";
    private static final String INITRD_PROPERTY = "oneInitrd";
    private static final String ARCH_PROPERTY = "arch";
    private static final String ENVIRONMENT_PROPERTY = "oneEnvironmentPath";
    private final static String SSHKEY_PROPERTY = "oneSshKey";
    private final static String SCRIPTPATH_PROPERTY = "oneScriptPath";
    private final static String ETH0_GATEWAY_PROPERTY = "eth0Gateway";
    private final static String ETH0_DNS_PROPERTY = "eth0Dns";
    private final static String ETH1_GATEWAY_PROPERTY = "eth1Gateway";
    private final static String ETH1_DNS_PROPERTY = "eth1Dns";
    private final static String NET_INIT_SCRIPT0 = "netInitScript0";
    private final static String NET_INIT_SCRIPT1 = "netInitScript1";

    // ONE session token in "user:sha1(password)" form.
    // NOTE(review): hard-coded default credentials; overridden by the
    // oneUser/onePassword properties when present, but the default should not
    // ship with a well-known password hash — confirm with ops.
    private String oneSession = "oneadmin:5baa61e4c9b93f3f0682250b6cf8331b7ee68fd8";

    // Lazily shared XML-RPC client; configured in the constructor.
    private XmlRpcClient xmlRpcClient = null;

    private static final String NETWORK_BRIDGE = "oneNetworkBridge";
    private static final String XEN_DISK = "xendisk";

    // Collaborators performing the actual ONE calls / TCloud translations;
    // presumably injected elsewhere in this class — initialization not visible here.
    OneOperations operations = null;
    OneNetUtilities netUtils = null;

    /**
     * Collection containing the mapping from fqns to ids. This map is used as a
     * cache by the getVmId method (vm ids never change once assigned).
     */
    private Map<String, Integer> idVmMap = new HashMap<String, Integer>();

    /**
     * Collection containing the mapping from fqns to ids. This map is used as a
     * cache by the getNetId method (network ids never change once assigned).
     */
    private Map<String, Integer> idNetMap = new HashMap<String, Integer>();

    // Hypervisor/boot configuration, populated from properties in the constructor.
    private String hypervisorInitrd="";
    private String arch="";
    private String hypervisorKernel="";
    private String customizationPort;
    private String environmentRepositoryPath;
    private static String networkBridge="";
    private static String xendisk="";
    private static String oneSshKey="";
    private static String oneScriptPath="";
    private static String eth0Gateway="";
    private static String eth1Gateway="";
    private static String eth0Dns="";
    private static String eth1Dns="";
    private static String netInitScript0="";
    private static String netInitScript1="";

    // Tokens used when building ONE template text.
    public static final String ASSIGNATION_SYMBOL = "=";
    public static final String LINE_SEPARATOR = System.getProperty("line.separator");

    // Keys of the ONE VM template / info document.
    // NOTE(review): ONE_VM_ID maps to "NAME" (same value as ONE_VM_NAME) — confirm intended.
    public static final String ONE_VM_ID = "NAME";
    public static final String ONE_VM_TYPE = "TYPE";
    public static final String ONE_VM_STATE = "STATE";
    public static final String ONE_VM_MEMORY = "MEMORY";
    public static final String ONE_VM_NAME = "NAME";
    public static final String ONE_VM_UUID = "UUID";
    public static final String ONE_VM_CPU = "CPU";
    public static final String ONE_VM_VCPU = "VCPU";
    public static final String ONE_VM_RAW_VMI = "RAW_VMI";
    public static final String ONE_VM_OS = "OS";
    public static final String ONE_VM_OS_PARAM_KERNEL = "kernel";
    public static final String ONE_VM_OS_PARAM_INITRD = "initrd";
    public static final String ONE_VM_OS_PARAM_ROOT = "root";
    public static final String ONE_VM_OS_PARAM_BOOT = "boot";
    public static final String ONE_VM_GRAPHICS = "GRAPHICS";
    public static final String ONE_VM_GRAPHICS_TYPE = "type";
    public static final String ONE_VM_GRAPHICS_LISTEN = "listen";
    public static final String ONE_VM_GRAPHICS_PORT = "port";
    public static final String ONE_VM_DISK_COLLECTION = "DISKS";
    public static final String ONE_VM_DISK = "DISK";
    public static final String ONE_VM_DISK_PARAM_IMAGE = "source";
    public static final String ONE_VM_DISK_PARAM_FORMAT = "format";
    public static final String ONE_VM_DISK_PARAM_SIZE = "size";
    public static final String ONE_VM_DISK_PARAM_TARGET = "target";
    public static final String ONE_VM_DISK_PARAM_DIGEST = "digest";
    public static final String ONE_VM_DISK_PARAM_TYPE = "type";
    public static final String ONE_VM_DISK_PARAM_DRIVER = "driver";
    public static final String ONE_VM_NIC_COLLECTION = "NICS";
    public static final String ONE_VM_NIC = "NIC";
    public static final String ONE_VM_NIC_PARAM_IP = "ip";
    public static final String ONE_VM_NIC_PARAM_NETWORK = "NETWORK";

    // Keys of the ONE virtual network template / info document.
    public static final String ONE_NET_ID = "ID";
    public static final String ONE_NET_NAME = "NAME";
    public static final String ONE_NET_BRIDGE = "BRIDGE";
    public static final String ONE_NET_TYPE = "TYPE";
    public static final String ONE_NET_ADDRESS = "NETWORK_ADDRESS";
    public static final String ONE_NET_SIZE = "NETWORK_SIZE";
    public static final String ONE_NET_LEASES = "LEASES";
    public static final String ONE_NET_IP = "IP";
    public static final String ONE_NET_MAC = "MAC";

    // Keys of the ONE disk description.
    public static final String ONE_DISK_ID = "ID";
    public static final String ONE_DISK_NAME = "NAME";
    public static final String ONE_DISK_URL = "URL";
    public static final String ONE_DISK_SIZE = "SIZE";
    public static final String ONE_OVF_URL = "OVF";
    public static final String ONE_CONTEXT = "CONTEXT";
    public static final String ONE_VERSION = "ONEVERSION";

    // Keys used when reporting network info back to the caller.
    public static final String RESULT_NET_ID = "ID";
    public static final String RESULT_NET_NAME = "NAME";
    public static final String RESULT_NET_ADDRESS = "NETWORK_ADDRESS";
    public static final String RESULT_NET_BRIDGE = "BRIDGE";
    public static final String RESULT_NET_TYPE = "TYPE";

    // Delimiters for multi-valued attributes in ONE template syntax.
    public static final String MULT_CONF_LEFT_DELIMITER = "[";
    public static final String MULT_CONF_RIGHT_DELIMITER = "]";
    public static final String MULT_CONF_SEPARATOR = ",";
    public static final String QUOTE = "\"";

    // OVF/CIM RASD ResourceType codes (per the DMTF OVF specification:
    // 3=Processor, 4=Memory, 10=Ethernet adapter, 17=Disk drive).
    private static final int ResourceTypeCPU = 3;
    private static final int ResourceTypeMEMORY = 4;
    private static final int ResourceTypeNIC = 10;
    private static final int ResourceTypeDISK = 17;

    // Target OpenNebula version; "3.0" changes the pool-query argument lists.
    private String oneversion = "2.2";
    /**
     * Asynchronous task that allocates a VM in OpenNebula and then polls its
     * state until it is RUNNING (success), FAILED (error), or ONE becomes
     * unreachable too many consecutive times.
     */
    public class DeployVMTask extends Task {
        public static final long POLLING_INTERVAL= 10000;      // ms between state polls
        private static final int MAX_CONNECTION_ATEMPTS = 5;   // tolerated consecutive I/O failures
        String fqnVm; // fully qualified name of the VM to deploy
        String ovf;   // OVF descriptor of the VM

        public DeployVMTask(String fqn, String ovf) {
            super();
            this.fqnVm = fqn;
            this.ovf = ovf;
        }

        @Override
        public void execute() {
            this.status = TaskStatus.RUNNING;
            this.startTime = System.currentTimeMillis();
            try {
                // Create the Virtual Machine
                String result = createVirtualMachine();
                if (result==null) {
                    // Allocation failed; createVirtualMachine already filled this.error.
                    this.status= TaskStatus.ERROR;
                    this.endTime = System.currentTimeMillis();
                    return;
                }
                // Wait until the state is RUNNING
                this.status = TaskStatus.WAITING;
                int connectionAttempts=0;
                while (true) {
                    try {
                        // getVirtualMachineState is defined elsewhere in this class;
                        // presumably returns the one.vm.info XML document — confirm.
                        Document vmInfo = getVirtualMachineState(result);
                        Integer state = Integer.parseInt(vmInfo.getElementsByTagName(VM_STATE).item(0).getTextContent());
                        Integer subState = Integer.parseInt(vmInfo.getElementsByTagName(VM_SUBSTATE).item(0).getTextContent());
                        if (state == ACTIVE_STATE && subState == RUNNING_SUBSTATE ) {
                            // VM fully booted: done.
                            this.status= TaskStatus.SUCCESS;
                            this.endTime = System.currentTimeMillis();
                            break;
                        } else if (state ==FAILED_STATE) {
                            this.status= TaskStatus.ERROR;
                            this.endTime = System.currentTimeMillis();
                            break;
                        }
                        // Successful poll: reset the consecutive-failure counter.
                        connectionAttempts=0;
                    } catch (IOException ioe) {
                        // Transient connectivity problem: retry a bounded number of times.
                        if (connectionAttempts> MAX_CONNECTION_ATEMPTS) {
                            this.status= TaskStatus.ERROR;
                            this.endTime = System.currentTimeMillis();
                            break;
                        } else
                            connectionAttempts++;
                        log.warn("Connection exception accessing ONE. Trying again. Error: " + ioe.getMessage());
                    }
                    // An InterruptedException here is handled by the outer catch (Exception).
                    Thread.sleep(POLLING_INTERVAL);
                }
            } catch (IOException e) {
                log.error("Error connecting to ONE: " + e.getMessage());
                this.error = new TaskError();
                this.error.message = e.getMessage();
                this.status = TaskStatus.ERROR;
                this.endTime = System.currentTimeMillis();
                return;
            } catch (Exception e) {
                log.error("Unexpected error creating VM: " + e.getMessage());
                this.error = new TaskError();
                this.error.message = e.getMessage();
                this.status = TaskStatus.ERROR;
                this.endTime = System.currentTimeMillis();
                e.printStackTrace();
                return;
            }
        }

        /**
         * Allocate the VM in ONE from the task's OVF.
         *
         * @return the internal ONE id of the new VM, or null on failure
         *         (in which case this.error carries the cause).
         */
        public String createVirtualMachine() throws Exception {
            String idvm = null;
            try
            {
                idvm = operations.deployVirtualMachine (ovf, fqnVm );
                this.returnMsg = "Virtual machine internal id: " + idvm;
            }
            catch (Exception e)
            {
                this.error = new TaskError();
                this.error.message = e.getMessage();
            }
            return idvm;
        }
    }
public class DeployNetworkTask extends Task {
String fqnNet;
String ovf;
public DeployNetworkTask(String netFqn, String ovf) {
this.fqnNet = netFqn;
this.ovf = ovf;
}
@Override
public void execute() {
this.status = TaskStatus.RUNNING;
this.startTime = System.currentTimeMillis();
try {
if (!createNetwork()) {
this.status= TaskStatus.ERROR;
return;
}
this.status= TaskStatus.SUCCESS;
} catch (IOException e) {
log.error("Error connecting to ONE: " + e.getMessage());
this.error = new TaskError();
this.error.message = e.getMessage();
this.endTime = System.currentTimeMillis();
this.status = TaskStatus.ERROR;
return;
} catch (Exception e) {
log.error("Unknown error creating network: " + e.getMessage());
this.error = new TaskError();
this.error.message = e.getMessage();
this.endTime = System.currentTimeMillis();
this.status = TaskStatus.ERROR;
}
}
public boolean createNetwork() throws Exception {
String idvm = null;
String xml = netUtils.TCloud2ONENet(ovf);
try
{
idvm = operations.deployNetwork(xml);
this.returnMsg = "Virtual network machine internal id: " + idvm;
}
catch (Exception e)
{
this.error = new TaskError();
this.error.message = e.getMessage();
return false;
}
return true;
/* List<String> rpcParams = new ArrayList<String>();
rpcParams.add(oneSession);
rpcParams.add(TCloud2ONENet(ovf));
Object[] result = null;
try {
result = (Object[])xmlRpcClient.execute(NET_ALLOCATION_COMMAND, rpcParams);
} catch (XmlRpcException ex) {
log.error("Connection error. Could not reach ONE host: " + ex.getMessage());
throw new IOException ("Error on allocation of the new network , XMLRPC call failed", ex);
}
boolean success = (Boolean)result[0];
if(success) {
log.debug("Network creation request succeded: " + result[1]);
this.returnMsg = ((Integer)result[1]).toString();
return true;
} else {
log.error("Error recieved from ONE: " + (String)result[1]);
this.error = new TaskError();
this.error.message = (String)result[1];
return false;
}*/
}
}
    /**
     * Asynchronous task that removes an already deployed VM from OpenNebula.
     */
    public class UndeployVMTask extends Task {
        String fqnVm; // fully qualified name of the VM to remove

        public UndeployVMTask(String vmFqn) {
            this.fqnVm = vmFqn;
        }

        @Override
        public void execute() {
            this.status= TaskStatus.RUNNING;
            this.startTime = System.currentTimeMillis();
            // Undeploy the VM
            try {
                // Resolve the internal ONE id from the FQN, then delete.
                String id = getVmId(fqnVm).toString();
                // deleteVirtualMachine(id);
                operations.deleteVirtualMachine(id);
                this.status= TaskStatus.SUCCESS;
                this.endTime = System.currentTimeMillis();
            } catch (IOException e) {
                System.out.println ( e.getMessage());
                log.error("Error connecting to ONE: " + e.getMessage());
                this.error = new TaskError();
                this.error.message = e.getMessage();
                this.status = TaskStatus.ERROR;
                this.endTime = System.currentTimeMillis();
                return;
            } catch (Exception e) {
                System.out.println ( e.getMessage());
                log.error("Unknown error undeploying VM: " + e.getMessage());
                this.error = new TaskError();
                this.error.message = e.getMessage();
                this.status = TaskStatus.ERROR;
                this.endTime = System.currentTimeMillis();
                return;
            }
        }

        /* @SuppressWarnings("unchecked")
        public boolean deleteVirtualMachine(String id) throws IOException {
        List rpcParams = new ArrayList ();
        ControlActionType controlAction = ControlActionType.finalize;
        log.info("PONG deleteVirtualMachine id: "+ id);
        rpcParams.add(oneSession);
        rpcParams.add(controlAction.toString());
        rpcParams.add(Integer.parseInt(id));
        Object[] result = null;
        try {
        result = (Object[])xmlRpcClient.execute(VM_UPDATE_COMMAND, rpcParams);
        } catch (XmlRpcException ex) {
        log.error("Connection error trying to update VM: " + ex.getMessage());
        throw new IOException ("Error updating VEE replica , XMLRPC call failed", ex);
        }
        if (result==null) {
        throw new IOException("No result returned from XMLRPC call");
        } else {
        return (Boolean)result[0];
        }
        }*/
    }
public class UndeployNetworkTask extends Task {
String fqnNet;
public UndeployNetworkTask(String netFqn) {
this.fqnNet = netFqn;
log.info("Network fqn " + fqnNet);
}
@Override
public void execute() {
this.status= TaskStatus.RUNNING;
this.startTime = System.currentTimeMillis();
// Undeploy the VM
try {
//deleteNetwork(getNetId(fqnNet).toString());
operations.deleteNetwork(getNetId(fqnNet).toString());
this.status= TaskStatus.SUCCESS;
this.endTime = System.currentTimeMillis();
} catch (IOException e) {
log.error("Error connecting to ONE: " + e.getMessage());
this.error = new TaskError();
this.error.message = e.getMessage();
System.out.println ( e.getMessage());
this.status = TaskStatus.ERROR;
this.endTime = System.currentTimeMillis();
System.out.println ( e.getMessage());
return;
} catch (Exception e) {
log.error("Unknown error undeploying Network: " + e.getMessage());
this.error = new TaskError();
this.error.message = e.getMessage();
System.out.println ( e.getMessage());
this.status = TaskStatus.ERROR;
this.endTime = System.currentTimeMillis();
return;
}
}
/* @SuppressWarnings("unchecked")
public void deleteNetwork(String id) throws IOException {
List rpcParams = new ArrayList<String>();
rpcParams.add(oneSession);
rpcParams.add(new Integer(id) );
Object[] result = null;
try {
result = (Object[])xmlRpcClient.execute(NET_DELETE_COMMAND, rpcParams);
} catch (XmlRpcException ex) {
throw new IOException ("Error deleting the network , XMLRPC call failed", ex);
}
boolean success = (Boolean)result[0];
if(success) {
} else {
throw new IOException("Unknown error trying to delete network: " + (String)result[1]);
}
}*/
}
public class ActionVMTask extends Task {
String fqnVM;
String action;
String errorMessage="";
public ActionVMTask(String fqnVM, String action) {
this.fqnVM = fqnVM;
this.action = action;
}
@Override
public void execute() {
this.status= TaskStatus.RUNNING;
this.startTime = System.currentTimeMillis();
// Undeploy the VM
try {
boolean result = operations.doAction(getVmId(fqnVM).toString(), action);
if (result)
this.status= TaskStatus.SUCCESS;
else {
this.status = TaskStatus.ERROR;
this.error = new TaskError();
this.error.message = errorMessage;
}
this.endTime = System.currentTimeMillis();
} catch (IOException e) {
log.error("Error connecting to VMWare: " + e.getMessage());
this.error = new TaskError();
this.error.message = e.getMessage();
this.status = TaskStatus.ERROR;
this.endTime = System.currentTimeMillis();
return;
} catch (Exception e) {
log.error("Unknown error executing action" + action + ": " + e.getMessage() + " -> " + e.getClass().getCanonicalName());
e.printStackTrace();
this.error = new TaskError();
this.error.message = e.getMessage();
this.status = TaskStatus.ERROR;
this.endTime = System.currentTimeMillis();
return;
}
}
}
    /**
     * Translate an OpenNebula VM template into its TCloud representation.
     * NOTE(review): not implemented yet — always returns the empty string.
     *
     * @param ONETemplate the ONE template text (currently ignored)
     * @return the empty string
     */
    protected static String ONEVM2TCloud(String ONETemplate) {
        // TODO: ONE Template to TCloud translation
        return "";
    }
/* protected static String getNetContext(VirtualHardwareSectionType vh, String veeFqn,String xml, String scriptListProp) throws Exception {
// log.debug("PONG2 xml" +xml+ "\n");
StringBuffer allParametersString = new StringBuffer();
List<RASDType> items = vh.getItem();
int i=0;
for (Iterator<RASDType> iteratorRASD = items.iterator(); iteratorRASD.hasNext();) {
RASDType item = (RASDType) iteratorRASD.next();
// Get the resource type and process it accordingly
int rsType = new Integer(item.getResourceType().getValue());
int quantity = 1;
if (item.getVirtualQuantity() != null) {
quantity = item.getVirtualQuantity().getValue().intValue();
}
switch (rsType) {
case ResourceTypeNIC:
try {
// log.debug("PONG eth0Dns" + eth0Dns + "\n");
// log.debug("PONG eth0Gateway" + eth0Gateway + "\n");
// log.debug("PONG eth1Dns" + eth1Dns + "\n");
// log.debug("PONG eth1Gateway" + eth1Gateway + "\n");
String fqnNet = URICreation.getService(veeFqn) + ".networks." + item.getConnection().get(0).getValue();
allParametersString.append("ip_eth"+i).append(ASSIGNATION_SYMBOL).append("\"$NIC[IP, NETWORK=\\\""+fqnNet+"\\\"]\"").append(MULT_CONF_SEPARATOR).append(LINE_SEPARATOR);
String dns="";
String gateway="";
if(i==0){
dns=eth0Dns;
gateway=eth0Gateway;
}
if(i==1){
dns=eth1Dns;
gateway=eth1Gateway;
}
if(dns.length()>0)
{
allParametersString.append("dns_eth"+i).append(ASSIGNATION_SYMBOL).append(dns).append(MULT_CONF_SEPARATOR).append(LINE_SEPARATOR);
}
if(gateway.length()>0)
{
allParametersString.append("gateway_eth"+i).append(ASSIGNATION_SYMBOL).append(gateway).append(MULT_CONF_SEPARATOR).append(LINE_SEPARATOR);
}
i++;
} catch (FactoryConfigurationError e) {
log.error("Error retrieving parser: " + e.getMessage());
throw new Exception("Error retrieving parser: " + e.getMessage());
} catch (Exception e) {
log.error("Error configuring a XML Builder.");
throw new Exception("Error configuring a XML Builder: " + e.getMessage());
}
break;
default:
//throw new IllegalArgumentException("unknown hw type: " + rsType);
}
}
StringBuffer scriptexec=new StringBuffer();;
if (i==1){
if(netInitScript0.length()>0) {
scriptexec.append("SCRIPT_EXEC=\""+netInitScript0);
}
}
if (i==2){
if(netInitScript1.length()>0) {
scriptexec.append("SCRIPT_EXEC=\""+netInitScript1);
}
}
if (scriptListProp != null & scriptListProp.length()!=0)
{
String[] scriptList = scriptListProp.split("/");
String scriptListTemplate = "";
for (String scrt: scriptList){
if (scrt.indexOf(".py")!=-1)
{
if (scrt.equals("OVFParser.py")) {
System.out.println ("python /mnt/stratuslab/"+scrt);
scriptexec.append("; python /mnt/stratuslab/"+scrt+"");
}
if (scrt.equals("restful-server.py")) {
System.out.println ("/etc/init.d/lb_server start");
scriptexec.append("; /etc/init.d/lb_server start");
}
if (scrt.equals("torqueProbe.py")) {
System.out.println ("/etc/init.d/probe start");
scriptexec.append("; /etc/init.d/probe start");
}
}
}
}
if (scriptexec.length()>0){
scriptexec.append("\"").append(MULT_CONF_SEPARATOR).append(LINE_SEPARATOR);
}
else {
scriptexec.append("");
}
allParametersString.append(scriptexec);
return allParametersString.toString();
}
*/
/*protected String TCloud2ONENet(String xml) throws Exception {
try {
DocumentBuilder builder = DocumentBuilderFactory.newInstance().newDocumentBuilder();
Document doc = builder.parse(new ByteArrayInputStream(xml.getBytes()));
Element root = (Element) doc.getFirstChild();
String fqn = root.getAttribute(TCloudConstants.ATTR_NETWORK_NAME);
StringBuffer allParametersString = new StringBuffer();
NodeList macEnabled = doc.getElementsByTagName(TCloudConstants.TAG_NETWORK_MAC_ENABLED);
Element firstmacenElement = (Element)macEnabled.item(0);
String macenabled = null;
if (firstmacenElement!=null)
{
NodeList textMacenList = firstmacenElement.getChildNodes();
if (((Node)textMacenList.item(0))!=null)
macenabled= ((Node)textMacenList.item(0)).getNodeValue().trim();
}
NodeList netmaskList = doc.getElementsByTagName(TCloudConstants.TAG_NETWORK_NETMASK);
NodeList baseAddressList = doc.getElementsByTagName(TCloudConstants.TAG_NETWORK_BASE_ADDRESS);
NodeList ipLeaseList = doc.getElementsByTagName(TCloudConstants.TAG_NETWORK_IPLEASES);
if (baseAddressList.getLength()==0)
{
allParametersString.append(getTCloud2FixedONENet (fqn));
}
else if (ipLeaseList.getLength()==0)
{
int size = 0;
if (netmaskList.getLength() >0)
{
size = getSizeNetwork ((Element) netmaskList.item(0));
}
allParametersString.append(getTCloud2RangedONENet (fqn, size, baseAddressList.item(0).getTextContent()));
}
else if (ipLeaseList.getLength()>0)
{
int size = 0;
if (netmaskList.getLength() >0)
{
size = getSizeNetwork ((Element) netmaskList.item(0));
}
allParametersString.append(getTCloud2IPElasedONENet (fqn, size, baseAddressList.item(0).getTextContent(), ipLeaseList));
}
System.out.println("Network data sent:\n\n" + allParametersString.toString() + "\n\n");
return allParametersString.toString();
} catch (IOException e1) {
System.out.println("OVF of the virtual machine was not well formed or it contained some errors.");
throw new Exception("OVF of the virtual machine was not well formed or it contained some errors: " + e1.getMessage());
} catch (ParserConfigurationException e) {
System.out.println("Error configuring parser: " + e.getMessage());
throw new Exception("Error configuring parser: " + e.getMessage());
} catch (FactoryConfigurationError e) {
System.out.println("Error retrieving parser: " + e.getMessage());
throw new Exception("Error retrieving parser: " + e.getMessage());
} catch (Exception e) {
e.printStackTrace();
System.out.println("Error configuring a XML Builder.");
throw new Exception("Error configuring a XML Builder: " + e.getMessage());
}
}
*/
/*public String getTCloud2FixedONENet (String fqn)
{
StringBuffer allParametersString = new StringBuffer();
// Translate the simple data to RPC format
allParametersString.append(ONE_NET_NAME).append(ASSIGNATION_SYMBOL).append(fqn).append(LINE_SEPARATOR);
allParametersString.append(ONE_NET_TYPE).append(ASSIGNATION_SYMBOL).append("FIXED").append(LINE_SEPARATOR);
allParametersString.append(ONE_NET_BRIDGE).append(ASSIGNATION_SYMBOL).append(networkBridge).append(LINE_SEPARATOR);
return allParametersString.toString();
}
public String getTCloud2RangedONENet (String fqn, int size, String network)
{
StringBuffer allParametersString = new StringBuffer();
// Translate the simple data to RPC format
allParametersString.append(ONE_NET_NAME).append(ASSIGNATION_SYMBOL).append(fqn).append(LINE_SEPARATOR);
allParametersString.append(ONE_NET_TYPE).append(ASSIGNATION_SYMBOL).append("RANGED").append(LINE_SEPARATOR);
allParametersString.append(ONE_NET_BRIDGE).append(ASSIGNATION_SYMBOL).append(networkBridge).append(LINE_SEPARATOR);
if (size != 0)
allParametersString.append(ONE_NET_SIZE).append(ASSIGNATION_SYMBOL).append(size).append(LINE_SEPARATOR);
allParametersString.append(ONE_NET_ADDRESS).append(ASSIGNATION_SYMBOL).append(network).append(LINE_SEPARATOR);
return allParametersString.toString();
}
public String getTCloud2IPElasedONENet (String fqn, int size, String network, NodeList ipLeaseList)
{
StringBuffer allParametersString = new StringBuffer();
// Translate the simple data to RPC format
allParametersString.append(ONE_NET_NAME).append(ASSIGNATION_SYMBOL).append(fqn).append(LINE_SEPARATOR);
allParametersString.append(ONE_NET_TYPE).append(ASSIGNATION_SYMBOL).append("FIXED").append(LINE_SEPARATOR);
allParametersString.append(ONE_NET_BRIDGE).append(ASSIGNATION_SYMBOL).append(networkBridge).append(LINE_SEPARATOR);
if (size != 0)
allParametersString.append(ONE_NET_SIZE).append(ASSIGNATION_SYMBOL).append(size).append(LINE_SEPARATOR);
// allParametersString.append(ONE_NET_ADDRESS).append(ASSIGNATION_SYMBOL).append(network).append(LINE_SEPARATOR);
for (int i=0; i<ipLeaseList .getLength(); i++){
Node firstIpLeaseNode = ipLeaseList.item(i);
if (firstIpLeaseNode.getNodeType() == Node.ELEMENT_NODE){
Element firstIpLeaseElement = (Element)firstIpLeaseNode;
NodeList ipList =firstIpLeaseElement.getElementsByTagName(TCloudConstants.TAG_NETWORK_IP);
Element firstIpElement = (Element)ipList.item(0);
NodeList textIpList = firstIpElement.getChildNodes();
String ipString = ("IP="+((Node)textIpList.item(0)).getNodeValue().trim());
NodeList macList =firstIpLeaseElement.getElementsByTagName(TCloudConstants.TAG_NETWORK_MAC);
Element firstMacElement = (Element)macList.item(0);
NodeList textMacList = firstMacElement.getChildNodes();
String macString = ("MAC="+((Node)textMacList.item(0)).getNodeValue().trim());
allParametersString.append(ONE_NET_LEASES).append(ASSIGNATION_SYMBOL).append(MULT_CONF_LEFT_DELIMITER);
allParametersString.append(ipString).append(MULT_CONF_SEPARATOR).append(macString).append(MULT_CONF_RIGHT_DELIMITER).append(LINE_SEPARATOR);
}
}
return allParametersString.toString();
}*/
/* public static int getSizeNetwork (Element netmask)
{
if (!netmask.getTextContent().matches("\\d+\\.\\d+\\.\\d+\\.\\d+"))
throw new IllegalArgumentException("Wrong IPv4 format. Expected example: 192.168.0.0 Got: " + netmask.getTextContent());
String[] ipBytes = netmask.getTextContent().split("\\.");
short[] result = new short[4];
for (int i=0; i < 4; i++) {
try {
result[i] = Short.parseShort(ipBytes[i]);
if (result[i]>255) throw new NumberFormatException("Should be in the range [0-255].");
} catch (NumberFormatException nfe) {
throw new IllegalArgumentException("Number out of bounds. Bytes should be on the range 0-255.");
}
}
// The network can host 2^n where n is the number of bits in the network address,
// substracting the broadcast and the network value (all 1s and all 0s).
int size = (int) Math.pow(2, 32.0-getBitNumber(result));
if (size < 8)
size = 8;
else
size -= 2;
return size ;
}*/
    /**
     * Translate an OpenNebula network template into its TCloud representation.
     * NOTE(review): not implemented yet — always returns the empty string.
     *
     * @param ONETemplate the ONE network template text (currently ignored)
     * @return the empty string
     */
    protected static String ONENet2TCloud(String ONETemplate) {
        return "";
    }
/**
* Get the number of bits with value 1 in the given IP.
*
* @return
*/
/* public static int getBitNumber (short[] ip) {
if (ip == null || ip.length != 4)
return 0;
int bits=0;
for (int i=0; i < 4; i++)
for (int j=0; j< 15; j++)
bits += ( ((short)Math.pow(2, j))& ip[i]) / Math.pow(2, j);
return bits;
}*/
/**
* Retrieve the virtual network id given its fqn.
*
* @param fqn
* FQN of the Virtual Network (mapped to its name property in ONE).
*
* @return
* The internal id of the Virtual Network if it exists or -1 otherwise.
*
* @throws Exception
*
*/
protected Integer getNetId(String fqn) throws Exception {
if (!idNetMap.containsKey(fqn))
idNetMap = getNetworkIds();
if (idNetMap.containsKey(fqn))
return idNetMap.get(fqn);
else
return -1;
}
/**
* Retrieve the vm's id given its fqn.
*
* @param fqn
* FQN of the Virtual Machine (mapped to its name property in ONE).
*
* @return
* The internal id of the Virtual Machine if it exists or -1 otherwise.
*
* @throws Exception
*
*/
protected Integer getVmId(String fqn) throws Exception {
if (!idVmMap.containsKey(fqn))
idVmMap = getVmIds();
if (idVmMap.containsKey(fqn))
return idVmMap.get(fqn);
else
return -1;
}
/**
* Retrieve a map of the currently deployed VMs, and its ids.
*
* @return
* A map where the key is the VM's FQN and the value the VM's id.
* @throws Exception
*/
@SuppressWarnings("unchecked")
protected Map<String, Integer> getVmIds() throws Exception {
List rpcParams = new ArrayList<String>();
rpcParams.add(oneSession);
rpcParams.add(-2);
if (this.oneversion.equals("3.0"))
{
rpcParams.add(-1);
rpcParams.add(-1);
rpcParams.add(-2);
}
HashMap<String, Integer> mapResult = new HashMap<String, Integer>();
Object[] result = null;
try {
result = (Object[])xmlRpcClient.execute(VM_GETALL_COMMAND, rpcParams);
} catch (XmlRpcException ex) {
System.out.println (" getVmIds" + ex.getMessage());
throw new IOException ("Error obtaining the VM list: " + ex.getMessage(), ex);
}
boolean success = (Boolean)result[0];
if(success) {
String resultList = (String) result[1];
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
try {
DocumentBuilder builder = factory.newDocumentBuilder();
Document doc = builder.parse(new ByteArrayInputStream(resultList.getBytes()));
NodeList vmList = doc.getElementsByTagName("VM");
for (int i=0; i < vmList.getLength(); i++) {
Element vm = (Element) vmList.item(i);
String fqn = ((Element)vm.getElementsByTagName("NAME").item(0)).getTextContent();
try {
Integer value = Integer.parseInt(((Element)vm.getElementsByTagName("ID").item(0)).getTextContent());
mapResult.put(fqn, value);
} catch(NumberFormatException nfe) {
log.warn("Numerical id expected, got [" + ((Element)vm.getElementsByTagName("ID").item(0)).getTextContent() + "]");
continue;
}
}
return mapResult;
} catch (ParserConfigurationException e) {
log.error("Parser Configuration Error: " + e.getMessage());
throw new IOException ("Parser Configuration Error", e);
} catch (SAXException e) {
log.error("Parse error reading the answer: " + e.getMessage());
throw new IOException ("XML Parse error", e);
}
} else {
log.error("Error recieved from ONE: " + result[1]);
throw new Exception("Error recieved from ONE: " + result[1]);
}
}
@SuppressWarnings("unchecked")
protected HashMap<String, Integer> getNetworkIds() throws IOException {
List rpcParams = new ArrayList();
rpcParams.add(oneSession);
rpcParams.add(-2);
if (this.oneversion.equals("3.0"))
{
rpcParams.add(-1);
rpcParams.add(-1);
}
Object[] result = null;
try {
result = (Object[])xmlRpcClient.execute(NET_GETALL_COMMAND, rpcParams);
} catch (XmlRpcException ex) {
throw new IOException ("Error obtaining the network list", ex);
}
boolean success = (Boolean)result[0];
if(success) {
HashMap<String, Integer> mapResult = new HashMap<String, Integer>();
String resultList = (String) result[1];
DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
try {
DocumentBuilder builder = factory.newDocumentBuilder();
Document doc = builder.parse(new ByteArrayInputStream(resultList.getBytes()));
NodeList vmList = doc.getElementsByTagName("VNET");
for (int i=0; i < vmList.getLength(); i++) {
Element vm = (Element) vmList.item(i);
String fqn = ((Element)vm.getElementsByTagName("NAME").item(0)).getTextContent();
try {
Integer value = Integer.parseInt(((Element)vm.getElementsByTagName("ID").item(0)).getTextContent());
mapResult.put(fqn, value);
} catch(NumberFormatException nfe) {
log.warn("Numerical id expected, got [" + ((Element)vm.getElementsByTagName("ID").item(0)).getTextContent() + "]");
continue;
}
}
return mapResult;
} catch (ParserConfigurationException e) {
throw new IOException ("Parser Configuration Error", e);
} catch (SAXException e) {
throw new IOException ("XML Parse error", e);
}
} else {
throw new IOException("Error recieved from ONE: " +(String)result[1]);
}
}
/**
 * Builds the OpenNebula provisioning driver from the given configuration
 * properties and wires up the XML-RPC client plus the helper objects.
 * Every property is optional: a field keeps its default when its key is
 * absent from {@code prop}.
 *
 * @param prop driver configuration (URL, credentials, hypervisor data, ...)
 * @throws RuntimeException if the configured OpenNebula URL is malformed
 */
public ONEProvisioningDriver(Properties prop) {
	XmlRpcClientConfigImpl config = new XmlRpcClientConfigImpl();
	log.info("Creating OpenNebula conector");
	if (prop.containsKey(URL_PROPERTY)) {
		oneURL = (String) prop.get(URL_PROPERTY);
		log.info("URL created: " + oneURL);
	}
	// The ONE session token has the form "user:password".
	if (prop.containsKey(USER_PROPERTY) && prop.containsKey(PASSWORD_PROPERTY)) {
		oneSession = ((String) prop.get(USER_PROPERTY)) + ":" + ((String) prop.get(PASSWORD_PROPERTY));
		log.info("Session created: " + oneSession);
	}
	if (prop.containsKey(KERNEL_PROPERTY)) {
		hypervisorKernel = ((String) prop.get(KERNEL_PROPERTY));
	}
	if (prop.containsKey(INITRD_PROPERTY)) {
		hypervisorInitrd = ((String) prop.get(INITRD_PROPERTY));
	}
	if (prop.containsKey(ARCH_PROPERTY)) {
		arch = ((String) prop.get(ARCH_PROPERTY));
	}
	// NOTE(review): the original tested the literal key
	// "com.telefonica.claudia.customization.port" but then read
	// Main.CUSTOMIZATION_PORT_PROPERTY; both sides now use the same constant
	// so the check and the read cannot drift apart.
	if (prop.containsKey(Main.CUSTOMIZATION_PORT_PROPERTY)) {
		customizationPort = ((String) prop.get(Main.CUSTOMIZATION_PORT_PROPERTY));
	}
	if (prop.containsKey(ENVIRONMENT_PROPERTY)) {
		environmentRepositoryPath = (String) prop.get(ENVIRONMENT_PROPERTY);
	}
	if (prop.containsKey(NETWORK_BRIDGE)) {
		networkBridge = ((String) prop.get(NETWORK_BRIDGE));
	}
	if (prop.containsKey(ONE_VERSION)) {
		oneversion = ((String) prop.get(ONE_VERSION));
	}
	if (prop.containsKey(XEN_DISK)) {
		xendisk = ((String) prop.get(XEN_DISK));
	}
	if (prop.containsKey(SSHKEY_PROPERTY)) {
		oneSshKey = ((String) prop.get(SSHKEY_PROPERTY));
	}
	if (prop.containsKey(SCRIPTPATH_PROPERTY)) {
		oneScriptPath = ((String) prop.get(SCRIPTPATH_PROPERTY));
		log.info("oneScriptPath " + oneScriptPath);
	}
	if (prop.containsKey(ETH0_GATEWAY_PROPERTY)) {
		eth0Gateway = ((String) prop.get(ETH0_GATEWAY_PROPERTY));
	}
	if (prop.containsKey(ETH0_DNS_PROPERTY)) {
		eth0Dns = ((String) prop.get(ETH0_DNS_PROPERTY));
	}
	if (prop.containsKey(ETH1_GATEWAY_PROPERTY)) {
		eth1Gateway = ((String) prop.get(ETH1_GATEWAY_PROPERTY));
	}
	if (prop.containsKey(ETH1_DNS_PROPERTY)) {
		eth1Dns = ((String) prop.get(ETH1_DNS_PROPERTY));
	}
	if (prop.containsKey(NET_INIT_SCRIPT0)) {
		netInitScript0 = ((String) prop.get(NET_INIT_SCRIPT0));
	}
	if (prop.containsKey(NET_INIT_SCRIPT1)) {
		netInitScript1 = ((String) prop.get(NET_INIT_SCRIPT1));
	}
	try {
		config.setServerURL(new URL(oneURL));
	} catch (MalformedURLException e) {
		// A missing or bad URL makes the driver unusable: fail fast.
		log.error("Malformed URL: " + oneURL);
		throw new RuntimeException(e);
	}
	String server = null;
	if (prop.containsKey("com.telefonica.claudia.server.host")) {
		server = ((String) prop.get("com.telefonica.claudia.server.host"));
	}
	xmlRpcClient = new XmlRpcClient();
	log.info("XMLRPC client created");
	xmlRpcClient.setConfig(config);
	log.info("XMLRPC client configured");
	/* MIGRABILITY TAG */
	text_migrability.put("cross-host", "HOST");
	text_migrability.put("cross-sitehost", "SITE");
	text_migrability.put("none", "NONE");
	operations = new OneOperations(oneSession, xmlRpcClient);
	operations.configOperations(oneversion, networkBridge, environmentRepositoryPath, oneScriptPath, oneSshKey, customizationPort, hypervisorInitrd, hypervisorKernel,
			xendisk, arch, server, netInitScript0, netInitScript1);
	netUtils = new OneNetUtilities(networkBridge);
	// FULL??
}
/**
 * Fetches the state/info document of a virtual machine from OpenNebula.
 *
 * @param id numeric ONE id of the VM, as text
 * @return the parsed XML info document, or null when ONE reports a failure
 *         (callers rely on the null convention here)
 * @throws IOException on XML-RPC transport errors or XML parsing problems
 * @throws NumberFormatException if {@code id} is not a valid integer
 */
public Document getVirtualMachineState(String id) throws IOException {
	List<Object> rpcParams = new ArrayList<Object>();
	rpcParams.add(oneSession);
	// Integer.valueOf replaces the deprecated new Integer(id); it still
	// throws NumberFormatException on bad input, as before.
	rpcParams.add(Integer.valueOf(id));
	log.debug("Virtual machine info requested for id: " + id);
	Object[] result = null;
	try {
		result = (Object[]) xmlRpcClient.execute(VM_GETINFO_COMMAND, rpcParams);
	} catch (XmlRpcException ex) {
		log.error("Connection error trying to get VM information: " + ex.getMessage());
		throw new IOException("Error on reading VM state , XMLRPC call failed", ex);
	}
	boolean completed = (Boolean) result[0];
	if (!completed) {
		// NOTE(review): unlike the other calls, failure is reported as null
		// instead of an exception; preserved for compatibility.
		log.error("VM Info request failed: " + result[1]);
		return null;
	}
	String resultList = (String) result[1];
	DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
	// RESERVOIR ONLY: the info comes with a full XML document (headers
	// included) embedded in the RAW_VMI element; strip it before parsing.
	if (resultList.contains("<RAW_VMI>")) {
		resultList = resultList.replace(resultList.substring(resultList.indexOf("<RAW_VMI>"), resultList.indexOf("</RAW_VMI>") + 10), "");
	}
	try {
		DocumentBuilder builder = factory.newDocumentBuilder();
		// Decode with an explicit charset instead of the platform default
		// (fully qualified so the file's import block is untouched).
		Document doc = builder.parse(new ByteArrayInputStream(
				resultList.getBytes(java.nio.charset.StandardCharsets.UTF_8)));
		log.debug("VM Info request succeded");
		return doc;
	} catch (ParserConfigurationException e) {
		log.error("Error configuring parser: " + e.getMessage());
		throw new IOException("Parser Configuration Error", e);
	} catch (SAXException e) {
		log.error("Parse error obtaining info: " + e.getMessage());
		throw new IOException("XML Parse error", e);
	}
}
/**
 * Looks up a Claudia OVF extension attribute on a virtual system.
 *
 * @param vs        virtual system whose extra attributes are scanned
 * @param attribute local name of the attribute to look for
 * @return the attribute value, or the empty string when it is not present
 */
public String getAtributeVirtualSystem(VirtualSystemType vs, String attribute) throws NumberFormatException {
	// Claudia's OVF extension attributes live in this namespace.
	final QName wanted = new QName("http://schemas.telefonica.com/claudia/ovf", attribute);
	for (Object rawEntry : vs.getOtherAttributes().entrySet()) {
		Map.Entry entry = (Map.Entry) rawEntry;
		if (entry.getKey().equals(wanted)) {
			return (String) entry.getValue();
		}
	}
	return "";
}
/**
 * Flattens the envelope's top-level content into a list of virtual systems:
 * a single VirtualSystemType yields a one-element list, a collection yields
 * all its members, and an empty envelope yields an empty list.
 *
 * @param envelope OVF envelope to inspect
 * @return the virtual systems found (possibly empty, never null)
 */
public ArrayList<VirtualSystemType> getVirtualSystem(EnvelopeType envelope) throws Exception {
	ArrayList<VirtualSystemType> virtualSystems = new ArrayList<VirtualSystemType>();
	ContentType entityInstance = null;
	try {
		entityInstance = OVFEnvelopeUtils.getTopLevelVirtualSystemContent(envelope);
	} catch (EmptyEnvelopeException e) {
		// An empty envelope is reported but produces an empty result list.
		log.error(e);
	}
	// (The original also built an unused HashMap here; removed.)
	if (entityInstance instanceof VirtualSystemType) {
		virtualSystems.add((VirtualSystemType) entityInstance);
	} else if (entityInstance instanceof VirtualSystemCollectionType) {
		VirtualSystemCollectionType collection = (VirtualSystemCollectionType) entityInstance;
		for (VirtualSystemType vs : OVFEnvelopeUtils.getVirtualSystems(collection)) {
			virtualSystems.add(vs);
		}
	}
	return virtualSystems;
}
@Override
public long deleteNetwork(String netFqn) throws IOException {
	// Undeployment runs asynchronously: enqueue the work and report the
	// task id so the caller can track progress.
	UndeployNetworkTask task = new UndeployNetworkTask(netFqn);
	String vdc = URICreation.getVDC(netFqn);
	return TaskManager.getInstance().addTask(task, vdc).getTaskId();
}
@Override
public long deleteVirtualMachine(String vmFqn) throws IOException {
	// VM undeployment runs asynchronously: enqueue the work and report the
	// task id so the caller can track progress.
	UndeployVMTask task = new UndeployVMTask(vmFqn);
	String vdc = URICreation.getVDC(vmFqn);
	return TaskManager.getInstance().addTask(task, vdc).getTaskId();
}
@Override
public long deployNetwork(String org, String vdc, String network, String ovf) throws IOException {
	// Build the network's fully qualified name, then queue the asynchronous
	// deployment and hand back the id of the queued task.
	String netFqn = URICreation.getNetworkFQN(org, vdc, network);
	DeployNetworkTask task = new DeployNetworkTask(netFqn, ovf);
	return TaskManager.getInstance().addTask(task, URICreation.getVDC(netFqn)).getTaskId();
}
@Override
public long deployVirtualMachine(String fqn, String ovf) throws IOException {
	// Deployment is asynchronous: enqueue a task and return its id.
	DeployVMTask task = new DeployVMTask(fqn, ovf);
	String vdc = URICreation.getVDC(fqn);
	return TaskManager.getInstance().addTask(task, vdc).getTaskId();
}
public long powerActionVirtualMachine(String fqn, String action) throws IOException {
	// Power actions are queued like any other asynchronous VM task.
	ActionVMTask task = new ActionVMTask(fqn, action);
	String vdc = URICreation.getVDC(fqn);
	return TaskManager.getInstance().addTask(task, vdc).getTaskId();
}
/**
 * Looks up a single network description. Not implemented yet: this is an
 * auto-generated stub and always returns null, whatever fqn is given.
 */
public String getNetwork(String fqn) throws IOException {
// TODO Auto-generated method stub
return null;
}
/**
 * Lists the known networks. Not implemented yet: this is an auto-generated
 * stub and always returns null.
 */
@Override
public String getNetworkList() throws IOException {
// TODO Auto-generated method stub
return null;
}
/**
 * Builds an XML VEE description of a virtual machine: resolves the ONE id
 * for the given fully qualified name, fetches the machine from ONE and
 * extracts CPU/RAM/DISK figures plus the network IPs.
 *
 * @param fqn fully qualified name of the virtual machine
 * @return the generated XML description, or null when the machine cannot be
 *         resolved or described (callers rely on the null convention)
 */
@Override
public String getVirtualMachine(String fqn) throws IOException {
	String id = null;
	try {
		id = getVmId(fqn).toString();
	} catch (Exception e) {
		// Log instead of printStackTrace; the lookup below will then fail
		// and null is returned, matching the previous behaviour.
		log.error("Could not resolve the ONE id for " + fqn, e);
	}
	try {
		String result = operations.getVirtualMachine(id);
		// Was System.out.println; routed through the logger instead.
		log.debug("RESULT " + result);
		ONEUtilities utils = new ONEUtilities();
		HashMap data = utils.getCpuRamDisk(result);
		HashMap ips = utils.getNetworksIp(result);
		return utils.generateXMLVEE(fqn, ips, (String) data.get("CPU"),
				(String) data.get("MEMORY"), (String) data.get("DISK"));
	} catch (Exception e) {
		// Any failure is reported to the caller as "no such machine"; the
		// cause is at least logged now instead of being silently swallowed.
		log.error("Could not build the VM description for " + fqn, e);
		return null;
	}
}
}
| StratusLab/claudia | driver-one/src/main/java/com/telefonica/claudia/smi/provisioning/ONEProvisioningDriver.java | Java | agpl-3.0 | 46,617 |
<?php
class MetadataProfilesAction extends KalturaAdminConsolePlugin
{
/**
 * Stores the navigation metadata for this admin-console action.
 *
 * @param string|null $label     label shown for the action
 * @param string|null $action    action identifier
 * @param string|null $rootLabel label of the parent menu entry
 */
public function __construct($label = null, $action = null, $rootLabel = null)
{
	$this->rootLabel = $rootLabel;
	$this->action = $action;
	$this->label = $label;
}
/**
 * @return string - absolute file path of the phtml template (the directory
 *                  this class file lives in, with symlinks resolved)
 */
public function getTemplatePath()
{
	$templateDir = dirname(__FILE__);
	return realpath($templateDir);
}
/**
 * Permissions needed to access this admin-console page.
 *
 * @return array list of KalturaPermissionName constants
 */
public function getRequiredPermissions()
{
	$permissions = array();
	$permissions[] = KalturaPermissionName::SYSTEM_ADMIN_PUBLISHER_CONFIG;
	return $permissions;
}
/**
 * Entry point for the metadata-profiles admin page. Currently only installs
 * the partner filter form on the view; the actual listing/pagination is
 * still disabled (kept below as a commented-out sketch).
 *
 * @param Zend_Controller_Action $action the dispatching controller action
 */
public function doAction(Zend_Controller_Action $action)
{
	$request = $action->getRequest();
	// Paging parameters (defaults: first page, 10 rows). Unused while the
	// listing below stays disabled.
	$page = $request->getParam('page', 1);
	$pageSize = $request->getParam('pageSize', 10);
	$action->view->form = new Form_PartnerFilter();
	// init filter
	$partnerFilter = $this->getPartnerFilterFromRequest($request);
	/*
	// get results and paginate
	$paginatorAdapter = new Kaltura_FilterPaginator("metadataProfile", "listAction", null, $partnerFilter);
	$paginator = new Kaltura_Paginator($paginatorAdapter, $request);
	$paginator->setCurrentPageNumber($page);
	$paginator->setItemCountPerPage($pageSize);
	// popule the form
	$form->populate($request->getParams());
	// set view
	$action->view->form = $form;
	$action->view->paginator = $paginator;
	*/
	// NOTE(review): a block of statements used to follow an unconditional
	// "return;" here (building a second filter form and an unused
	// KalturaMetadataProfileFilter); it was unreachable dead code and has
	// been removed.
}
/**
 * Builds the metadata-profile filter from the request's filter_type /
 * filter_input parameters: 'byid' filters by exact id, 'byname' by name
 * pattern; any other type leaves both fields unset.
 *
 * NOTE(review): partner status/order constants are applied to a
 * KalturaMetadataProfileFilter here — confirm this is intended.
 *
 * @param Zend_Controller_Request_Abstract $request current request
 * @return KalturaMetadataProfileFilter
 */
private function getPartnerFilterFromRequest(Zend_Controller_Request_Abstract $request)
{
	$filter = new KalturaMetadataProfileFilter();
	$filterType = $request->getParam('filter_type');
	$filterInput = $request->getParam('filter_input');
	// PHP's switch uses the same loose comparison as the original == chain.
	switch ($filterType)
	{
		case 'byid':
			$filter->idEqual = $filterInput;
			break;
		case 'byname':
			$filter->nameLike = $filterInput;
			break;
		//case 'free': free-text filtering was never implemented
	}
	$statuses = array(KalturaPartnerStatus::ACTIVE, KalturaPartnerStatus::BLOCKED);
	$filter->statusIn = implode(',', $statuses);
	$filter->orderBy = KalturaPartnerOrderBy::ID_DESC;
	return $filter;
}
} | richhl/kalturaCE | package/app/app/plugins/metadata/admin/MetadataProfilesAction.php | PHP | agpl-3.0 | 2,988 |
package nl.wietmazairac.bimql.set.attribute;
/******************************************************************************
* Copyright (C) 2009-2017 BIMserver.org
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as
* published by the Free Software Foundation, either version 3 of the
* License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see {@literal<http://www.gnu.org/licenses/>}.
*****************************************************************************/
import org.bimserver.models.ifc2x3tc1.IfcCShapeProfileDef;
/**
 * Applies a textual attribute assignment to an IfcCShapeProfileDef instance:
 * given an attribute name and the new value as a string, {@link #setAttribute()}
 * parses the value as needed and calls the matching setter.
 */
public class SetAttributeSubIfcCShapeProfileDef {
	// Target object (expected to be an IfcCShapeProfileDef when a known
	// attribute name is matched), the attribute to change and its new value.
	private Object object;
	private String attributeName;
	private String attributeNewValue;

	/** No-arg constructor; configure via the setters before use. */
	public SetAttributeSubIfcCShapeProfileDef() {
	}

	/** Fully-configured constructor. */
	public SetAttributeSubIfcCShapeProfileDef(Object object, String attributeName, String attributeNewValue) {
		this.object = object;
		this.attributeName = attributeName;
		this.attributeNewValue = attributeNewValue;
	}

	public Object getObject() {
		return object;
	}

	public void setObject(Object object) {
		this.object = object;
	}

	public String getAttributeName() {
		return attributeName;
	}

	public void setAttributeName(String attributeName) {
		this.attributeName = attributeName;
	}

	public String getAttributeNewValue() {
		return attributeNewValue;
	}

	public void setAttributeNewValue(String attributeNewValue) {
		this.attributeNewValue = attributeNewValue;
	}

	/**
	 * Dispatches on the attribute name and applies the new value. Numeric
	 * attributes are parsed with Double.parseDouble (throwing
	 * NumberFormatException on bad input, as before); Position and
	 * ProfileType expect non-string types and are ignored, as are unknown
	 * attribute names. The cast is performed per case so behaviour for
	 * unmatched names is unchanged even if the object has another type.
	 */
	public void setAttribute() {
		switch (attributeName) {
		case "Depth":
			((IfcCShapeProfileDef) object).setDepth(Double.parseDouble(attributeNewValue));
			break;
		case "CentreOfGravityInX":
			((IfcCShapeProfileDef) object).setCentreOfGravityInX(Double.parseDouble(attributeNewValue));
			break;
		case "DepthAsString":
			((IfcCShapeProfileDef) object).setDepthAsString(attributeNewValue);
			break;
		case "WidthAsString":
			((IfcCShapeProfileDef) object).setWidthAsString(attributeNewValue);
			break;
		case "CentreOfGravityInXAsString":
			((IfcCShapeProfileDef) object).setCentreOfGravityInXAsString(attributeNewValue);
			break;
		case "GirthAsString":
			((IfcCShapeProfileDef) object).setGirthAsString(attributeNewValue);
			break;
		case "InternalFilletRadiusAsString":
			((IfcCShapeProfileDef) object).setInternalFilletRadiusAsString(attributeNewValue);
			break;
		case "InternalFilletRadius":
			((IfcCShapeProfileDef) object).setInternalFilletRadius(Double.parseDouble(attributeNewValue));
			break;
		case "Girth":
			((IfcCShapeProfileDef) object).setGirth(Double.parseDouble(attributeNewValue));
			break;
		case "WallThicknessAsString":
			((IfcCShapeProfileDef) object).setWallThicknessAsString(attributeNewValue);
			break;
		case "WallThickness":
			((IfcCShapeProfileDef) object).setWallThickness(Double.parseDouble(attributeNewValue));
			break;
		case "Width":
			((IfcCShapeProfileDef) object).setWidth(Double.parseDouble(attributeNewValue));
			break;
		case "Position":
			// Expects an IfcAxis2Placement2D; cannot be set from a string.
			break;
		case "ProfileName":
			((IfcCShapeProfileDef) object).setProfileName(attributeNewValue);
			break;
		case "ProfileType":
			// Expects an IfcProfileTypeEnum; cannot be set from a string.
			break;
		default:
			// Unknown attribute names are silently ignored.
			break;
		}
	}
}
| opensourceBIM/bimql | BimQL/src/nl/wietmazairac/bimql/set/attribute/SetAttributeSubIfcCShapeProfileDef.java | Java | agpl-3.0 | 4,630 |
package com.tesora.dve.variables;
/*
* #%L
* Tesora Inc.
* Database Virtualization Engine
* %%
* Copyright (C) 2011 - 2014 Tesora Inc.
* %%
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License, version 3,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
* #L%
*/
import java.sql.Types;
import com.tesora.dve.exceptions.PEException;
import com.tesora.dve.variables.ResultCollector.ResultCollectorFactory;
/**
 * Conversion strategy for a variable's value type: subclasses translate
 * between the internal representation and its external (string) form.
 */
public abstract class ValueMetadata<Type> {

	// varName is presumably used for error reporting by implementations —
	// TODO confirm against the concrete subclasses.
	public abstract Type convertToInternal(String varName, String in) throws PEException;

	public abstract String convertToExternal(Type in);

	public abstract String toRow(Type in);

	/**
	 * Wraps the external representation of the value in a single VARCHAR
	 * result collector.
	 */
	public ResultCollector getValueAsResult(Type in) throws PEException {
		final String external = convertToExternal(in);
		return ResultCollectorFactory.getInstance(Types.VARCHAR, external);
	}

	public abstract boolean isNumeric();

	public abstract String getTypeName();
}
| Tesora/tesora-dve-pub | tesora-dve-core/src/main/java/com/tesora/dve/variables/ValueMetadata.java | Java | agpl-3.0 | 1,374 |
<?php
namespace App;
use Illuminate\Database\Eloquent\Model;
use Illuminate\Database\Eloquent\SoftDeletes;
use Hrshadhin\Userstamps\UserstampsTrait;
/**
 * Eloquent model for a grade entry (a name plus its grading rules).
 */
class Grade extends Model
{
// Soft deletes: rows are flagged deleted instead of being removed.
use SoftDeletes;
// Userstamps trait from Hrshadhin\Userstamps — presumably stamps rows with
// the creating/updating user; TODO confirm against the package docs.
use UserstampsTrait;
/**
* The attributes that are mass assignable.
*
* @var array
*/
protected $fillable = [
'name',
'rules',
];
}
| hrshadhin/school-management-system | app/Grade.php | PHP | agpl-3.0 | 387 |
<?php
// French translation table for the "modifica costi" (edit extra costs) admin
// pages: maps each Italian UI message held in $messaggio to its French
// version. Messages with no matching case are left unchanged.
// Changes: the duplicate (unreachable) case "Chiedere" and the two dead
// no-op case "": entries have been removed; behaviour is identical.
switch ($messaggio) {
case "Modifica il costo aggiuntivo": $messaggio = "Modifier le surcoût"; break;
case "Torna indietro": $messaggio = "Retour"; break;
case "tariffa": $messaggio = "taux"; break;
case "Nome": $messaggio = "Nom"; break;
case "Cambia in": $messaggio = "Changé à"; break;
case "Tipo": $messaggio = "Type"; break;
case "Prezzo": $messaggio = "Prix"; break;
case "la tariffa": $messaggio = "le taux"; break;
case "unico": $messaggio = "simple"; break;
case "settimanale": $messaggio = "hebdomadaire"; break;
case "giornaliero": $messaggio = "quotidien"; break;
case "fisso": $messaggio = "a fixé"; break;
case "percentuale": $messaggio = "pourcentage"; break;
case "Numero di settimane": $messaggio = "Nombre de semaines"; break;
case "Numero di giorni": $messaggio = "Nombre de jours"; break;
case "solo per costi settimanali": $messaggio = "seulement pour des coûts hebdomadaires"; break;
case "solo per costi giornalieri": $messaggio = "seulement pour des coûts quotidiens"; break;
case "tutte": $messaggio = "tout"; break;
case "tutti": $messaggio = "tous"; break;
case "tutte meno una": $messaggio = "tout moins un"; break;
case "tutti meno uno": $messaggio = "tous moins un"; break;
case "chiedere": $messaggio = "demander"; break;
case "Assegna automaticamente con le tariffe": $messaggio = "Assigner automatiquement avec des taux"; break;
case "Mostrare nella pagina di inserzione prenotazioni?": $messaggio = "affichage en page d'insertion de réservation?"; break;
case "SI": $messaggio = "OUI"; break;
case "NO": $messaggio = "NON"; break;
case "Chiedere per quanto deve essere moltiplicato?": $messaggio = "demander par combien il doit être multiplié?"; break;
case "Considerare il costo aggiuntivo come letto(i) aggiuntivo(i)?": $messaggio = "considérer ce surcoût comme couche supplémentaire?"; break;
case "Modifica il costo": $messaggio = "Modifier coût"; break;
case "Il costo è stato cancellato": $messaggio = "le coût a été supprimé"; break;
case "Non si può proseguire perchè il costo aggiuntivo è già stato modificato nel frattempo": $messaggio = "le surcoût ne peut pas être changé parce qu'il a déjà changé en attendant"; break;
case "Non sono stati inseriti tutti i dati necessari": $messaggio = "toutes les données nécessaires n'ont pas été insérée"; break;
case "I soldi o la percentuale del costo sono sbagliati": $messaggio = "le prix ou le pourcentage du coût est erroné"; break;
case "Un costo aggiuntivo dello stesso tipo e con lo stesso nome esiste già": $messaggio = "un surcoût du mêmes type et prix existe déjà"; break;
case "Per chiedere il numero di settimane o il numero da moltiplicare, il costo aggiuntivo non deve essere applicato automaticamente a nessuna tariffa": $messaggio = "pour demander le nombre de semaines ou par combien il doit multiplier le coût ne doit pas n'être appliqué à aucun taux automatiquement"; break;
case "Per chiedere il numero di giorni o il numero da moltiplicare, il costo aggiuntivo non deve essere applicato automaticamente a nessuna tariffa": $messaggio = "pour demander le nombre de jours ou par combien il doit multiplier le coût ne doit pas n'être appliqué à aucun taux automatiquement"; break;
case "Il nome del costo aggiuntivo verrà cambiato da": $messaggio = "le nom du surcoût sera changé de"; break;
case "a": $messaggio = "à"; break;
case "Il tipo di costo aggiuntivo verrà cambiato da": $messaggio = "le type du surcoût sera changé de"; break;
case "arrotondato a": $messaggio = "arrondi à"; break;
case "Il costo verrà applicato": $messaggio = "le coût sera appliqué à"; break;
case "tutte le settimane": $messaggio = "toutes les semaines"; break;
case "tutti i giorni": $messaggio = "toutes les jours"; break;
case "tutte le settimane meno una": $messaggio = "toutes les semaines sans une"; break;
case "tutti i giorni meno uno": $messaggio = "toutes les jours sans une"; break;
case "un numero di settimane a scelta": $messaggio = "un certain nombre de semaines au choix"; break;
case "un numero di giorni a scelta": $messaggio = "un certain nombre de jours au choix"; break;
case "Il valore del costo aggiuntivo verrà cambiato dal": $messaggio = "la valeur du surcoût sera changée de"; break;
case "al": $messaggio = "à"; break;
case "Il valore del costo aggiuntivo verrà cambiato da": $messaggio = "la valeur du surcoût sera changée de"; break;
case "L'applicazione del costo verrà cambiata da": $messaggio = "l'application de coût sera changée de"; break;
case "Il costo <b>non</b> verrà più associato automaticamente alla": $messaggio = "Le coût <b>ne</b> sera <b>pas</b> automatiquement assignée désormais à"; break;
case "Il costo verrà associato automaticamente alla": $messaggio = "le coût sera automatiquement assigné à"; break;
case "Il costo verrà mostrato nella pagina di inserzione prenotazioni": $messaggio = "le coût sera montré dans la page d'insertion de réservation"; break;
case "Il costo non verrà più mostrato nella pagina di inserzione prenotazioni": $messaggio = "le coût ne sera plus montré dans la page d'insertion de réservation"; break;
case "Verrà chiesto per quanto si deve moltiplicare il costo": $messaggio = "te seras renseigné sur combien sera multiplier le coût"; break;
case "Non verrà più chiesto per quanto si deve moltiplicare il costo": $messaggio = "ne te seras plus renseigné sur combien sera multiplier le coût"; break;
case "Il costo verrà considerato come letto(i) aggiuntivo(i)": $messaggio = "le coût sera considéré en tant que couche supplémentaire"; break;
case "Il costo non verrà più considerato come letto(i) aggiuntivo(i)": $messaggio = "le coût ne sera plus considéré en tant que couche supplémentaire"; break;
case "Continua": $messaggio = "Continuer"; break;
case "% su": $messaggio = "% de"; break;
case "il prezzo totale": $messaggio = "prix total"; break;
case "la caparra": $messaggio = "le dépôt"; break;
case "totale meno caparra": $messaggio = "total moins dépôt"; break;
case "minimo": $messaggio = "minimum"; break;
case "massimo": $messaggio = "maximum"; break;
case "esattamente": $messaggio = "exactement"; break;
case "settimane": $messaggio = "semaines"; break;
case "giorni": $messaggio = "jours"; break;
case "Se possibile": $messaggio = "Si possible"; break;
case "Sempre": $messaggio = "Toujours"; break;
case "Associare a specifiche settimane della prenotazione": $messaggio = "associé aux semaines spécifiques de la réservation"; break;
case "Associare a specifici giorni della prenotazione": $messaggio = "associé aux jours spécifiques de la réservation"; break;
case "Tutte": $messaggio = "Tout"; break;
case "Tutti": $messaggio = "Tous"; break;
case "Tutte meno": $messaggio = "Tout moins"; break;
case "Tutti meno": $messaggio = "Tous moins"; break;
case "la prima": $messaggio = "la première"; break;
case "il primo": $messaggio = "le premier"; break;
case "l'ultima": $messaggio = "la dernière"; break;
case "l'ultimo": $messaggio = "le dernier"; break;
case "Chiedere": $messaggio = "Demander"; break;
case "settimane sì": $messaggio = "semaines oui"; break;
case "giorni sì": $messaggio = "jours oui"; break;
case "settimane no": $messaggio = "semaines non"; break;
case "giorni no": $messaggio = "jours non"; break;
case "e": $messaggio = "et"; break;
case "Giorni della settimana selezionati": $messaggio = "Slectionner les jours de semaine"; break;
case "Lunedì": $messaggio = "Lundi"; break;
case "Martedì": $messaggio = "Mardi"; break;
case "Mercoledì": $messaggio = "Mercredi"; break;
case "Giovedì": $messaggio = "Jeudi"; break;
case "Venerdì": $messaggio = "Vendredi"; break;
case "Sabato": $messaggio = "Samedi"; break;
case "Domenica": $messaggio = "Dimanche"; break;
case "Moltiplicare il costo per": $messaggio = "multiplier le coût par"; break;
case "Uno": $messaggio = "Un"; break;
case "Numero di persone": $messaggio = "Nombre de personne"; break;
case "Numero di persone totale": $messaggio = "Nombre total de personne"; break;
case "escluse quelle dei costi con letti aggiuntivi": $messaggio = "à l'exclusion de ceux des coûts avec les lits supplémentaires"; break;
case "Periodi in cui è permesso<br> inserire il costo": $messaggio = "Les périodes où s'est permit<br> d'insérer le coût"; break;
case "In tutti": $messaggio = "Tous"; break;
case "Solo nei periodi selezionati": $messaggio = "seulement périodes choisies"; break;
case "dal": $messaggio = "de"; break;
case "Elimina un periodo": $messaggio = "Supprimer période"; break;
case "Aggiungi un periodo": $messaggio = "Ajouter période"; break;
case "se tutte le settimane della prenotazione sono all'interno dei periodi selezionati": $messaggio = "si toutes les semaines de la réservation ont lieu dans les périodes choisies"; break;
case "se tutti i giorni della prenotazione sono all'interno dei periodi selezionati": $messaggio = "si tous les jours de la réservation ont lieu dans les périodes choisies"; break;
case "se anche una sola settimana della prenotazione è all'interno dei periodi selezionati": $messaggio = "si même seulement une semaine de la réservation a lieu dans les périodes choisies"; break;
case "se anche un solo giorno della prenotazione è all'interno dei periodi selezionati": $messaggio = "si même seulement un jour de la réservation a lieu dans les périodes choisies"; break;
case "applicare il costo solo in settimane permesse all'interno della prenotazione": $messaggio = "s'appliquer le coût seulement aux semaines autorisées dans la réservation"; break;
case "applicare il costo solo in giorni permessi all'interno della prenotazione": $messaggio = "s'appliquer le coût seulement aux jours autorisées dans la réservation"; break;
case "Caratteristiche del costo<br> da mantenere quando si<br> modifica una prenotazione": $messaggio = "Coût characteristique qu'on prend<br> quand une réservation est modifiée"; break;
case "Valore percentuale": $messaggio = "valeur de pourcentage"; break;
case "e settimane associate": $messaggio = "et semaines associées"; break;
case "e giorni associati": $messaggio = "et jours associées"; break;
case "Assegnazione settimane": $messaggio = "Assignement de semaines"; break;
case "Assegnazione giorni": $messaggio = "Assignement de jours"; break;
case "Numero per cui viene moltiplicato": $messaggio = "nombre par lequel il est multiplié"; break;
case "Periodi permessi": $messaggio = "périodes autorisées"; break;
case "Associazione alle tariffe": $messaggio = "association de taux"; break;
case "Mostrare nella pagina di inserimento delle prenotazioni": $messaggio = "affichage dans page d'insertion de réservations"; break;
case "Si": $messaggio = "Oui"; break;
case "No": $messaggio = "Non"; break;
case "Considerare il costo come letto/i aggiuntivo/i": $messaggio = "considérer ce surcoût comme couche supplémentaire"; break;
case "Limitarne il numero che è possibile avere contemporaneamente in uno stesso periodo": $messaggio = "limite le nombre qu'il est possible d'avoir contemporairement dans la même période"; break;
case "I valori inseriti sono <div style=\"display: inline; color: red;\">errati</div> o incongruenti": $messaggio = "les valeurs insérées sont <div style=\"display: inline; color: red;\">erronées</div> ou corrompues"; break;
case "Niente da modificare": $messaggio = "rien a été modifié"; break;
case "Il costo aggiuntivo": $messaggio = "Le surcoût"; break;
case "è stato modificato": $messaggio = "a été modifié"; break;
case "Tariffe incompatibili": $messaggio = "taux incompatibles"; break;
case "Quando possibile assegna automaticamente se mancano": $messaggio = "si possible assigner automatiquement si"; break;
case "meno di": $messaggio = "moins que"; break;
case "più di": $messaggio = "plus que"; break;
case "giorni dalla data iniziale della prenotazione quando viene inserita": $messaggio = "Les jours restant de la date commençante de la réservation quand il est inséré"; break;
case "Quando possibile assegna automaticamente se si inseriscono": $messaggio = "si possible assigner automatiquement si"; break;
case "o più prenotazioni contemporaneamente": $messaggio = "ou plus de réservations sont insérées contemporairement"; break;
case "Sempre in periodi permessi": $messaggio = "toujours dans des périodes accordées"; break;
case "Non si può <b style=\"font-weight: normal; color: red;\">contemporaneamente</b> mostrare il costo nella pagina di inserimento delle prenotazioni ed assegnarlo automaticamente con una tariffa": $messaggio = "tu ne peux pas <b style=\"font-weight: normal; color: red;\">contemporairement</b> afficher le coût dans page d'insertion de réservation et l'assigner automatiquement avec un taux"; break;
case "tra": $messaggio = "entre"; break;
case "Beni dell'inventario da<br> eliminare quando si<br> inserisce il costo": $messaggio = "les articles de inventaire sont supprimées <br> quand le coût est inséré"; break;
// NOTE(review): the entries below are still in English — this is a French
// translation file, so they look like untranslated leftovers; confirm and
// translate with a French speaker rather than guessing here.
case "Nessuno": $messaggio = "No one"; break;
case "Beni selezionati": $messaggio = "Selected items"; break;
case "Aggiungi un bene": $messaggio = "Add an item"; break;
case "Elimina un bene": $messaggio = "Delete an item"; break;
case "elimina i beni dall'inventario del magazzino": $messaggio = "Delete items from inventory of stockroom"; break;
case "Beni dell'inventario da eliminare": $messaggio = "Inventory items to delete"; break;
case "Mostrare raggruppato con costi simili della stessa categoria in inserimento": $messaggio = "Display grouped with similar costs from same category in insertion"; break;
case "Tasse": $messaggio = "Taxes"; break;
case "Categoria": $messaggio = "Category"; break;
case "parte fissa della tariffa": $messaggio = "fixed part of the rate"; break;
case "parte per una persona della tariffa": $messaggio = "one person part of the rate"; break;
case "Numero massimo": $messaggio = "Maximum number"; break;
case "0 se illimitato": $messaggio = "0 for unlimited"; break;
case "meno": $messaggio = "minus"; break;
case "il costo viene moltiplicato per<br> zero se la somma è negativa": $messaggio = "the cost will be multiplied by<br> zero if the sum is negative"; break;
case "Combina con altri costi": $messaggio = "Combine with other costs"; break;
case "i costi combinabili vengono mostrati tutti assieme con il nome della categoria ed ognuno è inserito se possibile": $messaggio = "combined costs are shown all together with category name and each one is inserted when possible"; break;
case "aggiungendo": $messaggio = "adding"; break;
case "Costi combinati": $messaggio = "Combined costs"; break;
case "Opzioni in conflitto": $messaggio = "Conflicting options"; break;
case "Attenzione": $messaggio = "Notice"; break;
case "le seguenti opzioni sono state attivate senza essere mantenute quando la prenotazione viene modificata": $messaggio = "the following options have been enabled without being kept when a reservation is modified"; break;
} # fine switch ($messaggio)
?> | pollux1er/gesthotel | includes/lang/fr/modifica_costi.php | PHP | agpl-3.0 | 15,679 |
// Auto-generated Atlassian Clover code-coverage data for ListeningJSlider;
// consumed by the Clover HTML report pages. Not meant to be edited by hand.
var clover = new Object();
// JSON: {classes : [{name, id, sl, el, methods : [{sl, el}, ...]}, ...]}
clover.pageData = {"classes":[{"el":50,"id":42785,"methods":[{"el":42,"sc":2,"sl":35},{"el":49,"sc":2,"sl":44}],"name":"ListeningJSlider","sl":30}]}
// JSON: {test_ID : {"methods": [ID1, ID2, ID3...], "name" : "testXXX() void"}, ...};
// (empty here: no per-test coverage was recorded for this class)
clover.testTargets = {}
// JSON: { lines : [{tests : [testid1, testid2, testid3, ...]}, ...]};
clover.srcFileLines = [[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], []]
| cm-is-dog/rapidminer-studio-core | report/html/com/rapidminer/gui/plotter/settings/ListeningJSlider.js | JavaScript | agpl-3.0 | 662 |
/*
* This file is part of huborcid.
*
* huborcid is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* huborcid is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with huborcid. If not, see <http://www.gnu.org/licenses/>.
*/
'use strict';
// $resource-backed REST client for the envVariables endpoint.
// NOTE: the factory parameter names ($resource, DateUtils) double as
// AngularJS dependency-injection tokens and must not be renamed.
angular.module('huborcidApp')
    .factory('EnvVariable', function ($resource, DateUtils) {
        // Deserialize the raw response body for single-entity fetches.
        var parseEntity = function (data) {
            data = angular.fromJson(data);
            return data;
        };

        var actions = {
            'query': { method: 'GET', isArray: true},
            'get': {
                method: 'GET',
                transformResponse: parseEntity
            },
            'update': { method:'PUT' }
        };

        return $resource('api/envVariables/:id', {}, actions);
    });
| Cineca/OrcidHub | src/main/webapp/scripts/components/entities/envVariable/envVariable.service.js | JavaScript | agpl-3.0 | 1,196 |
<?php
/**
* OpenEyes.
*
* (C) Moorfields Eye Hospital NHS Foundation Trust, 2008-2011
* (C) OpenEyes Foundation, 2011-2013
* This file is part of OpenEyes.
* OpenEyes is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.
* OpenEyes is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
* You should have received a copy of the GNU Affero General Public License along with OpenEyes in a file titled COPYING. If not, see <http://www.gnu.org/licenses/>.
*
* @link http://www.openeyes.org.uk
*
* @author OpenEyes <info@openeyes.org.uk>
* @copyright Copyright (c) 2011-2013, OpenEyes Foundation
* @license http://www.gnu.org/licenses/agpl-3.0.html The GNU Affero General Public License V3.0
*/
?>
<div class="element-fields">
    <?php
    // Paired selectors for the operating surgeon (mandatory: '- Please
    // select -') and the assistant (optional: '- None -'); both draw
    // their options from the same $element->surgeons pool.
    echo $form->dropDownListRow(
        $element,
        array(
            'surgeon_id',
            'assistant_id',
        ),
        array(
            CHtml::listData($element->surgeons, 'id', 'ReversedFullName'),
            CHtml::listData($element->surgeons, 'id', 'ReversedFullName'),
        ),
        array(
            array('empty' => '- Please select -'),
            array('empty' => '- None -'),
        ),
        array('field' => 9)
    )?>
    <?php
    // Optional supervising surgeon, same candidate list, defaulting to none.
    echo $form->dropDownList($element, 'supervising_surgeon_id', CHtml::listData($element->surgeons, 'id', 'ReversedFullName'), array('empty' => '- None -'), false, array('field' => 3))?>
</div>
| FiviumAustralia/OpenEyes | protected/modules/OphTrOperationnote/views/default/form_Element_OphTrOperationnote_Surgeon.php | PHP | agpl-3.0 | 1,752 |
<?php
namespace Gallery\Models\Db\Row;
use Gallery\Models\Db\Table\Gallery_Albums;
class Image extends \Zend_Db_Table_Row_Abstract {

    /**
     * Delete this image if it is owned by the currently logged-in user.
     *
     * Removes the image from its album, deletes all comments and likes
     * referencing it, unlinks the original, thumbnail and cropped files
     * from the upload directory, and finally deletes the database row.
     *
     * @return bool true when exactly one database row was deleted,
     *              false otherwise (including when the session user
     *              is not the owner of the image)
     */
    public function deleteImage() {
        if ($this->userid == $_SESSION['user']->userid) {
            $albums = new Gallery_Albums();
            $album = $albums->find($this->albumid);
            $album->current()->removeImage($this->id);
            $settings = \Core\Cunity::get("settings");
            $likes = new \Likes\Models\Db\Table\Likes();
            $comments = new \Comments\Models\Db\Table\Comments();
            // Build the WHERE clause once instead of quoting it twice.
            $where = $this->_getTable()->getAdapter()->quoteInto("ref_id=? AND ref_name='image'", $this->id);
            $comments->delete($where);
            $likes->delete($where);
            $baseDir = "../data/uploads/" . $settings->getSetting("filesdir") . "/";
            // Guard every unlink with file_exists(): previously only the
            // cropped variant was guarded, so a missing original or
            // thumbnail raised a PHP warning.
            foreach (array("", "thumb_", "cr_") as $prefix) {
                $file = $baseDir . $prefix . $this->filename;
                if (file_exists($file)) {
                    unlink($file);
                }
            }
            return ($this->delete() == 1);
        }
        return false;
    }
}
| kolplex/cunity | lib/Gallery/Models/Db/Row/Image.php | PHP | agpl-3.0 | 1,307 |
#
# Copyright (c) 2014 ThoughtWorks, Inc.
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
from pixelated.support.encrypted_file_storage import EncryptedFileStorage
import os
import re
import dateutil.parser
from pixelated.adapter.model.status import Status
from pixelated.adapter.search.contacts import contacts_suggestions
from whoosh.index import FileIndex
from whoosh.fields import Schema, ID, KEYWORD, TEXT, NUMERIC
from whoosh.qparser import QueryParser
from whoosh.qparser import MultifieldParser
from whoosh.writing import AsyncWriter
from whoosh import sorting
from pixelated.support.functional import unique
import traceback
class SearchEngine(object):
    """Full-text search over the user's mail, backed by a Whoosh index
    kept in an encrypted on-disk storage (EncryptedFileStorage).

    Provides mail indexing/removal, free-form search with pagination,
    per-tag counting and contact suggestions.
    """
    # Default parent directory for the index when no agent_home is given.
    DEFAULT_INDEX_HOME = os.path.join(os.environ['HOME'], '.leap')
    # Tags that are always reported by tags(), even with zero matches.
    DEFAULT_TAGS = ['inbox', 'sent', 'drafts', 'trash']
    def __init__(self, key, agent_home=DEFAULT_INDEX_HOME):
        """Open (creating on first use) the index under
        <agent_home>/search_index, encrypted with `key`.
        """
        self.key = key
        self.index_folder = os.path.join(agent_home, 'search_index')
        if not os.path.exists(self.index_folder):
            os.makedirs(self.index_folder)
        self._index = self._create_index()
    def _add_to_tags(self, tags, group, skip_default_tags, count_type, query=None):
        """Merge one per-tag count group into `tags`, incrementing the
        `count_type` counter ('total' or 'read') for every tag.
        When `query` is given only tags containing it (case-insensitive
        substring) are kept.
        """
        query_matcher = re.compile(".*%s.*" % query.lower()) if query else re.compile(".*")
        for tag, count in group.iteritems():
            # Precedence note: this reads as
            # (skip_default_tags and tag in DEFAULT_TAGS) or (no query match).
            if skip_default_tags and tag in self.DEFAULT_TAGS or not query_matcher.match(tag):
                continue
            if not tags.get(tag):
                tags[tag] = {'ident': tag, 'name': tag, 'default': False, 'counts': {'total': 0, 'read': 0},
                             'mails': []}
            tags[tag]['counts'][count_type] += count
    def _search_tag_groups(self, is_filtering_tags):
        """Return (seen, total) per-tag facet counts from the index.
        `seen` (counts restricted to mails flagged SEEN) is only computed
        when tags are not being filtered; otherwise it stays None.
        """
        seen = None
        query_parser = QueryParser('tag', self._index.schema)
        options = {'limit': None, 'groupedby': sorting.FieldFacet('tag', allow_overlap=True), 'maptype': sorting.Count}
        with self._index.searcher() as searcher:
            total = searcher.search(query_parser.parse('*'), **options).groups()
            if not is_filtering_tags:
                seen = searcher.search(query_parser.parse("* AND flags:%s" % Status.SEEN), **options).groups()
        return seen, total
    def _init_tags_defaults(self):
        """Seed a tags dict with every DEFAULT_TAG at zero counts."""
        tags = {}
        for default_tag in self.DEFAULT_TAGS:
            tags[default_tag] = {
                'ident': default_tag,
                'name': default_tag,
                'default': True,
                'counts': {
                    'total': 0,
                    'read': 0
                },
                'mails': []
            }
        return tags
    def _build_tags(self, seen, total, skip_default_tags, query):
        """Combine 'total' and 'read' facet counts into the list of tag
        dicts returned to callers of tags().
        """
        tags = {}
        if not skip_default_tags:
            tags = self._init_tags_defaults()
        self._add_to_tags(tags, total, skip_default_tags, count_type='total', query=query)
        if seen:
            self._add_to_tags(tags, seen, skip_default_tags, count_type='read')
        return tags.values()
    def tags(self, query, skip_default_tags):
        """Return tag dicts (ident/name/default/counts/mails), optionally
        filtered by a substring `query` and/or excluding DEFAULT_TAGS.
        """
        is_filtering_tags = True if query else False
        seen, total = self._search_tag_groups(is_filtering_tags=is_filtering_tags)
        return self._build_tags(seen, total, skip_default_tags, query)
    def _mail_schema(self):
        """Whoosh schema for mail documents; only ident/tag/flags are
        stored, everything else is index-only.
        """
        return Schema(
            ident=ID(stored=True, unique=True),
            sender=ID(stored=False),
            to=KEYWORD(stored=False, commas=True),
            cc=KEYWORD(stored=False, commas=True),
            bcc=KEYWORD(stored=False, commas=True),
            subject=TEXT(stored=False),
            date=NUMERIC(stored=False, sortable=True, bits=64, signed=False),
            body=TEXT(stored=False),
            tag=KEYWORD(stored=True, commas=True),
            flags=KEYWORD(stored=True, commas=True),
            raw=TEXT(stored=False))
    def _create_index(self):
        """Create the 'mails' FileIndex on the encrypted storage.
        NOTE(review): FileIndex.create is called on every construction —
        presumably the storage layer handles an already-existing index;
        verify against EncryptedFileStorage.
        """
        storage = EncryptedFileStorage(self.index_folder, self.key)
        return FileIndex.create(storage, self._mail_schema(), indexname='mails')
    def index_mail(self, mail):
        """Index (or re-index) a single mail asynchronously."""
        with AsyncWriter(self._index) as writer:
            self._index_mail(writer, mail)
    def _index_mail(self, writer, mail):
        """Flatten `mail` into the schema's fields and upsert it by ident.
        The mailbox name is always added as an extra tag; the date header
        is parsed and stored as a unix timestamp string ('%s').
        """
        mdict = mail.as_dict()
        header = mdict['header']
        tags = set(mdict.get('tags', {}))
        tags.add(mail.mailbox_name.lower())
        index_data = {
            'sender': self._empty_string_to_none(header.get('from', '')),
            'subject': self._empty_string_to_none(header.get('subject', '')),
            'date': dateutil.parser.parse(header.get('date', '')).strftime('%s'),
            'to': self._format_recipient(header, 'to'),
            'cc': self._format_recipient(header, 'cc'),
            'bcc': self._format_recipient(header, 'bcc'),
            'tag': u','.join(unique(tags)),
            # Prefer the plain-text body when the mail provides one.
            'body': unicode(mdict['textPlainBody'] if 'textPlainBody' in mdict else mdict['body']),
            'ident': unicode(mdict['ident']),
            'flags': unicode(','.join(unique(mail.flags))),
            'raw': unicode(mail.raw)
        }
        writer.update_document(**index_data)
    def _format_recipient(self, headers, name):
        """Join a recipient header (list of addresses) into a comma string."""
        # NOTE: shadows the builtin `list` inside this method.
        list = headers.get(name, [''])
        return u','.join(list) if list else u''
    def _empty_string_to_none(self, field_value):
        """Map falsy values (empty string) to None, pass others through."""
        if not field_value:
            return None
        else:
            return field_value
    def index_mails(self, mails, callback=None):
        """Index a batch of mails in one writer session, invoking
        `callback` on success; exceptions are printed and re-raised.
        """
        try:
            with AsyncWriter(self._index) as writer:
                for mail in mails:
                    self._index_mail(writer, mail)
            if callback:
                callback()
        except Exception, e:
            # NOTE(review): print_exc's first parameter is a traceback
            # limit, not an exception instance — confirm intent.
            traceback.print_exc(e)
            raise
    def _search_with_options(self, options, query):
        """Run `query` against the 'raw' field with raw searcher options."""
        with self._index.searcher() as searcher:
            query = QueryParser('raw', self._index.schema).parse(query)
            results = searcher.search(query, **options)
        return results
    def search(self, query, window=25, page=1, all_mails=False):
        """Search mails. Returns all matching idents when all_mails is
        True, otherwise a (idents, total_count) page of size `window`.
        """
        query = self.prepare_query(query)
        return self._search_all_mails(query) if all_mails else self._paginated_search_mails(query, window, page)
    def _search_all_mails(self, query):
        """Return every matching mail ident, newest first."""
        with self._index.searcher() as searcher:
            sorting_facet = sorting.FieldFacet('date', reverse=True)
            results = searcher.search(query, sortedby=sorting_facet, reverse=True, limit=None)
            return unique([mail['ident'] for mail in results])
    def _paginated_search_mails(self, query, window, page):
        """Return (idents, total_count) for one result page, newest first.
        Page numbers below 2 (or None) normalize to 1; window defaults to 25.
        """
        page = int(page) if page is not None and int(page) > 1 else 1
        window = int(window) if window is not None else 25
        with self._index.searcher() as searcher:
            tags_facet = sorting.FieldFacet('tag', allow_overlap=True, maptype=sorting.Count)
            sorting_facet = sorting.FieldFacet('date', reverse=True)
            results = searcher.search_page(query, page, pagelen=window, groupedby=tags_facet, sortedby=sorting_facet)
            # Total count is derived from the per-tag group counts.
            return unique([mail['ident'] for mail in results]), sum(results.results.groups().values())
    def prepare_query(self, query):
        """Translate the UI query syntax ('-in:', 'in:all') into Whoosh
        syntax and parse it against the raw and body fields.
        """
        query = (
            query
            .replace('-in:', 'AND NOT tag:')
            .replace('in:all', '*')
        )
        return MultifieldParser(['raw', 'body'], self._index.schema).parse(query)
    def remove_from_index(self, mail_id):
        """Delete the mail with the given ident from the index."""
        with AsyncWriter(self._index) as writer:
            writer.delete_by_term('ident', mail_id)
    def contacts(self, query):
        """Return contact suggestions matching `query`."""
        with self._index.searcher() as searcher:
            return contacts_suggestions(query, searcher)
| kaeff/pixelated-user-agent | service/pixelated/adapter/search/__init__.py | Python | agpl-3.0 | 8,292 |
import React from 'react';
import SPELLS from 'common/SPELLS';
import SpellLink from 'common/SpellLink';
import SpellIcon from 'common/SpellIcon';
import { formatNumber } from 'common/format';
import StatisticBox, { STATISTIC_ORDER } from 'Main/StatisticBox';
import Combatants from 'Parser/Core/Modules/Combatants';
import Analyzer from 'Parser/Core/Analyzer';
/**
 * Analyzer for the Thermal Void talent (Frost Mage): tracks how long each
 * Icy Veins buff lasted and reports/suggests on the average duration.
 * Only active when the selected combatant has the talent.
 */
class ThermalVoid extends Analyzer {
  static dependencies = {
    combatants: Combatants,
  };
  // Number of Icy Veins applications counted (a cast still active when the
  // fight ends is subtracted back out in on_finished).
  casts = 0;
  // Timestamp of the most recent Icy Veins application.
  buffApplied = 0;
  // Uptime contributed by a buff still running at fight end; excluded from
  // the average so incomplete casts don't skew it.
  extraUptime = 0;
  on_initialized() {
    this.active = this.combatants.selected.hasTalent(SPELLS.THERMAL_VOID_TALENT.id);
  }
  on_toPlayer_applybuff(event) {
    const spellId = event.ability.guid;
    if (spellId === SPELLS.ICY_VEINS.id) {
      this.casts += 1;
      this.buffApplied = event.timestamp;
    }
  }
  on_finished() {
    // Discard a cast that didn't finish before the fight ended.
    if (this.combatants.selected.hasBuff(SPELLS.ICY_VEINS.id)) {
      this.casts -= 1;
      this.extraUptime = this.owner.currentTimestamp - this.buffApplied;
    }
  }
  // Total Icy Veins uptime (ms), excluding any unfinished final cast.
  get uptime() {
    return this.combatants.selected.getBuffUptime(SPELLS.ICY_VEINS.id) - this.extraUptime;
  }
  // Average buff duration per completed cast, in milliseconds.
  get averageDuration() {
    return this.uptime / this.casts;
  }
  // Suggestion thresholds expressed in seconds of average duration.
  get suggestionThresholds() {
    return {
      actual: this.averageDuration / 1000,
      isLessThan: {
        minor: 40,
        average: 37,
        major: 33,
      },
      style: 'number',
    };
  }
  suggestions(when) {
    when(this.suggestionThresholds)
      .addSuggestion((suggest, actual, recommended) => {
        return suggest(<span>Your <SpellLink id={SPELLS.THERMAL_VOID_TALENT.id} /> duration boost can be improved. Make sure you use <SpellLink id={SPELLS.FROZEN_ORB.id} /> during <SpellLink id={SPELLS.ICY_VEINS.id} /> in order to get extra <SpellLink id={SPELLS.FINGERS_OF_FROST.id} /> Procs</span>)
          .icon(SPELLS.ICY_VEINS.icon)
          .actual(`${formatNumber(actual)} seconds Average Icy Veins Duration`)
          .recommended(`${formatNumber(recommended)} is recommended`);
      });
  }
  statistic() {
    const averageDurationSeconds = this.averageDuration / 1000;
    return (
      <StatisticBox
        icon={<SpellIcon id={SPELLS.ICY_VEINS.id} />}
        value={`${formatNumber(averageDurationSeconds)}s`}
        label="Avg Icy Veins Duration"
        tooltip="Icy Veins Casts that do not complete before the fight ends are removed from this statistic"
      />
    );
  }
  statisticOrder = STATISTIC_ORDER.OPTIONAL(0);
}
export default ThermalVoid;
| enragednuke/WoWAnalyzer | src/Parser/Mage/Frost/Modules/Features/ThermalVoid.js | JavaScript | agpl-3.0 | 2,491 |
package ctlcli
import (
"fmt"
"io"
"github.com/koding/logging"
)
// ErrorCommand implements a Command interface for an error - printing the error
// or a custom message when Run is called.
type ErrorCommand struct {
	Stdout io.Writer // destination for user-facing output
	Log logging.Logger // logger; per-method sub-loggers are derived from it
	Message string // optional text shown instead of Error's own text
	Error error // the underlying failure being reported
}
// NewErrorCommand builds an ErrorCommand that reports err (or msg, when
// msg is non-empty) on stdout, logging under the "errorCommand" tag.
func NewErrorCommand(stdout io.Writer, log logging.Logger, err error, msg string) *ErrorCommand {
	cmd := new(ErrorCommand)
	cmd.Stdout = stdout
	cmd.Log = log.New("errorCommand")
	cmd.Message = msg
	cmd.Error = err
	return cmd
}
// Print writes the custom message when one was provided, falling back to
// the error's own text otherwise.
func (c *ErrorCommand) Print() {
	out := c.Message
	if out == "" {
		out = c.Error.Error()
	}
	fmt.Fprintln(c.Stdout, out)
}
// Help logs that the original command could not be constructed and then
// shows the error (or custom message) to the user.
func (c *ErrorCommand) Help() {
	c.Log.New("#help").Error("Original command could not be created. originalErr:%s", c.Error)
	c.Print()
}
// Run logs the construction failure, prints it for the user and returns
// exit code 1 along with the stored error.
func (c *ErrorCommand) Run() (int, error) {
	c.Log.New("#run").Error("Original command could not be created. originalErr:%s", c.Error)
	c.Print()
	return 1, c.Error
}
| gokmen/koding | go/src/koding/klientctl/ctlcli/error.go | GO | agpl-3.0 | 1,072 |
<?php
/*********************************************************************************
* The contents of this file are subject to the SugarCRM Master Subscription
* Agreement ("License") which can be viewed at
* http://www.sugarcrm.com/crm/en/msa/master_subscription_agreement_11_April_2011.pdf
* By installing or using this file, You have unconditionally agreed to the
* terms and conditions of the License, and You may not use this file except in
* compliance with the License. Under the terms of the license, You shall not,
* among other things: 1) sublicense, resell, rent, lease, redistribute, assign
* or otherwise transfer Your rights to the Software, and 2) use the Software
* for timesharing or service bureau purposes such as hosting the Software for
* commercial gain and/or for the benefit of a third party. Use of the Software
* may be subject to applicable fees and any use of the Software without first
* paying applicable fees is strictly prohibited. You do not have the right to
* remove SugarCRM copyrights from the source code or user interface.
*
* All copies of the Covered Code must include on each user interface screen:
* (i) the "Powered by SugarCRM" logo and
* (ii) the SugarCRM copyright notice
* in the same form as they appear in the distribution. See full license for
* requirements.
*
* Your Warranty, Limitations of liability and Indemnity are expressly stated
* in the License. Please refer to the License for the specific language
* governing these rights and limitations under the License. Portions created
* by SugarCRM are Copyright (C) 2004-2011 SugarCRM, Inc.; All Rights Reserved.
********************************************************************************/
$mod_strings = array (
'LBL_TRACK_BUTTON_KEY' => 'T',
'LBL_QUEUE_BUTTON_KEY' => 'u',
'LBL_TEST_BUTTON_KEY' => 'e',
'LBL_TODETAIL_BUTTON_KEY' => 'T',
'LBL_TRACK_DELETE_BUTTON_KEY' => 'D',
'LBL_NAVIGATION_MENU_MARKETING' => 'Marketing',
'LBL_EMAIL' => 'Email',
'LBL_DEFINE_LEAD_POST_URL' => 'Post URL:',
'LBL_DEFINE_LEAD_REDIRECT_URL' => 'Redirect URL:',
'LBL_LINK_SUBPANEL_TITLE' => 'Link',
'LBL_FROM_ADDR' => '"Feladó" címe',
'LBL_REPLY_ADDR' => '"Válasz" cím:',
'LBL_REPLY_NAME' => '"Címzett" Neve:',
'LBL_MODULE_NAME' => 'Kampányok',
'LBL_MODULE_TITLE' => 'Kampányok: Főoldal',
'LBL_NEWSLETTER_TITLE' => 'Kampányok: Hírlevelek',
'LBL_SEARCH_FORM_TITLE' => 'Kampányok keresése',
'LBL_LIST_FORM_TITLE' => 'Kampány lista',
'LBL_NEWSLETTER_LIST_FORM_TITLE' => 'Hírlevéllista',
'LBL_CAMPAIGN_NAME' => 'Név:',
'LBL_CAMPAIGN' => 'Kampány:',
'LBL_NAME' => 'Név:',
'LBL_INVITEE' => 'Kapcsolatok',
'LBL_LIST_CAMPAIGN_NAME' => 'Kampány',
'LBL_LIST_STATUS' => 'Állapot',
'LBL_LIST_TYPE' => 'Típus',
'LBL_LIST_START_DATE' => 'Kezdés dátuma',
'LBL_LIST_END_DATE' => 'Befejezés dátuma',
'LBL_DATE_ENTERED' => 'Dátum hozzáadva',
'LBL_DATE_MODIFIED' => 'Dátum módosítva',
'LBL_MODIFIED' => 'Módosította',
'LBL_CREATED' => 'Létrehozta',
'LBL_TEAM' => 'Csoport:',
'LBL_ASSIGNED_TO' => 'Felelős:',
'LBL_ASSIGNED_TO_ID' => 'Felelős:',
'LBL_ASSIGNED_TO_NAME' => 'Felelős:',
'LBL_CAMPAIGN_START_DATE' => 'Kezdés dátuma:',
'LBL_CAMPAIGN_END_DATE' => 'Befejezés dátuma:',
'LBL_CAMPAIGN_STATUS' => 'Állapot:',
'LBL_CAMPAIGN_BUDGET' => 'Költségvetés:
',
'LBL_CAMPAIGN_EXPECTED_COST' => 'Várható költség:',
'LBL_CAMPAIGN_ACTUAL_COST' => 'Tényleges költség:',
'LBL_CAMPAIGN_EXPECTED_REVENUE' => 'Várt bevétel:
',
'LBL_CAMPAIGN_IMPRESSIONS' => 'Megjelenések:',
'LBL_CAMPAIGN_COST_PER_IMPRESSION' => 'Költség / példány:',
'LBL_CAMPAIGN_COST_PER_CLICK_THROUGH' => 'Költség / átkattintás:',
'LBL_CAMPAIGN_OPPORTUNITIES_WON' => 'Megnyert lehetőségek:
',
'LBL_CAMPAIGN_TYPE' => 'Típus:',
'LBL_CAMPAIGN_OBJECTIVE' => 'Célkitűzés:',
'LBL_CAMPAIGN_CONTENT' => 'Leírás:',
'LBL_CAMPAIGN_DAYS_REMAIN' => 'Hátralévő napok száma',
'LNK_NEW_CAMPAIGN' => 'Kampány létrehozása (Classic)',
'LNL_NEW_CAMPAIGN_WIZARD' => 'Kampány készítés (Varázsló)',
'LNK_CAMPAIGN_LIST' => 'Kampányok megtekintése',
'LNK_NEW_PROSPECT' => 'Cél létrehozása',
'LNK_PROSPECT_LIST' => 'Célok megtekintése',
'LNK_NEW_PROSPECT_LIST' => 'Céllista létrehozása',
'LNK_PROSPECT_LIST_LIST' => 'Céllista megtekintése',
'LBL_MODIFIED_BY' => 'Módosította',
'LBL_CREATED_BY' => 'Létrehozta',
'LBL_DATE_CREATED' => 'Létrehozás dátuma:
',
'LBL_DATE_LAST_MODIFIED' => 'Módosítás dátuma:',
'LBL_TRACKER_KEY' => 'Követő:',
'LBL_TRACKER_URL' => 'Követő URL:',
'LBL_TRACKER_TEXT' => 'Követő link szöveg:',
'LBL_TRACKER_COUNT' => 'Követő szám:',
'LBL_REFER_URL' => 'Követő átirányítás URL:',
'LBL_DEFAULT_SUBPANEL_TITLE' => 'Kampányok',
'LBL_EMAIL_CAMPAIGNS_TITLE' => 'Email kampányok',
'LBL_NEW_FORM_TITLE' => 'Új kampány létrehozása',
'LBL_TRACKED_URLS' => 'Követő URL-ek',
'LBL_TRACKED_URLS_SUBPANEL_TITLE' => 'Követő URL-ek',
'LBL_CAMPAIGN_ACCOUNTS_SUBPANEL_TITLE' => 'Kliensek',
'LBL_PROSPECT_LIST_SUBPANEL_TITLE' => 'Céllista',
'LBL_EMAIL_MARKETING_SUBPANEL_TITLE' => 'Email marketing',
'LNK_NEW_EMAIL_TEMPLATE' => 'Email sablon létrehozása',
'LNK_EMAIL_TEMPLATE_LIST' => 'Email sablonok megtekintése',
'LBL_TRACK_BUTTON_TITLE' => 'Állapot megtekintése',
'LBL_TRACK_BUTTON_LABEL' => 'Állapot megtekintése',
'LBL_QUEUE_BUTTON_TITLE' => 'Emailek küldése',
'LBL_QUEUE_BUTTON_LABEL' => 'Emailek küldése',
'LBL_TEST_BUTTON_TITLE' => 'Teszt küldése',
'LBL_TEST_BUTTON_LABEL' => 'Teszt küldése',
'LBL_COPY_AND_PASTE_CODE' => 'Vagy másolja és illessze be az alábbi HTML-t egy már létező oldalba',
'LBL_TODETAIL_BUTTON_TITLE' => 'Részletek megtekintése',
'LBL_TODETAIL_BUTTON_LABEL' => 'Részletek megtekintése',
'LBL_DEFAULT' => 'Összes céllista',
'LBL_MESSAGE_QUEUE_TITLE' => 'Várakozó üzenetek',
'LBL_LOG_ENTRIES_TITLE' => 'Válaszok',
'LBL_LOG_ENTRIES_TARGETED_TITLE' => 'Üzenet elküldve / megkísérelve',
'LBL_LOG_ENTRIES_SEND_ERROR_TITLE' => 'Visszapattant üzenetek, Egyéb',
'LBL_LOG_ENTRIES_INVALID_EMAIL_TITLE' => 'Visszapattant üzenetek, Érvénytelen e-mail cím',
'LBL_LOG_ENTRIES_LINK_TITLE' => 'Klikk a következő linkre',
'LBL_LOG_ENTRIES_VIEWED_TITLE' => 'Megtekintett üzenet',
'LBL_LOG_ENTRIES_REMOVED_TITLE' => 'Elutasítva',
'LBL_LOG_ENTRIES_LEAD_TITLE' => 'Ajánlások létrehozva',
'LBL_CAMPAIGN_LEAD_SUBPANEL_TITLE' => 'Ajánlások',
'LBL_OPPORTUNITY_SUBPANEL_TITLE' => 'Lehetőségek',
'LBL_LOG_ENTRIES_CONTACT_TITLE' => 'Kapcsolatok létrehozása',
'LBL_BACK_TO_CAMPAIGNS' => 'Vissza a kampányokhoz',
'ERR_NO_EMAIL_MARKETING' => 'Kell lennie legalább egy aktív e-mail marketing üzenetnek, amely kapcsolódik a kampányhoz.',
'ERR_NO_TARGET_LISTS' => 'Kell lennie legalább egy aktív céllistának, amely kapcsolódik a kampányhoz.',
'ERR_NO_TEST_TARGET_LISTS' => 'Kell lennie legalább egy teszt céllistának, amely kapcsolódik a kampányhoz.',
'ERR_SENDING_NOW' => 'Üzenetek továbbítását kérjük, próbálja később.',
'ERR_MESS_NOT_FOUND_FOR_LIST' => 'Email marketing üzenet nem található ebben a céllistában.',
'ERR_MESS_DUPLICATE_FOR_LIST' => 'Több Email marketing üzenet található ebben a céllistában.',
'ERR_FIX_MESSAGES' => 'Kérjük, javítsa ki a következő hibákat a továbblépéshez.',
'LBL_TRACK_ROI_BUTTON_LABEL' => 'Megtérülés (ROI) megtekintése ',
'LBL_TRACK_DELETE_BUTTON_TITLE' => 'Teszt bejegyzés törlése',
'LBL_TRACK_DELETE_BUTTON_LABEL' => 'Teszt bejegyzés törlése',
'LBL_TRACK_DELETE_CONFIRM' => 'Ez az opció törli a próbaüzem naplóbejegyzéseit. Folytatja?
',
'ERR_NO_MAILBOX' => 'Az alábbi marketing üzenetekhez nincs mail fiók. A helyes hozzárendelés végezze el a folytatás előtt.',
'LBL_LIST_TO_ACTIVITY' => 'Állapot megtekintése',
'LBL_CURRENCY_ID' => 'Pénznem azonosító',
'LBL_CURRENCY' => 'Pénznem:',
'LBL_ROLLOVER_VIEW' => 'Gördítse a csúszkát a részletek megtekintéséhez.',
'LBL_TARGETED' => 'Célzott',
'LBL_TOTAL_TARGETED' => 'Összesen célzott',
'LBL_CAMPAIGN_FREQUENCY' => 'Gyakoriság:',
'LBL_NEWSLETTERS' => 'Hírlevelek megtekintése',
'LBL_NEWSLETTER' => 'Hírlevél',
'LBL_NEWSLETTER_FORENTRY' => 'Hírlevél',
'LBL_MORE_DETAILS' => 'További részletek
',
'LBL_CREATE_NEWSLETTER' => 'Hírlevél létrehozása',
'LBL_LIST_NAME' => 'Név',
'LBL_STATUS_TEXT' => 'Állapot:',
'LBL_FROM_MAILBOX_NAME' => 'Használt postafiók:',
'LBL_FROM_NAME' => 'Feladó neve:
',
'LBL_START_DATE_TIME' => 'Kezdő dátum és idő:
',
'LBL_DATE_START' => 'Kezdés dátuma',
'LBL_TIME_START' => 'Kezdési idő',
'LBL_TEMPLATE' => 'Email sablon:',
'LBL_CREATE_EMAIL_TEMPLATE' => 'Létrehozás',
'LBL_MESSAGE_FOR' => 'Küldje el ezt az üzenetet:',
'LBL_FINISH' => 'Befejezés',
'LBL_ALL_PROSPECT_LISTS' => 'A kampány összes cél listájának kiválasztása.',
'LBL_EDIT_EMAIL_TEMPLATE' => 'Szerkesztés',
'LBL_EMAIL_SETUP_WIZARD' => 'Email beállítása',
'LBL_DIAGNOSTIC_WIZARD' => 'Diagnosztika megtekintése',
'LBL_ALREADY_SUBSCRIBED_HEADER' => 'Hírlevelekre feliratkozott
',
'LBL_UNSUBSCRIBED_HEADER' => 'Elérhető / Hírlevelek Lemondta a',
'LBL_UNSUBSCRIBED_HEADER_EXPL' => 'Hírlevél áthelyezése az Elérhető Hírlevelek / Hírlevelek leiratkozása a listáról opcióba. Nem fogja megszüntetni a kapcsolatot az eredeti Előfizetői listáról vagy Cél listáról.',
'LBL_FILTER_CHART_BY' => 'Szűrés diagram az alábbiak szerint:',
'LBL_MANAGE_SUBSCRIPTIONS_TITLE' => 'Feliratkozások kezelése',
'LBL_MARK_AS_SENT' => 'Megjelölés elküldöttként',
'LBL_DEFAULT_LIST_NOT_FOUND' => 'Alapértelmezett cél lista nem található',
'LBL_DEFAULT_LIST_ENTRIES_NOT_FOUND' => 'Nem találtunk bejegyzést',
'LBL_DEFAULT_LIST_ENTRIES_WERE_PROCESSED' => 'Megjelölés feldolgozottként',
'LBL_EDIT_TRACKER_NAME' => 'Követő neve:',
'LBL_EDIT_TRACKER_URL' => 'Követő URL:',
'LBL_EDIT_OPT_OUT_' => 'Opt-out link ?',
'LBL_EDIT_OPT_OUT' => 'Opt-out link ?',
'LBL_UNSUBSCRIPTION_LIST_NAME' => 'Leiratkozás lista neve:
',
'LBL_SUBSCRIPTION_LIST_NAME' => 'Feliratkozás lista neve:',
'LBL_TEST_LIST_NAME' => 'Teszt lista neve:',
'LBL_UNSUBSCRIPTION_TYPE_NAME' => 'Leiratkozás',
'LBL_SUBSCRIPTION_TYPE_NAME' => 'Feliratkozás',
'LBL_TEST_TYPE_NAME' => 'Teszt',
'LBL_UNSUBSCRIPTION_LIST' => 'Leiratkozás listája',
'LBL_SUBSCRIPTION_LIST' => 'Feliratkozás listája',
'LBL_MRKT_NAME' => 'Név',
'LBL_TEST_LIST' => 'Teszt lista',
'LBL_WIZARD_HEADER_MESSAGE' => 'Töltse ki a szükséges mezőket, hogy azonosítsa a kampányt.',
'LBL_WIZARD_BUDGET_MESSAGE' => 'Adja meg a költségvetést a megtérülés kiszámításához.',
'LBL_WIZARD_SUBSCRIPTION_MESSAGE' => 'Minden hírlevélhez kell három cél lista (Feliratkozás, Leiratkozás, és Teszt). Hozzá is lehet rendelni egy létező cél listát. Ha nem, egy üres céllista jön létre a hírlevél mentésekor.
',
'LBL_WIZARD_TARGET_MESSAGE1' => 'Válasszon ki vagy hozzon létre egy céllistát a kampány használatához. Ez a lista addig használható, amíg el nem küldi az emailt.
',
'LBL_WIZARD_TARGET_MESSAGE2' => 'Vagy hozzon létre egy újat az alábbi űrlapon:',
'LBL_WIZARD_TRACKER_MESSAGE' => 'Adjon meg egy követő URL-t ehhez a kampányhoz. Gépelje be a nevét és az URL-t is a követő létrehozásához.',
'LBL_WIZARD_MARKETING_MESSAGE' => 'Töltse ki az alábbi űrlapot, hogy létrehozhasson egy e-mailt hírleveléhez. Ezáltal meghatározhatók lesznek azok az információk, hogy mikor és hogyan kell elosztani a hírlevelet.',
'LBL_WIZARD_SENDMAIL_MESSAGE' => 'Ez az utolsó lépés a munkafolyamatban. Válassza ki, ha szeretne küldeni egy teszt e-mailt, ütemezni a hírlevél forgalmát, menteni a változtatásokat, majd ha választott, menjen tovább az összefoglaló oldalra.
',
'LBL_HOME_START_MESSAGE' => 'Válassza ki a kampány típusát amit szeretne létrehozni.',
'LBL_WIZARD_LAST_STEP_MESSAGE' => 'Ez már az utolsó lépés.',
'LBL_WIZARD_FIRST_STEP_MESSAGE' => 'Megtette az első lépést.',
'LBL_WIZ_NEWSLETTER_TITLE_STEP1' => 'Kampány fejléc',
'LBL_WIZ_NEWSLETTER_TITLE_STEP2' => 'Kampány költségvetés',
'LBL_WIZ_NEWSLETTER_TITLE_STEP3' => 'Kampány követő URL-ek',
'LBL_WIZ_NEWSLETTER_TITLE_STEP4' => 'Feliratkozási információk',
'LBL_WIZ_MARKETING_TITLE' => 'Email marketing',
'LBL_WIZ_SENDMAIL_TITLE' => 'Email küldése',
'LBL_WIZ_TEST_EMAIL_TITLE' => 'Teszt Email
',
'LBL_WIZ_NEWSLETTER_TITLE_SUMMARY' => 'Összefoglaló',
'LBL_NAVIGATION_MENU_GEN1' => 'Kampány fejléc',
'LBL_NAVIGATION_MENU_GEN2' => 'Költségvetés',
'LBL_NAVIGATION_MENU_TRACKERS' => 'Követők',
'LBL_NAVIGATION_MENU_SEND_EMAIL' => 'Email küldése',
'LBL_NAVIGATION_MENU_SUBSCRIPTIONS' => 'Feliratkozások',
'LBL_NAVIGATION_MENU_SUMMARY' => 'Összefoglaló',
'LBL_SUBSCRIPTION_TARGET_WIZARD_DESC' => 'Ez határozza meg a Feliratkozás típusának cél listáját erre a kampányra.
Ezt a célt fogja használni az email küldéshez.
Ha nincs még kész listája, egy üres lista jön létre az Ön számára.
',
'LBL_UNSUBSCRIPTION_TARGET_WIZARD_DESC' => 'Ez határozza meg a Leiratkozás típusának cél listáját erre a kampányra.
Ez a cél lista fogja tartalmazni a nevét, akik úgy döntöttek, nem kívánják Önnel felvenni a kapcsolatot emailen keresztül.
Ha nincs még kész listája, egy üres lista jön létre az Ön számára.
',
'LBL_TEST_TARGET_WIZARD_DESC' => 'Ez határozza meg a Teszt típusának cél listáját erre a kampányra.
Ezt a célt fogja használni az email küldéshez.
Ha nincs még kész listája, egy üres lista jön létre az Ön számára.',
'LBL_TRACKERS' => 'Követők',
'LBL_ADD_TRACKER' => 'Követő létrehozása',
'LBL_ADD_TARGET' => 'Hozzáadás',
'LBL_CREATE_TARGET' => 'Létrehozás',
'LBL_SELECT_TARGET' => 'Használja a meglévő céllistát',
'LBL_REMOVE' => 'Eltávolítás',
'LBL_CONFIRM' => 'Kezdés',
'LBL_START' => 'Kezdés',
'LBL_TOTAL_ENTRIES' => 'Bejegyzések',
'LBL_CONFIRM_CAMPAIGN_SAVE_CONTINUE' => 'Mentse a munkát, és folytassa az Email-marketinget',
'LBL_CONFIRM_CAMPAIGN_SAVE_OPTIONS' => 'Beállítások mentése',
'LBL_CONFIRM_CAMPAIGN_SAVE_EXIT' => 'Szeretné menteni az adatokat és kilép?',
'LBL_CONFIRM_SEND_SAVE' => 'Tovább lép és folytatja az Email kampány küldését. Szeretné menteni és folytatja?',
'LBL_NEWSLETTER WIZARD_TITLE' => 'Hírlevél:
',
'LBL_NEWSLETTER_WIZARD_START_TITLE' => 'Hírlevél szerkesztés
',
'LBL_CAMPAIGN_WIZARD_START_TITLE' => 'Kampány szerkesztés',
'LBL_SEND_AS_TEST' => 'Teszt marketing Email elküldése',
'LBL_SAVE_EXIT_BUTTON_LABEL' => 'Befejezés',
'LBL_SAVE_CONTINUE_BUTTON_LABEL' => 'Mentés és folytatás
',
'LBL_TARGET_LISTS' => 'Cél listák',
'LBL_NO_SUBS_ENTRIES_WARNING' => 'Nem lehet küldeni marketing emailt, amíg a feliratkozó listán legalább egy bejegyzés nincs. A listát a befejezése után is fel tudja tölteni.',
'LBL_NO_TARGET_ENTRIES_WARNING' => 'Nem lehet küldeni marketing emailt, amíg a céllistán legalább egy bejegyzés nincs. A listát a befejezése után is fel tudja tölteni.',
'LBL_NO_TARGETS_WARNING' => 'Nem lehet küldeni marketing emailt, amíg a kampány legalább egy cél listán nem szerepel.',
'LBL_NONE' => 'Nincs létrehozva',
'LBL_CAMPAIGN_WIZARD' => 'Kampány varázsló',
'LBL_OTHER_TYPE_CAMPAIGN' => 'Nem Email alapú kampány
',
'LBL_CHOOSE_CAMPAIGN_TYPE' => 'Kampány típusa',
'LBL_TARGET_LIST' => 'Céllista',
'LBL_TARGET_TYPE' => 'Céllista típus',
'LBL_TARGET_NAME' => 'Céllista neve',
'LBL_EMAILS_SCHEDULED' => 'Ütemezett Emailek',
'LBL_TEST_EMAILS_SENT' => 'Teszt Emaileket küldeni',
'LBL_USERS_CANNOT_OPTOUT' => 'Rendszer felhasználók kiválasztása sikertelen az Emailek fogadására.',
'LBL_ELECTED_TO_OPTOUT' => 'Ön úgy döntött, hogy kizárja a bejövő Emailek fogadását.',
'LBL_COPY_OF' => 'Másolata',
'LBL_SAVED_SEARCH' => 'Mentett keresések és elrendezési lehetőségek',
'LBL_WIZ_FROM_NAME' => 'Feladó neve:',
'LBL_WIZ_FROM_ADDRESS' => 'Feladó címe:
',
'LBL_EMAILS_PER_RUN' => 'Elküldött emailek száma egy kötegben:',
'LBL_CUSTOM_LOCATION' => 'Felhasználó által definiált',
'LBL_DEFAULT_LOCATION' => 'Alapértelmezett',
'ERR_INT_ONLY_EMAIL_PER_RUN' => 'Csak egész értékeket adja meg a küldendő Emailek számára.',
'LBL_LOCATION_TRACK' => 'Kampány követő file-ok helye (mint a campaign_tracker.php)',
'LBL_MAIL_SENDTYPE' => 'Mail transfer agent',
'LBL_MAIL_SMTPAUTH_REQ' => 'SMTP hitelesítés:',
'LBL_MAIL_SMTPPASS' => 'SMTP jelszó:',
'LBL_MAIL_SMTPPORT' => 'SMTP Port:',
'LBL_MAIL_SMTPSERVER' => 'SMTP Mail szerver:',
'LBL_MAIL_SMTPUSER' => 'SMTP felhasználónév',
'LBL_EMAIL_SETUP_WIZARD_TITLE' => 'E-mail beállítás a kampányokhoz',
'TRACKING_ENTRIES_LOCATION_DEFAULT_VALUE' => 'Config.php site_url beállításának értéke',
'LBL_NOTIFY_TITLE' => 'E-mail értesítések beállításai',
'LBL_MASS_MAILING_TITLE' => 'Körlevél beállítások
',
'LBL_SERVER_TYPE' => 'Levelező szerver protokoll',
'LBL_SERVER_URL' => 'Levelező szerver cím',
'LBL_LOGIN' => 'Felhasználó név',
'LBL_PORT' => 'Levelező szerver port',
'LBL_MAILBOX_NAME' => 'Mail fiók neve:
',
'LBL_PASSWORD' => 'Jelszó',
'LBL_MAILBOX_DEFAULT' => 'Bejövő levelek',
'LBL_MAILBOX' => 'Megfigyelt mappa',
'LBL_NAVIGATION_MENU_SETUP' => 'Email beállítása',
'LBL_NAVIGATION_MENU_NEW_MAILBOX' => 'Új mail postafiók',
'LBL_MAILBOX_CHECK_WIZ_GOOD' => 'Levelező fiók(ok) visszapattanás(ok) kezelésének észlelése. Nem szükséges újat létrehozni, de alább megteheti.',
'LBL_MAILBOX_CHECK_WIZ_BAD' => 'Levelező fiók(ok) visszapattanás(ok) kezelésének észlelése. Hozzon létre egy újat.',
'LBL_CAMP_MESSAGE_COPY' => 'Kampány üzenetek másolatainak megtartása:',
'LBL_CAMP_MESSAGE_COPY_DESC' => 'Szeretné eltárolni a teljes másolatát minden egyes kampány alatt küldött email üzenetnek? Nyomatékosan ajánljuk hogy állítsa alapértelmezetten nem-re! Ha a nem-et választja, akkor csak a sablonok kerülnek eltárolásra a szükséges változókkal, amikből vissza lehet állítani minden egyes üzenet tartalmát.',
'LBL_YES' => 'Igen',
'LBL_NO' => 'Nem',
'LBL_DEFAULT_FROM_ADDR' => 'Alapértelmezett:',
'LBL_EMAIL_SETUP_DESC' => 'Töltse ki az alábbi űrlapot a rendszer beállításainak módosításához, hogy a kampány e-maileket ki lehessen küldeni.',
'LBL_CREATE_MAILBOX' => 'Új mail postafiók létrehozása',
'LBL_SSL_DESC' => 'Ha a levelezési kiszolgáló támogatja a biztonságos socket kapcsolatokat, bekapcsolása esetén kikényszeríti az SSL kapcsolatot az e-mail importálásakor.',
'LBL_SSL' => 'SSL használata',
'LNK_CAMPAIGN_DIGNOSTIC_LINK' => 'Kampány nem működik a kívánt módon, és az emaileket nem lehet elküldeni a következő okok miatt:
',
'LBL_CAMPAIGN_DIAGNOSTICS' => 'Kampány diagnosztika
',
'LBL_DIAGNOSTIC' => 'Diagnosztika',
'LBL_MAILBOX_CHECK1_GOOD' => 'Levelező fiók(ok) visszapattanás(ok) kezelésének észlelése:',
'LBL_MAILBOX_CHECK1_BAD' => 'Levelező fiók(ok) visszapattanás(ok) kezelésének észlelése nincs.',
'LBL_MAILBOX_CHECK2_GOOD' => 'Email beállítások konfigurálva vannak:',
'LBL_MAILBOX_CHECK2_BAD' => 'Kérjük, beállítani a rendszer email címét. E-mail beállítások még nincsenek beállítva.',
'LBL_SCHEDULER_CHECK_GOOD' => 'Ütemezők észlelése',
'LBL_SCHEDULER_CHECK_BAD' => 'Ütemezők nem találhatók',
'LBL_SCHEDULER_CHECK1_BAD' => 'Feladatütemező nem lett beállítva a Visszapattant Kampány Emailek folyamatban.',
'LBL_SCHEDULER_CHECK2_BAD' => 'Feladatütemező nem lett beállítva a Kampány Emailek folyamatban.',
'LBL_SCHEDULER_NAME' => 'Ütemező',
'LBL_SCHEDULER_STATUS' => 'Állapot',
'LBL_MARKETING_CHECK1_GOOD' => 'Email marketing elemeket észlelt.',
'LBL_MARKETING_CHECK1_BAD' => 'Nem talált email marketing elemeket akkor létre kell hoznia egy mailben egy kampányt.',
'LBL_MARKETING_CHECK2_GOOD' => 'Céllistákat talált.',
'LBL_MARKETING_CHECK2_BAD' => 'Nem talált céllistát, akkor létre kell hoznia egyet a kívánt kampány képernyőn.',
'LBL_EMAIL_SETUP_WIZ' => 'Indítsa el az email beállításokat',
'LBL_SCHEDULER_LINK' => 'lépjen át az ütemező admin képernyőjére.',
'LBL_TO_WIZARD' => 'indít',
'LBL_TO_WIZARD_TITLE' => 'Varázsló indítása',
'LBL_EDIT_EXISTING' => 'Kampány szerkesztés',
'LBL_EDIT_TARGET_LIST' => 'Céllista szerkesztése',
'LBL_SEND_EMAIL' => 'Email ütemezés',
'LBL_USE_EXISTING' => 'Meglévőt használ',
'LBL_CREATE_NEW_MARKETING_EMAIL' => 'Új email marketing létrehozása',
'LBL_CHOOSE_NEXT_STEP' => 'Válassza a következő lépést
',
'LBL_NON_ADMIN_ERROR_MSG' => 'Kérjük, jelezzék a rendszergazdának, hogy megoldható-e a problémája.
',
'LBL_EMAIL_COMPONENTS' => 'Email komponensek',
'LBL_SCHEDULER_COMPONENTS' => 'Ütemező komponensek
',
'LBL_RECHECK_BTN' => 'Újra ellenőrzés',
'LBL_WEB_TO_LEAD_FORM_TITLE1' => 'Hozzon létre űrlapot: Válasszon mezőket',
'LBL_WEB_TO_LEAD_FORM_TITLE2' => 'Hozzon létre űrlapot: Űrlap tulajdonságai',
'LBL_DRAG_DROP_COLUMNS' => 'Ajánlások mezők áthozatala az 1-es és 2-es oszlopban Drag&Drop módszerrel',
'LBL_DEFINE_LEAD_HEADER' => 'Űrlap Fejléc: ',
'LBL_LEAD_DEFAULT_HEADER' => 'Web Ajánlás űrlap a kampányhoz',
'LBL_DEFINE_LEAD_SUBMIT' => 'Submit gomb címke:',
'LBL_EDIT_LEAD_POST_URL' => 'Post URL szerkesztése?',
'LBL_LEAD_NOTIFY_CAMPAIGN' => 'Kapcsolódó kampány:',
'LBL_DEFAULT_LEAD_SUBMIT' => 'Elküld',
'LBL_WEB_TO_LEAD' => 'Ajánlás űrlap létrehozása',
'LBL_LEAD_FOOTER' => 'Űrlap Lábléc: ',
'LBL_CAMPAIGN_NOT_SELECTED' => 'Válassza ki és társítson egy kampányt:',
'NTC_NO_LEGENDS' => 'Egyik sem',
'LBL_SELECT_LEAD_FIELDS' => 'Kérjük, válasszon a rendelkezésre álló mezőkből',
'LBL_DESCRIPTION_LEAD_FORM' => 'Űrlap leírás',
'LBL_DESCRIPTION_TEXT_LEAD_FORM' => 'Űrlap elküldése egy ajánlás és hivatkozás létrehozásához ',
'LBL_DOWNLOAD_TEXT_WEB_TO_LEAD_FORM' => 'Kérjük, töltse le az Ajánlás a webről űrlapot
',
'LBL_DOWNLOAD_WEB_TO_LEAD_FORM' => 'Ajánlás a webről',
'LBL_PROVIDE_WEB_TO_LEAD_FORM_FIELDS' => 'Kérjük, adja meg az összes szükséges mezőt',
'LBL_NOT_VALID_EMAIL_ADDRESS' => 'Érvénytelen email cím',
'LBL_AVALAIBLE_FIELDS_HEADER' => 'Elérhető mezők',
'LBL_LEAD_FORM_FIRST_HEADER' => 'Ajánlás űrlap (első oszlop)',
'LBL_LEAD_FORM_SECOND_HEADER' => 'Ajánlás űrlap (második oszlop)',
'LBL_LEAD_MODULE' => 'Ajánlások',
'LBL_CREATE_WEB_TO_LEAD_FORM' => 'Ajánlások a webről űrlap létrehozása',
'LBL_SELECT_REQUIRED_LEAD_FIELDS' => 'Kérjük, válasszon a kötelező mezőkből:',
'LBL_CAMPAIGN_RETURN_ON_INVESTMENT' => 'Kampány megtérülése',
'LBL_CAMPAIGN_RESPONSE_BY_RECIPIENT_ACTIVITY' => 'Kampány válasz a címzett aktivitása szerint',
'LBL_LOG_ENTRIES_BLOCKEDD_TITLE' => 'Elírta az Email címet vagy tartományt',
'LBL_AMOUNT_IN' => 'Összeg',
'LBL_ROI_CHART_REVENUE' => 'Bevétel',
'LBL_ROI_CHART_INVESTMENT' => 'Befektetés',
'LBL_ROI_CHART_BUDGET' => 'Költségvetés',
'LBL_ROI_CHART_EXPECTED_REVENUE' => 'Várható bevétel',
'LBL_TOP_CAMPAIGNS' => 'Top kampányok',
'LBL_TOP_CAMPAIGNS_NAME' => 'Kampány neve',
'LBL_TOP_CAMPAIGNS_REVENUE' => 'Bevétel
',
'LBL_LEADS' => 'Ajánlások',
'LBL_CONTACTS' => 'Kapcsolatok',
'LBL_ACCOUNTS' => 'Kliensek',
'LBL_OPPORTUNITIES' => 'Lehetőségek',
'LBL_CREATED_USER' => 'Felhasználó által létrehozva',
'LBL_MODIFIED_USER' => 'Felhasználó által módosítva',
'LBL_LOG_ENTRIES' => 'Bejegyzések naplózása',
'LBL_PROSPECTLISTS_SUBPANEL_TITLE' => 'Lehetséges vevő lista',
'LBL_EMAILMARKETING_SUBPANEL_TITLE' => 'Email marketing',
'LBL_TRACK_QUEUE_SUBPANEL_TITLE' => 'Követő sor',
'LBL_TARGETED_SUBPANEL_TITLE' => 'Célzott',
'LBL_VIEWED_SUBPANEL_TITLE' => 'Megtekintett',
'LBL_LEAD_SUBPANEL_TITLE' => 'Ajánlás',
'LBL_CONTACT_SUBPANEL_TITLE' => 'Kapcsolat',
'LBL_INVALID EMAIL_SUBPANEL_TITLE' => 'Érvénytelen Email',
'LBL_SEND ERROR_SUBPANEL_TITLE' => 'Hiba küldés',
'LBL_REMOVED_SUBPANEL_TITLE' => 'Eltávolított',
'LBL_BLOCKED_SUBPANEL_TITLE' => 'Zárolt',
'LBL_ACCOUNTS_SUBPANEL_TITLE' => 'Kliensek',
'LBL_LEADS_SUBPANEL_TITLE' => 'Ajánlások',
'LBL_OPPORTUNITIES_SUBPANEL_TITLE' => 'Lehetőségek',
'LBL_IMPORT_PROSPECTS' => 'Célok importálása',
'LBL_LEAD_FORM_WIZARD' => 'Ajánlások űrlap varázsló',
'LBL_CAMPAIGN_INFORMATION' => 'Kampány áttekintése',
'LBL_MONTH' => 'Hónap',
'LBL_YEAR' => 'Év',
'LBL_DAY' => 'Nap',
);
| harish-patel/ecrm | modules/Campaigns/language/hu_HU.lang.php | PHP | agpl-3.0 | 25,040 |
//-*- coding: utf-8 -*-
//############################################################################
//
// OpenERP, Open Source Management Solution
// This module copyright (C) 2015 Therp BV <http://therp.nl>.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
// published by the Free Software Foundation, either version 3 of the
// License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <http://www.gnu.org/licenses/>.
//
//############################################################################
openerp.attachment_edit = function(instance)
{
    instance.web.Sidebar.include(
    {
        // After the sidebar's attachment list is (re)rendered, wire up the
        // per-attachment edit buttons.
        on_attachments_loaded: function(attachments)
        {
            var self = this;
            return jQuery.when(this._super.apply(this, arguments))
            .then(function()
            {
                self.$el.find('.oe-sidebar-attachment-edit')
                .click(self.on_attachment_edit);
            });
        },
        // Open the clicked attachment in an editable ir.attachment form view.
        // The attachment id and window title are read from the clicked element.
        on_attachment_edit: function(e)
        {
            var $target = jQuery(e.currentTarget),
                attachment_id = parseInt($target.attr('data-id'), 10),
                title = $target.attr('title');
            e.preventDefault();
            e.stopPropagation();
            this.do_action({
                // BUGFIX: the action type must be 'ir.actions.act_window'
                // (dots, not underscores) or the web client cannot route it.
                type: 'ir.actions.act_window',
                name: title,
                views: [[false, 'form']],
                res_model: 'ir.attachment',
                res_id: attachment_id,
                flags: {
                    initial_mode: 'edit',
                },
            });
        },
    })
}
| acsone/knowledge | attachment_edit/static/src/js/attachment_edit.js | JavaScript | agpl-3.0 | 2,047 |
using System;
using System.Collections.Generic;
using System.Text.RegularExpressions;
//distance := number 'M'| 'KM'
//range := [WITHIN]
//distance[OF]
//fuzzy := ~
//jct_srch := text / [text]
//coord_srch := [range]
//real,real
//place_srch := [fuzzy] text
//locations := place_srch | coord_srch | jct_srch
//near := NEAR | @ | range
//nearby := locations NEAR locations
//filter := filtertype IN
//request := place[nearby][range]
//action := SHOW[type] | HOW MANY
//group := group by
//show addresses where tesco within 500m of 51.00,0.0000
//text near 51.00,0.0000
namespace Quest.Lib.Search.Semantic
{
public class SemanticQuery
{
    /// <summary>
    /// A single lexer rule: a token name plus the regex that recognises it.
    /// </summary>
    private class Sequence
    {
        public String token;
        public string _expression;
    }

    // Known token patterns. Each pattern is matched independently against the
    // whole input text; a successful match contributes one token to the stream.
    private static readonly List<Sequence> _sequences = new List<Sequence>()
    {
        new Sequence() { token = "RANGE", _expression = @"(?'range'\d*M )"},
        new Sequence() { token = "REAL", _expression = @"(?'A'[0-9]*\.{0,1}\d*)"},
        new Sequence() { token = "COMMA", _expression = @"(,)"},
        new Sequence() { token = "JUNCTION", _expression = @"/"},
        new Sequence() { token = "WORD", _expression = @"(.*)"},
        new Sequence() { token = "NEARBY", _expression = @"(@)"},
        new Sequence() { token = "NEARBY", _expression = @"(towards)"},
        new Sequence() { token = "WITHIN", _expression = @"(within)"},
        new Sequence() { token = "FIND", _expression = @"(find)"},
        new Sequence() { token = "IN", _expression = @"(in)"},
        new Sequence() { token = "ADDRESS", _expression = @"(address)"},
        new Sequence() { token = "ADDRESS", _expression = @"(property)"},
        new Sequence() { token = "JUNCTION", _expression = @"(junction)"},
        new Sequence() { token = "ROADLINK", _expression = @"(road)"},
    };

    enum TokenType
    {
        TOKEN,
        PHRASE
    }

    /// <summary>One named capture-group value extracted from a token match.</summary>
    public class TokenValue
    {
        public String Name;
        public String Value;
    }

    /// <summary>A recognised token and the capture-group values of its match.</summary>
    public class Token
    {
        public String Name;
        public String Context;
        public List<TokenValue> Values = new List<TokenValue>();
        public int Position;

        public override string ToString()
        {
            return String.Format( "<{0}> {1}",Name, Context);
        }
    }

    /// <summary>An ordered list of tokens produced by <see cref="Tokenize"/>.</summary>
    public class TokenStream
    {
        public List<Token> Tokens = new List<Token>();
    }

    /// <summary>
    /// Scans <paramref name="text"/> against every known pattern and returns a
    /// single token stream containing one token per matching pattern, carrying
    /// the named capture-group values of each match.
    /// </summary>
    /// <param name="text">The raw query text to tokenize.</param>
    /// <returns>A list containing one <see cref="TokenStream"/>.</returns>
    public static List<TokenStream> Tokenize(String text)
    {
        List<TokenStream> streams = new List<TokenStream>();
        TokenStream stream = new TokenStream();
        streams.Add(stream);

        // NOTE: the original code also split the text into words and iterated
        // over them with an empty loop body; that dead code was removed
        // without changing behaviour.
        foreach (Sequence s in _sequences)
        {
            Regex r = new Regex(s._expression, RegexOptions.IgnoreCase | RegexOptions.Singleline);
            Match mc = r.Match(text);
            if (mc.Success)
            {
                Token t = new Token() { Name = s.token, Values = new List<TokenValue>() };
                stream.Tokens.Add(t);
                foreach( var gn in r.GetGroupNames())
                {
                    var v = new TokenValue() { Name = gn, Value = mc.Groups[gn].Value };
                    t.Values.Add(v);
                }
            }
        }
        return streams;
    }
}
}
| Extentsoftware/Quest | src/Quest.Lib/Search/Semantic/SemanticQuery.cs | C# | agpl-3.0 | 3,799 |
# Generated by Django 1.9.11 on 2017-02-24 09:38
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an optional ``comment_type`` field to the ``comments`` model.

    The field classifies a comment as either an ordinary comment
    ('default') or a decline-preview comment ('decline_preview'),
    defaulting to 'default'.
    """

    dependencies = [
        ('publisher_comments', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='comments',
            name='comment_type',
            field=models.CharField(blank=True, choices=[('default', 'Default'), ('decline_preview', 'Decline Preview')], default='default', max_length=255, null=True),
        ),
    ]
| edx/course-discovery | course_discovery/apps/publisher_comments/migrations/0002_comments_comment_type.py | Python | agpl-3.0 | 511 |
require 'coveralls'
# Silence Coveralls while wiring it up so its startup banner doesn't pollute
# the test output, then restore normal output afterwards.
Coveralls::Output.silent = true
Coveralls.wear_merged!('rails')
Coveralls::Output.silent = false
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
RSpec.configure do |config|
  # rspec-expectations config goes here. You can use an alternate
  # assertion/expectation library such as wrong or the stdlib/minitest
  # assertions if you prefer.
  config.expect_with :rspec do |expectations|
    # This option will default to `true` in RSpec 4. It makes the `description`
    # and `failure_message` of custom matchers include text for helper methods
    # defined using `chain`, e.g.:
    # be_bigger_than(2).and_smaller_than(4).description
    # # => "be bigger than 2 and smaller than 4"
    # ...rather than:
    # # => "be bigger than 2"
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end
  # rspec-mocks config goes here. You can use an alternate test double
  # library (such as bogus or mocha) by changing the `mock_with` option here.
  config.mock_with :rspec do |mocks|
    # Prevents you from mocking or stubbing a method that does not exist on
    # a real object. This is generally recommended, and will default to
    # `true` in RSpec 4.
    mocks.verify_partial_doubles = true
  end
  # The settings below are suggested to provide a good initial experience
  # with RSpec, but feel free to customize to your heart's content.
  # NOTE: everything between =begin and =end is a block comment — these
  # recommended settings are intentionally left disabled.
=begin
  # These two settings work together to allow you to limit a spec run
  # to individual examples or groups you care about by tagging them with
  # `:focus` metadata. When nothing is tagged with `:focus`, all examples
  # get run.
  config.filter_run :focus
  config.run_all_when_everything_filtered = true
  # Limits the available syntax to the non-monkey patched syntax that is recommended.
  # For more details, see:
  #   - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
  #   - http://teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
  #   - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
  config.disable_monkey_patching!
  # Many RSpec users commonly either run the entire suite or an individual
  # file, and it's useful to allow more verbose output when running an
  # individual spec file.
  if config.files_to_run.one?
    # Use the documentation formatter for detailed output,
    # unless a formatter has already been configured
    # (e.g. via a command-line flag).
    config.default_formatter = 'doc'
  end
  # Print the 10 slowest examples and example groups at the
  # end of the spec run, to help surface which specs are running
  # particularly slow.
  config.profile_examples = 10
  # Run specs in random order to surface order dependencies. If you find an
  # order dependency and want to debug it, you can fix the order by providing
  # the seed, which is printed after each run.
  #     --seed 1234
  config.order = :random
  # Seed global randomization in this process using the `--seed` CLI option.
  # Setting this allows you to use `--seed` to deterministically reproduce
  # test failures related to randomization by passing the same `--seed` value
  # as the one that triggered the failure.
  Kernel.srand config.seed
=end
end
| haxney/railskating | spec/spec_helper.rb | Ruby | agpl-3.0 | 3,294 |
# encoding: UTF-8
#
# == Schema Information
#
# Table name: paragraphs
#
# id :integer(4) not null, primary key
# news_id :integer(4) not null
# position :integer(4)
# second_part :boolean(1)
# locked_by_id :integer(4)
# body :text
# wiki_body :text
#
# A paragraph is a block of text from a news, with wiki syntax.
# The paragraph never modifies the body (or wiki_body) of a news,
# only the news known its state and when to do the synchronization!
#
# A paragraph can be split in several if it has a blank line in its body.
#
class Paragraph < ActiveRecord::Base
  include ERB::Util
  belongs_to :news
  # user          : account performing the current edit (drives Board announcements)
  # after         : id of the paragraph a split fragment is inserted after
  # already_split : guard so fragments created by a split are not split again
  attr_accessor :user, :after, :already_split
  attr_accessible :user, :after, :already_split, :wiki_body, :second_part, :news_id
  scope :in_first_part, where(:second_part => false).order("position ASC")
  scope :in_second_part, where(:second_part => true ).order("position ASC")
### Automatically split paragraphs ###
  # Split body in paragraphs, but preserve code!
  def split_body
    parts = []
    codemap = {}
    # Replace each fenced code block by the SHA1 of its content so blank
    # lines inside code don't trigger a paragraph split; restored below.
    str = wiki_body.gsub(/^``` ?(.+?)\r?\n(.+?)\r?\n```\r?$/m) do
      id = Digest::SHA1.hexdigest($2)
      codemap[id] = $&.chomp
      id + "\n"
    end
    # Cut on blank lines, strip leading newlines, then substitute the
    # protected code blocks back in.
    until str.empty?
      left, sep, str = str.partition(/(\r?\n){2}/)
      left.sub!(/\A(\r?\n)+/, '')
      codemap.each { |id,code| left.gsub!(id, code) }
      parts << left + sep
    end
    parts
  end
  before_validation :split_on_create, :on => :create
  # On creation: this record keeps the LAST fragment; one sibling paragraph is
  # created per remaining fragment (flagged already_split to stop recursion).
  def split_on_create
    return if already_split
    sentences = split_body
    self.wiki_body = sentences.pop
    sentences.each do |body|
      news.paragraphs.create(:wiki_body => body, :second_part => second_part, :already_split => true)
    end
  end
  before_validation :split_on_update, :on => :update
  # On update: this record keeps the FIRST fragment; the others are inserted
  # right after it, in order.
  def split_on_update
    sentences = split_body
    self.wiki_body = sentences.shift
    sentences.reverse.each_with_index do |body,i|
      p = news.paragraphs.create(:wiki_body => body, :second_part => second_part, :already_split => true, :user => user, :after => self.id)
      p.insert_at(position + 1)
    end
  end
### Behaviour ###
  # Apply an edit by +user+: an emptied paragraph is destroyed, otherwise it
  # is saved and its lock released. The parent news is saved in both cases.
  def update_by(user)
    if wiki_body.blank?
      destroy
    else
      self.user = user
      self.locked_by_id = nil
      save
    end
    news.save
  end
### Wikify ###
  before_save :wikify_body
  # Render the wiki source to HTML, stripping the wrapping <p> tags.
  def wikify_body
    self.body = wikify(wiki_body).gsub(/<\/?p>/, '')
  end
### Chat ###
  after_create :announce_create
  # Post a "paragraph added" message on the news board (user edits only).
  def announce_create
    return unless user
    message = Redaction::ParagraphsController.new.render_to_string(:partial => 'board', :locals => {:action => 'paragraphe ajouté', :paragraph => self})
    Board.create_for(news, :user => user, :kind => "creation", :message => message)
    self.user = nil
  end
  after_update :announce_update
  # Post a "paragraph modified" message on the news board (user edits only).
  def announce_update
    return unless user
    message = Redaction::ParagraphsController.new.render_to_string(:partial => 'board', :locals => {:action => 'paragraphe modifié', :paragraph => self})
    Board.create_for(news, :user => user, :kind => "edition", :message => message)
    self.user = nil
  end
  before_destroy :announce_destroy
  # Post a "paragraph deleted" message on the news board (user edits only).
  def announce_destroy
    return unless user
    message = Redaction::ParagraphsController.new.render_to_string(:partial => 'board', :locals => {:action => 'paragraphe supprimé', :paragraph => self})
    Board.create_for(news, :user => user, :kind => "deletion", :message => message)
    self.user = nil
  end
  # Warning, acts_as_list also declares a before_destroy callback,
  # and this callback must be called after +announce_destroy+.
  # So do NOT move this line upper in this file.
  acts_as_list :scope => :news
  # Try to take the edit lock for +user+. Returns true if this user already
  # holds the lock or has just acquired it; false if someone else owns it.
  # Acquiring the lock also announces it on the news board.
  def lock_by(user)
    return true if locked_by_id == user.id
    return false if locked?
    self.locked_by_id = user.id
    save
    message = "<span class=\"paragraph\" data-id=\"#{self.id}\">#{user.name} édite le paragraphe #{html_escape wiki_body[0,20]}</span>"
    Board.create_for(news, :user => user, :kind => "locking", :message => message)
    true
  end
  # True when any user currently holds the edit lock.
  def locked?
    !!locked_by_id
  end
### Presentation ###
  # Key identifying which part of the news this paragraph belongs to.
  def part
    second_part ? 'second_part' : 'first_part'
  end
end
| julienXX/linuxfr.org | app/models/paragraph.rb | Ruby | agpl-3.0 | 4,196 |
/**
 * The kinds of value mapping supported by the UI.
 * @alpha
 */
export enum MappingType {
  ValueToText = 'value', // was 1
  RangeToText = 'range', // was 2
  RegexToText = 'regex',
  SpecialValue = 'special',
}

/**
 * The display result a mapping resolves to (all fields optional).
 * @alpha
 */
export interface ValueMappingResult {
  text?: string;
  color?: string;
  icon?: string;
  index?: number;
}

/**
 * Common shape of every mapping: a discriminating type plus its options.
 * @alpha
 */
interface BaseValueMap<T> {
  type: MappingType;
  options: T;
}

/**
 * Maps exact input values (as object keys) to display results.
 * @alpha
 */
export interface ValueMap extends BaseValueMap<Record<string, ValueMappingResult>> {
  type: MappingType.ValueToText;
}

/**
 * Options for a numeric range mapping; a null bound means unbounded.
 * @alpha
 */
export interface RangeMapOptions {
  from: number | null; // changed from string
  to: number | null;
  result: ValueMappingResult;
}

/**
 * Maps values falling inside a numeric range to a display result.
 * @alpha
 */
export interface RangeMap extends BaseValueMap<RangeMapOptions> {
  type: MappingType.RangeToText;
}

/**
 * Options for a regular-expression mapping.
 * @alpha
 */
export interface RegexMapOptions {
  pattern: string;
  result: ValueMappingResult;
}

/**
 * Maps values matching a regular expression to a display result.
 * @alpha
 */
export interface RegexMap extends BaseValueMap<RegexMapOptions> {
  type: MappingType.RegexToText;
}

/**
 * Options for a special-value mapping.
 * @alpha
 */
export interface SpecialValueOptions {
  match: SpecialValueMatch;
  result: ValueMappingResult;
}

/**
 * The special (non-numeric / sentinel) values a mapping can match.
 * @alpha
 */
export enum SpecialValueMatch {
  True = 'true',
  False = 'false',
  Null = 'null',
  NaN = 'nan',
  NullAndNaN = 'null+nan',
  Empty = 'empty',
}

/**
 * Maps special values (booleans, null, NaN, empty) to a display result.
 * @alpha
 */
export interface SpecialValueMap extends BaseValueMap<SpecialValueOptions> {
  type: MappingType.SpecialValue;
}

/**
 * Union of all supported mapping variants.
 * @alpha
 */
export type ValueMapping = ValueMap | RangeMap | RegexMap | SpecialValueMap;
| grafana/grafana | packages/grafana-data/src/types/valueMapping.ts | TypeScript | agpl-3.0 | 1,530 |
<?php
namespace Zendesk\API\Resources\HelpCenter;
use Zendesk\API\Exceptions\RouteException;
use Zendesk\API\Traits\Resource\Defaults;
use Zendesk\API\Traits\Resource\Locales;
use Zendesk\API\Traits\Resource\Search;
/**
 * Class Articles
 * https://developer.zendesk.com/rest_api/docs/help_center/articles
 *
 * Endpoint wrapper for Help Center articles, providing the default CRUD
 * operations plus search, locale-aware routes and bulk attachment upload.
 */
class Articles extends ResourceAbstract
{
    use Defaults;
    use Locales {
        getRoute as protected localesGetRoute;
    }
    use Search;

    /**
     * {@inheritdoc}
     */
    protected $objectName = 'article';

    /**
     * {@inheritdoc}
     */
    protected function setupRoutes()
    {
        parent::setUpRoutes();

        // Article creation is nested under a section; the other two routes
        // operate on an existing article.
        $this->setRoutes([
            'bulkAttach' => "$this->resourceName/{articleId}/bulk_attachments.json",
            'create' => "{$this->prefix}sections/{section_id}/articles.json",
            'updateSourceLocale' => "$this->resourceName/{articleId}/source_locale.json",
        ]);
    }

    /**
     * Bulk upload attachments to a specified article
     *
     * @param int $articleId The article to update
     * @param array $params An array of attachment ids
     * @param string $routeKey The route to set
     * @return null|\stdClass
     * @throws \Exception
     */
    public function bulkAttach($articleId, array $params, $routeKey = __FUNCTION__)
    {
        try {
            $route = $this->getRoute($routeKey, ['articleId' => $articleId]);
        } catch (RouteException $e) {
            // Fall back to a route derived from the resource name when the
            // named route was never registered, and cache it for next time.
            if (! isset($this->resourceName)) {
                $this->resourceName = $this->getResourceNameFromClass();
            }

            $route = $this->resourceName . '.json';
            $this->setRoute(__FUNCTION__, $route);
        }

        return $this->client->post(
            $route,
            ['attachment_ids' => $params]
        );
    }

    /**
     * {@inheritdoc}
     *
     * Adds the chained section id (if any) to the parameters before
     * delegating to the locale-aware route resolution of the Locales trait.
     */
    public function getRoute($name, array $params = [])
    {
        $params = $this->addChainedParametersToParams($params, [
            'section_id' => Sections::class,
        ]);

        return $this->localesGetRoute($name, $params);
    }
}
| libricks/zendextract | vendor/zendesk/zendesk_api_client_php/src/Zendesk/API/Resources/HelpCenter/Articles.php | PHP | agpl-3.0 | 2,145 |
package battlecode.server;
import org.java_websocket.WebSocket;
import org.java_websocket.handshake.ClientHandshake;
import org.java_websocket.server.WebSocketServer;
import java.net.InetSocketAddress;
import java.nio.channels.ClosedByInterruptException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
/**
 * Serve a battlecode match over a websocket connection.
 *
 * Sends one battlecode event per websocket message; ignores incoming messages.
 *
 * @author james
 */
public class NetServer extends WebSocketServer {
    /** Every event broadcast so far; replayed in order to late-joining clients. */
    private final List<byte[]> previousEvents;

    /** Hand-off queue between addEvent() producers and the draining thread. */
    private final BlockingQueue<byte[]> incomingEvents;

    /** If true, start() blocks until the first client connects. */
    private final boolean waitForClient;

    // BUGFIX: these flags cross thread boundaries and must be volatile:
    //   done      - written by finish() (caller thread), read by queueThread
    //   connected - written by onOpen() (websocket thread), read by start()
    // Without volatile the reading thread may never observe the update and
    // spin or drain forever.
    private volatile boolean done = false;
    private volatile boolean connected = false;

    /** Drains incomingEvents and broadcasts each event to all clients. */
    private Thread queueThread;

    /**
     * Create a new server.
     *
     * @param port TCP port to listen on
     * @param waitForClient whether start() should block until a client connects
     */
    public NetServer(int port, boolean waitForClient) {
        super(new InetSocketAddress(port));

        this.waitForClient = waitForClient;

        previousEvents = new ArrayList<>();
        incomingEvents = new ArrayBlockingQueue<>(64);

        queueThread = new Thread(() -> {
            try {
                // Poll with a timeout so the thread notices 'done' flipping
                // even when no events arrive.
                while (!done) {
                    byte[] event = incomingEvents.poll(300, TimeUnit.MILLISECONDS);
                    if (event != null) {
                        processEvent(event);
                    }
                }
                // Flush whatever is still queued after finish() was requested.
                while (incomingEvents.size() > 0) {
                    byte[] event = incomingEvents.remove();
                    processEvent(event);
                }
            } catch(Exception e) {
                ErrorReporter.report(e, true);
            }
        });
    }

    /**
     * Run the server on a new thread.
     * If waitForClient was set, blocks until the first client connects.
     */
    @Override
    public void start() {
        if (queueThread.isAlive() || done) {
            throw new RuntimeException("Can't start server, already started");
        }

        queueThread.start();
        super.start();

        if (waitForClient) {
            System.out.println("Waiting for connection from client...");
            try {
                while (!connected) {
                    Thread.sleep(300);
                }
            } catch (InterruptedException e) {
                throw new RuntimeException("Bad things happened");
            }
            System.out.println("Connection received!");
        }
    }

    /**
     * Add an event.
     * It will be sent to clients at some point in the future.
     *
     * @param event serialized event bytes to broadcast
     */
    public void addEvent(byte[] event) {
        if (done) {
            throw new RuntimeException("Can't add event, server already finished");
        }

        incomingEvents.add(event);
    }

    /**
     * Send all queued events and terminate.
     * Blocks until finished.
     */
    public void finish() {
        if (!queueThread.isAlive()) {
            // BUGFIX: message previously claimed the thread was "already
            // started" when it was in fact not running.
            throw new RuntimeException("Can't finish, queue thread is not running");
        }
        if (done) {
            throw new RuntimeException("Can't finish, already finished");
        }

        done = true;
        try {
            queueThread.join();
            stop();
        } catch (Exception e) {
            ErrorReporter.report(e, true);
        }
    }

    // Implementation details.
    // Two threads: one polling websocket events, one draining the queue.
    // Both synchronize on connections() so that a joining client receives the
    // full replay before any new event, and every client sees events in order.

    /** Broadcast one event to all connected clients and record it for replay. */
    private void processEvent(byte[] event) {
        synchronized (connections()) {
            for (WebSocket client : connections()) {
                client.send(event);
            }
            previousEvents.add(event);
        }
    }

    @Override
    public void onOpen(WebSocket client, ClientHandshake handshake) {
        synchronized (connections()) {
            connected = true;
            // Replay history so a late joiner has the full match so far.
            for (byte[] event : previousEvents) {
                client.send(event);
            }
        }
    }

    @Override
    public void onClose(WebSocket conn, int code, String reason, boolean remote) {
        System.out.println("Closed: "+conn.getRemoteSocketAddress() + " for "+reason);
    }

    @Override
    public void onMessage(WebSocket ws, String s) {
        // Incoming messages are not part of the protocol; log and ignore.
        System.err.println("Spurious message from "+
                ws.getRemoteSocketAddress()+": `"+s+"`");
    }

    @Override
    public void onError(WebSocket conn, Exception ex) {
        // Interrupt-driven closes are expected during shutdown; stay quiet.
        if (!(ex instanceof ClosedByInterruptException)) {
            System.err.println("Error from: "+conn.getRemoteSocketAddress()+": "+ex);
        }
    }
}
| battlecode/battlecode-server | src/main/battlecode/server/NetServer.java | Java | agpl-3.0 | 4,856 |
<?php
/*** COPYRIGHT NOTICE *********************************************************
*
* Copyright 2015 ProjeQtOr - Pascal BERNARD - support@projeqtor.org
* Contributors : -
*
* This file is part of ProjeQtOr.
*
* ProjeQtOr is free software: you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License as published by the Free
* Software Foundation, either version 3 of the License, or (at your option)
* any later version.
*
* ProjeQtOr is distributed in the hope that it will be useful, but WITHOUT ANY
* WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
* more details.
*
* You should have received a copy of the GNU Affero General Public License along with
* ProjeQtOr. If not, see <http://www.gnu.org/licenses/>.
*
* You can get complete code of ProjeQtOr, other resource, help and information
* about contributors at http://www.projeqtor.org
*
*** DO NOT REMOVE THIS NOTICE ************************************************/
/** ===========================================================================
* Save a note : call corresponding method in SqlElement Class
* The new values are fetched in $_REQUEST
*/
require_once "../tool/projeqtor.php";
// The report to store as a favorite must be identified in the request.
if (! array_key_exists('reportId',$_REQUEST)) {
  throwError('reportId parameter not found in REQUEST');
}
$reportId=$_REQUEST['reportId'];
Sql::beginTransaction();
// Create the favorite entry for the current session user, appended at the
// end of his existing favorites list.
$item=new Favorite();
$user=getSessionUser();
$item->idUser=$user->id;
$item->scope='report';
$item->idReport=$reportId;
$item->idle=0;
$lst=$item->getSqlElementsFromCriteria(array('idUser'=>$user->id));
$item->sortOrder=count($lst)+1;
$result=$item->save();
$rpt=new Report($reportId); // validated to be numeric value in SqlElement base constructor.
$params=FavoriteParameter::returnReportParameters($rpt,true);
// When an explicit scale/value pair is supplied, it supersedes the combined
// 'period' parameter, which is therefore dropped.
if (isset($params['period']) and isset($_REQUEST['periodScale']) and isset($_REQUEST['periodValue'])) {
  unset($params['period']);
  $params['periodScale']='';
  $params['periodValue']='';
}
// Persist only the parameters whose requested value differs from the
// report's default, so the favorite stores just the user's customisations.
foreach ($params as $pName=>$pValue) {
  $reqValue='';
  if (isset($_REQUEST[$pName])) {
    $reqValue=$_REQUEST[$pName];
  }
  if (trim($reqValue)!=trim($pValue)) {
    $tp=new FavoriteParameter();
    $tp->idUser=$item->idUser;
    $tp->idReport=$item->idReport;
    $tp->idFavorite=$item->id;
    $tp->parameterName=$pName;
    $tp->parameterValue=$reqValue;
    $res=$tp->save();
  }
}
// Message of correct saving
displayLastOperationStatus($result);
?> | papjul/projeqtor | tool/saveReportAsFavorite.php | PHP | agpl-3.0 | 2,571 |
import React from 'react';
import { shallow } from 'enzyme';
import SideBar from 'components/projects/SideBar';
describe('<SideBar />', () => {
const render = props => {
const defaultProps = {
reverse: false,
visibleColumns: {
chillyBin: true,
backlog: true,
done: true,
},
toggleColumn: sinon.stub(),
reverseColumns: sinon.stub()
};
return shallow(<SideBar {...defaultProps} {...props } />);
};
it('renders the component', () => {
const wrapper = render();
expect(wrapper).toExist();
});
});
| Codeminer42/cm42-central | spec/javascripts/components/projects/side_bar/side_bar_spec.js | JavaScript | agpl-3.0 | 579 |
# Minimal liveness endpoint for load balancers and uptime monitors.
class HealthCheckController < ApplicationController
  # Responds 200 with a bare plain-text "OK" body.
  def index
    render plain: "OK"
  end
end
| standardnotes/web | app/controllers/health_check_controller.rb | Ruby | agpl-3.0 | 100 |
<?php
/**
*
* SugarCRM Community Edition is a customer relationship management program developed by
* SugarCRM, Inc. Copyright (C) 2004-2013 SugarCRM Inc.
*
* SuiteCRM is an extension to SugarCRM Community Edition developed by SalesAgility Ltd.
* Copyright (C) 2011 - 2018 SalesAgility Ltd.
*
* This program is free software; you can redistribute it and/or modify it under
* the terms of the GNU Affero General Public License version 3 as published by the
* Free Software Foundation with the addition of the following permission added
* to Section 15 as permitted in Section 7(a): FOR ANY PART OF THE COVERED WORK
* IN WHICH THE COPYRIGHT IS OWNED BY SUGARCRM, SUGARCRM DISCLAIMS THE WARRANTY
* OF NON INFRINGEMENT OF THIRD PARTY RIGHTS.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
* FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more
* details.
*
* You should have received a copy of the GNU Affero General Public License along with
* this program; if not, see http://www.gnu.org/licenses or write to the Free
* Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA.
*
* You can contact SugarCRM, Inc. headquarters at 10050 North Wolfe Road,
* SW2-130, Cupertino, CA 95014, USA. or at email address contact@sugarcrm.com.
*
* The interactive user interfaces in modified source and object code versions
* of this program must display Appropriate Legal Notices, as required under
* Section 5 of the GNU Affero General Public License version 3.
*
* In accordance with Section 7(b) of the GNU Affero General Public License version 3,
* these Appropriate Legal Notices must retain the display of the "Powered by
* SugarCRM" logo and "Supercharged by SuiteCRM" logo. If the display of the logos is not
* reasonably feasible for technical reasons, the Appropriate Legal Notices must
* display the words "Powered by SugarCRM" and "Supercharged by SuiteCRM".
*/
require_once('modules/ModuleBuilder/MB/AjaxCompose.php') ;
require_once('modules/ModuleBuilder/parsers/views/History.php') ;
require_once('modules/ModuleBuilder/parsers/ParserFactory.php') ;
class ViewHistory extends SugarView
{
public $pageSize = 10 ;
    /**
     * Builds the breadcrumb title parts for this view: the Administration
     * module label followed by the Studio/Module Builder title.
     *
     * @see SugarView::_getModuleTitleParams()
     */
    protected function _getModuleTitleParams($browserTitle = false)
    {
        global $mod_strings;
        return array(
           translate('LBL_MODULE_NAME', 'Administration'),
           ModuleBuilderController::getModuleTitle(),
           );
    }
    /**
     * Entry point: resolves layout/module/package from the request, builds the
     * matching parser and its History, then dispatches to the requested
     * history action (defaults to 'browse').
     */
    public function display()
    {
        $this->layout = strtolower($_REQUEST [ 'view' ]) ;
        // Subpanels are only meaningful for listview layouts.
        $subpanelName = null ;
        if ((strtolower($this->layout) == 'listview') && (!empty($_REQUEST [ 'subpanel' ]))) {
            $subpanelName = $_REQUEST [ 'subpanel' ] ;
        }
        // 'studio' (or no package) means a deployed module rather than a
        // Module Builder package.
        $packageName = (isset($_REQUEST [ 'view_package' ]) && (strtolower($_REQUEST [ 'view_package' ]) != 'studio')) ? $_REQUEST [ 'view_package' ] : null ;
        $this->module = $_REQUEST [ 'view_module' ] ;
        $this->parser = ParserFactory::getParser($this->layout, $this->module, $packageName, $subpanelName) ;
        $this->history = $this->parser->getHistory() ;
        // Dynamic dispatch to e.g. browse() or preview().
        $action = ! empty($_REQUEST [ 'histAction' ]) ? $_REQUEST [ 'histAction' ] : 'browse' ;
        $GLOBALS['log']->debug(get_class($this)."->display(): performing History action {$action}") ;
        $this->$action() ;
    }
public function browse()
{
$smarty = new Sugar_Smarty() ;
global $mod_strings ;
$smarty->assign('mod_strings', $mod_strings) ;
$smarty->assign('view_module', $this->module) ;
$smarty->assign('view', $this->layout) ;
if (! empty($_REQUEST [ 'subpanel' ])) {
$smarty->assign('subpanel', $_REQUEST [ 'subpanel' ]) ;
}
$stamps = array( ) ;
global $timedate ;
$userFormat = $timedate->get_date_time_format() ;
$page = ! empty($_REQUEST [ 'page' ]) ? $_REQUEST [ 'page' ] : 0 ;
$count = $this->history->getCount();
$ts = $this->history->getNth($page * $this->pageSize) ;
$snapshots = array( ) ;
for ($i = 0 ; $i <= $this->pageSize && $ts > 0 ; $i ++) {
$dbDate = $timedate->fromTimestamp($ts)->asDb();
$displayTS = $timedate->to_display_date_time($dbDate) ;
if ($page * $this->pageSize + $i + 1 == $count) {
$displayTS = translate("LBL_MB_DEFAULT_LAYOUT");
}
$snapshots [ $ts ] = $displayTS ;
$ts = $this->history->getNext() ;
}
if (count($snapshots) > $this->pageSize) {
$smarty->assign('nextPage', true) ;
}
$snapshots = array_slice($snapshots, 0, $this->pageSize, true) ;
$smarty->assign('currentPage', $page) ;
$smarty->assign('snapshots', $snapshots) ;
$html = $smarty->fetch('modules/ModuleBuilder/tpls/history.tpl') ;
echo $html ;
}
public function preview()
{
global $mod_strings ;
if (! isset($_REQUEST [ 'sid' ])) {
die('SID Required') ;
}
$sid = $_REQUEST [ 'sid' ] ;
$subpanel = '';
if (! empty($_REQUEST [ 'subpanel' ])) {
$subpanel = ',"' . $_REQUEST [ 'subpanel' ] . '"' ;
}
echo "<input type='button' name='close$sid' value='". translate('LBL_BTN_CLOSE')."' " .
"class='button' onclick='ModuleBuilder.tabPanel.removeTab(ModuleBuilder.tabPanel.get(\"activeTab\"));' style='margin:5px;'>" .
"<input type='button' name='restore$sid' value='" . translate('LBL_MB_RESTORE') . "' " .
"class='button' onclick='ModuleBuilder.history.revert(\"$this->module\",\"{$this->layout}\",\"$sid\"$subpanel);' style='margin:5px;'>" ;
$this->history->restoreByTimestamp($sid) ;
$view ;
if ($this->layout == 'listview') {
require_once("modules/ModuleBuilder/views/view.listview.php") ;
$view = new ViewListView() ;
} elseif ($this->layout == 'basic_search' || $this->layout == 'advanced_search') {
require_once("modules/ModuleBuilder/views/view.searchview.php") ;
$view = new ViewSearchView() ;
} elseif ($this->layout == 'dashlet' || $this->layout == 'dashletsearch') {
require_once("modules/ModuleBuilder/views/view.dashlet.php") ;
$view = new ViewDashlet() ;
} elseif ($this->layout == 'popuplist' || $this->layout == 'popupsearch') {
require_once("modules/ModuleBuilder/views/view.popupview.php") ;
$view = new ViewPopupview() ;
} else {
require_once("modules/ModuleBuilder/views/view.layoutview.php") ;
$view = new ViewLayoutView() ;
}
$view->display(true) ;
$this->history->undoRestore() ;
}
public function restore()
{
if (! isset($_REQUEST [ 'sid' ])) {
die('SID Required') ;
}
$sid = $_REQUEST [ 'sid' ] ;
$this->history->restoreByTimestamp($sid) ;
}
/**
* Restores a layout to its current customized state.
* Called when leaving a restored layout without saving.
*/
public function unrestore()
{
$this->history->undoRestore() ;
}
}
| pgorod/SuiteCRM | modules/ModuleBuilder/views/view.history.php | PHP | agpl-3.0 | 7,490 |
// -*- mode: c++ , coding: utf-8 -*-
/**
* tbrpg – Text based roll playing game
*
* Copyright © 2012, 2013 Mattias Andrée (maandree@kth.se)
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef __GUARD_DART_HPP__
#define __GUARD_DART_HPP__
#include <stdlib.h>
#include <algorithm>
#include <vector>
#include <unordered_map>
#include "Throwing.hpp"
#include "hash.hpp"
/**
* Text based roll playing game
*
* DD2387 Program construction with C++
* Laboration 3
*
* @author Mattias Andrée <maandree@kth.se>
*/
namespace tbrpg
{
  /**
   * Dart item class, a small throwable weapon
   */
  class Dart: public Throwing
  {
  public:
    /**
     * Construction
     */
    Dart();

    /**
     * Copy constructor, const overload
     *
     * @param  original  The object to clone
     */
    Dart(const Dart& original);

    /**
     * Copy constructor, non-const overload
     *
     * @param  original  The object to clone
     */
    Dart(Dart& original);

    /**
     * Move constructor
     *
     * @param  original  The object to move; its resources are taken over
     */
    Dart(Dart&& original);

    /**
     * Fork the object
     *
     * @return  A fork of the object
     */
    virtual Object* fork() const;

    /**
     * Destructor
     */
    virtual ~Dart();

    /**
     * Assignment operator, const overload
     *
     * @param   original  The reference object
     * @return            The invoked object
     */
    virtual Dart& operator =(const Dart& original);

    /**
     * Assignment operator, non-const overload
     *
     * @param   original  The reference object
     * @return            The invoked object
     */
    virtual Dart& operator =(Dart& original);

    /**
     * Move operator
     *
     * @param   original  The moved object; its resources will be moved
     * @return            The invoked object
     */
    virtual Dart& operator =(Dart&& original);

    /**
     * Equality evaluator
     *
     * @param   other  The other comparand
     * @return         Whether the instances are equal
     */
    virtual bool operator ==(const Dart& other) const;

    /**
     * Inequality evaluator
     *
     * @param   other  The other comparand
     * @return         Whether the instances are not equal
     */
    virtual bool operator !=(const Dart& other) const;

  protected:
    /**
     * Copy method, shared by the copy constructors and copy assignment
     *
     * @param  self      The object to modify
     * @param  original  The reference object
     */
    static void __copy__(Dart& self, const Dart& original);

  public:
    /**
     * Hash method
     *
     * @return  The object's hash code
     */
    size_t hash() const;
  };
}
namespace std
{
  /**
   * std::hash specialisation for tbrpg::Dart so it can be used as a key in
   * unordered containers.  Declared with `struct` to match the class-key of
   * the std::hash primary template (avoids class/struct mismatch warnings,
   * e.g. MSVC C4099).
   */
  template<>
  struct hash<tbrpg::Dart>
  {
    /**
     * Hash operator: delegates to the object's own hash() method.
     *
     * @param   elem  The object to hash
     * @return        The object's hash code
     */
    size_t operator()(const tbrpg::Dart& elem) const
    {
      return elem.hash();
    }
  };

  /**
   * std::hash specialisation for pointers to tbrpg::Dart; a null pointer
   * hashes to 0, otherwise the pointee's hash() is used.
   */
  template<>
  struct hash<tbrpg::Dart*>
  {
    /**
     * Hash operator.
     *
     * @param   elem  Pointer to the object to hash (may be null)
     * @return        The object's hash code, or 0 for null
     */
    size_t operator()(tbrpg::Dart* elem) const
    {
      return elem == nullptr ? 0 : elem->hash();
    }
  };
}
#endif//__GUARD_DART_HPP__
| maandree/tbrpg | src/Dart.hpp | C++ | agpl-3.0 | 3,647 |
<?php
/*
------------------------------------------------------------------------
FusionInventory
Copyright (C) 2010-2022 by the FusionInventory Development Team.
http://www.fusioninventory.org/ http://forge.fusioninventory.org/
------------------------------------------------------------------------
LICENSE
This file is part of FusionInventory project.
FusionInventory is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
FusionInventory is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with FusionInventory. If not, see <http://www.gnu.org/licenses/>.
------------------------------------------------------------------------
@package FusionInventory
@author David Durieux
@co-author
@copyright Copyright (C) 2010-2022 FusionInventory team
@license AGPL License 3.0 or (at your option) any later version
http://www.gnu.org/licenses/agpl-3.0-standalone.html
@link http://www.fusioninventory.org/
@link http://forge.fusioninventory.org/projects/fusioninventory-for-glpi/
@since 2013
------------------------------------------------------------------------
*/
use PHPUnit\Framework\TestCase;
class CronTaskTest extends TestCase {
public static function setUpBeforeClass(): void {
// Delete all computers
$computer = new Computer();
$items = $computer->find();
foreach ($items as $item) {
$computer->delete(['id' => $item['id']], true);
}
// Delete all agents (force)
$pfAgent = new PluginFusioninventoryAgent();
$items = $pfAgent->find();
foreach ($items as $item) {
$pfAgent->delete(['id' => $item['id']], true);
}
// Delete all tasks
$pfTask = new PluginFusioninventoryTask();
$items = $pfTask->find();
foreach ($items as $item) {
$pfTask->delete(['id' => $item['id']], true);
}
// Delete al deploygroups
$pfDeployGroup = new PluginFusioninventoryDeployGroup();
$items = $pfDeployGroup->find();
foreach ($items as $item) {
$pfDeployGroup->delete(['id' => $item['id']], true);
}
// Delete al deploypackages
$pfDeployPackage = new PluginFusioninventoryDeployPackage();
$items = $pfDeployPackage->find();
foreach ($items as $item) {
$pfDeployPackage->delete(['id' => $item['id']], true);
}
$module = new PluginFusioninventoryAgentmodule();
$module->getFromDBByCrit(['modulename' => 'DEPLOY']);
$module->update([
'id' => $module->fields['id'],
'is_active' => 1
]);
}
/**
* @test
*/
public function prepareDb() {
$computer = new Computer();
$pfAgent = new PluginFusioninventoryAgent();
$pfDeployPackage = new PluginFusioninventoryDeployPackage();
$pfDeployGroup = new PluginFusioninventoryDeployGroup();
$pfTask = new PluginFusioninventoryTask();
$pfTaskjob = new PluginFusioninventoryTaskjob;
$pfDeployGroup_Dynamicdata = new PluginFusioninventoryDeployGroup_Dynamicdata();
$pfEntity = new PluginFusioninventoryEntity();
$pfEntity = new PluginFusioninventoryEntity();
$pfEntity->getFromDBByCrit(['entities_id' => 0]);
if (isset($pfEntity->fields['id'])) {
$pfEntity->update([
'id' => $pfEntity->fields['id'],
'agent_base_url' => 'http://127.0.0.1/glpi'
]);
} else {
$pfEntity->add([
'entities_id' => 0,
'agent_base_url' => 'http://127.0.0.1/glpi'
]);
}
// Create package
$input = [
'entities_id' => 0,
'name' => 'package'
];
$packages_id = $pfDeployPackage->add($input);
$this->assertNotFalse($packages_id);
// Create fusioninventory dynamic group
$input = [
'name' => 'all computers have name computer',
'type' => 'DYNAMIC'
];
$groups_id = $pfDeployGroup->add($input);
$this->assertNotFalse($groups_id);
$input = [
'plugin_fusioninventory_deploygroups_id' => $groups_id,
'fields_array' => 'a:2:{s:8:"criteria";a:1:{i:0;a:3:{s:5:"field";s:1:"1";s:10:"searchtype";s:8:"contains";s:5:"value";s:8:"computer";}}s:12:"metacriteria";s:0:"";}'
];
$groupDynamicId = $pfDeployGroup_Dynamicdata->add($input);
$this->assertNotFalse($groupDynamicId);
// create task
$input = [
'entities_id' => 0,
'name' => 'deploy',
'is_active' => 1
];
$tasks_id = $pfTask->add($input);
$this->assertNotFalse($tasks_id);
// create takjob
$input = [
'plugin_fusioninventory_tasks_id' => $tasks_id,
'entities_id' => 0,
'name' => 'deploy',
'method' => 'deployinstall',
'targets' => '[{"PluginFusioninventoryDeployPackage":"'.$packages_id.'"}]',
'actors' => '[{"PluginFusioninventoryDeployGroup":"'.$groups_id.'"}]'
];
$taskjobId = $pfTaskjob->add($input);
$this->assertNotFalse($taskjobId);
// Create computers + agents
$input = [
'entities_id' => 0,
'name' => 'computer1'
];
$computers_id = $computer->add($input);
$this->assertNotFalse($computers_id);
$input = [
'entities_id' => 0,
'name' => 'computer1',
'version' => '{"INVENTORY":"v2.3.11"}',
'device_id' => 'computer1',
'useragent' => 'FusionInventory-Agent_v2.3.11',
'computers_id'=> $computers_id
];
$agentId = $pfAgent->add($input);
$this->assertNotFalse($agentId);
$input = [
'entities_id' => 0,
'name' => 'computer2'
];
$computers_id = $computer->add($input);
$this->assertNotFalse($computers_id);
$input = [
'entities_id' => 0,
'name' => 'computer2',
'version' => '{"INVENTORY":"v2.3.11"}',
'device_id' => 'computer2',
'useragent' => 'FusionInventory-Agent_v2.3.11',
'computers_id'=> $computers_id
];
$agentId = $pfAgent->add($input);
$this->assertNotFalse($agentId);
$input = [
'entities_id' => 0,
'name' => 'computer3'
];
$computers_id = $computer->add($input);
$this->assertNotFalse($computers_id);
$input = [
'entities_id' => 0,
'name' => 'computer3',
'version' => '{"INVENTORY":"v2.3.11"}',
'device_id' => 'computer3',
'useragent' => 'FusionInventory-Agent_v2.3.11',
'computers_id'=> $computers_id
];
$agentId = $pfAgent->add($input);
$this->assertNotFalse($agentId);
// Create package
$input = [
'entities_id' => 0,
'name' => 'on demand package',
'is_recursive' => 0,
'plugin_fusioninventory_deploygroups_id' => $groups_id,
'json' => '{"jobs":{"checks":[],"associatedFiles":[],"actions":[]},"associatedFiles":[]}'
];
$packages_id_2 = $pfDeployPackage->add($input);
$this->assertNotFalse($packages_id_2);
// create task
$input = [
'entities_id' => 0,
'name' => 'ondemand',
'is_active' => 1,
'is_deploy_on_demand' => 1,
'reprepare_if_successful' => 0
];
$tasks_id_2 = $pfTask->add($input);
$this->assertNotFalse($tasks_id_2);
// create takjob
$input = [
'plugin_fusioninventory_tasks_id' => $tasks_id_2,
'entities_id' => 0,
'name' => 'deploy',
'method' => 'deployinstall',
'targets' => '[{"PluginFusioninventoryDeployPackage":"'.$packages_id_2.'"}]',
'actors' => '[{"PluginFusioninventoryDeployGroup":"'.$groups_id.'"}]'
];
$taskjobId = $pfTaskjob->add($input);
$this->assertNotFalse($taskjobId);
}
/**
* @test
*/
public function prepareTask() {
global $DB;
PluginFusioninventoryTask::cronTaskscheduler();
$pfTask = new PluginFusioninventoryTask();
$pfTask->getFromDBByCrit(['name' => 'deploy']);
$this->assertArrayHasKey('id', $pfTask->fields);
$data = $pfTask->getJoblogs([$pfTask->fields['id']]);
$pfAgent = new PluginFusioninventoryAgent();
$reference = [];
$pfAgent->getFromDBByCrit(['name' => 'computer1']);
$reference[$pfAgent->fields['id']] = 'computer1';
$pfAgent->getFromDBByCrit(['name' => 'computer2']);
$reference[$pfAgent->fields['id']] = 'computer2';
$pfAgent->getFromDBByCrit(['name' => 'computer3']);
$reference[$pfAgent->fields['id']] = 'computer3';
$this->assertEquals($reference, $data['agents']);
foreach ($data['tasks'] as $task_id => &$task) {
foreach ($task['jobs'] as $job_id => &$job) {
foreach ($job['targets'] as $target_id => &$target) {
foreach ($target['agents'] as $agent_id => &$agent) {
$logs = $data['tasks'][$task_id]['jobs'][$job_id]['targets'][$target_id]['agents'][$agent_id];
$this->assertEquals(1, count($logs));
/* We get something like:
[agent_id] => 1
[link] => ./vendor/bin/phpunit/front/computer.form.php?id=1
[numstate] => 0
[state] => prepared
[jobstate_id] => 1
[last_log_id] => 1
[last_log_date] => 2018-01-20 12:44:06
[timestamp] => 1516448646
[last_log] =>
*/
foreach ($logs as &$log) {
$this->assertEquals($log['agent_id'], $agent_id);
$this->assertEquals($log['state'], "prepared");
$this->assertEquals($log['last_log'], "");
}
}
}
}
}
}
    /**
     * @test
     *
     * An agent added after the first preparation is picked up by the next
     * scheduler run, because the deploy group is dynamic.
     */
    public function prepareTaskWithNewComputer() {
        $computer = new Computer();
        $pfAgent = new PluginFusioninventoryAgent();
        // New computer matching the dynamic group criteria, plus its agent.
        $input = [
            'entities_id' => 0,
            'name' => 'computer4'
        ];
        $computers_id = $computer->add($input);
        $this->assertNotFalse($computers_id);
        $input = [
            'entities_id' => 0,
            'name' => 'computer4',
            'version' => '{"INVENTORY":"v2.3.11"}',
            'device_id' => 'computer4',
            'useragent' => 'FusionInventory-Agent_v2.3.11',
            'computers_id'=> $computers_id
        ];
        $agentId = $pfAgent->add($input);
        $this->assertNotFalse($agentId);
        PluginFusioninventoryTask::cronTaskscheduler();
        $pfTask = new PluginFusioninventoryTask();
        // All tasks (active or not) and get logs
        $pfTask->getFromDBByCrit(['name' => 'deploy']);
        $this->assertArrayHasKey('id', $pfTask->fields);
        $data = $pfTask->getJoblogs([$pfTask->fields['id']]);
        // Now all four agents must be known to the task.
        $pfAgent = new PluginFusioninventoryAgent();
        $reference = [];
        $pfAgent->getFromDBByCrit(['name' => 'computer1']);
        $reference[$pfAgent->fields['id']] = 'computer1';
        $pfAgent->getFromDBByCrit(['name' => 'computer2']);
        $reference[$pfAgent->fields['id']] = 'computer2';
        $pfAgent->getFromDBByCrit(['name' => 'computer3']);
        $reference[$pfAgent->fields['id']] = 'computer3';
        $pfAgent->getFromDBByCrit(['name' => 'computer4']);
        $reference[$pfAgent->fields['id']] = 'computer4';
        $this->assertEquals($reference, $data['agents']);
    }
    /**
     * @test
     *
     * Renaming a computer so it no longer matches the dynamic group ('koin'
     * does not contain 'computer') removes it from the prepared agents on the
     * next scheduler run, while it remains in the task's known-agents list.
     */
    public function prepareTaskWithdynamicgroupchanged() {
        $computer = new Computer();
        $computer->getFromDBByCrit(['name' => 'computer2']);
        // Rename so the computer drops out of the dynamic group.
        $computer->update([
            'id' => $computer->fields['id'],
            'name' => 'koin']);
        PluginFusioninventoryTask::cronTaskscheduler();
        $pfTask = new PluginFusioninventoryTask();
        $pfTask->getFromDBByCrit(['name' => 'deploy']);
        $this->assertArrayHasKey('id', $pfTask->fields);
        $data = $pfTask->getJoblogs([$pfTask->fields['id']]);
        $pfAgent = new PluginFusioninventoryAgent();
        $reference = [];
        $ref_prepared = [];
        $pfAgent->getFromDBByCrit(['name' => 'computer1']);
        $reference[$pfAgent->fields['id']] = 'computer1';
        $agentId1 = $pfAgent->fields['id'];
        // The agent keeps its original name even though the computer was renamed.
        $pfAgent->getFromDBByCrit(['name' => 'computer2']);
        $reference[$pfAgent->fields['id']] = 'computer2';
        $pfAgent->getFromDBByCrit(['name' => 'computer3']);
        $reference[$pfAgent->fields['id']] = 'computer3';
        $agentId2 = $pfAgent->fields['id'];
        $pfAgent->getFromDBByCrit(['name' => 'computer4']);
        $reference[$pfAgent->fields['id']] = 'computer4';
        // Expected prepared agents (order matters): computer4, computer3, computer1.
        $ref_prepared[] = $pfAgent->fields['id'];
        $ref_prepared[] = $agentId2;
        $ref_prepared[] = $agentId1;
        $this->assertEquals($reference, $data['agents']);
        $pfTaskjob = new PluginFusioninventoryTaskjob();
        $pfDeployPackage = new PluginFusioninventoryDeployPackage();
        $pfTaskjob->getFromDBByCrit(['plugin_fusioninventory_tasks_id' => $pfTask->fields['id']]);
        $pfDeployPackage->getFromDBByCrit(['name' => 'package']);
        $this->assertEquals($ref_prepared, array_keys($data['tasks'][$pfTask->fields['id']]['jobs'][$pfTaskjob->fields['id']]['targets']['PluginFusioninventoryDeployPackage_'.$pfDeployPackage->fields['id']]['counters']['agents_prepared']));
    }
/**
* @test
*/
public function prepareTaskDisabled() {
$pfTask = new PluginFusioninventoryTask();
$pfTask->getFromDBByCrit(['name' => 'deploy']);
$this->assertArrayHasKey('id', $pfTask->fields);
$pfTask->update([
'id' => $pfTask->fields['id'],
'is_active' => 0
]);
PluginFusioninventoryTask::cronTaskscheduler();
// Only for active tasks and with logs
$data = $pfTask->getJoblogs([$pfTask->fields['id']], true, true);
$ref = [];
$this->assertEquals($ref, $data['agents'], 'Task inactive, so no agent prepared');
$ref_prepared = [];
$this->assertEquals($ref_prepared, $data['tasks']);
}
/**
* @test
*/
public function prepareTaskNoLogs() {
global $DB;
$pfTask = new PluginFusioninventoryTask();
$pfTask->getFromDBByCrit(['name' => 'deploy']);
$this->assertArrayHasKey('id', $pfTask->fields);
$pfTask->update([
'id' => $pfTask->fields['id'],
'is_active' => 1
]);
PluginFusioninventoryTask::cronTaskscheduler();
$data = $pfTask->getJoblogs([$pfTask->fields['id']], false, false);
$pfAgent = new PluginFusioninventoryAgent();
$reference = [];
$pfAgent->getFromDBByCrit(['name' => 'computer1']);
$reference[$pfAgent->fields['id']] = 'computer1';
$pfAgent->getFromDBByCrit(['name' => 'computer2']);
$reference[$pfAgent->fields['id']] = 'computer2';
$pfAgent->getFromDBByCrit(['name' => 'computer3']);
$reference[$pfAgent->fields['id']] = 'computer3';
$pfAgent->getFromDBByCrit(['name' => 'computer4']);
$reference[$pfAgent->fields['id']] = 'computer4';
$this->assertEquals($reference, $data['agents']);
foreach ($data['tasks'] as $task_id => &$task) {
foreach ($task['jobs'] as $job_id => &$job) {
foreach ($job['targets'] as $target_id => &$target) {
foreach ($target['agents'] as $agent_id => &$agent) {
$logs = $data['tasks'][$task_id]['jobs'][$job_id]['targets'][$target_id]['agents'][$agent_id];
// No logs
$this->assertEquals(0, count($logs), print_r($logs, true));
}
}
}
}
}
    /**
     * @test
     *
     * Checks the 'reprepare_if_successful' flag: while it is off, a new
     * scheduler run only re-prepares agents that failed or never ran; after
     * switching it on, agents that already succeeded are re-prepared too.
     */
    public function prepareTaskNotRePrepareIfSuccessful() {
        global $DB;
        $_SESSION['glpi_plugin_fusioninventory']['includeoldjobs'] = 2;
        $pfAgent = new PluginFusioninventoryAgent();
        $pfTask = new PluginFusioninventoryTask();
        $deploycommon = new PluginFusioninventoryDeployCommon();
        // Start from a clean slate of job states and logs.
        $DB->query("TRUNCATE TABLE `glpi_plugin_fusioninventory_taskjoblogs`");
        $DB->query("TRUNCATE TABLE `glpi_plugin_fusioninventory_taskjobstates`");
        $pfTask->getFromDBByCrit(['name' => 'deploy']);
        $this->assertArrayHasKey('id', $pfTask->fields);
        $pfTask->update([
            'id' => $pfTask->fields['id'],
            'reprepare_if_successful' => 0,
            'is_active' => 1
        ]);
        // prepare
        PluginFusioninventoryTask::cronTaskscheduler();
        $pfTaskjob = new PluginFusioninventoryTaskjob();
        $pfDeployPackage = new PluginFusioninventoryDeployPackage();
        $pfTaskjob->getFromDBByCrit(['plugin_fusioninventory_tasks_id' => $pfTask->fields['id']]);
        $pfDeployPackage->getFromDBByCrit(['name' => 'package']);
        $pfAgent->getFromDBByCrit(['name' => 'computer1']);
        $agentComputer1Id = $pfAgent->fields['id'];
        $pfAgent->getFromDBByCrit(['name' => 'computer2']);
        $agentComputer2Id = $pfAgent->fields['id'];
        $pfAgent->getFromDBByCrit(['name' => 'computer3']);
        $agentComputer3Id = $pfAgent->fields['id'];
        $pfAgent->getFromDBByCrit(['name' => 'computer4']);
        $agentComputer4Id = $pfAgent->fields['id'];
        $data = $pfTask->getJoblogs([$pfTask->fields['id']]);
        // Only agents 1, 3 and 4 are prepared (computer2 was renamed to
        // 'koin' in an earlier test, dropping it from the dynamic group).
        $reference = [
            'agents_prepared' => [
                $agentComputer1Id => 1,
                $agentComputer3Id => 2,
                $agentComputer4Id => 3
            ],
            'agents_cancelled' => [],
            'agents_running' => [],
            'agents_success' => [],
            'agents_error' => [],
            'agents_notdone' => [
                $agentComputer4Id => 3,
                $agentComputer3Id => 2,
                $agentComputer1Id => 1
            ]
        ];
        $counters = $data['tasks'][$pfTask->fields['id']]['jobs'][$pfTaskjob->fields['id']]['targets']['PluginFusioninventoryDeployPackage_'.$pfDeployPackage->fields['id']]['counters'];
        $this->assertEquals($reference, $counters);
        // 1 computer deploy successfully
        $agent = $pfAgent->infoByKey('computer1');
        $taskjobstates = $pfTask->getTaskjobstatesForAgent(
            $agent['id'],
            ['deployinstall']
        );
        foreach ($taskjobstates as $taskjobstate) {
            $jobstate_order = $deploycommon->run($taskjobstate);
            $params = [
                'machineid' => 'computer1',
                'uuid' => $jobstate_order['job']['uuid'],
                'code' => 'ok',
                'msg' => 'seems ok',
                'sendheaders' => false
            ];
            PluginFusioninventoryCommunicationRest::updateLog($params);
        }
        // 1 computer in error
        $agent = $pfAgent->infoByKey('computer3');
        $taskjobstates = $pfTask->getTaskjobstatesForAgent(
            $agent['id'],
            ['deployinstall']
        );
        foreach ($taskjobstates as $taskjobstate) {
            $jobstate_order = $deploycommon->run($taskjobstate);
            $params = [
                'machineid' => 'computer3',
                'uuid' => $jobstate_order['job']['uuid'],
                'code' => 'running',
                'msg' => 'gogogo',
                'sendheaders' => false
            ];
            PluginFusioninventoryCommunicationRest::updateLog($params);
            $params = [
                'machineid' => 'computer3',
                'uuid' => $jobstate_order['job']['uuid'],
                'code' => 'ko',
                'msg' => 'failure of check #1 (error)',
                'sendheaders' => false
            ];
            PluginFusioninventoryCommunicationRest::updateLog($params);
        }
        // re-prepare and will have only the computer in error be in prepared mode
        $data = $pfTask->getJoblogs([$pfTask->fields['id']]);
        $reference = [
            'agents_prepared' => [
                $agentComputer4Id => 3,
            ],
            'agents_cancelled' => [],
            'agents_running' => [],
            'agents_success' => [
                $agentComputer1Id => 1
            ],
            'agents_error' => [
                $agentComputer3Id => 2
            ],
            'agents_notdone' => [
                $agentComputer4Id => 3
            ]
        ];
        $counters = $data['tasks'][$pfTask->fields['id']]['jobs'][$pfTaskjob->fields['id']]['targets']['PluginFusioninventoryDeployPackage_'.$pfDeployPackage->fields['id']]['counters'];
        $this->assertEquals($reference, $counters);
        PluginFusioninventoryTask::cronTaskscheduler();
        $data = $pfTask->getJoblogs([$pfTask->fields['id']]);
        // With reprepare_if_successful == 0, only the failed agent gets a new
        // prepared jobstate; the successful one is left alone.
        $reference = [
            'agents_prepared' => [
                $agentComputer3Id => 7,
                $agentComputer4Id => 3
            ],
            'agents_cancelled' => [],
            'agents_running' => [],
            'agents_success' => [
                $agentComputer1Id => 1
            ],
            'agents_error' => [
                $agentComputer3Id => 2
            ],
            'agents_notdone' => [
                $agentComputer4Id => 3
            ]
        ];
        $counters = $data['tasks'][$pfTask->fields['id']]['jobs'][$pfTaskjob->fields['id']]['targets']['PluginFusioninventoryDeployPackage_'.$pfDeployPackage->fields['id']]['counters'];
        $this->assertEquals($reference, $counters);
        $pfTask->update([
            'id' => $pfTask->fields['id'],
            'reprepare_if_successful' => 1,
        ]);
        PluginFusioninventoryTask::cronTaskscheduler();
        $data = $pfTask->getJoblogs([$pfTask->fields['id']]);
        // With the flag enabled, the successful agent is re-prepared as well.
        $reference = [
            'agents_prepared' => [
                $agentComputer1Id => 9,
                $agentComputer3Id => 7,
                $agentComputer4Id => 3
            ],
            'agents_cancelled' => [],
            'agents_running' => [],
            'agents_success' => [
                $agentComputer1Id => 1,
            ],
            'agents_error' => [
                $agentComputer3Id => 2
            ],
            'agents_notdone' => [
                $agentComputer4Id => 3
            ]
        ];
        $counters = $data['tasks'][$pfTask->fields['id']]['jobs'][$pfTaskjob->fields['id']]['targets']['PluginFusioninventoryDeployPackage_'.$pfDeployPackage->fields['id']]['counters'];
        $this->assertEquals($reference, $counters);
    }
    /**
     * @test
     *
     * cleanTasksAndJobs(n) removes finished job states whose logs are older
     * than n days, and drops the task itself once every one of its jobs is
     * done and cleaned.
     */
    public function cleanTasksAndJobs() {
        global $DB;
        $pfTask = new PluginFusioninventoryTask();
        $pfTaskJob = new PluginFusioninventoryTaskJob();
        $pfTaskJobstate = new PluginFusioninventoryTaskjobstate();
        //We only work on 1 task: drop 'deploy', keep 'ondemand'
        $pfTask->getFromDBByCrit(['name' => 'deploy']);
        $pfTask->delete(['id' => $pfTask->fields['id']], true);
        //Clean all taskjoblogs & states
        $DB->query("TRUNCATE TABLE `glpi_plugin_fusioninventory_taskjoblogs`");
        $DB->query("TRUNCATE TABLE `glpi_plugin_fusioninventory_taskjobstates`");
        //Find the on demand task
        $tasks = $pfTask->find(['name' => 'ondemand']);
        $this->assertEquals(1, count($tasks));
        $task = current($tasks);
        $tasks_id = $task['id'];
        //Prepare the task
        PluginFusioninventoryTask::cronTaskscheduler();
        //Set the first job as successfull
        $query = "SELECT DISTINCT `plugin_fusioninventory_taskjobstates_id`
                  FROM glpi_plugin_fusioninventory_taskjoblogs LIMIT 1";
        foreach ($DB->request($query) as $data) {
            $pfTaskJobstate->changeStatusFinish($data['plugin_fusioninventory_taskjobstates_id'], '', 0);
        }
        //No task & jobstates should be removed: the logs are recent and we
        //only clean entries older than 5 days
        $index = $pfTask->cleanTasksAndJobs(5);
        $this->assertEquals(0, $index);
        //Backdate the joblogs by 4 days
        $datetime = new Datetime($_SESSION['glpi_currenttime']);
        $datetime->modify('-4 days');
        $query = "UPDATE `glpi_plugin_fusioninventory_taskjoblogs`
                  SET `date`='".$datetime->format('Y-m-d')." 00:00:00'";
        $DB->query($query);
        //Still nothing removed: 4 days old is within the 5-day retention
        $index = $pfTask->cleanTasksAndJobs(5);
        $this->assertEquals(0, $index);
        $this->assertEquals(true, $pfTask->getFromDB($tasks_id));
        $computer = new Computer();
        $pfAgent = new PluginFusioninventoryAgent();
        //Add a new computer into the dynamic group
        $input = [
            'entities_id' => 0,
            'name' => 'computer5'
        ];
        $computers_id = $computer->add($input);
        $this->assertNotFalse($computers_id);
        $input = [
            'entities_id' => 0,
            'name' => 'computer5',
            'version' => '{"INVENTORY":"v2.3.21"}',
            'device_id' => 'computer5',
            'useragent' => 'FusionInventory-Agent_v2.3.21',
            'computers_id' => $computers_id
        ];
        $pfAgent->add($input);
        //Reprepare the task
        PluginFusioninventoryTask::cronTaskscheduler();
        //One taskjob is finished and should be cleaned (4 days > 3)
        $index = $pfTask->cleanTasksAndJobs(3);
        $this->assertGreaterThan(0, $index);
        //The task is still in DB because one job is not done
        $this->assertEquals(1, countElementsInTable('glpi_plugin_fusioninventory_tasks',
                                                    ['id' => $tasks_id]));
        //Set the remaining jobs as successfull and backdate their logs too
        $query = "SELECT DISTINCT `plugin_fusioninventory_taskjobstates_id`
                  FROM glpi_plugin_fusioninventory_taskjoblogs";
        foreach ($DB->request($query) as $data) {
            $pfTaskJobstate->changeStatusFinish($data['plugin_fusioninventory_taskjobstates_id'], '', 0);
        }
        $query = "UPDATE `glpi_plugin_fusioninventory_taskjoblogs`
                  SET `date`='".$datetime->format('Y-m-d')." 00:00:00'";
        $DB->query($query);
        //The remaining finished taskjob should now be cleaned
        $index = $pfTask->cleanTasksAndJobs(2);
        $this->assertGreaterThan(0, $index);
        //The task itself has been removed: all of its jobs are done
        $this->assertEquals(0, countElementsInTable('glpi_plugin_fusioninventory_tasks',
                                                    ['id' => $tasks_id]));
    }
}
| fusioninventory/fusioninventory-for-glpi | tests/Integration/Tasks/CronTaskTest.php | PHP | agpl-3.0 | 27,134 |
"""
Useful utilities for management commands.
"""
from django.core.management.base import CommandError
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from six import text_type
def get_mutually_exclusive_required_option(options, *selections):
    """
    Ensure that exactly one of the given option names carries a truthy value.

    Returns the name of the single specified option; raises CommandError if
    zero or more than one of them was supplied.
    """
    found = []
    for selection in selections:
        if options.get(selection):
            found.append(selection)
    if len(found) != 1:
        selection_string = u', '.join('--{}'.format(selection) for selection in selections)
        raise CommandError(u'Must specify exactly one of {}'.format(selection_string))
    return found[0]
def validate_mutually_exclusive_option(options, option_1, option_2):
    """
    Raise CommandError if both of the two given options were supplied together.
    """
    both_given = bool(options.get(option_1)) and bool(options.get(option_2))
    if both_given:
        raise CommandError(u'Both --{} and --{} cannot be specified.'.format(option_1, option_2))
def validate_dependent_option(options, dependent_option, depending_on_option):
    """
    Validates that `depending_on_option` is also specified whenever
    `dependent_option` is specified.

    Raises CommandError if `dependent_option` was given without
    `depending_on_option`.
    """
    if options.get(dependent_option) and not options.get(depending_on_option):
        raise CommandError(u'Option --{} requires option --{}.'.format(dependent_option, depending_on_option))
def parse_course_keys(course_key_strings):
    """
    Parse each string in `course_key_strings` into a CourseKey.

    Raises CommandError on the first string that is not a valid key.
    """
    course_keys = []
    for course_key_string in course_key_strings:
        try:
            course_keys.append(CourseKey.from_string(course_key_string))
        except InvalidKeyError as error:
            raise CommandError(u'Invalid key specified: {}'.format(text_type(error)))  # lint-amnesty, pylint: disable=raise-missing-from
    return course_keys
| stvstnfrd/edx-platform | openedx/core/lib/command_utils.py | Python | agpl-3.0 | 1,812 |
/******************************************************************************
* *
* Copyright: (c) Syncleus, Inc. *
* *
* You may redistribute and modify this source code under the terms and *
* conditions of the Open Source Community License - Type C version 1.0 *
* or any later version as published by Syncleus, Inc. at www.syncleus.com. *
* There should be a copy of the license included with this file. If a copy *
* of the license is not included you are granted no right to distribute or *
* otherwise use this file except through a legal and valid license. You *
* should also contact Syncleus, Inc. at the information below if you cannot *
* find a license: *
* *
* Syncleus, Inc. *
* 2604 South 12th Street *
* Philadelphia, PA 19148 *
* *
******************************************************************************/
package syncleus.dann.neural.util;
import java.util.Random;
import syncleus.dann.neural.Brain;
import syncleus.dann.neural.Neuron;
import syncleus.dann.neural.Synapse;
/**
 * Base implementation of a neuron: holds a reference to the owning brain and
 * leaves the output computation to subclasses.
 */
public abstract class AbstractNeuron implements Neuron {
    /**
     * Random number generator used to produce any needed RANDOM values.
     *
     * @since 1.0
     */
    protected static final Random RANDOM = new Random();

    // The brain this neuron belongs to; guaranteed non-null by the constructor.
    private final Brain<InputNeuron, OutputNeuron, Neuron, Synapse<Neuron>> brain;

    /**
     * Creates a new AbstractNeuron belonging to the given brain.
     *
     * @param brain the brain that owns this neuron; must not be null
     * @throws IllegalArgumentException if brain is null
     * @since 1.0
     */
    protected AbstractNeuron(
            final Brain<InputNeuron, OutputNeuron, Neuron, Synapse<Neuron>> brain) {
        if (brain == null)
            throw new IllegalArgumentException("brain can not be null");
        this.brain = brain;
    }

    /**
     * @return the brain this neuron belongs to (never null)
     */
    protected Brain<InputNeuron, OutputNeuron, Neuron, Synapse<Neuron>> getBrain() {
        return this.brain;
    }

    // TODO consider making this public and moving it to the neuron interface
    /**
     * @return the neuron's current output value
     */
    public abstract double getOutput();

    /**
     * @return the simple class name of this neuron, for debugging output
     */
    @Override
    public String toString() {
        return getClass().getSimpleName();
    }
}
| automenta/java_dann | src/syncleus/dann/neural/util/AbstractNeuron.java | Java | agpl-3.0 | 2,719 |
"""
Acceptance tests for the teams feature.
"""
import json
import random
import time
from dateutil.parser import parse
import ddt
from nose.plugins.attrib import attr
from selenium.common.exceptions import TimeoutException
from uuid import uuid4
from common.test.acceptance.tests.helpers import get_modal_alert, EventsTestMixin, UniqueCourseTest
from common.test.acceptance.fixtures import LMS_BASE_URL
from common.test.acceptance.fixtures.course import CourseFixture
from common.test.acceptance.fixtures.discussion import (
Thread,
MultipleThreadFixture,
ForumsConfigMixin,
)
from common.test.acceptance.pages.lms.auto_auth import AutoAuthPage
from common.test.acceptance.pages.lms.course_info import CourseInfoPage
from common.test.acceptance.pages.lms.learner_profile import LearnerProfilePage
from common.test.acceptance.pages.lms.tab_nav import TabNavPage
from common.test.acceptance.pages.lms.teams import (
TeamsPage,
MyTeamsPage,
BrowseTopicsPage,
BrowseTeamsPage,
TeamManagementPage,
EditMembershipPage,
TeamPage
)
from common.test.acceptance.pages.common.utils import confirm_prompt
TOPICS_PER_PAGE = 12
class TeamsTabBase(EventsTestMixin, ForumsConfigMixin, UniqueCourseTest):
    """Base class for Teams Tab tests.

    Provides helpers shared by the concrete test cases: creating topics,
    teams, and memberships through the Teams REST API, plus assertion
    helpers for the teams UI.
    """

    def setUp(self):
        super(TeamsTabBase, self).setUp()
        self.tab_nav = TabNavPage(self.browser)
        self.course_info_page = CourseInfoPage(self.browser, self.course_id)
        self.teams_page = TeamsPage(self.browser, self.course_id)
        # TODO: Refactor so resetting events database is not necessary
        self.reset_event_tracking()
        self.enable_forums()

    def create_topics(self, num_topics):
        """Create `num_topics` test topics.

        Each topic's id, name, and description are simply the stringified
        index, which keeps the fixtures predictable for sorting tests.
        """
        return [{u"description": i, u"name": i, u"id": i} for i in map(str, xrange(num_topics))]

    def create_teams(self, topic, num_teams, time_between_creation=0):
        """Create `num_teams` teams belonging to `topic`.

        `time_between_creation` (seconds) is slept after each creation so
        that `last_activity_at` timestamps are distinct when tests sort by
        last activity.
        """
        teams = []
        for i in xrange(num_teams):
            team = {
                'course_id': self.course_id,
                'topic_id': topic['id'],
                'name': 'Team {}'.format(i),
                'description': 'Description {}'.format(i),
                'language': 'aa',
                'country': 'AF'
            }
            teams.append(self.post_team_data(team))
            # Sadly, this sleep is necessary in order to ensure that
            # sorting by last_activity_at works correctly when running
            # in Jenkins.
            # THIS IS AN ANTI-PATTERN - DO NOT COPY.
            time.sleep(time_between_creation)
        return teams

    def post_team_data(self, team_data):
        """Given a JSON representation of a team, post it to the server.

        Returns the server's JSON response (the created team) as a dict.
        """
        response = self.course_fixture.session.post(
            LMS_BASE_URL + '/api/team/v0/teams/',
            data=json.dumps(team_data),
            headers=self.course_fixture.headers
        )
        self.assertEqual(response.status_code, 200)
        return json.loads(response.text)

    def create_memberships(self, num_memberships, team_id):
        """Create `num_memberships` users and assign them to `team_id`. The
        last user created becomes the current user."""
        memberships = []
        for __ in xrange(num_memberships):
            user_info = AutoAuthPage(self.browser, course_id=self.course_id).visit().user_info
            memberships.append(user_info)
            self.create_membership(user_info['username'], team_id)
        #pylint: disable=attribute-defined-outside-init
        self.user_info = memberships[-1]
        return memberships

    def create_membership(self, username, team_id):
        """Assign `username` to `team_id` via the membership API."""
        response = self.course_fixture.session.post(
            LMS_BASE_URL + '/api/team/v0/team_membership/',
            data=json.dumps({'username': username, 'team_id': team_id}),
            headers=self.course_fixture.headers
        )
        return json.loads(response.text)

    def set_team_configuration(self, configuration, enroll_in_course=True, global_staff=False):
        """
        Sets team configuration on the course and calls auto-auth on the user.
        """
        #pylint: disable=attribute-defined-outside-init
        self.course_fixture = CourseFixture(**self.course_info)
        if configuration:
            self.course_fixture.add_advanced_settings(
                {u"teams_configuration": {u"value": configuration}}
            )
        self.course_fixture.install()
        enroll_course_id = self.course_id if enroll_in_course else None
        #pylint: disable=attribute-defined-outside-init
        self.user_info = AutoAuthPage(self.browser, course_id=enroll_course_id, staff=global_staff).visit().user_info
        self.course_info_page.visit()

    def verify_teams_present(self, present):
        """
        Verifies whether or not the teams tab is present. If it should be present, also
        checks the text on the page (to ensure view is working).
        """
        if present:
            self.assertIn("Teams", self.tab_nav.tab_names)
            self.teams_page.visit()
            self.assertEqual(self.teams_page.active_tab(), 'browse')
        else:
            self.assertNotIn("Teams", self.tab_nav.tab_names)

    def verify_teams(self, page, expected_teams):
        """Verify that the list of team cards on the current page match the expected teams in order."""
        def assert_team_equal(expected_team, team_card_name, team_card_description):
            """
            Helper to assert that a single team card has the expected name and
            description.
            """
            self.assertEqual(expected_team['name'], team_card_name)
            self.assertEqual(expected_team['description'], team_card_description)
        team_card_names = page.team_names
        team_card_descriptions = page.team_descriptions
        # Fail with a clear count mismatch instead of relying on Python 2's
        # multi-iterable `map`, which pads the shorter sequence with `None`
        # and produced confusing per-card assertion errors.
        self.assertEqual(len(team_card_names), len(expected_teams))
        self.assertEqual(len(team_card_descriptions), len(expected_teams))
        for expected_team, name, description in zip(expected_teams, team_card_names, team_card_descriptions):
            assert_team_equal(expected_team, name, description)

    def verify_my_team_count(self, expected_number_of_teams):
        """ Verify the number of teams shown on "My Team". """
        # We are doing these operations on this top-level page object to avoid reloading the page.
        self.teams_page.verify_my_team_count(expected_number_of_teams)

    def only_team_events(self, event):
        """Filter out all non-team events."""
        return event['event_type'].startswith('edx.team.')
@ddt.ddt
@attr(shard=5)
class TeamsTabTest(TeamsTabBase):
    """
    Tests verifying when the Teams tab is present.
    """
    def test_teams_not_enabled(self):
        """
        Scenario: teams tab should not be present if no team configuration is set
        Given I am enrolled in a course without team configuration
        When I view the course info page
        Then I should not see the Teams tab
        """
        self.set_team_configuration(None)
        self.verify_teams_present(False)

    def test_teams_not_enabled_no_topics(self):
        """
        Scenario: teams tab should not be present if team configuration does not specify topics
        Given I am enrolled in a course with no topics in the team configuration
        When I view the course info page
        Then I should not see the Teams tab
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": []})
        self.verify_teams_present(False)

    def test_teams_not_enabled_not_enrolled(self):
        """
        Scenario: teams tab should not be present if student is not enrolled in the course
        Given there is a course with team configuration and topics
        And I am not enrolled in that course, and am not global staff
        When I view the course info page
        Then I should not see the Teams tab
        """
        self.set_team_configuration(
            {u"max_team_size": 10, u"topics": self.create_topics(1)},
            enroll_in_course=False
        )
        self.verify_teams_present(False)

    def test_teams_enabled(self):
        """
        Scenario: teams tab should be present if user is enrolled in the course and it has team configuration
        Given I am enrolled in a course with team configuration and topics
        When I view the course info page
        Then I should see the Teams tab
        And the correct content should be on the page
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(1)})
        self.verify_teams_present(True)

    def test_teams_enabled_global_staff(self):
        """
        Scenario: teams tab should be present if user is not enrolled in the course, but is global staff
        Given there is a course with team configuration
        And I am not enrolled in that course, but am global staff
        When I view the course info page
        Then I should see the Teams tab
        And the correct content should be on the page
        """
        self.set_team_configuration(
            {u"max_team_size": 10, u"topics": self.create_topics(1)},
            enroll_in_course=False,
            global_staff=True
        )
        self.verify_teams_present(True)

    @ddt.data(
        'topics/{topic_id}',
        'topics/{topic_id}/search',
        'teams/{topic_id}/{team_id}/edit-team',
        'teams/{topic_id}/{team_id}'
    )
    def test_unauthorized_error_message(self, route):
        """Ensure that an error message is shown to the user if they attempt
        to take an action which makes an AJAX request while not signed
        in.
        """
        topics = self.create_topics(1)
        topic = topics[0]
        self.set_team_configuration(
            {u'max_team_size': 10, u'topics': topics},
            global_staff=True
        )
        team = self.create_teams(topic, 1)[0]
        self.teams_page.visit()
        # Simulate a signed-out user by deleting the session cookie so the
        # subsequent AJAX request is unauthenticated.
        self.browser.delete_cookie('sessionid')
        # Strip any existing fragment before appending the route under test.
        url = self.browser.current_url.split('#')[0]
        self.browser.get(
            '{url}#{route}'.format(
                url=url,
                route=route.format(
                    topic_id=topic['id'],
                    team_id=team['id']
                )
            )
        )
        self.teams_page.wait_for_ajax()
        self.assertEqual(
            self.teams_page.warning_message,
            u"Your request could not be completed. Reload the page and try again."
        )

    @ddt.data(
        ('browse', '.topics-list'),
        # TODO: find a reliable way to match the "My Teams" tab
        # ('my-teams', 'div.teams-list'),
        ('teams/{topic_id}/{team_id}', 'div.discussion-module'),
        ('topics/{topic_id}/create-team', 'div.create-team-instructions'),
        ('topics/{topic_id}', '.teams-list'),
        ('not-a-real-route', 'div.warning')
    )
    @ddt.unpack
    def test_url_routing(self, route, selector):
        """Ensure that navigating to a URL route correctly updates the page
        content.

        Each (route, selector) pair maps a client-side route to a CSS
        selector expected to be rendered for that route.
        """
        topics = self.create_topics(1)
        topic = topics[0]
        self.set_team_configuration({
            u'max_team_size': 10,
            u'topics': topics
        })
        team = self.create_teams(topic, 1)[0]
        self.teams_page.visit()
        # Get the base URL (the URL without any trailing fragment)
        url = self.browser.current_url
        fragment_index = url.find('#')
        if fragment_index >= 0:
            url = url[0:fragment_index]
        self.browser.get(
            '{url}#{route}'.format(
                url=url,
                route=route.format(
                    topic_id=topic['id'],
                    team_id=team['id']
                ))
        )
        self.teams_page.wait_for_page()
        self.teams_page.wait_for_ajax()
        # The route-specific element must both exist and be rendered visibly.
        self.assertTrue(self.teams_page.q(css=selector).present)
        self.assertTrue(self.teams_page.q(css=selector).visible)
@attr(shard=5)
class MyTeamsTest(TeamsTabBase):
    """
    Tests for the "My Teams" tab of the Teams page.
    """
    def setUp(self):
        super(MyTeamsTest, self).setUp()
        self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
        self.set_team_configuration({'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]})
        self.my_teams_page = MyTeamsPage(self.browser, self.course_id)
        # Analytics event expected to fire whenever "My Teams" is viewed.
        self.page_viewed_event = {
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'my-teams',
                'topic_id': None,
                'team_id': None
            }
        }

    def test_not_member_of_any_teams(self):
        """
        Scenario: Visiting the My Teams page when user is not a member of any team should not display any teams.
        Given I am enrolled in a course with a team configuration and a topic but am not a member of a team
        When I visit the My Teams page
        And I should see no teams
        And I should see a message that I belong to no teams.
        """
        with self.assert_events_match_during(self.only_team_events, expected_events=[self.page_viewed_event]):
            self.my_teams_page.visit()
        self.assertEqual(len(self.my_teams_page.team_cards), 0, msg='Expected to see no team cards')
        self.assertEqual(
            self.my_teams_page.q(css='.page-content-main').text,
            [u'You are not currently a member of any team.']
        )

    def test_member_of_a_team(self):
        """
        Scenario: Visiting the My Teams page when user is a member of a team should display the teams.
        Given I am enrolled in a course with a team configuration and a topic and am a member of a team
        When I visit the My Teams page
        Then I should see a pagination header showing the number of teams
        And I should see all the expected team cards
        And I should not see a pagination footer
        """
        teams = self.create_teams(self.topic, 1)
        self.create_membership(self.user_info['username'], teams[0]['id'])
        with self.assert_events_match_during(self.only_team_events, expected_events=[self.page_viewed_event]):
            self.my_teams_page.visit()
        self.verify_teams(self.my_teams_page, teams)

    def test_multiple_team_members(self):
        """
        Scenario: Visiting the My Teams page when user is a member of a team should display the teams.
        Given I am a member of a team with multiple members
        When I visit the My Teams page
        Then I should see the correct number of team members on my membership
        """
        teams = self.create_teams(self.topic, 1)
        # Four members are added; max_team_size is 10 (set in setUp),
        # hence the expected "4 / 10 Members" label.
        self.create_memberships(4, teams[0]['id'])
        self.my_teams_page.visit()
        self.assertEqual(self.my_teams_page.team_memberships[0], '4 / 10 Members')
@attr(shard=5)
@ddt.ddt
class BrowseTopicsTest(TeamsTabBase):
    """
    Tests for the Browse tab of the Teams page.
    """
    def setUp(self):
        super(BrowseTopicsTest, self).setUp()
        self.topics_page = BrowseTopicsPage(self.browser, self.course_id)

    @ddt.data(('name', False), ('team_count', True))
    @ddt.unpack
    def test_sort_topics(self, sort_order, reverse):
        """
        Scenario: the user should be able to sort the list of topics by name or team count
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        Then I should see a list of topics for the course
        When I choose a sort order
        Then I should see the paginated list of topics in that order
        """
        topics = self.create_topics(TOPICS_PER_PAGE + 1)
        self.set_team_configuration({u"max_team_size": 100, u"topics": topics})
        # Shuffle the topics so that team counts do not correlate with
        # creation order; each topic i gets i teams.
        for i, topic in enumerate(random.sample(topics, len(topics))):
            self.create_teams(topic, i)
            topic['team_count'] = i
        self.topics_page.visit()
        self.topics_page.sort_topics_by(sort_order)
        topic_names = self.topics_page.topic_names
        self.assertEqual(len(topic_names), TOPICS_PER_PAGE)
        # Compare against the first page of the expected sort order.
        self.assertEqual(
            topic_names,
            [t['name'] for t in sorted(topics, key=lambda t: t[sort_order], reverse=reverse)][:TOPICS_PER_PAGE]
        )

    def test_sort_topics_update(self):
        """
        Scenario: the list of topics should remain sorted after updates
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics and choose a sort order
        Then I should see the paginated list of topics in that order
        When I create a team in one of those topics
        And I return to the topics list
        Then I should see the topics in the correct sorted order
        """
        topics = self.create_topics(3)
        self.set_team_configuration({u"max_team_size": 100, u"topics": topics})
        self.topics_page.visit()
        self.topics_page.sort_topics_by('team_count')
        # Pick the topic currently last (fewest teams); after creating a
        # team in it, it should move to the front of the sorted list.
        topic_name = self.topics_page.topic_names[-1]
        topic = [t for t in topics if t['name'] == topic_name][0]
        self.topics_page.browse_teams_for_topic(topic_name)
        browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, topic)
        browse_teams_page.wait_for_page()
        browse_teams_page.click_create_team_link()
        create_team_page = TeamManagementPage(self.browser, self.course_id, topic)
        create_team_page.create_team()
        team_page = TeamPage(self.browser, self.course_id)
        team_page.wait_for_page()
        team_page.click_all_topics()
        self.topics_page.wait_for_page()
        self.topics_page.wait_for_ajax()
        self.assertEqual(topic_name, self.topics_page.topic_names[0])

    def test_list_topics(self):
        """
        Scenario: a list of topics should be visible in the "Browse" tab
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        Then I should see a list of topics for the course
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(2)})
        self.topics_page.visit()
        self.assertEqual(len(self.topics_page.topic_cards), 2)
        self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-2 out of 2 total'))
        self.assertFalse(self.topics_page.pagination_controls_visible())
        self.assertFalse(self.topics_page.is_previous_page_button_enabled())
        self.assertFalse(self.topics_page.is_next_page_button_enabled())

    def test_topic_pagination(self):
        """
        Scenario: a list of topics should be visible in the "Browse" tab, paginated 12 per page
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        Then I should see only the first 12 topics
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(20)})
        self.topics_page.visit()
        self.assertEqual(len(self.topics_page.topic_cards), TOPICS_PER_PAGE)
        self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-12 out of 20 total'))
        self.assertTrue(self.topics_page.pagination_controls_visible())
        self.assertFalse(self.topics_page.is_previous_page_button_enabled())
        self.assertTrue(self.topics_page.is_next_page_button_enabled())

    def test_go_to_numbered_page(self):
        """
        Scenario: topics should be able to be navigated by page number
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        And I enter a valid page number in the page number input
        Then I should see that page of topics
        """
        # 25 topics at 12 per page => page 3 holds a single topic.
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(25)})
        self.topics_page.visit()
        self.topics_page.go_to_page(3)
        self.assertEqual(len(self.topics_page.topic_cards), 1)
        self.assertTrue(self.topics_page.is_previous_page_button_enabled())
        self.assertFalse(self.topics_page.is_next_page_button_enabled())

    def test_go_to_invalid_page(self):
        """
        Scenario: browsing topics should not respond to invalid page numbers
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        And I enter an invalid page number in the page number input
        Then I should stay on the current page
        """
        # 13 topics => only 2 pages exist, so page 3 is invalid.
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(13)})
        self.topics_page.visit()
        self.topics_page.go_to_page(3)
        self.assertEqual(self.topics_page.get_current_page_number(), 1)

    def test_page_navigation_buttons(self):
        """
        Scenario: browsing topics should respond to the next/previous page buttons
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        When I press the next page button
        Then I should move to the next page
        When I press the previous page button
        Then I should move to the previous page
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(13)})
        self.topics_page.visit()
        self.topics_page.press_next_page_button()
        self.assertEqual(len(self.topics_page.topic_cards), 1)
        self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 13-13 out of 13 total'))
        self.topics_page.press_previous_page_button()
        self.assertEqual(len(self.topics_page.topic_cards), TOPICS_PER_PAGE)
        self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-12 out of 13 total'))

    def test_topic_pagination_one_page(self):
        """
        Scenario: Browsing topics when there are fewer topics than the page size i.e. 12
        all topics should show on one page
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse topics
        And I should see corrected number of topic cards
        And I should see the correct page header
        And I should not see a pagination footer
        """
        self.set_team_configuration({u"max_team_size": 10, u"topics": self.create_topics(10)})
        self.topics_page.visit()
        self.assertEqual(len(self.topics_page.topic_cards), 10)
        self.assertTrue(self.topics_page.get_pagination_header_text().startswith('Showing 1-10 out of 10 total'))
        self.assertFalse(self.topics_page.pagination_controls_visible())

    def test_topic_description_truncation(self):
        """
        Scenario: excessively long topic descriptions should be truncated so
        as to fit within a topic card.
        Given I am enrolled in a course with a team configuration and a topic
        with a long description
        When I visit the Teams page
        And I browse topics
        Then I should see a truncated topic description
        """
        initial_description = "A" + " really" * 50 + " long description"
        self.set_team_configuration(
            {u"max_team_size": 1, u"topics": [{"name": "", "id": "", "description": initial_description}]}
        )
        self.topics_page.visit()
        truncated_description = self.topics_page.topic_descriptions[0]
        # The rendered description must be shorter, end with an ellipsis,
        # and be a prefix of the original text.
        self.assertLess(len(truncated_description), len(initial_description))
        self.assertTrue(truncated_description.endswith('...'))
        self.assertIn(truncated_description.split('...')[0], initial_description)

    def test_go_to_teams_list(self):
        """
        Scenario: Clicking on a Topic Card should take you to the
        teams list for that Topic.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page
        And I browse topics
        And I click on the arrow link to view teams for the first topic
        Then I should be on the browse teams page
        """
        topic = {u"name": u"Example Topic", u"id": u"example_topic", u"description": "Description"}
        self.set_team_configuration(
            {u"max_team_size": 1, u"topics": [topic]}
        )
        self.topics_page.visit()
        self.topics_page.browse_teams_for_topic('Example Topic')
        browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, topic)
        browse_teams_page.wait_for_page()
        self.assertEqual(browse_teams_page.header_name, 'Example Topic')
        self.assertEqual(browse_teams_page.header_description, 'Description')

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the browse topics page should fire a page viewed event.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the browse topics page
        Then my browser should post a page viewed event
        """
        topic = {u"name": u"Example Topic", u"id": u"example_topic", u"description": "Description"}
        self.set_team_configuration(
            {u"max_team_size": 1, u"topics": [topic]}
        )
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'browse',
                'topic_id': None,
                'team_id': None
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.topics_page.visit()
@attr(shard=5)
@ddt.ddt
class BrowseTeamsWithinTopicTest(TeamsTabBase):
    """
    Tests for browsing Teams within a Topic on the Teams page.
    """
    TEAMS_PAGE_SIZE = 10

    def setUp(self):
        super(BrowseTeamsWithinTopicTest, self).setUp()
        self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
        self.max_team_size = 10
        self.set_team_configuration({
            'course_id': self.course_id,
            'max_team_size': self.max_team_size,
            'topics': [self.topic]
        })
        self.browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
        self.topics_page = BrowseTopicsPage(self.browser, self.course_id)

    def teams_with_default_sort_order(self, teams):
        """Return a list of teams sorted according to the default ordering
        (last_activity_at, with a secondary sort by open slots).

        Relies on Python's stable sort: the inner sort establishes the
        secondary key (membership count), the outer sort the primary key.
        """
        return sorted(
            sorted(teams, key=lambda t: len(t['membership']), reverse=True),
            key=lambda t: parse(t['last_activity_at']).replace(microsecond=0),
            reverse=True
        )

    def verify_page_header(self):
        """Verify that the page header correctly reflects the current topic's name and description."""
        self.assertEqual(self.browse_teams_page.header_name, self.topic['name'])
        self.assertEqual(self.browse_teams_page.header_description, self.topic['description'])

    def verify_search_header(self, search_results_page, search_query):
        """Verify that the search results header reflects the search query."""
        self.assertEqual(search_results_page.header_name, 'Team Search')
        self.assertEqual(
            search_results_page.header_description,
            'Showing results for "{search_query}"'.format(search_query=search_query)
        )

    def verify_on_page(self, teams_page, page_num, total_teams, pagination_header_text, footer_visible):
        """
        Verify that we are on the correct team list page.
        Arguments:
            teams_page (BaseTeamsPage): The teams page object that should be the current page.
            page_num (int): The one-indexed page number that we expect to be on
            total_teams (list): An unsorted list of all the teams for the
                current topic
            pagination_header_text (str): Text we expect to see in the
                pagination header.
            footer_visible (bool): Whether we expect to see the pagination
                footer controls.
        """
        sorted_teams = self.teams_with_default_sort_order(total_teams)
        self.assertTrue(teams_page.get_pagination_header_text().startswith(pagination_header_text))
        # Only the slice of teams belonging to `page_num` should be shown.
        self.verify_teams(
            teams_page,
            sorted_teams[(page_num - 1) * self.TEAMS_PAGE_SIZE:page_num * self.TEAMS_PAGE_SIZE]
        )
        self.assertEqual(
            teams_page.pagination_controls_visible(),
            footer_visible,
            # The conditional must be parenthesized: `+` binds tighter than
            # `if/else`, so the unparenthesized original dropped the message
            # prefix whenever footer_visible was False.
            msg='Expected paging footer to be ' + ('visible' if footer_visible else 'invisible')
        )

    @ddt.data(
        ('open_slots', 'last_activity_at', True),
        ('last_activity_at', 'open_slots', True)
    )
    @ddt.unpack
    def test_sort_teams(self, sort_order, secondary_sort_order, reverse):
        """
        Scenario: the user should be able to sort the list of teams by open slots or last activity
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse teams within a topic
        Then I should see a list of teams for that topic
        When I choose a sort order
        Then I should see the paginated list of teams in that order
        """
        teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1)
        # Give each team a distinct number of members (and thus open slots),
        # in shuffled order so membership does not correlate with creation time.
        for i, team in enumerate(random.sample(teams, len(teams))):
            for _ in range(i):
                user_info = AutoAuthPage(self.browser, course_id=self.course_id).visit().user_info
                self.create_membership(user_info['username'], team['id'])
            team['open_slots'] = self.max_team_size - i
        # Re-authenticate as staff after creating users
        AutoAuthPage(
            self.browser,
            course_id=self.course_id,
            staff=True
        ).visit()
        self.browse_teams_page.visit()
        self.browse_teams_page.sort_teams_by(sort_order)
        team_names = self.browse_teams_page.team_names
        self.assertEqual(len(team_names), self.TEAMS_PAGE_SIZE)
        # Stable double-sort: inner sort supplies the secondary key.
        sorted_teams = [
            team['name']
            for team in sorted(
                sorted(teams, key=lambda t: t[secondary_sort_order], reverse=reverse),
                key=lambda t: t[sort_order],
                reverse=reverse
            )
        ][:self.TEAMS_PAGE_SIZE]
        self.assertEqual(team_names, sorted_teams)

    def test_default_sort_order(self):
        """
        Scenario: the list of teams should be sorted by last activity by default
        Given I am enrolled in a course with team configuration and topics
        When I visit the Teams page
        And I browse teams within a topic
        Then I should see a list of teams for that topic, sorted by last activity
        """
        self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1)
        self.browse_teams_page.visit()
        self.assertEqual(self.browse_teams_page.sort_order, 'last activity')

    def test_no_teams(self):
        """
        Scenario: Visiting a topic with no teams should not display any teams.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page for that topic
        Then I should see the correct page header
        And I should see a pagination header showing no teams
        And I should see no teams
        And I should see a button to add a team
        And I should not see a pagination footer
        """
        self.browse_teams_page.visit()
        self.verify_page_header()
        self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
        self.assertEqual(len(self.browse_teams_page.team_cards), 0, msg='Expected to see no team cards')
        self.assertFalse(
            self.browse_teams_page.pagination_controls_visible(),
            msg='Expected paging footer to be invisible'
        )

    def test_teams_one_page(self):
        """
        Scenario: Visiting a topic with fewer teams than the page size should
        show all those teams on one page.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page for that topic
        Then I should see the correct page header
        And I should see a pagination header showing the number of teams
        And I should see all the expected team cards
        And I should see a button to add a team
        And I should not see a pagination footer
        """
        teams = self.teams_with_default_sort_order(
            self.create_teams(self.topic, self.TEAMS_PAGE_SIZE, time_between_creation=1)
        )
        self.browse_teams_page.visit()
        self.verify_page_header()
        self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 1-10 out of 10 total'))
        self.verify_teams(self.browse_teams_page, teams)
        self.assertFalse(
            self.browse_teams_page.pagination_controls_visible(),
            msg='Expected paging footer to be invisible'
        )

    def test_teams_navigation_buttons(self):
        """
        Scenario: The user should be able to page through a topic's team list
        using navigation buttons when it is longer than the page size.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page for that topic
        Then I should see the correct page header
        And I should see that I am on the first page of results
        When I click on the next page button
        Then I should see that I am on the second page of results
        And when I click on the previous page button
        Then I should see that I am on the first page of results
        """
        teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 1, time_between_creation=1)
        self.browse_teams_page.visit()
        self.verify_page_header()
        self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 11 total', True)
        self.browse_teams_page.press_next_page_button()
        self.verify_on_page(self.browse_teams_page, 2, teams, 'Showing 11-11 out of 11 total', True)
        self.browse_teams_page.press_previous_page_button()
        self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 11 total', True)

    def test_teams_page_input(self):
        """
        Scenario: The user should be able to page through a topic's team list
        using the page input when it is longer than the page size.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page for that topic
        Then I should see the correct page header
        And I should see that I am on the first page of results
        When I input the second page
        Then I should see that I am on the second page of results
        When I input the first page
        Then I should see that I am on the first page of results
        """
        teams = self.create_teams(self.topic, self.TEAMS_PAGE_SIZE + 10, time_between_creation=1)
        self.browse_teams_page.visit()
        self.verify_page_header()
        self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 20 total', True)
        self.browse_teams_page.go_to_page(2)
        self.verify_on_page(self.browse_teams_page, 2, teams, 'Showing 11-20 out of 20 total', True)
        self.browse_teams_page.go_to_page(1)
        self.verify_on_page(self.browse_teams_page, 1, teams, 'Showing 1-10 out of 20 total', True)

    def test_browse_team_topics(self):
        """
        Scenario: User should be able to navigate to "browse all teams" and "search team description" links.
        Given I am enrolled in a course with teams enabled
        When I visit the Teams page for a topic
        Then I should see the correct page header
        And I should see the link to "browse teams in other topics"
        When I should navigate to that link
        Then I should see the topic browse page
        """
        self.browse_teams_page.visit()
        self.verify_page_header()
        self.browse_teams_page.click_browse_all_teams_link()
        self.topics_page.wait_for_page()

    def test_search(self):
        """
        Scenario: User should be able to search for a team
        Given I am enrolled in a course with teams enabled
        When I visit the Teams page for that topic
        And I search for 'banana'
        Then I should see the search result page
        And the search header should be shown
        And 0 results should be shown
        And my browser should fire a page viewed event for the search page
        And a searched event should have been fired
        """
        # Note: all searches will return 0 results with the mock search server
        # used by Bok Choy.
        search_text = 'banana'
        self.create_teams(self.topic, 5)
        self.browse_teams_page.visit()
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'search-teams',
                'topic_id': self.topic['id'],
                'team_id': None
            }
        }, {
            'event_type': 'edx.team.searched',
            'event': {
                'search_text': search_text,
                'topic_id': self.topic['id'],
                'number_of_results': 0
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events, in_order=False):
            search_results_page = self.browse_teams_page.search(search_text)
        self.verify_search_header(search_results_page, search_text)
        self.assertTrue(search_results_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the browse page should fire a page viewed event.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page
        Then my browser should post a page viewed event for the teams page
        """
        self.create_teams(self.topic, 5)
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'single-topic',
                'topic_id': self.topic['id'],
                'team_id': None
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.browse_teams_page.visit()

    def test_team_name_xss(self):
        """
        Scenario: Team names should be HTML-escaped on the teams page
        Given I am enrolled in a course with teams enabled
        When I visit the Teams page for a topic, with a team name containing JS code
        Then I should not see any alerts
        """
        self.post_team_data({
            'course_id': self.course_id,
            'topic_id': self.topic['id'],
            'name': '<script>alert("XSS")</script>',
            'description': 'Description',
            'language': 'aa',
            'country': 'AF'
        })
        # The test passes when NO alert appears: get_modal_alert is expected
        # to time out waiting for one, raising TimeoutException.
        with self.assertRaises(TimeoutException):
            self.browser.get(self.browse_teams_page.url)
            alert = get_modal_alert(self.browser)
            alert.accept()
@attr(shard=5)
class TeamFormActions(TeamsTabBase):
    """
    Base class for create, edit, and delete team.

    Provides the shared topic fixture, canned form values, and helpers for
    navigating to and verifying the team create/edit form pages.
    """
    # Canned values used by fill_create_or_edit_form and asserted by subclasses.
    TEAM_DESCRIPTION = 'The Avengers are a fictional team of superheroes.'
    topic = {'name': 'Example Topic', 'id': 'example_topic', 'description': 'Description'}
    TEAMS_NAME = 'Avengers'

    def setUp(self):
        super(TeamFormActions, self).setUp()
        self.team_management_page = TeamManagementPage(self.browser, self.course_id, self.topic)

    def verify_page_header(self, title, description, breadcrumbs):
        """
        Verify that the page header correctly reflects the
        create team header, description and breadcrumb.
        """
        self.assertEqual(self.team_management_page.header_page_name, title)
        self.assertEqual(self.team_management_page.header_page_description, description)
        self.assertEqual(self.team_management_page.header_page_breadcrumbs, breadcrumbs)

    def verify_and_navigate_to_create_team_page(self):
        """Navigates to the create team page and verifies."""
        # Assumes self.browse_teams_page was set up by the subclass — TODO confirm.
        self.browse_teams_page.click_create_team_link()
        self.verify_page_header(
            title='Create a New Team',
            description='Create a new team if you can\'t find an existing team to join, '
            'or if you would like to learn with friends you know.',
            breadcrumbs='All Topics {topic_name}'.format(topic_name=self.topic['name'])
        )

    def verify_and_navigate_to_edit_team_page(self):
        """Navigates to the edit team page and verifies."""
        # self.team_page / self.team are defined on subclasses.
        # pylint: disable=no-member
        self.assertEqual(self.team_page.team_name, self.team['name'])
        self.assertTrue(self.team_page.edit_team_button_present)
        self.team_page.click_edit_team_button()
        self.team_management_page.wait_for_page()
        # Edit page header.
        self.verify_page_header(
            title='Edit Team',
            description='If you make significant changes, make sure you notify '
            'members of the team before making these changes.',
            breadcrumbs='All Topics {topic_name} {team_name}'.format(
                topic_name=self.topic['name'],
                team_name=self.team['name']
            )
        )

    def verify_team_info(self, name, description, location, language):
        """Verify the team information on team page."""
        # pylint: disable=no-member
        self.assertEqual(self.team_page.team_name, name)
        self.assertEqual(self.team_page.team_description, description)
        self.assertEqual(self.team_page.team_location, location)
        self.assertEqual(self.team_page.team_language, language)

    def fill_create_or_edit_form(self):
        """Fill the create/edit team form fields with appropriate values."""
        self.team_management_page.value_for_text_field(
            field_id='name',
            value=self.TEAMS_NAME,
            press_enter=False
        )
        self.team_management_page.set_value_for_textarea_field(
            field_id='description',
            value=self.TEAM_DESCRIPTION
        )
        # Dropdowns take display labels, not ISO codes.
        self.team_management_page.value_for_dropdown_field(field_id='language', value='English')
        self.team_management_page.value_for_dropdown_field(field_id='country', value='Pakistan')

    def verify_all_fields_exist(self):
        """
        Verify the fields for create/edit page.
        """
        self.assertEqual(
            self.team_management_page.message_for_field('name'),
            'A name that identifies your team (maximum 255 characters).'
        )
        self.assertEqual(
            self.team_management_page.message_for_textarea_field('description'),
            'A short description of the team to help other learners understand '
            'the goals or direction of the team (maximum 300 characters).'
        )
        self.assertEqual(
            self.team_management_page.message_for_field('country'),
            'The country that team members primarily identify with.'
        )
        self.assertEqual(
            self.team_management_page.message_for_field('language'),
            'The language that team members primarily use to communicate with each other.'
        )
@ddt.ddt
class CreateTeamTest(TeamFormActions):
    """
    Tests for creating a new Team within a Topic on the Teams page.
    """

    def setUp(self):
        super(CreateTeamTest, self).setUp()
        self.set_team_configuration({'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]})
        self.browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
        self.browse_teams_page.visit()

    def test_user_can_see_create_team_page(self):
        """
        Scenario: The user should be able to see the create team page via teams list page.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Teams page for that topic
        Then I should see the Create Team page link on bottom
        And When I click create team link
        Then I should see the create team page.
        And I should see the create team header
        And I should also see the help messages for fields.
        """
        self.verify_and_navigate_to_create_team_page()
        self.verify_all_fields_exist()

    def test_user_can_see_error_message_for_missing_data(self):
        """
        Scenario: The user should be able to see error message in case of missing required field.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Create Team page for that topic
        Then I should see the Create Team header and form
        And When I click create team button without filling required fields
        Then I should see the error message and highlighted fields.
        """
        self.verify_and_navigate_to_create_team_page()
        # `submit_form` clicks on a button, but that button doesn't always
        # have the click event handler registered on it in time. That's why
        # this test is flaky. Unfortunately, I don't know of a straightforward
        # way to write something that waits for that event handler to be bound
        # to the button element. So I used time.sleep as well, even though
        # the bok choy docs explicitly ask us not to:
        # http://bok-choy.readthedocs.io/en/latest/guidelines.html
        # Sorry! For the story to address this anti-pattern, see TNL-5820
        time.sleep(0.5)
        self.team_management_page.submit_form()
        self.team_management_page.wait_for(
            lambda: self.team_management_page.validation_message_text,
            "Validation message text never loaded."
        )
        self.assertEqual(
            self.team_management_page.validation_message_text,
            'Check the highlighted fields below and try again.'
        )
        # Both required fields should be highlighted as errors.
        self.assertTrue(self.team_management_page.error_for_field(field_id='name'))
        self.assertTrue(self.team_management_page.error_for_field(field_id='description'))

    def test_user_can_see_error_message_for_incorrect_data(self):
        """
        Scenario: The user should be able to see error message in case of increasing length for required fields.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Create Team page for that topic
        Then I should see the Create Team header and form
        When I add text > than 255 characters for name field
        And I click Create button
        Then I should see the error message for exceeding length.
        """
        self.verify_and_navigate_to_create_team_page()
        # Fill the name field with >255 characters to see validation message.
        self.team_management_page.value_for_text_field(
            field_id='name',
            value='EdX is a massive open online course (MOOC) provider and online learning platform. '
            'It hosts online university-level courses in a wide range of disciplines to a worldwide '
            'audience, some at no charge. It also conducts research into learning based on how '
            'people use its platform. EdX was created for students and institutions that seek to'
            'transform themselves through cutting-edge technologies, innovative pedagogy, and '
            'rigorous courses. More than 70 schools, nonprofits, corporations, and international'
            'organizations offer or plan to offer courses on the edX website. As of 22 October 2014,'
            'edX has more than 4 million users taking more than 500 courses online.',
            press_enter=False
        )
        self.team_management_page.submit_form()
        self.assertEqual(
            self.team_management_page.validation_message_text,
            'Check the highlighted fields below and try again.'
        )
        self.assertTrue(self.team_management_page.error_for_field(field_id='name'))

    def test_user_can_create_new_team_successfully(self):
        """
        Scenario: The user should be able to create new team.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Create Team page for that topic
        Then I should see the Create Team header and form
        When I fill all the fields present with appropriate data
        And I click Create button
        Then I expect analytics events to be emitted
        And I should see the page for my team
        And I should see the message that says "You are a member of this team"
        And the new team should be added to the list of teams within the topic
        And the number of teams should be updated on the topic card
        And if I switch to "My Team", the newly created team is displayed
        """
        # Log in as a fresh (non-staff) enrolled user before creating the team.
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.browse_teams_page.visit()
        self.verify_and_navigate_to_create_team_page()
        self.fill_create_or_edit_form()
        # Creating a team should emit a created event plus a learner_added
        # event for the creator (auto-joined on create).
        expected_events = [
            {
                'event_type': 'edx.team.created'
            },
            {
                'event_type': 'edx.team.learner_added',
                'event': {
                    'add_method': 'added_on_create',
                }
            }
        ]
        with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
            self.team_management_page.submit_form()
        # Verify that the page is shown for the new team
        team_page = TeamPage(self.browser, self.course_id)
        team_page.wait_for_page()
        self.assertEqual(team_page.team_name, self.TEAMS_NAME)
        self.assertEqual(team_page.team_description, self.TEAM_DESCRIPTION)
        self.assertEqual(team_page.team_user_membership_text, 'You are a member of this team.')
        # Verify the new team was added to the topic list
        self.teams_page.click_specific_topic("Example Topic")
        self.teams_page.verify_topic_team_count(1)
        self.teams_page.click_all_topics()
        self.teams_page.verify_team_count_in_first_topic(1)
        # Verify that if one switches to "My Team" without reloading the page, the newly created team is shown.
        self.verify_my_team_count(1)

    def test_user_can_cancel_the_team_creation(self):
        """
        Scenario: The user should be able to cancel the creation of new team.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the Create Team page for that topic
        Then I should see the Create Team header and form
        When I click Cancel button
        Then I should see teams list page without any new team.
        And if I switch to "My Team", it shows no teams
        """
        self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
        self.verify_and_navigate_to_create_team_page()
        # We add a sleep here to allow time for the click event handler to bind
        # to the cancel button. Using time.sleep in bok-choy tests is,
        # generally, an anti-pattern. So don't copy this :).
        # For the story to address this anti-pattern, see TNL-5820
        time.sleep(0.5)
        self.team_management_page.cancel_team()
        self.browse_teams_page.wait_for_page()
        # Team count must be unchanged everywhere after cancelling.
        self.assertTrue(self.browse_teams_page.get_pagination_header_text().startswith('Showing 0 out of 0 total'))
        self.teams_page.click_all_topics()
        self.teams_page.verify_team_count_in_first_topic(0)
        self.verify_my_team_count(0)

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the create team page should fire a page viewed event.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the create team page
        Then my browser should post a page viewed event
        """
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'new-team',
                'topic_id': self.topic['id'],
                'team_id': None
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.verify_and_navigate_to_create_team_page()
@ddt.ddt
class DeleteTeamTest(TeamFormActions):
    """
    Tests for deleting teams.
    """

    def setUp(self):
        super(DeleteTeamTest, self).setUp()
        self.set_team_configuration(
            {'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
            global_staff=True
        )
        self.team = self.create_teams(self.topic, num_teams=1)[0]
        self.team_page = TeamPage(self.browser, self.course_id, team=self.team)
        # need to have a membership to confirm it gets deleted as well
        self.create_membership(self.user_info['username'], self.team['id'])
        self.team_page.visit()

    def test_cancel_delete(self):
        """
        Scenario: The user should be able to cancel the Delete Team dialog
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the Delete Team button
        When I click the delete team button
        And I cancel the prompt
        And I refresh the page
        Then I should still see the team
        """
        self.delete_team(cancel=True)
        self.team_management_page.wait_for_page()
        # Reload to prove the team survived the cancelled delete.
        self.browser.refresh()
        self.team_management_page.wait_for_page()
        self.assertEqual(
            ' '.join(('All Topics', self.topic['name'], self.team['name'])),
            self.team_management_page.header_page_breadcrumbs
        )

    @ddt.data('Moderator', 'Community TA', 'Administrator', None)
    def test_delete_team(self, role):
        """
        Scenario: The user should be able to see and navigate to the delete team page.
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the Delete Team button
        When I click the delete team button
        And I confirm the prompt
        Then I should see the browse teams page
        And the team should not be present
        """
        # If role is None, remain logged in as global staff
        if role is not None:
            AutoAuthPage(
                self.browser,
                course_id=self.course_id,
                staff=False,
                roles=role
            ).visit()
            self.team_page.visit()
        self.delete_team(require_notification=False)
        browse_teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
        browse_teams_page.wait_for_page()
        self.assertNotIn(self.team['name'], browse_teams_page.team_names)

    def delete_team(self, **kwargs):
        """
        Delete a team. Passes `kwargs` to `confirm_prompt`.
        Expects edx.team.deleted event to be emitted, with correct course_id.
        Also expects edx.team.learner_removed event to be emitted for the
        membership that is removed as a part of the delete operation.
        """
        self.team_page.click_edit_team_button()
        self.team_management_page.wait_for_page()
        self.team_management_page.delete_team_button.click()
        if 'cancel' in kwargs and kwargs['cancel'] is True:
            # Cancelling deletes nothing, so no events are expected.
            confirm_prompt(self.team_management_page, **kwargs)
        else:
            expected_events = [
                {
                    'event_type': 'edx.team.deleted',
                    'event': {
                        'team_id': self.team['id']
                    }
                },
                {
                    'event_type': 'edx.team.learner_removed',
                    'event': {
                        'team_id': self.team['id'],
                        'remove_method': 'team_deleted',
                        'user_id': self.user_info['user_id']
                    }
                }
            ]
            with self.assert_events_match_during(
                event_filter=self.only_team_events, expected_events=expected_events
            ):
                confirm_prompt(self.team_management_page, **kwargs)

    def test_delete_team_updates_topics(self):
        """
        Scenario: Deleting a team should update the team count on the topics page
        Given I am staff user for a course with a team
        And I delete a team
        When I navigate to the browse topics page
        Then the team count for the deleted team's topic should be updated
        """
        self.delete_team(require_notification=False)
        BrowseTeamsPage(self.browser, self.course_id, self.topic).click_all_topics()
        topics_page = BrowseTopicsPage(self.browser, self.course_id)
        topics_page.wait_for_page()
        self.teams_page.verify_topic_team_count(0)
@ddt.ddt
class EditTeamTest(TeamFormActions):
    """
    Tests for editing the team.
    """

    def setUp(self):
        super(EditTeamTest, self).setUp()
        self.set_team_configuration(
            {'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
            global_staff=True
        )
        self.team = self.create_teams(self.topic, num_teams=1)[0]
        self.team_page = TeamPage(self.browser, self.course_id, team=self.team)
        self.team_page.visit()

    def test_staff_can_navigate_to_edit_team_page(self):
        """
        Scenario: The user should be able to see and navigate to the edit team page.
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the edit team page
        And I should see the edit team header
        And I should also see the help messages for fields
        """
        self.verify_and_navigate_to_edit_team_page()
        self.verify_all_fields_exist()

    def test_staff_can_edit_team_successfully(self):
        """
        Scenario: The staff should be able to edit team successfully.
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the edit team page
        And an analytics event should be fired
        When I edit all the fields with appropriate data
        And I click Update button
        Then I should see the page for my team with updated data
        """
        # Initial values come from create_teams (country AF / language aa).
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )
        self.verify_and_navigate_to_edit_team_page()
        self.fill_create_or_edit_form()
        # One edx.team.changed event is expected per modified field.
        expected_events = [
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'country',
                    'old': 'AF',
                    'new': 'PK',
                    'truncated': [],
                }
            },
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'name',
                    'old': self.team['name'],
                    'new': self.TEAMS_NAME,
                    'truncated': [],
                }
            },
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'language',
                    'old': 'aa',
                    'new': 'en',
                    'truncated': [],
                }
            },
            {
                'event_type': 'edx.team.changed',
                'event': {
                    'team_id': self.team['id'],
                    'field': 'description',
                    'old': self.team['description'],
                    'new': self.TEAM_DESCRIPTION,
                    'truncated': [],
                }
            },
        ]
        with self.assert_events_match_during(
            event_filter=self.only_team_events,
            expected_events=expected_events,
        ):
            self.team_management_page.submit_form()
        self.team_page.wait_for_page()
        self.verify_team_info(
            name=self.TEAMS_NAME,
            description=self.TEAM_DESCRIPTION,
            location='Pakistan',
            language='English'
        )

    def test_staff_can_cancel_the_team_edit(self):
        """
        Scenario: The user should be able to cancel the editing of team.
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the edit team page
        Then I should see the Edit Team header
        When I click Cancel button
        Then I should see team page without changes.
        """
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )
        self.verify_and_navigate_to_edit_team_page()
        self.fill_create_or_edit_form()
        self.team_management_page.cancel_team()
        self.team_page.wait_for_page()
        # Nothing should have changed after cancelling.
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )

    def test_student_cannot_see_edit_button(self):
        """
        Scenario: The student should not see the edit team button.
        Given I am student for a course with a team
        When I visit the Team profile page
        Then I should not see the Edit Team button
        """
        AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.team_page.visit()
        self.assertFalse(self.team_page.edit_team_button_present)

    @ddt.data('Moderator', 'Community TA', 'Administrator')
    def test_discussion_privileged_user_can_edit_team(self, role):
        """
        Scenario: The user with specified role should see the edit team button.
        Given I am user with privileged role for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        """
        kwargs = {
            'course_id': self.course_id,
            'staff': False
        }
        if role is not None:
            kwargs['roles'] = role
        AutoAuthPage(self.browser, **kwargs).visit()
        self.team_page.visit()
        self.teams_page.wait_for_page()
        self.assertTrue(self.team_page.edit_team_button_present)
        self.verify_team_info(
            name=self.team['name'],
            description=self.team['description'],
            location='Afghanistan',
            language='Afar'
        )
        self.verify_and_navigate_to_edit_team_page()
        self.fill_create_or_edit_form()
        self.team_management_page.submit_form()
        self.team_page.wait_for_page()
        self.verify_team_info(
            name=self.TEAMS_NAME,
            description=self.TEAM_DESCRIPTION,
            location='Pakistan',
            language='English'
        )

    def test_page_viewed_event(self):
        """
        Scenario: Visiting the edit team page should fire a page viewed event.
        Given I am enrolled in a course with a team configuration and a topic
        When I visit the edit team page
        Then my browser should post a page viewed event
        """
        events = [{
            'event_type': 'edx.team.page_viewed',
            'event': {
                'page_name': 'edit-team',
                'topic_id': self.topic['id'],
                'team_id': self.team['id']
            }
        }]
        with self.assert_events_match_during(self.only_team_events, expected_events=events):
            self.verify_and_navigate_to_edit_team_page()
@ddt.ddt
class EditMembershipTest(TeamFormActions):
    """
    Tests for administrating from the team membership page
    """

    def setUp(self):
        super(EditMembershipTest, self).setUp()
        self.set_team_configuration(
            {'course_id': self.course_id, 'max_team_size': 10, 'topics': [self.topic]},
            global_staff=True
        )
        self.team_management_page = TeamManagementPage(self.browser, self.course_id, self.topic)
        self.team = self.create_teams(self.topic, num_teams=1)[0]
        # make sure a user exists on this team so we can edit the membership
        self.create_membership(self.user_info['username'], self.team['id'])
        self.edit_membership_page = EditMembershipPage(self.browser, self.course_id, self.team)
        self.team_page = TeamPage(self.browser, self.course_id, team=self.team)

    def edit_membership_helper(self, role, cancel=False):
        """
        Helper for common functionality in edit membership tests.
        Checks for all relevant assertions about membership being removed,
        including verify edx.team.learner_removed events are emitted.

        Arguments:
            role (str or None): discussion role to log in with; None keeps
                the global-staff session from setUp.
            cancel (bool): if True, dismiss the removal dialog and expect the
                membership (and event stream) to be unchanged.
        """
        if role is not None:
            AutoAuthPage(
                self.browser,
                course_id=self.course_id,
                staff=False,
                roles=role
            ).visit()
        self.team_page.visit()
        self.team_page.click_edit_team_button()
        self.team_management_page.wait_for_page()
        self.assertTrue(
            self.team_management_page.membership_button_present
        )
        self.team_management_page.click_membership_button()
        self.edit_membership_page.wait_for_page()
        self.edit_membership_page.click_first_remove()
        if cancel:
            self.edit_membership_page.cancel_delete_membership_dialog()
            self.assertEqual(self.edit_membership_page.team_members, 1)
        else:
            # Confirming removal must emit a learner_removed event attributed
            # to an admin action.
            expected_events = [
                {
                    'event_type': 'edx.team.learner_removed',
                    'event': {
                        'team_id': self.team['id'],
                        'remove_method': 'removed_by_admin',
                        'user_id': self.user_info['user_id']
                    }
                }
            ]
            with self.assert_events_match_during(
                event_filter=self.only_team_events, expected_events=expected_events
            ):
                self.edit_membership_page.confirm_delete_membership_dialog()
            self.assertEqual(self.edit_membership_page.team_members, 0)
        self.edit_membership_page.wait_for_page()

    @ddt.data('Moderator', 'Community TA', 'Administrator', None)
    def test_remove_membership(self, role):
        """
        Scenario: The user should be able to remove a membership
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the Edit Membership button
        And When I click the edit membership button
        Then I should see the edit membership page
        And When I click the remove button and confirm the dialog
        Then my membership should be removed, and I should remain on the page
        """
        self.edit_membership_helper(role, cancel=False)

    @ddt.data('Moderator', 'Community TA', 'Administrator', None)
    def test_cancel_remove_membership(self, role):
        """
        Scenario: The user should be able to cancel removing a membership
        Given I am staff user for a course with a team
        When I visit the Team profile page
        Then I should see the Edit Team button
        And When I click edit team button
        Then I should see the Edit Membership button
        And When I click the edit membership button
        Then I should see the edit membership page
        And When I click the remove button and cancel the dialog
        Then my membership should not be removed, and I should remain on the page
        """
        self.edit_membership_helper(role, cancel=True)
@attr(shard=5)
@ddt.ddt
class TeamPageTest(TeamsTabBase):
    """Tests for viewing a specific team"""

    # Invite text shown on the team detail page for members.
    SEND_INVITE_TEXT = 'Send this link to friends so that they can join too.'

    def setUp(self):
        super(TeamPageTest, self).setUp()
        self.topic = {u"name": u"Example Topic", u"id": "example_topic", u"description": "Description"}
    def _set_team_configuration_and_membership(
            self,
            max_team_size=10,
            membership_team_index=0,
            visit_team_index=0,
            create_membership=True,
            another_user=False):
        """
        Set team configuration, create two teams, and optionally join one.

        Arguments:
            max_team_size (int): number of users a team can have
            membership_team_index (int): index of team user will join
            visit_team_index (int): index of team user will visit
            create_membership (bool): whether to create membership or not
            another_user (bool): log in another user to visit a team
        """
        # pylint: disable=attribute-defined-outside-init
        self.set_team_configuration(
            {'course_id': self.course_id, 'max_team_size': max_team_size, 'topics': [self.topic]}
        )
        # Always create two teams so join/visit indices can differ.
        self.teams = self.create_teams(self.topic, 2)
        if create_membership:
            self.create_membership(self.user_info['username'], self.teams[membership_team_index]['id'])
        if another_user:
            AutoAuthPage(self.browser, course_id=self.course_id).visit()
        self.team_page = TeamPage(self.browser, self.course_id, self.teams[visit_team_index])
def setup_thread(self):
"""
Create and return a thread for this test's discussion topic.
"""
thread = Thread(
id="test_thread_{}".format(uuid4().hex),
commentable_id=self.teams[0]['discussion_topic_id'],
body="Dummy text body."
)
thread_fixture = MultipleThreadFixture([thread])
thread_fixture.push()
return thread
def setup_discussion_user(self, role=None, staff=False):
"""Set this test's user to have the given role in its
discussions. Role is one of 'Community TA', 'Moderator',
'Administrator', or 'Student'.
"""
kwargs = {
'course_id': self.course_id,
'staff': staff
}
if role is not None:
kwargs['roles'] = role
#pylint: disable=attribute-defined-outside-init
self.user_info = AutoAuthPage(self.browser, **kwargs).visit().user_info
    def verify_teams_discussion_permissions(self, should_have_permission):
        """Verify that the teams discussion component is in the correct state
        for the test user. If `should_have_permission` is True, assert that
        the user can see controls for posting replies, voting, editing, and
        deleting. Otherwise, assert that those controls are hidden.
        """
        thread = self.setup_thread()
        self.team_page.visit()
        # The embedded discussion must be bound to the team's own topic.
        self.assertEqual(self.team_page.discussion_id, self.teams[0]['discussion_topic_id'])
        discussion = self.team_page.discussion_page
        discussion.wait_for_page()
        self.assertTrue(discussion.is_discussion_expanded())
        self.assertEqual(discussion.get_num_displayed_threads(), 1)
        self.assertTrue(discussion.has_thread(thread['id']))
        # Pick the assertion direction once, then check both control groups.
        assertion = self.assertTrue if should_have_permission else self.assertFalse
        assertion(discussion.q(css='.post-header-actions').present)
        assertion(discussion.q(css='.add-response').present)
    def test_discussion_on_my_team_page(self):
        """
        Scenario: Team Page renders a discussion for a team to which I belong.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am a member
        When the team has a discussion with a thread
        And I visit the Team page for that team
        Then I should see a discussion with the correct discussion_id
        And I should see the existing thread
        And I should see controls to change the state of the discussion
        """
        self._set_team_configuration_and_membership()
        self.verify_teams_discussion_permissions(True)
    @ddt.data(True, False)
    def test_discussion_on_other_team_page(self, is_staff):
        """
        Scenario: Team Page renders a team discussion for a team to which I do
            not belong.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am not a member
        When the team has a discussion with a thread
        And I visit the Team page for that team
        Then I should see a discussion with the correct discussion_id
        And I should see the team's thread
        And I should not see controls to change the state of the discussion
        """
        self._set_team_configuration_and_membership(create_membership=False)
        # Even course staff should lack posting controls on a team they
        # are not a member of (unless they hold a discussion role).
        self.setup_discussion_user(staff=is_staff)
        self.verify_teams_discussion_permissions(False)
    @ddt.data('Moderator', 'Community TA', 'Administrator')
    def test_discussion_privileged(self, role):
        """
        Users with a privileged discussion role should see discussion
        controls on a team page even without being a member of the team.
        """
        self._set_team_configuration_and_membership(create_membership=False)
        self.setup_discussion_user(role=role)
        self.verify_teams_discussion_permissions(True)
    def assert_team_details(self, num_members, is_member=True, max_size=10):
        """
        Verifies that user can see all the information, present on detail page according to their membership status.

        Arguments:
            num_members (int): number of users in a team
            is_member (bool) default True: True if request user is member else False
            max_size (int): number of users a team can have
        """
        self.assertEqual(
            self.team_page.team_capacity_text,
            self.team_page.format_capacity_text(num_members, max_size)
        )
        # Location/language match the defaults used by create_teams (AF / aa).
        self.assertEqual(self.team_page.team_location, 'Afghanistan')
        self.assertEqual(self.team_page.team_language, 'Afar')
        self.assertEqual(self.team_page.team_members, num_members)
        if num_members > 0:
            self.assertTrue(self.team_page.team_members_present)
        else:
            self.assertFalse(self.team_page.team_members_present)
        if is_member:
            # Members see the membership banner, Leave Team, and New Post.
            self.assertEqual(self.team_page.team_user_membership_text, 'You are a member of this team.')
            self.assertTrue(self.team_page.team_leave_link_present)
            self.assertTrue(self.team_page.new_post_button_present)
        else:
            self.assertEqual(self.team_page.team_user_membership_text, '')
            self.assertFalse(self.team_page.team_leave_link_present)
            self.assertFalse(self.team_page.new_post_button_present)
    def test_team_member_can_see_full_team_details(self):
        """
        Scenario: Team member can see full info for team.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am a member
        When I visit the Team page for that team
        Then I should see the full team detail
        And I should see the team members
        And I should see my team membership text
        And I should see the language & country
        And I should see the Leave Team and Invite Team
        """
        self._set_team_configuration_and_membership()
        self.team_page.visit()
        self.assert_team_details(
            num_members=1,
        )
    def test_other_users_can_see_limited_team_details(self):
        """
        Scenario: Users who are not member of this team can only see limited info for this team.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am not a member
        When I visit the Team page for that team
        Then I should not see full team detail
        And I should see the team members
        And I should not see my team membership text
        And I should not see the Leave Team and Invite Team links
        """
        self._set_team_configuration_and_membership(create_membership=False)
        self.team_page.visit()
        self.assert_team_details(is_member=False, num_members=0)
    def test_user_can_navigate_to_members_profile_page(self):
        """
        Scenario: User can navigate to profile page via team member profile image.
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic of which I am a member
        When I visit the Team page for that team
        Then I should see profile images for the team members
        When I click on the first profile image
        Then I should be taken to the user's profile page
        And I should see the username on profile page
        """
        self._set_team_configuration_and_membership()
        self.team_page.visit()
        # Capture the username before navigating away from the team page.
        learner_name = self.team_page.first_member_username
        self.team_page.click_first_profile_image()
        learner_profile_page = LearnerProfilePage(self.browser, learner_name)
        learner_profile_page.wait_for_page()
        learner_profile_page.wait_for_field('username')
        self.assertTrue(learner_profile_page.field_is_visible('username'))
    def test_join_team(self):
        """
        Scenario: User can join a Team if not a member already..
        Given I am enrolled in a course with a team configuration, a topic,
            and a team belonging to that topic
        And I visit the Team page for that team
        Then I should see Join Team button
        And I should not see New Post button
        When I click on Join Team button
        Then there should be no Join Team button and no message
        And an analytics event should be emitted
        And I should see the updated information under Team Details
        And I should see New Post button
        And if I switch to "My Team", the team I have joined is displayed
        """
        self._set_team_configuration_and_membership(create_membership=False)
        teams_page = BrowseTeamsPage(self.browser, self.course_id, self.topic)
        teams_page.visit()
        teams_page.view_first_team()
        self.assertTrue(self.team_page.join_team_button_present)
        # Joining from the team detail view emits learner_added with this
        # specific add_method.
        expected_events = [
            {
                'event_type': 'edx.team.learner_added',
                'event': {
                    'add_method': 'joined_from_team_view'
                }
            }
        ]
        with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=expected_events):
            self.team_page.click_join_team_button()
        self.assertFalse(self.team_page.join_team_button_present)
        self.assertFalse(self.team_page.join_team_message_present)
        self.assert_team_details(num_members=1, is_member=True)
        # Verify that if one switches to "My Team" without reloading the page, the newly joined team is shown.
        self.teams_page.click_all_topics()
        self.verify_my_team_count(1)
def test_already_member_message(self):
    """
    Scenario: User should see `You are already in a team` if user is a
        member of other team.
    Given I am enrolled in a course with a team configuration, a topic,
        and a team belonging to that topic
    And I am already a member of a team
    And I visit a team other than mine
    Then I should see `You are already in a team` message
    """
    # Join team 0, then visit team 1 (a team the learner does not belong to).
    self._set_team_configuration_and_membership(membership_team_index=0, visit_team_index=1)
    self.team_page.visit()

    # The page should explain that the learner already belongs to another
    # team, and the visited team's member count should not include them.
    self.assertEqual('You already belong to another team.', self.team_page.join_team_message)
    self.assert_team_details(num_members=0, is_member=False)
def test_team_full_message(self):
    """
    Scenario: User should see `Team is full` message when team is full.
    Given I am enrolled in a course with a team configuration, a topic,
        and a team belonging to that topic
    And team has no space left
    And I am not a member of any team
    And I visit the team
    Then I should see `Team is full` message
    """
    # Fill the team to capacity (max size 1) with a different user so that
    # the learner under test cannot join.
    setup_kwargs = dict(
        create_membership=True,
        max_team_size=1,
        membership_team_index=0,
        visit_team_index=0,
        another_user=True,
    )
    self._set_team_configuration_and_membership(**setup_kwargs)
    self.team_page.visit()

    self.assertEqual('This team is full.', self.team_page.join_team_message)
    self.assert_team_details(num_members=1, is_member=False, max_size=1)
def test_leave_team(self):
    """
    Scenario: User can leave a team.
    Given I am enrolled in a course with a team configuration, a topic,
        and a team belonging to that topic
    And I am a member of team
    And I visit the team
    And I should not see Join Team button
    And I should see New Post button
    Then I should see Leave Team link
    When I click on Leave Team link
    Then user should be removed from team
    And an analytics event should be emitted
    And I should see Join Team button
    And I should not see New Post button
    And if I switch to "My Team", the team I have left is not displayed
    """
    self._set_team_configuration_and_membership()
    self.team_page.visit()
    self.assertFalse(self.team_page.join_team_button_present)
    self.assert_team_details(num_members=1)

    # Leaving should emit exactly one learner_removed analytics event.
    removal_events = [{
        'event_type': 'edx.team.learner_removed',
        'event': {'remove_method': 'self_removal'},
    }]
    with self.assert_events_match_during(event_filter=self.only_team_events, expected_events=removal_events):
        # The "leave team" link can render before its JavaScript click
        # handler is bound (same race as in
        # CreateTeamTest.test_user_can_see_error_message_for_missing_data).
        # Sleeping briefly gives the handler time to attach before the
        # click. The story to remove this anti-pattern is TNL-5820.
        time.sleep(0.5)
        self.team_page.click_leave_team_link()

    self.assert_team_details(num_members=0, is_member=False)
    self.assertTrue(self.team_page.join_team_button_present)

    # Without reloading the page, "My Team" should no longer list the team.
    self.teams_page.click_all_topics()
    self.verify_my_team_count(0)
def test_page_viewed_event(self):
    """
    Scenario: Visiting the team profile page should fire a page viewed event.
    Given I am enrolled in a course with a team configuration and a topic
    When I visit the team profile page
    Then my browser should post a page viewed event
    """
    self._set_team_configuration_and_membership()

    # The page-view event must identify the page, topic, and team visited.
    viewed_event = {
        'event_type': 'edx.team.page_viewed',
        'event': {
            'page_name': 'single-team',
            'topic_id': self.topic['id'],
            'team_id': self.teams[0]['id'],
        },
    }
    with self.assert_events_match_during(self.only_team_events, expected_events=[viewed_event]):
        self.team_page.visit()
| itsjeyd/edx-platform | common/test/acceptance/tests/lms/test_teams.py | Python | agpl-3.0 | 84,795 |
using System.Web.Mvc;
namespace WebMvc.Areas.Controls
{
    /// <summary>
    /// Registers the "Controls" MVC area and its default route with the
    /// routing engine at application start-up.
    /// </summary>
    public class ControlsAreaRegistration : AreaRegistration
    {
        /// <summary>Gets the name of the area being registered.</summary>
        public override string AreaName
        {
            get { return "Controls"; }
        }

        /// <summary>
        /// Maps the area's default route: Controls/{controller}/{action}/{id},
        /// with "Index" as the default action and an optional id segment.
        /// </summary>
        /// <param name="context">Registration context supplied by MVC.</param>
        public override void RegisterArea(AreaRegistrationContext context)
        {
            const string routeName = "Controls_default";
            const string urlPattern = "Controls/{controller}/{action}/{id}";

            context.MapRoute(
                routeName,
                urlPattern,
                new { action = "Index", id = UrlParameter.Optional }
            );
        }
    }
}
| Siyy/RoadFlow | src/RoadFlow/WebMvc/Areas/Controls/ControlsAreaRegistration.cs | C# | agpl-3.0 | 599 |
<?php
// Copyright (C) 2010-2017 Combodo SARL
//
// This file is part of iTop.
//
// iTop is free software; you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// iTop is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
//
// You should have received a copy of the GNU Affero General Public License
// along with iTop. If not, see <http://www.gnu.org/licenses/>
/**
* @copyright Copyright (C) 2010-2017 Combodo SARL
* @license http://opensource.org/licenses/AGPL-3.0
*/
Dict::Add('FR FR', 'French', 'Français', array(
'Class:AuditRule' => 'Règle d\'audit',
'Class:AuditRule+' => '',
'Class:AuditRule/Attribute:name' => 'Nom',
'Class:AuditRule/Attribute:name+' => '',
'Class:AuditRule/Attribute:description' => 'Description',
'Class:AuditRule/Attribute:description+' => '',
'Class:AuditRule/Attribute:query' => 'Requête',
'Class:AuditRule/Attribute:query+' => 'Expression OQL de calcul des éléments incorrects',
'Class:AuditRule/Attribute:valid_flag' => 'Interprétation',
'Class:AuditRule/Attribute:valid_flag+' => 'La requête définit-elle les éléments valides ?',
'Class:AuditRule/Attribute:valid_flag/Value:false' => 'Objets incorrects',
'Class:AuditRule/Attribute:valid_flag/Value:false+' => '',
'Class:AuditRule/Attribute:valid_flag/Value:true' => 'Objets valides',
'Class:AuditRule/Attribute:valid_flag/Value:true+' => '',
'Class:AuditRule/Attribute:category_id' => 'Catégorie',
'Class:AuditRule/Attribute:category_id+' => '',
'Class:AuditRule/Attribute:category_name' => 'Categorie',
'Class:AuditRule/Attribute:category_name+' => '',
'Class:AuditCategory' => 'Catégorie d\'audit',
'Class:AuditCategory+' => 'Une section de l\'audit',
'Class:AuditCategory/Attribute:name' => 'Nom',
'Class:AuditCategory/Attribute:name+' => 'Nom raccourci',
'Class:AuditCategory/Attribute:description' => 'Description',
'Class:AuditCategory/Attribute:description+' => 'Description',
'Class:AuditCategory/Attribute:definition_set' => 'Ensemble de définition',
'Class:AuditCategory/Attribute:definition_set+' => 'Expression OQL qui défini le périmètre d\'application de l\'audit',
'Class:AuditCategory/Attribute:rules_list' => 'Règles d\'audit',
'Class:AuditCategory/Attribute:rules_list+' => 'Règles d\'audit pour cette catégorie',
'Class:Query' => 'Requête',
'Class:Query+' => 'Une requête définit un ensemble d\'information de manière dynamique',
'Class:Query/Attribute:name' => 'Nom',
'Class:Query/Attribute:name+' => 'Identification de la requête',
'Class:Query/Attribute:description' => 'Description',
'Class:Query/Attribute:description+' => 'Description complète (finalité, utilisations, public)',
'Class:Query/Attribute:fields' => 'Champs',
'Class:Query/Attribute:fields+' => 'Liste CSV des attributs (ou alias.attribut) à exporter',
'Class:QueryOQL' => 'Requête OQL',
'Class:QueryOQL+' => 'Une requête écrite dans le langage "Object Query Language"',
'Class:QueryOQL/Attribute:oql' => 'Expression',
'Class:QueryOQL/Attribute:oql+' => 'Expression OQL',
'Class:URP_Profiles' => 'Profil',
'Class:URP_Profiles+' => 'Profil utilisateur',
'Class:URP_Profiles/Attribute:name' => 'Nom',
'Class:URP_Profiles/Attribute:name+' => '',
'Class:URP_Profiles/Attribute:description' => 'Description',
'Class:URP_Profiles/Attribute:description+' => '',
'Class:URP_Profiles/Attribute:user_list' => 'Utilisateurs',
'Class:URP_Profiles/Attribute:user_list+' => 'Comptes utilisateur (logins) ayant ce profil',
'Class:URP_UserProfile' => 'Utilisateur/Profil',
'Class:URP_UserProfile+' => '',
'Class:URP_UserProfile/Attribute:userid' => 'Utilisateur',
'Class:URP_UserProfile/Attribute:userid+' => '',
'Class:URP_UserProfile/Attribute:userlogin' => 'Login',
'Class:URP_UserProfile/Attribute:userlogin+' => '',
'Class:URP_UserProfile/Attribute:profileid' => 'Profil',
'Class:URP_UserProfile/Attribute:profileid+' => '',
'Class:URP_UserProfile/Attribute:profile' => 'Profil',
'Class:URP_UserProfile/Attribute:profile+' => '',
'Class:URP_UserProfile/Attribute:reason' => 'Raison',
'Class:URP_UserProfile/Attribute:reason+' => 'Justifie le rôle affecté à cet utilisateur',
'Class:URP_UserOrg' => 'Utilisateur/Organisation',
'Class:URP_UserOrg+' => 'Organisations permises pour l\'utilisateur',
'Class:URP_UserOrg/Attribute:userid' => 'Utilisateur',
'Class:URP_UserOrg/Attribute:userid+' => '',
'Class:URP_UserOrg/Attribute:userlogin' => 'Login',
'Class:URP_UserOrg/Attribute:userlogin+' => '',
'Class:URP_UserOrg/Attribute:allowed_org_id' => 'Organisation',
'Class:URP_UserOrg/Attribute:allowed_org_id+' => '',
'Class:URP_UserOrg/Attribute:allowed_org_name' => 'Organisation',
'Class:URP_UserOrg/Attribute:allowed_org_name+' => '',
'Class:URP_UserOrg/Attribute:reason' => 'Raison',
'Class:URP_UserOrg/Attribute:reason+' => 'Justifie la permission de voir les données de cette organisation',
'Class:URP_ActionGrant' => 'action_permission',
'Class:URP_ActionGrant+' => 'permissions on classes',
'Class:URP_ActionGrant/Attribute:profileid' => 'Profile',
'Class:URP_ActionGrant/Attribute:profileid+' => 'usage profile',
'Class:URP_ActionGrant/Attribute:profile' => 'Profile',
'Class:URP_ActionGrant/Attribute:profile+' => 'usage profile',
'Class:URP_ActionGrant/Attribute:class' => 'Class',
'Class:URP_ActionGrant/Attribute:class+' => 'Target class',
'Class:URP_ActionGrant/Attribute:permission' => 'Permission',
'Class:URP_ActionGrant/Attribute:permission+' => 'allowed or not allowed?',
'Class:URP_ActionGrant/Attribute:permission/Value:no' => 'non',
'Class:URP_ActionGrant/Attribute:permission/Value:no+' => 'non',
'Class:URP_ActionGrant/Attribute:permission/Value:yes' => 'oui',
'Class:URP_ActionGrant/Attribute:permission/Value:yes+' => 'oui',
'Class:URP_ActionGrant/Attribute:action' => 'Action',
'Class:URP_ActionGrant/Attribute:action+' => 'operations to perform on the given class',
'Class:URP_StimulusGrant' => 'stimulus_permission',
'Class:URP_StimulusGrant+' => 'permissions on stimilus in the life cycle of the object',
'Class:URP_StimulusGrant/Attribute:profileid' => 'Profile',
'Class:URP_StimulusGrant/Attribute:profileid+' => 'usage profile',
'Class:URP_StimulusGrant/Attribute:profile' => 'Profile',
'Class:URP_StimulusGrant/Attribute:profile+' => 'usage profile',
'Class:URP_StimulusGrant/Attribute:class' => 'Class',
'Class:URP_StimulusGrant/Attribute:class+' => 'Target class',
'Class:URP_StimulusGrant/Attribute:permission' => 'Permission',
'Class:URP_StimulusGrant/Attribute:permission+' => 'allowed or not allowed?',
'Class:URP_StimulusGrant/Attribute:permission/Value:no' => 'no',
'Class:URP_StimulusGrant/Attribute:permission/Value:no+' => 'no',
'Class:URP_StimulusGrant/Attribute:permission/Value:yes' => 'yes',
'Class:URP_StimulusGrant/Attribute:permission/Value:yes+' => 'yes',
'Class:URP_StimulusGrant/Attribute:stimulus' => 'Stimulus',
'Class:URP_StimulusGrant/Attribute:stimulus+' => 'stimulus code',
'Class:URP_AttributeGrant' => 'attribute_permission',
'Class:URP_AttributeGrant+' => 'permissions at the attributes level',
'Class:URP_AttributeGrant/Attribute:actiongrantid' => 'Action grant',
'Class:URP_AttributeGrant/Attribute:actiongrantid+' => 'action grant',
'Class:URP_AttributeGrant/Attribute:attcode' => 'Attribute',
'Class:URP_AttributeGrant/Attribute:attcode+' => 'attribute code',
'Class:User' => 'Utilisateur',
'Class:User+' => 'Compte utilisateur',
'Class:User/Attribute:finalclass' => 'Type de compte',
'Class:User/Attribute:finalclass+' => 'Nom de la classe instanciable',
'Class:User/Attribute:contactid' => 'Contact (personne)',
'Class:User/Attribute:contactid+' => '',
'Class:User/Attribute:last_name' => 'Nom',
'Class:User/Attribute:last_name+' => '',
'Class:User/Attribute:first_name' => 'Prénom',
'Class:User/Attribute:first_name+' => '',
'Class:User/Attribute:email' => 'Adresse email',
'Class:User/Attribute:email+' => '',
'Class:User/Attribute:login' => 'Login',
'Class:User/Attribute:login+' => '',
'Class:User/Attribute:language' => 'Langue',
'Class:User/Attribute:language+' => '',
'Class:User/Attribute:language/Value:EN US' => 'Anglais',
'Class:User/Attribute:language/Value:EN US+' => 'Anglais (Etats-unis)',
'Class:User/Attribute:language/Value:FR FR' => 'Français',
'Class:User/Attribute:language/Value:FR FR+' => 'Français (France)',
'Class:User/Attribute:profile_list' => 'Profils',
'Class:User/Attribute:profile_list+' => 'Rôles, ouvrants les droits d\'accès',
'Class:User/Attribute:allowed_org_list' => 'Organisations permises',
'Class:User/Attribute:allowed_org_list+' => 'L\'utilisateur a le droit de voir les données des organisations listées ici. Si aucune organisation n\'est spécifiée, alors aucune restriction ne s\'applique.',
'Class:User/Attribute:status' => 'Etat',
'Class:User/Attribute:status+' => 'Est-ce que ce compte utilisateur est actif, ou non?',
'Class:User/Attribute:status/Value:enabled' => 'Actif',
'Class:User/Attribute:status/Value:disabled' => 'Désactivé',
'Class:User/Error:LoginMustBeUnique' => 'Le login doit être unique - "%1s" est déjà utilisé.',
'Class:User/Error:AtLeastOneProfileIsNeeded' => 'L\'utilisateur doit avoir au moins un profil.',
'Class:User/Error:AtLeastOneOrganizationIsNeeded' => 'L\'utilisateur doit avoir au moins une organisation.',
'Class:User/Error:OrganizationNotAllowed' => 'Organisation non autorisée.',
'Class:User/Error:UserOrganizationNotAllowed' => 'L\'utilisateur n\'appartient pas à vos organisations.',
'Class:UserInternal' => 'Utilisateur interne',
'Class:UserInternal+' => 'Utilisateur défini dans iTop',
'Class:URP_Dimensions' => 'Dimension',
'Class:URP_Dimensions+' => 'Dimension applicative (défini des silos)',
'Class:URP_Dimensions/Attribute:name' => 'Nom',
'Class:URP_Dimensions/Attribute:name+' => '',
'Class:URP_Dimensions/Attribute:description' => 'Description',
'Class:URP_Dimensions/Attribute:description+' => '',
'Class:URP_Dimensions/Attribute:type' => 'Type',
'Class:URP_Dimensions/Attribute:type+' => 'Nom de classe ou type de données (unité de projection)',
'Class:URP_ProfileProjection' => 'profile_projection',
'Class:URP_ProfileProjection+' => 'profile projections',
'Class:URP_ProfileProjection/Attribute:dimensionid' => 'Dimension',
'Class:URP_ProfileProjection/Attribute:dimensionid+' => 'application dimension',
'Class:URP_ProfileProjection/Attribute:dimension' => 'Dimension',
'Class:URP_ProfileProjection/Attribute:dimension+' => 'application dimension',
'Class:URP_ProfileProjection/Attribute:profileid' => 'Profile',
'Class:URP_ProfileProjection/Attribute:profileid+' => 'usage profile',
'Class:URP_ProfileProjection/Attribute:profile' => 'Profile',
'Class:URP_ProfileProjection/Attribute:profile+' => 'Profile name',
'Class:URP_ProfileProjection/Attribute:value' => 'Value expression',
'Class:URP_ProfileProjection/Attribute:value+' => 'OQL expression (using $user) | constant | | +attribute code',
'Class:URP_ProfileProjection/Attribute:attribute' => 'Attribute',
'Class:URP_ProfileProjection/Attribute:attribute+' => 'Target attribute code (optional)',
'Class:URP_ClassProjection' => 'class_projection',
'Class:URP_ClassProjection+' => 'class projections',
'Class:URP_ClassProjection/Attribute:dimensionid' => 'Dimension',
'Class:URP_ClassProjection/Attribute:dimensionid+' => 'application dimension',
'Class:URP_ClassProjection/Attribute:dimension' => 'Dimension',
'Class:URP_ClassProjection/Attribute:dimension+' => 'application dimension',
'Class:URP_ClassProjection/Attribute:class' => 'Class',
'Class:URP_ClassProjection/Attribute:class+' => 'Target class',
'Class:URP_ClassProjection/Attribute:value' => 'Value expression',
'Class:URP_ClassProjection/Attribute:value+' => 'OQL expression (using $this) | constant | | +attribute code',
'Class:URP_ClassProjection/Attribute:attribute' => 'Attribute',
'Class:URP_ClassProjection/Attribute:attribute+' => 'Target attribute code (optional)',
'BooleanLabel:yes' => 'oui',
'BooleanLabel:no' => 'non',
'UI:Login:Title' => 'iTop login',
'Menu:WelcomeMenu' => 'Bienvenue', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:WelcomeMenu+' => 'Bienvenue dans iTop', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:WelcomeMenuPage' => 'Bienvenue', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:WelcomeMenuPage+' => 'Bienvenue dans iTop', // Duplicated into itop-welcome-itil (will be removed from here...)
'UI:WelcomeMenu:Title' => 'Bienvenue dans iTop',
'UI:WelcomeMenu:LeftBlock' => '<p>iTop est un portail opérationnel complet et libre pour gérer votre SI.</p>
<ul>il contient:
<li>Une base de gestion des configuration (CMDB - Configuration management database) pour documenter et gérer votre parc informatique.</li>
<li>Un module de gestion des incidents pour suivre les incidents d\'exploitation et gérer la communication à propos de ces incidents.</li>
<li>Un module de gestion des changements pour planifier et suivre les modifications de votre SI.</li>
<li>Une base des erreurs connues, pour accélérer la résolution des incidents.</li>
<li>Un module de gestion de la maintenance pour documenter les maintenances planifiées et informer les contacts appropriés.</li>
<li>Des tableaux de bord pour avoir rapidement une vue synthétique de votre SI.</li>
</ul>
<p>Tous ces modules peuvent être installés séparément, à votre rythme.</p>',
'UI:WelcomeMenu:RightBlock' => '<p>iTop a été conçu pour les fournisseurs de service, il permet à vos équipes IT de gérer facilement de multiples clients et organisations.
<ul>iTop fournit un riche ensemble de processus métier pour:
<li>Augmenter l\'efficacité de la gestion de votre SI</li>
<li>Accroitre la performance de vos équipes d\'exploitation</li>
<li>Améliorer la satisfaction client et fournir aux responsables des vues sur la performance interne du SI.</li>
</ul>
</p>
<p>iTop est complètement ouvert pour s\'intéger avec votre environnement informatique.</p>
<p>
<ul>Grâce à ce portail opérationnel de nouvelle génération:
<li>Gérez un environnement informatique de plus en plus complexe.</li>
<li>Mettez en place la méthodologie ITIL à votre rythme.</li>
<li>Contrôlez l\'actif le plus important de votre SI : la documentation.</li>
</ul>
</p>',
'UI:WelcomeMenu:AllOpenRequests' => 'Requêtes en cours: %1$d',
'UI:WelcomeMenu:MyCalls' => 'Mes Appels Support',
'UI:WelcomeMenu:OpenIncidents' => 'Incidents en cours: %1$d',
'UI:WelcomeMenu:AllConfigItems' => 'Actifs: %1$d',
'UI:WelcomeMenu:MyIncidents' => 'Mes Incidents',
'UI:AllOrganizations' => ' Toutes les Organisations ',
'UI:YourSearch' => 'Votre recherche',
'UI:LoggedAsMessage' => 'Connecté comme: %1$s',
'UI:LoggedAsMessage+Admin' => 'Connecté comme: %1$s (Administrateur)',
'UI:Button:Logoff' => 'Déconnexion',
'UI:Button:GlobalSearch' => 'Rechercher',
'UI:Button:Search' => 'Rechercher',
'UI:Button:Query' => ' Lancer la requête ',
'UI:Button:Save' => 'Sauver',
'UI:Button:Ok' => 'Ok',
'UI:Button:Cancel' => 'Annuler',
'UI:Button:Apply' => 'Appliquer',
'UI:Button:Back' => ' << Retour ',
'UI:Button:Restart' => ' |<< Recommencer ',
'UI:Button:Next' => ' Suite >> ',
'UI:Button:Finish' => ' Terminer ',
'UI:Button:DoImport' => ' Lancer l\'import ! ',
'UI:Button:Done' => ' Terminé ',
'UI:Button:SimulateImport' => ' Simuler l\'import ',
'UI:Button:Test' => 'Tester !',
'UI:Button:Evaluate' => ' Exécuter ',
'UI:Button:Evaluate:Title' => ' Exécuter (Ctrl+Entrée)',
'UI:Button:AddObject' => ' Ajouter... ',
'UI:Button:BrowseObjects' => ' Naviguer... ',
'UI:Button:Add' => ' Ajouter ',
'UI:Button:AddToList' => ' << Ajouter ',
'UI:Button:RemoveFromList' => ' Enlever >> ',
'UI:Button:FilterList' => ' Filtrer... ',
'UI:Button:Create' => ' Créer ',
'UI:Button:Delete' => ' Supprimer ! ',
'UI:Button:Rename' => ' Renommer... ',
'UI:Button:ChangePassword' => ' Changer ! ',
'UI:Button:ResetPassword' => ' Ràz du mot de passe ',
'UI:Button:Insert' => 'Insérer',
'UI:Button:More' => 'Plus',
'UI:Button:Less' => 'Moins',
'UI:SearchToggle' => 'Recherche',
'UI:ClickToCreateNew' => 'Créer un nouvel objet de type %1$s',
'UI:SearchFor_Class' => 'Rechercher des objets de type %1$s',
'UI:NoObjectToDisplay' => 'Aucun objet à afficher.',
'UI:Error:MandatoryTemplateParameter_object_id' => 'Le paramètre object_id est obligatoire quand link_attr est spécifié. Vérifiez la définition du modèle.',
'UI:Error:MandatoryTemplateParameter_target_attr' => 'Le paramètre taarget_attr est obligatoire quand link_attr est spécifié. Vérifiez la définition du modèle.',
'UI:Error:MandatoryTemplateParameter_group_by' => 'Le paramètre group_by est obligatoire. Vérifiez la définition du modèle.',
'UI:Error:InvalidGroupByFields' => 'Liste des champs "group by" incorrecte: "%1$s".',
'UI:Error:UnsupportedStyleOfBlock' => 'Erreur: style de bloc("%1$s") inconnu.',
'UI:Error:IncorrectLinkDefinition_LinkedClass_Class' => 'la définition du lien est incorrecte: la classe d\'objets à gérer: %1$s n\'est référencée par aucune clef externe de la classe %2$s',
'UI:Error:Object_Class_Id_NotFound' => 'L\'objet: %1$s:%2$d est introuvable.',
'UI:Error:WizardCircularReferenceInDependencies' => 'Erreur: Référence circulaire entre les dépendences entre champs, vérifiez le modèle de données.',
'UI:Error:UploadedFileTooBig' => 'Le fichier téléchargé est trop gros. (La taille maximale autorisée est %1$s). Pour modifier cette limite contactez votre administrateur iTop. (Réglages upload_max_filesize et post_max_size dans la configuration PHP sur le serveur)',
'UI:Error:UploadedFileTruncated.' => 'Le fichier téléchargé a été tronqué !',
'UI:Error:NoTmpDir' => 'Il n\'y a aucun répertoire temporaire de défini.',
'UI:Error:CannotWriteToTmp_Dir' => 'Impossible d\'écrire le fichier temporaire sur disque. upload_tmp_dir = "%1$s".',
'UI:Error:UploadStoppedByExtension_FileName' => 'Téléchargement arrêté à cause de l\'extension. (Nom du fichier original = "%1$s").',
'UI:Error:UploadFailedUnknownCause_Code' => 'Le téléchargement a échoué pour une raison inconnue. (Code d\'erreur = "%1$s").',
'UI:Error:1ParametersMissing' => 'Erreur: Pour effectuer cette opération il manque le paramètre suivant: %1$s.',
'UI:Error:2ParametersMissing' => 'Erreur: Pour effectuer cette opération il manque les paramètres suivants: %1$s and %2$s.',
'UI:Error:3ParametersMissing' => 'Erreur: Pour effectuer cette opération il manque les paramètres suivants: %1$s, %2$s and %3$s.',
'UI:Error:4ParametersMissing' => 'Erreur: Pour effectuer cette opération il manque les paramètres suivants: %1$s, %2$s, %3$s and %4$s.',
'UI:Error:IncorrectOQLQuery_Message' => 'Erreur: requête OQL incorrecte: %1$s',
'UI:Error:AnErrorOccuredWhileRunningTheQuery_Message' => 'Une erreur s\'est produite en exécutant la requête: %1$s',
'UI:Error:ObjectAlreadyUpdated' => 'Erreur: l\'objet a déjà été mis à jour.',
'UI:Error:ObjectCannotBeUpdated' => 'Erreur: l\'objet ne peut pas être mis à jour.',
'UI:Error:ObjectsAlreadyDeleted' => 'Erreur: les objets ont déjà été supprimés !',
'UI:Error:BulkDeleteNotAllowedOn_Class' => 'Vous n\'êtes pas autorisé à faire une suppression massive sur les objets de type %1$s',
'UI:Error:DeleteNotAllowedOn_Class' => 'Vous n\'êtes pas autorisé supprimer des objets de type %1$s',
'UI:Error:BulkModifyNotAllowedOn_Class' => 'Vous n\'êtes pas autorisé à faire une modification massive sur les objets de type %1$s',
'UI:Error:ObjectAlreadyCloned' => 'Erreur: l\'objet a déjà été dupliqué !',
'UI:Error:ObjectAlreadyCreated' => 'Erreur: l\'objet a déjà été créé !',
'UI:Error:Invalid_Stimulus_On_Object_In_State' => 'Erreur: le stimulus "%1$s" n\'est pas valide pour l\'objet %2$s dans l\'état "%3$s".',
'UI:GroupBy:Count' => 'Nombre',
'UI:GroupBy:Count+' => 'Nombre d\'éléments',
'UI:CountOfObjects' => '%1$d objets correspondants aux critères.',
'UI_CountOfObjectsShort' => '%1$d objets.',
'UI:NoObject_Class_ToDisplay' => 'Aucun objet %1$s à afficher',
'UI:History:LastModified_On_By' => 'Dernière modification par %2$s le %1$s.',
'UI:HistoryTab' => 'Historique',
'UI:NotificationsTab' => 'Notifications',
'UI:History:BulkImports' => 'Historique',
'UI:History:BulkImports+' => 'Liste des imports CSV (le dernier est en haut de la liste)',
'UI:History:BulkImportDetails' => 'Changements résultant de l\'import CSV du %1$s (auteur: %2$s)',
'UI:History:Date' => 'Date',
'UI:History:Date+' => 'Date de modification',
'UI:History:User' => 'Utilisateur',
'UI:History:User+' => 'Utilisateur qui a fait la modification',
'UI:History:Changes' => 'Changements',
'UI:History:Changes+' => 'Changements sur cet objet',
'UI:History:StatsCreations' => 'Créés',
'UI:History:StatsCreations+' => 'Nombre d\'objets créés',
'UI:History:StatsModifs' => 'Modifiés',
'UI:History:StatsModifs+' => 'Nombre d\'objets modifiés',
'UI:History:StatsDeletes' => 'Effacés',
'UI:History:StatsDeletes+' => 'Nombre d\'objets effacés',
'UI:Loading' => 'Chargement...',
'UI:Menu:Actions' => 'Actions',
'UI:Menu:OtherActions' => 'Autres Actions',
'UI:Menu:New' => 'Créer...',
'UI:Menu:Add' => 'Ajouter...',
'UI:Menu:Manage' => 'Gérer...',
'UI:Menu:EMail' => 'Envoyer par eMail',
'UI:Menu:CSVExport' => 'Exporter en CSV...',
'UI:Menu:Modify' => 'Modifier...',
'UI:Menu:Delete' => 'Supprimer...',
'UI:Menu:BulkDelete' => 'Supprimer...',
'UI:UndefinedObject' => 'non défini',
'UI:Document:OpenInNewWindow:Download' => 'Ouvrir dans un nouvelle fenêtre: %1$s, Télécharger: %2$s',
'UI:SelectAllToggle+' => 'Tout sélectionner / Tout déselectionner',
'UI:TruncatedResults' => '%1$d objets affichés sur %2$d',
'UI:SplitDateTime-Date' => 'date',
'UI:SplitDateTime-Time' => 'heure',
'UI:DisplayAll' => 'Tout afficher',
'UI:CollapseList' => 'Refermer',
'UI:CountOfResults' => '%1$d objet(s)',
'UI:ChangesLogTitle' => 'Liste de modifications (%1$d):',
'UI:EmptyChangesLogTitle' => 'Aucune modification',
'UI:SearchFor_Class_Objects' => 'Recherche d\'objets de type %1$s ',
'UI:OQLQueryBuilderTitle' => 'Constructeur de requêtes OQL',
'UI:OQLQueryTab' => 'Requête OQL',
'UI:SimpleSearchTab' => 'Recherche simple',
'UI:Details+' => 'Détails',
'UI:SearchValue:Any' => '* Indifférent *',
'UI:SearchValue:NbSelected' => '# sélectionné(e)s',
'UI:SearchValue:CheckAll' => 'Cocher',
'UI:SearchValue:UncheckAll' => 'Décocher',
'UI:SearchValue:Mixed' => '* Plusieurs *',
'UI:SelectOne' => '-- choisir une valeur --',
'UI:Login:Welcome' => 'Bienvenue dans iTop!',
'UI:Login:IncorrectLoginPassword' => 'Mot de passe ou identifiant incorrect.',
'UI:Login:IdentifyYourself' => 'Merci de vous identifier',
'UI:Login:UserNamePrompt' => 'Identifiant',
'UI:Login:PasswordPrompt' => 'Mot de passe',
'UI:Login:ForgotPwd' => 'Mot de passe oublié ?',
'UI:Login:ForgotPwdForm' => 'Mot de passe oublié',
'UI:Login:ForgotPwdForm+' => 'Vous pouvez demander à saisir un nouveau mot de passe. Vous allez recevoir un email et vous pourrez suivre les instructions.',
'UI:Login:ResetPassword' => 'Envoyer le message',
'UI:Login:ResetPwdFailed' => 'Impossible de vous faire parvenir le message: %1$s',
'UI:ResetPwd-Error-WrongLogin' => 'le compte \'%1$s\' est inconnu.',
'UI:ResetPwd-Error-NotPossible' => 'les comptes "externes" ne permettent pas la saisie d\'un mot de passe dans iTop.',
'UI:ResetPwd-Error-FixedPwd' => 'ce mode de saisie du mot de passe n\'est pas autorisé pour ce compte.',
'UI:ResetPwd-Error-NoContact' => 'le comte n\'est pas associé à une Personne.',
'UI:ResetPwd-Error-NoEmailAtt' => 'il manque un attribut de type "email" sur la Personne associée à ce compte. Veuillez contacter l\'administrateur de l\'application.',
'UI:ResetPwd-Error-NoEmail' => 'il manque une adresse email sur la Personne associée à ce compte. Veuillez contacter l\'administrateur de l\'application.',
'UI:ResetPwd-Error-Send' => 'erreur technique lors de l\'envoi de l\'email. Veuillez contacter l\'administrateur de l\'application.',
'UI:ResetPwd-EmailSent' => 'Veuillez vérifier votre boîte de réception. Ensuite, suivez les instructions données dans l\'email...',
'UI:ResetPwd-EmailSubject' => 'Changer votre mot de passe iTop',
'UI:ResetPwd-EmailBody' => '<body><p>Vous avez demandé à changer votre mot de passe iTop sans connaitre le mot de passe précédent.</p><p>Veuillez suivre le lien suivant (usage unique) afin de pouvoir <a href="%1$s">saisir un nouveau mot de passe</a></p>.',
'UI:ResetPwd-Title' => 'Nouveau mot de passe',
'UI:ResetPwd-Error-InvalidToken' => 'Désolé, le mot de passe a déjà été modifié avec le lien que vous avez suivi, ou bien vous avez reçu plusieurs emails. Dans ce cas, veillez à utiliser le tout dernier lien reçu.',
'UI:ResetPwd-Error-EnterPassword' => 'Veuillez saisir le nouveau mot de passe pour \'%1$s\'.',
'UI:ResetPwd-Ready' => 'Le mot de passe a bien été changé.',
'UI:ResetPwd-Login' => 'Cliquez ici pour vous connecter...',
'UI:Login:ChangeYourPassword' => 'Changer de mot de passe',
'UI:Login:OldPasswordPrompt' => 'Ancien mot de passe',
'UI:Login:NewPasswordPrompt' => 'Nouveau mot de passe',
'UI:Login:RetypeNewPasswordPrompt' => 'Resaisir le nouveau mot de passe',
'UI:Login:IncorrectOldPassword' => 'Erreur: l\'ancien mot de passe est incorrect',
'UI:LogOffMenu' => 'Déconnexion',
'UI:LogOff:ThankYou' => 'Merci d\'avoir utilisé iTop',
'UI:LogOff:ClickHereToLoginAgain' => 'Cliquez ici pour vous reconnecter...',
'UI:ChangePwdMenu' => 'Changer de mot de passe...',
'UI:Login:PasswordChanged' => 'Mot de passe mis à jour !',
'UI:AccessRO-All' => 'iTop est en lecture seule',
'UI:AccessRO-Users' => 'iTop est en lecture seule pour les utilisateurs finaux',
'UI:ApplicationEnvironment' => 'Environnement applicatif: %1$s',
'UI:Login:RetypePwdDoesNotMatch' => 'Les deux saisies du nouveau mot de passe ne sont pas identiques !',
'UI:Button:Login' => 'Entrer dans iTop',
'UI:Login:Error:AccessRestricted' => 'L\'accès à iTop est soumis à autorisation. Merci de contacter votre administrateur iTop.',
'UI:Login:Error:AccessAdmin' => 'Accès resreint aux utilisateurs possédant le profil Administrateur.',
'UI:CSVImport:MappingSelectOne' => '-- choisir une valeur --',
'UI:CSVImport:MappingNotApplicable' => '-- ignorer ce champ --',
'UI:CSVImport:NoData' => 'Aucune donnée... merci de fournir des données !',
'UI:Title:DataPreview' => 'Aperçu des données',
'UI:CSVImport:ErrorOnlyOneColumn' => 'Erreur: Les données semblent ne contenir qu\'une seule colonne. Avez-vous choisi le bon séparateur ?',
'UI:CSVImport:FieldName' => 'Champ n°%1$d',
'UI:CSVImport:DataLine1' => 'Données Ligne 1',
'UI:CSVImport:DataLine2' => 'Données Ligne 2',
'UI:CSVImport:idField' => 'id (Clef primaire)',
'UI:Title:BulkImport' => 'iTop - Import massif',
'UI:Title:BulkImport+' => 'Assistant d\'import CSV',
'UI:Title:BulkSynchro_nbItem_ofClass_class' => 'Synchronisation de %1$d éléments de type %2$s',
'UI:CSVImport:ClassesSelectOne' => '-- choisir une valeur --',
'UI:CSVImport:ErrorExtendedAttCode' => 'Erreur interne: "%1$s" n\'est pas une code correct car "%2$s" n\'est pas une clef externe de la classe "%3$s"',
'UI:CSVImport:ObjectsWillStayUnchanged' => '%1$d objets(s) resteront inchangés.',
'UI:CSVImport:ObjectsWillBeModified' => '%1$d objets(s) seront modifiés.',
'UI:CSVImport:ObjectsWillBeAdded' => '%1$d objets(s) seront créés.',
'UI:CSVImport:ObjectsWillHaveErrors' => '%1$d objets(s) seront en erreur.',
'UI:CSVImport:ObjectsRemainedUnchanged' => '%1$d objets(s) n\'ont pas changé.',
'UI:CSVImport:ObjectsWereModified' => '%1$d objets(s)ont été modifiés.',
'UI:CSVImport:ObjectsWereAdded' => '%1$d objets(s) ont été créés.',
'UI:CSVImport:ObjectsHadErrors' => '%1$d ligne(s) contenaient des erreurs.',
'UI:Title:CSVImportStep2' => 'Etape 2 sur 5: Options du format CSV',
'UI:Title:CSVImportStep3' => 'Etape 3 sur 5: Correspondance des données',
'UI:Title:CSVImportStep4' => 'Etape 4 sur 5: Simulation de l\'import',
'UI:Title:CSVImportStep5' => 'Etape 5 sur 5: Import terminé',
'UI:CSVImport:LinesNotImported' => 'Des lignes n\'ont pas été importées:',
'UI:CSVImport:LinesNotImported+' => 'Les lignes suivantes n\'ont pas été importées car elles contenaient des erreurs.',
'UI:CSVImport:SeparatorComma+' => ', (virgule)',
'UI:CSVImport:SeparatorSemicolon+' => '; (point-virgule)',
'UI:CSVImport:SeparatorTab+' => 'tab',
'UI:CSVImport:SeparatorOther' => 'autre :',
'UI:CSVImport:QualifierDoubleQuote+' => '" (guillemet double)',
'UI:CSVImport:QualifierSimpleQuote+' => '\' (guillemet simple / apostrophe)',
'UI:CSVImport:QualifierOther' => 'autre :',
'UI:CSVImport:TreatFirstLineAsHeader' => 'La première ligne est l\'en-tête (noms des colonnes)',
'UI:CSVImport:Skip_N_LinesAtTheBeginning' => 'Ignorer les %1$s premières lignes du fichier',
'UI:CSVImport:CSVDataPreview' => 'Aperçu des données CSV',
'UI:CSVImport:SelectFile' => 'Sélectionnez le fichier à importer:',
'UI:CSVImport:Tab:LoadFromFile' => 'Import depuis un fichier',
'UI:CSVImport:Tab:CopyPaste' => 'Copier/Coller de données',
'UI:CSVImport:Tab:Templates' => 'Modèles',
'UI:CSVImport:PasteData' => 'Collez ici les données à importer:',
'UI:CSVImport:PickClassForTemplate' => 'Choisissez un modèle à télécharger: ',
'UI:CSVImport:SeparatorCharacter' => 'Séparateur:',
'UI:CSVImport:TextQualifierCharacter' => 'Délimiteur de texte',
'UI:CSVImport:CommentsAndHeader' => 'Commentaires et en-tête',
'UI:CSVImport:SelectClass' => 'Sélectionner le type d\'objets à importer:',
'UI:CSVImport:AdvancedMode' => 'Mode expert',
'UI:CSVImport:AdvancedMode+' => 'En mode expert, l\'"id" (clef primaire) des objets peut être utilisé pour renommer des objets. Cependant la colonne "id" (si elle est présente) ne peut être utilisée que comme clef de recherche et ne peut pas être combinée avec une autre clef de recherche.',
'UI:CSVImport:SelectAClassFirst' => 'Pour configurer la correspondance, choisissez d\'abord un type ci-dessus.',
'UI:CSVImport:HeaderFields' => 'Champs',
'UI:CSVImport:HeaderMappings' => 'Correspondance',
'UI:CSVImport:HeaderSearch' => 'Recherche ?',
'UI:CSVImport:AlertIncompleteMapping' => 'Veuillez choisir la correspondance pour chacun des champs.',
'UI:CSVImport:AlertMultipleMapping' => 'Veuillez vous assurer que chaque champ cible est sélectionné une seule fois.',
'UI:CSVImport:AlertNoSearchCriteria' => 'Veuillez choisir au moins une clef de recherche.',
'UI:CSVImport:Encoding' => 'Encodage des caractères',
'UI:CSVImport:DateAndTimeFormats' => 'Format de date et heure',
'UI:CSVImport:DefaultDateTimeFormat_Format_Example' => 'Format par défaut: %1$s (ex. %2$s)',
'UI:CSVImport:CustomDateTimeFormat' => 'Format spécial: %1$s',
'UI:CSVImport:CustomDateTimeFormatTooltip' => 'Codes de format:<table>
<tr><td>Y</td><td>année (sur 4 chiffres, ex. 2016)</td></tr>
<tr><td>y</td><td>année (sur 2 chiffres, ex. 16 pour 2016)</td></tr>
<tr><td>m</td><td>mois (sur 2 chiffres: 01..12)</td></tr>
<tr><td>n</td><td>mois (sur 1 ou 2 chiffres sans le zero au début: 1..12)</td></tr>
<tr><td>d</td><td>jour (sur 2 chiffres: 01..31)</td></tr>
<tr><td>j</td><td>jour (sur 1 ou 2 chiffres sans le zero au début: 1..31)</td></tr>
<tr><td>H</td><td>heure (24 heures sur 2 chiffres: 00..23)</td></tr>
<tr><td>h</td><td>heure (12 heures sur 2 chiffres: 01..12)</td></tr>
<tr><td>G</td><td>heure (24 heures sur 1 ou 2 chiffres: 0..23)</td></tr>
<tr><td>g</td><td>heure (12 heures sur 1 ou 2 chiffres: 1..12)</td></tr>
<tr><td>a</td><td>am ou pm (en minuscules)</td></tr>
<tr><td>A</td><td>AM ou PM (en majuscules)</td></tr>
<tr><td>i</td><td>minutes (sur 2 chiffres: 00..59)</td></tr>
<tr><td>s</td><td>secondes (sur 2 chiffres: 00..59)</td></tr>
</table>',
'UI:CSVReport-Value-Modified' => 'Modifié',
'UI:CSVReport-Value-SetIssue' => 'Modification impossible - cause : %1$s',
'UI:CSVReport-Value-ChangeIssue' => 'Ne peut pas prendre la valeur \'%1$s\' - cause : %2$s',
'UI:CSVReport-Value-NoMatch' => 'Pas de correspondance',
'UI:CSVReport-Value-Missing' => 'Absence de valeur obligatoire',
'UI:CSVReport-Value-Ambiguous' => 'Ambigüité: %1$d objets trouvés',
'UI:CSVReport-Row-Unchanged' => 'inchangé',
'UI:CSVReport-Row-Created' => 'créé',
'UI:CSVReport-Row-Updated' => '%1$d colonnes modifiées',
'UI:CSVReport-Row-Disappeared' => 'disparu, %1$d colonnes modifiées',
'UI:CSVReport-Row-Issue' => 'Erreur: %1$s',
'UI:CSVReport-Value-Issue-Null' => 'Valeur obligatoire',
'UI:CSVReport-Value-Issue-NotFound' => 'Objet non trouvé',
'UI:CSVReport-Value-Issue-FoundMany' => 'Plusieurs objets trouvés (%1$d)',
'UI:CSVReport-Value-Issue-Readonly' => 'L\'attribut \'%1$s\' est en lecture seule (valeur courante: %2$s, valeur proposée: %3$s)',
'UI:CSVReport-Value-Issue-Format' => 'Echec de traitement de la valeur: %1$s',
'UI:CSVReport-Value-Issue-NoMatch' => 'Valeur incorrecte pour \'%1$s\': pas de correspondance, veuillez vérifier la syntaxe',
'UI:CSVReport-Value-Issue-Unknown' => 'Valeur incorrecte pour \'%1$s\': %2$s',
'UI:CSVReport-Row-Issue-Inconsistent' => 'Incohérence entre attributs: %1$s',
'UI:CSVReport-Row-Issue-Attribute' => 'Des attributs ont des valeurs incorrectes',
'UI:CSVReport-Row-Issue-MissingExtKey' => 'Ne peut pas être créé car il manque des clés externes : %1$s',
'UI:CSVReport-Row-Issue-DateFormat' => 'Format de date incorrect',
'UI:CSVReport-Row-Issue-Reconciliation' => 'Echec de réconciliation',
'UI:CSVReport-Row-Issue-Ambiguous' => 'Réconciliation ambigüe',
'UI:CSVReport-Row-Issue-Internal' => 'Erreur interne: %1$s, %2$s',
'UI:CSVReport-Icon-Unchanged' => 'Non modifié',
'UI:CSVReport-Icon-Modified' => 'Modifié',
'UI:CSVReport-Icon-Missing' => 'A disparu',
'UI:CSVReport-Object-MissingToUpdate' => 'Objet disparu: sera modifié',
'UI:CSVReport-Object-MissingUpdated' => 'Objet disparu: modifié',
'UI:CSVReport-Icon-Created' => 'Créé',
'UI:CSVReport-Object-ToCreate' => 'L\'objet sera créé',
'UI:CSVReport-Object-Created' => 'Objet créé',
'UI:CSVReport-Icon-Error' => 'Erreur',
'UI:CSVReport-Object-Error' => 'Erreur: %1$s',
'UI:CSVReport-Object-Ambiguous' => 'Ambigüité: %1$s',
'UI:CSVReport-Stats-Errors' => '%1$.0f %% des lignes chargées sont en erreur et seront ignorées.',
'UI:CSVReport-Stats-Created' => '%1$.0f %% des lignes chargées vont engendrer un nouvel objet.',
'UI:CSVReport-Stats-Modified' => '%1$.0f %% des lignes chargées vont modifier un objet.',
'UI:CSVExport:AdvancedMode' => 'Mode expert',
'UI:CSVExport:AdvancedMode+' => 'Dans le mode expert, des colonnes supplémentaires apparaissent: l\'identifiant de l\'objet, la valeur des clés externes et leurs attributs de reconciliation.',
'UI:CSVExport:LostChars' => 'Problème d\'encodage',
'UI:CSVExport:LostChars+' => 'Le fichier téléchargé sera encodé en %1$s. iTop a détecté des caractères incompatibles avec ce format. Ces caractères seront soit remplacés par des caractères de substitution (par exemple: \'é\' transformé en \'e\'), soit perdus. Vous pouvez utiliser le copier/coller depuis votre navigateur web, ou bien contacter votre administrateur pour que l\'encodage corresponde mieux à votre besoin (Cf. paramètre \'csv_file_default_charset\').',
'UI:UniversalSearchTitle' => 'iTop - Recherche Universelle',
'UI:UniversalSearch:Error' => 'Erreur : %1$s',
'UI:UniversalSearch:LabelSelectTheClass' => 'Sélectionnez le type d\'objets à rechercher : ',
'UI:Audit:Title' => 'iTop - Audit de la CMDB',
'UI:Audit:InteractiveAudit' => 'Audit Interactif',
'UI:Audit:HeaderAuditRule' => 'Règle d\'audit',
'UI:Audit:HeaderNbObjects' => 'Nb d\'Objets',
'UI:Audit:HeaderNbErrors' => 'Nb d\'Erreurs',
'UI:Audit:PercentageOk' => '% Ok',
'UI:Audit:ErrorIn_Rule_Reason' => 'Erreur OQL dans la règle %1$s: %2$s.',
'UI:Audit:ErrorIn_Category_Reason' => 'Erreur OQL dans la catégorie %1$s: %2$s.',
'UI:RunQuery:Title' => 'iTop - Evaluation de requêtes OQL',
'UI:RunQuery:QueryExamples' => 'Exemples de requêtes',
'UI:RunQuery:HeaderPurpose' => 'Objectif',
'UI:RunQuery:HeaderPurpose+' => 'But de la requête',
'UI:RunQuery:HeaderOQLExpression' => 'Requête OQL',
'UI:RunQuery:HeaderOQLExpression+' => 'La requête en OQL',
'UI:RunQuery:ExpressionToEvaluate' => 'Requête à exécuter : ',
'UI:RunQuery:MoreInfo' => 'Plus d\'information sur la requête : ',
'UI:RunQuery:DevelopedQuery' => 'Requête OQL décompilée : ',
'UI:RunQuery:SerializedFilter' => 'Version sérialisée : ',
'UI:RunQuery:Error' => 'Une erreur s\'est produite durant l\'exécution de la requête : %1$s',
'UI:Query:UrlForExcel' => 'Lien à copier-coller dans Excel, pour déclarer une source de données à partir du web',
'UI:Query:UrlV1' => 'La liste des champs à exporter n\'a pas été spécifiée. La page <em>export-V2.php</em> ne peut pas fonctionner sans cette information. Par conséquent, le lien fourni ci-dessous pointe sur l\'ancienne page: <em>export.php</em>. Cette ancienne version de l\'export présente la limitation suivante : la liste des champs exportés varie en fonction du format de l\'export et du modèle de données. <br/>Si vous devez garantir la stabilité du format de l\'export (liste des colonnes) sur le long terme, alors vous devrez renseigner l\'attribut "Champs" et utiliser la page <em>export-V2.php</em>.',
'UI:Schema:Title' => 'Modèle de données iTop',
'UI:Schema:CategoryMenuItem' => 'Catégorie <b>%1$s</b>',
'UI:Schema:Relationships' => 'Relations',
'UI:Schema:AbstractClass' => 'Classe abstraite : les objets de cette classe ne peuvent pas être instanciés.',
'UI:Schema:NonAbstractClass' => 'Classe concrète : les objets de cette classe peuvent être instanciés.',
'UI:Schema:ClassHierarchyTitle' => 'Hiérarchie des classes',
'UI:Schema:AllClasses' => 'Toutes les classes',
'UI:Schema:ExternalKey_To' => 'Clef externe vers %1$s',
'UI:Schema:Columns_Description' => 'Colonnes : <em>%1$s</em>',
'UI:Schema:Default_Description' => 'Valeur par défaut: "%1$s"',
'UI:Schema:NullAllowed' => 'Null autorisé',
'UI:Schema:NullNotAllowed' => 'Null interdit',
'UI:Schema:Attributes' => 'Attributs',
'UI:Schema:AttributeCode' => 'Code',
'UI:Schema:AttributeCode+' => 'Code interne de l\'attribut',
'UI:Schema:Label' => 'Label',
'UI:Schema:Label+' => 'Label de l\'attribut',
'UI:Schema:Type' => 'Type',
'UI:Schema:Type+' => 'Type de données de l\'attribut',
'UI:Schema:Origin' => 'Origine',
'UI:Schema:Origin+' => 'La classe de base dans laquelle l\'attribut est défini',
'UI:Schema:Description' => 'Description',
'UI:Schema:Description+' => 'Description de l\'attribut',
'UI:Schema:AllowedValues' => 'Valeurs possibles',
'UI:Schema:AllowedValues+' => 'Restrictions des valeurs possibles pour cet attribut',
'UI:Schema:MoreInfo' => 'Plus info',
'UI:Schema:MoreInfo+' => 'Plus d\'information à propos de la définition de ce champ dans la base de données',
'UI:Schema:SearchCriteria' => 'Critères de recherche',
'UI:Schema:FilterCode' => 'Code',
'UI:Schema:FilterCode+' => 'Code de ce critère de recherche',
'UI:Schema:FilterDescription' => 'Description',
'UI:Schema:FilterDescription+' => 'Description de ce critère de recherche',
'UI:Schema:AvailOperators' => 'Opérateurs',
'UI:Schema:AvailOperators+' => 'Opérateurs possibles pour ce critère de recherche',
'UI:Schema:ChildClasses' => 'Classes dérivées',
'UI:Schema:ReferencingClasses' => 'Classes faisant référence',
'UI:Schema:RelatedClasses' => 'Classes reliées',
'UI:Schema:LifeCycle' => 'Cycle de vie',
'UI:Schema:Triggers' => 'Déclencheurs',
'UI:Schema:Relation_Code_Description' => 'Relation <em>%1$s</em> (%2$s)',
'UI:Schema:RelationDown_Description' => 'Sens descendant: %1$s',
'UI:Schema:RelationUp_Description' => 'Sens montant: %1$s',
'UI:Schema:RelationPropagates' => '%1$s: se propage sur %2$d niveau(x), requête: %3$s',
'UI:Schema:RelationDoesNotPropagate' => '%1$s: ne se propage pas (%2$d niveaux), requête: %3$s',
'UI:Schema:Class_ReferencingClasses_From_By' => '%1$s est référencé par la classe %2$s via le champ %3$s',
'UI:Schema:Class_IsLinkedTo_Class_Via_ClassAndAttribute' => '%1$s est lié à la classe %2$s via %3$s::<em>%4$s</em>',
'UI:Schema:Links:1-n' => 'Classes pointant sur %1$s (liens 1:n) :',
'UI:Schema:Links:n-n' => 'Classes liées à %1$s (liens n:n) :',
'UI:Schema:Links:All' => 'Graphe de toutes les classes liées',
'UI:Schema:NoLifeCyle' => 'Aucun cycle de vie n\'est défini pour cette classe.',
'UI:Schema:LifeCycleTransitions' => 'Etats et Transitions',
'UI:Schema:LifeCyleAttributeOptions' => 'Options des attributs',
'UI:Schema:LifeCycleHiddenAttribute' => 'Caché',
'UI:Schema:LifeCycleReadOnlyAttribute' => 'Lecture seule',
'UI:Schema:LifeCycleMandatoryAttribute' => 'Obligatoire',
'UI:Schema:LifeCycleAttributeMustChange' => 'Doit changer',
'UI:Schema:LifeCycleAttributeMustPrompt' => 'L\'utilisateur se verra proposer de changer la valeur',
'UI:Schema:LifeCycleEmptyList' => 'liste vide',
'UI:Schema:ClassFilter' => 'Classe :',
'UI:Schema:DisplayLabel' => 'Affichage :',
'UI:Schema:DisplaySelector/LabelAndCode' => 'Label et code',
'UI:Schema:DisplaySelector/Label' => 'Label',
'UI:Schema:DisplaySelector/Code' => 'Code',
'UI:Schema:Attribute/Filter' => 'Filtre',
'UI:Schema:DefaultNullValue' => 'Valeur null par défaut : "%1$s"',
'UI:LinksWidget:Autocomplete+' => 'Tapez les 3 premiers caractères...',
'UI:Edit:TestQuery' => 'Tester la requête',
'UI:Combo:SelectValue' => '--- choisissez une valeur ---',
'UI:Label:SelectedObjects' => 'Objets sélectionnés: ',
'UI:Label:AvailableObjects' => 'Objets disponibles: ',
'UI:Link_Class_Attributes' => 'Attributs du type %1$s',
'UI:AddObjectsOf_Class_LinkedWith_Class_Instance' => 'Ajouter des objets de type %1$s liés à %3$s (%2$s)',
'UI:AddObjectsOf_Class_LinkedWith_Class' => 'Ajouter des objets de type %1$s à lier à cet objet de type %2$s',
'UI:ManageObjectsOf_Class_LinkedWith_Class_Instance' => 'Gérer les objets de type %1$s liés à %3$s (%2$s)',
'UI:AddLinkedObjectsOf_Class' => 'Ajouter des objets de type %1$s...',
'UI:RemoveLinkedObjectsOf_Class' => 'Enlever les objets sélectionnés',
'UI:Message:EmptyList:UseAdd' => 'La liste est vide, utilisez le bouton "Ajouter..." pour ajouter des objets.',
'UI:Message:EmptyList:UseSearchForm' => 'Utilisez le formulaire de recherche ci-dessus pour trouver les objets à ajouter.',
'UI:Wizard:FinalStepTitle' => 'Dernière étape: confirmation',
'UI:Title:DeletionOf_Object' => 'Suppression de %1$s',
'UI:Title:BulkDeletionOf_Count_ObjectsOf_Class' => 'Suppression massive de %1$d objets de type %2$s',
'UI:Delete:NotAllowedToDelete' => 'Vous n\'êtes pas autorisé à supprimer cet objet',
'UI:Delete:NotAllowedToUpdate_Fields' => 'Vous n\'êtes pas autorisé à mettre à jour les champs suivants : %1$s',
'UI:Error:NotEnoughRightsToDelete' => 'Cet objet ne peut pas être supprimé car l\'utilisateur courant n\'a pas les droits nécessaires.',
'UI:Error:CannotDeleteBecause' => 'Cet objet ne peut pas être effacé. Raison: %1$s',
'UI:Error:CannotDeleteBecauseOfDepencies' => 'Cet objet ne peut pas être supprimé, des opérations manuelles sont nécessaires avant sa suppression.',
'UI:Error:CannotDeleteBecauseManualOpNeeded' => 'Des opérations manuelles sont nécessaires avant de pouvoir effacer cet objet',
'UI:Archive_User_OnBehalfOf_User' => '%1$s pour %2$s',
'UI:Delete:Deleted' => 'supprimé',
'UI:Delete:AutomaticallyDeleted' => 'supprimé automatiquement',
'UI:Delete:AutomaticResetOf_Fields' => 'mise à jour automatique des champ(s): %1$s',
'UI:Delete:CleaningUpRefencesTo_Object' => 'Suppression de toutes les références vers %1$s...',
'UI:Delete:CleaningUpRefencesTo_Several_ObjectsOf_Class' => 'Suppression de toutes les références vers les %1$d objets de type %2$s...',
'UI:Delete:Done+' => 'Ce qui a été effectué...',
'UI:Delete:_Name_Class_Deleted' => ' %2$s %1$s supprimé.',
'UI:Delete:ConfirmDeletionOf_Name' => 'Suppression de %1$s',
'UI:Delete:ConfirmDeletionOf_Count_ObjectsOf_Class' => 'Suppression de %1$d objets de type %2$s',
'UI:Delete:CannotDeleteBecause' => 'Ne peut pas être supprimé: %1$s',
'UI:Delete:ShouldBeDeletedAtomaticallyButNotPossible' => 'Devrait être supprimé automatiquement, mais cela n\'est pas possible: %1$s',
'UI:Delete:MustBeDeletedManuallyButNotPossible' => 'Doit être supprimé manuellement, mais cela n\'est pas possible: %1$s',
'UI:Delete:WillBeDeletedAutomatically' => 'Sera supprimé automatiquement',
'UI:Delete:MustBeDeletedManually' => 'Doit être supprimé manuellement',
'UI:Delete:CannotUpdateBecause_Issue' => 'Devrait être mis à jour automatiquement, mais: %1$s',
'UI:Delete:WillAutomaticallyUpdate_Fields' => 'Va être mis à jour automatiquement (champs impactés : %1$s)',
'UI:Delete:Count_Objects/LinksReferencing_Object' => '%1$d objets ou liens font référence à %2$s',
'UI:Delete:Count_Objects/LinksReferencingTheObjects' => '%1$d objets ou liens font référence à certain des objets à supprimer',
'UI:Delete:ReferencesMustBeDeletedToEnsureIntegrity' => 'pour garantir l\'intégrité de la base de données, toutes les références doivent être supprimées.',
'UI:Delete:Consequence+' => 'Ce qui va être effectué',
'UI:Delete:SorryDeletionNotAllowed' => 'Désolé, vous n\'êtes pas autorisé à supprimer cet objet. Voir les explications détaillées ci-dessus.',
'UI:Delete:PleaseDoTheManualOperations' => 'Vous devez effectuer les opérations manuelles listées ci-dessus avant de pouvoir supprimer cet objet.',
'UI:Delect:Confirm_Object' => 'Confirmez que vous voulez bien supprimer %1$s.',
'UI:Delect:Confirm_Count_ObjectsOf_Class' => 'Confirmez que vous voulez bien supprimer les %1$d objets de type %2$s ci-dessous.',
'UI:WelcomeToITop' => 'Bienvenue dans iTop',
'UI:DetailsPageTitle' => 'iTop - %2$s - Détails de %1$s',
'UI:ErrorPageTitle' => 'iTop - Erreur',
'UI:ObjectDoesNotExist' => 'Désolé cet objet n\'existe pas (ou vous n\'êtes pas autorisé à l\'afficher).',
'UI:ObjectArchived' => 'Cet objet a été archivé. Veuillez activer le mode Archive, ou contactez votre administrateur.',
'Tag:Archived' => 'Archivé',
'Tag:Archived+' => 'Accessible seulement dans le mode Archive',
'Tag:Obsolete' => 'Obsolète',
'Tag:Obsolete+' => 'Exclu de l\'analyse d\'impact et des résultats de recherche~~',
'Tag:Synchronized' => 'Synchronisé',
'ObjectRef:Archived' => 'Archivé',
'ObjectRef:Obsolete' => 'Obsolète',
'UI:SearchResultsPageTitle' => 'iTop - Résultats de la recherche',
'UI:SearchResultsTitle' => 'Recherche globale',
'UI:SearchResultsTitle+' => 'Résultat de recherche globale',
'UI:Search:NoSearch' => 'Rien à rechercher',
'UI:Search:NeedleTooShort' => 'La clé de recherche "%1$s" est trop courte. Veuillez saisir au moins %2$d caractères.',
'UI:Search:Ongoing' => 'Recherche de "%1$s"',
'UI:Search:Enlarge' => 'Elargir la recherche',
'UI:FullTextSearchTitle_Text' => 'Résultats pour "%1$s" :',
'UI:Search:Count_ObjectsOf_Class_Found' => 'Trouvé %1$d objet(s) de type %2$s.',
'UI:Search:NoObjectFound' => 'Aucun objet trouvé.',
'UI:ModificationPageTitle_Object_Class' => 'iTop - %2$s - Modification de %1$s',
'UI:ModificationTitle_Class_Object' => '%1$s - Modification de <span class="hilite">%2$s</span>',
'UI:ClonePageTitle_Object_Class' => 'iTop - %2$s - Duplication de %1$s',
'UI:CloneTitle_Class_Object' => ' %1$s - Duplication de <span class="hilite">%2$s</span>',
'UI:CreationPageTitle_Class' => 'iTop - Création d\'un objet de type %1$s ',
'UI:CreationTitle_Class' => 'Création d\'un objet de type %1$s',
'UI:SelectTheTypeOf_Class_ToCreate' => 'Sélectionnez le type de %1$s à créer :',
'UI:Class_Object_NotUpdated' => 'Aucun changement détecté, %2$s (type : %1$s) n\'a <strong>pas</strong> été modifié.',
'UI:Class_Object_Updated' => '%1$s (%2$s) - informations mises à jour.',
'UI:BulkDeletePageTitle' => 'iTop - Suppression massive',
'UI:BulkDeleteTitle' => 'Sélectionnez les objets à supprimer:',
'UI:PageTitle:ObjectCreated' => 'iTop objet créé.',
'UI:Title:Object_Of_Class_Created' => '%2$s - %1$s créé(e).',
'UI:Apply_Stimulus_On_Object_In_State_ToTarget_State' => '%1$s pour %2$s de l\'état %3$s vers l\'état %4$s.',
'UI:ObjectCouldNotBeWritten' => 'L\'objet ne peut pas être enregistré: %1$s',
'UI:PageTitle:FatalError' => 'iTop - Erreur Fatale',
'UI:SystemIntrusion' => 'Accès non autorisé. Vous êtes en train d\'effectuer une opération qui ne vous est pas permise.',
'UI:FatalErrorMessage' => 'Erreur fatale, iTop ne peut pas continuer.',
'UI:Error_Details' => 'Erreur: %1$s.',
'UI:PageTitle:ClassProjections' => 'iTop gestion des utilisateurs - projections des classes',
'UI:PageTitle:ProfileProjections' => 'iTop gestion des utilisateurs - projections des profils',
'UI:UserManagement:Class' => 'Type',
'UI:UserManagement:Class+' => 'Type des objets',
'UI:UserManagement:ProjectedObject' => 'Objet',
'UI:UserManagement:ProjectedObject+' => 'L\'objet projeté',
'UI:UserManagement:AnyObject' => '* indifférent *',
'UI:UserManagement:User' => 'Utilisateur',
'UI:UserManagement:User+' => 'L\'utilisateur',
'UI:UserManagement:Profile' => 'Profil',
'UI:UserManagement:Profile+' => 'Profil dans lequel la projection est définie',
'UI:UserManagement:Action:Read' => 'Lecture',
'UI:UserManagement:Action:Read+' => 'Lecture et affichage d\'un objet',
'UI:UserManagement:Action:Modify' => 'Modification',
'UI:UserManagement:Action:Modify+' => 'Création et modification d\'un objet',
'UI:UserManagement:Action:Delete' => 'Suppression',
'UI:UserManagement:Action:Delete+' => 'Suppression d\'un objet',
'UI:UserManagement:Action:BulkRead' => 'Lecture en masse (export)',
'UI:UserManagement:Action:BulkRead+' => 'Export de liste d\'objets',
'UI:UserManagement:Action:BulkModify' => 'Modification en masse',
'UI:UserManagement:Action:BulkModify+' => 'Création et modification de plusieurs objets (import CSV)',
'UI:UserManagement:Action:BulkDelete' => 'Suppression en masse',
'UI:UserManagement:Action:BulkDelete+' => 'Suppression de plusieurs objets',
'UI:UserManagement:Action:Stimuli' => 'Stimuli',
'UI:UserManagement:Action:Stimuli+' => 'Actions autorisées',
'UI:UserManagement:Action' => 'Action',
'UI:UserManagement:Action+' => 'l\'action effectuée par l\'utilisateur',
'UI:UserManagement:TitleActions' => 'Actions',
'UI:UserManagement:Permission' => 'Permission',
'UI:UserManagement:Permission+' => 'Les droits de l\'utilisateur',
'UI:UserManagement:Attributes' => 'Champs',
'UI:UserManagement:ActionAllowed:Yes' => 'Oui',
'UI:UserManagement:ActionAllowed:No' => 'Non',
'UI:UserManagement:AdminProfile+' => 'Les administrateurs ont un accès total à tous les objets de la base de données.',
'UI:UserManagement:NoLifeCycleApplicable' => 'N/A',
'UI:UserManagement:NoLifeCycleApplicable+' => 'Aucun cycle de vie n\'est défini pour ce type d\'objets.',
'UI:UserManagement:GrantMatrix' => 'Matrice des droits',
'UI:UserManagement:LinkBetween_User_And_Profile' => 'Lien entre %1$s et %2$s',
'UI:UserManagement:LinkBetween_User_And_Org' => 'Lien entre %1$s et %2$s',
'Menu:AdminTools' => 'Outils d\'admin', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:AdminTools+' => 'Outils d\'administration', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:AdminTools?' => 'Ces outils sont accessibles uniquement aux utilisateurs possédant le profil Administrateur.', // Duplicated into itop-welcome-itil (will be removed from here...)
'UI:ChangeManagementMenu' => 'Gestion du Changement',
'UI:ChangeManagementMenu+' => 'Gestion du Changement',
'UI:ChangeManagementMenu:Title' => 'Résumé des changements',
'UI-ChangeManagementMenu-ChangesByType' => 'Changements par type',
'UI-ChangeManagementMenu-ChangesByStatus' => 'Changements par état',
'UI-ChangeManagementMenu-ChangesByWorkgroup' => 'Changements par workgroup',
'UI-ChangeManagementMenu-ChangesNotYetAssigned' => 'Changements en attente d\'assignation',
'UI:ConfigurationManagementMenu' => 'Gestion de Configuration',
'UI:ConfigurationManagementMenu+' => 'Gestion de Configuration',
'UI:ConfigurationManagementMenu:Title' => 'Résumé de l\'Infrastructure',
'UI-ConfigurationManagementMenu-InfraByType' => 'Nombre d\'éléments par type',
'UI-ConfigurationManagementMenu-InfraByStatus' => 'Nombre d\'éléments par état',
'UI:ConfigMgmtMenuOverview:Title' => 'Tableau de bord de la Gestion de Configuration',
'UI-ConfigMgmtMenuOverview-FunctionalCIbyStatus' => 'Actifs par état',
'UI-ConfigMgmtMenuOverview-FunctionalCIByType' => 'Actifs par type',
'UI:RequestMgmtMenuOverview:Title' => 'Tableau de bord de la Gestion des Demandes Utilisateurs',
'UI-RequestManagementOverview-RequestByService' => 'Demandes par service',
'UI-RequestManagementOverview-RequestByPriority' => 'Demandes par priorité',
'UI-RequestManagementOverview-RequestUnassigned' => 'Demandes non affectées à un agent',
'UI:IncidentMgmtMenuOverview:Title' => 'Tableau de bord de la Gestion des Incidents',
'UI-IncidentManagementOverview-IncidentByService' => 'Incidents par service',
'UI-IncidentManagementOverview-IncidentByPriority' => 'Incidents par priorité',
'UI-IncidentManagementOverview-IncidentUnassigned' => 'Incidents non affectés à un agent',
'UI:ChangeMgmtMenuOverview:Title' => 'Tableau de bord de la Gestion des Changements',
'UI-ChangeManagementOverview-ChangeByType' => 'Changements par type',
'UI-ChangeManagementOverview-ChangeUnassigned' => 'Changements non affectés à un agent',
'UI-ChangeManagementOverview-ChangeWithOutage' => 'Interruptions de service liées à un changement',
'UI:ServiceMgmtMenuOverview:Title' => 'Tableau de bord de la Gestion des Services',
'UI-ServiceManagementOverview-CustomerContractToRenew' => 'Contrats clients à renouveler dans les 30 jours',
'UI-ServiceManagementOverview-ProviderContractToRenew' => 'Contrats fournisseurs à renouveler dans les 30 jours',
'UI:ContactsMenu' => 'Contacts',
'UI:ContactsMenu+' => 'Contacts',
'UI:ContactsMenu:Title' => 'Résumé des contacts',
'UI-ContactsMenu-ContactsByLocation' => 'Contacts par emplacement',
'UI-ContactsMenu-ContactsByType' => 'Contacts par type',
'UI-ContactsMenu-ContactsByStatus' => 'Contacts par état',
'Menu:CSVImportMenu' => 'Import CSV', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:CSVImportMenu+' => 'Import ou mise à jour en masse', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:DataModelMenu' => 'Modèle de Données', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:DataModelMenu+' => 'Résumé du Modèle de Données', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:ExportMenu' => 'Exportation', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:ExportMenu+' => 'Exportation des résultats d\'une requête en HTML, CSV ou XML', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:NotificationsMenu' => 'Notifications', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:NotificationsMenu+' => 'Configuration des Notifications', // Duplicated into itop-welcome-itil (will be removed from here...)
'UI:NotificationsMenu:Title' => 'Configuration des <span class="hilite">Notifications</span>',
'UI:NotificationsMenu:Help' => 'Aide',
'UI:NotificationsMenu:HelpContent' => '<p>Dans iTop les notifications sont totalement configurables. Elles sont basées sur deux types d\'objets: <i>déclencheurs et actions</i>.</p>
<p><i><b>Les déclencheurs</b></i> définissent quand une notification doit être exécutée. Il y a 5 types de déclencheurs pour couvrir les 3 différentes phases du cycle de vie d\'un objet:
<ol>
<li>un déclencheur "sur création d\'un objet" est exécuté quand un objet d\'une classe spécifique est créé.</li>
<li>un déclencheur "sur objet entrant dans un état" est exécuté avant que l\'objet n\'entre dans un état donné (en venant d\'un autre état du cycle de vie)</li>
<li>un déclencheur "sur objet quittant un état" est exécuté quand l\'objet quitte un état spécifié</li>
<li>un déclencheur "sur dépassement de seuil" est exécuté quand un seuil prédéfini pour TTO/TTR est dépassé</li>
<li>un déclencheur "sur mise à jour depuis le portail" est exécuté quand un ticket est modifié depuis le portail dédié aux clients externes</li>
</ol>
</p>
<p>
<i><b>Les actions</b></i> définissent ce qui doit être exécuté. Pour le moment il existe un seul type d\'action: l\'envoi de mail.
Les actions de type mail définissent le modèle du message ainsi que les autres paramètres (destinataires, importance, etc.)</p>
<p>Une page spéciale: <a href="../setup/email.test.php" target="_blank">email.test.php</a> permet de tester votre configuration mail PHP.</p>
<p>Les actions doivent être associées à des déclencheurs pour pouvoir être exécutées.
Lors de l\'association à un déclencheur, on attribue à chaque action un numéro d\'ordre, qui définit la séquence des actions à exécuter.</p>',
'UI:NotificationsMenu:Triggers' => 'Déclencheurs',
'UI:NotificationsMenu:AvailableTriggers' => 'Déclencheurs existants',
'UI:NotificationsMenu:OnCreate' => 'A la création d\'un objet',
'UI:NotificationsMenu:OnStateEnter' => 'Quand un objet entre dans un état donné',
'UI:NotificationsMenu:OnStateLeave' => 'Quand un objet quitte un état donné',
'UI:NotificationsMenu:Actions' => 'Actions',
'UI:NotificationsMenu:AvailableActions' => 'Actions existantes',
'Menu:AuditCategories' => 'Catégories d\'audit', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:AuditCategories+' => 'Catégories d\'audit', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:Notifications:Title' => 'Catégories d\'audit', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:RunQueriesMenu' => 'Requêtes OQL', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:RunQueriesMenu+' => 'Exécuter une requête OQL', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:QueryMenu' => 'Livre des requêtes', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:QueryMenu+' => 'Livre des requêtes', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:DataAdministration' => 'Administration des données', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:DataAdministration+' => 'Administration des données', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:UniversalSearchMenu' => 'Recherche Universelle', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:UniversalSearchMenu+' => 'Rechercher n\'importe quel objet...', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:UserManagementMenu' => 'Gestion des Utilisateurs', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:UserManagementMenu+' => 'Gestion des Utilisateurs', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:ProfilesMenu' => 'Profils', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:ProfilesMenu+' => 'Profils', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:ProfilesMenu:Title' => 'Profils', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:UserAccountsMenu' => 'Comptes Utilisateurs', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:UserAccountsMenu+' => 'Comptes Utilisateurs', // Duplicated into itop-welcome-itil (will be removed from here...)
'Menu:UserAccountsMenu:Title' => 'Comptes Utilisateurs', // Duplicated into itop-welcome-itil (will be removed from here...)
'UI:iTopVersion:Short' => '%1$s version %2$s',
'UI:iTopVersion:Long' => '%1$s version %2$s-%3$s du %4$s',
'UI:PropertiesTab' => 'Propriétés',
'UI:OpenDocumentInNewWindow_' => 'Ouvrir ce document dans une autre fenêtre: %1$s',
'UI:DownloadDocument_' => 'Télécharger ce document: %1$s',
'UI:Document:NoPreview' => 'L\'aperçu n\'est pas disponible pour ce type de documents',
'UI:Download-CSV' => 'Télécharger %1$s',
'UI:DeadlineMissedBy_duration' => 'Passé de %1$s',
'UI:Deadline_LessThan1Min' => '< 1 min',
'UI:Deadline_Minutes' => '%1$d min',
'UI:Deadline_Hours_Minutes' => '%1$dh %2$dmin',
'UI:Deadline_Days_Hours_Minutes' => '%1$dj %2$dh %3$dmin',
'UI:Help' => 'Aide',
'UI:PasswordConfirm' => '(Confirmer)',
'UI:BeforeAdding_Class_ObjectsSaveThisObject' => 'Enregistrez l\'objet courant avant de créer de nouveaux éléments de type %1$s.',
'UI:DisplayThisMessageAtStartup' => 'Afficher ce message au démarrage',
'UI:RelationshipGraph' => 'Vue graphique',
'UI:RelationshipList' => 'Liste',
'UI:RelationGroups' => 'Groupes',
'UI:ElementsDisplayed' => 'Filtrage',
'UI:RelationGroupNumber_N' => 'Groupe n°%1$d',
'UI:Relation:ExportAsPDF' => 'Exporter en PDF...',
'UI:RelationOption:GroupingThreshold' => 'Seuil de groupage',
'UI:Relation:AdditionalContextInfo' => 'Infos complémentaires de contexte',
'UI:Relation:NoneSelected' => 'Aucune',
'UI:Relation:Zoom' => 'Zoom',
'UI:Relation:ExportAsAttachment' => 'Exporter comme une Pièce Jointe...',
'UI:Relation:DrillDown' => 'Détails...',
'UI:Relation:PDFExportOptions' => 'Options de l\'export en PDF',
'UI:Relation:AttachmentExportOptions_Name' => 'Options pour la Pièce Jointe à %1$s',
'UI:RelationOption:Untitled' => 'Sans Titre',
'UI:Relation:Key' => 'Légende',
'UI:Relation:Comments' => 'Commentaires',
'UI:RelationOption:Title' => 'Titre',
'UI:RelationOption:IncludeList' => 'Inclure la liste des objets',
'UI:RelationOption:Comments' => 'Commentaires',
'UI:Button:Export' => 'Exporter',
'UI:Relation:PDFExportPageFormat' => 'Format de page',
'UI:PageFormat_A3' => 'A3',
'UI:PageFormat_A4' => 'A4',
'UI:PageFormat_Letter' => 'Letter',
'UI:Relation:PDFExportPageOrientation' => 'Orientation de la page',
'UI:PageOrientation_Portrait' => 'Portrait',
'UI:PageOrientation_Landscape' => 'Paysage',
'UI:RelationTooltip:Redundancy' => 'Redondance',
'UI:RelationTooltip:ImpactedItems_N_of_M' => 'Nb éléments impactés: %1$d / %2$d',
'UI:RelationTooltip:CriticalThreshold_N_of_M' => 'Seuil critique: %1$d / %2$d',
'UI:OperationCancelled' => 'Opération Annulée',
'Portal:Title' => 'Portail utilisateur iTop',
'Portal:NoRequestMgmt' => 'Chèr(e) %1$s, vous avez été redirigé(e) vers cette page car votre compte utilisateur est configuré avec le profil \'Utilisateur du Portail\'. Malheureusement, iTop n\'a pas été installé avec le module de \'Gestion des Demandes\'. Merci de contacter votre administrateur iTop.',
'Portal:Refresh' => 'Rafraîchir',
'Portal:Back' => 'Retour',
'Portal:WelcomeUserOrg' => 'Bienvenue %1$s (%2$s)',
'Portal:TitleDetailsFor_Request' => 'Détail de la requête',
'Portal:ShowOngoing' => 'Requêtes en cours',
'Portal:ShowClosed' => 'Requêtes fermées',
'Portal:CreateNewRequest' => 'Créer une nouvelle requête',
'Portal:CreateNewRequestItil' => 'Créer une nouvelle requête',
'Portal:CreateNewIncidentItil' => 'Indiquer une panne',
'Portal:ChangeMyPassword' => 'Changer mon mot de passe',
'Portal:Disconnect' => 'Déconnexion',
'Portal:OpenRequests' => 'Mes requêtes en cours',
'Portal:ClosedRequests' => 'Mes requêtes fermées',
'Portal:ResolvedRequests' => 'Mes requêtes résolues',
'Portal:SelectService' => 'Choisissez un service dans le catalogue:',
'Portal:PleaseSelectOneService' => 'Veuillez choisir un service',
'Portal:SelectSubcategoryFrom_Service' => 'Choisissez une sous-catégorie du service %1$s:',
'Portal:PleaseSelectAServiceSubCategory' => 'Veuillez choisir une sous-catégorie',
'Portal:DescriptionOfTheRequest' => 'Entrez la description de votre requête:',
'Portal:TitleRequestDetailsFor_Request' => 'Détails de votre requête %1$s:',
'Portal:NoOpenRequest' => 'Aucune requête.',
'Portal:NoClosedRequest' => 'Aucune requête.',
'Portal:Button:ReopenTicket' => 'Réouvrir cette requête',
'Portal:Button:CloseTicket' => 'Clôre cette requête',
'Portal:Button:UpdateRequest' => 'Mettre à jour la requête',
'Portal:EnterYourCommentsOnTicket' => 'Vos commentaires à propos du traitement de cette requête:',
'Portal:ErrorNoContactForThisUser' => 'Erreur: l\'utilisateur courant n\'est pas associé à une Personne/Contact. Contactez votre administrateur.',
'Portal:Attachments' => 'Pièces jointes',
'Portal:AddAttachment' => ' Ajouter une pièce jointe ',
'Portal:RemoveAttachment' => ' Enlever la pièce jointe ',
'Portal:Attachment_No_To_Ticket_Name' => 'Pièce jointe #%1$d à %2$s (%3$s)',
'Portal:SelectRequestTemplate' => 'Sélectionnez un modèle de requête pour %1$s',
'Enum:Undefined' => 'Non défini',
'UI:DurationForm_Days_Hours_Minutes_Seconds' => '%1$s J %2$s H %3$s min %4$s s',
'UI:ModifyAllPageTitle' => 'Modification par lots',
'UI:Modify_N_ObjectsOf_Class' => 'Modification de %1$d objet(s) de type %2$s',
'UI:Modify_M_ObjectsOf_Class_OutOf_N' => 'Modification de %1$d (sur %3$d) objets de type %2$s',
'UI:Menu:ModifyAll' => 'Modifier...',
'UI:Button:ModifyAll' => 'Modifier',
'UI:Button:PreviewModifications' => 'Aperçu des modifications >>',
'UI:ModifiedObject' => 'Objet Modifié',
'UI:BulkModifyStatus' => 'Opération',
'UI:BulkModifyStatus+' => '',
'UI:BulkModifyErrors' => 'Erreur',
'UI:BulkModifyErrors+' => '',
'UI:BulkModifyStatusOk' => 'Ok',
'UI:BulkModifyStatusError' => 'Erreur',
'UI:BulkModifyStatusModified' => 'Modifié',
'UI:BulkModifyStatusSkipped' => 'Ignoré',
'UI:BulkModify_Count_DistinctValues' => '%1$d valeurs distinctes:',
'UI:BulkModify:Value_Exists_N_Times' => '%1$s, %2$d fois',
'UI:BulkModify:N_MoreValues' => '%1$d valeurs supplémentaires...',
'UI:AttemptingToSetAReadOnlyAttribute_Name' => 'Tentative de modification du champ en lecture seule: %1$s',
'UI:FailedToApplyStimuli' => 'L\'action a échoué',
'UI:StimulusModify_N_ObjectsOf_Class' => '%1$s: Modification de %2$d objet(s) de type %3$s',
'UI:CaseLogTypeYourTextHere' => 'Nouvelle entrée ici...',
'UI:CaseLog:Header_Date_UserName' => '%1$s - %2$s:',
'UI:CaseLog:InitialValue' => 'Valeur initiale:',
'UI:AttemptingToSetASlaveAttribute_Name' => 'Le champ %1$s ne peut pas être modifié car il est géré par une synchronisation avec une source de données. Valeur ignorée.',
'UI:ActionNotAllowed' => 'Vous n\'êtes pas autorisé à exécuter cette opération sur ces objets.',
'UI:BulkAction:NoObjectSelected' => 'Veuillez sélectionner au moins un objet pour cette opération.',
'UI:AttemptingToChangeASlaveAttribute_Name' => 'Le champ %1$s ne peut pas être modifié car il est géré par une synchronisation avec une source de données. Valeur inchangée.',
'UI:Pagination:HeaderSelection' => 'Total: %1$s éléments / %2$s éléments sélectionné(s).',
'UI:Pagination:HeaderNoSelection' => 'Total: %1$s éléments.',
'UI:Pagination:PageSize' => '%1$s éléments par page',
'UI:Pagination:PagesLabel' => 'Pages:',
'UI:Pagination:All' => 'Tous',
'UI:HierarchyOf_Class' => 'Hiérarchie de type %1$s',
'UI:Preferences' => 'Préférences...',
'UI:ArchiveModeOn' => 'Activer le mode Archive',
'UI:ArchiveModeOff' => 'Désactiver le mode Archive',
'UI:ArchiveMode:Banner' => 'Mode Archive',
'UI:ArchiveMode:Banner+' => 'Les objets archivés sont visibles, et aucune modification n\'est possible',
'UI:FavoriteOrganizations' => 'Organisations Favorites',
'UI:FavoriteOrganizations+' => 'Cochez dans la liste ci-dessous les organisations que vous voulez voir listées dans le menu principal. '.
'Ceci n\'est pas un réglage de sécurité. Les objets de toutes les organisations sont toujours visibles en choisissant "Toutes les Organisations" dans le menu.',
'UI:FavoriteLanguage' => 'Langue de l\'interface utilisateur',
'UI:Favorites:SelectYourLanguage' => 'Choisissez votre langue préférée',
'UI:FavoriteOtherSettings' => 'Autres réglages',
'UI:Favorites:Default_X_ItemsPerPage' => 'Longueur par défaut des listes: %1$s éléments par page',
'UI:Favorites:ShowObsoleteData' => 'Voir les données obsolètes',
'UI:Favorites:ShowObsoleteData+' => 'Voir les données obsolètes dans les résultats de recherche et dans les listes de choix',
'UI:NavigateAwayConfirmationMessage' => 'Toute modification sera perdue.',
'UI:CancelConfirmationMessage' => 'Vous allez perdre vos modifications. Voulez-vous continuer ?',
'UI:AutoApplyConfirmationMessage' => 'Des modifications n\'ont pas encore été prises en compte. Voulez-vous qu\'elles soient prises en compte automatiquement ?',
'UI:Create_Class_InState' => 'Créer l\'objet %1$s dans l\'état: ',
'UI:OrderByHint_Values' => 'Ordre de tri: %1$s',
'UI:Menu:AddToDashboard' => 'Ajouter au Tableau de Bord...',
'UI:Button:Refresh' => 'Rafraîchir',
'UI:Button:GoPrint' => 'Imprimer...',
'UI:ExplainPrintable' => 'Cliquez sur les icones %1$s pour cacher des éléments lors de l\'impression.<br/>Utilisez la fonction "Aperçu avant impression" de votre navigateur pour prévisualiser avant d\'imprimer.<br/>Note: cet en-tête ainsi que les icones %1$s ne seront pas imprimés.',
'UI:ConfigureThisList' => 'Configurer Cette Liste...',
'UI:ListConfigurationTitle' => 'Configuration de la liste',
'UI:ColumnsAndSortOrder' => 'Colonnes et ordre de tri:',
'UI:UseDefaultSettings' => 'Utiliser les réglages par défaut',
'UI:UseSpecificSettings' => 'Utiliser les réglages suivants:',
'UI:Display_X_ItemsPerPage' => 'Afficher %1$s éléments par page',
'UI:UseSavetheSettings' => 'Enregistrer ces réglages',
'UI:OnlyForThisList' => 'Seulement pour cette liste',
'UI:ForAllLists' => 'Défaut pour toutes les listes',
'UI:ExtKey_AsLink' => '%1$s (Lien)',
'UI:ExtKey_AsFriendlyName' => '%1$s (Nom)',
'UI:ExtField_AsRemoteField' => '%1$s (%2$s)',
'UI:Button:MoveUp' => 'Monter',
'UI:Button:MoveDown' => 'Descendre',
'UI:OQL:UnknownClassAndFix' => 'La classe "%1$s" est inconnue. Essayez plutôt "%2$s".',
'UI:OQL:UnknownClassNoFix' => 'La classe "%1$s" est inconnue',
'UI:Dashboard:Edit' => 'Editer cette page...',
'UI:Dashboard:Revert' => 'Revenir à la version d\'origine...',
'UI:Dashboard:RevertConfirm' => 'Toutes modifications apportées à la version d\'origine seront perdues. Veuillez confirmer l\'opération.',
'UI:ExportDashBoard' => 'Exporter dans un fichier',
'UI:ImportDashBoard' => 'Importer depuis un fichier...',
'UI:ImportDashboardTitle' => 'Importation depuis un fichier',
'UI:ImportDashboardText' => 'Choisissez un fichier de définition de tableau de bord :',
'UI:DashletCreation:Title' => 'Créer un Indicateur',
'UI:DashletCreation:Dashboard' => 'Tableau de bord',
'UI:DashletCreation:DashletType' => 'Type d\'Indicateur',
'UI:DashletCreation:EditNow' => 'Modifier le tableau de bord',
'UI:DashboardEdit:Title' => 'Editeur de tableau de bord',
'UI:DashboardEdit:DashboardTitle' => 'Titre',
'UI:DashboardEdit:AutoReload' => 'Réactualisation automatique',
'UI:DashboardEdit:AutoReloadSec' => 'Réactualisation toutes les (secondes)',
'UI:DashboardEdit:AutoReloadSec+' => 'Le minimum permis est de %1$d secondes',
'UI:DashboardEdit:Layout' => 'Mise en page',
'UI:DashboardEdit:Properties' => 'Propriétés du tableau de bord',
'UI:DashboardEdit:Dashlets' => 'Indicateurs',
'UI:DashboardEdit:DashletProperties' => 'Propriétés de l\'Indicateur',
'UI:Form:Property' => 'Propriété',
'UI:Form:Value' => 'Valeur',
'UI:DashletUnknown:Label' => 'Inconnu',
'UI:DashletUnknown:Description' => 'Element inconnu (est peut-être désinstallé)',
'UI:DashletUnknown:RenderText:View' => 'Impossible d\'effectuer le rendu de cet élément.',
'UI:DashletUnknown:RenderText:Edit' => 'Impossible d\'effectuer le rendu de cet élément (classe "%1$s"). Vérifiez avec votre administrateur si il est toujours disponible.',
'UI:DashletUnknown:RenderNoDataText:Edit' => 'Impossible d\'effectuer le rendu de cet élément (classe "%1$s").',
'UI:DashletUnknown:Prop-XMLConfiguration' => 'Configuration (XML)',
'UI:DashletProxy:Label' => 'Proxy',
'UI:DashletProxy:Description' => 'Proxy',
'UI:DashletProxy:RenderNoDataText:Edit' => 'Impossible d\'effectuer le rendu de cet élément externe (classe "%1$s").',
'UI:DashletProxy:Prop-XMLConfiguration' => 'Configuration (XML)',
'UI:DashletPlainText:Label' => 'Texte',
'UI:DashletPlainText:Description' => 'Texte pur (pas de mise en forme)',
'UI:DashletPlainText:Prop-Text' => 'Texte',
'UI:DashletPlainText:Prop-Text:Default' => 'Veuillez saisir votre texte ici...',
'UI:DashletObjectList:Label' => 'Liste d\'objets',
'UI:DashletObjectList:Description' => 'Liste d\'objets',
'UI:DashletObjectList:Prop-Title' => 'Titre',
'UI:DashletObjectList:Prop-Query' => 'Requête OQL',
'UI:DashletObjectList:Prop-Menu' => 'Menu',
'UI:DashletGroupBy:Prop-Title' => 'Titre',
'UI:DashletGroupBy:Prop-Query' => 'Requête OQL',
'UI:DashletGroupBy:Prop-Style' => 'Style',
'UI:DashletGroupBy:Prop-GroupBy' => 'Grouper par',
'UI:DashletGroupBy:Prop-GroupBy:Hour' => 'Heure de %1$s (0-23)',
'UI:DashletGroupBy:Prop-GroupBy:Month' => 'Mois de %1$s (1 - 12)',
'UI:DashletGroupBy:Prop-GroupBy:DayOfWeek' => 'Jour de la semaine pour %1$s',
'UI:DashletGroupBy:Prop-GroupBy:DayOfMonth' => 'Jour du mois pour %1$s',
'UI:DashletGroupBy:Prop-GroupBy:Select-Hour' => '%1$s (heure)',
'UI:DashletGroupBy:Prop-GroupBy:Select-Month' => '%1$s (mois)',
'UI:DashletGroupBy:Prop-GroupBy:Select-DayOfWeek' => '%1$s (jour de la semaine)',
'UI:DashletGroupBy:Prop-GroupBy:Select-DayOfMonth' => '%1$s (jour du mois)',
'UI:DashletGroupBy:MissingGroupBy' => 'Veuillez sélectionner le champ sur lequel les objets seront groupés',
'UI:DashletGroupByPie:Label' => 'Secteurs',
'UI:DashletGroupByPie:Description' => 'Graphique à secteur',
'UI:DashletGroupByBars:Label' => 'Barres',
'UI:DashletGroupByBars:Description' => 'Graphique en Barres',
'UI:DashletGroupByTable:Label' => 'Table',
'UI:DashletGroupByTable:Description' => 'Table',
// New in 2.5
'UI:DashletGroupBy:Prop-Function' => 'Fonction d\'agrégation',
'UI:DashletGroupBy:Prop-FunctionAttribute' => 'Attribut',
'UI:DashletGroupBy:Prop-OrderDirection' => 'Type de tri',
'UI:DashletGroupBy:Prop-OrderField' => 'Trié par',
'UI:DashletGroupBy:Prop-Limit' => 'Limite',
'UI:DashletGroupBy:Order:asc' => 'Croissant',
'UI:DashletGroupBy:Order:desc' => 'Décroissant',
'UI:GroupBy:count' => 'Nombre',
'UI:GroupBy:count+' => 'Nombre d\'éléments',
'UI:GroupBy:sum' => 'Somme',
'UI:GroupBy:sum+' => 'Somme des %1$s',
'UI:GroupBy:avg' => 'Moyenne',
'UI:GroupBy:avg+' => 'Moyenne des %1$s',
'UI:GroupBy:min' => 'Minimum',
'UI:GroupBy:min+' => 'Minimum des %1$s',
'UI:GroupBy:max' => 'Maximum',
'UI:GroupBy:max+' => 'Maximum des %1$s',
// ---
'UI:DashletHeaderStatic:Label' => 'En-tête',
'UI:DashletHeaderStatic:Description' => 'En-tête présenté comme une barre horizontale',
'UI:DashletHeaderStatic:Prop-Title' => 'Titre',
'UI:DashletHeaderStatic:Prop-Title:Default' => 'Contacts',
'UI:DashletHeaderStatic:Prop-Icon' => 'Icône',
'UI:DashletHeaderDynamic:Label' => 'En-tête dynamique',
'UI:DashletHeaderDynamic:Description' => 'En-tête avec statistiques (regroupements)',
'UI:DashletHeaderDynamic:Prop-Title' => 'Titre',
'UI:DashletHeaderDynamic:Prop-Title:Default' => 'Contacts',
'UI:DashletHeaderDynamic:Prop-Icon' => 'Icône',
'UI:DashletHeaderDynamic:Prop-Subtitle' => 'Sous-titre',
'UI:DashletHeaderDynamic:Prop-Subtitle:Default' => 'Contacts',
'UI:DashletHeaderDynamic:Prop-Query' => 'Requête OQL',
'UI:DashletHeaderDynamic:Prop-GroupBy' => 'Grouper par',
'UI:DashletHeaderDynamic:Prop-Values' => 'Valeurs',
'UI:DashletBadge:Label' => 'Badge',
'UI:DashletBadge:Description' => 'Icône représentant une classe d\'objets, ainsi que des liens pour créer/rechercher',
'UI:DashletBadge:Prop-Class' => 'Classe',
'DayOfWeek-Sunday' => 'Dimanche',
'DayOfWeek-Monday' => 'Lundi',
'DayOfWeek-Tuesday' => 'Mardi',
'DayOfWeek-Wednesday' => 'Mercredi',
'DayOfWeek-Thursday' => 'Jeudi',
'DayOfWeek-Friday' => 'Vendredi',
'DayOfWeek-Saturday' => 'Samedi',
'Month-01' => 'Janvier',
'Month-02' => 'Février',
'Month-03' => 'Mars',
'Month-04' => 'Avril',
'Month-05' => 'Mai',
'Month-06' => 'Juin',
'Month-07' => 'Juillet',
'Month-08' => 'Août',
'Month-09' => 'Septembre',
'Month-10' => 'Octobre',
'Month-11' => 'Novembre',
'Month-12' => 'Décembre',
// Short version for the DatePicker
'DayOfWeek-Sunday-Min' => 'Di',
'DayOfWeek-Monday-Min' => 'Lu',
'DayOfWeek-Tuesday-Min' => 'Ma',
'DayOfWeek-Wednesday-Min' => 'Me',
'DayOfWeek-Thursday-Min' => 'Je',
'DayOfWeek-Friday-Min' => 'Ve',
'DayOfWeek-Saturday-Min' => 'Sa',
'Month-01-Short' => 'Jan',
'Month-02-Short' => 'Fév',
'Month-03-Short' => 'Mar',
'Month-04-Short' => 'Avr',
'Month-05-Short' => 'Mai',
'Month-06-Short' => 'Juin',
'Month-07-Short' => 'Juil',
'Month-08-Short' => 'Août',
'Month-09-Short' => 'Sept',
'Month-10-Short' => 'Oct',
'Month-11-Short' => 'Nov',
'Month-12-Short' => 'Déc',
'Calendar-FirstDayOfWeek' => 1, // 0 = Sunday, 1 = Monday, etc...
'UI:Menu:ShortcutList' => 'Créer un Raccourci...',
'UI:ShortcutListDlg:Title' => 'Créer un raccourci pour la liste',
'UI:ShortcutRenameDlg:Title' => 'Renommer le raccourci',
'UI:ShortcutDelete:Confirm' => 'Veuillez confirmer la suppression du ou des raccourci(s)',
'Menu:MyShortcuts' => 'Mes raccourcis', // Duplicated into itop-welcome-itil (will be removed from here...)
'Class:Shortcut' => 'Raccourci',
'Class:Shortcut+' => '',
'Class:Shortcut/Attribute:name' => 'Nom',
'Class:Shortcut/Attribute:name+' => 'Label utilisé dans le menu et comme titre de la page',
'Class:ShortcutOQL' => 'Raccourci vers une liste d\'objets',
'Class:ShortcutOQL+' => '',
'Class:ShortcutOQL/Attribute:oql' => 'Requête',
'Class:ShortcutOQL/Attribute:oql+' => 'Requête de définition de l\'ensemble des objets',
'Class:ShortcutOQL/Attribute:auto_reload' => 'Réactualisation automatique',
'Class:ShortcutOQL/Attribute:auto_reload/Value:none' => 'Désactivée',
'Class:ShortcutOQL/Attribute:auto_reload/Value:custom' => 'Personnalisée',
'Class:ShortcutOQL/Attribute:auto_reload_sec' => 'Réactualisation toutes les (secondes)',
'Class:ShortcutOQL/Attribute:auto_reload_sec/tip' => 'Le minimum permis est de %1$d secondes',
'UI:FillAllMandatoryFields' => 'Veuillez remplir tous les champs obligatoires.',
'UI:ValueMustBeSet' => 'Veuillez spécifier une valeur pour ce champ',
'UI:ValueMustBeChanged' => 'Veuillez modifier la valeur de ce champ',
'UI:ValueInvalidFormat' => 'Format invalide',
'UI:CSVImportConfirmTitle' => 'Veuillez confirmer cette opération',
'UI:CSVImportConfirmMessage' => 'Etes-vous sûr(e) de vouloir faire cela ?',
'UI:CSVImportError_items' => 'Erreurs: %1$d',
'UI:CSVImportCreated_items' => 'Créations: %1$d',
'UI:CSVImportModified_items' => 'Modifications: %1$d',
'UI:CSVImportUnchanged_items' => 'Inchangés: %1$d',
'UI:Button:Remove' => 'Enlever',
'UI:AddAnExisting_Class' => 'Ajouter des objets de type %1$s...',
'UI:SelectionOf_Class' => 'Sélection d\'objets de type %1$s',
'UI:AboutBox' => 'A propos d\'iTop...',
'UI:About:Title' => 'A propos d\'iTop',
'UI:About:DataModel' => 'Modèle de données',
'UI:About:Support' => 'Informations pour le support',
'UI:About:Licenses' => 'Licences',
'UI:About:InstallationOptions' => 'Options d\'installation',
'UI:About:Extension_Version' => 'Version: %1$s',
'UI:About:ManualExtensionSource' => 'Extension',
'UI:DisconnectedDlgMessage' => 'Vous êtes déconnecté(e). Vous devez vous identifier pour pouvoir continuer à utiliser l\'application.',
'UI:DisconnectedDlgTitle' => 'Attention !',
'UI:LoginAgain' => 'S\'identifier',
'UI:StayOnThePage' => 'Rester sur cette page',
'ExcelExporter:ExportMenu' => 'Exporter pour Excel...',
'ExcelExporter:ExportDialogTitle' => 'Export au format Excel',
'ExcelExporter:ExportButton' => 'Exporter',
'ExcelExporter:DownloadButton' => 'Télécharger %1$s',
'ExcelExporter:RetrievingData' => 'Récupération des données...',
'ExcelExporter:BuildingExcelFile' => 'Construction du fichier Excel...',
'ExcelExporter:Done' => 'Terminé.',
'ExcelExport:AutoDownload' => 'Téléchargement automatique dès que le fichier est prêt',
'ExcelExport:PreparingExport' => 'Préparation de l\'export...',
'ExcelExport:Statistics' => 'Statistiques',
'portal:legacy_portal' => 'Portail Utilisateurs',
'portal:backoffice' => 'Console iTop',
'UI:CurrentObjectIsLockedBy_User' => 'L\'objet est verrouillé car il est en train d\'être modifié par %1$s.',
'UI:CurrentObjectIsLockedBy_User_Explanation' => 'L\'objet est en train d\'être modifié par %1$s. Vos modifications ne peuvent pas être acceptées car elles risquent d\'être écrasées.',
'UI:CurrentObjectLockExpired' => 'Le verrouillage interdisant les modifications concurrentes a expiré.',
'UI:CurrentObjectLockExpired_Explanation' => 'Le verrouillage interdisant les modifications concurrentes a expiré. Vos modifications ne peuvent pas être acceptées car d\'autres utilisateurs peuvent modifier cet objet.',
'UI:ConcurrentLockKilled' => 'Le verrouillage en édition de l\'objet courant a été supprimé.',
'UI:Menu:KillConcurrentLock' => 'Supprimer le verrouillage !',
'UI:Menu:ExportPDF' => 'Exporter en PDF...',
'UI:Menu:PrintableVersion' => 'Version imprimable',
'UI:BrowseInlineImages' => 'Parcourir les images...',
'UI:UploadInlineImageLegend' => 'Ajouter une image',
'UI:SelectInlineImageToUpload' => 'Sélectionnez l\'image à ajouter',
'UI:AvailableInlineImagesLegend' => 'Images disponibles',
'UI:NoInlineImage' => 'Il n\'y a aucune image de disponible sur le serveur. Utilisez le bouton "Parcourir" (ci-dessus) pour sélectionner une image sur votre ordinateur et la télécharger sur le serveur.',
'UI:ToggleFullScreen' => 'Agrandir / Minimiser',
'UI:Button:ResetImage' => 'Récupérer l\'image initiale',
'UI:Button:RemoveImage' => 'Supprimer l\'image',
'UI:UploadNotSupportedInThisMode' => 'La modification d\'images ou de fichiers n\'est pas supportée dans ce mode.',
// Search form
'UI:Search:Toggle' => 'Réduire / Ouvrir',
'UI:Search:AutoSubmit:DisabledHint' => 'La soumission automatique a été desactivée pour cette classe',
'UI:Search:Criterion:MoreMenu:AddCriteria' => 'Ajouter un critère',
// - Add new criteria button
'UI:Search:AddCriteria:List:RecentlyUsed:Title' => 'Récents',
'UI:Search:AddCriteria:List:MostPopular:Title' => 'Populaires',
'UI:Search:AddCriteria:List:Others:Title' => 'Autres',
'UI:Search:AddCriteria:List:RecentlyUsed:Placeholder' => 'Aucun.',
// - Criteria titles
// - Default widget
'UI:Search:Criteria:Title:Default:Any' => '%1$s : Indifférent',
'UI:Search:Criteria:Title:Default:Empty' => '%1$s vide',
'UI:Search:Criteria:Title:Default:NotEmpty' => '%1$s non vide',
'UI:Search:Criteria:Title:Default:Equals' => '%1$s égal %2$s',
'UI:Search:Criteria:Title:Default:Contains' => '%1$s contient %2$s',
'UI:Search:Criteria:Title:Default:StartsWith' => '%1$s commence par %2$s',
'UI:Search:Criteria:Title:Default:EndsWith' => '%1$s fini par %2$s',
'UI:Search:Criteria:Title:Default:RegExp' => '%1$s correspond à %2$s',
'UI:Search:Criteria:Title:Default:GreaterThan' => '%1$s > %2$s',
'UI:Search:Criteria:Title:Default:GreaterThanOrEquals' => '%1$s >= %2$s',
'UI:Search:Criteria:Title:Default:LessThan' => '%1$s < %2$s',
'UI:Search:Criteria:Title:Default:LessThanOrEquals' => '%1$s <= %2$s',
'UI:Search:Criteria:Title:Default:Different' => '%1$s ≠ %2$s',
'UI:Search:Criteria:Title:Default:Between' => '%1$s entre [%2$s]',
'UI:Search:Criteria:Title:Default:BetweenDates' => '%1$s [%2$s]',
'UI:Search:Criteria:Title:Default:BetweenDates:All' => '%1$s : Indifférent',
'UI:Search:Criteria:Title:Default:BetweenDates:From' => '%1$s depuis %2$s',
'UI:Search:Criteria:Title:Default:BetweenDates:Until' => '%1$s jusqu\'à %2$s',
'UI:Search:Criteria:Title:Default:Between:All' => '%1$s : Indifférent',
'UI:Search:Criteria:Title:Default:Between:From' => '%1$s à partir de %2$s',
'UI:Search:Criteria:Title:Default:Between:Until' => '%1$s jusqu\'à %2$s',
// - Numeric widget
// None yet
// - DateTime widget
'UI:Search:Criteria:Title:DateTime:Between' => '%2$s <= %1$s <= %3$s',
// - Enum widget
'UI:Search:Criteria:Title:Enum:In' => '%1$s : %2$s',
'UI:Search:Criteria:Title:Enum:In:Many' => '%1$s : %2$s et %3$s autres',
'UI:Search:Criteria:Title:Enum:In:All' => '%1$s : Indifférent',
// - External key widget
'UI:Search:Criteria:Title:ExternalKey:Empty' => '%1$s est renseigné',
'UI:Search:Criteria:Title:ExternalKey:NotEmpty' => '%1$s n\'est pas renseigné',
'UI:Search:Criteria:Title:ExternalKey:Equals' => '%1$s %2$s',
'UI:Search:Criteria:Title:ExternalKey:In' => '%1$s : %2$s',
'UI:Search:Criteria:Title:ExternalKey:In:Many' => '%1$s : %2$s et %3$s autres',
'UI:Search:Criteria:Title:ExternalKey:In:All' => '%1$s : Indifférent',
// - Hierarchical key widget
'UI:Search:Criteria:Title:HierarchicalKey:Empty' => '%1$s est renseigné',
'UI:Search:Criteria:Title:HierarchicalKey:NotEmpty' => '%1$s n\'est pas renseigné',
'UI:Search:Criteria:Title:HierarchicalKey:Equals' => '%1$s %2$s',
'UI:Search:Criteria:Title:HierarchicalKey:In' => '%1$s : %2$s',
'UI:Search:Criteria:Title:HierarchicalKey:In:Many' => '%1$s : %2$s et %3$s autres',
'UI:Search:Criteria:Title:HierarchicalKey:In:All' => '%1$s : Indifférent',
/// - Criteria operators
// - Default widget
'UI:Search:Criteria:Operator:Default:Empty' => 'Vide',
'UI:Search:Criteria:Operator:Default:NotEmpty' => 'Non vide',
'UI:Search:Criteria:Operator:Default:Equals' => 'Egal',
'UI:Search:Criteria:Operator:Default:Between' => 'Compris entre',
// - String widget
'UI:Search:Criteria:Operator:String:Contains' => 'Contient',
'UI:Search:Criteria:Operator:String:StartsWith' => 'Commence par',
'UI:Search:Criteria:Operator:String:EndsWith' => 'Fini par',
'UI:Search:Criteria:Operator:String:RegExp' => 'Exp. rég.',
// - Numeric widget
'UI:Search:Criteria:Operator:Numeric:Equals' => 'Egal', // => '=',
'UI:Search:Criteria:Operator:Numeric:GreaterThan' => 'Supérieur', // => '>',
'UI:Search:Criteria:Operator:Numeric:GreaterThanOrEquals' => 'Sup. / égal', // > '>=',
'UI:Search:Criteria:Operator:Numeric:LessThan' => 'Inférieur', // => '<',
'UI:Search:Criteria:Operator:Numeric:LessThanOrEquals' => 'Inf. / égal', // > '<=',
'UI:Search:Criteria:Operator:Numeric:Different' => 'Différent', // => '≠',
// - Other translations
'UI:Search:Value:Filter:Placeholder' => 'Filtrez...',
'UI:Search:Value:Search:Placeholder' => 'Recherchez...',
'UI:Search:Value:Autocomplete:StartTyping' => 'Commencez à taper pour voir les valeurs possibles.',
'UI:Search:Value:Autocomplete:Wait' => 'Patientez ...',
'UI:Search:Value:Autocomplete:NoResult' => 'Aucun résultat.',
'UI:Search:Value:Toggler:CheckAllNone' => 'Cocher tout / aucun',
'UI:Search:Value:Toggler:CheckAllNoneFiltered' => 'Cocher tout / aucun visibles',
// - Widget other translations
'UI:Search:Criteria:Numeric:From' => 'De',
'UI:Search:Criteria:Numeric:Until' => 'à',
'UI:Search:Criteria:Numeric:PlaceholderFrom' => 'Indifférent',
'UI:Search:Criteria:Numeric:PlaceholderUntil' => 'Indifférent',
'UI:Search:Criteria:DateTime:From' => 'Depuis',
'UI:Search:Criteria:DateTime:FromTime' => 'Depuis',
'UI:Search:Criteria:DateTime:Until' => 'jusqu\'à',
'UI:Search:Criteria:DateTime:UntilTime' => 'jusqu\'à',
'UI:Search:Criteria:DateTime:PlaceholderFrom' => 'Indifférent',
'UI:Search:Criteria:DateTime:PlaceholderFromTime' => 'Indifférent',
'UI:Search:Criteria:DateTime:PlaceholderUntil' => 'Indifférent',
'UI:Search:Criteria:DateTime:PlaceholderUntilTime' => 'Indifférent',
'UI:Search:Criteria:Raw:Filtered' => 'Filtré',
'UI:Search:Criteria:Raw:FilteredOn' => 'Filtré sur %1$s',
));
//
// Expression to Natural language
//
// Natural-language rendering of OQL expressions for the French locale:
// logical/comparison operators, duration units (short and long forms)
// and special verbs used when an expression is displayed as text.
Dict::Add('FR FR', 'French', 'Français', array(
// logical / comparison operators
'Expression:Operator:AND' => ' ET ',
'Expression:Operator:OR' => ' OU ',
'Expression:Operator:=' => ' : ',
// one-letter duration units: day, week, month, year
'Expression:Unit:Short:DAY' => 'j',
'Expression:Unit:Short:WEEK' => 's',
'Expression:Unit:Short:MONTH' => 'm',
'Expression:Unit:Short:YEAR' => 'a',
// spelled-out duration units
'Expression:Unit:Long:DAY' => 'jour(s)',
'Expression:Unit:Long:HOUR' => 'heure(s)',
'Expression:Unit:Long:MINUTE' => 'minute(s)',
// special verbs
'Expression:Verb:NOW' => 'maintenant',
'Expression:Verb:ISNULL' => ' : non défini',
));
| changi67/itop | dictionaries/fr.dictionary.itop.ui.php | PHP | agpl-3.0 | 89,352 |
import { IFilters } from './types';
const pagesManagerFactory = (postsService, $q, _, moment, instagramService) => {
function PagesManager(
blogId: string,
status: string,
maxResults: number,
sort: string,
sticky: boolean,
highlight: boolean,
noSynd: boolean,
scheduled: boolean = false) { // eslint-disable-line padded-blocks
const SORTS = {
editorial: { order: { order: 'desc', missing: '_last', unmapped_type: 'long' } },
updated_first: { _updated: { order: 'desc', missing: '_last', unmapped_type: 'long' } },
newest_first: { _created: { order: 'desc', missing: '_last', unmapped_type: 'long' } },
oldest_first: { _created: { order: 'asc', missing: '_last', unmapped_type: 'long' } },
editorial_asc: { order: { order: 'asc', missing: '_last', unmapped_type: 'long' } },
};
const self = this;
/**
 * A single page of posts within the paginated timeline.
 * @param {array} [posts=[]] - posts the page starts with
 */
class Page {
    posts: Array<any>;

    constructor(initialPosts: Array<any> = []) {
        this.posts = initialPosts;
    }

    // arrow property keeps `this` bound to the instance
    addPost = (newPost) => {
        this.posts.push(newPost);
    }
}
/**
 * Fetch a single page of posts through the posts service.
 * @param {integer} pageIndex - index of the page to fetch (first page is 1)
 * @param {integer} [pageSize=self.maxResults] - number of posts per page
 * @returns {promise} resolves with the API response; pagination metadata is cached on self
 */
const retrievePage = (pageIndex, pageSize?) => {
    const options: IFilters = { status: self.status, authors: self.authors };

    // The sticky flag is only meaningful for open, non-scheduled posts;
    // other statuses show sticky and non-sticky posts together.
    // @TODO refactor when refactoring the page manager
    if (self.status === 'open' && !scheduled) {
        options.sticky = sticky;
    }

    if (noSynd) {
        options.noSyndication = true;
    }

    options.scheduled = scheduled;

    // highlight is an opt-in filter: only send it when enabled
    if (self.highlight) {
        options.highlight = self.highlight;
    }

    return postsService
        .getPosts(self.blogId, options, pageSize || self.maxResults, pageIndex)
        .then((response) => {
            // keep the posts metadata fresh (total number of posts and pages)
            self.meta = response._meta;
            return response;
        });
};
/**
 * Toggle filtering of the timeline on the posts' highlight attribute.
 * @param {boolean} isHighlighted - whether to filter on highlighted posts
 * @returns {promise}
 */
const changeHighlight = (isHighlighted: boolean) => {
    // drop every page loaded so far, then reload with the new filter
    self.pages = [];
    self.highlight = isHighlighted;
    return fetchNewPage();
};
/**
 * Switch the sort order used for subsequent requests, then reload from scratch.
 * @param {string} newSort - name of the new order (see SORTS)
 * @returns {promise}
 */
const changeOrder = (newSort: string) => {
    // pages fetched under the previous ordering are no longer valid
    self.pages = [];
    self.sort = newSort;
    return fetchNewPage();
};
/**
 * Restrict the timeline to the given author ids, then reload from scratch.
 * @param {array} authorIds - list of author ids to filter with
 * @returns {promise}
 */
const setAuthors = (authorIds: Array<any>) => {
    // previously loaded pages no longer match the new filter
    self.pages = [];
    self.authors = authorIds;
    return fetchNewPage();
};
/**
 * Fetch the next page of posts and add it to the Pages Manager.
 * @returns {promise}
 */
const fetchNewPage = () => {
    // On the very first load, hit the updates endpoint once so that the
    // latest-update timestamp is known before any page is displayed.
    const firstLoad = self.pages.length === 0;
    const preamble = firstLoad
        ? self.retrieveUpdate().then((updates) => {
            updateLatestDates(updates._items);
        })
        : $q.when();

    return preamble.then(() => loadPage(self.pages.length + 1));
};
/**
 * Retrieve all the posts updated since the latest known update date.
 * When the result spans several pages, every page is fetched and the items
 * are concatenated into a single response object.
 * @param {boolean} [shouldApplyUpdates=false] - If true, apply the updates to the posts list
 * @returns {promise} resolves with a response whose `_items` holds ALL updated posts
 */
const retrieveUpdate = (shouldApplyUpdates) => {
    const date = self.latestUpdatedDate ? self.latestUpdatedDate.utc().format() : undefined;
    const filters = {
        updatedAfter: date,
        excludeDeleted: false,
        scheduled: false,
    };
    return postsService.getPosts(self.blogId, filters, undefined, 1)
        .then((updates) => {
            const meta = updates._meta;
            // If there is no other page, or no reference date was given
            // (we only look at the meta / latest date), the first page is enough.
            if (meta.total <= meta.max_results * meta.page || !angular.isDefined(date)) {
                return updates;
            }
            // Otherwise fetch the remaining pages and concatenate everything.
            // FIX: was Math.floor(total / max) + 1, which requested one extra
            // (empty) page whenever total was an exact multiple of max_results.
            const lastPage = Math.ceil(meta.total / meta.max_results);
            const promises = [];
            for (let i = meta.page + 1; i <= lastPage; i++) {
                // FIX: reuse `filters` so follow-up requests carry the same
                // `scheduled: false` flag as the first one (was inconsistent).
                promises.push(postsService.getPosts(self.blogId, filters, undefined, i));
            }
            return $q.all(promises).then((laterPages) => {
                // FIX: include the first page in the concatenation — it was
                // previously discarded, losing its `_items` entirely.
                const allPages = [updates].concat(laterPages);
                return angular.extend({}, updates, {
                    _items: [].concat(...allPages.map((p) => p._items)),
                    _meta: angular.extend(meta, { max_results: meta.max_results * allPages.length }),
                });
            });
        })
        // Apply the updates if requested by the caller
        .then((updates) => {
            if (shouldApplyUpdates) {
                applyUpdates(updates._items);
            }
            return updates;
        });
};
/**
 * Apply a batch of updated posts to the pages held in memory: removes posts
 * that no longer belong to the current view, replaces the ones already
 * displayed, appends new matching ones, and finally refreshes the
 * latest-update timestamp from the batch.
 * @param {array} updates - List of updated posts
 */
const applyUpdates = (updates) => {
    // eslint-disable-next-line
    updates.forEach((post) => {
        const existingPostIndexes = getPostPageIndexes(post);
        const sameStatus = self.status === post.post_status;
        const postSticky = post.sticky === sticky;
        const statusIsOpen = self.status === 'open';
        if (angular.isDefined(existingPostIndexes)) {
            // post already in the list
            // tslint:disable-next-line:curly
            if (post.deleted) removePost(post);
            // NOTE(review): a deleted post falls through to the checks below
            // after removal — confirm a `return` was not intended here.
            // NOTE(review): `&&` binds tighter than `||`, so this reads as
            // A || (B && C) || (D && E) — assumed intentional.
            if (post.post_status !== self.status || statusIsOpen
                && post.sticky !== sticky || self.highlight && !post.lb_highlight) {
                removePost(post);
            } else {
                // still matches the view: swap in the new version and rebuild pages
                updatePost(post);
                createPagesWithPosts(self.allPosts(), true);
            }
        } else if (!post.deleted && sameStatus && (self.status !== 'open' || postSticky)) {
            // post doesn't exist in the list
            addPost(post);
        }
    });
    // update date
    updateLatestDates(updates);
};
/**
 * Replace the old post with the new updated post.
 * Matching is done on post ids instead of relying on positions.
 * Mutates self.pages in place and returns nothing.
 */
const updatePost = (updatedPost) => {
    for (const currentPage of self.pages) {
        currentPage.posts.forEach((candidate, idx) => {
            if (candidate._id === updatedPost._id) {
                currentPage.posts[idx] = updatedPost;
            }
        });
    }
};
/**
 * Receives edit flags information, loops over the local pages to find every
 * post with the given id, updates its `edit_flag` and triggers the callback
 * with the updated post.
 */
const updatePostFlag = (postId, flag, cb) => {
    self.pages.forEach((currentPage) => {
        currentPage.posts
            .filter((post) => post._id === postId)
            .forEach((post) => {
                post.edit_flag = flag;
                cb(post);
            });
    });
};
/**
 * Update the latest update date by using the given posts
 * @param {array} posts - List of posts
 */
const updateLatestDates = (posts) => {
    posts.forEach((post) => {
        const candidate = moment(post._updated);
        // keep the most recent `_updated` value seen so far
        const noDateYet = !angular.isDefined(self.latestUpdatedDate);
        if (noDateYet || self.latestUpdatedDate.diff(candidate) < 0) {
            self.latestUpdatedDate = candidate;
        }
    });
};
/**
 * Recreate the pages from the given posts
 * @param {array} [posts=self.allPosts()] - List of posts
 * @param {boolean} resetPages - Clear the array of pages or not
 */
const createPagesWithPosts = (postsParams, resetPages) => {
    let posts = postsParams || self.allPosts();
    if (resetPages) {
        self.pages = [];
    }
    // respect the configured sort order
    const sortBy = Object.keys(SORTS[self.sort])[0];
    const orderBy = SORTS[self.sort][sortBy].order;
    posts = _.sortByOrder(posts, sortBy, orderBy);
    let page;
    let processInstagram = false;
    posts.forEach((post, index) => {
        // open a fresh page every `maxResults` posts
        if (index % self.maxResults === 0) {
            page = new Page();
        }
        page.addPost(post);
        if (page.posts.length === self.maxResults) {
            addPage(page);
            page = undefined;
        }
        // process embeds if ANY post has one; the previous plain assignment
        // meant only the last post in the list was taken into account
        processInstagram = processInstagram || instagramService.postHasEmbed(post.items);
    });
    // flush the last, partially filled page
    if (angular.isDefined(page)) {
        addPage(page);
    }
    if (processInstagram) {
        instagramService.processEmbeds();
    }
};
/**
 * Load the content of the given page
 * @param {integer} page - index of the desired page
 * @returns {promise}
 */
const loadPage = (page) => {
    // fall back to the current number of local pages when no index is given
    const pageIndex = page || self.pages.length;
    return retrievePage(pageIndex).then((posts) => {
        createPagesWithPosts(posts._items, false);
        return posts;
    });
};
/**
 * Returns the page index and the post index of the given post in the local pages
 * @param {Post} postToFind - post to find in the pages
 * @returns {array|undefined} - [pageIndex, postIndex], or undefined when the
 *     post is missing or cannot be found in the local pages
 */
const getPostPageIndexes = (postToFind) => {
    // A missing post cannot be located. The previous code returned [0, 0]
    // here, which wrongly reported the first post of the first page as a
    // match and could make callers remove/skip the wrong post.
    if (!postToFind) {
        return undefined;
    }
    for (let pageIndex = 0; pageIndex < self.pages.length; pageIndex++) {
        const page = self.pages[pageIndex];
        for (let postIndex = 0; postIndex < page.posts.length; postIndex++) {
            if (page.posts[postIndex]._id === postToFind._id) {
                return [pageIndex, postIndex];
            }
        }
    }
    return undefined;
};
/**
 * Add a post or a list of posts to the local pages
 * @param {Post|array<Post>} postsParams - posts to be added to the pages
 */
const addPost = (postsParams) => {
    const allPosts = self.allPosts();
    // accept either a single post or an array of posts
    const incoming = angular.isArray(postsParams) ? postsParams : [postsParams];
    // only keep posts that are not already part of the local pages
    incoming.forEach((post) => {
        const alreadyThere = angular.isDefined(getPostPageIndexes(post));
        if (!alreadyThere) {
            allPosts.push(post);
        }
    });
    // rebuild the pages from scratch with the merged list
    createPagesWithPosts(allPosts, true);
    // and keep the latest-update marker in sync
    updateLatestDates(allPosts);
};
/**
 * Remove a post in the local pages
 * @param {Post} postToRemove - post to be removed from the pages
 */
const removePost = (postToRemove) => {
    const indexes = getPostPageIndexes(postToRemove);
    // Guard on the lookup result, not on the argument: the previous check of
    // `postToRemove` let a post that is absent from the local pages fall
    // through with `indexes === undefined` and crash on `indexes[0]`.
    if (angular.isDefined(indexes)) {
        const pageIndex = indexes[0];
        const postIndex = indexes[1];
        self.pages[pageIndex].posts.splice(postIndex, 1);
        createPagesWithPosts(self.allPosts(), true);
    }
};
/**
 * Add the given page to the Page Manager
 * @param {Page} page - a page instance
 */
const addPage = (page) => {
    // pages are appended in order; createPagesWithPosts builds them sequentially
    self.pages.push(page);
};
/**
 * Returns the number of posts in the local pages
 * @returns {integer}
 */
const count = () => {
    let total = 0;
    self.pages.forEach((currentPage) => {
        total += currentPage.posts.length;
    });
    return total;
};
// Public API of a PagesManager instance
angular.extend(self, {
    /**
     * List of page instances
     */
    pages: [],
    /**
     * Represent the meta data of posts (total number for instance)
     */
    meta: {},
    /**
     * Set the initial order (see self.SORTS)
     */
    sort: sort || 'editorial',
    /**
     * Id of the blog whose posts are requested from postsService
     */
    blogId: blogId,
    /**
     * Post status used to filter requests; 'open' gets special sticky
     * handling in applyUpdates
     */
    status: status,
    /**
     * Filter by post's highlight field
     */
    highlight: highlight,
    /**
     * Sticky filter value, compared against `post.sticky`
     */
    sticky: sticky,
    changeHighlight: changeHighlight,
    /**
     * Change the order in the future posts request, remove exising post and load a new page
     */
    changeOrder: changeOrder,
    /**
     * Initial authors filter
     */
    authors: [],
    /**
     * Filter by author ids
     */
    setAuthors: setAuthors,
    /**
     * Number of results per page
     */
    maxResults: maxResults,
    /**
     *
     * Remove a post from the page
     */
    removePost: removePost,
    /**
     * Latest updated date. Used for retrieving updates since this date.
     */
    latestUpdatedDate: undefined,
    /**
     * Fetch a new page of posts
     */
    fetchNewPage: fetchNewPage,
    /**
     * Return the latest available updates
     */
    retrieveUpdate: retrieveUpdate,
    /**
     * Return all the posts from the local pages
     */
    allPosts: () => {
        // flatten array of pages into a single array of posts
        return [].concat(...self.pages.map((page) => page.posts));
    },
    /**
     * Updates flag information, look for the right post and updates it
     */
    updatePostFlag: updatePostFlag,
    /**
     * Returns the number of posts in the local pages
     */
    count: count,
});
}
// return the Pages Manager constructor
return PagesManager;
};
// AngularJS DI annotation (minification-safe) for the factory defined above
pagesManagerFactory.$inject = ['postsService', '$q', 'lodash', 'moment', 'instagramService'];
export default pagesManagerFactory;
| superdesk/liveblog | client/app/scripts/liveblog-edit/pages-manager.service.ts | TypeScript | agpl-3.0 | 16,604 |
/**
* @author Olaf Radicke <briefkasten@olaf-rdicke.de>
* @date 2013-2014
* @copyright
* Copyright (C) 2013 Olaf Radicke <briefkasten@olaf-rdicke.de>
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or later
* version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include <core/controller/BasicProjectDataController.h>
#include <core/model/MakefileData.h>
#include <core/model/ProjectData.h>
#include <core/model/UserSession.h>
#include <tnt/httprequest.h>
#include <tnt/httpreply.h>
#include <cxxtools/log.h>
#include <string>
namespace Tww {
namespace Core {
log_define("Core.BasicProjectDataController")
/**
 * Copy the submitted values of the "basic project data" form into the
 * projectData and makefileData models. No validation or persistence happens
 * here; that is done by worker().
 * @param qparam query/form parameters of the current request
 */
void BasicProjectDataController::formDataAssume ( tnt::QueryParams& qparam ){
    log_debug("################## formDataAssume ####################" );
    // project data
    this->projectData.setProjectName(
        qparam.arg<std::string>("form_projectname")
    );
    this->projectData.setSourceCodeHeader(
        qparam.arg<std::string>("form_sourcecodeheader")
    );
    this->projectData.setDoxygenTemplates(
        qparam.arg<bool>("form_doxygen_templates")
    );
    this->projectData.setFormToken(
        qparam.arg<bool>("form_csrf_token")
    );
    log_debug("form_flash_messages_support: "
        << qparam.arg<bool>("form_flash_messages_support") );
    this->projectData.setFlashMessagesSupport(
        qparam.arg<bool>("form_flash_messages_support")
    );
    log_debug("isFlashMessagesSupport(): "
        << this->projectData.isFlashMessagesSupport() );
    this->projectData.setCxxtoolsLoging(
        qparam.arg<bool>("form_cxxtools_loging")
    );
    this->projectData.setRouteReverse(
        qparam.arg<bool>("form_route_reverse")
    );
    this->projectData.setTntDBsupport(
        qparam.arg<bool>("form_tntdb_support")
    );
    // Linker flags: start from the tntnet base line and append one flag per
    // enabled feature. This produces exactly the same strings as the former
    // four-branch cascade, which assigned the value up to three times and
    // duplicated the flag literals.
    std::string linkerFlags("-I./src -ltntnet");
    if ( this->projectData.isCxxtoolsLoging( ) ) {
        linkerFlags += " -lcxxtools";
    }
    if ( this->projectData.isTntDB() ) {
        linkerFlags += " -ltntdb";
    }
    this->makefileData.setCppLinkerFlags(linkerFlags);
    // makefile data
    this->makefileData.setBinName(
        qparam.arg<std::string>("form_binaryfilename")
    );
}
/**
 * Request handler for the "basic project data" page. Depending on which form
 * button was pressed it either validates and saves the entered data, copies a
 * licence template into the source code header, or (plain page load) reads
 * the previously stored project/makefile data from the session directory.
 * Validation feedback is exposed through this->feedback / this->warning.
 * @param request current HTTP request (unused here, part of the handler interface)
 * @param reply current HTTP reply (unused here, part of the handler interface)
 * @param qparam query/form parameters of the current request
 */
void BasicProjectDataController::worker (
    tnt::HttpRequest& request,
    tnt::HttpReply& reply,
    tnt::QueryParams& qparam
){
    // query parameters
    std::string form_licence_template =
        qparam.arg<std::string>("form_licence_template");
    bool form_assume_licence =
        qparam.arg<bool>("form_assume_licence");
    bool form_save_button =
        qparam.arg<bool>("form_save_button");
    // both persistence files live in the per-user session directory
    std::stringstream file_projectdata;
    file_projectdata << this->userSession.getSessionPath() << "/tntwebwizard.pro";
    std::stringstream file_makefile;
    file_makefile << this->userSession.getSessionPath() << "/Makefile.tnt";
    log_debug("file_projectdata: " << file_projectdata.str() );
    log_debug("file_makefile: " << file_makefile.str() );
    log_debug("form_save_button: " << form_save_button );
    log_debug("form_assume_licence: " << form_assume_licence );
    log_debug("form_licence_template: " << form_licence_template );
    // save button pressed
    if ( form_save_button ) {
        log_debug( "###### " << __LINE__ << " ######" );
        this->formDataAssume ( qparam );
        // validate the mandatory fields; on the first failure set the
        // feedback/warning flags and abort without writing anything
        if( qparam.arg<std::string>("form_projectname") == "" ){
            this->feedback="The project name is not set!.";
            this->warning = true;
            return;
        }
        if( qparam.arg<std::string>("form_sourcecodeheader") == "" ){
            this->feedback="The source code header is not set!.";
            this->warning = true;
            return;
        }
        if( qparam.arg<std::string>("form_binaryfilename") == "" ){
            this->feedback="It is no binary file name set!.";
            this->warning = true;
            return;
        }
        // all checks passed: persist both models
        this->projectData.write( file_projectdata.str() );
        this->makefileData.write( file_makefile.str() );
        this->feedback="The basic project date is saved.";
        this->warning = false;
        return;
    } else {
        log_debug( "###### " << __LINE__ << " ######" );
        // assume licence button pressed
        if ( form_assume_licence ) {
            log_debug( "###### " << __LINE__ << " ######" );
            // take over the current form values first so they are not lost
            this->formDataAssume ( qparam );
            log_debug("add licence: " << form_licence_template );
            if( form_licence_template == "" ){
                this->feedback="It is no licence template select!.";
                this->warning = true;
                return;
            } else {
                this->assumeLicence( form_licence_template );
            }
        // page (first) load
        } else {
            log_debug( "###### " << __LINE__ << " ######" );
            // read project configuration...
            log_debug("file_projectdata: " << file_projectdata.str() );
            this->projectData.read( file_projectdata.str() );
            this->makefileData.read( file_makefile.str() );
        }
    }
}
/**
 * Replace the project's source code header with one of the well-known licence
 * boilerplate texts.
 * @param _licence_template template key; one of "AGPL3", "Apache", "MIT",
 *        "GPL3" or "BSD2". Any other value results in an empty header.
 */
void BasicProjectDataController::assumeLicence( const std::string& _licence_template ){
    std::stringstream licencetext;
    if ( _licence_template == "AGPL3" ) {
        licencetext << "Copyright (C) <year> <name of author> \n"
            << "\n"
            << "This program is free software: you can redistribute it and/or modify \n"
            << "it under the terms of the GNU Affero General Public License as published by \n"
            << "the Free Software Foundation, either version 3 of the License, or later \n"
            << "version. \n"
            << "\n"
            << "This program is distributed in the hope that it will be useful, \n"
            << "but WITHOUT ANY WARRANTY; without even the implied warranty of \n"
            << "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the \n"
            << "GNU Affero General Public License for more details. \n"
            << "\n"
            << "You should have received a copy of the GNU Affero General Public License \n"
            << "along with this program. If not, see <http://www.gnu.org/licenses/>. \n";
    }
    if ( _licence_template == "Apache" ) {
        licencetext << " \n"
            << "Licensed to the Apache Software Foundation (ASF) under one \n"
            << "or more contributor license agreements. See the NOTICE file \n"
            << "distributed with this work for additional information \n"
            << "regarding copyright ownership. The ASF licenses this file \n"
            << "to you under the Apache License, Version 2.0 (the \n"
            << "\"License\"); you may not use this file except in compliance \n"
            << "with the License. You may obtain a copy of the License at \n"
            << " \n"
            << " http://www.apache.org/licenses/LICENSE-2.0 \n"
            << " \n"
            << "Unless required by applicable law or agreed to in writing, \n"
            << "software distributed under the License is distributed on an \n"
            << "\"AS IS\" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY \n"
            << "KIND, either express or implied. See the License for the \n"
            << "specific language governing permissions and limitations \n"
            // the boilerplate was previously truncated here; the official text
            // ends with "...limitations under the License."
            << "under the License. \n";
    }
    if ( _licence_template == "MIT" ) {
        licencetext << " \n"
            << "Copyright (c) <year> <copyright holders> \n"
            << " \n"
            << "Permission is hereby granted, free of charge, to any person obtaining a copy \n"
            << "of this software and associated documentation files (the \"Software\"), to deal \n"
            << "in the Software without restriction, including without limitation the rights \n"
            << "to use, copy, modify, merge, publish, distribute, sublicense, and/or sell \n"
            << "copies of the Software, and to permit persons to whom the Software is \n"
            << "furnished to do so, subject to the following conditions: \n"
            << " \n"
            << "The above copyright notice and this permission notice shall be included in \n"
            << "all copies or substantial portions of the Software. \n"
            << " \n"
            << "THE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR \n"
            << "IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, \n"
            << "FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE \n"
            << "AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER \n"
            << "LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, \n"
            << "OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN \n"
            << "THE SOFTWARE. \n";
    }
    if ( _licence_template == "GPL3" ) {
        licencetext << " \n"
            << "<one line to give the program's name and a brief idea of what it does.> \n"
            << "Copyright (C) <year> <name of author> \n"
            << " \n"
            << "This program is free software: you can redistribute it and/or modify \n"
            << "it under the terms of the GNU General Public License as published by \n"
            << "the Free Software Foundation, either version 3 of the License, or \n"
            << "(at your option) any later version. \n"
            << " \n"
            << "This program is distributed in the hope that it will be useful, \n"
            << "but WITHOUT ANY WARRANTY; without even the implied warranty of \n"
            << "MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the \n"
            << "GNU General Public License for more details. \n"
            << " \n"
            << "You should have received a copy of the GNU General Public License \n"
            << "along with this program. If not, see <http://www.gnu.org/licenses/>. \n" ;
    }
    if ( _licence_template == "BSD2" ) {
        licencetext << " \n"
            << "Copyright (c) <YEAR>, <OWNER>\n"
            << "All rights reserved.\n"
            << " \n"
            << "Redistribution and use in source and binary forms, with or without \n"
            << "modification, are permitted provided that the following conditions \n"
            << " are met:\n"
            << " \n"
            << "1. Redistributions of source code must retain the above copyright \n"
            << " notice, this list of conditions and the following disclaimer.\n"
            << " \n"
            << "2. Redistributions in binary form must reproduce the above \n"
            << " copyright notice, this list of conditions and the following \n"
            << " disclaimer in the documentation and/or other materials provided \n"
            << " with the distribution. \n"
            << " \n"
            << "THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS \n"
            << "\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT \n"
            << "LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS \n"
            << "FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE \n"
            << "COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, \n"
            << "INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, \n"
            << "BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; \n"
            << "LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER \n"
            << "CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT \n"
            << "LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN \n"
            << "ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE \n"
            << "POSSIBILITY OF SUCH DAMAGE. \n" ;
    }
    this->projectData.setSourceCodeHeader( licencetext.str() );
}
} // namespace core
} // namespace Tww
| OlafRadicke/tntwebwizard | src/core/controller/BasicProjectDataController.cpp | C++ | agpl-3.0 | 12,421 |
/*******************************************************************************
* Copyright (C) 2020, exense GmbH
*
* This file is part of STEP
*
* STEP is free software: you can redistribute it and/or modify
* it under the terms of the GNU Affero General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* STEP is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with STEP. If not, see <http://www.gnu.org/licenses/>.
******************************************************************************/
package step.core.plans.builder;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import org.junit.Test;
import step.core.artefacts.AbstractArtefact;
import step.core.artefacts.CheckArtefact;
import step.core.plans.Plan;
/**
 * Unit tests for {@link PlanBuilder}: the error handling of the builder
 * protocol (missing root, unbalanced block, empty stack) and the nominal
 * build of a plan with a single root artefact.
 */
public class PlanBuilderTest {

	@Test
	public void testNoRoot() {
		// add() without a prior startBlock() must be rejected
		Exception ex = null;
		try {
			PlanBuilder.create().add(artefact("Root"));
		} catch(Exception e) {
			ex = e;
		}
		assertNotNull(ex);
		assertEquals("No root artefact defined. Please first call the method startBlock to define the root element", ex.getMessage());
	}

	/**
	 * startBlock() without a matching endBlock() must be rejected at build
	 * time. (Method name fixed: was the typo "testUnablancedBlock".)
	 */
	@Test
	public void testUnbalancedBlock() {
		Exception ex = null;
		try {
			PlanBuilder.create().startBlock(artefact("Root")).build();
		} catch(Exception e) {
			ex = e;
		}
		assertNotNull(ex);
		assertEquals("Unbalanced block CheckArtefact [Root]", ex.getMessage());
	}

	@Test
	public void testEmptyStack() {
		// endBlock() before any startBlock() must be rejected
		Exception ex = null;
		try {
			PlanBuilder.create().endBlock().build();
		} catch(Exception e) {
			ex = e;
		}
		assertNotNull(ex);
		assertEquals("Empty stack. Please first call startBlock before calling endBlock", ex.getMessage());
	}

	@Test
	public void test() {
		// nominal case: a single root block yields a plan rooted at that artefact
		Plan plan = PlanBuilder.create().startBlock(artefact("Root")).endBlock().build();
		assertEquals("Root", plan.getRoot().getDescription());
	}

	/**
	 * Helper creating a {@link CheckArtefact} carrying the given description.
	 */
	public static AbstractArtefact artefact(String description) {
		CheckArtefact a = new CheckArtefact();
		a.setDescription(description);
		return a;
	}
}
| denkbar/step | step-controller/step-controller-server/src/test/java/step/core/plans/builder/PlanBuilderTest.java | Java | agpl-3.0 | 2,456 |
/*
* This is part of Geomajas, a GIS framework, http://www.geomajas.org/.
*
* Copyright 2008-2016 Geosparc nv, http://www.geosparc.com/, Belgium.
*
* The program is available in open source according to the GNU Affero
* General Public License. All contributions in this program are covered
* by the Geomajas Contributors License Agreement. For full licensing
* details, see LICENSE.txt in the project root.
*/
package org.geomajas.internal.service.pipeline;
import org.geomajas.global.GeomajasException;
import org.geomajas.layer.feature.attribute.StringAttribute;
import org.geomajas.service.pipeline.AbstractPipelineInterceptor;
import org.geomajas.service.pipeline.PipelineContext;
/**
* Pipeline interceptor for testing execution modes.
*
* @author Joachim Van der Auwera
*/
/**
 * Pipeline interceptor used by the pipeline tests to verify execution modes.
 * Before the steps run it appends a configurable marker to the response value
 * and reports the configured execution mode; after the steps it appends a
 * second marker.
 *
 * @author Joachim Van der Auwera
 */
public class ForTestInterceptor extends AbstractPipelineInterceptor<StringAttribute> {

	private ExecutionMode executionMode = ExecutionMode.EXECUTE_ALL;

	private String fromMsg = "_before_";

	private String toMsg = "_after_";

	/** Set the execution mode which {@link #beforeSteps} should report. */
	public void setExecutionMode(ExecutionMode executionMode) {
		this.executionMode = executionMode;
	}

	/** Set the marker appended to the response before the pipeline steps. */
	public void setFromMsg(String fromMsg) {
		this.fromMsg = fromMsg;
	}

	/** Set the marker appended to the response after the pipeline steps. */
	public void setToMsg(String toMsg) {
		this.toMsg = toMsg;
	}

	@Override
	public ExecutionMode beforeSteps(PipelineContext context, StringAttribute response)
			throws GeomajasException {
		String tagged = response.getValue() + fromMsg;
		response.setValue(tagged);
		return executionMode;
	}

	@Override
	public void afterSteps(PipelineContext context, StringAttribute response) throws GeomajasException {
		String tagged = response.getValue() + toMsg;
		response.setValue(tagged);
	}
}
| geomajas/geomajas-project-server | impl/src/test/java/org/geomajas/internal/service/pipeline/ForTestInterceptor.java | Java | agpl-3.0 | 1,635 |